-rw-r--r--  paramiko/sftp_client.py  |  5
-rw-r--r--  paramiko/sftp_file.py    |  7
-rwxr-xr-x  tests/test_sftp.py       |  3
-rw-r--r--  tests/test_sftp_big.py   | 18
4 files changed, 20 insertions, 13 deletions
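The same pattern runs through all four files: the caller now stats the remote file and passes its size into prefetch(), rather than prefetch() calling stat() internally. Below is a minimal caller-side sketch of the new call pattern, assuming a reachable SFTP server; the host, credentials, and remote path are placeholders and not part of this commit.

# Caller-side sketch of the new prefetch(file_size) call pattern.
# Host, credentials, and paths are placeholders only.
import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect('example.com', username='user', password='secret')
sftp = client.open_sftp()

file_size = sftp.stat('/remote/data.bin').st_size   # caller obtains the size once
with sftp.open('/remote/data.bin', 'rb') as fr:
    fr.prefetch(file_size)                           # size is now supplied by the caller
    data = fr.read()                                 # reads are served from the prefetched buffers

client.close()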
diff --git a/paramiko/sftp_client.py b/paramiko/sftp_client.py
index 89840eaa..e4c3a37d 100644
--- a/paramiko/sftp_client.py
+++ b/paramiko/sftp_client.py
@@ -685,9 +685,10 @@ class SFTPClient(BaseSFTP, ClosingContextManager):
             .. versionadded:: 1.10
         """
+        file_size = self.stat(remotepath).st_size
         with self.open(remotepath, 'rb') as fr:
-            file_size = self.stat(remotepath).st_size
-            fr.prefetch()
+            fr.prefetch(file_size)
+
             size = 0
             while True:
                 data = fr.read(32768)
diff --git a/paramiko/sftp_file.py b/paramiko/sftp_file.py
index d0a37da3..8f73dcbf 100644
--- a/paramiko/sftp_file.py
+++ b/paramiko/sftp_file.py
@@ -379,7 +379,7 @@ class SFTPFile (BufferedFile):
"""
self.pipelined = pipelined
- def prefetch(self):
+ def prefetch(self, file_size=None):
"""
Pre-fetch the remaining contents of this file in anticipation of future
`.read` calls. If reading the entire file, pre-fetching can
@@ -393,12 +393,11 @@ class SFTPFile (BufferedFile):
         .. versionadded:: 1.5.1
         """
-        size = self.stat().st_size
         # queue up async reads for the rest of the file
         chunks = []
         n = self._realpos
-        while n < size:
-            chunk = min(self.MAX_REQUEST_SIZE, size - n)
+        while n < file_size:
+            chunk = min(self.MAX_REQUEST_SIZE, file_size - n)
             chunks.append((n, chunk))
             n += chunk
         if len(chunks) > 0:
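The rewritten loop splits the range from the current read position up to the caller-supplied file_size into MAX_REQUEST_SIZE pieces and queues an asynchronous read for each. A standalone sketch of just that chunk-planning step is shown here; plan_chunks is a hypothetical helper name, and 32768 mirrors SFTPFile.MAX_REQUEST_SIZE.

# Standalone sketch of the chunk-planning loop inside prefetch().
MAX_REQUEST_SIZE = 32768  # mirrors SFTPFile.MAX_REQUEST_SIZE

def plan_chunks(realpos, file_size):
    chunks = []
    n = realpos
    while n < file_size:
        chunk = min(MAX_REQUEST_SIZE, file_size - n)
        chunks.append((n, chunk))   # (offset, length) of one async read request
        n += chunk
    return chunks

# A 100000-byte file read from offset 0 becomes four requests:
# [(0, 32768), (32768, 32768), (65536, 32768), (98304, 1696)]
print(plan_chunks(0, 100000))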
diff --git a/tests/test_sftp.py b/tests/test_sftp.py
index cb8f7f84..55762c21 100755
--- a/tests/test_sftp.py
+++ b/tests/test_sftp.py
@@ -696,7 +696,8 @@ class SFTPTest (unittest.TestCase):
                 f.readv([(0, 12)])
 
             with sftp.open(FOLDER + '/zero', 'r') as f:
-                f.prefetch()
+                file_size = f.stat().st_size
+                f.prefetch(file_size)
                 f.read(100)
         finally:
             sftp.unlink(FOLDER + '/zero')
diff --git a/tests/test_sftp_big.py b/tests/test_sftp_big.py
index abed27b8..cfad5682 100644
--- a/tests/test_sftp_big.py
+++ b/tests/test_sftp_big.py
@@ -132,7 +132,8 @@ class BigSFTPTest (unittest.TestCase):
             start = time.time()
             with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
-                f.prefetch()
+                file_size = f.stat().st_size
+                f.prefetch(file_size)
 
                 # read on odd boundaries to make sure the bytes aren't getting scrambled
                 n = 0
@@ -171,7 +172,8 @@ class BigSFTPTest (unittest.TestCase):
             chunk = 793
             for i in range(10):
                 with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
-                    f.prefetch()
+                    file_size = f.stat().st_size
+                    f.prefetch(file_size)
                     base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
                     offsets = [base_offset + j * chunk for j in range(100)]
                     # randomly seek around and read them out
@@ -245,9 +247,11 @@ class BigSFTPTest (unittest.TestCase):
             for i in range(10):
                 with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
-                    f.prefetch()
+                    file_size = f.stat().st_size
+                    f.prefetch(file_size)
             with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
-                f.prefetch()
+                file_size = f.stat().st_size
+                f.prefetch(file_size)
                 for n in range(1024):
                     data = f.read(1024)
                     self.assertEqual(data, kblob)
@@ -275,7 +279,8 @@ class BigSFTPTest (unittest.TestCase):
             self.assertEqual(sftp.stat('%s/hongry.txt' % FOLDER).st_size, 1024 * 1024)
 
             with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
-                f.prefetch()
+                file_size = f.stat().st_size
+                f.prefetch(file_size)
                 data = f.read(1024)
                 self.assertEqual(data, kblob)
@@ -353,7 +358,8 @@ class BigSFTPTest (unittest.TestCase):
             # try to read it too.
             with sftp.open('%s/hongry.txt' % FOLDER, 'r', 128 * 1024) as f:
-                f.prefetch()
+                file_size = f.stat().st_size
+                f.prefetch(file_size)
                 total = 0
                 while total < 1024 * 1024:
                     total += len(f.read(32 * 1024))