author    Chris Rose <offline@offby1.net>    2018-05-17 10:13:38 -0400
committer Chris Rose <offline@offby1.net>    2018-05-17 10:13:38 -0400
commit    7f2c35052183b400827d9949a68b41c90f90a32d (patch)
tree      fea4a1ec04b7ee3ced14d61e8b6cf3f479e22704 /tests/test_sftp_big.py
parent    52551321a2297bdb966869fa719e584c868dd857 (diff)
Blacken Paramiko on 2.4
Diffstat (limited to 'tests/test_sftp_big.py')
-rw-r--r--    tests/test_sftp_big.py    214
1 file changed, 124 insertions, 90 deletions
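
The change below is mechanical: it re-runs the Black formatter over the test module, so single-quoted strings become double-quoted and calls that overflow the line limit get wrapped. A minimal before/after illustration of the restyle, lifted from the first hunk (assuming Black is run with the 79-character line length the wrapping here implies):

# Before: single quotes, one long call.
with sftp.open('%s/file%d.txt' % (sftp.FOLDER, i), 'w', 1) as f:
    f.write('this is file #%d.\n' % i)

# After Black: double quotes, the long call wrapped across lines.
with sftp.open(
    "%s/file%d.txt" % (sftp.FOLDER, i), "w", 1
) as f:
    f.write("this is file #%d.\n" % i)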
diff --git a/tests/test_sftp_big.py b/tests/test_sftp_big.py
index a659098d..7f74d5f6 100644
--- a/tests/test_sftp_big.py
+++ b/tests/test_sftp_big.py
@@ -37,6 +37,7 @@ from .util import slow
@slow
class TestBigSFTP(object):
+
def test_1_lots_of_files(self, sftp):
"""
create a bunch of files over the same session.
@@ -44,22 +45,24 @@ class TestBigSFTP(object):
numfiles = 100
try:
for i in range(numfiles):
- with sftp.open('%s/file%d.txt' % (sftp.FOLDER, i), 'w', 1) as f:
- f.write('this is file #%d.\n' % i)
- sftp.chmod('%s/file%d.txt' % (sftp.FOLDER, i), o660)
+ with sftp.open(
+ "%s/file%d.txt" % (sftp.FOLDER, i), "w", 1
+ ) as f:
+ f.write("this is file #%d.\n" % i)
+ sftp.chmod("%s/file%d.txt" % (sftp.FOLDER, i), o660)
# now make sure every file is there, by creating a list of filenames
# and reading them in random order.
numlist = list(range(numfiles))
while len(numlist) > 0:
r = numlist[random.randint(0, len(numlist) - 1)]
- with sftp.open('%s/file%d.txt' % (sftp.FOLDER, r)) as f:
- assert f.readline() == 'this is file #%d.\n' % r
+ with sftp.open("%s/file%d.txt" % (sftp.FOLDER, r)) as f:
+ assert f.readline() == "this is file #%d.\n" % r
numlist.remove(r)
finally:
for i in range(numfiles):
try:
- sftp.remove('%s/file%d.txt' % (sftp.FOLDER, i))
+ sftp.remove("%s/file%d.txt" % (sftp.FOLDER, i))
except:
pass
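
For context, test_1 exercises reuse of a single SFTP session for many small files. A standalone sketch of that pattern, assuming a connected paramiko.SFTPClient named sftp and a hypothetical scratch directory "tmp":

import random

# Write many small files over one session (bufsize=1 means line-buffered).
for i in range(100):
    with sftp.open("tmp/file%d.txt" % i, "w", 1) as f:
        f.write("this is file #%d.\n" % i)

# Read them back in random order to confirm every file landed.
order = list(range(100))
random.shuffle(order)
for i in order:
    with sftp.open("tmp/file%d.txt" % i) as f:
        assert f.readline() == "this is file #%d.\n" % i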
@@ -67,52 +70,56 @@ class TestBigSFTP(object):
def test_2_big_file(self, sftp):
"""
write a 1MB file with no buffering.
"""
- kblob = (1024 * b'x')
+ kblob = (1024 * b"x")
start = time.time()
try:
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'w') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "w") as f:
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
- sys.stderr.write('.')
- sys.stderr.write(' ')
+ sys.stderr.write(".")
+ sys.stderr.write(" ")
- assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
+ assert (
+ sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ )
end = time.time()
- sys.stderr.write('%ds ' % round(end - start))
-
+ sys.stderr.write("%ds " % round(end - start))
+
start = time.time()
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'r') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "r") as f:
for n in range(1024):
data = f.read(1024)
assert data == kblob
end = time.time()
- sys.stderr.write('%ds ' % round(end - start))
+ sys.stderr.write("%ds " % round(end - start))
finally:
- sftp.remove('%s/hongry.txt' % sftp.FOLDER)
+ sftp.remove("%s/hongry.txt" % sftp.FOLDER)
def test_3_big_file_pipelined(self, sftp):
"""
write a 1MB file, with no linefeeds, using pipelining.
"""
- kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
+ kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
start = time.time()
try:
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
- sys.stderr.write('.')
- sys.stderr.write(' ')
+ sys.stderr.write(".")
+ sys.stderr.write(" ")
- assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
+ assert (
+ sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ )
end = time.time()
- sys.stderr.write('%ds ' % round(end - start))
-
+ sys.stderr.write("%ds " % round(end - start))
+
start = time.time()
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
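
The pipelined-write/prefetch-read pair that tests 3 through 8 lean on, as a minimal sketch (paramiko's public SFTPFile API; sftp is again an assumed connected client, "demo.bin" a hypothetical path):

# Pipelined writes: don't block on a server ack for every request.
with sftp.open("demo.bin", "wb") as f:
    f.set_pipelined(True)
    for _ in range(1024):
        f.write(b"x" * 1024)

# Prefetched reads: queue read-ahead requests for the whole file up front.
with sftp.open("demo.bin", "rb") as f:
    size = f.stat().st_size
    f.prefetch(size)
    data = f.read(size)
assert len(data) == 1024 * 1024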
@@ -130,31 +137,35 @@ class TestBigSFTP(object):
n += chunk
end = time.time()
- sys.stderr.write('%ds ' % round(end - start))
+ sys.stderr.write("%ds " % round(end - start))
finally:
- sftp.remove('%s/hongry.txt' % sftp.FOLDER)
+ sftp.remove("%s/hongry.txt" % sftp.FOLDER)
def test_4_prefetch_seek(self, sftp):
- kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
+ kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
- sys.stderr.write('.')
- sys.stderr.write(' ')
-
- assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
-
+ sys.stderr.write(".")
+ sys.stderr.write(" ")
+
+ assert (
+ sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ )
+
start = time.time()
k2blob = kblob + kblob
chunk = 793
for i in range(10):
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
- base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
+ base_offset = (512 * 1024) + 17 * random.randint(
+ 1000, 2000
+ )
offsets = [base_offset + j * chunk for j in range(100)]
# randomly seek around and read them out
for j in range(100):
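
test_4's point is that reads at arbitrary offsets should be satisfied out of already-prefetched data rather than issuing fresh requests. A compact sketch of that access pattern, under the same assumptions as above:

with sftp.open("demo.bin", "rb") as f:
    f.prefetch(f.stat().st_size)
    for offset in (17, 4096, 512 * 1024 + 793):
        f.seek(offset)
        chunk = f.read(793)  # served from the prefetch buffers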
@@ -166,29 +177,33 @@ class TestBigSFTP(object):
assert data == k2blob[n_offset:n_offset + chunk]
offset += chunk
end = time.time()
- sys.stderr.write('%ds ' % round(end - start))
+ sys.stderr.write("%ds " % round(end - start))
finally:
- sftp.remove('%s/hongry.txt' % sftp.FOLDER)
+ sftp.remove("%s/hongry.txt" % sftp.FOLDER)
def test_5_readv_seek(self, sftp):
- kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
+ kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
- sys.stderr.write('.')
- sys.stderr.write(' ')
+ sys.stderr.write(".")
+ sys.stderr.write(" ")
- assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
+ assert (
+ sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ )
start = time.time()
k2blob = kblob + kblob
chunk = 793
for i in range(10):
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
- base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
+ base_offset = (512 * 1024) + 17 * random.randint(
+ 1000, 2000
+ )
# make a bunch of offsets and put them in random order
offsets = [base_offset + j * chunk for j in range(100)]
readv_list = []
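
readv() batches several (offset, length) requests into one call and yields the chunks back in request order; test_5 checks that against deliberately shuffled offsets. A minimal sketch:

with sftp.open("demo.bin", "rb") as f:
    ranges = [(512 * 1024, 16), (0, 16), (1024, 16)]  # deliberately unordered
    for data in f.readv(ranges):
        assert len(data) == 16  # one chunk per requested range, in order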
@@ -202,60 +217,64 @@ class TestBigSFTP(object):
n_offset = offset % 1024
assert next(ret) == k2blob[n_offset:n_offset + chunk]
end = time.time()
- sys.stderr.write('%ds ' % round(end - start))
+ sys.stderr.write("%ds " % round(end - start))
finally:
- sftp.remove('%s/hongry.txt' % sftp.FOLDER)
+ sftp.remove("%s/hongry.txt" % sftp.FOLDER)
def test_6_lots_of_prefetching(self, sftp):
"""
prefetch a 1MB file a bunch of times, discarding the file object
without using it, to verify that paramiko doesn't get confused.
"""
- kblob = (1024 * b'x')
+ kblob = (1024 * b"x")
try:
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'w') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "w") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
- sys.stderr.write('.')
- sys.stderr.write(' ')
+ sys.stderr.write(".")
+ sys.stderr.write(" ")
- assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
+ assert (
+ sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ )
for i in range(10):
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'r') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "r") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'r') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "r") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
for n in range(1024):
data = f.read(1024)
assert data == kblob
if n % 128 == 0:
- sys.stderr.write('.')
- sys.stderr.write(' ')
+ sys.stderr.write(".")
+ sys.stderr.write(" ")
finally:
- sftp.remove('%s/hongry.txt' % sftp.FOLDER)
-
+ sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+
def test_7_prefetch_readv(self, sftp):
"""
verify that prefetch and readv don't conflict with each other.
"""
- kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
+ kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
- sys.stderr.write('.')
- sys.stderr.write(' ')
-
- assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
+ sys.stderr.write(".")
+ sys.stderr.write(" ")
+
+ assert (
+ sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ )
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
file_size = f.stat().st_size
f.prefetch(file_size)
data = f.read(1024)
@@ -264,79 +283,94 @@ class TestBigSFTP(object):
chunk_size = 793
base_offset = 512 * 1024
k2blob = kblob + kblob
- chunks = [(base_offset + (chunk_size * i), chunk_size) for i in range(20)]
+ chunks = [
+ (base_offset + (chunk_size * i), chunk_size)
+ for i in range(20)
+ ]
for data in f.readv(chunks):
offset = base_offset % 1024
assert chunk_size == len(data)
assert k2blob[offset:offset + chunk_size] == data
base_offset += chunk_size
- sys.stderr.write(' ')
+ sys.stderr.write(" ")
finally:
- sftp.remove('%s/hongry.txt' % sftp.FOLDER)
-
+ sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+
def test_8_large_readv(self, sftp):
"""
verify that a very large readv is broken up correctly and still
returned as a single blob.
"""
- kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
+ kblob = bytes().join([struct.pack(">H", n) for n in range(512)])
try:
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "wb") as f:
f.set_pipelined(True)
for n in range(1024):
f.write(kblob)
if n % 128 == 0:
- sys.stderr.write('.')
- sys.stderr.write(' ')
+ sys.stderr.write(".")
+ sys.stderr.write(" ")
+
+ assert (
+ sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ )
- assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
-
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
+ with sftp.open("%s/hongry.txt" % sftp.FOLDER, "rb") as f:
data = list(f.readv([(23 * 1024, 128 * 1024)]))
assert len(data) == 1
data = data[0]
assert len(data) == 128 * 1024
-
- sys.stderr.write(' ')
+
+ sys.stderr.write(" ")
finally:
- sftp.remove('%s/hongry.txt' % sftp.FOLDER)
-
+ sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+
def test_9_big_file_big_buffer(self, sftp):
"""
write a 1MB file, with no linefeeds, and a big buffer.
"""
- mblob = (1024 * 1024 * 'x')
+ mblob = (1024 * 1024 * "x")
try:
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'w', 128 * 1024) as f:
+ with sftp.open(
+ "%s/hongry.txt" % sftp.FOLDER, "w", 128 * 1024
+ ) as f:
f.write(mblob)
- assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
+ assert (
+ sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ )
finally:
- sftp.remove('%s/hongry.txt' % sftp.FOLDER)
-
+ sftp.remove("%s/hongry.txt" % sftp.FOLDER)
+
def test_A_big_file_renegotiate(self, sftp):
"""
write a 1MB file, forcing key renegotiation in the middle.
"""
t = sftp.sock.get_transport()
t.packetizer.REKEY_BYTES = 512 * 1024
- k32blob = (32 * 1024 * 'x')
+ k32blob = (32 * 1024 * "x")
try:
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'w', 128 * 1024) as f:
+ with sftp.open(
+ "%s/hongry.txt" % sftp.FOLDER, "w", 128 * 1024
+ ) as f:
for i in range(32):
f.write(k32blob)
- assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
+ assert (
+ sftp.stat("%s/hongry.txt" % sftp.FOLDER).st_size == 1024 * 1024
+ )
assert t.H != t.session_id
-
+
# try to read it too.
- with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'r', 128 * 1024) as f:
+ with sftp.open(
+ "%s/hongry.txt" % sftp.FOLDER, "r", 128 * 1024
+ ) as f:
file_size = f.stat().st_size
f.prefetch(file_size)
total = 0
while total < 1024 * 1024:
total += len(f.read(32 * 1024))
finally:
- sftp.remove('%s/hongry.txt' % sftp.FOLDER)
+ sftp.remove("%s/hongry.txt" % sftp.FOLDER)
t.packetizer.REKEY_BYTES = pow(2, 30)
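
Finally, test_A forces a key renegotiation mid-transfer by shrinking the packetizer's rekey threshold, exactly as the hunk above shows. A hedged sketch of the same trick outside the test harness (REKEY_BYTES is an internal attribute, not stable public API; sftp is an assumed connected client):

t = sftp.sock.get_transport()
t.packetizer.REKEY_BYTES = 512 * 1024  # rekey after ~512KB instead of ~1GB
try:
    with sftp.open("demo.bin", "w", 128 * 1024) as f:
        for _ in range(32):
            f.write("x" * 32 * 1024)  # crossing the threshold triggers rekey
    assert t.H != t.session_id  # the exchange hash changes after rekeying
finally:
    t.packetizer.REKEY_BYTES = pow(2, 30)  # restore the default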