author | Jeff Forcier <jeff@bitprophet.org> | 2017-10-23 16:05:30 -0700 |
---|---|---|
committer | Jeff Forcier <jeff@bitprophet.org> | 2018-09-17 14:44:07 -0700 |
commit | 8868305a76e84e2ee00e35b5131b7b98e45f92a1 (patch) | |
tree | 9caa303d099bef32f7a7e9dbdf6e4688f34e3ecb /tests/test_sftp_big.py | |
parent | 903f21d99d9e47dfc519c1521f8dbb8b4ff5724a (diff) |
Start overhauling regular SFTP suite.
Includes an attempt to split out the longer-lived server component from the
client component, into fixtures.
Also starts tweaking early tests so they're less bad. E.g. test_2_close
had its docstring actively disagreeing with its code (which was super
confusing since both were committed at the same time in 2005...)
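
As a rough sketch of the fixture split the message describes (this is not the project's actual conftest; start_loopback_server and the attributes on its return value are hypothetical placeholders for whatever launches the in-process SFTP server), the longer-lived server piece can be session-scoped while a per-test sftp fixture builds the client and hangs the scratch folder off it. That is why the tests in the diff below refer to sftp.FOLDER instead of a module-level FOLDER constant.

# Rough, hypothetical sketch only: illustrates the client/server fixture split
# described above. start_loopback_server() is an assumed stand-in, not a real
# paramiko or project API; the real conftest is wired differently.
import os

import pytest
import paramiko


@pytest.fixture(scope="session")
def sftp_server():
    # Longer-lived server component: started once per test session.
    server = start_loopback_server()  # hypothetical helper
    yield server
    server.close()


@pytest.fixture
def sftp(sftp_server):
    # Client component: a fresh SFTPClient per test, with the scratch folder
    # attached as an attribute so tests can say sftp.FOLDER.
    transport = paramiko.Transport((sftp_server.host, sftp_server.port))
    transport.connect(username=sftp_server.username, pkey=sftp_server.key)
    client = paramiko.SFTPClient.from_transport(transport)
    # Same default the removed module-level constant used.
    client.FOLDER = os.environ.get('TEST_FOLDER', 'temp-testing000')
    client.mkdir(client.FOLDER)
    try:
        yield client
    finally:
        client.rmdir(client.FOLDER)
        transport.close()

With fixtures along these lines, each test method simply declares an sftp parameter, as the new signatures in the diff do.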
Diffstat (limited to 'tests/test_sftp_big.py')
-rw-r--r-- | tests/test_sftp_big.py | 83 |
1 file changed, 40 insertions(+), 43 deletions(-)
diff --git a/tests/test_sftp_big.py b/tests/test_sftp_big.py
index c58a7912..e5708312 100644
--- a/tests/test_sftp_big.py
+++ b/tests/test_sftp_big.py
@@ -33,9 +33,6 @@ import unittest
 
 from paramiko.common import o660
 
-FOLDER = os.environ.get('TEST_FOLDER', 'temp-testing000')
-
-
 class TestBigSFTP(object):
     def test_1_lots_of_files(self, sftp):
         """
@@ -44,22 +41,22 @@ class TestBigSFTP(object):
         numfiles = 100
         try:
             for i in range(numfiles):
-                with sftp.open('%s/file%d.txt' % (FOLDER, i), 'w', 1) as f:
+                with sftp.open('%s/file%d.txt' % (sftp.FOLDER, i), 'w', 1) as f:
                     f.write('this is file #%d.\n' % i)
-                sftp.chmod('%s/file%d.txt' % (FOLDER, i), o660)
+                sftp.chmod('%s/file%d.txt' % (sftp.FOLDER, i), o660)
 
             # now make sure every file is there, by creating a list of filenmes
             # and reading them in random order.
             numlist = list(range(numfiles))
             while len(numlist) > 0:
                 r = numlist[random.randint(0, len(numlist) - 1)]
-                with sftp.open('%s/file%d.txt' % (FOLDER, r)) as f:
+                with sftp.open('%s/file%d.txt' % (sftp.FOLDER, r)) as f:
                     assert f.readline() == 'this is file #%d.\n' % r
                 numlist.remove(r)
         finally:
             for i in range(numfiles):
                 try:
-                    sftp.remove('%s/file%d.txt' % (FOLDER, i))
+                    sftp.remove('%s/file%d.txt' % (sftp.FOLDER, i))
                 except:
                     pass
 
@@ -70,19 +67,19 @@ class TestBigSFTP(object):
         kblob = (1024 * b'x')
         start = time.time()
         try:
-            with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'w') as f:
                 for n in range(1024):
                     f.write(kblob)
                     if n % 128 == 0:
                         sys.stderr.write('.')
             sys.stderr.write(' ')
 
-            assert sftp.stat('%s/hongry.txt' % FOLDER).st_size == 1024 * 1024
+            assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
             end = time.time()
             sys.stderr.write('%ds ' % round(end - start))
 
             start = time.time()
-            with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'r') as f:
                 for n in range(1024):
                     data = f.read(1024)
                     assert data == kblob
@@ -90,7 +87,7 @@ class TestBigSFTP(object):
             end = time.time()
             sys.stderr.write('%ds ' % round(end - start))
         finally:
-            sftp.remove('%s/hongry.txt' % FOLDER)
+            sftp.remove('%s/hongry.txt' % sftp.FOLDER)
 
     def test_3_big_file_pipelined(self, sftp):
         """
@@ -99,7 +96,7 @@ class TestBigSFTP(object):
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         start = time.time()
         try:
-            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
                 f.set_pipelined(True)
                 for n in range(1024):
                     f.write(kblob)
@@ -107,12 +104,12 @@ class TestBigSFTP(object):
                         sys.stderr.write('.')
             sys.stderr.write(' ')
 
-            assert sftp.stat('%s/hongry.txt' % FOLDER).st_size == 1024 * 1024
+            assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
             end = time.time()
             sys.stderr.write('%ds ' % round(end - start))
 
             start = time.time()
-            with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
                 file_size = f.stat().st_size
                 f.prefetch(file_size)
 
@@ -132,12 +129,12 @@ class TestBigSFTP(object):
             end = time.time()
             sys.stderr.write('%ds ' % round(end - start))
         finally:
-            sftp.remove('%s/hongry.txt' % FOLDER)
+            sftp.remove('%s/hongry.txt' % sftp.FOLDER)
 
     def test_4_prefetch_seek(self, sftp):
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         try:
-            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
                 f.set_pipelined(True)
                 for n in range(1024):
                     f.write(kblob)
@@ -145,13 +142,13 @@ class TestBigSFTP(object):
                         sys.stderr.write('.')
             sys.stderr.write(' ')
 
-            assert sftp.stat('%s/hongry.txt' % FOLDER).st_size == 1024 * 1024
+            assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
 
             start = time.time()
             k2blob = kblob + kblob
             chunk = 793
             for i in range(10):
-                with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+                with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
                     file_size = f.stat().st_size
                     f.prefetch(file_size)
                     base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
@@ -168,12 +165,12 @@ class TestBigSFTP(object):
             end = time.time()
             sys.stderr.write('%ds ' % round(end - start))
         finally:
-            sftp.remove('%s/hongry.txt' % FOLDER)
+            sftp.remove('%s/hongry.txt' % sftp.FOLDER)
 
     def test_5_readv_seek(self, sftp):
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         try:
-            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
                 f.set_pipelined(True)
                 for n in range(1024):
                     f.write(kblob)
@@ -181,13 +178,13 @@ class TestBigSFTP(object):
                         sys.stderr.write('.')
             sys.stderr.write(' ')
 
-            assert sftp.stat('%s/hongry.txt' % FOLDER).st_size == 1024 * 1024
+            assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
 
             start = time.time()
             k2blob = kblob + kblob
             chunk = 793
             for i in range(10):
-                with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+                with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
                     base_offset = (512 * 1024) + 17 * random.randint(1000, 2000)
                     # make a bunch of offsets and put them in random order
                     offsets = [base_offset + j * chunk for j in range(100)]
@@ -204,7 +201,7 @@ class TestBigSFTP(object):
             end = time.time()
             sys.stderr.write('%ds ' % round(end - start))
         finally:
-            sftp.remove('%s/hongry.txt' % FOLDER)
+            sftp.remove('%s/hongry.txt' % sftp.FOLDER)
 
     def test_6_lots_of_prefetching(self, sftp):
         """
@@ -213,7 +210,7 @@ class TestBigSFTP(object):
         """
         kblob = (1024 * b'x')
         try:
-            with sftp.open('%s/hongry.txt' % FOLDER, 'w') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'w') as f:
                 f.set_pipelined(True)
                 for n in range(1024):
                     f.write(kblob)
@@ -221,13 +218,13 @@ class TestBigSFTP(object):
                        sys.stderr.write('.')
             sys.stderr.write(' ')
 
-            assert sftp.stat('%s/hongry.txt' % FOLDER).st_size == 1024 * 1024
+            assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
 
             for i in range(10):
-                with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
+                with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'r') as f:
                     file_size = f.stat().st_size
                     f.prefetch(file_size)
-            with sftp.open('%s/hongry.txt' % FOLDER, 'r') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'r') as f:
                 file_size = f.stat().st_size
                 f.prefetch(file_size)
                 for n in range(1024):
@@ -237,7 +234,7 @@ class TestBigSFTP(object):
                        sys.stderr.write('.')
             sys.stderr.write(' ')
         finally:
-            sftp.remove('%s/hongry.txt' % FOLDER)
+            sftp.remove('%s/hongry.txt' % sftp.FOLDER)
 
     def test_7_prefetch_readv(self, sftp):
         """
@@ -245,7 +242,7 @@ class TestBigSFTP(object):
         """
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         try:
-            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
                 f.set_pipelined(True)
                 for n in range(1024):
                     f.write(kblob)
@@ -253,9 +250,9 @@ class TestBigSFTP(object):
                        sys.stderr.write('.')
             sys.stderr.write(' ')
 
-            assert sftp.stat('%s/hongry.txt' % FOLDER).st_size == 1024 * 1024
+            assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
 
-            with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
                 file_size = f.stat().st_size
                 f.prefetch(file_size)
                 data = f.read(1024)
@@ -273,7 +270,7 @@ class TestBigSFTP(object):
 
             sys.stderr.write(' ')
         finally:
-            sftp.remove('%s/hongry.txt' % FOLDER)
+            sftp.remove('%s/hongry.txt' % sftp.FOLDER)
 
     def test_8_large_readv(self, sftp):
         """
@@ -282,7 +279,7 @@ class TestBigSFTP(object):
         """
         kblob = bytes().join([struct.pack('>H', n) for n in range(512)])
         try:
-            with sftp.open('%s/hongry.txt' % FOLDER, 'wb') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'wb') as f:
                 f.set_pipelined(True)
                 for n in range(1024):
                     f.write(kblob)
@@ -290,9 +287,9 @@ class TestBigSFTP(object):
                        sys.stderr.write('.')
             sys.stderr.write(' ')
 
-            assert sftp.stat('%s/hongry.txt' % FOLDER).st_size == 1024 * 1024
+            assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
 
-            with sftp.open('%s/hongry.txt' % FOLDER, 'rb') as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'rb') as f:
                 data = list(f.readv([(23 * 1024, 128 * 1024)]))
                 assert len(data) == 1
                 data = data[0]
@@ -300,7 +297,7 @@ class TestBigSFTP(object):
 
            sys.stderr.write(' ')
         finally:
-            sftp.remove('%s/hongry.txt' % FOLDER)
+            sftp.remove('%s/hongry.txt' % sftp.FOLDER)
 
     def test_9_big_file_big_buffer(self, sftp):
         """
@@ -308,12 +305,12 @@ class TestBigSFTP(object):
         """
         mblob = (1024 * 1024 * 'x')
         try:
-            with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'w', 128 * 1024) as f:
                 f.write(mblob)
 
-            assert sftp.stat('%s/hongry.txt' % FOLDER).st_size == 1024 * 1024
+            assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
         finally:
-            sftp.remove('%s/hongry.txt' % FOLDER)
+            sftp.remove('%s/hongry.txt' % sftp.FOLDER)
 
     def test_A_big_file_renegotiate(self, sftp):
         """
@@ -323,20 +320,20 @@ class TestBigSFTP(object):
         t.packetizer.REKEY_BYTES = 512 * 1024
         k32blob = (32 * 1024 * 'x')
         try:
-            with sftp.open('%s/hongry.txt' % FOLDER, 'w', 128 * 1024) as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'w', 128 * 1024) as f:
                 for i in range(32):
                     f.write(k32blob)
 
-            assert sftp.stat('%s/hongry.txt' % FOLDER).st_size == 1024 * 1024
+            assert sftp.stat('%s/hongry.txt' % sftp.FOLDER).st_size == 1024 * 1024
             assert t.H != t.session_id
 
             # try to read it too.
-            with sftp.open('%s/hongry.txt' % FOLDER, 'r', 128 * 1024) as f:
+            with sftp.open('%s/hongry.txt' % sftp.FOLDER, 'r', 128 * 1024) as f:
                 file_size = f.stat().st_size
                 f.prefetch(file_size)
                 total = 0
                 while total < 1024 * 1024:
                     total += len(f.read(32 * 1024))
         finally:
-            sftp.remove('%s/hongry.txt' % FOLDER)
+            sftp.remove('%s/hongry.txt' % sftp.FOLDER)
             t.packetizer.REKEY_BYTES = pow(2, 30)