#!/usr/bin/env python
# License: GPLv3 Copyright: 2021, Kovid Goyal <kovid at kovidgoyal.net>

import os
import shutil
import stat
import tempfile
from collections import namedtuple
from contextlib import contextmanager
from pathlib import Path

from kittens.transfer.rsync import Differ, Hasher, Patcher, decode_utf8_buffer, parse_ftc
from kittens.transfer.utils import set_paths
from kitty.constants import kitten_exe
from kitty.file_transmission import Action, Compression, FileTransmissionCommand, FileType, TransmissionType, ZlibDecompressor
from kitty.file_transmission import TestFileTransmission as FileTransmission

from . import PTY, BaseTest


def response(id='test', msg='', file_id='', name='', action='status', status='', size=-1):
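    # Build the expected status response dict; only fields that are actually set get included.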
    ans = {'action': 'status'}
    if id:
        ans['id'] = id
    if file_id:
        ans['file_id'] = file_id
    if name:
        ans['name'] = name
    if status:
        ans['status'] = status
    if size > -1:
        ans['size'] = size
    return ans


def names_in(path):
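    # Yield the paths of all files and directories under path, relative to path.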
    for dirpath, dirnames, filenames in os.walk(path):
        for d in dirnames + filenames:
            yield os.path.relpath(os.path.join(dirpath, d), path)


def serialized_cmd(**fields) -> str:
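    # Build a FileTransmissionCommand from keyword fields (mapping enum names to members) and serialize it.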
    if 'id' not in fields:
        fields['id'] = 'test'
    for k, A in (('action', Action), ('ftype', FileType), ('ttype', TransmissionType), ('compression', Compression)):
        if k in fields:
            fields[k] = A[fields[k]]
    if isinstance(fields.get('data'), str):
        fields['data'] = fields['data'].encode('utf-8')
    ans = FileTransmissionCommand(**fields)
    return ans.serialize()


def generate_data(block_size, num_blocks, *extra) -> bytes:
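    # Filler data of num_blocks blocks, each block_size bytes and tagged with its index, plus an optional trailer.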
    extra = ''.join(extra)
    b = b'_' * (block_size * num_blocks) + extra.encode()
    ans = bytearray(b)
    for i in range(num_blocks):
        offset = i * block_size
        p = str(i).encode()
        ans[offset:offset+len(p)] = p
    return bytes(ans)


def patch_data(data, *patches):
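    # Apply "offset:text" patches to data; returns the patched bytes, the patch count and the total patched size.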
    total_patch_size = 0
    ans = bytearray(data)
    for patch in patches:
        o, sep, r = patch.partition(':')
        r = r.encode()
        total_patch_size += len(r)
        offset = int(o)
        ans[offset:offset+len(r)] = r
    return bytes(ans), len(patches), total_patch_size


def run_roundtrip_test(self: 'TestFileTransmission', src_data, changed, num_of_patches, total_patch_size):
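    # Full rsync round trip: sign `changed`, diff `src_data` against that signature, then patch
    # `changed` back into `src_data`, pushing all data through deliberately tiny buffers.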
    buf = memoryview(bytearray(30))
    signature = bytearray(0)
    p = Patcher(len(changed))
    n = p.signature_header(buf)
    signature.extend(buf[:n])
    src = memoryview(changed)
    bs = p.block_size
    while src:
        n = p.sign_block(src[:bs], buf)
        signature.extend(buf[:n])
        src = src[bs:]
    d = Differ()
    src = memoryview(signature)
    while src:
        d.add_signature_data(src[:13])
        src = src[13:]
    d.finish_signature_data()
    del src, signature
    src = memoryview(src_data)
    delta = bytearray(0)

    def read_into(b):
        nonlocal src
        n = min(len(b), len(src))
        if n > 0:
            b[:n] = src[:n]
            src = src[n:]
        return n

    def write_delta(b):
        delta.extend(b)

    while d.next_op(read_into, write_delta):
        pass
    delta = memoryview(delta)
    del src

    def read_at(pos, output) -> int:
        b = changed[pos:]
        amt = min(len(output), len(b))
        output[:amt] = b[:amt]
        return amt

    output = bytearray(0)

    def write_changes(b):
        output.extend(b)

    def debug_msg():
        return f'\n\nsrc:\n{src_data.decode()}\nchanged:\n{changed.decode()}\noutput:\n{output.decode()}'

    try:
        while delta:
            p.apply_delta_data(delta[:11], read_at, write_changes)
            delta = delta[11:]
        p.finish_delta_data()
    except Exception as err:
        self.fail(f'{err}\n{debug_msg()}')
    self.assertEqual(src_data, bytes(output), debug_msg())
    limit = 2 * (p.block_size * num_of_patches)
    if limit > -1:
        self.assertLessEqual(
            p.total_data_in_delta, limit, f'Unexpectedly poor delta performance: {total_patch_size=} {p.total_data_in_delta=} {limit=}')


def test_rsync_roundtrip(self: 'TestFileTransmission') -> None:
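    # Round trip variously patched, truncated and extended payloads through the delta machinery.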
    block_size = 16
    src_data = generate_data(block_size, 16)
    changed, num_of_patches, total_patch_size = patch_data(src_data, "3:patch1", "16:patch2", "130:ptch3", "176:patch4", "222:XXYY")

    run_roundtrip_test(self, src_data, src_data[block_size:], 1, block_size)
    run_roundtrip_test(self, src_data, changed, num_of_patches, total_patch_size)
    run_roundtrip_test(self, src_data, b'', -1, 0)
    run_roundtrip_test(self, src_data, src_data, 0, 0)
    run_roundtrip_test(self, src_data, changed[:len(changed)-3], num_of_patches, total_patch_size)
    run_roundtrip_test(self, src_data, changed[:37] + changed[81:], num_of_patches, total_patch_size)

    block_size = 13
    src_data = generate_data(block_size, 17, "trailer")
    changed, num_of_patches, total_patch_size = patch_data(src_data, "0:patch1", "19:patch2")
    run_roundtrip_test(self, src_data, changed, num_of_patches, total_patch_size)
    run_roundtrip_test(self, src_data, changed[:len(changed)-3], num_of_patches, total_patch_size)
    run_roundtrip_test(self, src_data, changed + b"xyz...", num_of_patches, total_patch_size)


class PtyFileTransmission(FileTransmission):
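    # A FileTransmission that delivers its serialized commands to the child process running in the test PTY.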

    def __init__(self, pty, allow=True):
        self.pty = pty
        super().__init__(allow=allow)
        self.pty.callbacks.ftc = self

    def write_ftc_to_child(self, payload: FileTransmissionCommand, appendleft: bool = False, use_pending: bool = True) -> bool:
        self.pty.write_to_child('\x1b]' + payload.serialize(prefix_with_osc_code=True) + '\x1b\\', flush=False)
        return True


class TransferPTY(PTY):
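    # A PTY whose file transfer escape codes are handled by a PtyFileTransmission instance.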

    def __init__(self, cmd, cwd, allow=True, env=None):
        super().__init__(cmd, cwd=cwd, env=env)
        self.fc = PtyFileTransmission(self, allow=allow)


class TestFileTransmission(BaseTest):
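    # Tests for the file transmission protocol and for driving the transfer kitten over a PTY.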

    def setUp(self):
        super().setUp()
        self.tdir = os.path.realpath(tempfile.mkdtemp())
        self.responses = []
        self.orig_home = os.environ.get('HOME')

    def tearDown(self):
        shutil.rmtree(self.tdir)
        self.responses = []
        if self.orig_home is None:
            os.environ.pop('HOME', None)
        else:
            os.environ['HOME'] = self.orig_home
        super().tearDown()

    def clean_tdir(self):
        shutil.rmtree(self.tdir)
        self.tdir = os.path.realpath(tempfile.mkdtemp())
        self.responses = []

    def cr(self, a, b):
        def f(r):
            r.pop('size', None)
            return r

        a = tuple(f(r) for r in a if r.get('status') != 'PROGRESS')
        b = tuple(f(r) for r in b if r.get('status') != 'PROGRESS')
        self.ae(a, b)

    def assertResponses(self, ft, limit=1024, **kw):
        self.responses.append(response(**kw))
        self.cr(ft.test_responses[:limit], self.responses[:limit])

    def assertPathEqual(self, a, b):
        a = os.path.abspath(os.path.realpath(a))
        b = os.path.abspath(os.path.realpath(b))
        self.ae(a, b)

    def test_rsync_roundtrip(self):
        test_rsync_roundtrip(self)

    def test_file_get(self):
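        # Getting files from kitty: refusals, spec metadata and file data, with and without compression.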
        # send refusal
        for quiet in (0, 1, 2):
            ft = FileTransmission(allow=False)
            ft.handle_serialized_command(serialized_cmd(action='receive', id='x', quiet=quiet))
            self.cr(ft.test_responses, [] if quiet == 2 else [response(id='x', status='EPERM:User refused the transfer')])
            self.assertFalse(ft.active_sends)
        # reading metadata for specs
        cwd = os.path.join(self.tdir, 'cwd')
        home = os.path.join(self.tdir, 'home')
        os.mkdir(cwd), os.mkdir(home)
        with set_paths(cwd=cwd, home=home):
            ft = FileTransmission()
            self.responses = []
            ft.handle_serialized_command(serialized_cmd(action='receive', size=1))
            self.assertResponses(ft, status='OK')
            ft.handle_serialized_command(serialized_cmd(action='file', file_id='missing', name='XXX'))
            self.responses.append(response(status='ENOENT:Failed to read spec', file_id='missing'))
            self.assertResponses(ft, status='OK', name=home)
            ft = FileTransmission()
            self.responses = []
            ft.handle_serialized_command(serialized_cmd(action='receive', size=2))
            self.assertResponses(ft, status='OK')
            with open(os.path.join(home, 'a'), 'w') as f:
                f.write('a')
            os.mkdir(f.name + 'd')
            with open(os.path.join(f.name + 'd', 'b'), 'w') as f2:
                f2.write('bbb')
            os.symlink(f.name, f.name + 'd/s')
            os.link(f.name, f.name + 'd/h')
            os.symlink('XXX', f.name + 'd/q')
            ft.handle_serialized_command(serialized_cmd(action='file', file_id='a', name='a'))
            ft.handle_serialized_command(serialized_cmd(action='file', file_id='b', name='ad'))
            files = {r['name']: r for r in ft.test_responses if r['action'] == 'file'}
            self.ae(len(files), 6)
            q = files[f.name]
            tgt = q['status'].encode('ascii')
            self.ae(q['size'], 1), self.assertNotIn('ftype', q)
            q = files[f.name + 'd']
            self.ae(q['ftype'], 'directory')
            q = files[f.name + 'd/b']
            self.ae(q['size'], 3)
            q = files[f.name + 'd/s']
            self.ae(q['ftype'], 'symlink')
            self.ae(q['data'], tgt)
            q = files[f.name + 'd/h']
            self.ae(q['ftype'], 'link')
            self.ae(q['data'], tgt)
            q = files[f.name + 'd/q']
            self.ae(q['ftype'], 'symlink')
            self.assertNotIn('data', q)
        base = os.path.join(self.tdir, 'base')
        os.mkdir(base)
        src = os.path.join(base, 'src.bin')
        data = os.urandom(16 * 1024)
        with open(src, 'wb') as f:
            f.write(data)
        sl = os.path.join(base, 'src.link')
        os.symlink(src, sl)
        for compress in ('none', 'zlib'):
            ft = FileTransmission()
            self.responses = []
            ft.handle_serialized_command(serialized_cmd(action='receive', size=1))
            self.assertResponses(ft, status='OK')
            ft.handle_serialized_command(serialized_cmd(action='file', file_id='src', name=src))
            ft.active_sends['test'].metadata_sent = True
            ft.test_responses = []
            ft.handle_serialized_command(serialized_cmd(action='file', file_id='src', name=src, compression=compress))
            received = b''.join(x['data'] for x in ft.test_responses)
            if compress == 'zlib':
                received = ZlibDecompressor()(received, True)
            self.ae(data, received)
            ft.test_responses = []
            ft.handle_serialized_command(serialized_cmd(action='file', file_id='sl', name=sl, compression=compress))
            received = b''.join(x['data'] for x in ft.test_responses)
            self.ae(received.decode('utf-8'), src)

    def test_parse_ftc(self):
        def t(raw, *expected):
            a = []

            def c(k, v):
                a.append(decode_utf8_buffer(k))
                a.append(decode_utf8_buffer(v))

            parse_ftc(raw, c)
            self.ae(tuple(a), expected)

        t('a=b', 'a', 'b')
        t('a=b;', 'a', 'b')
        t('a1=b1;c=d;;', 'a1', 'b1', 'c', 'd')
        t('a1=b1;c=d;;e', 'a1', 'b1', 'c', 'd')
        t('a1=b1;c=d;;;1=1', 'a1', 'b1', 'c', 'd', '1', '1')

    def test_path_mapping_receive(self):
        self.skipTest('TODO: Port this test')

    def test_rsync_hashers(self):
        h = Hasher("xxh3-64")
        h.update(b'abcd')
        self.assertEqual(h.hexdigest(), '6497a96f53a89890')
        self.assertEqual(h.digest64(), 7248448420886124688)
        h128 = Hasher("xxh3-128")
        h128.update(b'abcd')
        self.assertEqual(h128.hexdigest(), '8d6b60383dfa90c21be79eecd1b1353d')

    @contextmanager
    def run_kitten(self, cmd, home_dir='', allow=True):
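        # Run the transfer kitten in a PTY with a throwaway working directory, yielding the PTY once it has drawn something.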
        cwd = os.path.realpath(tempfile.mkdtemp(suffix='-cwd', dir=self.tdir))
        cmd = [kitten_exe(), 'transfer'] + cmd
        env = {'PWD': cwd}
        if home_dir:
            env['HOME'] = home_dir
        try:
            pty = TransferPTY(cmd, cwd=cwd, allow=allow, env=env)
            i = 10
            while i > 0 and not pty.screen_contents().strip():
                pty.process_input_from_child()
                i -= 1
            yield pty
        finally:
            if os.path.exists(cwd):
                shutil.rmtree(cwd)

    def test_transfer_send(self):
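        # Drive the transfer kitten end to end: refusal, single files, delta transmission, compression, remote home, directory trees and mirror mode.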
        src = os.path.join(self.tdir, 'src')
        self.src_data = os.urandom(9137)
        with open(src, 'wb') as s:
            s.write(self.src_data)
        dest = os.path.join(self.tdir, 'dest')

        with self.run_kitten([src, dest], allow=False) as pty:
            pty.wait_till_child_exits(require_exit_code=1)
        self.assertFalse(os.path.exists(dest))

        def single_file(*cmd):
            with self.run_kitten(list(cmd) + [src, dest]) as pty:
                pty.wait_till_child_exits(require_exit_code=0)
            with open(dest, 'rb') as f:
                self.assertEqual(self.src_data, f.read())

        single_file()
        single_file()
        single_file('--transmit-deltas')
        with open(dest, 'wb') as d:
            d.write(os.urandom(1023))
        single_file('--transmit-deltas')
        os.remove(dest)
        single_file('--transmit-deltas')
        single_file('--compress=never')
        single_file('--compress=always')
        single_file('--transmit-deltas', '--compress=never')

        # remote home
        fname = 'tstest-file'
        with set_paths(home=self.tdir), self.run_kitten([src, '~/'+fname]) as pty:
            pty.wait_till_child_exits(require_exit_code=0)
        os.remove(os.path.expanduser('~/'+fname))

        def multiple_files(*cmd):
            src = os.path.join(self.tdir, 'msrc')
            dest = os.path.join(self.tdir, 'mdest')
            if os.path.exists(src):
                shutil.rmtree(src)
            os.mkdir(src)
            os.makedirs(dest, exist_ok=True)

            expected = {}
            Entry = namedtuple('Entry', 'relpath mtime mode nlink')

            def entry(path, base=src):
                st = os.stat(path, follow_symlinks=False)
                mtime = st.st_mtime_ns
                if stat.S_ISDIR(st.st_mode):
                    mtime = 0  # mtime is flaky for dirs on CI even empty ones
                return Entry(os.path.relpath(path, base), mtime, st.st_mode, st.st_nlink)

            def se(path):
                e = entry(path)
                expected[e.relpath] = e

            b = Path(src)
            with open(b / 'simple', 'wb') as f:
                f.write(os.urandom(1317))
                os.fchmod(f.fileno(), 0o766)
            os.link(f.name, b / 'hardlink')
            os.utime(f.name, (1.3, 1.3))
            se(f.name)
            se(str(b/'hardlink'))
            os.mkdir(b / 'empty')
            se(str(b/'empty'))
            s = b / 'sub'
            os.mkdir(s)
            with open(s / 'reg', 'wb') as f:
                f.write(os.urandom(113))
            os.utime(f.name, (1171.3, 1171.3))
            se(f.name)
            se(str(s))
            os.symlink('/', b/'abssym')
            se(b/'abssym')
            os.symlink('sub/reg', b/'sym')
            se(b/'sym')

            with self.run_kitten(list(cmd) + [src, dest]) as pty:
                pty.wait_till_child_exits(require_exit_code=0)

            actual = {}

            def de(path):
                e = entry(path, os.path.join(dest, os.path.basename(src)))
                if e.relpath != '.':
                    actual[e.relpath] = e

            for dirpath, dirnames, filenames in os.walk(dest):
                for x in dirnames:
                    de(os.path.join(dirpath, x))
                for x in filenames:
                    de(os.path.join(dirpath, x))

            self.assertEqual(expected, actual)

            for key, e in expected.items():
                ex = os.path.join(src, key)
                ax = os.path.join(dest, os.path.basename(src), key)
                if os.path.islink(ex):
                    self.ae(os.readlink(ex), os.readlink(ax))
                elif os.path.isfile(ex):
                    with open(ex, 'rb') as ef, open(ax, 'rb') as af:
                        self.assertEqual(ef.read(), af.read())

        multiple_files()
        multiple_files('--compress=always')
        self.clean_tdir()
        multiple_files('--transmit-deltas')
        multiple_files('--transmit-deltas')

        # mirror mode
        src_home = os.path.join(self.tdir, 'misrc')
        os.mkdir(src_home)
        open(os.path.join(src_home, fname), 'w').close()
        with self.run_kitten(['--mode=mirror', '~/'+fname], home_dir=src_home) as pty:
            pty.wait_till_child_exits(require_exit_code=0)
        with open(os.path.expanduser('~/'+fname)) as f:
            self.assertEqual('', f.read())
        os.remove(f.name)