| Author | SHA1 | Message | Date |
|---|---|---|---|
| | 17c22a1b9c | this wasn't added when I added the test cases. | 3 months ago |
| | e6b5299db6 | support a graph of mappings instead of just one layer. This isn't the most efficient approach, as we generate all the possible mappings, but the number of mappings is assumed to stay low for now. | 3 months ago |
| | ed59121c5d | Bramify this; work around the stupidity that is BEP-52. In most cases keys are us-ascii, but BEP-52 allows some keys to be binary data. This appears to apply only to dictionaries under 'piece layers', so pass down the parent key and skip the us-ascii decoding for the dictionary directly beneath it. There was also a bug where a dictionary key with code points above 127 was encoded incorrectly, because the length prefix used the length of the string rather than the length of the encoded bytes. | 1 year ago |
| | f523886240 | handle empty keys, which apparently happen in v2 torrents | 1 year ago |
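The length-prefix bug described in ed59121c5d is easy to reproduce with plain Python (this sketch does not use the module's code): a bencoded string is prefixed with its byte length, so taking `len()` of the unencoded str writes a short prefix whenever the key contains code points above 127.

```python
# Standalone illustration of the bug fixed in ed59121c5d; not this module's API.
key = 'Fu\u00dfball'            # 7 code points, 8 bytes once UTF-8 encoded
encoded = key.encode('UTF-8')

buggy = b'%d:' % len(key) + encoded        # b'7:Fu\xc3\x9fball' -- prefix one byte short
fixed = b'%d:' % len(encoded) + encoded    # b'8:Fu\xc3\x9fball'

assert buggy != fixed
```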
```diff
@@ -103,5 +103,25 @@
 		"title": "just mapping lists them",
 		"cmd": [ "mapping" ],
 		"stdout_re": "^efdb5d9c-d123-4b30-aaa8-45a9ea8f6053:/.*subdir/mapa <-> ceaa4862-dd00-41ba-9787-7480ec1b2679:/.*subdir/mapb\n$"
 	},
+	{
+		"special": "set hostid",
+		"comment": "that a third host",
+		"hostid": "809c2a62-0df9-42a2-adae-c0a532ad860b"
+	},
+	{
+		"title": "that creating host c works",
+		"cmd": [ "hosts" ],
+		"format": [ "cmd" ]
+	},
+	{
+		"title": "that host mapping c works",
+		"cmd": [ "mapping", "--create", "mapc", "ceaa4862-dd00-41ba-9787-7480ec1b2679:{mappathb}" ],
+		"format": [ "cmd" ]
+	},
+	{
+		"title": "the search will pass through other mappings to get a local mapping",
+		"cmd": [ "search", "file", "+sometag=anothervalue" ],
+		"stdout_re": "mapc/text.txt\n$"
+	}
 ]
```
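The entries added above follow the cli test fixture format: ordinary entries give a `title`, a `cmd` to run, and either a `stdout_re` to match or a `format` field handled by the harness, while `special` entries (such as `set hostid`) adjust harness state. The real driver lives in the cli tests and is not part of this diff; the sketch below is only a hypothetical illustration of how such a file could be consumed, with `run_cmd` and `handle_special` standing in for harness hooks that are assumptions here.

```python
import json
import re

def run_fixture(path, run_cmd, handle_special):
	# Hypothetical driver: run_cmd(cmd) returns captured stdout as a string,
	# handle_special(entry) applies steps like 'set hostid'. Neither is the
	# project's actual API; 'format' handling is omitted here.
	with open(path) as fp:
		entries = json.load(fp)

	for entry in entries:
		if 'special' in entry:
			handle_special(entry)
			continue

		stdout = run_cmd(entry['cmd'])
		if 'stdout_re' in entry:
			assert re.search(entry['stdout_re'], stdout), entry['title']
```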
```diff
@@ -68,16 +68,23 @@ def decode_list(x, f):
 		r.append(v)
 	return (r, f + 1)
 
-def decode_dict(x, f):
+def decode_dict(x, f, parent=None):
 	r, f = {}, f+1
-	lastkey = ''
+	lastkey = None
 	while x[f] != b'e'[0]:
 		k, f = decode_string(x, f)
-		k = k.decode('us-ascii')
-		if lastkey >= k:
+		if not parent or parent not in { 'piece layers' }:
+			k = k.decode('us-ascii')
+		if lastkey is not None and lastkey >= k:
 			raise ValueError
 		lastkey = k
-		r[k], f = decode_func[x[f]](x, f)
+
+		#decode value
+		fun = decode_func[x[f]]
+		kwargs = {}
+		if fun is decode_dict:
+			kwargs['parent'] = k
+		r[k], f = fun(x, f, **kwargs)
 	return (r, f + 1)
 
 decode_func = {}
```
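With the parent key passed down, only the dictionary directly beneath `'piece layers'` keeps its keys as raw bytes; every other dictionary still insists on us-ascii keys. Assuming this module's `bdecode`/`bencode` (and that byte-string values are encodable, which the fixture round-trip test depends on anyway), a minimal BEP-52-style document with a 32-byte binary key should now survive a round trip; the payload below is made up for illustration:

```python
# Hand-built BEP-52-style 'piece layers' dictionary; not taken from a real torrent.
# Assumes bdecode and bencode from this bencode module are in scope.
root = bytes.fromhex('aa' * 32)    # 32-byte merkle root used as a dictionary key
layer = bytes(32)                  # stand-in for the concatenated piece hashes

raw = b'd12:piece layersd32:' + root + b'32:' + layer + b'ee'

decoded = bdecode(raw)
assert root in decoded['piece layers']    # binary key kept as bytes, not us-ascii decoded
assert bencode(decoded) == raw            # and the document round-trips byte for byte
```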
```diff
@@ -285,11 +292,18 @@ def encode_list(x,r):
 		encode_func[type(e)](e, r)
 	r.append(b'e')
 
-def encode_dict(x,r):
+def encode_dict(x,r, parent=None):
 	r.append(b'd')
 	for k,v in sorted(x.items()):
-		r.extend((b'%d:' % len(k),k.encode('UTF-8')))
-		encode_func[type(v)](v, r)
+		origk = k
+		if not parent or parent not in { 'piece layers' }:
+			k = k.encode('us-ascii')
+		r.extend((b'%d:' % len(k),k))
+		efun = encode_func[type(v)]
+		kwargs = {}
+		if efun is encode_dict:
+			kwargs['parent'] = origk
+		efun(v, r, **kwargs)
 	r.append(b'e')
 
 encode_func = {}
@@ -339,9 +353,24 @@ class _TestCases(unittest.TestCase):
 	def test_bencode(self):
 		test_bencode()
 
+		self.assertEqual(bencode({'': 5}), b'd0:i5ee')
+
+	def test_round_trip_files(self):
+		import importlib
+
+		fixtures = importlib.resources.files(__name__[:__name__.rindex('.')]) / 'fixtures'
+
+		for i in fixtures.iterdir():
+			with self.subTest(file=str(i)):
+				data = i.read_bytes()
+
+				self.assertEqual(data, bencode(bdecode(data)))
+
 	def test_bdecode(self):
 		test_bdecode()
 
+		self.assertEqual(bdecode(b'd0:i5ee'), { '': 5 })
+
 try: #pragma: no cover
 	import psyco
 	psyco.bind(bdecode)
```
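test_round_trip_files locates its fixtures through importlib.resources rather than a path relative to the source tree, so the data is still found once the package is installed. The same pattern in isolation (the package name `mypkg` and its `fixtures` directory are placeholders, not this project's real layout):

```python
import importlib.resources

# Enumerate data files shipped inside a package; 'mypkg' and 'fixtures'
# are placeholder names used only for this sketch.
fixtures = importlib.resources.files('mypkg') / 'fixtures'

for entry in fixtures.iterdir():
	if entry.is_file():
		print(entry.name, len(entry.read_bytes()))
```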
```diff
@@ -647,13 +647,16 @@ class ObjectStore(object):
 		'''Returns the tuple (lclpath, hostid, rempath) for all
 		the mappings for this hostid.'''
 
+		# this is a little trick to return all possible mappings
+		# as we will have to explore all hosts that might have a
+		# mapping through us
 		hostid = _makeuuid(hostuuid())
 
 		sel = select(orm.MetaDataObject.data).where(
-		    orm.HostMapping.hostid == hostid,
-		    orm.HostMapping.objid == orm.MetaDataObject.uuid)
-		res = []
+		    orm.MetaDataObject.type == 'mapping')
+
+		allmappings = collections.defaultdict(lambda: [])
 
 		with self._ses() as session:
 			# XXX - view
 			for obj in session.scalars(sel):
@@ -661,11 +664,29 @@ class ObjectStore(object):
 				    pathlib.Path(b).resolve()))(*x.split(':',
 				    1)) for x in obj.mapping ]
 
 				for idx, (id, path) in enumerate(maps):
-					if hostid == id:
-						# add other to mapping
-						other = tuple(maps[(idx + 1) %
-						    2])
-						res.append((path, ) + other)
+					other = tuple(maps[(idx + 1) %
+					    2])
+					allmappings[id].append((path, ) + other)
+
+		res = set(allmappings[hostid])
+		tovisit = res.copy()
+		visited = set()
+
+		while tovisit:
+			lclpath, remhid, rempath = tovisit.pop()
+
+			if (remhid, rempath) in visited:
+				continue
+
+			visited.add((remhid, rempath))
+
+			addl = { (lclpath, x[1], x[2]) for x in
+			    allmappings[remhid] if x[1] != hostid }
+
+			res.update(addl)
+			tovisit.update(addl)
 
 		return res
 
 	def by_file(self, fname, types=('metadata', )):
```
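Stripped of the ORM query, the new get_hostmappings is a breadth-first expansion over the mapping graph: seed the result with mappings whose local endpoint is this host, then keep following remote endpoints through other hosts' mappings while preserving the original local path, skipping edges that point back at the local host. A self-contained sketch of the same traversal over plain tuples, with illustrative names:

```python
from collections import defaultdict

def expand_mappings(pairs, local):
	# pairs: iterable of ((hosta, patha), (hostb, pathb)) endpoint pairs,
	# the plain-data equivalent of a two-entry Mapping object.
	allmappings = defaultdict(list)
	for (ha, pa), (hb, pb) in pairs:
		# record both directions, like the enumerate(maps) loop above
		allmappings[ha].append((pa, hb, pb))
		allmappings[hb].append((pb, ha, pa))

	res = set(allmappings[local])
	tovisit = res.copy()
	visited = set()

	while tovisit:
		lclpath, remhid, rempath = tovisit.pop()
		if (remhid, rempath) in visited:
			continue
		visited.add((remhid, rempath))

		# follow the remote host's own mappings, keeping our local path
		addl = { (lclpath, h, p) for _, h, p in allmappings[remhid] if h != local }
		res.update(addl)
		tovisit.update(addl)

	return res

# lcl <-> a and a <-> b yields both (/l, a, /pa) and the pass-through (/l, b, /pb)
print(expand_mappings([(('lcl', '/l'), ('a', '/pa')),
    (('a', '/pa'), ('b', '/pb'))], 'lcl'))
```

Because `visited` tracks (host, path) endpoints rather than whole triples, a cycle among remote hosts is expanded only once and the loop terminates.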
```diff
@@ -2588,6 +2609,47 @@ class _TestCases(unittest.TestCase):
 		# and that it can be verified
 		persona.verify(mdobj)
 
+	def test_get_hostmapping(self):
+		# that an object store
+		persona = self.persona
+		objst = ObjectStore.load(self.tempdir / 'sample.data.sqlite3')
+
+		with mock.patch(__name__ + '.hostuuid') as hostidpatch:
+			# with a local host uuid
+			hid = uuid.uuid4()
+			hostidpatch.return_value = hid
+			host = persona.Host(name='lclhost', hostuuid=hid)
+			objst.loadobj(host)
+
+			# and two other hosts:
+			hida = uuid.uuid4()
+			hidb = uuid.uuid4()
+			objst.loadobj(persona.Host(name='hosta',
+			    hostuuid=hida))
+			objst.loadobj(persona.Host(name='hostb',
+			    hostuuid=hidb))
+
+			# that when a mapping from lcl to a to b exists
+			objst.loadobj(persona.Mapping(mapping=[
+			    ':'.join((str(hid), '/lclpath')) ,
+			    ':'.join((str(hida), '/patha')) ]))
+			objst.loadobj(persona.Mapping(mapping=[
+			    ':'.join((str(hida), '/patha')) ,
+			    ':'.join((str(hidb), '/pathb')) ]))
+
+			# that both a and b are included in the mappings
+			mappings = sorted(objst.get_hostmappings())
+
+			from pathlib import PosixPath
+			lclpath = PosixPath('/lclpath')
+			self.assertEqual(mappings, sorted([
+			    (lclpath, hida, PosixPath('/patha')),
+			    (lclpath, hidb, PosixPath('/pathb'))
+			]))
+
 	def test_objectstore(self):
 		persona = self.persona
 		objst = ObjectStore.load(self.tempdir / 'sample.data.sqlite3')
```
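The Mapping objects built in this test store each endpoint as a single '<host uuid>:<path>' string, and get_hostmappings splits it with `split(':', 1)`, so only the first colon separates the host from the path:

```python
# Endpoint format used by the Mapping objects above (path chosen for illustration).
endpoint = 'ceaa4862-dd00-41ba-9787-7480ec1b2679:/srv/media:archive'

hostid, path = endpoint.split(':', 1)
assert path == '/srv/media:archive'    # colons inside the path survive the split
```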
```diff
@@ -2797,11 +2859,17 @@ class _TestCases(unittest.TestCase):
 				mappathb = self.tempdir / 'mapb'
 				mappathb.mkdir()
+				mappathc = self.tempdir / 'mapc'
+				mappathc.mkdir()
 
 				filea = mappatha / 'text.txt'
 				filea.write_text('abc123\n')
 
 				fileb = mappathb / 'text.txt'
+				filec = mappathc / 'text.txt'
 
 				shutil.copyfile(filea, fileb)
 				shutil.copystat(filea, fileb)
+				shutil.copyfile(filea, filec)
+				shutil.copystat(filea, filec)
 			elif special == 'delete files':
 				for i in cmd['files']:
 					os.unlink(i)
```
```diff
@@ -1,7 +1,7 @@
 from .btv import _TestCases as btv_test_cases
 from .btv.bencode import _TestCases as bencode_test_cases
 from .mdb import _TestJSONEncoder
-from .cli import _TestCononicalCoder, _TestCases as cli_test_cases
+from .cli import _TestCononicalCoder, _TestCases as cli_test_cases, _TestPureFunctions
 from .cli import _TestMigrations
 from .tags import _TestTagCache
 from .mtree import Test
```