Browse Source

add container support, handle when a fileobject exists, but no metadata

The latter happens after importing a container.
main
John-Mark Gurney 2 years ago
parent
commit
549de4082c
2 changed files with 155 additions and 17 deletions
  1. +38
    -0
      ui/fixtures/cmd.container.json
  2. +117
    -17
      ui/medashare/cli.py

+ 38
- 0
ui/fixtures/cmd.container.json View File

@@ -0,0 +1,38 @@
[
{
"title": "gen ident",
"cmd": [ "genident", "name=A Test User" ],
"exit": 0
},
{
"special": "setup bittorrent files"
},
{
"title": "add metadata before import",
"cmd": [ "modify", "+foo=bar", "somedir/fileb.txt" ]
},
{
"title": "import partial container",
"cmd": [ "container", "somedir.torrent" ],
"stderr": "Warning, incomple/invalid files, not added:\n\t'filea.txt'\n\t'filec.txt'\n\t'filee.txt'\n"
},
{
"special": "verify store object cnt",
"comment": "should only have one container and three files, and a metadata",
"count": 5
},
{
"title": "verify correct files imported",
"cmd": [ "dump" ],
"stdout_re": "fileb.txt.*file.*\n.*foo.*bar.*cc06808cbbee0510331aa97974132e8dc296aeb795be229d064bae784b0a87a5cf4281d82e8c99271b75db2148f08a026c1a60ed9cabdb8cac6d24242dac4063.*\n.*filed.txt.*file.*\n.*filef.txt.*file.*\n.*fileb.txt.*filed.txt.*filef.txt.*cc06808cbbee0510331aa97974132e8dc296aeb795be229d064bae784b0a87a5cf4281d82e8c99271b75db2148f08a026c1.*7831bd05e23877e08a97362bab2ad7bcc7d08d8f841f42e8dee545781792b987aa7637f12cec399e261f798c10d3475add0db7de2643af86a346b6b451a69ec4.*be688838ca8686e5c90689bf2ab585cef1137c.*container.*magnet:\\?xt=urn:btih:501cf3bd4797f49fd7a624e8a9a8ce5cccceb602&dn=somedir"
},
{
"title": "add metadata after import",
"cmd": [ "modify", "+foo=bar", "somedir/filed.txt" ]
},
{
"special": "verify store object cnt",
"comment": "should only have one container and three files, and a metadata",
"count": 6
}
]

+ 117
- 17
ui/medashare/cli.py View File

@@ -11,12 +11,16 @@ from unittest import mock


from .hostid import hostuuid from .hostid import hostuuid


from .btv import _TestCases as bttestcase, validate_file

import base64 import base64
import base58 import base58
from .btv import bencode
import copy import copy
import datetime import datetime
import functools import functools
import hashlib import hashlib
import importlib
import io import io
import itertools import itertools
import json import json
@@ -310,6 +314,11 @@ class Persona(object):


self._created_by_ref = self._identity.uuid self._created_by_ref = self._identity.uuid


def Container(self, *args, **kwargs):
    '''Build a Container object attributed to this Persona and
    return it signed with the Persona's identity.

    All positional and keyword arguments are forwarded to the
    Container constructor; created_by_ref is always set to this
    Persona's uuid.
    '''

    # Rebind rather than mutate in place; Container below is the
    # module-level class, not this method.
    kwargs = dict(kwargs, created_by_ref=self.uuid)

    return self.sign(Container(*args, **kwargs))

def MetaData(self, *args, **kwargs): def MetaData(self, *args, **kwargs):
kwargs['created_by_ref'] = self.uuid kwargs['created_by_ref'] = self.uuid


@@ -557,10 +566,7 @@ class ObjectStore(object):
def by_id(self, id): def by_id(self, id):
'''Look up an object by it's UUID.''' '''Look up an object by it's UUID.'''


if not isinstance(id, uuid.UUID):
uid = uuid.UUID(id)
else:
uid = id
uid = _makeuuid(id)


return self._uuids[uid] return self._uuids[uid]


@@ -570,6 +576,40 @@ class ObjectStore(object):
h = self.makehash(hash, strict=False) h = self.makehash(hash, strict=False)
return self._hashes[h] return self._hashes[h]


def get_metadata(self, fname, persona):
    '''Get all MetaData objects for fname, or create one if
    not found.

    If a FileObject is not present, one will be created.

    A Persona must be passed in to create the FileObject and
    MetaData objects as needed.

    Note: if a new MetaData object is created, it is not
    stored in the database automatically.  It is expected that
    it will be modified and then saved, so call ObjectStore.loadobj
    with it to save it.
    '''

    # First make sure a FileObject exists for fname, creating
    # and persisting one via the persona when the store lacks it.
    try:
        fobj = self.by_file(fname, ('file',))[0]
    except KeyError:
        fobj = persona.by_file(fname)
        self.loadobj(fobj)

    # With the FileObject in hand, fetch its metadata; when none
    # exists yet, hand back a fresh (unsaved) MetaData object.
    try:
        return self.by_file(fname)
    except KeyError:
        return [ persona.MetaData(hashes=fobj.hashes) ]

def by_file(self, fname, types=('metadata', )): def by_file(self, fname, types=('metadata', )):
'''Return a metadata object for the file named fname. '''Return a metadata object for the file named fname.


@@ -582,6 +622,7 @@ class ObjectStore(object):


fid = FileObject.make_id(fname) fid = FileObject.make_id(fname)


#print('bf:', repr(fid), file=_real_stderr)
fobj = self.by_id(fid) fobj = self.by_id(fid)
fobj.verify() fobj.verify()


@@ -660,11 +701,15 @@ class FileObject(MDBase):
mtimets = datetime.datetime.fromtimestamp(s.st_mtime, mtimets = datetime.datetime.fromtimestamp(s.st_mtime,
tz=datetime.timezone.utc).timestamp() tz=datetime.timezone.utc).timestamp()


#print(repr(self), repr(s), s.st_mtime, file=_real_stderr)
if self.mtime.timestamp() != mtimets or \ if self.mtime.timestamp() != mtimets or \
self.size != s.st_size: self.size != s.st_size:
raise ValueError('file %s has changed' % raise ValueError('file %s has changed' %
repr(self.filename)) repr(self.filename))


class Container(MDBase):
    '''A signed object representing a group of files imported from a
    container file (e.g. a torrent); created with files=, hashes= and
    uri= keyword arguments (see cmd_container).'''

    # type tag stored in the serialized object
    _type = 'container'

def enumeratedir(_dir, created_by_ref): def enumeratedir(_dir, created_by_ref):
'''Enumerate all the files and directories (not recursive) in _dir. '''Enumerate all the files and directories (not recursive) in _dir.


@@ -772,22 +817,16 @@ def cmd_modify(options):
dels = [ x[1:] for x in props if x[0] == '-' ] dels = [ x[1:] for x in props if x[0] == '-' ]


for i in options.files: for i in options.files:
# Get MetaData
#print('a:', repr(i), file=_real_stderr)

try: try:
objs = objstr.by_file(i)
#print('x:', repr(objs), file=_real_stderr)
except KeyError:
try:
fobj = persona.by_file(i)
except FileNotFoundError:
print('ERROR: file not found: %s, or invalid tag specification.' % repr(i), file=sys.stderr)
sys.exit(1)
objs = objstr.get_metadata(i, persona)
#print('d:', repr(i), repr(objs), file=_real_stderr)
except FileNotFoundError:
print('ERROR: file not found: %s, or invalid tag specification.' % repr(i), file=sys.stderr)
sys.exit(1)


objstr.loadobj(fobj)
objs = [ persona.MetaData(hashes=fobj.hashes) ]
#print('y:', repr(objs), file=_real_stderr)


#print('b:', repr(i), repr(objs), file=_real_stderr)
for j in objs: for j in objs:
#print('c:', repr(j), file=_real_stderr) #print('c:', repr(j), file=_real_stderr)
# make into key/values # make into key/values
@@ -873,6 +912,47 @@ def cmd_list(options):


write_objstore(options, objstr) write_objstore(options, objstr)


def cmd_container(options):
    '''Handle the "container" subcommand.

    For each torrent file in options.files: validate the payload
    files on disk, warn (on stderr) about incomplete/invalid ones,
    import FileObjects for the valid ones, and store a signed
    Container object whose uri is a magnet link derived from the
    torrent's infohash.
    '''

    persona, objstr = get_objstore(options)

    for i in options.files:
        good, bad = validate_file(i)

        if bad:
            # NOTE: the "incomple" typo is matched verbatim by the
            # cmd.container.json fixture; fix both together if ever.
            print('Warning, incomple/invalid files, not added:',
                file=sys.stderr)
            # strip the leading torrent directory component so the
            # paths are relative to the container root
            print('\n'.join('\t%s' %
                repr(str(pathlib.Path(*x.parts[1:]))) for x in
                sorted(bad)), file=sys.stderr)

        files = []
        hashes = []
        for j in sorted(good):
            files.append(str(pathlib.PosixPath(*j.parts[1:])))
            try:
                fobj = objstr.by_file(j, ('file',))[0]
            except KeyError:
                # not in the store yet; create and persist it.
                # XXX - old file w/ incorrect hash may be
                # left behind.
                fobj = persona.by_file(j)
                objstr.loadobj(fobj)

            # XXX - ensure only one is added?
            hashes.extend(fobj.hashes)

        # compute the torrent infohash (sha1 of the bencoded info
        # dict) to build the magnet uri
        with open(i, 'rb') as fp:
            torrent = bencode.bdecode(fp.read())
        bencodedinfo = bencode.bencode(torrent['info'])
        infohash = hashlib.sha1(bencodedinfo).hexdigest()

        # XXX - not entirely happy w/ URI
        cont = persona.Container(files=files, hashes=hashes,
            uri='magnet:?xt=urn:btih:%s&dn=%s' % (infohash,
            torrent['info']['name'].decode('utf-8')))

        objstr.loadobj(cont)

    write_objstore(options, objstr)

def cmd_import(options): def cmd_import(options):
persona, objstr = get_objstore(options) persona, objstr = get_objstore(options)


@@ -942,6 +1022,11 @@ def main():
help='files to modify') help='files to modify')
parser_list.set_defaults(func=cmd_list) parser_list.set_defaults(func=cmd_list)


parser_container = subparsers.add_parser('container', help='file is examined as a container and the internal files imported as entries')
parser_container.add_argument('files', nargs='+',
help='files to modify')
parser_container.set_defaults(func=cmd_container)

parser_dump = subparsers.add_parser('dump', help='dump all the objects') parser_dump = subparsers.add_parser('dump', help='dump all the objects')
parser_dump.set_defaults(func=cmd_dump) parser_dump.set_defaults(func=cmd_dump)


@@ -1447,6 +1532,21 @@ class _TestCases(unittest.TestCase):
objst = ObjectStore.load(storefname) objst = ObjectStore.load(storefname)
uniqobjs = len(set((x['uuid'] for x in objst))) uniqobjs = len(set((x['uuid'] for x in objst)))
self.assertEqual(len(list(objst)), uniqobjs) self.assertEqual(len(list(objst)), uniqobjs)
elif special == 'setup bittorrent files':
# copy in the torrent file
tor = importlib.resources.files('medashare.btv')
tor = tor / 'fixtures' / 'somedir.torrent'
shutil.copy(tor, self.tempdir)

# partly recreate files
missingfiles = bttestcase.origfiledata.copy()

missingfiles.update(bttestcase.badfiles)

sd = self.tempdir / bttestcase.dirname
sd.mkdir()

bttestcase.make_files(sd, missingfiles)
else: # pragma: no cover else: # pragma: no cover
raise ValueError('unhandled special: %s' % repr(special)) raise ValueError('unhandled special: %s' % repr(special))




Loading…
Cancel
Save