MetaData Sharing
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 
 

1505 lines
40 KiB

  1. #!/usr/bin/env python
  2. #import pdb, sys; mypdb = pdb.Pdb(stdout=sys.stderr); mypdb.set_trace()
  3. from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey, \
  4. Ed448PublicKey
  5. from cryptography.hazmat.primitives.serialization import Encoding, \
  6. PrivateFormat, PublicFormat, NoEncryption
  7. from unittest import mock
  8. from .hostid import hostuuid
  9. import base64
  10. import base58
  11. import copy
  12. import datetime
  13. import functools
  14. import hashlib
  15. import io
  16. import itertools
  17. import json
  18. import os.path
  19. import pathlib
  20. import pasn1
  21. import re
  22. import shutil
  23. import string
  24. import sys
  25. import tempfile
  26. import unittest
  27. import uuid
# The UUID for the namespace representing the path to a file
_NAMESPACE_MEDASHARE_PATH = uuid.UUID('f6f36b62-3770-4a68-bc3d-dc3e31e429e6')

# useful for debugging when stderr is redirected/captured
_real_stderr = sys.stderr

# algorithm used when creating new hashes
_defaulthash = 'sha512'
# hash algorithms accepted by ObjectStore.makehash
_validhashes = set([ 'sha256', 'sha512' ])
# map of hex-digest length -> algorithm name; used to detect the
# algorithm when a bare hex value is given (strict=False)
_hashlengths = { len(getattr(hashlib, x)().hexdigest()): x for x in
    _validhashes }
  36. def _keyordering(x):
  37. k, v = x
  38. try:
  39. return (MDBase._common_names_list.index(k), k, v)
  40. except ValueError:
  41. return (2**32, k, v)
  42. def _iterdictlist(obj, **kwargs):
  43. l = list(sorted(obj.items(**kwargs), key=_keyordering))
  44. for k, v in l:
  45. if isinstance(v, list):
  46. for i in sorted(v):
  47. yield k, i
  48. else:
  49. yield k, v
  50. def _makeuuid(s):
  51. if isinstance(s, uuid.UUID):
  52. return s
  53. if isinstance(s, bytes):
  54. return uuid.UUID(bytes=s)
  55. else:
  56. return uuid.UUID(s)
  57. def _makedatetime(s):
  58. if isinstance(s, datetime.datetime):
  59. return s
  60. return datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M:%S.%fZ').replace(
  61. tzinfo=datetime.timezone.utc)
  62. def _makebytes(s):
  63. if isinstance(s, bytes):
  64. return s
  65. return base64.urlsafe_b64decode(s)
  66. # XXX - known issue, store is not atomic/safe, overwrites in place instead of
  67. # renames
  68. # XXX - add validation
  69. # XXX - how to add singletons
  70. class MDBase(object):
  71. '''This is a simple wrapper that turns a JSON object into a pythonesc
  72. object where attribute accesses work.'''
  73. _type = 'invalid'
  74. _generated_properties = {
  75. 'uuid': uuid.uuid4,
  76. 'modified': lambda: datetime.datetime.now(
  77. tz=datetime.timezone.utc),
  78. }
  79. # When decoding, the decoded value should be passed to this function
  80. # to get the correct type
  81. _instance_properties = {
  82. 'uuid': _makeuuid,
  83. 'modified': _makedatetime,
  84. 'created_by_ref': _makeuuid,
  85. #'parent_refs': lambda x: [ _makeuuid(y) for y in x ],
  86. 'sig': _makebytes,
  87. }
  88. # Override on a per subclass basis
  89. _class_instance_properties = {
  90. }
  91. _common_properties = [ 'type', 'created_by_ref' ] # XXX - add lang?
  92. _common_optional = set(('parent_refs', 'sig'))
  93. _common_names = set(_common_properties + list(
  94. _generated_properties.keys()))
  95. _common_names_list = _common_properties + list(
  96. _generated_properties.keys())
  97. def __init__(self, obj={}, **kwargs):
  98. obj = copy.deepcopy(obj)
  99. obj.update(kwargs)
  100. if self._type == MDBase._type:
  101. raise ValueError('call MDBase.create_obj instead so correct class is used.')
  102. if 'type' in obj and obj['type'] != self._type:
  103. raise ValueError(
  104. 'trying to create the wrong type of object, got: %s, expected: %s' %
  105. (repr(obj['type']), repr(self._type)))
  106. if 'type' not in obj:
  107. obj['type'] = self._type
  108. for x in self._common_properties:
  109. if x not in obj:
  110. raise ValueError('common property %s not present' % repr(x))
  111. for x, fun in itertools.chain(
  112. self._instance_properties.items(),
  113. self._class_instance_properties.items()):
  114. if x in obj:
  115. obj[x] = fun(obj[x])
  116. for x, fun in self._generated_properties.items():
  117. if x not in obj:
  118. obj[x] = fun()
  119. self._obj = obj
  120. @classmethod
  121. def create_obj(cls, obj):
  122. '''Using obj as a base, create an instance of MDBase of the
  123. correct type.
  124. If the correct type is not found, a ValueError is raised.'''
  125. if isinstance(obj, cls):
  126. obj = obj._obj
  127. ty = obj['type']
  128. for i in MDBase.__subclasses__():
  129. if i._type == ty:
  130. return i(obj)
  131. else:
  132. raise ValueError('Unable to find class for type %s' %
  133. repr(ty))
  134. def new_version(self, *args):
  135. '''For each k, v pair, add the property k as an additional one
  136. (or new one if first), with the value v.'''
  137. obj = copy.deepcopy(self._obj)
  138. common = self._common_names | self._common_optional
  139. for k, v in args:
  140. if k in common:
  141. obj[k] = v
  142. else:
  143. obj.setdefault(k, []).append(v)
  144. del obj['modified']
  145. return self.create_obj(obj)
  146. def __repr__(self): # pragma: no cover
  147. return '%s(%s)' % (self.__class__.__name__, repr(self._obj))
  148. def __getattr__(self, k):
  149. try:
  150. return self._obj[k]
  151. except KeyError:
  152. raise AttributeError(k)
  153. def __setattr__(self, k, v):
  154. if k[0] == '_': # direct attribute
  155. self.__dict__[k] = v
  156. else:
  157. self._obj[k] = v
  158. def __getitem__(self, k):
  159. return self._obj[k]
  160. def __to_dict__(self):
  161. return self._obj
  162. def __eq__(self, o):
  163. return self._obj == o
  164. def __contains__(self, k):
  165. return k in self._obj
  166. def items(self, skipcommon=True):
  167. return [ (k, v) for k, v in self._obj.items() if
  168. not skipcommon or k not in self._common_names ]
  169. def encode(self, meth='asn1'):
  170. if meth == 'asn1':
  171. return _asn1coder.dumps(self)
  172. return _jsonencoder.encode(self._obj)
  173. @classmethod
  174. def decode(cls, s, meth='asn1'):
  175. if meth == 'asn1':
  176. obj = _asn1coder.loads(s)
  177. else:
  178. obj = json.loads(s)
  179. return cls.create_obj(obj)
class MetaData(MDBase):
	'''A generic metadata object (type "metadata").'''
	_type = 'metadata'
class Identity(MDBase):
	'''An identity object (type "identity"); the root object a
	Persona wraps.'''
	_type = 'identity'

	# Identites don't need a created by
	_common_properties = [ x for x in MDBase._common_properties if x !=
	    'created_by_ref' ]
	# no parent_refs either, but name and pubkey are allowed
	_common_optional = set([ x for x in MDBase._common_optional if x !=
	    'parent_refs' ] + [ 'name', 'pubkey' ])
	_common_names = set(_common_properties + list(
	    MDBase._generated_properties.keys()))
  191. def _trytodict(o):
  192. if isinstance(o, uuid.UUID):
  193. return 'bytes', o.bytes
  194. try:
  195. return 'dict', o.__to_dict__()
  196. except Exception: # pragma: no cover
  197. raise TypeError('unable to find __to_dict__ on %s: %s' %
  198. (type(o), repr(o)))
class CanonicalCoder(pasn1.ASN1DictCoder):
	'''ASN.1 coder that encodes dict items in sorted key order, so
	that equal dicts always serialize to identical bytes (needed for
	stable signatures).'''
	def enc_dict(self, obj, **kwargs):
		# present the dict to the base encoder through a shim
		# whose items() yields sorted pairs
		class FakeIter:
			def items(self):
				return iter(sorted(obj.items()))

		return pasn1.ASN1DictCoder.enc_dict(self, FakeIter(), **kwargs)

# module-wide coder instance; _trytodict handles UUIDs and MDBase objects
_asn1coder = CanonicalCoder(coerce=_trytodict)
  206. class _JSONEncoder(json.JSONEncoder):
  207. def default(self, o):
  208. if isinstance(o, uuid.UUID):
  209. return str(o)
  210. elif isinstance(o, datetime.datetime):
  211. o = o.astimezone(datetime.timezone.utc)
  212. return o.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
  213. elif isinstance(o, bytes):
  214. return base64.urlsafe_b64encode(o).decode('US-ASCII')
  215. return json.JSONEncoder.default(self, o)
  216. _jsonencoder = _JSONEncoder()
  217. class Persona(object):
  218. '''The object that represents a persona, or identity. It will
  219. create the proper identity object, serialize for saving keys,
  220. create objects for that persona and other management.'''
  221. def __init__(self, identity=None, key=None):
  222. if identity is None:
  223. self._identity = Identity()
  224. else:
  225. self._identity = identity
  226. self._key = key
  227. self._pubkey = None
  228. if 'pubkey' in self._identity:
  229. pubkeybytes = self._identity.pubkey
  230. self._pubkey = Ed448PublicKey.from_public_bytes(
  231. pubkeybytes)
  232. self._created_by_ref = self._identity.uuid
  233. def MetaData(self, *args, **kwargs):
  234. kwargs['created_by_ref'] = self.uuid
  235. return self.sign(MetaData(*args, **kwargs))
  236. @property
  237. def uuid(self):
  238. '''Return the UUID of the identity represented.'''
  239. return self._identity.uuid
  240. def __repr__(self): # pragma: no cover
  241. r = '<Persona: has key: %s, has pubkey: %s, identity: %s>' % \
  242. (self._key is not None, self._pubkey is not None,
  243. repr(self._identity))
  244. return r
  245. @classmethod
  246. def from_pubkey(cls, pubkeystr):
  247. pubstr = base58.b58decode_check(pubkeystr)
  248. uuid, pubkey = _asn1coder.loads(pubstr)
  249. ident = Identity(uuid=uuid, pubkey=pubkey)
  250. return cls(ident)
  251. def get_identity(self):
  252. '''Return the Identity object for this Persona.'''
  253. return self._identity
  254. def get_pubkey(self):
  255. '''Get a printable version of the public key. This is used
  256. for importing into different programs, or for shared.'''
  257. idobj = self._identity
  258. pubstr = _asn1coder.dumps([ idobj.uuid, idobj.pubkey ])
  259. return base58.b58encode_check(pubstr)
  260. def new_version(self, *args):
  261. '''Update the Persona's Identity object.'''
  262. self._identity = self.sign(self._identity.new_version(*args))
  263. return self._identity
  264. def store(self, fname):
  265. '''Store the Persona to a file. If there is a private
  266. key associated w/ the Persona, it will be saved as well.'''
  267. with open(fname, 'wb') as fp:
  268. obj = {
  269. 'identity': self._identity,
  270. }
  271. if self._key is not None:
  272. obj['key'] = \
  273. self._key.private_bytes(Encoding.Raw,
  274. PrivateFormat.Raw, NoEncryption())
  275. fp.write(_asn1coder.dumps(obj))
  276. @classmethod
  277. def load(cls, fname):
  278. '''Load the Persona from the provided file.'''
  279. with open(fname, 'rb') as fp:
  280. objs = _asn1coder.loads(fp.read())
  281. kwargs = {}
  282. if 'key' in objs:
  283. kwargs['key'] = Ed448PrivateKey.from_private_bytes(
  284. objs['key'])
  285. return cls(Identity(objs['identity']), **kwargs)
  286. def generate_key(self):
  287. '''Generate a key for this Identity.
  288. Raises a RuntimeError if a key is already present.'''
  289. if self._key:
  290. raise RuntimeError('a key already exists')
  291. self._key = Ed448PrivateKey.generate()
  292. self._pubkey = self._key.public_key()
  293. pubkey = self._pubkey.public_bytes(Encoding.Raw,
  294. PublicFormat.Raw)
  295. self._identity = self.sign(self._identity.new_version(('pubkey',
  296. pubkey)))
  297. def _makesigbytes(self, obj):
  298. obj = dict(obj.items(False))
  299. try:
  300. del obj['sig']
  301. except KeyError:
  302. pass
  303. return _asn1coder.dumps(obj)
  304. def sign(self, obj):
  305. '''Takes the object, adds a signature, and returns the new
  306. object.'''
  307. sigbytes = self._makesigbytes(obj)
  308. sig = self._key.sign(sigbytes)
  309. newobj = MDBase.create_obj(obj)
  310. newobj.sig = sig
  311. return newobj
  312. def verify(self, obj):
  313. sigbytes = self._makesigbytes(obj)
  314. pubkey = self._pubkey.public_bytes(Encoding.Raw,
  315. PublicFormat.Raw)
  316. self._pubkey.verify(obj['sig'], sigbytes)
  317. return True
  318. def by_file(self, fname):
  319. '''Return a file object for the file named fname.'''
  320. fobj = FileObject.from_file(fname, self._created_by_ref)
  321. return self.sign(fobj)
  322. class ObjectStore(object):
  323. '''A container to store for the various Metadata objects.'''
  324. # The _uuids property contains both the UUIDv4 for objects, and
  325. # looking up the UUIDv5 for FileObjects.
  326. def __init__(self, created_by_ref):
  327. self._created_by_ref = created_by_ref
  328. self._uuids = {}
  329. self._hashes = {}
  330. @staticmethod
  331. def makehash(hashstr, strict=True):
  332. '''Take a hash or hash string, and return a valid hash
  333. string from it.
  334. This makes sure that it is of the correct type and length.
  335. If strict is False, the function will detect the length and
  336. return a valid hash string if one can be found.
  337. By default, the string must be prepended by the type,
  338. followed by a colon, followed by the value in hex in all
  339. lower case characters.'''
  340. try:
  341. hash, value = hashstr.split(':')
  342. except ValueError:
  343. if strict:
  344. raise
  345. hash = _hashlengths[len(hashstr)]
  346. value = hashstr
  347. bvalue = value.encode('ascii')
  348. if strict and len(bvalue.translate(None,
  349. string.hexdigits.lower().encode('ascii'))) != 0:
  350. raise ValueError('value has invalid hex digits (must be lower case)', value)
  351. if hash in _validhashes:
  352. return ':'.join((hash, value))
  353. raise ValueError
  354. def __len__(self):
  355. return len(self._uuids)
  356. def __iter__(self):
  357. return iter(self._uuids.values())
  358. def store(self, fname):
  359. '''Write out the objects in the store to the file named
  360. fname.'''
  361. # eliminate objs stored by multiple uuids (FileObjects)
  362. objs = { id(x): x for x in self._uuids.values() }
  363. with open(fname, 'wb') as fp:
  364. obj = {
  365. 'created_by_ref': self._created_by_ref,
  366. 'objects': list(objs.values()),
  367. }
  368. fp.write(_asn1coder.dumps(obj))
  369. def loadobj(self, obj):
  370. '''Load obj into the data store.'''
  371. obj = MDBase.create_obj(obj)
  372. self._uuids[obj.uuid] = obj
  373. if obj.type == 'file':
  374. self._uuids[_makeuuid(obj.id)] = obj
  375. for j in obj.hashes:
  376. h = self.makehash(j)
  377. self._hashes.setdefault(h, []).append(obj)
  378. @classmethod
  379. def load(cls, fname):
  380. '''Load objects from the provided file name.
  381. Basic validation will be done on the objects in the file.
  382. The objects will be accessible via other methods.'''
  383. with open(fname, 'rb') as fp:
  384. objs = _asn1coder.loads(fp.read())
  385. obj = cls(objs['created_by_ref'])
  386. for i in objs['objects']:
  387. obj.loadobj(i)
  388. return obj
  389. def by_id(self, id):
  390. '''Look up an object by it's UUID.'''
  391. if not isinstance(id, uuid.UUID):
  392. uid = uuid.UUID(id)
  393. else:
  394. uid = id
  395. return self._uuids[uid]
  396. def by_hash(self, hash):
  397. '''Look up an object by it's hash value.'''
  398. h = self.makehash(hash, strict=False)
  399. return self._hashes[h]
  400. def by_file(self, fname, types=('metadata', )):
  401. '''Return a metadata object for the file named fname.
  402. Will raise a KeyError if this file does not exist in
  403. the database.
  404. Will raise a ValueError if fname currently does not
  405. match what is in the database.
  406. '''
  407. fid = FileObject.make_id(fname)
  408. fobj = self.by_id(fid)
  409. fobj.verify()
  410. for i in fobj.hashes:
  411. j = self.by_hash(i)
  412. # Filter out non-metadata objects
  413. j = [ x for x in j if x.type in types ]
  414. if j:
  415. return j
  416. else:
  417. raise KeyError('unable to find metadata for file: %s' %
  418. repr(fname))
  419. def _readfp(fp):
  420. while True:
  421. r = fp.read(64*1024)
  422. if r == b'':
  423. return
  424. yield r
  425. def _hashfile(fname):
  426. hash = getattr(hashlib, _defaulthash)()
  427. with open(fname, 'rb') as fp:
  428. for r in _readfp(fp):
  429. hash.update(r)
  430. return '%s:%s' % (_defaulthash, hash.hexdigest())
class FileObject(MDBase):
	'''Metadata object (type "file") describing a file on disk.'''
	_type = 'file'

	# decoding hooks for the properties specific to this type
	_class_instance_properties = {
		'hostid': _makeuuid,
		'id': _makeuuid,
		'mtime': _makedatetime,
	}

	@staticmethod
	def make_id(fname):
		'''Take a local file name, and make the id for it. Note that
		converts from the local path separator to a forward slash so
		that it will be the same between Windows and Unix systems.

		NOTE(review): os.path.split only splits off the final
		component, so separators earlier in the path are not
		converted -- confirm the cross-platform claim above.'''
		fname = os.path.realpath(fname)
		return uuid.uuid5(_NAMESPACE_MEDASHARE_PATH,
		    str(hostuuid()) + '/'.join(os.path.split(fname)))

	@classmethod
	def from_file(cls, filename, created_by_ref):
		'''Stat and hash filename, returning a FileObject
		attributed to created_by_ref.

		NOTE(review): uses abspath here while make_id uses
		realpath; these differ when symlinks are involved --
		confirm intended.'''
		filename = os.path.abspath(filename)
		s = os.stat(filename)
		# XXX - race here, fix w/ checking mtime before/after?
		obj = {
			'created_by_ref': created_by_ref,
			'hostid': hostuuid(),
			'dir': os.path.dirname(filename),
			'filename': os.path.basename(filename),
			'id': cls.make_id(filename),
			'mtime': datetime.datetime.fromtimestamp(s.st_mtime,
			    tz=datetime.timezone.utc),
			'size': s.st_size,
			'hashes': [ _hashfile(filename), ],
		}
		return cls(obj)

	def verify(self, complete=False):
		'''Verify that this FileObject is still valid. It will
		by default, only do a mtime verification.

		It will raise a ValueError if the file does not match.

		NOTE(review): the complete flag is currently unused; only
		the mtime/size check is implemented.'''
		s = os.stat(os.path.join(self.dir, self.filename))
		mtimets = datetime.datetime.fromtimestamp(s.st_mtime,
		    tz=datetime.timezone.utc).timestamp()

		if self.mtime.timestamp() != mtimets or \
		    self.size != s.st_size:
			raise ValueError('file %s has changed' %
			    repr(self.filename))
  474. def enumeratedir(_dir, created_by_ref):
  475. '''Enumerate all the files and directories (not recursive) in _dir.
  476. Returned is a list of FileObjects.'''
  477. return [FileObject.from_file(os.path.join(_dir, x),
  478. created_by_ref) for x in os.listdir(_dir) if not
  479. os.path.isdir(os.path.join(_dir, x)) ]
  480. def get_objstore(options):
  481. persona = get_persona(options)
  482. storefname = os.path.expanduser('~/.medashare_store.pasn1')
  483. try:
  484. objstr = ObjectStore.load(storefname)
  485. except FileNotFoundError:
  486. objstr = ObjectStore(persona.get_identity().uuid)
  487. return persona, objstr
  488. def write_objstore(options, objstr):
  489. storefname = os.path.expanduser('~/.medashare_store.pasn1')
  490. objstr.store(storefname)
  491. def get_persona(options):
  492. identfname = os.path.expanduser('~/.medashare_identity.pasn1')
  493. try:
  494. persona = Persona.load(identfname)
  495. except FileNotFoundError:
  496. print('ERROR: Identity not created, create w/ genident.',
  497. file=sys.stderr)
  498. sys.exit(1)
  499. return persona
  500. def cmd_genident(options):
  501. identfname = os.path.expanduser('~/.medashare_identity.pasn1')
  502. if os.path.exists(identfname):
  503. print('Error: Identity already created.', file=sys.stderr)
  504. sys.exit(1)
  505. persona = Persona()
  506. persona.generate_key()
  507. persona.new_version(*(x.split('=', 1) for x in options.tagvalue))
  508. persona.store(identfname)
  509. def cmd_ident(options):
  510. identfname = os.path.expanduser('~/.medashare_identity.pasn1')
  511. persona = Persona.load(identfname)
  512. if options.tagvalue:
  513. persona.new_version(*(x.split('=', 1) for x in
  514. options.tagvalue))
  515. persona.store(identfname)
  516. else:
  517. ident = persona.get_identity()
  518. for k, v in _iterdictlist(ident, skipcommon=False):
  519. print('%s:\t%s' % (k, v))
  520. def cmd_pubkey(options):
  521. identfname = os.path.expanduser('~/.medashare_identity.pasn1')
  522. persona = Persona.load(identfname)
  523. print(persona.get_pubkey().decode('ascii'))
def cmd_modify(options):
	'''modify subcommand: add (+tag=value) or delete (-tag[=value])
	tags on the metadata objects for the given files, creating a
	FileObject and MetaData object when a file is not yet known.'''
	persona, objstr = get_objstore(options)

	# each arg is "+tag=value" or "-tag[=value]"; split into
	# [op, tag] or [op, tag, value]
	props = [[ x[0] ] + x[1:].split('=', 1) for x in options.modtagvalues]
	if any(x[0] not in ('+', '-') for x in props):
		print('ERROR: tag needs to start with a "+" (add) or a "-" (remove).', file=sys.stderr)
		sys.exit(1)

	# refuse to touch the reserved common/optional property names
	badtags = list(x[1] for x in props if x[1] in (MDBase._common_names |
	    MDBase._common_optional))
	if any(badtags):
		print('ERROR: invalid tag%s: %s.' % ( 's' if
		    len(badtags) > 1 else '', repr(badtags)), file=sys.stderr)
		sys.exit(1)

	adds = [ x[1:] for x in props if x[0] == '+' ]
	# adds must always carry a value
	if any((len(x) != 2 for x in adds)):
		print('ERROR: invalid tag, needs an "=".', file=sys.stderr)
		sys.exit(1)

	dels = [ x[1:] for x in props if x[0] == '-' ]

	for i in options.files:
		# Get MetaData
		try:
			objs = objstr.by_file(i)
		except KeyError:
			# unknown file: hash it and start a fresh
			# MetaData object for it
			fobj = persona.by_file(i)
			objstr.loadobj(fobj)
			objs = [ persona.MetaData(hashes=fobj.hashes) ]

		for j in objs:
			# make into key/values
			obj = j.__to_dict__()

			# delete tags
			for k in dels:
				try:
					key, v = k
				except ValueError:
					# bare "-tag": drop the whole key
					del obj[k[0]]
				else:
					# "-tag=value": drop one value
					obj[key].remove(v)

			# add tags
			for k, v in adds:
				obj.setdefault(k, []).append(v)

			# force a fresh modified timestamp on the new version
			del obj['modified']
			nobj = MDBase.create_obj(obj)

			objstr.loadobj(nobj)

	write_objstore(options, objstr)
  567. def cmd_dump(options):
  568. persona, objstr = get_objstore(options)
  569. for i in objstr:
  570. print(repr(i))
def cmd_list(options):
	'''list subcommand: print the tags for each given file, creating
	and saving a FileObject when the file is not yet known.'''
	persona, objstr = get_objstore(options)

	for i in options.files:
		try:
			objs = objstr.by_file(i)
		except (ValueError, KeyError):
			# create the file, it may have the same hash
			# as something else
			try:
				fobj = persona.by_file(i)
				objstr.loadobj(fobj)
				# NOTE(review): objs is reassigned but
				# unused; by_file is called again below
				objs = objstr.by_file(i)
			except (FileNotFoundError, KeyError) as e:
				print('ERROR: file not found: %s' % repr(i), file=sys.stderr)
				sys.exit(1)
		except FileNotFoundError:
			# XXX - tell the difference?
			print('ERROR: file not found: %s' % repr(i),
			    file=sys.stderr)
			sys.exit(1)

		for j in objstr.by_file(i):
			for k, v in _iterdictlist(j):
				print('%s:\t%s' % (k, v))

	# This is needed so that if it creates a FileObj, which may be
	# expensive (hashing large file), that it gets saved.
	write_objstore(options, objstr)
  597. def main():
  598. import argparse
  599. parser = argparse.ArgumentParser()
  600. parser.add_argument('--db', '-d', type=str,
  601. help='base name for storage')
  602. subparsers = parser.add_subparsers(title='subcommands',
  603. description='valid subcommands', help='additional help')
  604. parser_gi = subparsers.add_parser('genident', help='generate identity')
  605. parser_gi.add_argument('tagvalue', nargs='+',
  606. help='add the arg as metadata for the identity, tag=[value]')
  607. parser_gi.set_defaults(func=cmd_genident)
  608. parser_i = subparsers.add_parser('ident', help='update identity')
  609. parser_i.add_argument('tagvalue', nargs='*',
  610. help='add the arg as metadata for the identity, tag=[value]')
  611. parser_i.set_defaults(func=cmd_ident)
  612. parser_pubkey = subparsers.add_parser('pubkey', help='print public key of identity')
  613. parser_pubkey.set_defaults(func=cmd_pubkey)
  614. # used so that - isn't treated as an option
  615. parser_mod = subparsers.add_parser('modify', help='modify tags on file(s)', prefix_chars='@')
  616. parser_mod.add_argument('modtagvalues', nargs='+',
  617. help='add (+) or delete (-) the tag=[value], for the specified files')
  618. parser_mod.add_argument('files', nargs='+',
  619. help='files to modify')
  620. parser_mod.set_defaults(func=cmd_modify)
  621. parser_list = subparsers.add_parser('list', help='list tags on file(s)')
  622. parser_list.add_argument('files', nargs='+',
  623. help='files to modify')
  624. parser_list.set_defaults(func=cmd_list)
  625. parser_dump = subparsers.add_parser('dump', help='dump all the objects')
  626. parser_dump.set_defaults(func=cmd_dump)
  627. options = parser.parse_args()
  628. fun = options.func
  629. fun(options)
  630. if __name__ == '__main__': # pragma: no cover
  631. main()
class _TestCononicalCoder(unittest.TestCase):
	# NOTE(review): class name has a typo ("Cononical" ->
	# "Canonical"); left as-is since test discovery uses the
	# existing name.

	def test_con(self):
		'''Equal dicts with different item order encode to the
		same bytes under the canonical coder, but not under plain
		pasn1.'''
		# make a dict
		obja = {
			'foo': 23984732, 'a': 5, 'b': 6,
			'something': '2398472398723498273dfasdfjlaksdfj'
		}

		# reorder the items in it
		objaitems = list(obja.items())
		objaitems.sort()
		objb = dict(objaitems)

		# and they are still the same
		self.assertEqual(obja, objb)

		# This is to make sure that item order changed
		self.assertNotEqual(list(obja.items()), list(objb.items()))

		astr = pasn1.dumps(obja)
		bstr = pasn1.dumps(objb)

		# that they normally will be serialized differently
		self.assertNotEqual(astr, bstr)

		# but w/ the special encoder
		astr = _asn1coder.dumps(obja)
		bstr = _asn1coder.dumps(objb)

		# they are now encoded the same
		self.assertEqual(astr, bstr)
  656. class _TestCases(unittest.TestCase):
	def setUp(self):
		'''Create a scratch copy of the fixture tree and load the
		sample persona for use by the tests.'''
		self.fixtures = pathlib.Path('fixtures').resolve()

		d = pathlib.Path(tempfile.mkdtemp()).resolve()
		self.basetempdir = d
		self.tempdir = d / 'subdir'
		self.persona = Persona.load(os.path.join('fixtures',
		    'sample.persona.pasn1'))
		self.created_by_ref = self.persona.get_identity().uuid

		shutil.copytree(self.fixtures / 'testfiles', self.tempdir)

		# remembered so tearDown can restore it after tests chdir
		self.oldcwd = os.getcwd()
  666. def tearDown(self):
  667. shutil.rmtree(self.basetempdir)
  668. self.tempdir = None
  669. os.chdir(self.oldcwd)
	def test_fileobject(self):
		'''Round-trip a FileObject through an ObjectStore file and
		through JSON, checking UUID-typed properties survive.'''
		os.chdir(self.tempdir)

		objst = ObjectStore(self.created_by_ref)

		a = self.persona.by_file('test.txt')

		# that the dir is absolute
		self.assertEqual(a.dir[0], '/')

		# make sure the file's hostid is a UUID
		self.assertIsInstance(a.hostid, uuid.UUID)

		# make sure the file's id is a UUID
		self.assertIsInstance(a.id, uuid.UUID)

		objst.loadobj(a)

		# write out the store
		objst.store('teststore.pasn1')

		# load it back in
		objstr = ObjectStore.load('teststore.pasn1')

		a = objstr.by_id(a['uuid'])

		# make sure the hostid is still a UUID
		self.assertIsInstance(a.hostid, uuid.UUID)

		# make sure the file's id is still a UUID
		self.assertIsInstance(a.id, uuid.UUID)

		# That it can be encoded to json
		jsfo = a.encode('json')

		# that it can be decoded from json
		jsloadedfo = MDBase.decode(jsfo, 'json')

		# and that it is equal
		self.assertEqual(jsloadedfo, a)
	def test_mdbase(self):
		'''Exercise MDBase creation, generated properties and
		new_version semantics.'''
		self.assertRaises(ValueError, MDBase, created_by_ref='')
		self.assertRaises(ValueError, MDBase.create_obj,
		    { 'type': 'unknosldkfj' })
		self.assertRaises(ValueError, MDBase.create_obj,
		    { 'type': 'metadata' })

		baseobj = {
			'type': 'metadata',
			'created_by_ref': self.created_by_ref,
		}
		origbase = copy.deepcopy(baseobj)

		# that when an MDBase object is created
		md = MDBase.create_obj(baseobj)

		# it doesn't modify the passed in object (when adding
		# generated properties)
		self.assertEqual(baseobj, origbase)

		# and it has the generted properties
		# Note: cannot mock the functions as they are already
		# referenced at creation time
		self.assertIn('uuid', md)
		self.assertIn('modified', md)

		# That you can create a new version using new_version
		md2 = md.new_version(('dc:creator', 'Jim Bob',))

		# that they are different
		self.assertNotEqual(md, md2)

		# and that the new modified time is different from the old
		self.assertNotEqual(md.modified, md2.modified)

		# and that the modification is present
		self.assertEqual(md2['dc:creator'], [ 'Jim Bob' ])

		# that providing a value from common property
		fvalue = b'fakesig'
		md3 = md.new_version(('sig', fvalue))

		# gets set directly, and is not a list
		self.assertEqual(md3.sig, fvalue)

		# that invalid attribute access raises correct exception
		self.assertRaises(AttributeError, getattr, md,
		    'somerandombogusattribute')
  730. def test_mdbase_encode_decode(self):
  731. # that an object
  732. baseobj = {
  733. 'type': 'metadata',
  734. 'created_by_ref': self.created_by_ref,
  735. }
  736. obj = MDBase.create_obj(baseobj)
  737. # can be encoded
  738. coded = obj.encode()
  739. # and that the rsults can be decoded
  740. decobj = MDBase.decode(coded)
  741. # and that they are equal
  742. self.assertEqual(obj, decobj)
  743. # and in the encoded object
  744. eobj = _asn1coder.loads(coded)
  745. # the uuid property is a str instance
  746. self.assertIsInstance(eobj['uuid'], bytes)
  747. # and has the length of 16
  748. self.assertEqual(len(eobj['uuid']), 16)
  749. # and that json can be used to encode
  750. js = obj.encode('json')
  751. # and that it is valid json
  752. jsobj = json.loads(js)
  753. # and that it can be decoded
  754. jsdecobj = MDBase.decode(js, 'json')
  755. # and that it matches
  756. self.assertEqual(jsdecobj, obj)
  757. for key, inval in [
  758. ('modified', '2022-08-19T01:27:34.258676'),
  759. ('modified', '2022-08-19T01:27:34Z'),
  760. ('modified', '2022-08-19T01:27:34.258676+00:00'),
  761. ('uuid', 'z5336176-8086-4c21-984f-fda60ddaa172'),
  762. ('uuid', '05336176-8086-421-984f-fda60ddaa172'),
  763. ]:
  764. jsobj['modified'] = inval
  765. jstest = json.dumps(jsobj)
  766. self.assertRaises(ValueError, MDBase.decode, jstest, 'json')
  767. def test_mdbase_wrong_type(self):
  768. # that created_by_ref can be passed by kw
  769. obj = MetaData(created_by_ref=self.created_by_ref)
  770. self.assertRaises(ValueError, FileObject, dict(obj.items(False)))
	def test_makehash(self):
		'''makehash validates the type prefix, rejects upper-case
		hex, and detects the algorithm from bare digests.'''
		self.assertRaises(ValueError, ObjectStore.makehash, 'slkj')
		self.assertRaises(ValueError, ObjectStore.makehash, 'sha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ADA')
		self.assertRaises(ValueError, ObjectStore.makehash, 'bogushash:9e0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ADA', strict=False)

		# bare digests get the algorithm detected by length
		self.assertEqual(ObjectStore.makehash('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', strict=False), 'sha512:cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e')
		self.assertEqual(ObjectStore.makehash('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', strict=False), 'sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
	def test_enumeratedir(self):
		'''enumeratedir returns FileObjects whose ids are stable
		across absolute and relative paths.'''
		files = enumeratedir(self.tempdir, self.created_by_ref)
		ftest = files[1]
		fname = 'test.txt'

		# make sure that they are of type MDBase
		self.assertIsInstance(ftest, MDBase)

		oldid = ftest.id
		self.assertEqual(ftest.filename, fname)
		self.assertEqual(ftest.dir, str(self.tempdir))
		# XXX - do we add host information?
		self.assertEqual(ftest.id, uuid.uuid5(_NAMESPACE_MEDASHARE_PATH,
		    str(hostuuid()) + '/'.join(os.path.split(self.tempdir) +
		    ( fname, ))))
		self.assertEqual(ftest.mtime, datetime.datetime(2019, 5, 20,
		    21, 47, 36, tzinfo=datetime.timezone.utc))
		self.assertEqual(ftest.size, 15)
		self.assertIn('sha512:7d5768d47b6bc27dc4fa7e9732cfa2de506ca262a2749cb108923e5dddffde842bbfee6cb8d692fb43aca0f12946c521cce2633887914ca1f96898478d10ad3f', ftest.hashes)

		# XXX - make sure works w/ relative dirs
		files = enumeratedir(os.path.relpath(self.tempdir),
		    self.created_by_ref)
		self.assertEqual(oldid, files[1].id)
  798. def test_mdbaseoverlay(self):
  799. objst = ObjectStore(self.created_by_ref)
  800. # that a base object
  801. bid = uuid.uuid4()
  802. objst.loadobj({
  803. 'type': 'metadata',
  804. 'uuid': bid,
  805. 'modified': datetime.datetime(2019, 6, 10, 14, 3, 10),
  806. 'created_by_ref': self.created_by_ref,
  807. 'hashes': [ 'sha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada' ],
  808. 'someprop': [ 'somevalue' ],
  809. 'lang': 'en',
  810. })
  811. # can have an overlay object
  812. oid = uuid.uuid4()
  813. dhash = 'sha256:a7c96262c21db9a06fd49e307d694fd95f624569f9b35bb3ffacd880440f9787'
  814. objst.loadobj({
  815. 'type': 'metadata',
  816. 'uuid': oid,
  817. 'modified': datetime.datetime(2019, 6, 10, 18, 3, 10),
  818. 'created_by_ref': self.created_by_ref,
  819. 'hashes': [ dhash ],
  820. 'parent_refs': [ bid ],
  821. 'lang': 'en',
  822. })
  823. # and that when you get it's properties
  824. oobj = objst.by_id(oid)
  825. odict = dict(list(oobj.items()))
  826. # that is has the overlays property
  827. self.assertEqual(odict['parent_refs'], [ bid ])
  828. # that it doesn't have a common property
  829. self.assertNotIn('type', odict)
  830. # that when skipcommon is False
  831. odict = dict(oobj.items(False))
  832. # that it does have a common property
  833. self.assertIn('type', odict)
    def test_persona(self):
        """Exercise the full Persona lifecycle: identity creation, key
        generation, pubkey export/import, object signing, signature
        verification (including failure cases), and persistence.

        The steps are strictly ordered -- e.g. generate_key() must
        happen before the pubkey/sig checks, and the RuntimeError
        check relies on a key already existing.
        """
        # that a newly created persona
        persona = Persona()
        # has an identity object
        idobj = persona.get_identity()
        # and that it has a uuid attribute that matches
        self.assertEqual(persona.uuid, idobj['uuid'])
        # that a key can be generated
        persona.generate_key()
        # that the pubkey property is present (refetch the identity so
        # it reflects the newly generated key)
        idobj = persona.get_identity()
        self.assertIsInstance(idobj['pubkey'], bytes)
        # that get_pubkey returns the correct thing: the ASN.1 encoding
        # of [ uuid, pubkey ], base58-encoded with a checksum
        pubstr = _asn1coder.dumps([ idobj.uuid, idobj['pubkey'] ])
        self.assertEqual(persona.get_pubkey(),
            base58.b58encode_check(pubstr))
        # and that there is a signature
        self.assertIsInstance(idobj['sig'], bytes)
        # and that it can verify itself
        persona.verify(idobj)
        # and that a new persona can be created from the pubkey
        pkpersona = Persona.from_pubkey(persona.get_pubkey())
        # and that it can verify the old identity
        self.assertTrue(pkpersona.verify(idobj))
        # that a second time, it raises an exception (a key already
        # exists and must not be silently replaced)
        self.assertRaises(RuntimeError, persona.generate_key)
        # that a file object created by it
        testfname = os.path.join(self.tempdir, 'test.txt')
        testobj = persona.by_file(testfname)
        # has the correct created_by_ref
        self.assertEqual(testobj.created_by_ref, idobj.uuid)
        self.assertEqual(testobj.type, 'file')
        # and has a signature
        self.assertIn('sig', testobj)
        # that a persona created from the identity object
        vpersona = Persona(idobj)
        # can verify the sig
        self.assertTrue(vpersona.verify(testobj))
        # and that a bogus signature
        bogussig = 'somebogussig'
        bogusobj = MDBase.create_obj(testobj)
        bogusobj.sig = bogussig
        # fails to verify
        self.assertRaises(Exception, vpersona.verify, bogusobj)
        # and that a modified object (new version w/o re-signing)
        otherobj = testobj.new_version(('customprop', 'value'))
        # fails to verify
        self.assertRaises(Exception, vpersona.verify, otherobj)
        # that a persona object can be written
        perpath = os.path.join(self.basetempdir, 'persona.pasn1')
        persona.store(perpath)
        # and that when loaded back
        loadpersona = Persona.load(perpath)
        # the new persona object can sign an object
        nvtestobj = loadpersona.sign(testobj.new_version())
        # and the old persona can verify it.
        self.assertTrue(vpersona.verify(nvtestobj))
  891. def test_persona_metadata(self):
  892. # that a persona
  893. persona = Persona()
  894. persona.generate_key()
  895. # can create a metadata object
  896. hashobj = ['asdlfkj']
  897. mdobj = persona.MetaData(hashes=hashobj)
  898. # that the object has the correct created_by_ref
  899. self.assertEqual(mdobj.created_by_ref, persona.uuid)
  900. # and has the provided hashes
  901. self.assertEqual(mdobj.hashes, hashobj)
  902. # and that it can be verified
  903. persona.verify(mdobj)
    def test_objectstore(self):
        """End-to-end checks of ObjectStore: lookup by hash/id/file,
        serialization to disk, and round-trip of the stored objects.

        Relies on the fixtures/sample.* files and on the test.txt file
        created in self.tempdir by the test fixture setup.
        """
        persona = Persona.load(os.path.join('fixtures', 'sample.persona.pasn1'))
        objst = ObjectStore.load(os.path.join('fixtures', 'sample.data.pasn1'))
        # add a second object that shares the fixture object's sha256
        # hash, so by_hash below returns both
        objst.loadobj({
            'type': 'metadata',
            'uuid': uuid.UUID('c9a1d1e2-3109-4efd-8948-577dc15e44e7'),
            'modified': datetime.datetime(2019, 5, 31, 14, 3, 10,
                tzinfo=datetime.timezone.utc),
            'created_by_ref': self.created_by_ref,
            'hashes': [ 'sha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada' ],
            'lang': 'en',
        })
        lst = objst.by_hash('91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada')
        self.assertEqual(len(lst), 2)
        # by_id accepts a string form of the uuid
        byid = objst.by_id('3e466e06-45de-4ecc-84ba-2d2a3d970e96')
        self.assertIsInstance(byid, MetaData)
        self.assertIn(byid, lst)
        r = byid
        self.assertEqual(r.uuid, uuid.UUID('3e466e06-45de-4ecc-84ba-2d2a3d970e96'))
        self.assertEqual(r['dc:creator'], [ 'John-Mark Gurney' ])
        # test storing the object store
        fname = 'testfile.pasn1'
        objst.store(fname)
        with open(fname, 'rb') as fp:
            objs = _asn1coder.loads(fp.read())
        os.unlink(fname)
        self.assertEqual(len(objs), len(objst))
        self.assertEqual(objs['created_by_ref'], self.created_by_ref.bytes)
        # make sure that the read back data matches; uuids are stored
        # as raw bytes on disk, so convert them back before comparing
        for i in objs['objects']:
            i['created_by_ref'] = uuid.UUID(bytes=i['created_by_ref'])
            i['uuid'] = uuid.UUID(bytes=i['uuid'])
            self.assertEqual(objst.by_id(i['uuid']), i)
        # that a file
        testfname = os.path.join(self.tempdir, 'test.txt')
        # when registered
        objst.loadobj(persona.by_file(testfname))
        # can be found
        self.assertEqual(objst.by_file(testfname), [ byid ])
        # NOTE(review): the lookup is repeated verbatim -- presumably
        # to exercise a cached/second-lookup code path in by_file;
        # confirm the duplication is intentional.
        self.assertEqual(objst.by_file(testfname), [ byid ])
        self.assertRaises(KeyError, objst.by_file, '/dev/null')
        # XXX make sure that object store contains fileobject
        # Tests to add:
        # Non-duplicates when same metadata is located by multiple hashes.
  948. def run_command_file(self, f):
  949. with open(f) as fp:
  950. cmds = json.load(fp)
  951. # setup object store
  952. storefname = self.tempdir / 'storefname'
  953. identfname = self.tempdir / 'identfname'
  954. # setup path mapping
  955. def expandusermock(arg):
  956. if arg == '~/.medashare_store.pasn1':
  957. return storefname
  958. elif arg == '~/.medashare_identity.pasn1':
  959. return identfname
  960. # setup test fname
  961. testfname = os.path.join(self.tempdir, 'test.txt')
  962. newtestfname = os.path.join(self.tempdir, 'newfile.txt')
  963. patches = []
  964. for cmd in cmds:
  965. try:
  966. if cmd['skip']:
  967. continue
  968. except KeyError:
  969. pass
  970. try:
  971. special = cmd['special']
  972. except KeyError:
  973. pass
  974. else:
  975. if special == 'copy newfile.txt to test.txt':
  976. shutil.copy(newtestfname, testfname)
  977. elif special == 'change newfile.txt':
  978. with open(newtestfname, 'w') as fp:
  979. fp.write('some new contents')
  980. elif special == 'verify store object cnt':
  981. with open(storefname, 'rb') as fp:
  982. pasn1obj = pasn1.loads(fp.read())
  983. objcnt = len(pasn1obj['objects'])
  984. self.assertEqual(objcnt, cmd['count'])
  985. elif special == 'set hostid':
  986. hostidpatch = mock.patch(__name__ + '.hostuuid')
  987. hostidpatch.start().return_value = uuid.uuid4()
  988. patches.append(hostidpatch)
  989. else: # pragma: no cover
  990. raise ValueError('unhandled special: %s' % repr(special))
  991. continue
  992. with self.subTest(file=f, title=cmd['title']), \
  993. mock.patch('os.path.expanduser',
  994. side_effect=expandusermock) as eu, \
  995. mock.patch('sys.stdout', io.StringIO()) as stdout, \
  996. mock.patch('sys.stderr', io.StringIO()) as stderr, \
  997. mock.patch('sys.argv', [ 'progname', ] +
  998. cmd['cmd']) as argv:
  999. with self.assertRaises(SystemExit) as cm:
  1000. main()
  1001. # XXX - Minor hack till other tests fixed
  1002. sys.exit(0)
  1003. # with the correct output
  1004. self.maxDiff = None
  1005. outre = cmd.get('stdout_re')
  1006. if outre:
  1007. self.assertRegex(stdout.getvalue(), outre)
  1008. else:
  1009. self.assertEqual(stdout.getvalue(), cmd.get('stdout', ''))
  1010. self.assertEqual(stderr.getvalue(), cmd.get('stderr', ''))
  1011. self.assertEqual(cm.exception.code, cmd.get('exit', 0))
  1012. patches.reverse()
  1013. for i in patches:
  1014. i.stop()
  1015. def test_cmds(self):
  1016. cmds = self.fixtures.glob('cmd.*.json')
  1017. for i in cmds:
  1018. os.chdir(self.tempdir)
  1019. self.run_command_file(i)
  1020. # XXX - the following test may no longer be needed
  1021. def test_main(self):
  1022. # Test the main runner, this is only testing things that are
  1023. # specific to running the program, like where the store is
  1024. # created.
  1025. # setup object store
  1026. storefname = os.path.join(self.tempdir, 'storefname')
  1027. identfname = os.path.join(self.tempdir, 'identfname')
  1028. # XXX part of the problem
  1029. shutil.copy(os.path.join('fixtures', 'sample.data.pasn1'), storefname)
  1030. # setup path mapping
  1031. def expandusermock(arg):
  1032. if arg == '~/.medashare_store.pasn1':
  1033. return storefname
  1034. elif arg == '~/.medashare_identity.pasn1':
  1035. return identfname
  1036. # setup test fname
  1037. testfname = os.path.join(self.tempdir, 'test.txt')
  1038. newtestfname = os.path.join(self.tempdir, 'newfile.txt')
  1039. import itertools
  1040. with mock.patch('os.path.expanduser', side_effect=expandusermock) \
  1041. as eu, mock.patch('medashare.cli.open') as op:
  1042. # that when opening the store and identity fails
  1043. op.side_effect = FileNotFoundError
  1044. # and there is no identity
  1045. with mock.patch('sys.stderr', io.StringIO()) as stderr, mock.patch('sys.argv', [ 'progname', 'list', 'afile' ]) as argv:
  1046. with self.assertRaises(SystemExit) as cm:
  1047. main()
  1048. # that it fails
  1049. self.assertEqual(cm.exception.code, 1)
  1050. # with the correct error message
  1051. self.assertEqual(stderr.getvalue(),
  1052. 'ERROR: Identity not created, create w/ genident.\n')
  1053. with mock.patch('os.path.expanduser', side_effect=expandusermock) \
  1054. as eu:
  1055. # that generating a new identity
  1056. with mock.patch('sys.stdout', io.StringIO()) as stdout, mock.patch('sys.argv', [ 'progname', 'genident', 'name=A Test User' ]) as argv:
  1057. main()
  1058. # does not output anything
  1059. self.assertEqual(stdout.getvalue(), '')
  1060. # looks up the correct file
  1061. eu.assert_called_with('~/.medashare_identity.pasn1')
  1062. # and that the identity
  1063. persona = Persona.load(identfname)
  1064. pident = persona.get_identity()
  1065. # has the correct name
  1066. self.assertEqual(pident.name, 'A Test User')
  1067. # that when generating an identity when one already exists
  1068. with mock.patch('sys.stderr', io.StringIO()) as stderr, mock.patch('sys.argv', [ 'progname', 'genident', 'name=A Test User' ]) as argv:
  1069. # that it exits
  1070. with self.assertRaises(SystemExit) as cm:
  1071. main()
  1072. # with error code 1
  1073. self.assertEqual(cm.exception.code, 1)
  1074. # and outputs an error message
  1075. self.assertEqual(stderr.getvalue(),
  1076. 'Error: Identity already created.\n')
  1077. # and looked up the correct file
  1078. eu.assert_called_with('~/.medashare_identity.pasn1')
  1079. # that when updating the identity
  1080. with mock.patch('sys.stdout', io.StringIO()) as stdout, mock.patch('sys.argv', [ 'progname', 'ident', 'name=Changed Name' ]) as argv:
  1081. main()
  1082. # it doesn't output anything
  1083. self.assertEqual(stdout.getvalue(), '')
  1084. # and looked up the correct file
  1085. eu.assert_called_with('~/.medashare_identity.pasn1')
  1086. npersona = Persona.load(identfname)
  1087. nident = npersona.get_identity()
  1088. # and has the new name
  1089. self.assertEqual(nident.name, 'Changed Name')
  1090. # and has the same old uuid
  1091. self.assertEqual(nident.uuid, pident.uuid)
  1092. # and that the modified date has changed
  1093. self.assertNotEqual(pident.modified, nident.modified)
  1094. # and that the old Persona can verify the new one
  1095. self.assertTrue(persona.verify(nident))
  1096. orig_open = open
  1097. with mock.patch('os.path.expanduser', side_effect=expandusermock) \
  1098. as eu, mock.patch('medashare.cli.open') as op:
  1099. # that when the store fails
  1100. def open_repl(fname, mode):
  1101. #print('or:', repr(fname), repr(mode), file=sys.stderr)
  1102. self.assertIn(mode, ('rb', 'wb'))
  1103. if fname == identfname or mode == 'wb':
  1104. return orig_open(fname, mode)
  1105. #print('foo:', repr(fname), repr(mode), file=sys.stderr)
  1106. raise FileNotFoundError
  1107. op.side_effect = open_repl
  1108. # and there is no store
  1109. with mock.patch('sys.stderr', io.StringIO()) as stderr, mock.patch('sys.argv', [ 'progname', 'list', 'foo', ]) as argv:
  1110. # that it exits
  1111. with self.assertRaises(SystemExit) as cm:
  1112. main()
  1113. # with error code 1
  1114. self.assertEqual(cm.exception.code, 1)
  1115. # and outputs an error message
  1116. self.assertEqual(stderr.getvalue(),
  1117. 'ERROR: file not found: \'foo\'\n')