MetaData Sharing
#!/usr/bin/env python

#import pdb, sys; mypdb = pdb.Pdb(stdout=sys.stderr); mypdb.set_trace()

from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey, \
    Ed448PublicKey
from cryptography.hazmat.primitives.serialization import Encoding, \
    PrivateFormat, PublicFormat, NoEncryption

import base58
import copy
import datetime
import hashlib
import mock
import os.path
import pasn1
import shutil
import string
import tempfile
import unittest
import uuid

from contextlib import nested

# The UUID for the namespace representing the path to a file
_NAMESPACE_MEDASHARE_PATH = uuid.UUID('f6f36b62-3770-4a68-bc3d-dc3e31e429e6')

_defaulthash = 'sha512'
_validhashes = set([ 'sha256', 'sha512' ])
_hashlengths = { len(getattr(hashlib, x)().hexdigest()): x for x in _validhashes }

def _iterdictlist(obj):
    itms = obj.items()
    itms.sort()
    for k, v in itms:
        if isinstance(v, list):
            v = v[:]
            v.sort()
            for i in v:
                yield k, i
        else:
            yield k, v

def _makeuuid(s):
    if isinstance(s, uuid.UUID):
        return s

    return uuid.UUID(s)

# XXX - known issue, store is not atomic/safe, overwrites in place instead of renames
# XXX - add validation
# XXX - how to add singletons
class MDBase(object):
    '''This is a simple wrapper that turns a JSON object into a pythonesque
    object where attribute accesses work.'''

    _type = 'invalid'

    _generated_properties = {
        'uuid': uuid.uuid4,
        'modified': datetime.datetime.utcnow
    }

    # When decoding, the decoded value should be passed to this function
    # to get the correct type
    _instance_properties = {
        'uuid': _makeuuid,
        'created_by_ref': _makeuuid,
    }

    _common_properties = [ 'type', 'created_by_ref' ] # XXX - add lang?
    _common_optional = set(('overlay_refs', 'sig'))
    _common_names = set(_common_properties + _generated_properties.keys())

    def __init__(self, obj={}, **kwargs):
        obj = copy.deepcopy(obj)
        obj.update(kwargs)

        if self._type == MDBase._type:
            raise ValueError('call MDBase.create_obj instead so correct class is used.')

        if 'type' in obj and obj['type'] != self._type:
            raise ValueError(
                'trying to create the wrong type of object, got: %s, expected: %s' %
                (`obj['type']`, `self._type`))

        if 'type' not in obj:
            obj['type'] = self._type

        for x in self._common_properties:
            if x not in obj:
                raise ValueError('common property %s not present' % `x`)

        for x, fun in self._instance_properties.iteritems():
            if x in obj:
                obj[x] = fun(obj[x])

        for x, fun in self._generated_properties.iteritems():
            if x not in obj:
                obj[x] = fun()

        self._obj = obj

    @classmethod
    def create_obj(cls, obj):
        '''Using obj as a base, create an instance of MDBase of the
        correct type.

        If the correct type is not found, a ValueError is raised.'''

        if isinstance(obj, cls):
            obj = obj._obj

        ty = obj['type']

        for i in MDBase.__subclasses__():
            if i._type == ty:
                return i(obj)
        else:
            raise ValueError('Unable to find class for type %s' %
                `ty`)

    def new_version(self, *args):
        '''For each k, v pair, add the property k as an additional value
        (or as a new one if this is the first), with the value v.'''

        obj = copy.deepcopy(self._obj)

        common = self._common_names | self._common_optional

        for k, v in args:
            if k in common:
                obj[k] = v
            else:
                obj.setdefault(k, []).append(v)

        del obj['modified']

        return self.create_obj(obj)

    def __repr__(self): # pragma: no cover
        return '%s(%s)' % (self.__class__.__name__, `self._obj`)

    def __getattr__(self, k):
        return self._obj[k]

    def __setattr__(self, k, v):
        if k[0] == '_': # direct attribute
            self.__dict__[k] = v
        else:
            self._obj[k] = v

    def __getitem__(self, k):
        return self._obj[k]

    def __to_dict__(self):
        return self._obj

    def __eq__(self, o):
        return cmp(self._obj, o) == 0

    def __contains__(self, k):
        return k in self._obj

    def items(self, skipcommon=True):
        return [ (k, v) for k, v in self._obj.iteritems() if
            not skipcommon or k not in self._common_names ]

    def encode(self):
        return _asn1coder.dumps(self)

    @classmethod
    def decode(cls, s):
        return cls.create_obj(_asn1coder.loads(s))
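# Concrete MDBase subclasses.  MetaData carries the user-supplied tags for a
# file; Identity describes a person/key and therefore drops created_by_ref.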
class MetaData(MDBase):
    _type = 'metadata'

class Identity(MDBase):
    _type = 'identity'

    # Identities don't need a created_by_ref
    _common_properties = [ x for x in MDBase._common_properties if x !=
        'created_by_ref' ]
    _common_optional = set([ x for x in MDBase._common_optional if x !=
        'overlay_refs' ] + [ 'name', 'pubkey' ])
    _common_names = set(_common_properties + MDBase._generated_properties.keys())

def _trytodict(o):
    if isinstance(o, uuid.UUID):
        return 'unicode', str(o)
    try:
        return 'dict', o.__to_dict__()
    except Exception: # pragma: no cover
        raise TypeError('unable to find __to_dict__ on %s: %s' % (type(o), `o`))

_asn1coder = pasn1.ASN1DictCoder(coerce=_trytodict)
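# A Persona pairs an Identity object with an (optional) Ed448 private key and
# is responsible for signing and verifying MDBase objects.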
class Persona(object):
    '''The object that represents a persona, or identity.  It will
    create the proper identity object, serialize it (and any keys) for
    saving, create objects for that persona and handle other management.'''

    def __init__(self, identity=None, key=None):
        if identity is None:
            self._identity = Identity()
        else:
            self._identity = identity

        self._key = key
        self._pubkey = None

        if 'pubkey' in self._identity:
            pubkeybytes = self._identity.pubkey
            self._pubkey = Ed448PublicKey.from_public_bytes(pubkeybytes)

        self._created_by_ref = self._identity.uuid

    def get_identity(self):
        '''Return the Identity object for this Persona.'''

        return self._identity

    def new_version(self, *args):
        '''Update the Persona's Identity object.'''

        self._identity = self.sign(self._identity.new_version(*args))

        return self._identity

    def store(self, fname):
        '''Store the Persona to a file.  If there is a private
        key associated w/ the Persona, it will be saved as well.'''

        with open(fname, 'w') as fp:
            obj = {
                'identity': self._identity,
            }
            if self._key is not None:
                obj['key'] = \
                    self._key.private_bytes(Encoding.Raw,
                        PrivateFormat.Raw, NoEncryption())

            fp.write(_asn1coder.dumps(obj))

    @classmethod
    def load(cls, fname):
        '''Load the Persona from the provided file.'''

        with open(fname) as fp:
            objs = _asn1coder.loads(fp.read())

        kwargs = {}
        if 'key' in objs:
            kwargs['key'] = Ed448PrivateKey.from_private_bytes(objs['key'])

        return cls(Identity(objs['identity']), **kwargs)

    def generate_key(self):
        '''Generate a key for this Identity.

        Raises a RuntimeError if a key is already present.'''

        if self._key:
            raise RuntimeError('a key already exists')

        self._key = Ed448PrivateKey.generate()
        self._pubkey = self._key.public_key()
        pubkey = self._pubkey.public_bytes(Encoding.Raw,
            PublicFormat.Raw)
        self._identity = self.sign(self._identity.new_version(('pubkey',
            pubkey)))

    def _makesigbytes(self, obj):
        obj = dict(obj.items(False))
        try:
            del obj['sig']
        except KeyError:
            pass

        return _asn1coder.dumps(obj)

    def sign(self, obj):
        '''Takes the object, adds a signature, and returns the new
        object.'''

        sigbytes = self._makesigbytes(obj)
        sig = self._key.sign(sigbytes)
        newobj = MDBase.create_obj(obj)
        newobj.sig = sig

        return newobj

    def verify(self, obj):
        sigbytes = self._makesigbytes(obj)
        self._pubkey.verify(obj['sig'], sigbytes)

        return True

    def by_file(self, fname):
        '''Return a metadata object for the file named fname.'''

        fobj = FileObject.from_file(fname, self._created_by_ref)

        return self.sign(fobj)
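# The ObjectStore indexes loaded objects both by UUID and by hash, so metadata
# can be looked up either way.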
class ObjectStore(object):
    '''A container to store the various Metadata objects.'''

    # The _uuids property contains both the UUIDv4 of objects and the
    # UUIDv5 used to look up FileObjects.
    def __init__(self, created_by_ref):
        self._created_by_ref = created_by_ref
        self._uuids = {}
        self._hashes = {}

    @staticmethod
    def makehash(hashstr, strict=True):
        '''Take a hash string, and return a valid hash string from it.
        This makes sure that it is of the correct type and length.

        If strict is False, the function will detect the length and
        return a valid hash if one can be found.'''

        try:
            hash, value = hashstr.split(':')
        except ValueError:
            if strict:
                raise

            hash = _hashlengths[len(hashstr)]
            value = hashstr

        if strict and len(str(value).translate(None, string.hexdigits.lower())) != 0:
            raise ValueError('value has invalid hex digits (must be lower case)', value)

        if hash in _validhashes:
            return ':'.join((hash, value))

        raise ValueError

    def __len__(self):
        return len(self._uuids)

    def store(self, fname):
        '''Write out the objects in the store to the file named
        fname.'''

        with open(fname, 'w') as fp:
            obj = {
                'created_by_ref': self._created_by_ref,
                'objects': self._uuids.values(),
            }
            fp.write(_asn1coder.dumps(obj))

    def loadobj(self, obj):
        '''Load obj into the data store.'''

        obj = MDBase.create_obj(obj)

        self._uuids[obj.uuid] = obj
        for j in obj.hashes:
            h = self.makehash(j)
            self._hashes.setdefault(h, []).append(obj)

    @classmethod
    def load(cls, fname):
        '''Load objects from the provided file name.

        Basic validation will be done on the objects in the file.

        The objects will be accessible via other methods.'''

        with open(fname) as fp:
            objs = _asn1coder.loads(fp.read())

        obj = cls(objs['created_by_ref'])
        for i in objs['objects']:
            obj.loadobj(i)

        return obj

    def by_id(self, id):
        '''Look up an object by its UUID.'''

        if not isinstance(id, uuid.UUID):
            uid = uuid.UUID(id)
        else:
            uid = id

        return self._uuids[uid]

    def by_hash(self, hash):
        '''Look up an object by its hash value.'''

        h = self.makehash(hash, strict=False)
        return self._hashes[h]

    def by_file(self, fname):
        '''Return a metadata object for the file named fname.'''

        fid = FileObject.make_id(fname)
        try:
            fobj = self.by_id(fid)
        except KeyError:
            # unable to find it
            fobj = FileObject.from_file(fname, self._created_by_ref)
            self.loadobj(fobj)

        for i in fobj.hashes:
            j = self.by_hash(i)

            # Filter out non-metadata objects
            j = [ x for x in j if x.type == 'metadata' ]

            if j:
                return j
        else:
            raise KeyError('unable to find metadata for file')

def _hashfile(fname):
    hash = getattr(hashlib, _defaulthash)()
    with open(fname) as fp:
        r = fp.read()
        hash.update(r)

    return '%s:%s' % (_defaulthash, hash.hexdigest())
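# A FileObject records the directory, name, mtime, size and hashes of a local
# file, identified by a UUIDv5 derived from its path.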
class FileObject(MDBase):
    _type = 'file'

    @staticmethod
    def make_id(fname):
        '''Take a local file name, and make the id for it.  Note that
        it converts from the local path separator to a forward slash so
        that it will be the same between Windows and Unix systems.'''

        fname = os.path.realpath(fname)
        return uuid.uuid5(_NAMESPACE_MEDASHARE_PATH,
            '/'.join(os.path.split(fname)))

    @classmethod
    def from_file(cls, filename, created_by_ref):
        s = os.stat(filename)
        obj = {
            'dir': os.path.dirname(filename),
            'created_by_ref': created_by_ref,
            'filename': os.path.basename(filename),
            'id': cls.make_id(filename),
            'mtime': datetime.datetime.utcfromtimestamp(s.st_mtime),
            'size': s.st_size,
            'hashes': [ _hashfile(filename), ],
        }

        return cls(obj)

def enumeratedir(_dir, created_by_ref):
    '''Enumerate all the files and directories (not recursive) in _dir.

    Returned is a list of FileObjects.'''

    return map(lambda x: FileObject.from_file(os.path.join(_dir, x), created_by_ref),
        os.listdir(_dir))
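# Command-line entry point.  Identity operations use
# ~/.medashare_identity.pasn1 and metadata operations use
# ~/.medashare_store.pasn1 (looked up via os.path.expanduser below).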
def main():
    from optparse import OptionParser
    import sys

    parser = OptionParser()
    parser.add_option('-a', action='append', dest='add',
        default=[], help='add the arg as metadata for files, tag=value')
    parser.add_option('-d', action='append', dest='delete',
        default=[], help='delete the arg as metadata from files. Either specify tag, and all tags are removed, or specify tag=value and that specific tag will be removed.')
    parser.add_option('-g', action='store_true', dest='generateident',
        default=False, help='generate an identity')
    parser.add_option('-i', action='store_true', dest='updateident',
        default=False, help='update the identity')
    parser.add_option('-l', action='store_true', dest='list',
        default=False, help='list metadata')
    parser.add_option('-p', action='store_true', dest='printpub',
        default=False, help='print the public key of the identity')

    options, args = parser.parse_args()

    # this is shared between generateident and add
    addprops = map(lambda x: x.split('=', 1), options.add)

    if options.generateident or options.updateident or options.printpub:
        identfname = os.path.expanduser('~/.medashare_identity.pasn1')

        if options.generateident and os.path.exists(identfname):
            print >>sys.stderr, 'Error: Identity already created.'
            sys.exit(1)

        if options.generateident:
            persona = Persona()
            persona.generate_key()
        else:
            persona = Persona.load(identfname)

        if options.printpub:
            print base58.b58encode_check(persona.get_identity().pubkey)
            return

        persona.new_version(*addprops)
        persona.store(identfname)

        return

    storefname = os.path.expanduser('~/.medashare_store.pasn1')

    import sys
    #print >>sys.stderr, `storefname`
    objstr = ObjectStore.load(storefname)

    if options.list:
        for i in args:
            for j in objstr.by_file(i):
                #print >>sys.stderr, `j._obj`
                for k, v in _iterdictlist(j):
                    print '%s:\t%s' % (k, v)
    elif options.add:
        for i in args:
            for j in objstr.by_file(i):
                nobj = j.new_version(*addprops)
                objstr.loadobj(nobj)
    elif options.delete:
        for i in args:
            for j in objstr.by_file(i):
                obj = j.__to_dict__()
                for k in options.delete:
                    try:
                        key, v = k.split('=', 1)
                        obj[key].remove(v)
                    except ValueError:
                        del obj[k]

                nobj = MDBase.create_obj(obj)
                objstr.loadobj(nobj)
    else: # pragma: no cover
        raise NotImplementedError

    objstr.store(storefname)

if __name__ == '__main__': # pragma: no cover
    main()
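# Unit tests.  These rely on sample files under fixtures/ and exercise both
# the classes above and the command-line interface.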
class _TestCases(unittest.TestCase):
    def setUp(self):
        d = os.path.realpath(tempfile.mkdtemp())
        self.basetempdir = d
        self.tempdir = os.path.join(d, 'subdir')

        persona = Persona.load(os.path.join('fixtures', 'sample.persona.pasn1'))
        self.created_by_ref = persona.get_identity().uuid

        shutil.copytree(os.path.join('fixtures', 'testfiles'),
            self.tempdir)

    def tearDown(self):
        shutil.rmtree(self.basetempdir)
        self.tempdir = None

    def test_mdbase(self):
        self.assertRaises(ValueError, MDBase, created_by_ref='')
        self.assertRaises(ValueError, MDBase.create_obj, { 'type': 'unknosldkfj' })
        self.assertRaises(ValueError, MDBase.create_obj, { 'type': 'metadata' })

        baseobj = {
            'type': 'metadata',
            'created_by_ref': self.created_by_ref,
        }
        origbase = copy.deepcopy(baseobj)

        # that when an MDBase object is created
        md = MDBase.create_obj(baseobj)

        # it doesn't modify the passed in object (when adding
        # generated properties)
        self.assertEqual(baseobj, origbase)

        # and it has the generated properties
        # Note: cannot mock the functions as they are already
        # referenced at creation time
        self.assertIn('uuid', md)
        self.assertIn('modified', md)

        # That you can create a new version using new_version
        md2 = md.new_version(('dc:creator', 'Jim Bob',))

        # that they are different
        self.assertNotEqual(md, md2)

        # and that the new modified time is different from the old
        self.assertNotEqual(md.modified, md2.modified)

        # and that the modification is present
        self.assertEqual(md2['dc:creator'], [ 'Jim Bob' ])

        # that providing a value for a common property
        fvalue = 'fakesig'
        md3 = md.new_version(('sig', fvalue))

        # gets set directly, and is not a list
        self.assertEqual(md3.sig, fvalue)
    def test_mdbase_encode_decode(self):
        # that an object
        baseobj = {
            'type': 'metadata',
            'created_by_ref': self.created_by_ref,
        }
        obj = MDBase.create_obj(baseobj)

        # can be encoded
        coded = obj.encode()

        # and that the results can be decoded
        decobj = MDBase.decode(coded)

        # and that they are equal
        self.assertEqual(obj, decobj)

    def test_mdbase_wrong_type(self):
        # that created_by_ref can be passed by kw
        obj = MetaData(created_by_ref=self.created_by_ref)

        self.assertRaises(ValueError, FileObject, dict(obj.items(False)))

    def test_makehash(self):
        self.assertRaises(ValueError, ObjectStore.makehash, 'slkj')
        self.assertRaises(ValueError, ObjectStore.makehash, 'sha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ADA')
        self.assertRaises(ValueError, ObjectStore.makehash, 'bogushash:9e0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ADA', strict=False)

        self.assertEqual(ObjectStore.makehash('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', strict=False), 'sha512:cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e')
        self.assertEqual(ObjectStore.makehash('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', strict=False), 'sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
    def test_enumeratedir(self):
        files = enumeratedir(self.tempdir, self.created_by_ref)
        ftest = files[0]
        fname = 'test.txt'

        # make sure that they are of type MDBase
        self.assertIsInstance(ftest, MDBase)

        oldid = ftest.id

        self.assertEqual(ftest.filename, fname)
        self.assertEqual(ftest.dir, self.tempdir)
        # XXX - do we add host information?
        self.assertEqual(ftest.id, uuid.uuid5(_NAMESPACE_MEDASHARE_PATH,
            '/'.join(os.path.split(self.tempdir) +
            ( fname, ))))
        self.assertEqual(ftest.mtime, datetime.datetime(2019, 5, 20, 21, 47, 36))
        self.assertEqual(ftest.size, 15)
        self.assertIn('sha512:7d5768d47b6bc27dc4fa7e9732cfa2de506ca262a2749cb108923e5dddffde842bbfee6cb8d692fb43aca0f12946c521cce2633887914ca1f96898478d10ad3f', ftest.hashes)

        # XXX - make sure works w/ relative dirs
        files = enumeratedir(os.path.relpath(self.tempdir),
            self.created_by_ref)
        self.assertEqual(oldid, files[0].id)
    def test_mdbaseoverlay(self):
        objst = ObjectStore(self.created_by_ref)

        # that a base object
        bid = uuid.uuid4()
        objst.loadobj({
            'type': 'metadata',
            'uuid': bid,
            'modified': datetime.datetime(2019, 6, 10, 14, 3, 10),
            'created_by_ref': self.created_by_ref,
            'hashes': [ 'sha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada' ],
            'someprop': [ 'somevalue' ],
            'lang': 'en',
        })

        # can have an overlay object
        oid = uuid.uuid4()
        dhash = 'sha256:a7c96262c21db9a06fd49e307d694fd95f624569f9b35bb3ffacd880440f9787'
        objst.loadobj({
            'type': 'metadata',
            'uuid': oid,
            'modified': datetime.datetime(2019, 6, 10, 18, 3, 10),
            'created_by_ref': self.created_by_ref,
            'hashes': [ dhash ],
            'overlay_refs': [ bid ],
            'lang': 'en',
        })

        # and that when you get its properties
        oobj = objst.by_id(oid)
        odict = dict(oobj.items())

        # that it has the overlays property
        self.assertEqual(odict['overlay_refs'], [ bid ])

        # that it doesn't have a common property
        self.assertNotIn('type', odict)

        # that when skipcommon is False
        odict = dict(oobj.items(False))

        # that it does have a common property
        self.assertIn('type', odict)
    def test_persona(self):
        # that a newly created persona
        persona = Persona()

        # has an identity object
        idobj = persona.get_identity()

        # that a key can be generated
        persona.generate_key()

        # that the pubkey property is present
        idobj = persona.get_identity()
        self.assertIsInstance(idobj['pubkey'], str)

        # and that there is a signature
        self.assertIsInstance(idobj['sig'], str)

        # that a second time, it raises an exception
        self.assertRaises(RuntimeError, persona.generate_key)

        # that a file object created by it
        testfname = os.path.join(self.tempdir, 'test.txt')
        testobj = persona.by_file(testfname)

        # has the correct created_by_ref
        self.assertEqual(testobj.created_by_ref, idobj.uuid)

        # and has a signature
        self.assertIn('sig', testobj)

        # that a persona created from the identity object
        vpersona = Persona(idobj)

        # can verify the sig
        self.assertTrue(vpersona.verify(testobj))

        # and that a bogus signature
        bogussig = 'somebogussig'
        bogusobj = MDBase.create_obj(testobj)
        bogusobj.sig = bogussig

        # fails to verify
        self.assertRaises(Exception, vpersona.verify, bogusobj)

        # and that a modified object
        otherobj = testobj.new_version(('customprop', 'value'))

        # fails to verify
        self.assertRaises(Exception, vpersona.verify, otherobj)

        # that a persona object can be written
        perpath = os.path.join(self.basetempdir, 'persona.pasn1')
        persona.store(perpath)

        # and that when loaded back
        loadpersona = Persona.load(perpath)

        # the new persona object can sign an object
        nvtestobj = loadpersona.sign(testobj.new_version())

        # and the old persona can verify it.
        self.assertTrue(vpersona.verify(nvtestobj))
    def test_objectstore(self):
        objst = ObjectStore.load(os.path.join('fixtures', 'sample.data.pasn1'))

        objst.loadobj({
            'type': 'metadata',
            'uuid': 'c9a1d1e2-3109-4efd-8948-577dc15e44e7',
            'modified': datetime.datetime(2019, 5, 31, 14, 3, 10),
            'created_by_ref': self.created_by_ref,
            'hashes': [ 'sha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada' ],
            'lang': 'en',
        })

        lst = objst.by_hash('91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada')
        self.assertEqual(len(lst), 2)

        byid = objst.by_id('3e466e06-45de-4ecc-84ba-2d2a3d970e96')

        self.assertIsInstance(byid, MetaData)
        self.assertIn(byid, lst)

        r = byid

        self.assertEqual(r.uuid, uuid.UUID('3e466e06-45de-4ecc-84ba-2d2a3d970e96'))
        self.assertEqual(r['dc:creator'], [ u'John-Mark Gurney' ])

        fname = 'testfile.pasn1'
        objst.store(fname)

        with open(fname) as fp:
            objs = _asn1coder.loads(fp.read())

        os.unlink(fname)

        self.assertEqual(len(objs), len(objst))
        self.assertEqual(objs['created_by_ref'], str(self.created_by_ref))

        for i in objs['objects']:
            i['created_by_ref'] = uuid.UUID(i['created_by_ref'])
            i['uuid'] = uuid.UUID(i['uuid'])
            self.assertEqual(objst.by_id(i['uuid']), i)

        testfname = os.path.join(self.tempdir, 'test.txt')
        self.assertEqual(objst.by_file(testfname), [ byid ])
        self.assertEqual(objst.by_file(testfname), [ byid ])

        self.assertRaises(KeyError, objst.by_file, '/dev/null')

        # XXX make sure that object store contains fileobject

        # Tests to add:
        # Non-duplicates when same metadata is located by multiple hashes.
    def test_main(self):
        # Test the main runner, this is only testing things that are
        # specific to running the program, like where the store is
        # created.

        # setup object store
        storefname = os.path.join(self.tempdir, 'storefname')
        identfname = os.path.join(self.tempdir, 'identfname')
        shutil.copy(os.path.join('fixtures', 'sample.data.pasn1'), storefname)

        # setup path mapping
        def expandusermock(arg):
            if arg == '~/.medashare_store.pasn1':
                return storefname
            elif arg == '~/.medashare_identity.pasn1':
                return identfname

        # setup test fname
        testfname = os.path.join(self.tempdir, 'test.txt')

        import sys
        import StringIO
        import itertools

        with mock.patch('os.path.expanduser', side_effect=expandusermock) \
            as eu:
            # that generating a new identity
            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-g', '-a', 'name=A Test User' ])) as (stdout, argv):
                main()

                # does not output anything
                self.assertEqual(stdout.getvalue(), '')

                # looks up the correct file
                eu.assert_called_with('~/.medashare_identity.pasn1')

                # and that the identity
                persona = Persona.load(identfname)
                pident = persona.get_identity()

                # has the correct name
                self.assertEqual(pident.name, 'A Test User')

            # that when generating an identity when one already exists
            with nested(mock.patch('sys.stderr',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-g', '-a', 'name=A Test User' ])) as (stderr, argv):
                # that it exits
                with self.assertRaises(SystemExit) as cm:
                    main()

                # with error code 1
                self.assertEqual(cm.exception[0], 1)

                # and outputs an error message
                self.assertEqual(stderr.getvalue(),
                    'Error: Identity already created.\n')

                # and looked up the correct file
                eu.assert_called_with('~/.medashare_identity.pasn1')

            # that when updating the identity
            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-i', '-a', 'name=Changed Name' ])) as (stdout, argv):
                main()

                # it doesn't output anything
                self.assertEqual(stdout.getvalue(), '')

                # and looked up the correct file
                eu.assert_called_with('~/.medashare_identity.pasn1')

                npersona = Persona.load(identfname)
                nident = npersona.get_identity()

                # and has the new name
                self.assertEqual(nident.name, 'Changed Name')

                # and has the same old uuid
                self.assertEqual(nident.uuid, pident.uuid)

                # and that the modified date has changed
                self.assertNotEqual(pident.modified, nident.modified)

                # and that the old Persona can verify the new one
                self.assertTrue(persona.verify(nident))

            # that when asked to print the public key
            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-p' ])) as (stdout, argv):
                main()

                # the correct key is printed
                self.assertEqual(stdout.getvalue(),
                    '%s\n' % base58.b58encode_check(pident.pubkey))

                # and looked up the correct file
                eu.assert_called_with('~/.medashare_identity.pasn1')

            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-l', testfname ])) as (stdout, argv):
                main()

                self.assertEqual(stdout.getvalue(),
                    'dc:creator:\tJohn-Mark Gurney\nhashes:\tsha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada\nhashes:\tsha512:7d5768d47b6bc27dc4fa7e9732cfa2de506ca262a2749cb108923e5dddffde842bbfee6cb8d692fb43aca0f12946c521cce2633887914ca1f96898478d10ad3f\nlang:\ten\n')
                eu.assert_called_with('~/.medashare_store.pasn1')

            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-a', 'dc:creator=Another user', '-a', 'foo=bar=baz', testfname ])) as (stdout, argv):
                main()

            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-l', testfname ])) as (stdout, argv):
                main()

                self.assertEqual(stdout.getvalue(),
                    'dc:creator:\tAnother user\ndc:creator:\tJohn-Mark Gurney\nfoo:\tbar=baz\nhashes:\tsha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada\nhashes:\tsha512:7d5768d47b6bc27dc4fa7e9732cfa2de506ca262a2749cb108923e5dddffde842bbfee6cb8d692fb43aca0f12946c521cce2633887914ca1f96898478d10ad3f\nlang:\ten\n')

            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-d', 'dc:creator', testfname ])) as (stdout, argv):
                main()

            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-l', testfname ])) as (stdout, argv):
                main()

                self.assertEqual(stdout.getvalue(),
                    'foo:\tbar=baz\nhashes:\tsha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada\nhashes:\tsha512:7d5768d47b6bc27dc4fa7e9732cfa2de506ca262a2749cb108923e5dddffde842bbfee6cb8d692fb43aca0f12946c521cce2633887914ca1f96898478d10ad3f\nlang:\ten\n')

            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-a', 'foo=bleh', testfname ])) as (stdout, argv):
                main()

            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-l', testfname ])) as (stdout, argv):
                main()

                self.assertEqual(stdout.getvalue(),
                    'foo:\tbar=baz\nfoo:\tbleh\nhashes:\tsha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada\nhashes:\tsha512:7d5768d47b6bc27dc4fa7e9732cfa2de506ca262a2749cb108923e5dddffde842bbfee6cb8d692fb43aca0f12946c521cce2633887914ca1f96898478d10ad3f\nlang:\ten\n')

            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-d', 'foo=bar=baz', testfname ])) as (stdout, argv):
                main()

            with nested(mock.patch('sys.stdout',
                StringIO.StringIO()), mock.patch('sys.argv',
                [ 'progname', '-l', testfname ])) as (stdout, argv):
                main()

                self.assertEqual(stdout.getvalue(),
                    'foo:\tbleh\nhashes:\tsha256:91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada\nhashes:\tsha512:7d5768d47b6bc27dc4fa7e9732cfa2de506ca262a2749cb108923e5dddffde842bbfee6cb8d692fb43aca0f12946c521cce2633887914ca1f96898478d10ad3f\nlang:\ten\n')