@@ -726,8 +726,18 @@ class TagCache:
 	def __init__(self, tags=(), count=10):
 		self._cache = dict((x, None) for x in tags)
 		self._count = 10
+		self._modified = False
+
+	@property
+	def modified(self):
+		'''Return if the cache has been modified since the last
+		time store has been called.'''
+
+		return self._modified
 
 	def add(self, tag):
+		self._modified = True
+
 		try:
			del self._cache[tag]
 		except KeyError:
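
The new `modified` flag exists so callers can skip a disk write when nothing has changed since the last `store()`. A minimal usage sketch, assuming it runs inside this module next to `TagCache` and uses the same `~/.medashare_cache.pasn1` path the rest of the change uses (note, unrelated to this change, that `__init__` still assigns the literal `10` instead of `count`):

```python
import os

cachefname = os.path.expanduser('~/.medashare_cache.pasn1')
cache = TagCache.load(cachefname)

cache.add('example-tag')     # flips cache.modified to True

if cache.modified:           # only rewrite the file when something changed
	cache.store(cachefname)  # store() clears the flag again
```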
@@ -756,47 +766,44 @@ class TagCache:
 		return cls(**cache)
 
 	def store(self, fname):
+		self._modified = False
+
 		cache = dict(tags=list(self._cache.keys()))
 		with open(fname, 'wb') as fp:
 			fp.write(_asn1coder.dumps(cache))
 
-def get_cache(options):
-	cachefname = os.path.expanduser('~/.medashare_cache.pasn1')
-
-	return TagCache.load(cachefname)
-
-def write_cache(options, cache):
-	cachefname = os.path.expanduser('~/.medashare_cache.pasn1')
-
-	cache.store(cachefname)
-
-def get_objstore(options):
-	persona = get_persona(options)
-
-	storefname = os.path.expanduser('~/.medashare_store.sqlite3')
-
-	engine = create_engine("sqlite+pysqlite:///%s" % storefname,
-	    echo=_sql_verbose, future=True)
-
-	objstr = ObjectStore(engine, persona.get_identity().uuid)
-
-	return persona, objstr
-
-def write_objstore(options, objstr):
-	pass
-
-def get_persona(options):
-	identfname = os.path.expanduser('~/.medashare_identity.pasn1')
-
-	try:
-		persona = Persona.load(identfname)
-	except FileNotFoundError:
-		print('ERROR: Identity not created, create w/ genident.',
-		    file=sys.stderr)
-		sys.exit(1)
-
-	return persona
+def init_datastructs(f):
+	@functools.wraps(f)
+	def wrapper(options):
+		identfname = os.path.expanduser('~/.medashare_identity.pasn1')
+		storefname = os.path.expanduser('~/.medashare_store.sqlite3')
+		cachefname = os.path.expanduser('~/.medashare_cache.pasn1')
+
+		# create the persona
+		try:
+			persona = Persona.load(identfname)
+		except FileNotFoundError:
+			print('ERROR: Identity not created, create w/ genident.',
+			    file=sys.stderr)
+			sys.exit(1)
+
+		# create the object store
+		engine = create_engine("sqlite+pysqlite:///%s" % storefname,
+		    echo=_sql_verbose, future=True)
+		objstr = ObjectStore(engine, persona.get_identity().uuid)
+
+		# create the cache
+		cache = TagCache.load(cachefname)
+
+		try:
+			return f(options, persona, objstr, cache)
+		finally:
+			if cache.modified:
+				cache.store(cachefname)
+
+	return wrapper
 
 def cmd_genident(options):
 	identfname = os.path.expanduser('~/.medashare_identity.pasn1')
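
With `init_datastructs` in place, each command only declares the extended signature; the wrapper loads the identity, object store, and tag cache up front, and the `finally` block writes the cache back only when `cache.modified` is set. A short sketch of the calling convention; `cmd_example` is hypothetical and only illustrates the pattern the following hunks apply to the real commands:

```python
@init_datastructs
def cmd_example(options, persona, objstr, cache):
	# persona, objstr and cache are supplied by wrapper(); the command
	# never opens identity files or database engines itself
	for tag in getattr(options, 'tags', ()):
		cache.add(tag)
	# no explicit cache.store() here: wrapper() persists the cache
	# afterwards because cache.modified is now True
```

Callers still pass only `options`, which is why the untouched `cmd_modify(options)` call inside `cmd_auto` (see the `@@ -1200` hunk below) keeps working: it now lands on `wrapper(options)`.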
@@ -834,9 +841,8 @@ def cmd_pubkey(options):
 
 	print(persona.get_pubkey().decode('ascii'))
 
-def cmd_modify(options):
-	persona, objstr = get_objstore(options)
-
+@init_datastructs
+def cmd_modify(options, persona, objstr, cache):
 	# because of how argparse works, only one file will be collected
 	# multiple files will end up in modtagvalues, so we need to
 	# find and move them.
@@ -910,14 +916,11 @@ def cmd_modify(options):
 
 		objstr.loadobj(nobj)
 
-	write_objstore(options, objstr)
-
 def printhost(host):
 	print('%s\t%s' % (host.name, host.hostuuid))
 
-def cmd_mapping(options):
-	persona, objstr = get_objstore(options)
-
+@init_datastructs
+def cmd_mapping(options, persona, objstr, cache):
 	if options.mapping is not None:
 		parts = [ x.split(':', 1) for x in options.mapping ]
@@ -943,11 +946,8 @@ def cmd_mapping(options):
 
 		objstr.loadobj(m)
 
-	write_objstore(options, objstr)
-
-def cmd_hosts(options):
-	persona, objstr = get_objstore(options)
-
+@init_datastructs
+def cmd_hosts(options, persona, objstr, cache):
 	selfuuid = hostuuid()
 
 	try:
@@ -966,8 +966,6 @@ def cmd_hosts(options):
 
 		printhost(i)
 
-	write_objstore(options, objstr)
-
 def getnextfile(files, idx):
 	origidx = idx
@@ -1035,9 +1033,8 @@ def checkforfile(objstr, curfile, ask=False):
 
 	return fobj
 
-def cmd_interactive(options):
-	persona, objstr = get_objstore(options)
+@init_datastructs
+def cmd_interactive(options, persona, objstr, cache):
 
-	cache = get_cache(options)
-
 	files = [ pathlib.Path(x) for x in options.files ]
@@ -1168,13 +1165,8 @@ def cmd_interactive(options):
 			print('Invalid selection.')
 
-	write_objstore(options, objstr)
-	write_cache(options, cache)
-
-def cmd_dump(options):
-	persona, objstr = get_objstore(options)
-
+@init_datastructs
+def cmd_dump(options, persona, objstr, cache):
 	print(persona.get_identity().encode('json'))
 
 	for i in objstr:
@@ -1200,9 +1192,8 @@ def cmd_auto(options):
 	options.modtagvalues = [ '+mimetype=%s' % mt ]
 	cmd_modify(options)
 
-def cmd_list(options):
-	persona, objstr = get_objstore(options)
-
+@init_datastructs
+def cmd_list(options, persona, objstr, cache):
 	for i in options.files:
 		try:
 			objs = objstr.by_file(i)
@@ -1225,11 +1216,8 @@ def cmd_list(options):
 			# This is needed so that if it creates a FileObj, which may be
 			# expensive (hashing large file), that it gets saved.
 
-	write_objstore(options, objstr)
-
-def cmd_container(options):
-	persona, objstr = get_objstore(options)
-
+@init_datastructs
+def cmd_container(options, persona, objstr, cache):
 	for i in options.files:
 		good, bad = validate_file(i)
@@ -1282,8 +1270,6 @@ def cmd_container(options):
 
 	objstr.loadobj(cont)
 
-	write_objstore(options, objstr)
-
 def _json_objstream(fp):
 	inp = fp.read()
@@ -1297,9 +1283,8 @@ def _json_objstream(fp):
 
 		inp = inp[endpos:]
 
-def cmd_import(options):
-	persona, objstr = get_objstore(options)
-
+@init_datastructs
+def cmd_import(options, persona, objstr, cache):
 	for jobj in _json_objstream(sys.stdin):
 		if options.sign:
 			cbr = _makeuuid(jobj['created_by_ref'])
@@ -1318,16 +1303,11 @@ def cmd_import(options):
 
 		objstr.loadobj(obj)
 
-	write_objstore(options, objstr)
-
-def cmd_drop(options):
-	persona, objstr = get_objstore(options)
-
+@init_datastructs
+def cmd_drop(options, persona, objstr, cache):
 	for i in options.uuids:
 		objstr.drop_uuid(i)
 
-	write_objstore(options, objstr)
-
 def main():
 	import argparse
@@ -1923,6 +1903,7 @@ class _TestCases(unittest.TestCase):
 		# setup object store
 		storefname = self.tempdir / 'storefname'
 		identfname = self.tempdir / 'identfname'
+		cachefname = self.tempdir / 'cachefname'
 
 		# setup path mapping
 		def expandusermock(arg):
@@ -1930,6 +1911,11 @@ class _TestCases(unittest.TestCase):
 				return storefname
 			elif arg == '~/.medashare_identity.pasn1':
 				return identfname
+			elif arg == '~/.medashare_cache.pasn1':
+				return cachefname
+
+			if True: #pragma: no cover
+				raise NotImplementedError(arg)
 
 		# setup test fname
 		testfname = os.path.join(self.tempdir, 'test.txt')
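
`expandusermock` only takes effect once it replaces `os.path.expanduser` for the code under test; the patch call itself sits outside the hunks shown here. A sketch of the usual wiring, where the patch target and the `options` object are assumptions rather than part of this change:

```python
from unittest import mock

with mock.patch('os.path.expanduser', expandusermock):
	# within this block the cmd_* functions resolve
	# ~/.medashare_identity.pasn1, ~/.medashare_store.sqlite3 and
	# ~/.medashare_cache.pasn1 to files under self.tempdir
	cmd_genident(options)
```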