From 5006cb622fd56b17c4bef34fb002bf5145ae28bc Mon Sep 17 00:00:00 2001
From: John-Mark Gurney
Date: Sun, 11 Sep 2022 16:57:09 -0700
Subject: [PATCH] drop unneeded arg to ObjectStore, add type column via
 migration

add tests for migration, a good example for future needs.
---
 ui/fixtures/genfixtures.py                    |  2 +-
 .../f2131e9ae4db_add_type_to_mdbase_table.py  | 43 +++++++++
 ui/medashare/cli.py                           | 92 ++++++++++++++-----
 ui/medashare/orm.py                           |  6 +-
 ui/medashare/tests.py                         |  1 +
 5 files changed, 121 insertions(+), 23 deletions(-)
 create mode 100644 ui/medashare/alembic/versions/f2131e9ae4db_add_type_to_mdbase_table.py

diff --git a/ui/fixtures/genfixtures.py b/ui/fixtures/genfixtures.py
index 6ebdc23..12252b4 100644
--- a/ui/fixtures/genfixtures.py
+++ b/ui/fixtures/genfixtures.py
@@ -7,7 +7,7 @@ persona = cli.Persona()
 persona.generate_key()
 cbr = persona.get_identity().uuid
 storename = 'sample.data.sqlite3'
-objst = cli.ObjectStore.load(storename, cbr)
+objst = cli.ObjectStore.load(storename)
 list(map(objst.loadobj,
     [
     {
diff --git a/ui/medashare/alembic/versions/f2131e9ae4db_add_type_to_mdbase_table.py b/ui/medashare/alembic/versions/f2131e9ae4db_add_type_to_mdbase_table.py
new file mode 100644
index 0000000..0f567b3
--- /dev/null
+++ b/ui/medashare/alembic/versions/f2131e9ae4db_add_type_to_mdbase_table.py
@@ -0,0 +1,43 @@
+"""add type to mdbase table
+
+Revision ID: f2131e9ae4db
+Revises: afad01589b76
+Create Date: 2022-09-09 22:38:07.325511
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from medashare import mdb
+
+
+# revision identifiers, used by Alembic.
+revision = 'f2131e9ae4db'
+down_revision = 'afad01589b76'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    import sys
+    op.add_column('metadata_objects', sa.Column('type', sa.String,
+        nullable=True))
+
+    connection = op.get_bind()
+
+    mdo = sa.schema.MetaData()
+    mdotbl = sa.Table('metadata_objects', mdo, autoload_with=connection.engine)
+
+    stmt = sa.select([mdotbl.c.uuid, mdotbl.c.data])
+    newtypes = [ dict(olduuid=uuid, newtype=mdb.MDBase.decode(data).type) for
+        uuid, data in connection.execute(stmt) ]
+
+    if newtypes:
+        stmt = sa.update(mdotbl).where(mdotbl.c.uuid ==
+            sa.bindparam('olduuid')).values(type=sa.bindparam('newtype'))
+        connection.execute(stmt, newtypes)
+
+    # XXX - SQLite cannot add a constraint later w/o difficulty
+    #op.alter_column('metadata_objects', 'type', nullable=False)
+
+def downgrade() -> None:
+    op.drop_column('metadata_objects', 'type')
diff --git a/ui/medashare/cli.py b/ui/medashare/cli.py
index ab4960a..d8f35c6 100644
--- a/ui/medashare/cli.py
+++ b/ui/medashare/cli.py
@@ -54,7 +54,8 @@ import pasn1
 import re
 import shutil
 import socket
-from sqlalchemy import create_engine, select, func, delete
+import sqlalchemy
+from sqlalchemy import create_engine, select, insert, func, delete
 from sqlalchemy.orm import sessionmaker
 import string
 import subprocess
@@ -274,17 +275,15 @@ class ObjectStore(object):
     # The _uuids property contains both the UUIDv4 for objects, and
     # looking up the UUIDv5 for FileObjects.
 
-    def __init__(self, engine, created_by_ref):
+    def __init__(self, engine, version='head'):
         #orm.Base.metadata.create_all(engine)
 
         self._engine = engine
         self._ses = sessionmaker(engine)
-        self._created_by_ref = created_by_ref
+        self._handle_migration(version)
 
-        self._handle_migration()
-
-    def _handle_migration(self):
+    def _handle_migration(self, version):
         '''Handle migrating the database to a newer version.'''
 
         # running commands directly:
@@ -302,7 +301,7 @@ class ObjectStore(object):
 
         with self._engine.begin() as connection:
             config.attributes['engine'] = self._engine
-            command.upgrade(config, 'head')
+            command.upgrade(config, version)
 
     def get_host(self, hostuuid):
         hostuuid = _makeuuid(hostuuid)
@@ -315,10 +314,20 @@ class ObjectStore(object):
 
             return self._by_id(a.objid, session)
 
-    def get_hosts(self):
+    def get_by_type(self, _type):
+        try:
+            if issubclass(_type, MDBase):
+                _type = _type._type
+        except TypeError:
+            pass
+
         with self._ses() as session:
-            for i in session.query(orm.HostTable.objid).all():
-                yield self._by_id(i.objid, session)
+            for i in session.query(orm.MetaDataObject.data).where(
+                    orm.MetaDataObject.type == _type):
+                yield i.data
+
+    def get_hosts(self):
+        return self.get_by_type(Host)
 
     @staticmethod
     def makehash(hashstr, strict=True):
@@ -364,11 +373,11 @@ class ObjectStore(object):
                 yield i.data
 
     @classmethod
-    def load(cls, fname, cbr):
+    def load(cls, fname):
         engine = create_engine("sqlite+pysqlite:///%s" % fname,
             echo=_sql_verbose, future=True)
 
-        return cls(engine, cbr)
+        return cls(engine)
 
     def store(self, fname):
         '''Write out the objects in the store to the file named
@@ -409,7 +418,7 @@ class ObjectStore(object):
             if oldobj is not None:
                 session.delete(oldobj)
 
-            sobj = orm.MetaDataObject(uuid=obj.uuid,
+            sobj = orm.MetaDataObject(uuid=obj.uuid, type=obj.type,
                 modified=obj.modified, data=obj)
             session.add(sobj)
 
@@ -653,6 +662,10 @@ def _hashfile(fname):
 class Host(MDBase):
     _type = 'host'
 
+    _class_instance_properties = {
+        'hostuuid': _makeuuid,
+    }
+
 class Mapping(MDBase):
     _type = 'mapping'
 
@@ -817,7 +830,7 @@ def init_datastructs(f):
     engine = create_engine("sqlite+pysqlite:///%s" % storefname,
         echo=_sql_verbose, future=True)
 
-    objstr = ObjectStore(engine, persona.get_identity().uuid)
+    objstr = ObjectStore(engine)
 
     # create the cache
     cache = TagCache.load(cachefname)
@@ -1452,6 +1465,44 @@ class _TestCononicalCoder(unittest.TestCase):
         # they are now encoded the same
         self.assertEqual(astr, bstr)
 
+class _TestMigrations(unittest.TestCase):
+    def setUp(self):
+        self._engine = create_engine('sqlite+pysqlite:///:memory:',
+            echo=_sql_verbose, future=True)
+
+    def test_f2131(self):
+        # That an object store generated at the start
+        objstr = ObjectStore(self._engine, 'afad01589b76')
+
+        # and a host object
+        hostobj = Host(created_by_ref=uuid.uuid4(), hostuuid=uuid.uuid4())
+
+        # build table metadata from original db
+        mdo = sqlalchemy.schema.MetaData()
+        mdobjstable = sqlalchemy.Table('metadata_objects', mdo, autoload_with=self._engine)
+
+        with objstr._ses() as session:
+            stmt = insert(mdobjstable).values(
+                uuid=hostobj.uuid.hex, modified=hostobj.modified,
+                data=hostobj.encode())
+            session.execute(stmt)
+
+            session.commit()
+
+        # migrate the database forward
+        objstr._handle_migration('head')
+
+        # make sure we can query it
+        self.assertEqual(list(objstr.get_hosts()), [ hostobj ])
+        self.assertEqual(list(objstr), [ hostobj ])
+        self.assertEqual(list(objstr.get_by_type('file')), [ ])
+        self.assertEqual(list(objstr.get_by_type(FileObject)), [ ])
+        self.assertEqual(list(objstr.get_by_type(Host)), [ hostobj ])
+
+        #with objstr._ses() as session:
+        #    for i in session.query(orm.MetaDataObject).all():
+        #        _debprint('c:', repr(i))
+
 class _TestCases(unittest.TestCase):
     def setUp(self):
         self.fixtures = pathlib.Path('fixtures').resolve()
@@ -1482,7 +1533,7 @@ class _TestCases(unittest.TestCase):
             "sqlite+pysqlite:///memdb1?mode=memory&cache=shared",
             echo=_sql_verbose, future=True)
 
-        objst = ObjectStore(engine, self.created_by_ref)
+        objst = ObjectStore(engine)
 
         a = self.persona.by_file('test.txt')
 
@@ -1504,7 +1555,7 @@ class _TestCases(unittest.TestCase):
         objst.store('teststore.pasn1')
 
         # load it back in
-        objstr = ObjectStore(engine, self.created_by_ref)
+        objstr = ObjectStore(engine)
 
         a = objstr.by_id(a['uuid'])
 
@@ -1669,7 +1720,7 @@ class _TestCases(unittest.TestCase):
     def test_mdbaseoverlay(self):
         engine = create_engine("sqlite+pysqlite:///:memory:",
             echo=_sql_verbose, future=True)
-        objst = ObjectStore(engine, self.created_by_ref)
+        objst = ObjectStore(engine)
 
         # that a base object
         bid = uuid.uuid4()
@@ -1830,8 +1881,7 @@ class _TestCases(unittest.TestCase):
     def test_objectstore(self):
         persona = self.persona
 
-        objst = ObjectStore.load(self.tempdir / 'sample.data.sqlite3',
-            persona.get_identity().uuid)
+        objst = ObjectStore.load(self.tempdir / 'sample.data.sqlite3')
 
         lst = objst.by_hash('91751cee0a1ab8414400238a761411daa29643ab4b8243e9a91649e25be53ada')
         self.assertEqual(len(lst), 1)
@@ -1986,7 +2036,7 @@ class _TestCases(unittest.TestCase):
                 with open(newtestfname, 'w') as fp:
                     fp.write('some new contents')
             elif special == 'verify store object cnt':
-                objst = ObjectStore.load(storefname, None)
+                objst = ObjectStore.load(storefname)
                 objcnt = len(objst)
                 self.assertEqual(objcnt, len(list(objst)))
                 self.assertEqual(objcnt, cmd['count'])
@@ -1996,7 +2046,7 @@ class _TestCases(unittest.TestCase):
                 hostidpatch.start().return_value = hid
                 patches.append(hostidpatch)
             elif special == 'iter is unique':
-                objst = ObjectStore.load(storefname, None)
+                objst = ObjectStore.load(storefname)
                 uniqobjs = len(set((x['uuid'] for x in objst)))
                 self.assertEqual(len(list(objst)), uniqobjs)
             elif special == 'setup bittorrent files':
diff --git a/ui/medashare/orm.py b/ui/medashare/orm.py
index 3cdf6e5..9e3334d 100644
--- a/ui/medashare/orm.py
+++ b/ui/medashare/orm.py
@@ -66,7 +66,11 @@ class MetaDataObject(Base):
 
     uuid = Column(UUID, primary_key=True)
     modified = Column(DateTime)
+    type = Column(String)
     data = Column(MDBaseType)
 
     def __repr__(self):
-        return 'MetaDataObject(uuid=%s, modified=%s, data=%s)' % (repr(self.uuid), repr(self.modified), repr(self.data))
+        return \
+            'MetaDataObject(uuid=%s, type=%s, modified=%s,' \
+            ' data=%s)' % (repr(self.uuid), repr(self.type),
+            repr(self.modified), repr(self.data))
diff --git a/ui/medashare/tests.py b/ui/medashare/tests.py
index 263e75c..20d0339 100644
--- a/ui/medashare/tests.py
+++ b/ui/medashare/tests.py
@@ -2,5 +2,6 @@ from .btv import _TestCases as btv_test_cases
 from .btv.bencode import _TestCases as bencode_test_cases
 from .mdb import _TestJSONEncoder
 from .cli import _TestCononicalCoder, _TestCases as cli_test_cases
+from .cli import _TestMigrations
 from .mtree import Test
 from .server import _TestCases, _TestPostConfig
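
A minimal usage sketch of the reworked interface after this change (not part of the
commit); it assumes a store file named 'sample.data.sqlite3', as generated by
ui/fixtures/genfixtures.py above:

    # Migrations now run automatically up to 'head' when the store is opened.
    from medashare.cli import ObjectStore, Host

    objst = ObjectStore.load('sample.data.sqlite3')

    # get_by_type() accepts either an MDBase subclass or its string type name.
    hosts = list(objst.get_by_type(Host))
    files = list(objst.get_by_type('file'))

    # get_hosts() is now just a thin wrapper around get_by_type(Host).
    assert hosts == list(objst.get_hosts())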