
add support for fetching from a related alias url when the data isn't found in the cache.
John-Mark Gurney · 5 years ago · commit 704597885e
1 changed file with 73 additions and 4 deletions: casimport/__init__.py (+73, -4)
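
casimport resolves a module name through an aliases table: one whitespace-separated "name url" pair per line, so a single name may list several URLs. The fixture file itself is not part of this diff, but judging from the parser and the expected table in the tests below, an entry looks roughly like this (URLs taken from the test data):

    hello hash://sha256/330884aa2febb5e19fb7194ec6a69ed11dd3d77122f1a5175ee93e73cf0161c3?type=text/x-python
    hello ipfs://bafkreibtbcckul7lwxqz7nyzj3dknhwrdxj5o4jc6gsroxxjhzz46albym

With this commit the parsed table also maps the normalized hash:// URL to the name's other URLs, so a module whose hash is not in the cache can still be fetched from the ipfs:// mirror.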

--- a/casimport/__init__.py
+++ b/casimport/__init__.py

@@ -26,6 +26,7 @@ import contextlib
 import glob
 import hashlib
 import importlib.resources
+import mock
 import os.path
 import pathlib
 import shutil
@@ -167,7 +168,15 @@ class CASFinder(MetaPathFinder, Loader):
         self._aliases.update(self._parsealiases(aliases))
 
     @staticmethod
-    def _parsealiases(data):
+    def _makebasichashurl(url):
+        try:
+            hashurl = urllib.parse.urlparse(url)
+        except AttributeError:
+            hashurl = url
+        return urllib.parse.urlunparse(hashurl[:3] + ('', '', ''))
+
+    @classmethod
+    def _parsealiases(cls, data):
         ret = {}
 
         lines = data.split('\n')
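
The new _makebasichashurl helper reduces a hash URL to its scheme, netloc and path, dropping params, query and fragment, so variants such as a ?type=text/x-python suffix normalize to the same key; the except AttributeError branch appears to let it accept an already-parsed URL as well as a plain string. A quick standalone sketch of the same normalization, using only urllib.parse and the URL from the test data below:

    import urllib.parse

    url = 'hash://sha256/330884aa2febb5e19fb7194ec6a69ed11dd3d77122f1a5175ee93e73cf0161c3?type=text/x-python'
    parts = urllib.parse.urlparse(url)
    # keep scheme, netloc and path; blank out params, query and fragment
    print(urllib.parse.urlunparse(parts[:3] + ('', '', '')))
    # -> hash://sha256/330884aa2febb5e19fb7194ec6a69ed11dd3d77122f1a5175ee93e73cf0161c3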
@@ -178,6 +187,13 @@ class CASFinder(MetaPathFinder, Loader):
             name, hash = i.split()
             ret.setdefault(name, []).append(hash)
 
+        # split out the hashes
+        for items in list(ret.values()):
+            lst = [ x for x in items if not x.startswith('hash://') ]
+            for h in [ x for x in items if x.startswith('hash://') ]:
+                h = cls._makebasichashurl(h)
+                ret[h] = lst
+
         return ret
 
     def disconnect(self):
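
The loop added to _parsealiases cross-indexes the table: for each name it collects the non-hash URLs and stores them under the normalized form of every hash:// URL listed for that name; the updated expectation in the tests below shows the resulting extra entry. A minimal sketch of the same idea, with a hypothetical basic_hash_url standing in for _makebasichashurl and abbreviated URLs:

    import urllib.parse

    def basic_hash_url(url):
        # same normalization as _makebasichashurl: keep scheme, netloc and path
        parts = urllib.parse.urlparse(url)
        return urllib.parse.urlunparse(parts[:3] + ('', '', ''))

    aliases = {
        'hello': [
            'hash://sha256/3308...61c3?type=text/x-python',  # abbreviated
            'ipfs://bafkrei...albym',                        # abbreviated
        ],
    }

    for urls in list(aliases.values()):
        others = [u for u in urls if not u.startswith('hash://')]
        for h in (u for u in urls if u.startswith('hash://')):
            aliases[basic_hash_url(h)] = others

    # aliases now also maps 'hash://sha256/3308...61c3' -> ['ipfs://bafkrei...albym']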
@@ -245,10 +261,24 @@ class CASFinder(MetaPathFinder, Loader):
             try:
                 data = load.fetch_data(url)
                 break
-            except:
+            except Exception:
                 pass
 
         else:
-            raise ValueError('unable to find loader for url %s' % repr(urllib.parse.urlunparse(url)))
+            for url in self._aliases[self._makebasichashurl(url)]:
+                url = urllib.parse.urlparse(url)
+                for load in self._loaders:
+                    try:
+                        data = load.fetch_data(url)
+                        break
+                    except Exception:
+                        pass
+                else:
+                    continue
+
+                break
+            else:
+                raise ValueError('unable to find loader for url %s' % repr(urllib.parse.urlunparse(url)))
 
         exec(data, module.__dict__)
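
With the alias table cross-indexed, the lookup above becomes two passes: every registered loader is tried on the requested URL, and only on a complete miss does it retry with the URLs aliased to the normalized hash, raising only if those fail as well. A simplified, self-contained sketch of that control flow (hypothetical names; the real code works on already-parsed URLs, stores the result in a local data variable, and indexes self._aliases directly rather than using .get):

    import urllib.parse

    def basic_hash_url(url):
        # keep scheme, netloc and path, as _makebasichashurl does
        parts = urllib.parse.urlparse(url)
        return urllib.parse.urlunparse(parts[:3] + ('', '', ''))

    def fetch_with_fallback(url, loaders, aliases):
        # first the URL itself, then any URLs aliased to the same hash
        candidates = [url] + aliases.get(basic_hash_url(url), [])
        for candidate in candidates:
            parsed = urllib.parse.urlparse(candidate)
            for loader in loaders:
                try:
                    return loader.fetch_data(parsed)
                except Exception:
                    continue  # this loader cannot serve the URL; try the next
        raise ValueError('unable to find loader for url %r' % (url,))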

@@ -402,7 +432,10 @@ class Test(unittest.TestCase):
             'hello': [
                 'hash://sha256/330884aa2febb5e19fb7194ec6a69ed11dd3d77122f1a5175ee93e73cf0161c3?type=text/x-python',
                 'ipfs://bafkreibtbcckul7lwxqz7nyzj3dknhwrdxj5o4jc6gsroxxjhzz46albym',
-            ]
+            ],
+            'hash://sha256/330884aa2febb5e19fb7194ec6a69ed11dd3d77122f1a5175ee93e73cf0161c3': [
+                'ipfs://bafkreibtbcckul7lwxqz7nyzj3dknhwrdxj5o4jc6gsroxxjhzz46albym',
+            ],
         })
 
     def test_aliasimports(self):
@@ -430,9 +463,45 @@ class Test(unittest.TestCase):

                 # and pulled in the method
                 self.assertTrue(hasattr(randpkg, 'hello'))
+
+                del sys.modules['randpkg']
         finally:
             sys.path.remove(fixdir)
 
+    def test_aliasipfsimports(self):
+        # add the test module's path
+        fixdir = str(self.fixtures)
+        sys.path.append(fixdir)
+
+        # that a fake ipfsloader
+        with open(self.fixtures / 'hello.py') as fp:
+            # that returns the correct data
+            fakedata = fp.read()
+
+        def fakeload(url, fd=fakedata):
+            if url.scheme != 'ipfs' or url.netloc != 'bafkreibtbcckul7lwxqz7nyzj3dknhwrdxj5o4jc6gsroxxjhzz46albym':
+                raise ValueError
+
+            return fd
+
+        fakeipfsloader = mock.MagicMock()
+        fakeipfsloader.fetch_data = fakeload
+
+        try:
+            with CASFinder() as f, \
+                    tempattrset(sys.modules[__name__], 'load_aliases',
+                        f.load_aliases):
+
+                f.register(fakeipfsloader)
+
+                # that the import is successful
+                import randpkg
+
+                # and pulled in the method
+                self.assertTrue(hasattr(randpkg, 'hello'))
+        finally:
+            sys.path.remove(fixdir)
+
     def test_overlappingaliases(self):
         # make sure that an aliases file is consistent and does not
         # override other urls. That is that any hashes are consistent,

