A pure Python ASN.1 library. Supports dicts and sets.
#!/usr/bin/env python

'''A Pure Python ASN.1 encoder/decoder w/ a calling interface in the spirit
of pickle.

The default dumps/loads uses a profile of ASN.1 that supports serialization
of key/value pairs. This is non-standard. Instantiate the class ASN1Coder
to get a pure ASN.1 serializer/deserializer.

All lengths must be specified. That is, End-of-contents octets
MUST NOT be used. The shortest form of length encoding MUST be used.
A longer length encoding MUST be rejected.'''

__author__ = 'John-Mark Gurney'
__copyright__ = 'Copyright 2016-2020 John-Mark Gurney. All rights reserved.'
__license__ = '2-clause BSD license'

# Copyright 2016-2020, John-Mark Gurney
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
#    list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
#    this list of conditions and the following disclaimer in the documentation
#    and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the Project.

import datetime
import functools
import math
import os
import sys
import unittest

if sys.version_info.major != 3: # pragma: no cover
    raise RuntimeError('this module only supports python 3')

__all__ = [ 'dumps', 'loads', 'ASN1Coder' ]

def _numtobytes(n):
    hs = '%x' % n
    if len(hs) & 1 == 1:
        hs = '0' + hs

    bs = bytes.fromhex(hs)

    return bs

def _encodelen(l):
    '''Takes l as a length value, and returns a byte string that
    represents l per ASN.1 rules.'''

    if l < 128:
        return bytes([l])

    bs = _numtobytes(l)
    return bytes([len(bs) | 0x80]) + bs

def _decodelen(d, pos=0):
    '''Returns the length, and number of bytes required.'''

    odp = d[pos]
    if odp < 128:
        return d[pos], 1
    else:
        l = odp & 0x7f
        return int(d[pos + 1:pos + 1 + l].hex(), 16), l + 1

class Test_codelen(unittest.TestCase):
    _testdata = [
        (2, b'\x02'),
        (127, b'\x7f'),
        (128, b'\x81\x80'),
        (255, b'\x81\xff'),
        (256, b'\x82\x01\x00'),
        (65536-1, b'\x82\xff\xff'),
        (65536, b'\x83\x01\x00\x00'),
    ]

    def test_el(self):
        for i, j in self._testdata:
            self.assertEqual(_encodelen(i), j)
            self.assertEqual(_decodelen(j), (i, len(j)))

def _splitfloat(f):
    m, e = math.frexp(f)

    # XXX - less than ideal
    while m != math.trunc(m):
        m *= 2
        e -= 1

    return math.trunc(m), e

class TestSplitFloat(unittest.TestCase):
    def test_sf(self):
        for a, b in [ (0x2421, -32), (0x5382f, 238),
                      (0x1fa8c3b094adf1, 971) ]:
            self.assertEqual(_splitfloat(a * 2**b), (a, b))

class ASN1Coder(object):
    '''A class that contains an ASN.1 encoder/decoder.

    Exports two methods, loads and dumps.'''

    def __init__(self, coerce=None):
        '''If the arg coerce is provided, when dumping the object,
        if the type is not found, the coerce function will be called
        with the obj. It is expected to return a tuple of a string
        and an object that has the method w/ the string as defined:
            'bool': __nonzero__
            'float': compatible w/ float
            'int': compatible w/ int
            'list': __iter__
            'set': __iter__
            'bytes': __str__ # XXX what is correct here
            'null': no method needed
            'unicode': encode method returns UTF-8 encoded bytes
            'datetime': strftime and microsecond
        '''

        self.coerce = coerce
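
    # Illustrative sketch of a coerce callable (not part of the original API
    # surface; the Point class and _coerce_point name are hypothetical):
    #
    #     class Point:
    #         def __init__(self, x, y):
    #             self.x, self.y = x, y
    #
    #     def _coerce_point(obj):
    #         if isinstance(obj, Point):
    #             return 'list', [ obj.x, obj.y ]
    #         raise TypeError('unknown type: %s' % repr(obj))
    #
    #     coder = ASN1Coder(coerce=_coerce_point)
    #     coder.loads(coder.dumps(Point(1, 2)))  # -> [1, 2]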

    _typemap = {
        bool: 'bool',
        float: 'float',
        int: 'int',
        list: 'list',
        set: 'set',
        bytes: 'bytes',
        type(None): 'null',
        str: 'unicode',
        #decimal.Decimal: 'float',
        datetime.datetime: 'datetime',
        #datetime.timedelta: 'timedelta',
    }

    _tagmap = {
        b'\x01': 'bool',
        b'\x02': 'int',
        b'\x04': 'bytes',
        b'\x05': 'null',
        b'\x09': 'float',
        b'\x0c': 'unicode',
        b'\x18': 'datetime',
        b'\x30': 'list',
        b'\x31': 'set',
    }

    _typetag = dict((v, k) for k, v in _tagmap.items())

    @staticmethod
    def enc_int(obj, **kwargs):
        l = obj.bit_length()
        l += 1 # space for sign bit

        l = (l + 7) // 8

        if obj < 0:
            obj += 1 << (l * 8) # twos-complement conversion

        v = _numtobytes(obj)
        if len(v) != l:
            # XXX - is this a problem for signed values?
            v = b'\x00' + v # add sign octet

        return _encodelen(l) + v

    @staticmethod
    def dec_int(d, pos, end):
        if pos == end:
            return 0, end

        v = int(bytes.hex(d[pos:end]), 16)
        av = 1 << ((end - pos) * 8 - 1) # sign bit
        if v >= av:
            v -= av * 2 # twos-complement conversion

        return v, end
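
    # Worked example of the INTEGER encoding (matches test_primv below):
    # dumps(-257) yields 02 02 fe ff -- tag 0x02, length 2, then -257 as
    # big-endian twos-complement; dumps(128) yields 02 02 00 80, where the
    # leading 0x00 keeps the sign bit clear for a positive value.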

    @staticmethod
    def enc_bool(obj, **kwargs):
        return b'\x01' + (b'\xff' if obj else b'\x00')

    def dec_bool(self, d, pos, end):
        v = self.dec_int(d, pos, end)[0]
        if v not in (-1, 0):
            raise ValueError('invalid bool value: %d' % v)

        return bool(v), end

    @staticmethod
    def enc_null(obj, **kwargs):
        return b'\x00'

    @staticmethod
    def dec_null(d, pos, end):
        return None, end

    def enc_list(self, obj, **kwargs):
        r = b''.join(self.dumps(x, **kwargs) for x in obj)
        return _encodelen(len(r)) + r

    def dec_list(self, d, pos, end):
        r = []
        vend = pos
        while pos < end:
            v, vend = self._loads(d, pos, end)
            if vend > end:
                raise ValueError('load past end')
            r.append(v)
            pos = vend

        return r, vend

    enc_set = enc_list

    def dec_set(self, d, pos, end):
        r, end = self.dec_list(d, pos, end)
        return set(r), end

    @staticmethod
    def enc_bytes(obj, **kwargs):
        return _encodelen(len(obj)) + bytes(obj)

    @staticmethod
    def dec_bytes(d, pos, end):
        return d[pos:end], end

    @staticmethod
    def enc_unicode(obj, **kwargs):
        encobj = obj.encode('utf-8')
        return _encodelen(len(encobj)) + encobj

    def dec_unicode(self, d, pos, end):
        return d[pos:end].decode('utf-8'), end

    @staticmethod
    def enc_float(obj, **kwargs):
        s = math.copysign(1, obj)
        if math.isnan(obj):
            return _encodelen(1) + bytes([0b01000010])
        elif math.isinf(obj):
            if s == 1:
                return _encodelen(1) + bytes([0b01000000])
            else:
                return _encodelen(1) + bytes([0b01000001])
        elif obj == 0:
            if s == 1:
                return _encodelen(0)
            else:
                return _encodelen(1) + bytes([0b01000011])

        m, e = _splitfloat(obj)

        # Binary encoding
        val = 0x80
        if m < 0:
            val |= 0x40
            m = -m

        # Base 2
        el = (e.bit_length() + 7 + 1) // 8 # + 1 is sign bit
        if el > 2:
            raise ValueError('exponent too large')

        if e < 0:
            e += 256**el # convert negative to twos-complement

        v = el - 1
        encexp = _numtobytes(e)

        val |= v
        r = bytes([val]) + encexp + _numtobytes(m)
        return _encodelen(len(r)) + r

    def dec_float(self, d, pos, end):
        if pos == end:
            return float(0), end

        v = d[pos]
        if v == 0b01000000:
            return float('inf'), end
        elif v == 0b01000001:
            return float('-inf'), end
        elif v == 0b01000010:
            return float('nan'), end
        elif v == 0b01000011:
            return float('-0'), end
        elif v & 0b110000:
            raise ValueError('base must be 2')
        elif v & 0b1100:
            raise ValueError('scaling factor must be 0')
        elif v & 0b11000000 == 0:
            raise ValueError('decimal encoding not supported')
        #elif v & 0b11000000 == 0b01000000:
        #    raise ValueError('invalid encoding')

        if (v & 3) >= 2:
            raise ValueError('large exponents not supported')

        pexp = pos + 1
        eexp = pos + 1 + (v & 3) + 1

        exp = self.dec_int(d, pexp, eexp)[0]

        n = float(int(bytes.hex(d[eexp:end]), 16))
        r = n * 2 ** exp
        if v & 0b1000000:
            r = -r

        return r, end
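
    # Worked example of the REAL encoding (matches test_primv below):
    # 0.15625 is 5 * 2**-5, so dumps(.15625) yields 09 03 80 fb 05 --
    # tag 0x09, length 3, header 0x80 (binary encoding, base 2, 1-byte
    # exponent), exponent 0xfb (-5 in twos-complement), mantissa 0x05.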

    def dumps(self, obj, default=None):
        '''Convert obj into an array of bytes.

        ``default(obj)`` is a function that should return a
        serializable version of obj or raise TypeError. The
        default simply raises TypeError.
        '''

        try:
            tf = self._typemap[type(obj)]
        except KeyError:
            if default is not None:
                try:
                    return self.dumps(default(obj), default=default)
                except TypeError:
                    pass

            if self.coerce is None:
                raise TypeError('unhandled object: %s' % repr(obj))

            tf, obj = self.coerce(obj)

        fun = getattr(self, 'enc_%s' % tf)
        return self._typetag[tf] + fun(obj, default=default)

    def _loads(self, data, pos, end):
        tag = data[pos:pos + 1]
        l, b = _decodelen(data, pos + 1)
        if len(data) < pos + 1 + b + l:
            raise ValueError('string not long enough')

        # XXX - enforce that len(data) == end?
        end = pos + 1 + b + l

        t = self._tagmap[tag]
        fun = getattr(self, 'dec_%s' % t)
        return fun(data, pos + 1 + b, end)

    def enc_datetime(self, obj, **kwargs):
        obj = obj.astimezone(datetime.timezone.utc)
        ts = obj.strftime('%Y%m%d%H%M%S')
        if obj.microsecond:
            ts += ('.%06d' % obj.microsecond).rstrip('0')

        ts += 'Z'
        return _encodelen(len(ts)) + ts.encode('utf-8')

    def dec_datetime(self, data, pos, end):
        ts = data[pos:end].decode('ascii')
        if ts[-1:] != 'Z':
            raise ValueError('last character must be Z, was: %s' % repr(ts[-1]))

        # Real bug is in strptime, but work around it here.
        if ' ' in ts:
            raise ValueError('no spaces are allowed')

        if '.' in ts:
            fstr = '%Y%m%d%H%M%S.%fZ'
            if ts.endswith('0Z'):
                raise ValueError('invalid trailing zeros')
        else:
            fstr = '%Y%m%d%H%M%SZ'

        return datetime.datetime.strptime(ts, fstr).replace(tzinfo=datetime.timezone.utc), end
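
    # Example of the GeneralizedTime text form produced above:
    # datetime.datetime(2016, 2, 15, 8, 40, 16, tzinfo=datetime.timezone.utc)
    # serializes its content octets as b'20160215084016Z'; fractional seconds
    # are appended as '.59089'-style digits with trailing zeros stripped.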

    def loads(self, data, pos=0, end=None, consume=False):
        '''Load from data, starting at pos (optional), and ending
        at end (optional). If it is required to consume the
        whole string (not the default), set consume to True, and
        a ValueError will be raised if the string is not
        completely consumed. The second item in the ValueError will
        be the position that was the detected end.'''

        if end is None:
            end = len(data)

        r, e = self._loads(data, pos, end)

        if consume and e != end:
            raise ValueError('entire string not consumed', e)

        return r

class ASN1DictCoder(ASN1Coder):
    '''This adds support for the non-standard dict serialization.

    The coerce method also supports the following type:
        'dict': items
    '''

    _typemap = ASN1Coder._typemap.copy()
    _typemap[dict] = 'dict'
    _tagmap = ASN1Coder._tagmap.copy()
    _tagmap[b'\xe0'] = 'dict'
    _typetag = dict((v, k) for k, v in _tagmap.items())

    def enc_dict(self, obj, **kwargs):
        #it = list(obj.iteritems())
        #it.sort()
        r = b''.join(self.dumps(k, **kwargs) + self.dumps(v, **kwargs) for k, v in
                obj.items())
        return _encodelen(len(r)) + r

    def dec_dict(self, d, pos, end):
        r = {}
        vend = pos
        while pos < end:
            k, kend = self._loads(d, pos, end)
            #if kend > end:
            #    raise ValueError('key past end')
            v, vend = self._loads(d, kend, end)
            if vend > end:
                raise ValueError('value past end')
            r[k] = v
            pos = vend

        return r, vend
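
# Illustrative wire format for the non-standard dict encoding: the content is
# the concatenation of alternating key/value encodings under the non-standard
# 0xe0 tag. For example, dumps({5: 1}) produces e0 06 02 01 05 02 01 01, i.e.
# the INTEGER encodings of 5 and 1 wrapped in a 6-byte dict.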

_coder = ASN1DictCoder()
dumps = _coder.dumps
loads = _coder.loads
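
# Usage sketch (illustrative only; the _roundtrip_example name is not part of
# the module): the module-level dumps/loads round-trip the supported types,
# including the non-standard dict profile.
def _roundtrip_example():
    data = { 1: 'one', 'nested': [ 1.5, None, b'raw' ], 'flags': set((True,)) }
    encoded = dumps(data)
    assert loads(encoded, consume=True) == data
    return encoded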

def cmp(a, b):
    return (a > b) - (a < b)

def universal_cmp(a, b):
    # Because Python 3 sucks, make this function that
    # orders first based upon type, then on value.
    if type(a) == type(b):
        if isinstance(a, (tuple, list)):
            for a, b in zip(a, b):
                if a != b:
                    return universal_cmp(a, b)
            #return cmp(len(a), len(b))
        else:
            return cmp(a, b)
    else:
        return id(type(a)) < id(type(b))

def deeptypecmp(obj, o):
    #print('dtc:', repr(obj), repr(o))
    if type(obj) != type(o):
        return False

    if type(obj) is str:
        return True

    if type(obj) in (list, set):
        for i, j in zip(obj, o):
            if not deeptypecmp(i, j):
                return False

    if type(obj) in (dict,):
        itms = sorted(obj.items(), key=functools.cmp_to_key(universal_cmp))
        nitms = sorted(o.items(), key=functools.cmp_to_key(universal_cmp))
        for (k, v), (nk, nv) in zip(itms, nitms):
            if not deeptypecmp(k, nk):
                return False
            if not deeptypecmp(v, nv):
                return False

    return True

class Test_deeptypecmp(unittest.TestCase):
    def test_true(self):
        for i in ((1,1), ('sldkfj', 'sldkfj')
                ):
            self.assertTrue(deeptypecmp(*i))

    def test_false(self):
        for i in (([[]], [{}]), ([1], ['str']), ([], set()),
                ({1: 2, 5: 'sdlkfj'}, {1: 2, 5: b'sdlkfj'}),
                ({1: 2, 'sdlkfj': 5}, {1: 2, b'sdlkfj': 5}),
                ):
            self.assertFalse(deeptypecmp(*i), '%s != %s' % (i[0], i[1]))

def genfailures(obj):
    s = dumps(obj)
    for i in range(len(s)):
        for j in (bytes([x]) for x in range(256)):
            ts = s[:i] + j + s[i + 1:]
            if ts == s:
                continue
            try:
                o = loads(ts, consume=True)
                if o != obj or not deeptypecmp(o, obj):
                    raise ValueError
            except (ValueError, KeyError, IndexError, TypeError):
                pass
            else: # pragma: no cover
                raise AssertionError('uncaught modification: %s, byte %d, orig: %02x' % (ts.hex(), i, s[i]))

class TestCode(unittest.TestCase):
    def test_primv(self):
        self.assertEqual(dumps(-257), bytes.fromhex('0202feff'))
        self.assertEqual(dumps(-256), bytes.fromhex('0202ff00'))
        self.assertEqual(dumps(-255), bytes.fromhex('0202ff01'))
        self.assertEqual(dumps(-1), bytes.fromhex('0201ff'))
        self.assertEqual(dumps(5), bytes.fromhex('020105'))
        self.assertEqual(dumps(128), bytes.fromhex('02020080'))
        self.assertEqual(dumps(256), bytes.fromhex('02020100'))

        self.assertEqual(dumps(False), bytes.fromhex('010100'))
        self.assertEqual(dumps(True), bytes.fromhex('0101ff'))

        self.assertEqual(dumps(None), bytes.fromhex('0500'))

        self.assertEqual(dumps(.15625), bytes.fromhex('090380fb05'))

    def test_fuzzing(self):
        # Make sure that when a failure is detected here, it gets
        # added to test_invalids, so that this function may be
        # disabled.
        genfailures(float(1))
        genfailures([ 1, 2, 'sdlkfj' ])
        genfailures({ 1: 2, 5: 'sdlkfj' })
        genfailures(set([ 1, 2, 'sdlkfj' ]))
        genfailures(True)
        genfailures(datetime.datetime.utcnow())

    def test_invalids(self):
        # Add tests for base 8, 16 floats among others
        for v in [ '010101',
                '0903040001', # float scaling factor
                '0903840001', # float scaling factor
                '0903100001', # float base
                '0903900001', # float base
                '0903000001', # float decimal encoding
                '0903830001', # float exponent encoding
                '090b827fffcc0df505d0fa58f7', # float large exponent
                '3007020101020102040673646c6b666a', # list short string still valid
                'e007020101020102020105040673646c6b666a', # dict short value still valid
                '181632303136303231353038343031362e3539303839305a', # datetime w/ trailing zero
                '181632303136303231373136343034372e3035343433367a', # datetime w/ lower z
                '181632303136313220383031303933302e3931353133385a', # datetime w/ space
                ]:
            self.assertRaises(ValueError, loads, bytes.fromhex(v))

    def test_invalid_floats(self):
        from unittest import mock
        with mock.patch('math.frexp', return_value=(.87232, 1 << 23)):
            self.assertRaises(ValueError, dumps, 1.1)

    def test_consume(self):
        b = dumps(5)
        self.assertRaises(ValueError, loads, b + b'398473',
                consume=True)

        # XXX - still possible that an internal data member
        # doesn't consume all

    # XXX - test that sets are ordered properly
    # XXX - test that dicts are ordered properly..

    def test_nan(self):
        s = dumps(float('nan'))
        v = loads(s)
        self.assertTrue(math.isnan(v))

    def test_cryptoutilasn1(self):
        '''Test DER sequences generated by Crypto.Util.asn1.'''

        for s, v in [ (b'\x02\x03$\x8a\xf9', 2394873),
                (b'\x05\x00', None),
                (b'\x02\x03\x00\x96I', 38473),
                (b'\x04\x81\xc8' + b'\x00' * 200, b'\x00' * 200),
                ]:
            self.assertEqual(loads(s), v)

    def test_longstrings(self):
        for i in (203, 65484):
            s = os.urandom(i)
            v = dumps(s)
            self.assertEqual(loads(v), s)

    def test_invaliddate(self):
        pass
        # XXX - add test to reject datetime w/ tzinfo, or that it
        # handles it properly

    def test_tzdate(self):
        dlocal = datetime.datetime.now()
        dutc = dlocal.astimezone(datetime.timezone.utc)
        dts = dutc.timestamp()

        # sanity check
        self.assertEqual(dts, dlocal.timestamp())

        # verify that the same datetime, but with different
        # tzinfo, is serialized the same way
        self.assertEqual(dumps(dlocal), dumps(dutc))

        # that when dutc is read back
        dround = loads(dumps(dutc))

        # that it represents the same time
        self.assertEqual(dround.timestamp(), dts)

        # that when dlocal is read back
        dround = loads(dumps(dlocal))

        # that it represents the same time
        self.assertEqual(dround.timestamp(), dts)

    def test_dumps(self):
        for i in [ None,
                True, False,
                -1, 0, 1, 255, 256, -255, -256,
                23498732498723, -2398729387234,
                (1<<2383) + 23984734, (-1<<1983) + 23984723984,
                float(0), float('-0'), float('inf'), float('-inf'),
                float(1.0), float(-1.0), float('353.3487'),
                float('2.38723873e+307'), float('2.387349e-317'),
                sys.float_info.max, sys.float_info.min,
                float('.15625'),
                'weoifjwef',
                '\U0001f4a9',
                [], [ 1,2,3 ],
                {}, { 5: 10, 'adfkj': 34 },
                set(), set((1,2,3)),
                set((1,'sjlfdkj', None, float('inf'))),
                datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc),
                [ datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc), ' ' ],
                datetime.datetime.utcnow().replace(microsecond=0, tzinfo=datetime.timezone.utc),
                datetime.datetime.utcnow().replace(microsecond=1000, tzinfo=datetime.timezone.utc),
                ]:
            s = dumps(i)
            o = loads(s)
            self.assertEqual(i, o)

        tobj = { 1: 'dflkj', 5: 'sdlkfj', 'float': 1,
            'largeint': 1<<342, 'list': [ 1, 2, 'str', 'str' ] }
        out = dumps(tobj)
        self.assertEqual(tobj, loads(out))

    def test_coerce(self):
        class Foo:
            pass

        class Bar:
            pass

        class Baz:
            pass

        def coerce(obj):
            if isinstance(obj, Foo):
                return 'list', obj.lst
            elif isinstance(obj, Baz):
                return 'bytes', obj.s

            raise TypeError('unknown type')

        ac = ASN1Coder(coerce)

        v = [1, 2, 3]
        o = Foo()
        o.lst = v
        self.assertEqual(ac.loads(ac.dumps(o)), v)
        self.assertRaises(TypeError, ac.dumps, Bar())

        v = b'oiejfd'
        o = Baz()
        o.s = v
        es = ac.dumps(o)
        self.assertEqual(ac.loads(es), v)
        self.assertIsInstance(es, bytes)

        self.assertRaises(TypeError, dumps, o)

    def test_loads(self):
        self.assertRaises(ValueError, loads, b'\x00\x02\x00')

    def test_nodict(self):
        '''Verify that ASN1Coder does not support dict.'''

        self.assertRaises(KeyError, ASN1Coder().loads, dumps({}))

    def test_dumps_default(self):
        '''Test that dumps supports the default method, and that
        it works.'''

        class Dummy(object):
            def somefun(self):
                return 5

        class Dummy2(object):
            def somefun(self):
                return [ Dummy() ]

        def deffun(obj):
            try:
                return obj.somefun()
            except Exception:
                raise TypeError

        self.assertEqual(dumps(5), dumps(Dummy(), default=deffun))

        # Make sure it works for the various containers
        self.assertEqual(dumps([5]), dumps([Dummy()], default=deffun))
        self.assertEqual(dumps({ 5: 5 }), dumps({ Dummy(): Dummy() },
            default=deffun))
        self.assertEqual(dumps([5]), dumps(Dummy2(), default=deffun))

        # Make sure that an error is raised when the function doesn't work
        self.assertRaises(TypeError, dumps, object(), default=deffun)