From 91014f8bb4869ce123e631981d94b137abfaa68e Mon Sep 17 00:00:00 2001 From: Tobias Oberstein Date: Thu, 2 Jun 2022 02:29:41 +0200 Subject: [PATCH] support trustroot/certificate in WAMP-cryptosign (#1569) see changelog --- MANIFEST.in | 1 + Makefile | 6 +- autobahn/_version.py | 2 +- autobahn/util.py | 4 +- autobahn/wamp/cryptosign.py | 328 ++++++- autobahn/wamp/test/test_wamp_cryptosign.py | 53 +- autobahn/wamp/test/test_wamp_uri_pattern.py | 45 +- autobahn/wamp/uri.py | 36 +- autobahn/xbr/__init__.py | 4 +- autobahn/xbr/_cli.py | 278 +----- autobahn/xbr/_schema.py | 841 +++++++++++++++--- .../{ => py-autobahn}/enum.py.jinja2 | 0 .../{ => py-autobahn}/module.py.jinja2 | 0 .../xbr/templates/py-autobahn/obj.py.jinja2 | 360 ++++++++ .../{ => py-autobahn}/service.py.jinja2 | 8 +- .../{ => py-autobahn}/test_enum.py.jinja2 | 0 .../{ => py-autobahn}/test_module.py.jinja2 | 0 .../templates/py-autobahn/test_obj.py.jinja2 | 220 +++++ .../{ => py-autobahn}/test_service.py.jinja2 | 0 .../obj-eip712.sol.jinja2} | 0 autobahn/xbr/templates/test_obj.py.jinja2 | 219 ----- docs/changelog.rst | 16 +- mypy.ini | 68 ++ setup.py | 7 +- 24 files changed, 1862 insertions(+), 634 deletions(-) rename autobahn/xbr/templates/{ => py-autobahn}/enum.py.jinja2 (100%) rename autobahn/xbr/templates/{ => py-autobahn}/module.py.jinja2 (100%) create mode 100644 autobahn/xbr/templates/py-autobahn/obj.py.jinja2 rename autobahn/xbr/templates/{ => py-autobahn}/service.py.jinja2 (92%) rename autobahn/xbr/templates/{ => py-autobahn}/test_enum.py.jinja2 (100%) rename autobahn/xbr/templates/{ => py-autobahn}/test_module.py.jinja2 (100%) create mode 100644 autobahn/xbr/templates/py-autobahn/test_obj.py.jinja2 rename autobahn/xbr/templates/{ => py-autobahn}/test_service.py.jinja2 (100%) rename autobahn/xbr/templates/{obj.py.jinja2 => sol-eip712/obj-eip712.sol.jinja2} (100%) delete mode 100644 autobahn/xbr/templates/test_obj.py.jinja2 create mode 100644 mypy.ini diff --git a/MANIFEST.in b/MANIFEST.in index 
da5d532e2..bd0fa53bc 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,3 +3,4 @@ include requirements-dev.txt include autobahn/nvx/_utf8validator.c recursive-include autobahn/wamp/gen/schema * recursive-include autobahn/asset * +recursive-include autobahn/xbr/templates * diff --git a/Makefile b/Makefile index 39194df38..4ec87f5ab 100755 --- a/Makefile +++ b/Makefile @@ -128,6 +128,9 @@ upload_exe: aws cloudfront create-invalidation \ --distribution-id E2QIG9LNGCJSP9 --paths "/xbrnetwork/linux-amd64/*" +mypy: + mypy --install-types --non-interactive autobahn + # WEB3_INFURA_PROJECT_ID must be defined for this test_infura: time -f "%e" python -c "from web3.auto.infura import w3; print(w3.isConnected())" @@ -253,7 +256,8 @@ test_cs1: USE_ASYNCIO=1 python -m pytest -s -v autobahn/wamp/test/test_cryptosign.py test1: - USE_TWISTED=1 trial autobahn.wamp.test.test_auth + USE_TWISTED=1 trial autobahn.wamp.test.test_wamp_uri_pattern +# USE_TWISTED=1 trial autobahn.wamp.test.test_auth # USE_TWISTED=1 python -m pytest -s -v autobahn/wamp/test/test_auth.py # USE_TWISTED=1 python -m pytest -s -v autobahn/wamp/test/test_router.py # USE_ASYNCIO=1 python -m pytest -s -v autobahn/wamp/test/test_router.py diff --git a/autobahn/_version.py b/autobahn/_version.py index 7abb88db9..d9374c39f 100644 --- a/autobahn/_version.py +++ b/autobahn/_version.py @@ -24,6 +24,6 @@ # ############################################################################### -__version__ = '22.5.1.dev1' +__version__ = '22.5.1.dev2' __build__ = '00000000-0000000' diff --git a/autobahn/util.py b/autobahn/util.py index da8c412c6..acc48cc50 100644 --- a/autobahn/util.py +++ b/autobahn/util.py @@ -962,8 +962,8 @@ def hluserid(oid): return hl('"{}"'.format(oid), color='yellow', bold=True) -def hlval(val, color='white'): - return hl('{}'.format(val), color=color, bold=True) +def hlval(val, color='white', bold=True): + return hl('{}'.format(val), color=color, bold=bold) def hlcontract(oid): diff --git 
a/autobahn/wamp/cryptosign.py b/autobahn/wamp/cryptosign.py index 14dbd1dd2..f9ecc3e26 100644 --- a/autobahn/wamp/cryptosign.py +++ b/autobahn/wamp/cryptosign.py @@ -25,14 +25,16 @@ ############################################################################### import binascii +from binascii import a2b_hex, b2a_hex import struct -from typing import Callable, Optional, Union +from typing import Callable, Optional, Union, Dict, Any import txaio from autobahn import util from autobahn.wamp.interfaces import ISecurityModule, ICryptosignKey, ISession from autobahn.wamp.types import Challenge +from autobahn.wamp.message import _URI_PAT_REALM_NAME_ETH __all__ = [ 'HAS_CRYPTOSIGN', @@ -512,6 +514,9 @@ def sign_challenge(self, session: ISession, challenge: Challenge, """ Implements :meth:`autobahn.wamp.interfaces.ICryptosignKey.sign_challenge`. """ + assert challenge.method in ['cryptosign', 'cryptosign-proxy'], \ + 'unexpected cryptosign challenge with method "{}"'.format(challenge.method) + # get the TLS channel ID of the underlying TLS connection if channel_id_type in session._transport.transport_details.channel_id: channel_id = session._transport.transport_details.channel_id.get(channel_id_type, None) @@ -649,4 +654,323 @@ def from_seedphrase(cls, seedphrase: str, index: int = 0) -> 'CryptosignKey': ICryptosignKey.register(CryptosignKey) - __all__.extend(['CryptosignKey', 'format_challenge', 'sign_challenge']) + class CryptosignAuthextra(object): + """ + WAMP-Cryptosign authextra object. 
+ """ + __slots__ = [ + '_pubkey', + '_trustroot', + '_challenge', + '_channel_binding', + '_channel_id', + '_realm', + '_chain_id', + '_block_no', + '_delegate', + '_seeder', + '_bandwidth', + '_signature', + ] + + def __init__(self, + pubkey: Optional[bytes] = None, + challenge: Optional[bytes] = None, + channel_binding: Optional[str] = None, + channel_id: Optional[bytes] = None, + + # domain address, certificates are verified against owner of the domain + trustroot: Optional[bytes] = None, + + # FIXME: add delegate address + # FIXME: add certificates + # FIXME: remove reservation + realm: Optional[bytes] = None, + chain_id: Optional[int] = None, + block_no: Optional[int] = None, + delegate: Optional[bytes] = None, + seeder: Optional[bytes] = None, + bandwidth: Optional[int] = None, + + signature: Optional[bytes] = None, + ): + if pubkey: + assert len(pubkey) == 32 + if trustroot: + assert len(trustroot) == 20 + if challenge: + assert len(challenge) == 32 + if channel_binding: + assert channel_binding in ['tls-unique'] + if channel_id: + assert len(channel_id) == 32 + if realm: + assert len(realm) == 20 + if delegate: + assert len(delegate) == 20 + if seeder: + assert len(seeder) == 20 + if signature: + assert len(signature) == 65 + self._pubkey = pubkey + self._trustroot = trustroot + self._challenge = challenge + self._channel_binding = channel_binding + self._channel_id = channel_id + self._realm = realm + self._chain_id = chain_id + self._block_no = block_no + self._delegate = delegate + self._seeder = seeder + self._bandwidth = bandwidth + self._signature = signature + + @property + def pubkey(self) -> Optional[bytes]: + return self._pubkey + + @pubkey.setter + def pubkey(self, value: Optional[bytes]): + assert value is None or len(value) == 20 + self._pubkey = value + + @property + def trustroot(self) -> Optional[bytes]: + return self._trustroot + + @trustroot.setter + def trustroot(self, value: Optional[bytes]): + assert value is None or len(value) == 20 + 
self._trustroot = value + + @property + def challenge(self) -> Optional[bytes]: + return self._challenge + + @challenge.setter + def challenge(self, value: Optional[bytes]): + assert value is None or len(value) == 32 + self._challenge = value + + @property + def channel_binding(self) -> Optional[str]: + return self._channel_binding + + @channel_binding.setter + def channel_binding(self, value: Optional[str]): + assert value is None or value in ['tls-unique'] + self._channel_binding = value + + @property + def channel_id(self) -> Optional[bytes]: + return self._channel_id + + @channel_id.setter + def channel_id(self, value: Optional[bytes]): + assert value is None or len(value) == 32 + self._channel_id = value + + @property + def realm(self) -> Optional[bytes]: + return self._realm + + @realm.setter + def realm(self, value: Optional[bytes]): + assert value is None or len(value) == 20 + self._realm = value + + @property + def chain_id(self) -> Optional[int]: + return self._chain_id + + @chain_id.setter + def chain_id(self, value: Optional[int]): + assert value is None or value > 0 + self._chain_id = value + + @property + def block_no(self) -> Optional[int]: + return self._block_no + + @block_no.setter + def block_no(self, value: Optional[int]): + assert value is None or value > 0 + self._block_no = value + + @property + def delegate(self) -> Optional[bytes]: + return self._delegate + + @delegate.setter + def delegate(self, value: Optional[bytes]): + assert value is None or len(value) == 20 + self._delegate = value + + @property + def seeder(self) -> Optional[bytes]: + return self._seeder + + @seeder.setter + def seeder(self, value: Optional[bytes]): + assert value is None or len(value) == 20 + self._seeder = value + + @property + def bandwidth(self) -> Optional[int]: + return self._bandwidth + + @bandwidth.setter + def bandwidth(self, value: Optional[int]): + assert value is None or value > 0 + self._bandwidth = value + + @property + def signature(self) -> 
Optional[bytes]: + return self._signature + + @signature.setter + def signature(self, value: Optional[bytes]): + assert value is None or len(value) == 65 + self._signature = value + + @staticmethod + def parse(data: Dict[str, Any]) -> 'CryptosignAuthextra': + obj = CryptosignAuthextra() + + pubkey = data.get('pubkey', None) + if pubkey is not None: + if type(pubkey) != str: + raise ValueError('invalid type {} for pubkey'.format(type(pubkey))) + if len(pubkey) != 32 * 2: + raise ValueError('invalid length {} of pubkey'.format(len(pubkey))) + obj._pubkey = a2b_hex(pubkey) + + challenge = data.get('challenge', None) + if challenge is not None: + if type(challenge) != str: + raise ValueError('invalid type {} for challenge'.format(type(challenge))) + if len(challenge) != 32 * 2: + raise ValueError('invalid length {} of challenge'.format(len(challenge))) + obj._challenge = a2b_hex(challenge) + + channel_binding = data.get('channel_binding', None) + if channel_binding is not None: + if type(channel_binding) != str: + raise ValueError('invalid type {} for channel_binding'.format(type(channel_binding))) + if channel_binding not in ['tls-unique']: + raise ValueError('invalid value "{}" for channel_binding'.format(channel_binding)) + obj._channel_binding = channel_binding + + channel_id = data.get('channel_id', None) + if channel_id is not None: + if type(channel_id) != str: + raise ValueError('invalid type {} for channel_id'.format(type(channel_id))) + if len(channel_id) != 32 * 2: + raise ValueError('invalid length {} of channel_id'.format(len(channel_id))) + obj._channel_id = a2b_hex(channel_id) + + trustroot = data.get('trustroot', None) + if trustroot is not None: + if type(trustroot) != str: + raise ValueError('invalid type {} for trustroot - expected a string'.format(type(trustroot))) + if not _URI_PAT_REALM_NAME_ETH.match(trustroot): + raise ValueError('invalid value "{}" for trustroot - expected an Ethereum address'.format(type(trustroot))) + obj._trustroot = 
a2b_hex(trustroot[2:]) + + reservation = data.get('reservation', None) + if reservation is not None: + if type(reservation) != dict: + raise ValueError('invalid type {} for reservation'.format(type(reservation))) + + chain_id = reservation.get('chain_id', None) + if chain_id is not None: + if type(chain_id) != int: + raise ValueError('invalid type {} for reservation.chain_id - expected an integer'.format(type(chain_id))) + obj._chain_id = chain_id + + block_no = reservation.get('block_no', None) + if block_no is not None: + if type(block_no) != int: + raise ValueError('invalid type {} for reservation.block_no - expected an integer'.format(type(block_no))) + obj._block_no = block_no + + realm = reservation.get('realm', None) + if realm is not None: + if type(realm) != str: + raise ValueError('invalid type {} for reservation.realm - expected a string'.format(type(realm))) + if not _URI_PAT_REALM_NAME_ETH.match(realm): + raise ValueError('invalid value "{}" for reservation.realm - expected an Ethereum address'.format(type(realm))) + obj._realm = a2b_hex(realm[2:]) + + delegate = reservation.get('delegate', None) + if delegate is not None: + if type(delegate) != str: + raise ValueError('invalid type {} for reservation.delegate - expected a string'.format(type(delegate))) + if not _URI_PAT_REALM_NAME_ETH.match(delegate): + raise ValueError('invalid value "{}" for reservation.delegate - expected an Ethereum address'.format(type(delegate))) + obj._delegate = a2b_hex(delegate[2:]) + + seeder = reservation.get('seeder', None) + if seeder is not None: + if type(seeder) != str: + raise ValueError('invalid type {} for reservation.seeder - expected a string'.format(type(seeder))) + if not _URI_PAT_REALM_NAME_ETH.match(seeder): + raise ValueError('invalid value "{}" for reservation.seeder - expected an Ethereum address'.format(type(seeder))) + obj._seeder = a2b_hex(seeder[2:]) + + bandwidth = reservation.get('bandwidth', None) + if bandwidth is not None: + if type(bandwidth) != 
int: + raise ValueError('invalid type {} for reservation.bandwidth - expected an integer'.format(type(bandwidth))) + obj._bandwidth = bandwidth + + signature = data.get('signature', None) + if signature is not None: + if type(signature) != str: + raise ValueError('invalid type {} for signature'.format(type(signature))) + if len(signature) != 65 * 2: + raise ValueError('invalid length {} of signature'.format(len(signature))) + obj._signature = a2b_hex(signature) + + return obj + + def marshal(self) -> Dict[str, Any]: + res = {} + + # FIXME: marshal check-summed eth addresses + + if self._pubkey is not None: + res['pubkey'] = b2a_hex(self._pubkey).decode() + + if self._challenge is not None: + res['challenge'] = b2a_hex(self._challenge).decode() + if self._channel_binding is not None: + res['channel_binding'] = self._channel_binding + if self._channel_id is not None: + res['channel_id'] = b2a_hex(self._channel_id).decode() + + if self._trustroot is not None: + res['trustroot'] = '0x' + b2a_hex(self._trustroot).decode() + + reservation = {} + if self._chain_id is not None: + reservation['chain_id'] = self._chain_id + if self._block_no is not None: + reservation['block_no'] = self._block_no + if self._realm is not None: + reservation['realm'] = '0x' + b2a_hex(self._realm).decode() + if self._delegate is not None: + reservation['delegate'] = '0x' + b2a_hex(self._delegate).decode() + if self._seeder is not None: + reservation['seeder'] = '0x' + b2a_hex(self._seeder).decode() + if self._bandwidth is not None: + reservation['bandwidth'] = self._bandwidth + if reservation: + res['reservation'] = reservation + + if self._signature is not None: + res['signature'] = b2a_hex(self._signature).decode() + + return res + + __all__.extend(['CryptosignKey', 'format_challenge', 'sign_challenge', 'CryptosignAuthextra']) diff --git a/autobahn/wamp/test/test_wamp_cryptosign.py b/autobahn/wamp/test/test_wamp_cryptosign.py index 34dd38edc..ccc3cda51 100644 --- 
a/autobahn/wamp/test/test_wamp_cryptosign.py +++ b/autobahn/wamp/test/test_wamp_cryptosign.py @@ -41,7 +41,7 @@ from autobahn.wamp import types from autobahn.wamp.auth import create_authenticator -from autobahn.wamp.cryptosign import _makepad, HAS_CRYPTOSIGN +from autobahn.wamp.cryptosign import _makepad, HAS_CRYPTOSIGN, CryptosignAuthextra if HAS_CRYPTOSIGN: from autobahn.wamp.cryptosign import CryptosignKey @@ -101,7 +101,7 @@ def test_valid(self): session = Mock() session._transport.transport_details = self.transport_details - challenge = types.Challenge("ticket", dict(challenge="ff" * 32)) + challenge = types.Challenge("cryptosign", dict(challenge="ff" * 32)) f_signed = self.key.sign_challenge(session, challenge, channel_id_type='tls-unique') def success(signed): @@ -125,7 +125,7 @@ def test_testvectors(self): for testvec in testvectors: priv_key = CryptosignKey.from_bytes(binascii.a2b_hex(testvec['priv_key'])) - challenge = types.Challenge("ticket", dict(challenge=testvec['challenge'])) + challenge = types.Challenge("cryptosign", dict(challenge=testvec['challenge'])) f_signed = priv_key.sign_challenge(session, challenge, channel_id_type='tls-unique') def success(signed): @@ -192,3 +192,50 @@ def test_pubkey(self): key = CryptosignKey.from_ssh_file(fp.name) self.assertEqual(key.public_key(binary=False), '9569de18c7c0843212569dcddf2615c7f46125dc9b2292dea30b07b56a4d02a6') self.assertEqual(key.comment, 'someuser@example.com') + + +class TestAuthExtra(unittest.TestCase): + def test_default_ctor(self): + ae = CryptosignAuthextra() + self.assertEqual(ae.marshal(), {}) + + def test_ctor(self): + ae1 = CryptosignAuthextra(pubkey=b'\xff' * 32) + self.assertEqual(ae1.marshal(), { + 'pubkey': 'ff' * 32 + }) + + ae1 = CryptosignAuthextra(pubkey=b'\xff' * 32, bandwidth=200) + self.assertEqual(ae1.marshal(), { + 'pubkey': 'ff' * 32, + 'reservation': { + 'bandwidth': 200 + } + }) + + def test_parse(self): + data_original = { + 'pubkey': 
'9019a424b040859c108edee02e64c1dcb32b253686d7b5db56c306e9bdb2fe7e', + 'challenge': 'fe81c84e94a75a357c259d6b37361e43966a45f57dff181bb61b2f91a0f4ac88', + 'channel_binding': 'tls-unique', + 'channel_id': '2e642bf991f48ece9133a0a32d15550921dda12bfebfbc941571d4b2960540bc', + 'trustroot': '0xe78ea2fE1533D4beD9A10d91934e109A130D0ad8', + 'reservation': { + 'chain_id': 999, + 'block_no': 123456789, + 'realm': '0x163D58cE482560B7826b4612f40aa2A7d53310C4', + 'delegate': '0x72b3486d38E9f49215b487CeAaDF27D6acf22115', + 'seeder': '0x52d66f36A7927cF9612e1b40bD6549d08E0513Ff', + 'bandwidth': 200 + }, + 'signature': '747763c69394270603f64af5be3f8256a14b41ff51027e583ee81db9f1f15a01cc8e55218a76139f26dbaaa78d8a537d80d248b3fc6245ecf4602cc5fbb0f6452e', + } + ae1 = CryptosignAuthextra.parse(data_original) + data_marshalled = ae1.marshal() + + # FIXME: marshal check-summed eth addresses + data_original['trustroot'] = data_original['trustroot'].lower() + for k in ['realm', 'delegate', 'seeder']: + data_original['reservation'][k] = data_original['reservation'][k].lower() + + self.assertEqual(data_marshalled, data_original) diff --git a/autobahn/wamp/test/test_wamp_uri_pattern.py b/autobahn/wamp/test/test_wamp_uri_pattern.py index bc6906e0a..d1d0d72d5 100644 --- a/autobahn/wamp/test/test_wamp_uri_pattern.py +++ b/autobahn/wamp/test/test_wamp_uri_pattern.py @@ -60,27 +60,31 @@ def test_parse_uris(self): ("com.myapp.aaa.update", None), ("com.myapp..update", None), ("com.myapp.0.delete", None), - ] - ), + ]), ("com.myapp..update", [ ("com.myapp.box.update", {'product': 'box'}), ("com.myapp.123456.update", {'product': '123456'}), ("com.myapp..update", None), - ] - ), + ]), ("com.myapp..update", [ ("com.myapp.0.update", {'product': '0'}), ("com.myapp.abc.update", {'product': 'abc'}), ("com.myapp..update", None), - ] - ), + ]), ("com.myapp...list", [ ("com.myapp.cosmetic.shampoo.list", {'category': 'cosmetic', 'subcategory': 'shampoo'}), ("com.myapp...list", None), ("com.myapp.cosmetic..list", 
None), ("com.myapp..shampoo.list", None), - ] - ) + ]), + ("eth.pydefi.tradeclock..get_clock_info", [ + ("eth.pydefi.tradeclock.ba3b1e9f-3006-4eae-ae88-cf5896b36342.get_clock_info", + {"clock_oid": "ba3b1e9f-3006-4eae-ae88-cf5896b36342"}), + ]), + ("eth.wamp.network.catalog..owner", [ + ("eth.wamp.network.catalog.0xAA8Cc377db31a354137d8Bb86D0E38495dbD5266.owner", + {"catalog_adr": "0xAA8Cc377db31a354137d8Bb86D0E38495dbD5266"}), + ]), ] for test in tests: pat = Pattern(test[0], Pattern.URI_TARGET_ENDPOINT) @@ -97,7 +101,6 @@ def test_parse_uris(self): class TestDecorators(unittest.TestCase): def test_decorate_endpoint(self): - @wamp.register("com.calculator.square") def square(_): """Do nothing.""" @@ -162,6 +165,7 @@ def circle(name=None, details=None): RegisterOptions(match="wildcard", details_arg="details")) def something(dynamic=None, details=None): """ Do nothing. """ + self.assertTrue(hasattr(something, '_wampuris')) self.assertTrue(type(something._wampuris) == list) self.assertEqual(len(something._wampuris), 1) @@ -176,7 +180,6 @@ def something(dynamic=None, details=None): self.assertEqual(something._wampuris[0]._type, Pattern.URI_TYPE_WILDCARD) def test_decorate_handler(self): - @wamp.subscribe("com.myapp.on_shutdown") def on_shutdown(): """Do nothing.""" @@ -238,7 +241,6 @@ def on_event(event=None, details=None): self.assertEqual(on_event._wampuris[0]._type, Pattern.URI_TYPE_WILDCARD) def test_decorate_exception(self): - @wamp.error("com.myapp.error") class AppError(Exception): """Do nothing.""" @@ -282,7 +284,6 @@ class ObjectInactiveError(Exception): self.assertEqual(ObjectInactiveError._wampuris[0]._type, Pattern.URI_TYPE_WILDCARD) def test_match_decorated_endpoint(self): - @wamp.register("com.calculator.square") def square(x): return x @@ -307,7 +308,6 @@ def update(category=None, cid=None, label=None): self.assertEqual(update(**kwargs), ("product", 123456, "foobar")) def test_match_decorated_handler(self): - @wamp.subscribe("com.myapp.on_shutdown") def 
on_shutdown(): pass @@ -332,7 +332,6 @@ def on_update(category=None, cid=None, label=None): self.assertEqual(on_update(**kwargs), ("product", 123456, "foobar")) def test_match_decorated_exception(self): - @wamp.error("com.myapp.error") class AppError(Exception): @@ -340,8 +339,7 @@ def __init__(self, msg): Exception.__init__(self, msg) def __eq__(self, other): - return self.__class__ == other.__class__ and \ - self.args == other.args + return self.__class__ == other.__class__ and self.args == other.args args, kwargs = AppError._wampuris[0].match("com.myapp.error") # noinspection PyArgumentList @@ -355,9 +353,7 @@ def __init__(self, msg, product=None): self.product = product def __eq__(self, other): - return self.__class__ == other.__class__ and \ - self.args == other.args and \ - self.product == other.product + return self.__class__ == other.__class__ and self.args == other.args and self.product == other.product args, kwargs = ProductInactiveError._wampuris[0].match("com.myapp.product.123456.product_inactive") self.assertEqual(ProductInactiveError("fuck", **kwargs), ProductInactiveError("fuck", 123456)) @@ -371,10 +367,8 @@ def __init__(self, msg, category=None, product=None): self.product = product def __eq__(self, other): - return self.__class__ == other.__class__ and \ - self.args == other.args and \ - self.category == other.category and \ - self.product == other.product + return self.__class__ == other.__class__ and self.args == other.args and \ + self.category == other.category and self.product == other.product args, kwargs = ObjectInactiveError._wampuris[0].match("com.myapp.product.123456.inactive") self.assertEqual(ObjectInactiveError("fuck", **kwargs), ObjectInactiveError("fuck", "product", 123456)) @@ -385,6 +379,7 @@ def __init__(self, *args, **kwargs): Exception.__init__(self, *args) self.kwargs = kwargs + # what if the WAMP error message received # contains args/kwargs that cannot be # consumed by the constructor of the exception @@ -404,11 +399,11 @@ 
def __init__(self): def define(self, exception, error=None): if error is None: - assert(hasattr(exception, '_wampuris')) + assert (hasattr(exception, '_wampuris')) self._ecls_to_uri_pat[exception] = exception._wampuris self._uri_to_ecls[exception._wampuris[0].uri()] = exception else: - assert(not hasattr(exception, '_wampuris')) + assert (not hasattr(exception, '_wampuris')) self._ecls_to_uri_pat[exception] = [Pattern(error, Pattern.URI_TARGET_HANDLER)] self._uri_to_ecls[error] = exception diff --git a/autobahn/wamp/uri.py b/autobahn/wamp/uri.py index ecf3f7f6b..3ebb29fc4 100644 --- a/autobahn/wamp/uri.py +++ b/autobahn/wamp/uri.py @@ -167,6 +167,11 @@ def __init__(self, uri: str, target: int, options: Optional[Union[SubscribeOptio options = None components = uri.split('.') + + _URI_COMP_CHARS = r'[^\s\.#]+' + # _URI_COMP_CHARS = r'[\da-z_]+' + # _URI_COMP_CHARS = r'[a-z0-9][a-z0-9_\-]*' + pl = [] nc = {} group_count = 0 @@ -175,26 +180,25 @@ def __init__(self, uri: str, target: int, options: Optional[Union[SubscribeOptio match = Pattern._URI_NAMED_CONVERTED_COMPONENT.match(component) if match: - ctype = match.groups()[1] - if ctype not in ['string', 'int', 'suffix']: - raise Exception("invalid URI") + name, comp_type = match.groups() + if comp_type not in ['str', 'string', 'int', 'suffix']: + raise TypeError("invalid URI") - if ctype == 'suffix' and i != len(components) - 1: - raise Exception("invalid URI") + if comp_type == 'suffix' and i != len(components) - 1: + raise TypeError("invalid URI") - name = match.groups()[0] if name in nc: - raise Exception("invalid URI") + raise TypeError("invalid URI") - if ctype in ['string', 'suffix']: + if comp_type in ['str', 'string', 'suffix']: nc[name] = str - elif ctype == 'int': + elif comp_type == 'int': nc[name] = int else: # should not arrive here - raise Exception("logic error") + raise TypeError("logic error") - pl.append("(?P<{0}>[a-z0-9_]+)".format(name)) + pl.append("(?P<{}>{})".format(name, _URI_COMP_CHARS)) 
group_count += 1 continue @@ -202,10 +206,10 @@ def __init__(self, uri: str, target: int, options: Optional[Union[SubscribeOptio if match: name = match.groups()[0] if name in nc: - raise Exception("invalid URI") + raise TypeError("invalid URI") nc[name] = str - pl.append("(?P<{0}>[a-z0-9_]+)".format(name)) + pl.append("(?P<{}>{})".format(name, _URI_COMP_CHARS)) group_count += 1 continue @@ -216,11 +220,11 @@ def __init__(self, uri: str, target: int, options: Optional[Union[SubscribeOptio if component == '': group_count += 1 - pl.append(r"([a-z0-9][a-z0-9_\-]*)") + pl.append(r"({})".format(_URI_COMP_CHARS)) nc[group_count] = str continue - raise Exception("invalid URI") + raise TypeError("invalid URI") if nc: # URI pattern @@ -294,7 +298,7 @@ def match(self, uri): kwargs[key] = val return args, kwargs else: - raise Exception("no match") + raise ValueError('no match') @public def is_endpoint(self): diff --git a/autobahn/xbr/__init__.py b/autobahn/xbr/__init__.py index 22a78ba3c..7da0e913e 100644 --- a/autobahn/xbr/__init__.py +++ b/autobahn/xbr/__init__.py @@ -68,7 +68,7 @@ from autobahn.xbr._buyer import SimpleBuyer # noqa from autobahn.xbr._config import load_or_create_profile, UserConfig, Profile # noqa from autobahn.xbr._schema import FbsSchema, FbsObject, FbsType, FbsRPCCall, FbsEnum, FbsService, FbsEnumValue, \ - FbsAttribute, FbsField, FbsRepository # noqa + FbsAttribute, FbsField, FbsRepository # noqa from autobahn.xbr._wallet import stretch_argon2_secret, expand_argon2_secret, pkm_from_argon2_secret # noqa HAS_XBR = True @@ -386,8 +386,6 @@ def account_from_ethkey(ethkey: bytes) -> eth_account.account.Account: 'FbsRPCCall', 'FbsAttribute', 'FbsField', - 'FbsRepository', - 'stretch_argon2_secret', 'expand_argon2_secret', 'pkm_from_argon2_secret', diff --git a/autobahn/xbr/_cli.py b/autobahn/xbr/_cli.py index 2366d3f31..dc91a3903 100644 --- a/autobahn/xbr/_cli.py +++ b/autobahn/xbr/_cli.py @@ -26,19 +26,12 @@ import os import sys -import json import 
pkg_resources -from pprint import pprint from jinja2 import Environment, FileSystemLoader -# https://github.com/google/yapf#example-as-a-module -from yapf.yapflib.yapf_api import FormatCode - from autobahn import xbr from autobahn import __version__ -from autobahn.xbr import FbsType - if not xbr.HAS_XBR: print("\nYou must install the [xbr] extra to use xbrnetwork") @@ -51,7 +44,7 @@ from autobahn.xbr._abi import XBR_DEBUG_TOKEN_ADDR_SRC, XBR_DEBUG_NETWORK_ADDR_SRC, XBR_DEBUG_DOMAIN_ADDR_SRC, \ XBR_DEBUG_CATALOG_ADDR_SRC, XBR_DEBUG_MARKET_ADDR_SRC, XBR_DEBUG_CHANNEL_ADDR_SRC -from autobahn.xbr import FbsSchema, FbsRepository +from autobahn.xbr import FbsRepository import uuid import binascii @@ -68,6 +61,7 @@ import numpy as np import txaio + txaio.use_twisted() from twisted.internet import reactor @@ -133,8 +127,11 @@ def set_ethkey_from_profile(self, profile): :param profile: :return: """ - assert type(profile.ethkey) == bytes, 'set_ethkey_from_profile::profile invalid type "{}" - must be bytes'.format(type(profile.ethkey)) - assert len(profile.ethkey) == 32, 'set_ethkey_from_profile::profile invalid length {} - must be 32'.format(len(profile.ethkey)) + assert type( + profile.ethkey) == bytes, 'set_ethkey_from_profile::profile invalid type "{}" - must be bytes'.format( + type(profile.ethkey)) + assert len(profile.ethkey) == 32, 'set_ethkey_from_profile::profile invalid length {} - must be 32'.format( + len(profile.ethkey)) self._ethkey_raw = profile.ethkey self._ethkey = eth_keys.keys.PrivateKey(self._ethkey_raw) self._ethadr = web3.Web3.toChecksumAddress(self._ethkey.public_key.to_canonical_address()) @@ -165,12 +162,13 @@ def onChallenge(self, challenge): raise RuntimeError('unable to process authentication method {}'.format(challenge.method)) async def onJoin(self, details): - self.log.info('Ok, client joined on realm "{realm}" [session={session}, authid="{authid}", authrole="{authrole}"]', - realm=hlid(details.realm), - session=hlid(details.session), - 
authid=hlid(details.authid), - authrole=hlid(details.authrole), - details=details) + self.log.info( + 'Ok, client joined on realm "{realm}" [session={session}, authid="{authid}", authrole="{authrole}"]', + realm=hlid(details.realm), + session=hlid(details.session), + authid=hlid(details.authid), + authrole=hlid(details.authrole), + details=details) if 'ready' in self.config.extra: txaio.resolve(self.config.extra['ready'], (self, details)) @@ -362,11 +360,12 @@ async def _do_get_actor(self, market_oid, actor_adr): actor_level = actor['level'] actor_balance_eth = web3.Web3.fromWei(unpack_uint256(actor['balance']['eth']), 'ether') actor_balance_xbr = web3.Web3.fromWei(unpack_uint256(actor['balance']['xbr']), 'ether') - self.log.info('Found member with address {member_adr} (member level {member_level}, balances: {member_balance_eth} ETH, {member_balance_xbr} XBR)', - member_adr=hlid(actor_adr), - member_level=hlval(actor_level), - member_balance_eth=hlval(actor_balance_eth), - member_balance_xbr=hlval(actor_balance_xbr)) + self.log.info( + 'Found member with address {member_adr} (member level {member_level}, balances: {member_balance_eth} ETH, {member_balance_xbr} XBR)', + member_adr=hlid(actor_adr), + member_level=hlval(actor_level), + member_balance_eth=hlval(actor_balance_eth), + member_balance_xbr=hlval(actor_balance_xbr)) if market_oid: market_oids = [market_oid.bytes] @@ -382,8 +381,10 @@ async def _do_get_actor(self, market_oid, actor_adr): actor['timestamp'] = np.datetime64(actor['timestamp'], 'ns') actor['joined'] = unpack_uint256(actor['joined']) if actor['joined'] else None actor['market'] = uuid.UUID(bytes=actor['market']) - actor['security'] = web3.Web3.fromWei(unpack_uint256(actor['security']), 'ether') if actor['security'] else None - actor['signature'] = '0x' + binascii.b2a_hex(actor['signature']).decode() if actor['signature'] else None + actor['security'] = web3.Web3.fromWei(unpack_uint256(actor['security']), 'ether') if actor[ + 'security'] else None 
+ actor['signature'] = '0x' + binascii.b2a_hex(actor['signature']).decode() if actor[ + 'signature'] else None actor['tid'] = '0x' + binascii.b2a_hex(actor['tid']).decode() if actor['tid'] else None actor_type = actor['actor_type'] @@ -685,8 +686,9 @@ async def _do_join_market_verify(self, member_oid, vaction_oid, vaction_code): request_verified = await self.call('xbr.network.verify_join_market', vaction_oid.bytes, vaction_code) market_oid = request_verified['market_oid'] actor_type = request_verified['actor_type'] - self.log.info('SUCCESS! XBR market joined: member_oid={member_oid}, market_oid={market_oid}, actor_type={actor_type}', - member_oid=member_oid, market_oid=market_oid, actor_type=actor_type) + self.log.info( + 'SUCCESS! XBR market joined: member_oid={member_oid}, market_oid={market_oid}, actor_type={actor_type}', + member_oid=member_oid, market_oid=market_oid, actor_type=actor_type) async def _do_get_active_payment_channel(self, market_oid, delegate_adr): channel = await self.call('xbr.marketmaker.get_active_payment_channel', delegate_adr) @@ -993,33 +995,30 @@ def _main(): if args.command == 'version': print_version() + # describe schema in WAMP IDL FlatBuffers schema files elif args.command == 'describe-schema': - schema = FbsSchema.load(args.schema) - obj = schema.marshal() - data = json.dumps(obj, - separators=(',', ':'), - ensure_ascii=False, - sort_keys=False, ) - print('json data generated ({} bytes)'.format(len(data))) - for svc_key, svc in schema.services.items(): - print('API "{}"'.format(svc_key)) - for uri in sorted(svc.calls.keys()): - ep = svc.calls[uri] - ep_type = ep.attrs['type'] - print(' {:<10} {:<26}: {}'.format(ep_type, ep.name, ep.docs)) - for obj_name, obj in schema.objs.items(): - print(obj_name) + repo = FbsRepository(basemodule=args.basemodule) + repo.load(args.schema) + + total_count = len(repo.objs) + len(repo.enums) + len(repo.services) + print('ok, loaded {} types ({} structs and tables, {} enums and {} service 
interfaces)'.format( + hlval(total_count), + hlval(len(repo.objs)), + hlval(len(repo.enums)), + hlval(len(repo.services)))) + print() + + repo.print_summary() # generate code from WAMP IDL FlatBuffers schema files - # elif args.command == 'codegen-schema': # load repository from flatbuffers schema files - repo = FbsRepository(render_to_basemodule=args.basemodule) + repo = FbsRepository(basemodule=args.basemodule) repo.load(args.schema) # print repository summary - pprint(repo.summary(keys=True)) + print(repo.summary(keys=True)) # folder with jinja2 templates for python code sections templates = pkg_resources.resource_filename('autobahn', 'xbr/templates') @@ -1032,192 +1031,9 @@ def _main(): if not os.path.isdir(args.output): os.mkdir(args.output) - # type categories in schemata in the repository - # - work = { - 'obj': repo.objs.values(), - 'enum': repo.enums.values(), - 'service': repo.services.values(), - } - - # collect code sections by module - # - code_modules = {} - test_code_modules = {} - is_first_by_category_modules = {} - - for category, values in work.items(): - # generate and collect code for all FlatBuffers items in the given category - # and defined in schemata previously loaded int - - for item in values: - # metadata = item.marshal() - # pprint(item.marshal()) - metadata = item - - # com.example.device.HomeDeviceVendor => com.example.device - modulename = '.'.join(metadata.name.split('.')[0:-1]) - metadata.modulename = modulename - - # com.example.device.HomeDeviceVendor => HomeDeviceVendor - metadata.classname = metadata.name.split('.')[-1].strip() - - # com.example.device => device - metadata.module_relimport = modulename.split('.')[-1] - - is_first = modulename not in code_modules - is_first_by_category = (modulename, category) not in is_first_by_category_modules - - if is_first_by_category: - is_first_by_category_modules[(modulename, category)] = True - - # render template into python code section - if args.language == 'python': - # render 
obj|enum|service.py.jinja2 template - tmpl = env.get_template('{}.py.jinja2'.format(category)) - code = tmpl.render(repo=repo, metadata=metadata, FbsType=FbsType, - render_imports=is_first, - is_first_by_category=is_first_by_category, - render_to_basemodule=args.basemodule) - code = FormatCode(code)[0] - - # render test_obj|enum|service.py.jinja2 template - test_tmpl = env.get_template('test_{}.py.jinja2'.format(category)) - test_code = test_tmpl.render(repo=repo, metadata=metadata, FbsType=FbsType, - render_imports=is_first, - is_first_by_category=is_first_by_category, - render_to_basemodule=args.basemodule) - try: - test_code = FormatCode(test_code)[0] - except Exception as e: - print('error during formatting code:\n{}\n{}'.format(test_code, e)) - - elif args.language == 'json': - code = json.dumps(metadata.marshal(), - separators=(', ', ': '), - ensure_ascii=False, - indent=4, - sort_keys=True) - test_code = None - else: - raise RuntimeError('invalid language "{}" for code generation'.format(args.languages)) - - # collect code sections per-module - if modulename not in code_modules: - code_modules[modulename] = [] - test_code_modules[modulename] = [] - code_modules[modulename].append(code) - if test_code: - test_code_modules[modulename].append(test_code) - else: - test_code_modules[modulename].append(None) - - # ['', 'com.example.bla.blub', 'com.example.doo'] - namespaces = {} - for code_file in code_modules.keys(): - name_parts = code_file.split('.') - for i in range(len(name_parts)): - pn = name_parts[i] - ns = '.'.join(name_parts[:i]) - if ns not in namespaces: - namespaces[ns] = [] - if pn and pn not in namespaces[ns]: - namespaces[ns].append(pn) - - print('Namespaces:\n{}\n'.format(pformat(namespaces))) - - # write out code modules - # - i = 0 - initialized = set() - for code_file, code_sections in code_modules.items(): - code = '\n\n\n'.join(code_sections) - if code_file: - code_file_dir = [''] + code_file.split('.')[0:-1] - else: - code_file_dir = [''] - 
- # FIXME: cleanup this mess - for i in range(len(code_file_dir)): - d = os.path.join(args.output, *(code_file_dir[:i + 1])) - if not os.path.isdir(d): - os.mkdir(d) - if args.language == 'python': - fn = os.path.join(d, '__init__.py') - - _modulename = '.'.join(code_file_dir[:i + 1])[1:] - _imports = namespaces[_modulename] - tmpl = env.get_template('module.py.jinja2') - init_code = tmpl.render(repo=repo, modulename=_modulename, imports=_imports, - render_to_basemodule=args.basemodule) - data = init_code.encode('utf8') - - if not os.path.exists(fn): - with open(fn, 'wb') as f: - f.write(data) - print('Ok, rendered "module.py.jinja2" in {} bytes to "{}"'.format(len(data), fn)) - initialized.add(fn) - else: - with open(fn, 'ab') as f: - f.write(data) - - if args.language == 'python': - if code_file: - code_file_name = '{}.py'.format(code_file.split('.')[-1]) - test_code_file_name = 'test_{}.py'.format(code_file.split('.')[-1]) - else: - code_file_name = '__init__.py' - test_code_file_name = None - elif args.language == 'json': - if code_file: - code_file_name = '{}.json'.format(code_file.split('.')[-1]) - else: - code_file_name = 'init.json' - test_code_file_name = None - else: - code_file_name = None - test_code_file_name = None - - # write out code modules - # - if code_file_name: - data = code.encode('utf8') - - fn = os.path.join(*(code_file_dir + [code_file_name])) - fn = os.path.join(args.output, fn) - - # FIXME - # if fn not in initialized and os.path.exists(fn): - # os.remove(fn) - # with open(fn, 'wb') as fd: - # fd.write('# Generated by Autobahn v{}\n'.format(__version__).encode('utf8')) - # initialized.add(fn) - - with open(fn, 'ab') as fd: - fd.write(data) - - print('Ok, written {} bytes to {}'.format(len(data), fn)) - - # write out unit test code modules - # - if test_code_file_name: - test_code_sections = test_code_modules[code_file] - test_code = '\n\n\n'.join(test_code_sections) - data = test_code.encode('utf8') - - fn = os.path.join(*(code_file_dir + 
[test_code_file_name])) - fn = os.path.join(args.output, fn) - - if fn not in initialized and os.path.exists(fn): - os.remove(fn) - with open(fn, 'wb') as fd: - fd.write('# Copyright (c) ...'.encode('utf8')) - initialized.add(fn) - - with open(fn, 'ab') as fd: - fd.write(data) + # render python source code files + repo.render(env, args.output, 'python') - print('Ok, written {} bytes to {}'.format(len(data), fn)) else: if args.command is None or args.command == 'noop': print('no command given. select from: {}'.format(', '.join(_COMMANDS))) @@ -1266,11 +1082,13 @@ def _main(): 'delegate': binascii.a2b_hex(args.delegate[2:]) if args.delegate else None, 'amount': args.amount or 0, } - runner = ApplicationRunner(url=profile.network_url, realm=profile.network_realm, extra=extra, serializers=[CBORSerializer()]) + runner = ApplicationRunner(url=profile.network_url, realm=profile.network_realm, extra=extra, + serializers=[CBORSerializer()]) try: log.info('Connecting to "{url}" {realm} ..', - url=hlval(profile.network_url), realm=('at realm "' + hlval(profile.network_realm) + '"' if profile.network_realm else '')) + url=hlval(profile.network_url), + realm=('at realm "' + hlval(profile.network_realm) + '"' if profile.network_realm else '')) runner.run(Client, auto_reconnect=False) except Exception as e: print(e) diff --git a/autobahn/xbr/_schema.py b/autobahn/xbr/_schema.py index 89657d217..f1b80da32 100644 --- a/autobahn/xbr/_schema.py +++ b/autobahn/xbr/_schema.py @@ -2,8 +2,7 @@ # # The MIT License (MIT) # -# Copyright (c) 2018 Luis Teixeira -# - copied & modified from https://github.com/vergl4s/ethereum-mnemonic-utils +# Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal @@ -24,21 +23,24 @@ # THE SOFTWARE. 
# ############################################################################### - +import json import os import pprint import hashlib -from typing import Dict, Optional, TypeVar +import textwrap from pathlib import Path +from pprint import pformat +from typing import Dict, List, Optional + +# FIXME +# https://github.com/google/yapf#example-as-a-module +from yapf.yapflib.yapf_api import FormatCode +from autobahn.util import hlval from zlmdb.flatbuffers.reflection.Schema import Schema as _Schema from zlmdb.flatbuffers.reflection.BaseType import BaseType as _BaseType -# https://stackoverflow.com/a/46064289/884770 -T_FbsRepository = TypeVar('T_FbsRepository', bound='FbsRepository') - - class FbsType(object): """ Flatbuffers type. @@ -177,17 +179,27 @@ class FbsType(object): } def __init__(self, - repository: T_FbsRepository, + repository: 'FbsRepository', + schema: 'FbsSchema', basetype: int, element: int, index: int, objtype: Optional[str] = None): self._repository = repository + self._schema = schema self._basetype = basetype self._element = element self._index = index self._objtype = objtype + @property + def repository(self): + return self._repository + + @property + def schema(self): + return self._schema + @property def basetype(self): """ @@ -222,25 +234,33 @@ def objtype(self): :return: """ + if self._basetype == FbsType.Obj: + if self._objtype is None: + self._objtype = self._schema.objs_by_id[self._index].name + # print('filled in missing objtype "{}" for type index {} in object {}'.format(self._objtype, self._index, self)) return self._objtype - def map(self, language: str, attrs: Optional[Dict] = None, required: Optional[bool] = True) -> str: + def map(self, language: str, attrs: Optional[Dict] = None, required: Optional[bool] = True, + objtype_as_string: bool = False) -> str: """ :param language: + :param attrs: + :param required: + :param objtype_as_string: :return: """ if language == 'python': _mapped_type = None if self.basetype == FbsType.Vector: - # 
vectors of uint8 are mapped to byte strings .. + # vectors of uint8 are mapped to byte strings if self.element == FbsType.UByte: if attrs and 'uuid' in attrs: _mapped_type = 'uuid.UUID' else: _mapped_type = 'bytes' - # .. whereas all other vectors are mapped to lists of the same element type + # whereas all other vectors are mapped to list of the same element type else: if self.objtype: # FIXME @@ -265,12 +285,22 @@ def map(self, language: str, attrs: Optional[Dict] = None, required: Optional[bo _mapped_type = FbsType.FBS2PY[self.basetype] else: - raise NotImplementedError('FIXME: implement mapping of FlatBuffers type "{}" to Python in {}'.format(self.basetype, self.map)) - - if required: - return _mapped_type + raise NotImplementedError( + 'FIXME: implement mapping of FlatBuffers type "{}" to Python in {}'.format(self.basetype, self.map)) + + if objtype_as_string and self.basetype == FbsType.Obj: + # for object types, use 'TYPE' rather than TYPE so that the type reference + # does not depend on type declaration order within a single file + # https://peps.python.org/pep-0484/#forward-references + if required: + return "'{}'".format(_mapped_type) + else: + return "Optional['{}']".format(_mapped_type) else: - return 'Optional[{}]'.format(_mapped_type) + if required: + return '{}'.format(_mapped_type) + else: + return 'Optional[{}]'.format(_mapped_type) else: raise RuntimeError('cannot map FlatBuffers type to target language "{}" in {}'.format(language, self.map)) @@ -297,7 +327,8 @@ def __str__(self): class FbsField(object): def __init__(self, - repository: T_FbsRepository, + repository: 'FbsRepository', + schema: 'FbsSchema', name: str, type: FbsType, id: int, @@ -309,6 +340,7 @@ def __init__(self, attrs: Dict[str, FbsAttribute], docs: str): self._repository = repository + self._schema = schema self._name = name self._type = type self._id = id @@ -324,6 +356,10 @@ def __init__(self, def repository(self): return self._repository + @property + def schema(self): + 
return self._schema + @property def name(self): return self._name @@ -406,15 +442,16 @@ def parse_docs(obj): for j in range(obj.DocumentationLength()): doc_line = obj.Documentation(j) if doc_line: - doc_line = doc_line.decode('utf8') + doc_line = doc_line.decode('utf8').strip() docs.append(doc_line) - docs = '\n'.join(docs).strip() + # docs = '\n'.join(docs).strip() + docs = ' '.join(docs).strip() return docs -def parse_fields(repository, obj, objs_lst=None): - fields = {} - fields_by_id = {} +def parse_fields(repository, schema, obj, objs_lst=None): + fields_by_name = {} + fields_by_id = [] for j in range(obj.FieldsLength()): fbs_field = obj.Fields(j) @@ -425,17 +462,21 @@ def parse_fields(repository, obj, objs_lst=None): field_id = int(fbs_field.Id()) fbs_field_type = fbs_field.Type() + # FIXME _objtype = None if fbs_field_type.Index() >= 0: - _obj = objs_lst[fbs_field_type.Index()] - _objtype = _obj.name + if len(objs_lst) > fbs_field_type.Index(): + _obj = objs_lst[fbs_field_type.Index()] + _objtype = _obj.name field_type = FbsType(repository=repository, + schema=schema, basetype=fbs_field_type.BaseType(), element=fbs_field_type.Element(), index=fbs_field_type.Index(), objtype=_objtype) field = FbsField(repository=repository, + schema=schema, name=field_name, type=field_type, id=field_id, @@ -446,18 +487,18 @@ def parse_fields(repository, obj, objs_lst=None): required=fbs_field.Required(), attrs=parse_attr(fbs_field), docs=parse_docs(fbs_field)) - assert field_name not in fields, 'field "{}" with id "{}" already in fields {}'.format(field_name, field_id, sorted(fields.keys())) - fields[field_name] = field - assert field_id not in fields_by_id, 'field "{}" with id " {}" already in fields {}'.format(field_name, field_id, sorted(fields.keys())) - fields_by_id[field_id] = field_name - res = [] - for _, value in sorted(fields_by_id.items()): - res.append(value) - fields_by_id = res - return fields, fields_by_id - - -def parse_calls(repository, svc_obj, 
objs_lst=None): + assert field_name not in fields_by_name, 'field "{}" with id "{}" already in fields {}'.format(field_name, + field_id, + sorted(fields_by_name.keys())) + fields_by_name[field_name] = field + assert field_id not in fields_by_id, 'field "{}" with id " {}" already in fields {}'.format(field_name, + field_id, + sorted(fields_by_id.keys())) + fields_by_id.append(field) + return fields_by_name, fields_by_id + + +def parse_calls(repository, schema, svc_obj, objs_lst=None): calls = {} calls_by_id = {} for j in range(svc_obj.CallsLength()): @@ -475,13 +516,18 @@ def parse_calls(repository, svc_obj, objs_lst=None): call_req_name = fbs_call_req.Name() if call_req_name: call_req_name = call_req_name.decode('utf8') + call_req_declaration_file = fbs_call_req.DeclarationFile() + if call_req_declaration_file: + call_req_declaration_file = call_req_declaration_file.decode('utf8') call_req_is_struct = fbs_call_req.IsStruct() call_req_min_align = fbs_call_req.Minalign() call_req_bytesize = fbs_call_req.Bytesize() call_req_docs = parse_docs(fbs_call_req) call_req_attrs = parse_attr(fbs_call_req) - call_req_fields, call_fields_by_id = parse_fields(repository, fbs_call_req, objs_lst=objs_lst) + call_req_fields, call_fields_by_id = parse_fields(repository, schema, fbs_call_req, objs_lst=objs_lst) call_req = FbsObject(repository=repository, + schema=schema, + declaration_file=call_req_declaration_file, name=call_req_name, fields=call_req_fields, fields_by_id=call_fields_by_id, @@ -495,13 +541,18 @@ def parse_calls(repository, svc_obj, objs_lst=None): call_resp_name = fbs_call_resp.Name() if call_resp_name: call_resp_name = call_resp_name.decode('utf8') + call_resp_declaration_file = fbs_call_resp.DeclarationFile() + if call_resp_declaration_file: + call_resp_declaration_file = call_resp_declaration_file.decode('utf8') call_resp_is_struct = fbs_call_resp.IsStruct() call_resp_min_align = fbs_call_resp.Minalign() call_resp_bytesize = fbs_call_resp.Bytesize() call_resp_docs 
= parse_docs(fbs_call_resp) call_resp_attrs = parse_attr(fbs_call_resp) - call_resp_fields, call_resp_fields_by_id = parse_fields(repository, fbs_call_resp, objs_lst=objs_lst) + call_resp_fields, call_resp_fields_by_id = parse_fields(repository, schema, fbs_call_resp, objs_lst=objs_lst) call_resp = FbsObject(repository=repository, + schema=schema, + declaration_file=call_resp_declaration_file, name=call_resp_name, fields=call_resp_fields, fields_by_id=call_resp_fields_by_id, @@ -513,7 +564,8 @@ def parse_calls(repository, svc_obj, objs_lst=None): call_docs = parse_docs(fbs_call) call_attrs = parse_attr(fbs_call) - call = FbsRPCCall(repository, + call = FbsRPCCall(repository=repository, + schema=schema, name=call_name, id=call_id, request=call_req, @@ -521,9 +573,11 @@ def parse_calls(repository, svc_obj, objs_lst=None): docs=call_docs, attrs=call_attrs) - assert call_name not in calls, 'call "{}" with id "{}" already in calls {}'.format(call_name, call_id, sorted(calls.keys())) + assert call_name not in calls, 'call "{}" with id "{}" already in calls {}'.format(call_name, call_id, + sorted(calls.keys())) calls[call_name] = call - assert call_id not in calls_by_id, 'call "{}" with id " {}" already in calls {}'.format(call_name, call_id, sorted(calls.keys())) + assert call_id not in calls_by_id, 'call "{}" with id " {}" already in calls {}'.format(call_name, call_id, + sorted(calls.keys())) calls_by_id[call_id] = call_name res = [] @@ -535,16 +589,20 @@ def parse_calls(repository, svc_obj, objs_lst=None): class FbsObject(object): def __init__(self, - repository: T_FbsRepository, + repository: 'FbsRepository', + schema: 'FbsSchema', + declaration_file: str, name: str, fields: Dict[str, FbsField], - fields_by_id: Dict[int, str], + fields_by_id: List[FbsField], is_struct: bool, min_align: int, bytesize: int, attrs: Dict[str, FbsAttribute], docs: str): self._repository = repository + self._schema = schema + self._declaration_file = declaration_file self._name = name 
self._fields = fields self._fields_by_id = fields_by_id @@ -554,10 +612,22 @@ def __init__(self, self._attrs = attrs self._docs = docs - def map(self, language: str) -> str: + def map(self, language: str, required: Optional[bool] = True, objtype_as_string: bool = False) -> str: if language == 'python': klass = self._name.split('.')[-1] - return klass + if objtype_as_string: + # for object types, use 'TYPE' rather than TYPE so that the type reference + # does not depend on type declaration order within a single file + # https://peps.python.org/pep-0484/#forward-references + if required: + return "'{}'".format(klass) + else: + return "Optional['{}']".format(klass) + else: + if required: + return '{}'.format(klass) + else: + return 'Optional[{}]'.format(klass) else: raise NotImplementedError() @@ -573,6 +643,14 @@ def map_import(self, language: str) -> str: def repository(self): return self._repository + @property + def schema(self): + return self._schema + + @property + def declaration_file(self): + return self._declaration_file + @property def name(self): return self._name @@ -611,6 +689,7 @@ def __str__(self): def marshal(self): obj = { 'name': self._name, + 'declaration_file': self._declaration_file, 'fields': {}, 'is_struct': self._is_struct, 'min_align': self._min_align, @@ -627,17 +706,24 @@ def marshal(self): return obj @staticmethod - def parse(repository, fbs_obj, objs_lst=None): + def parse(repository, schema, fbs_obj, objs_lst=None): obj_name = fbs_obj.Name() if obj_name: obj_name = obj_name.decode('utf8') + obj_declaration_file = fbs_obj.DeclarationFile() + if obj_declaration_file: + obj_declaration_file = obj_declaration_file.decode('utf8') obj_docs = parse_docs(fbs_obj) obj_attrs = parse_attr(fbs_obj) - obj_fields, obj_fields_by_id = parse_fields(repository, fbs_obj, objs_lst=objs_lst) + + fields_by_name, fields_by_id = parse_fields(repository, schema, fbs_obj, objs_lst=objs_lst) + # print('ok, parsed fields in object "{}": {}'.format(obj_name, 
fields_by_name)) obj = FbsObject(repository=repository, + schema=schema, + declaration_file=obj_declaration_file, name=obj_name, - fields=obj_fields, - fields_by_id=obj_fields_by_id, + fields=fields_by_name, + fields_by_id=fields_by_id, is_struct=fbs_obj.IsStruct(), min_align=fbs_obj.Minalign(), bytesize=fbs_obj.Bytesize(), @@ -648,7 +734,8 @@ def parse(repository, fbs_obj, objs_lst=None): class FbsRPCCall(object): def __init__(self, - repository: T_FbsRepository, + repository: 'FbsRepository', + schema: 'FbsSchema', name: str, id: int, request: FbsObject, @@ -656,6 +743,7 @@ def __init__(self, docs: str, attrs: Dict[str, FbsAttribute]): self._repository = repository + self._schema = schema self._name = name self._id = id self._request = request @@ -667,6 +755,10 @@ def __init__(self, def repository(self): return self._repository + @property + def schema(self): + return self._schema + @property def name(self): return self._name @@ -710,13 +802,17 @@ def marshal(self): class FbsService(object): def __init__(self, - repository: T_FbsRepository, + repository: 'FbsRepository', + schema: 'FbsSchema', + declaration_file: str, name: str, calls: Dict[str, FbsRPCCall], - calls_by_id: Dict[int, str], + calls_by_id: List[FbsRPCCall], attrs: Dict[str, FbsAttribute], docs: str): self._repository = repository + self._schema = schema + self._declaration_file = declaration_file self._name = name self._calls = calls self._calls_by_id = calls_by_id @@ -727,6 +823,14 @@ def __init__(self, def repository(self): return self._repository + @property + def schema(self): + return self._schema + + @property + def declaration_file(self): + return self._declaration_file + @property def name(self): return self._name @@ -753,6 +857,7 @@ def __str__(self): def marshal(self): obj = { 'name': self._name, + 'declaration_file': self._declaration_file, 'calls': {}, 'attrs': {}, 'docs': self._docs, @@ -767,8 +872,21 @@ def marshal(self): class FbsEnumValue(object): - def __init__(self, repository, 
name, value, docs): + def __init__(self, + repository: 'FbsRepository', + schema: 'FbsSchema', + name, + value, + docs): + """ + + :param repository: + :param name: + :param value: + :param docs: + """ self._repository = repository + self._schema = schema self._name = name self._value = value self._attrs = {} @@ -778,6 +896,10 @@ def __init__(self, repository, name, value, docs): def repository(self): return self._repository + @property + def schema(self): + return self._schema + @property def name(self): return self._name @@ -814,8 +936,11 @@ class FbsEnum(object): """ FlatBuffers enum type. """ + def __init__(self, - repository: T_FbsRepository, + repository: 'FbsRepository', + schema: 'FbsSchema', + declaration_file: str, name: str, values: Dict[str, FbsEnumValue], is_union: bool, @@ -823,6 +948,8 @@ def __init__(self, attrs: Dict[str, FbsAttribute], docs: str): self._repository = repository + self._schema = schema + self._declaration_file = declaration_file self._name = name self._values = values self._is_union = is_union @@ -836,6 +963,14 @@ def __init__(self, def repository(self): return self._repository + @property + def schema(self): + return self._schema + + @property + def declaration_file(self): + return self._declaration_file + @property def name(self): return self._name @@ -884,18 +1019,23 @@ def marshal(self): class FbsSchema(object): """ """ + def __init__(self, - repository: T_FbsRepository, + repository: 'FbsRepository', file_name: str, file_sha256: str, file_size: int, file_ident: str, file_ext: str, + fbs_files: List[Dict[str, str]], root_table: FbsObject, root: _Schema, - objs: Dict[str, FbsObject], - enums: Dict[str, FbsEnum], - services: Dict[str, FbsService]): + objs: Optional[Dict[str, FbsObject]] = None, + objs_by_id: Optional[List[FbsObject]] = None, + enums: Optional[Dict[str, FbsEnum]] = None, + enums_by_id: Optional[List[FbsEnum]] = None, + services: Optional[Dict[str, FbsService]] = None, + services_by_id: Optional[List[FbsService]] = 
None): """ :param repository: @@ -904,11 +1044,15 @@ def __init__(self, :param file_size: :param file_ident: :param file_ext: + :param fbs_files: :param root_table: :param root: :param objs: + :param objs_by_id: :param enums: + :param enums_by_id: :param services: + :param services_by_id: """ self._repository = repository self._file_name = file_name @@ -916,11 +1060,15 @@ def __init__(self, self._file_size = file_size self._file_ident = file_ident self._file_ext = file_ext + self._fbs_files = fbs_files self._root_table = root_table self._root = root self._objs = objs + self._objs_by_id = objs_by_id self._enums = enums + self._enums_by_id = enums_by_id self._services = services + self._services_by_id = services_by_id @property def repository(self): @@ -946,6 +1094,10 @@ def file_ident(self): def file_ext(self): return self._file_ext + @property + def fbs_files(self): + return self._fbs_files + @property def root_table(self): return self._root_table @@ -958,14 +1110,26 @@ def root(self): def objs(self): return self._objs + @property + def objs_by_id(self): + return self._objs_by_id + @property def enums(self): return self._enums + @property + def enums_by_id(self): + return self._enums_by_id + @property def services(self): return self._services + @property + def services_by_id(self): + return self._services_by_id + def __str__(self): return '\n{}\n'.format(pprint.pformat(self.marshal(), width=255)) @@ -979,6 +1143,7 @@ def marshal(self) -> Dict[str, object]: 'ident': self._file_ident, 'ext': self._file_ext, 'name': os.path.basename(self._file_name) if self._file_name else None, + 'files': self._fbs_files, 'sha256': self._file_sha256, 'size': self._file_size, 'objects': len(self._objs), @@ -1002,9 +1167,10 @@ def marshal(self) -> Dict[str, object]: return obj @staticmethod - def load(repository, filename) -> object: + def load(repository, filename) -> 'FbsSchema': """ + :param repository: :param filename: :return: """ @@ -1012,8 +1178,12 @@ def load(repository, 
filename) -> object: raise RuntimeError('cannot open schema file {}'.format(filename)) with open(filename, 'rb') as fd: data = fd.read() + m = hashlib.sha256() + m.update(data) + print('loading schema file "{}" ({} bytes, SHA256 0x{})'.format(filename, len(data), m.hexdigest())) - print('processing schema {} ({} bytes) ..'.format(filename, len(data))) + # get root object in Flatbuffers reflection schema + # see: https://github.com/google/flatbuffers/blob/master/reflection/reflection.fbs root = _Schema.GetRootAsSchema(data, 0) file_ident = root.FileIdent() @@ -1024,15 +1194,43 @@ def load(repository, filename) -> object: if file_ext is not None: file_ext = file_ext.decode('utf8') + fbs_files = [] + for i in range(root.FbsFilesLength()): + # zlmdb.flatbuffers.reflection.SchemaFile.SchemaFile + schema_file = root.FbsFiles(i) + schema_file_filename = schema_file.Filename() + if schema_file_filename: + schema_file_filename = schema_file_filename.decode('utf8') + schema_file_included_filenames = [] + for j in range(schema_file.IncludedFilenamesLength()): + included_filename = schema_file.IncludedFilenames(j) + if included_filename: + included_filename = included_filename.decode('utf8') + schema_file_included_filenames.append(included_filename) + fbs_files.append( + { + 'filename': schema_file_filename, + 'included_filenames': schema_file_included_filenames, + } + ) + root_table = root.RootTable() if root_table is not None: root_table = FbsObject.parse(repository, root_table) - objs = {} - objs_lst = [] - services = {} - enums = {} + schema = FbsSchema(repository=repository, + file_name=filename, + file_size=len(data), + file_sha256=m.hexdigest(), + file_ident=file_ident, + file_ext=file_ext, + fbs_files=fbs_files, + root_table=root_table, + root=root) + # enum types from the schema by name and by index + enums = {} + enums_by_id = [] for i in range(root.EnumsLength()): fbs_enum = root.Enums(i) @@ -1040,6 +1238,10 @@ def load(repository, filename) -> object: if enum_name: 
enum_name = enum_name.decode('utf8') + enum_declaration_file = fbs_enum.DeclarationFile() + if enum_declaration_file: + enum_declaration_file = enum_declaration_file.decode('utf8') + enum_underlying_type = fbs_enum.UnderlyingType() enum_values = {} @@ -1051,6 +1253,7 @@ def load(repository, filename) -> object: enum_value_value = fbs_enum_value.Value() enum_value_docs = parse_docs(fbs_enum_value) enum_value = FbsEnumValue(repository=repository, + schema=schema, name=enum_value_name, value=enum_value_value, docs=enum_value_docs) @@ -1058,6 +1261,8 @@ def load(repository, filename) -> object: enum_values[enum_value_name] = enum_value enum = FbsEnum(repository=repository, + schema=schema, + declaration_file=enum_declaration_file, name=enum_name, values=enum_values, is_union=fbs_enum.IsUnion(), @@ -1066,14 +1271,26 @@ def load(repository, filename) -> object: docs=parse_docs(fbs_enum)) assert enum_name not in enums enums[enum_name] = enum + enums_by_id.append(enum) + schema._enums = enums + schema._enums_by_id = enums_by_id + # type objects (structs and tables) from the schema by name and by index + objs = {} + objs_by_id = [] for i in range(root.ObjectsLength()): fbs_obj = root.Objects(i) - obj = FbsObject.parse(repository, fbs_obj, objs_lst=objs_lst) + obj = FbsObject.parse(repository, schema, fbs_obj, objs_lst=objs_by_id) assert obj.name not in objs objs[obj.name] = obj - objs_lst.append(obj) + objs_by_id.append(obj) + # print('ok, processed schema object "{}"'.format(obj.name)) + schema._objs = objs + schema._objs_by_id = objs_by_id + # service type objects (interfaces) from the schema by name and by index + services = {} + services_by_id = [] for i in range(root.ServicesLength()): svc_obj = root.Services(i) @@ -1081,11 +1298,17 @@ def load(repository, filename) -> object: if svc_name: svc_name = svc_name.decode('utf8') + svc_declaration_file = svc_obj.DeclarationFile() + if svc_declaration_file: + svc_declaration_file = svc_declaration_file.decode('utf8') + docs = 
parse_docs(svc_obj) attrs = parse_attr(svc_obj) - calls, calls_by_id = parse_calls(repository, svc_obj, objs_lst=objs_lst) + calls, calls_by_id = parse_calls(repository, schema, svc_obj, objs_lst=objs_by_id) service = FbsService(repository=repository, + schema=schema, + declaration_file=svc_declaration_file, name=svc_name, calls=calls, calls_by_id=calls_by_id, @@ -1093,61 +1316,46 @@ def load(repository, filename) -> object: docs=docs) assert svc_name not in services services[svc_name] = service + services_by_id.append(service) + schema._services = services + schema._services_by_id = services_by_id - m = hashlib.sha256() - m.update(data) - - schema = FbsSchema(repository=repository, - file_name=filename, - file_size=len(data), - file_sha256=m.hexdigest(), - file_ident=file_ident, - file_ext=file_ext, - root_table=root_table, - root=root, - objs=objs, - enums=enums, - services=services) return schema class FbsRepository(object): """ - """ + crossbar.interfaces.IRealmInventory + - add: FbsRepository[] + - load: FbsSchema[] - def __init__(self, render_to_basemodule): - self._render_to_basemodule = render_to_basemodule - - self._schemata = {} + https://github.com/google/flatbuffers/blob/master/reflection/reflection.fbs + """ - # Dict[str, FbsObject] - self._objs = {} + def __init__(self, basemodule: str): + self._basemodule = basemodule + self._schemata: Dict[str, FbsSchema] = {} + self._objs: Dict[str, FbsObject] = {} + self._enums: Dict[str, FbsEnum] = {} + self._services: Dict[str, FbsService] = {} - # Dict[str, FbsEnum] - self._enums = {} + @staticmethod + def from_archive(filename: str) -> 'FbsRepository': + catalog = FbsRepository() + return catalog - # Dict[str, FbsService] - self._services = {} + @staticmethod + def from_address(address: str) -> 'FbsRepository': + catalog = FbsRepository() + return catalog - def summary(self, keys=False): - if keys: - return { - 'schemata': sorted(self._schemata.keys()), - 'objs': sorted(self._objs.keys()), - 'enums': 
sorted(self._enums.keys()), - 'services': sorted(self._services.keys()), - } - else: - return { - 'schemata': len(self._schemata), - 'objs': len(self._objs), - 'enums': len(self._enums), - 'services': len(self._services), - } + @property + def basemodule(self): + return self._basemodule @property - def render_to_basemodule(self): - return self._render_to_basemodule + def schemata(self): + return self._schemata @property def objs(self): @@ -1161,42 +1369,425 @@ def enums(self): def services(self): return self._services - def load(self, dirname) -> object: - if not os.path.isdir(dirname): - raise RuntimeError('cannot open schema directory {}'.format(dirname)) + def load(self, filename: str): + """ + Load and add all schemata from Flatbuffers binary schema files (`*.bfbs`) + found in the given directory. Alternatively, a path to a single schema file + can be provided. - found = [] - for path in Path(dirname).rglob('*.bfbs'): - fn = os.path.abspath(os.path.join(dirname, path.name)) - if fn not in self._schemata: - found.append(fn) + :param filename: Filesystem path of a directory or single file from which to + load and add Flatbuffers schemata. 
+ """ + load_from_filenames = [] + if os.path.isdir(filename): + for path in Path(filename).rglob('*.bfbs'): + fn = os.path.join(filename, path.name) + if fn not in self._schemata: + load_from_filenames.append(fn) + else: + print('duplicate schema file skipped ("{}" already loaded)'.format(fn)) + elif os.path.isfile(filename): + if filename not in self._schemata: + load_from_filenames.append(filename) else: - print('duplicate schema: {} already loaded'.format(fn)) + print('duplicate schema file skipped ("{}" already loaded)'.format(filename)) + elif ',' in filename: + for filename_single in filename.split(','): + filename_single = os.path.expanduser(filename_single) + # filename_single = os.path.expandvars(filename_single) + if os.path.isfile(filename_single): + if filename_single not in self._schemata: + load_from_filenames.append(filename_single) + else: + print('duplicate schema file skipped ("{}" already loaded)'.format(filename_single)) + else: + raise RuntimeError('"{}" in list is not a file'.format(filename_single)) + else: + raise RuntimeError('cannot open schema file or directory: "{}"'.format(filename)) # iterate over all schema files found - for fn in found: + for fn in load_from_filenames: # load this schema file - schema = FbsSchema.load(self, fn) + schema: FbsSchema = FbsSchema.load(self, fn) - # add enum types + # add enum types to repository by name for enum in schema.enums.values(): if enum.name in self._enums: - print('duplicate enum for name "{}"'.format(enum.name)) + print('skipping duplicate enum type for name "{}"'.format(enum.name)) else: self._enums[enum.name] = enum # add object types for obj in schema.objs.values(): if obj.name in self._objs: - print('duplicate object for name "{}"'.format(obj.name)) + print('skipping duplicate object (table/struct) type for name "{}"'.format(obj.name)) else: self._objs[obj.name] = obj # add service definitions ("APIs") for svc in schema.services.values(): if svc.name in self._services: - print('duplicate 
service for name "{}"'.format(svc.name)) + print('skipping duplicate service type for name "{}"'.format(svc.name)) else: self._services[svc.name] = svc self._schemata[fn] = schema + + def summary(self, keys=False): + if keys: + return { + 'schemata': sorted(self._schemata.keys()), + 'objs': sorted(self._objs.keys()), + 'enums': sorted(self._enums.keys()), + 'services': sorted(self._services.keys()), + } + else: + return { + 'schemata': len(self._schemata), + 'objs': len(self._objs), + 'enums': len(self._enums), + 'services': len(self._services), + } + + def print_summary(self): + # brown = (160, 110, 50) + # brown = (133, 51, 51) + brown = (51, 133, 255) + # steel_blue = (70, 130, 180) + orange = (255, 127, 36) + # deep_pink = (255, 20, 147) + # light_pink = (255, 102, 204) + # pink = (204, 82, 163) + pink = (127, 127, 127) + + for obj_key, obj in self.objs.items(): + prefix_uri = obj.attrs.get('uri', self._basemodule) + obj_name = obj_key.split('.')[-1] + obj_color = 'blue' if obj.is_struct else brown + obj_label = '{} {}'.format('Struct' if obj.is_struct else 'Table', obj_name) + print('{}\n'.format(hlval(' {} {} {}'.format('====', obj_label, '=' * (118 - len(obj_label))), + color=obj_color))) + # print(' {} {} {}\n'.format(obj_kind, hlval(obj_name, color=obj_color), '=' * (120 - len(obj_name)))) + + if prefix_uri: + print(' Type URI: {}.{}'.format(hlval(prefix_uri), hlval(obj_name))) + else: + print(' Type URI: {}'.format(hlval(obj_name))) + print() + print(textwrap.fill(obj.docs, + width=100, + initial_indent=' ', + subsequent_indent=' ', + expand_tabs=True, + replace_whitespace=True, + fix_sentence_endings=False, + break_long_words=True, + drop_whitespace=True, + break_on_hyphens=True, + tabsize=4)) + print() + for field in obj.fields_by_id: + docs = textwrap.wrap(field.docs, + width=70, + initial_indent='', + subsequent_indent='', + expand_tabs=True, + replace_whitespace=True, + fix_sentence_endings=False, + break_long_words=True, + drop_whitespace=True, + 
break_on_hyphens=True, + tabsize=4) + if field.type.basetype == FbsType.Obj: + type_desc_str = field.type.objtype.split('.')[-1] + if self.objs[field.type.objtype].is_struct: + type_desc = hlval(type_desc_str, color='blue') + else: + type_desc = hlval(type_desc_str, color=brown) + elif field.type.basetype == FbsType.Vector: + type_desc_str = 'Vector[{}]'.format(FbsType.FBS2STR[field.type.element]) + type_desc = hlval(type_desc_str, color='white') + else: + type_desc_str = FbsType.FBS2STR[field.type.basetype] + type_desc = hlval(type_desc_str, color='white') + + if field.attrs: + attrs_text_str = '(' + ', '.join(field.attrs.keys()) + ')' + attrs_text = hlval(attrs_text_str, color=pink) + type_text_str = ' '.join([type_desc_str, attrs_text_str]) + type_text = ' '.join([type_desc, attrs_text]) + else: + type_text_str = type_desc_str + type_text = type_desc + + # print('>>', len(type_text_str), len(type_text)) + + print(' {:<30} {} {}'.format(hlval(field.name), + type_text + ' ' * (34 - len(type_text_str)), + docs[0] if docs else '')) + for line in docs[1:]: + print(' ' * 57 + line) + print() + + for svc_key, svc in self.services.items(): + prefix_uri = svc.attrs.get('uri', self._basemodule) + ifx_uuid = svc.attrs.get('uuid', None) + ifc_name = svc_key.split('.')[-1] + ifc_label = 'Interface {}'.format(ifc_name) + print('{}\n'.format(hlval(' {} {} {}'.format('====', ifc_label, '=' * (118 - len(ifc_label))), + color='yellow'))) + print(' Interface UUID: {}'.format(hlval(ifx_uuid))) + print(' Interface URIs: {}.({}|{})'.format(hlval(prefix_uri), hlval('procedure', color=orange), + hlval('topic', color='green'))) + print() + print(textwrap.fill(svc.docs, + width=100, + initial_indent=' ', + subsequent_indent=' ', + expand_tabs=True, + replace_whitespace=True, + fix_sentence_endings=False, + break_long_words=True, + drop_whitespace=True, + break_on_hyphens=True, + tabsize=4)) + for uri in svc.calls.keys(): + print() + ep: FbsRPCCall = svc.calls[uri] + ep_type = 
ep.attrs['type'] + ep_color = {'topic': 'green', 'procedure': orange}.get(ep_type, 'white') + # uri_long = '{}.{}'.format(hlval(prefix_uri, color=(127, 127, 127)), + # hlval(ep.attrs.get('uri', ep.name), color='white')) + uri_short = '{}'.format(hlval(ep.attrs.get('uri', ep.name), color=(255, 255, 255))) + print(' {} {} ({}) -> {}'.format(hlval(ep_type, color=ep_color), + uri_short, + hlval(ep.request.name.split('.')[-1], color='blue', bold=False), + hlval(ep.response.name.split('.')[-1], color='blue', bold=False))) + print() + print(textwrap.fill(ep.docs, + width=90, + initial_indent=' ', + subsequent_indent=' ', + expand_tabs=True, + replace_whitespace=True, + fix_sentence_endings=False, + break_long_words=True, + drop_whitespace=True, + break_on_hyphens=True, + tabsize=4)) + print() + + def render(self, jinja2_env, output_dir, output_lang): + """ + + :param jinja2_env: + :param output_dir: + :param output_lang: + :return: + """ + # type categories in schemata in the repository + # + work = { + 'obj': self.objs.values(), + 'enum': self.enums.values(), + 'service': self.services.values(), + } + + # collect code sections by module + # + code_modules = {} + test_code_modules = {} + is_first_by_category_modules = {} + + for category, values in work.items(): + # generate and collect code for all FlatBuffers items in the given category + # and defined in schemata previously loaded int + + for item in values: + # metadata = item.marshal() + # pprint(item.marshal()) + metadata = item + + # com.example.device.HomeDeviceVendor => com.example.device + modulename = '.'.join(metadata.name.split('.')[0:-1]) + metadata.modulename = modulename + + # com.example.device.HomeDeviceVendor => HomeDeviceVendor + metadata.classname = metadata.name.split('.')[-1].strip() + + # com.example.device => device + metadata.module_relimport = modulename.split('.')[-1] + + is_first = modulename not in code_modules + is_first_by_category = (modulename, category) not in 
is_first_by_category_modules + + if is_first_by_category: + is_first_by_category_modules[(modulename, category)] = True + + # render template into python code section + if output_lang == 'python': + # render obj|enum|service.py.jinja2 template + tmpl = jinja2_env.get_template('py-autobahn/{}.py.jinja2'.format(category)) + code = tmpl.render(repo=self, metadata=metadata, FbsType=FbsType, + render_imports=is_first, + is_first_by_category=is_first_by_category, + render_to_basemodule=self.basemodule) + + # FIXME + # code = FormatCode(code)[0] + + # render test_obj|enum|service.py.jinja2 template + test_tmpl = jinja2_env.get_template('py-autobahn/test_{}.py.jinja2'.format(category)) + test_code = test_tmpl.render(repo=self, metadata=metadata, FbsType=FbsType, + render_imports=is_first, + is_first_by_category=is_first_by_category, + render_to_basemodule=self.basemodule) + + elif output_lang == 'eip712': + # render obj|enum|service-eip712.sol.jinja2 template + tmpl = jinja2_env.get_template('sol-eip712/{}-eip712.sol.jinja2'.format(category)) + code = tmpl.render(repo=self, metadata=metadata, FbsType=FbsType, + render_imports=is_first, + is_first_by_category=is_first_by_category, + render_to_basemodule=self.basemodule) + + # FIXME + # code = FormatCode(code)[0] + + test_tmpl = None + test_code = None + + elif output_lang == 'json': + code = json.dumps(metadata.marshal(), + separators=(', ', ': '), + ensure_ascii=False, + indent=4, + sort_keys=True) + test_code = None + else: + raise RuntimeError('invalid language "{}" for code generation'.format(output_lang)) + + # collect code sections per-module + if modulename not in code_modules: + code_modules[modulename] = [] + test_code_modules[modulename] = [] + code_modules[modulename].append(code) + if test_code: + test_code_modules[modulename].append(test_code) + else: + test_code_modules[modulename].append(None) + + # ['', 'com.example.bla.blub', 'com.example.doo'] + namespaces = {} + for code_file in code_modules.keys(): + 
name_parts = code_file.split('.') + for i in range(len(name_parts)): + pn = name_parts[i] + ns = '.'.join(name_parts[:i]) + if ns not in namespaces: + namespaces[ns] = [] + if pn and pn not in namespaces[ns]: + namespaces[ns].append(pn) + + print('Namespaces:\n{}\n'.format(pformat(namespaces))) + + # write out code modules + # + i = 0 + initialized = set() + for code_file, code_sections in code_modules.items(): + code = '\n\n\n'.join(code_sections) + if code_file: + code_file_dir = [''] + code_file.split('.')[0:-1] + else: + code_file_dir = [''] + + # FIXME: cleanup this mess + for i in range(len(code_file_dir)): + d = os.path.join(output_dir, *(code_file_dir[:i + 1])) + if not os.path.isdir(d): + os.mkdir(d) + if output_lang == 'python': + fn = os.path.join(d, '__init__.py') + + _modulename = '.'.join(code_file_dir[:i + 1])[1:] + _imports = namespaces[_modulename] + tmpl = jinja2_env.get_template('py-autobahn/module.py.jinja2') + init_code = tmpl.render(repo=self, modulename=_modulename, imports=_imports, + render_to_basemodule=self.basemodule) + data = init_code.encode('utf8') + + if not os.path.exists(fn): + with open(fn, 'wb') as f: + f.write(data) + print('Ok, rendered "module.py.jinja2" in {} bytes to "{}"'.format(len(data), fn)) + initialized.add(fn) + else: + with open(fn, 'ab') as f: + f.write(data) + + if output_lang == 'python': + if code_file: + code_file_name = '{}.py'.format(code_file.split('.')[-1]) + test_code_file_name = 'test_{}.py'.format(code_file.split('.')[-1]) + else: + code_file_name = '__init__.py' + test_code_file_name = None + elif output_lang == 'json': + if code_file: + code_file_name = '{}.json'.format(code_file.split('.')[-1]) + else: + code_file_name = 'init.json' + test_code_file_name = None + else: + code_file_name = None + test_code_file_name = None + + # write out code modules + # + if code_file_name: + try: + code = FormatCode(code)[0] + except Exception as e: + print('error during formatting code: {}'.format(e)) + data = 
code.encode('utf8') + + fn = os.path.join(*(code_file_dir + [code_file_name])) + fn = os.path.join(output_dir, fn) + + # FIXME + # if fn not in initialized and os.path.exists(fn): + # os.remove(fn) + # with open(fn, 'wb') as fd: + # fd.write('# Generated by Autobahn v{}\n'.format(__version__).encode('utf8')) + # initialized.add(fn) + + with open(fn, 'ab') as fd: + fd.write(data) + + print('Ok, written {} bytes to {}'.format(len(data), fn)) + + # write out unit test code modules + # + if test_code_file_name: + test_code_sections = test_code_modules[code_file] + test_code = '\n\n\n'.join(test_code_sections) + try: + test_code = FormatCode(test_code)[0] + except Exception as e: + print('error during formatting code: {}'.format(e)) + data = test_code.encode('utf8') + + fn = os.path.join(*(code_file_dir + [test_code_file_name])) + fn = os.path.join(output_dir, fn) + + if fn not in initialized and os.path.exists(fn): + os.remove(fn) + with open(fn, 'wb') as fd: + fd.write('# Copyright (c) ...\n'.encode('utf8')) + initialized.add(fn) + + with open(fn, 'ab') as fd: + fd.write(data) + + print('Ok, written {} bytes to {}'.format(len(data), fn)) diff --git a/autobahn/xbr/templates/enum.py.jinja2 b/autobahn/xbr/templates/py-autobahn/enum.py.jinja2 similarity index 100% rename from autobahn/xbr/templates/enum.py.jinja2 rename to autobahn/xbr/templates/py-autobahn/enum.py.jinja2 diff --git a/autobahn/xbr/templates/module.py.jinja2 b/autobahn/xbr/templates/py-autobahn/module.py.jinja2 similarity index 100% rename from autobahn/xbr/templates/module.py.jinja2 rename to autobahn/xbr/templates/py-autobahn/module.py.jinja2 diff --git a/autobahn/xbr/templates/py-autobahn/obj.py.jinja2 b/autobahn/xbr/templates/py-autobahn/obj.py.jinja2 new file mode 100644 index 000000000..09ee49b76 --- /dev/null +++ b/autobahn/xbr/templates/py-autobahn/obj.py.jinja2 @@ -0,0 +1,360 @@ +{% if is_first_by_category %} +## +## object types +## + +{% endif %} +{% if render_imports %} +import uuid +import 
pprint +from typing import Dict, List, Optional + +from autobahn.wamp.request import Publication, Subscription, Registration + +import flatbuffers +from flatbuffers.compat import import_numpy +np = import_numpy() + +{% endif %} + + +class {{ metadata.classname }}(object): + """ + {{ metadata.docs }} + """ + __slots__ = ['_tab', {% for field in metadata.fields_by_id %}'_{{ field.name }}', {% endfor %}] + + def __init__(self, {% for field in metadata.fields_by_id %}{{ field.name }}: {{ field.type.map('python', field.attrs, required=False, objtype_as_string=True) }} = None, {% endfor %}): + # the underlying FlatBuffers vtable + self._tab = None + + {% for field in metadata.fields_by_id %} + # {{ field.docs }} + self._{{ field.name }}: {{ field.type.map('python', field.attrs, required=False, objtype_as_string=True) }} = {{ field.name }} + + {% endfor %} + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + {% for field in metadata.fields_by_id %} + if other.{{ field.name }} != self.{{ field.name }}: + return False + {% endfor %} + return True + + def __ne__(self, other): + return not self.__eq__(other) + + {% for field in metadata.fields_by_id %} + @property + def {{ field.name }}(self) -> {{ field.type.map('python', field.attrs, required=False, objtype_as_string=True) }}: + """ + {{ field.docs }} + """ + if self._{{ field.name }} is None and self._tab: + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset({{ field.offset }})) + + {% if field.type.map('python', field.attrs, True) == 'str' %} + # access type "string" attribute: + value = '' + if o != 0: + _value = self._tab.String(o + self._tab.Pos) + if _value is not None: + value = _value.decode('utf8') + + {% elif field.type.map('python', field.attrs, True) == 'bytes' %} + # access type "bytes" attribute: + value = b'' + if o != 0: + _off = self._tab.Vector(o) + _len = self._tab.VectorLen(o) + _value = memoryview(self._tab.Bytes)[_off:_off + _len] + if _value is not 
None: + value = _value + + {% elif field.type.map('python', field.attrs, True) in ['int', 'float', 'double'] %} + # access type "int|float|double" attribute: + value = 0 + if o != 0: + _value = self._tab.Get(flatbuffers.number_types.{{ FbsType.FBS2FLAGS[field.type.basetype] }}, o + self._tab.Pos) + if _value is not None: + value = _value + + {% elif field.type.map('python', field.attrs, True) == 'bool' %} + # access type "bool" attribute: + value = False + if o != 0: + _value = self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos) + if _value is not None: + value = _value + + {% elif field.type.map('python', field.attrs, True) == 'uuid.UUID' %} + # access type "uuid.UUID" attribute: + value = uuid.UUID(bytes=b'\x00' * 16) + if o != 0: + _off = self._tab.Vector(o) + _len = self._tab.VectorLen(o) + _value = memoryview(self._tab.Bytes)[_off:_off + _len] + if _value is not None: + value = uuid.UUID(bytes=bytes(_value)) + + {% elif field.type.map('python', field.attrs, True) == 'np.datetime64' %} + # access type "np.datetime64" attribute: + value = np.datetime64(0, 'ns') + if o != 0: + _value = self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + if _value is not None: + value = np.datetime64(_value, 'ns') + + {% elif field.type.basetype == FbsType.Vector %} + # access type "Vector" attribute: + value = [] + if o != 0: + _start_off = self._tab.Vector(o) + _len = self._tab.VectorLen(o) + for j in range(_len): + _off = _start_off + flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + _off = self._tab.Indirect(_off) + {% if False and field.type.element == FbsType.Obj %} + _value = {{ field.type.element.split('.')[-1] }}.cast(self._tab.Bytes, _off) + {% else %} + # FIXME [8] + _value = {{ field.type.element }}() + {% endif %} + value.append(_value) + + {% elif field.type.basetype == FbsType.Obj %} + # access type "Object" attribute: + + {% if field.type.objtype %} + value = {{ field.type.objtype.split('.')[-1] }}() + if o != 0: + _off = 
self._tab.Indirect(o + self._tab.Pos) + value = {{ field.type.objtype.split('.')[-1] }}.cast(self._tab.Bytes, _off) + {% else %} + # FIXME [9]: objtype of field "{{ field.name }}" is None + value = '' + {% endif %} + + {% else %} + # FIXME [5] + raise NotImplementedError('implement processing [5] of FlatBuffers type "{}"'.format({{ field.type.map('python', field.attrs, True) }})) + {% endif %} + assert value is not None + self._{{ field.name }} = value + return self._{{ field.name }} + + @{{ field.name }}.setter + def {{ field.name }}(self, value: {{ field.type.map('python', field.attrs, required=False, objtype_as_string=True) }}): + if value is not None: + self._{{ field.name }} = value + else: + {% if field.type.map('python', field.attrs, True) == 'str' %} + # set default value on type "string" attribute: + self._{{ field.name }} = '' + {% elif field.type.map('python', field.attrs, True) == 'bytes' %} + # set default value on type "bytes" attribute: + self._{{ field.name }} = b'' + {% elif field.type.map('python', field.attrs, True) in ['int', 'float', 'double'] %} + # set default value on type "int|float|double" attribute: + self._{{ field.name }} = 0 + {% elif field.type.map('python', field.attrs, True) == 'bool' %} + # set default value on type "bool" attribute: + self._{{ field.name }} = False + {% elif field.type.map('python', field.attrs, True) == 'uuid.UUID' %} + # set default value on type "uuid.UUID" attribute: + self._{{ field.name }} = uuid.UUID(bytes=b'\x00' * 16) + {% elif field.type.map('python', field.attrs, True) == 'np.datetime64' %} + # set default value on type "np.datetime64" attribute: + self._{{ field.name }} = np.datetime64(0, 'ns') + # set default value on type "List" attribute: + {% elif field.type.basetype == FbsType.Vector %} + self._{{ field.name }} = [] + # set default value on type "Object" attribute: + {% elif field.type.basetype == FbsType.Obj %} + self._{{ field.name }} = {{ field.type.map('python', field.attrs, True) }}() + {% 
else %} + # FIXME [6] + raise NotImplementedError('implement processing [2] of FlatBuffers type "{}", basetype {}'.format({{ field.type.map('python', field.attrs, True) }}, {{ field.type.basetype }})) + {% endif %} + + {% endfor %} + + @staticmethod + def parse(data: Dict) -> '{{ metadata.classname }}': + """ + Parse generic, native language object into a typed, native language object. + + :param data: Generic native language object to parse, e.g. output of ``cbor2.loads``. + + :returns: Typed object of this class. + """ + # FIXME + # for key in data.keys(): + # assert key in {{ metadata.fields.keys() }} + obj = {{ metadata.classname }}() + {% for field in metadata.fields_by_id %} + if '{{ field.name }}' in data: + {% if field.type.map('python', field.attrs, True) == 'str' %} + assert (data['{{ field.name }}'] is None or type(data['{{ field.name }}']) == str), '{} has wrong type {}'.format('{{ field.name }}', type(data['{{ field.name }}'])) + obj.{{ field.name }} = data['{{ field.name }}'] + + {% elif field.type.map('python', field.attrs, True) == 'bytes' %} + assert (data['{{ field.name }}'] is None or type(data['{{ field.name }}']) == bytes), '{} has wrong type {}'.format('{{ field.name }}', type(data['{{ field.name }}'])) + obj.{{ field.name }} = data['{{ field.name }}'] + + {% elif field.type.map('python', field.attrs, True) == 'int' %} + assert (data['{{ field.name }}'] is None or type(data['{{ field.name }}']) == int), '{} has wrong type {}'.format('{{ field.name }}', type(data['{{ field.name }}'])) + obj.{{ field.name }} = data['{{ field.name }}'] + + {% elif field.type.map('python', field.attrs, True) == 'float' %} + assert (data['{{ field.name }}'] is None or type(data['{{ field.name }}']) == float), '{} has wrong type {}'.format('{{ field.name }}', type(data['{{ field.name }}'])) + obj.{{ field.name }} = data['{{ field.name }}'] + + {% elif field.type.map('python', field.attrs, True) == 'bool' %} + assert (data['{{ field.name }}'] is None or type(data['{{ 
field.name }}']) == bool), '{} has wrong type {}'.format('{{ field.name }}', type(data['{{ field.name }}'])) + obj.{{ field.name }} = data['{{ field.name }}'] + + {% elif field.type.map('python', field.attrs, True) == 'uuid.UUID' %} + assert (data['{{ field.name }}'] is None or (type(data['{{ field.name }}']) == bytes and len(data['{{ field.name }}']) == 16)), '{} has wrong type {}'.format('{{ field.name }}', type(data['{{ field.name }}'])) + if data['{{ field.name }}'] is not None: + obj.{{ field.name }} = uuid.UUID(bytes=data['{{ field.name }}']) + else: + obj.{{ field.name }} = None + + {% elif field.type.map('python', field.attrs, True) == 'np.datetime64' %} + assert (data['{{ field.name }}'] is None or type(data['{{ field.name }}']) == int), '{} has wrong type {}'.format('{{ field.name }}', type(data['{{ field.name }}'])) + if data['{{ field.name }}'] is not None: + obj.{{ field.name }} = np.datetime64(data['{{ field.name }}'], 'ns') + else: + obj.{{ field.name }} = np.datetime64(0, 'ns') + + {% elif field.type.basetype == FbsType.Vector %} + assert (data['{{ field.name }}'] is None or type(data['{{ field.name }}']) == list), '{} has wrong type {}'.format('{{ field.name }}', type(data['{{ field.name }}'])) + _value = [] + for v in data['{{ field.name }}']: + {% if False and field.type.element == FbsType.Obj %} + # FIXME + _value.append({{ field.type.objtype.split('.')[-1] }}.parse(v)) + {% else %} + _value.append(v) + {% endif %} + obj.{{ field.name }} = _value + + {% elif field.type.basetype == FbsType.Obj %} + assert (data['{{ field.name }}'] is None or type(data['{{ field.name }}']) == dict), '{} has wrong type {}'.format('{{ field.name }}', type(data['{{ field.name }}'])) + _value = {{ field.type.map('python', field.attrs, True) }}.parse(data['{{ field.name }}']) + obj.{{ field.name }} = _value + + {% else %} + # FIXME [3] + raise NotImplementedError('implement processing [3] of FlatBuffers type "{}"'.format({{ field.type.map('python', field.attrs, True) 
}})) + {% endif %} + {% endfor %} + return obj + + def marshal(self) -> Dict: + """ + Marshal all data contained in this typed native object into a generic object. + + :returns: Generic object that can be serialized to bytes using e.g. ``cbor2.dumps``. + """ + obj = { + {% for field in metadata.fields_by_id %} + + {% if field.type.map('python', field.attrs, True) in ['str', 'bytes', 'int', 'long', 'float', 'double', 'bool'] %} + '{{ field.name }}': self.{{ field.name }}, + + {% elif field.type.map('python', field.attrs, True) == 'uuid.UUID' %} + '{{ field.name }}': self.{{ field.name }}.bytes if self.{{ field.name }} is not None else None, + + {% elif field.type.map('python', field.attrs, True) == 'np.datetime64' %} + '{{ field.name }}': int(self.{{ field.name }}) if self.{{ field.name }} is not None else None, + + {% elif field.type.basetype == FbsType.Vector %} + {% if field.type.element == FbsType.Obj %} + '{{ field.name }}': [o.marshal() for o in self.{{ field.name }}] if self.{{ field.name }} is not None else None, + {% else %} + '{{ field.name }}': self.{{ field.name }}, + {% endif %} + + {% elif field.type.basetype == FbsType.Obj %} + '{{ field.name }}': self.{{ field.name }}.marshal() if self.{{ field.name }} is not None else None, + + {% else %} + # FIXME [4]: implement processing [4] of FlatBuffers type "{{ field.type | string }}" (Python type "{{ field.type.map('python', field.attrs, True) }}") + {% endif %} + {% endfor %} + } + return obj + + def __str__(self) -> str: + """ + Return string representation of this object, suitable for e.g. logging. + + :returns: String representation of this object. + """ + return '\n{}\n'.format(pprint.pformat(self.marshal())) + + @staticmethod + def cast(buf: bytes, offset: int = 0) -> '{{ metadata.classname }}': + """ + Cast a FlatBuffers raw input buffer as a typed object of this class. + + :param buf: The raw input buffer to cast. + :param offset: Offset into raw buffer from which to cast flatbuffers from. 
+ + :returns: New native object that wraps the FlatBuffers raw buffer. + """ + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = {{ metadata.classname }}() + x._tab = flatbuffers.table.Table(buf, n + offset) + return x + + def build(self, builder): + """ + Build a FlatBuffers raw output buffer from this typed object. + + :returns: Constructs the FlatBuffers using the builder and + returns ``builder.EndObject()``. + """ + # first, write all string|bytes|etc typed attribute values (in order) to the buffer + {% for field in metadata.fields_by_id %} + {% if field.type.map('python', field.attrs, True) in ['str', 'bytes'] %} + _{{ field.name }} = self.{{ field.name }} + if _{{ field.name }}: + _{{ field.name }} = builder.CreateString(_{{ field.name }}) + {% elif field.type.map('python', field.attrs, True) == 'uuid.UUID' %} + _{{ field.name }} = self.{{ field.name }}.bytes if self.{{ field.name }} else None + if _{{ field.name }}: + _{{ field.name }} = builder.CreateString(_{{ field.name }}) + {% else %} + {% endif %} + {% endfor %} + # now start a new object in the buffer and write the actual object attributes (in field + # order) to the buffer + builder.StartObject({{ metadata.fields_by_id|length }}) + + {% for field in metadata.fields_by_id %} + + {% if field.type.map('python', field.attrs, True) in ['str', 'bytes', 'uuid.UUID'] %} + if _{{ field.name }}: + builder.PrependUOffsetTRelativeSlot({{ field.id }}, flatbuffers.number_types.UOffsetTFlags.py_type(_{{ field.name }}), 0) + + {% elif field.type.map('python', field.attrs, True) == 'np.datetime64' %} + if self.{{ field.name }}: + builder.PrependUint64Slot({{ field.id }}, int(self.{{ field.name }}), 0) + + {% elif field.type.map('python', field.attrs, True) in ['bool', 'int', 'float'] %} + if self.{{ field.name }}: + builder.{{ FbsType.FBS2PREPEND[field.type.basetype] }}({{ field.id }}, self.{{ field.name }}, 0) + + {% else %} + # FIXME [1] + # raise NotImplementedError('implement builder [1] 
for type "{}"'.format({{ field.type.map('python', field.attrs, True) }})) + + {% endif %} + {% endfor %} + + return builder.EndObject() diff --git a/autobahn/xbr/templates/service.py.jinja2 b/autobahn/xbr/templates/py-autobahn/service.py.jinja2 similarity index 92% rename from autobahn/xbr/templates/service.py.jinja2 rename to autobahn/xbr/templates/py-autobahn/service.py.jinja2 index b1364e693..61171235a 100644 --- a/autobahn/xbr/templates/service.py.jinja2 +++ b/autobahn/xbr/templates/py-autobahn/service.py.jinja2 @@ -71,7 +71,7 @@ class {{ metadata.classname }}(object): {% for call_name in metadata.calls_by_id %} {% if metadata.calls[call_name].attrs['type'] == 'topic' %} - async def publish_{{ call_name }}(self, evt: {{ repo.objs[metadata.calls[call_name].request.name].map('python') }}, options: Optional[PublishOptions] = None) -> Optional[Publication]: + async def publish_{{ call_name }}(self, evt: {{ repo.objs[metadata.calls[call_name].request.name].map('python', required=False, objtype_as_string=True) }}, options: Optional[PublishOptions] = None) -> Optional[Publication]: """ As an **interface provider**, publish event: @@ -99,7 +99,7 @@ class {{ metadata.classname }}(object): pub = None return pub - def receive_{{ call_name }}(self, evt: {{ repo.objs[metadata.calls[call_name].request.name].map('python') }}, details: Optional[EventDetails] = None): + def receive_{{ call_name }}(self, evt: {{ repo.objs[metadata.calls[call_name].request.name].map('python', required=False, objtype_as_string=True) }}, details: Optional[EventDetails] = None): """ As an **interface consumer**, receive event: @@ -116,7 +116,7 @@ class {{ metadata.classname }}(object): {% for call_name in metadata.calls_by_id %} {% if metadata.calls[call_name].attrs['type'] == 'procedure' %} - async def call_{{ call_name }}(self, req: {{ repo.objs[metadata.calls[call_name].request.name].map('python') }}, options: Optional[CallOptions] = None) -> {{ 
repo.objs[metadata.calls[call_name].response.name].map('python') }}: + async def call_{{ call_name }}(self, req: {{ repo.objs[metadata.calls[call_name].request.name].map('python', required=False, objtype_as_string=True) }}, options: Optional[CallOptions] = None) -> {{ repo.objs[metadata.calls[call_name].response.name].map('python', required=False, objtype_as_string=True) }}: """ As an **interface consumer**, call procedure: @@ -136,7 +136,7 @@ class {{ metadata.classname }}(object): result = await self._x_session.call(procedure, payload, options=options) return result - def invoke_{{ call_name }}(self, req: {{ repo.objs[metadata.calls[call_name].request.name].map('python') }}, details: Optional[CallDetails] = None) -> {{ repo.objs[metadata.calls[call_name].response.name].map('python') }}: + def invoke_{{ call_name }}(self, req: {{ repo.objs[metadata.calls[call_name].request.name].map('python', required=False, objtype_as_string=True) }}, details: Optional[CallDetails] = None) -> {{ repo.objs[metadata.calls[call_name].response.name].map('python', required=False, objtype_as_string=True) }}: """ As an **interface provider**, process call invocation: diff --git a/autobahn/xbr/templates/test_enum.py.jinja2 b/autobahn/xbr/templates/py-autobahn/test_enum.py.jinja2 similarity index 100% rename from autobahn/xbr/templates/test_enum.py.jinja2 rename to autobahn/xbr/templates/py-autobahn/test_enum.py.jinja2 diff --git a/autobahn/xbr/templates/test_module.py.jinja2 b/autobahn/xbr/templates/py-autobahn/test_module.py.jinja2 similarity index 100% rename from autobahn/xbr/templates/test_module.py.jinja2 rename to autobahn/xbr/templates/py-autobahn/test_module.py.jinja2 diff --git a/autobahn/xbr/templates/py-autobahn/test_obj.py.jinja2 b/autobahn/xbr/templates/py-autobahn/test_obj.py.jinja2 new file mode 100644 index 000000000..4caef81c1 --- /dev/null +++ b/autobahn/xbr/templates/py-autobahn/test_obj.py.jinja2 @@ -0,0 +1,220 @@ +{% if render_imports %} + +import os +import random 
+import timeit +import uuid +import cbor2 + +import txaio +txaio.use_twisted() # noqa + +from autobahn import util +from autobahn.wamp.serializer import JsonObjectSerializer, MsgPackObjectSerializer, \ + CBORObjectSerializer, UBJSONObjectSerializer + +import flatbuffers +import pytest +import numpy as np +from txaio import time_ns + + +@pytest.fixture(scope='function') +def builder(): + _builder = flatbuffers.Builder(0) + return _builder + + +_SERIALIZERS = [ + JsonObjectSerializer(), + MsgPackObjectSerializer(), + CBORObjectSerializer(), + UBJSONObjectSerializer(), +] + +{% endif %} +from {{ metadata.module_relimport }} import {{ metadata.classname }} + + +def fill_{{ metadata.classname }}(obj: {{ metadata.classname }}): + {% if metadata.fields_by_id|length == 0 %} + # class has no fields + pass + {% else %} + {% for field in metadata.fields_by_id %} + {% if field.type.map('python', field.attrs, True) == 'str' %} + obj.{{ field.name }} = util.generate_activation_code() + {% elif field.type.map('python', field.attrs, True) == 'bytes' %} + obj.{{ field.name }} = os.urandom(32) + {% elif field.type.map('python', field.attrs, True) in ['int', 'long'] %} + # FIXME: enum vs int + # obj.{{ field.name }} = random.randint(0, 2**31 - 1) + obj.{{ field.name }} = random.randint(0, 3) + {% elif field.type.map('python', field.attrs, True) in ['float', 'double'] %} + obj.{{ field.name }} = random.random() + {% elif field.type.map('python', field.attrs, True) == 'bool' %} + obj.{{ field.name }} = random.random() > 0.5 + {% elif field.type.map('python', field.attrs, True) == 'uuid.UUID' %} + obj.{{ field.name }} = uuid.uuid4() + {% elif field.type.map('python', field.attrs, True) == 'np.datetime64' %} + obj.{{ field.name }} = np.datetime64(time_ns(), 'ns') + {% else %} + obj.{{ field.name }} = None + {% endif %} + {% endfor %} + {% endif %} + + +def fill_{{ metadata.classname }}_empty(obj: {{ metadata.classname }}): + {% if metadata.fields_by_id|length == 0 %} + # class has no 
fields + pass + {% else %} + {% for field in metadata.fields_by_id %} + obj.{{ field.name }} = None + {% endfor %} + {% endif %} + + +@pytest.fixture(scope='function') +def {{ metadata.classname }}_obj(): + _obj: {{ metadata.classname }} = {{ metadata.classname }}() + fill_{{ metadata.classname }}(_obj) + return _obj + + +def test_{{ metadata.classname }}_roundtrip({{ metadata.classname }}_obj, builder): + # serialize to bytes (flatbuffers) from python object + obj = {{ metadata.classname }}_obj.build(builder) + builder.Finish(obj) + data = builder.Output() + + # check length of serialized object data + print('{} serialized object length = {} bytes'.format('{{ metadata.classname }}', len(data))) + + # create python object from bytes (flatbuffers) + _obj: {{ metadata.classname }} = {{ metadata.classname }}_obj.cast(data) + + {% for field in metadata.fields_by_id %} + assert _obj.{{ field.name }} == {{ metadata.classname }}_obj.{{ field.name }} + {% endfor %} + + +def test_{{ metadata.classname }}_empty(builder): + empty_obj = {{ metadata.classname }}() + fill_{{ metadata.classname }}_empty(empty_obj) + + # check the object was initialized correctly + {% for field in metadata.fields_by_id %} + {% if field.type.map('python', field.attrs, True) == 'str' %} + assert empty_obj.{{ field.name }} == '' + {% elif field.type.map('python', field.attrs, True) == 'bytes' %} + assert empty_obj.{{ field.name }} == b'' + {% elif field.type.map('python', field.attrs, True) in ['int', 'long'] %} + assert empty_obj.{{ field.name }} == 0 + {% elif field.type.map('python', field.attrs, True) in ['float', 'double'] %} + assert empty_obj.{{ field.name }} == 0.0 + {% elif field.type.map('python', field.attrs, True) == 'bool' %} + assert empty_obj.{{ field.name }} is False + {% elif field.type.map('python', field.attrs, True) == 'uuid.UUID' %} + assert empty_obj.{{ field.name }} == uuid.UUID(bytes=b'\0'*16) + {% elif field.type.map('python', field.attrs, True) == 'np.datetime64' %} + assert 
empty_obj.{{ field.name }} == np.datetime64(0, 'ns') + {% else %} + assert empty_obj.{{ field.name }} is None + {% endif %} + {% endfor %} + + # serialize to bytes (flatbuffers) from python object + obj = empty_obj.build(builder) + builder.Finish(obj) + data = builder.Output() + + # check length of serialized object data + print('{} serialized object length = {} bytes'.format('{{ metadata.classname }}', len(data))) + + # create python object from bytes (flatbuffers) + _obj: {{ metadata.classname }} = {{ metadata.classname }}.cast(data) + + {% for field in metadata.fields_by_id %} + {% if field.type.map('python', field.attrs, True) == 'str' %} + assert _obj.{{ field.name }} == '' + {% elif field.type.map('python', field.attrs, True) == 'bytes' %} + assert _obj.{{ field.name }} == b'' + {% elif field.type.map('python', field.attrs, True) in ['int', 'long'] %} + assert _obj.{{ field.name }} == 0 + {% elif field.type.map('python', field.attrs, True) in ['float', 'double'] %} + assert _obj.{{ field.name }} == 0.0 + {% elif field.type.map('python', field.attrs, True) == 'bool' %} + assert _obj.{{ field.name }} is False + {% elif field.type.map('python', field.attrs, True) == 'uuid.UUID' %} + assert _obj.{{ field.name }} == uuid.UUID(bytes=b'\0'*16) + {% elif field.type.map('python', field.attrs, True) == 'np.datetime64' %} + assert _obj.{{ field.name }} == np.datetime64(0, 'ns') + {% else %} + assert _obj.{{ field.name }} is None + {% endif %} + {% endfor %} + + +def test_{{ metadata.classname }}_roundtrip_perf({{ metadata.classname }}_obj, builder): + obj = {{ metadata.classname }}_obj.build(builder) + builder.Finish(obj) + data = builder.Output() + scratch = {'value': 0} + + def loop(): + _obj: {{ metadata.classname }} = {{ metadata.classname }}.cast(data) + {% for field in metadata.fields_by_id %} + assert _obj.{{ field.name }} == {{ metadata.classname }}_obj.{{ field.name }} + {% endfor %} + scratch['value'] += 1 + + loop_n = 7 + loop_m = 20000 + samples = [] + 
print('measuring:') + for i in range(loop_n): + secs = timeit.timeit(loop, number=loop_m) + ops = round(float(loop_m) / secs, 1) + samples.append(ops) + print('{} objects/sec performance'.format(ops)) + + samples = sorted(samples) + ops50 = samples[int(len(samples) / 2)] + print('RESULT: {} objects/sec median performance'.format(ops50)) + + assert ops50 > 1000 + print(scratch['value']) + + +def test_{{ metadata.classname }}_marshal_parse({{ metadata.classname }}_obj, builder): + obj = {{ metadata.classname }}_obj.marshal() + _obj = {{ metadata.classname }}_obj.parse(obj) + {% for field in metadata.fields_by_id %} + assert _obj.{{ field.name }} == {{ metadata.classname }}_obj.{{ field.name }} + {% endfor %} + + +def test_{{ metadata.classname }}_marshal_cbor_parse({{ metadata.classname }}_obj, builder): + obj = {{ metadata.classname }}_obj.marshal() + data = cbor2.dumps(obj) + print('serialized {} to {} bytes (cbor)'.format({{ metadata.classname }}, len(data))) + _obj_raw = cbor2.loads(data) + _obj = {{ metadata.classname }}_obj.parse(_obj_raw) + {% for field in metadata.fields_by_id %} + assert _obj.{{ field.name }} == {{ metadata.classname }}_obj.{{ field.name }} + {% endfor %} + + +def test_{{ metadata.classname }}_ab_serializer_roundtrip({{ metadata.classname }}_obj, builder): + obj = {{ metadata.classname }}_obj.marshal() + for ser in _SERIALIZERS: + data = ser.serialize(obj) + print('serialized {} to {} bytes ({})'.format({{ metadata.classname }}, len(data), ser.NAME)) + msg2 = ser.unserialize(data)[0] + obj2 = {{ metadata.classname }}.parse(msg2) + + {% for field in metadata.fields_by_id %} + assert obj2.{{ field.name }} == {{ metadata.classname }}_obj.{{ field.name }} + {% endfor %} diff --git a/autobahn/xbr/templates/test_service.py.jinja2 b/autobahn/xbr/templates/py-autobahn/test_service.py.jinja2 similarity index 100% rename from autobahn/xbr/templates/test_service.py.jinja2 rename to autobahn/xbr/templates/py-autobahn/test_service.py.jinja2 diff --git 
a/autobahn/xbr/templates/obj.py.jinja2 b/autobahn/xbr/templates/sol-eip712/obj-eip712.sol.jinja2 similarity index 100% rename from autobahn/xbr/templates/obj.py.jinja2 rename to autobahn/xbr/templates/sol-eip712/obj-eip712.sol.jinja2 diff --git a/autobahn/xbr/templates/test_obj.py.jinja2 b/autobahn/xbr/templates/test_obj.py.jinja2 deleted file mode 100644 index a9e18137a..000000000 --- a/autobahn/xbr/templates/test_obj.py.jinja2 +++ /dev/null @@ -1,219 +0,0 @@ -{% if render_imports %} -import os -import random -import timeit -import uuid -import cbor2 - -import txaio -txaio.use_twisted() # noqa - -from autobahn import util -from autobahn.wamp.serializer import JsonObjectSerializer, MsgPackObjectSerializer, \ - CBORObjectSerializer, UBJSONObjectSerializer - -import flatbuffers -import pytest -import numpy as np -from txaio import time_ns - - -@pytest.fixture(scope='function') -def builder(): - _builder = flatbuffers.Builder(0) - return _builder - - -_SERIALIZERS = [ - JsonObjectSerializer(), - MsgPackObjectSerializer(), - CBORObjectSerializer(), - UBJSONObjectSerializer(), -] - -{% endif %} -from .{{ metadata.module_relimport }} import {{ metadata.classname }} - - -def fill_{{ metadata.classname }}(obj: {{ metadata.classname }}): - {% if metadata.fields_by_id|length == 0 %} - # class has no fields - pass - {% else %} - {% for field_name in metadata.fields_by_id %} - {% if metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'str' %} - obj.{{ metadata.fields[field_name].name }} = util.generate_activation_code() - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'bytes' %} - obj.{{ metadata.fields[field_name].name }} = os.urandom(32) - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) in ['int', 'long'] %} - # FIXME: enum vs int - # obj.{{ metadata.fields[field_name].name }} = random.randint(0, 2**31 - 1) - obj.{{ 
metadata.fields[field_name].name }} = random.randint(0, 3) - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) in ['float', 'double'] %} - obj.{{ metadata.fields[field_name].name }} = random.random() - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'bool' %} - obj.{{ metadata.fields[field_name].name }} = random.random() > 0.5 - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'uuid.UUID' %} - obj.{{ metadata.fields[field_name].name }} = uuid.uuid4() - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'np.datetime64' %} - obj.{{ metadata.fields[field_name].name }} = np.datetime64(time_ns(), 'ns') - {% else %} - obj.{{ metadata.fields[field_name].name }} = None - {% endif %} - {% endfor %} - {% endif %} - - -def fill_{{ metadata.classname }}_empty(obj: {{ metadata.classname }}): - {% if metadata.fields_by_id|length == 0 %} - # class has no fields - pass - {% else %} - {% for field_name in metadata.fields_by_id %} - obj.{{ metadata.fields[field_name].name }} = None - {% endfor %} - {% endif %} - - -@pytest.fixture(scope='function') -def {{ metadata.classname }}_obj(): - _obj: {{ metadata.classname }} = {{ metadata.classname }}() - fill_{{ metadata.classname }}(_obj) - return _obj - - -def test_{{ metadata.classname }}_roundtrip({{ metadata.classname }}_obj, builder): - # serialize to bytes (flatbuffers) from python object - obj = {{ metadata.classname }}_obj.build(builder) - builder.Finish(obj) - data = builder.Output() - - # check length of serialized object data - print('{} serialized object length = {} bytes'.format('{{ metadata.classname }}', len(data))) - - # create python object from bytes (flatbuffes) - _obj: {{ metadata.classname }} = {{ metadata.classname }}_obj.cast(data) - - {% for field_name in metadata.fields_by_id %} - assert _obj.{{ 
metadata.fields[field_name].name }} == {{ metadata.classname }}_obj.{{ metadata.fields[field_name].name }} - {% endfor %} - - -def test_{{ metadata.classname }}_empty(builder): - empty_obj = {{ metadata.classname }}() - fill_{{ metadata.classname }}_empty(empty_obj) - - # check the object was initialized correctly - {% for field_name in metadata.fields_by_id %} - {% if metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'str' %} - assert empty_obj.{{ metadata.fields[field_name].name }} == '' - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'bytes' %} - assert empty_obj.{{ metadata.fields[field_name].name }} == b'' - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) in ['int', 'long'] %} - assert empty_obj.{{ metadata.fields[field_name].name }} == 0 - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) in ['float', 'double'] %} - assert empty_obj.{{ metadata.fields[field_name].name }} == 0.0 - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'bool' %} - assert empty_obj.{{ metadata.fields[field_name].name }} is False - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'uuid.UUID' %} - assert empty_obj.{{ metadata.fields[field_name].name }} == uuid.UUID(bytes=b'\0'*16) - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'np.datetime64' %} - assert empty_obj.{{ metadata.fields[field_name].name }} == np.datetime64(0, 'ns') - {% else %} - assert empty_obj.{{ metadata.fields[field_name].name }} is None - {% endif %} - {% endfor %} - - # serialize to bytes (flatbuffers) from python object - obj = empty_obj.build(builder) - builder.Finish(obj) - data = builder.Output() - - # check length of serialized object data - print('{} serialized object length 
= {} bytes'.format('{{ metadata.classname }}', len(data))) - - # create python object from bytes (flatbuffes) - _obj: {{ metadata.classname }} = {{ metadata.classname }}.cast(data) - - {% for field_name in metadata.fields_by_id %} - {% if metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'str' %} - assert _obj.{{ metadata.fields[field_name].name }} == '' - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'bytes' %} - assert _obj.{{ metadata.fields[field_name].name }} == b'' - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) in ['int', 'long'] %} - assert _obj.{{ metadata.fields[field_name].name }} == 0 - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) in ['float', 'double'] %} - assert _obj.{{ metadata.fields[field_name].name }} == 0.0 - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'bool' %} - assert _obj.{{ metadata.fields[field_name].name }} is False - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'uuid.UUID' %} - assert _obj.{{ metadata.fields[field_name].name }} == uuid.UUID(bytes=b'\0'*16) - {% elif metadata.fields[field_name].type.map('python', metadata.fields[field_name].attrs, True) == 'np.datetime64' %} - assert _obj.{{ metadata.fields[field_name].name }} == np.datetime64(0, 'ns') - {% else %} - assert _obj.{{ metadata.fields[field_name].name }} is None - {% endif %} - {% endfor %} - - -def test_{{ metadata.classname }}_roundtrip_perf({{ metadata.classname }}_obj, builder): - obj = {{ metadata.classname }}_obj.build(builder) - builder.Finish(obj) - data = builder.Output() - scratch = {'value': 0} - - def loop(): - _obj: {{ metadata.classname }} = {{ metadata.classname }}.cast(data) - {% for field_name in metadata.fields_by_id %} - assert _obj.{{ 
metadata.fields[field_name].name }} == {{ metadata.classname }}_obj.{{ metadata.fields[field_name].name }} - {% endfor %} - scratch['value'] += 1 - - N = 7 - M = 20000 - samples = [] - print('measuring:') - for i in range(N): - secs = timeit.timeit(loop, number=M) - ops = round(float(M) / secs, 1) - samples.append(ops) - print('{} objects/sec performance'.format(ops)) - - samples = sorted(samples) - ops50 = samples[int(len(samples) / 2)] - print('RESULT: {} objects/sec median performance'.format(ops50)) - - assert ops50 > 1000 - print(scratch['value']) - - -def test_{{ metadata.classname }}_marshal_parse({{ metadata.classname }}_obj, builder): - obj = {{ metadata.classname }}_obj.marshal() - _obj = {{ metadata.classname }}_obj.parse(obj) - {% for field_name in metadata.fields_by_id %} - assert _obj.{{ metadata.fields[field_name].name }} == {{ metadata.classname }}_obj.{{ metadata.fields[field_name].name }} - {% endfor %} - - -def test_{{ metadata.classname }}_marshal_cbor_parse({{ metadata.classname }}_obj, builder): - obj = {{ metadata.classname }}_obj.marshal() - data = cbor2.dumps(obj) - print('serialized {} to {} bytes (cbor)'.format({{ metadata.classname }}, len(data))) - _obj_raw = cbor2.loads(data) - _obj = {{ metadata.classname }}_obj.parse(_obj_raw) - {% for field_name in metadata.fields_by_id %} - assert _obj.{{ metadata.fields[field_name].name }} == {{ metadata.classname }}_obj.{{ metadata.fields[field_name].name }} - {% endfor %} - - -def test_{{ metadata.classname }}_ab_serializer_roundtrip({{ metadata.classname }}_obj, builder): - obj = {{ metadata.classname }}_obj.marshal() - for ser in _SERIALIZERS: - data = ser.serialize(obj) - print('serialized {} to {} bytes ({})'.format({{ metadata.classname }}, len(data), ser.NAME)) - msg2 = ser.unserialize(data)[0] - obj2 = {{ metadata.classname }}.parse(msg2) - - {% for field_name in metadata.fields_by_id %} - assert obj2.{{ metadata.fields[field_name].name }} == {{ metadata.classname }}_obj.{{ 
metadata.fields[field_name].name }} - {% endfor %} diff --git a/docs/changelog.rst b/docs/changelog.rst index 13bc96d60..bf63e094d 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -5,9 +5,23 @@ Changelog ========= -22.5.1.dev1 +22.5.1.dev2 ----------- +* new: WAMP Flatbuffers IDL and schema processing (experimental) +* new: WAMP-cryptosign trustroot (experimental) +* new: add wrapper type for CryptosignAuthextra +* fix: stricter type checking of Challenge; fix cryptosign unit test +* new: more test coverage +* fix: reduce log noise +* fix: forward channel_binding selected in Component client +* new: expand ISigningKey to provide security_module/key_id (if used) +* fix: Component cryptosign test +* fix: add type hints; fix channel_binding +* new: work on federated realms and secmods +* new: rename to and work on a.w.CryptosignKey +* new: add bip44 for cryptosign test +* fix: remove all txaio.make_logger refs from generic code (#1564) * new: initial support for federated WAMP realms via a.x.FederatedRealm/Seeder * new: moved utility functions and unit tests for WAMP realm name checking from Crossbar.io * new: allow list of URLs for transports in a.t.component.Component diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..94ac00c3b --- /dev/null +++ b/mypy.ini @@ -0,0 +1,68 @@ +[mypy] +python_version = 3.7 + +[mypy-autobahn.*] +ignore_missing_imports = True + +[mypy-txaio.*] +ignore_missing_imports = True + +[mypy-xbr.*] +ignore_missing_imports = True + +[mypy-flatbuffers.*] +ignore_missing_imports = True + +[mypy-py_eth_sig_utils.*] +ignore_missing_imports = True + +[mypy-cbor2.*] +ignore_missing_imports = True + +[mypy-humanize.*] +ignore_missing_imports = True + +[mypy-yapf.*] +ignore_missing_imports = True + +[mypy-zope.interface.*] +ignore_missing_imports = True + +[mypy-pyqrcode.*] +ignore_missing_imports = True + +[mypy-pytrie.*] +ignore_missing_imports = True + +[mypy-passlib.*] +ignore_missing_imports = True + +[mypy-colorama.*] 
+ignore_missing_imports = True + +[mypy-eth_keys.*] +ignore_missing_imports = True + +[mypy-txtorcon.*] +ignore_missing_imports = True + +[mypy-ens.*] +ignore_missing_imports = True + +[mypy-jinja2.*] +ignore_missing_imports = True + +[mypy-multihash.*] +ignore_missing_imports = True + +[mypy-gi.*] +ignore_missing_imports = True + +[mypy-_hashlib.*] +ignore_missing_imports = True + +[mypy-_nvx_utf8validator.*] +ignore_missing_imports = True + +[mypy-wsaccel.*] +ignore_missing_imports = True diff --git a/setup.py b/setup.py index 7c3acbc78..c08534f9d 100644 --- a/setup.py +++ b/setup.py @@ -53,7 +53,7 @@ if CPY and sys.platform != 'win32': # wsaccel does not provide wheels: https://github.com/methane/wsaccel/issues/12 extras_require_accelerate = [ - "wsaccel>=0.6.3" # Apache 2.0 + # "wsaccel>=0.6.3" # Apache 2.0 ] else: extras_require_accelerate = [] @@ -198,7 +198,10 @@ else: extras_require_all += extras_require_xbr packages += xbr_packages - package_data['xbr'] = ['./xbr/templates/*.py.jinja2'] + package_data['xbr'] = [ + './xbr/templates/py-autobahn/*.py.jinja2', + './xbr/templates/sol-eip712/*.sol.jinja2', + ] entry_points['console_scripts'] += ["xbrnetwork = autobahn.xbr._cli:_main"] entry_points['console_scripts'] += ["xbrnetwork-ui = autobahn.xbr._gui:_main"]