cfxdb
Advanced tools
##############################################################################
#
# Crossbar.io Database
# Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT.
#
##############################################################################
"""Cookie store persistence: cookie record type, tables and schema for WAMP-Cookie authentication."""
from cfxdb.cookiestore._cookie import Cookie, Cookies, IndexCookiesByValue
from cfxdb.cookiestore._schema import CookieStoreSchema

# Public API of the cfxdb.cookiestore package.
__all__ = ('Cookie', 'Cookies', 'IndexCookiesByValue', 'CookieStoreSchema')
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import uuid | ||
| import pprint | ||
| from typing import Optional, Dict, Any | ||
| import cbor2 | ||
| import flatbuffers | ||
| import numpy as np | ||
| from zlmdb import table, MapUuidFlatBuffers, MapStringUuid | ||
| from cfxdb.gen.cookiestore import Cookie as CookieGen | ||
class _CookieGen(CookieGen.Cookie):
    """
    Extension of the generated flatbuffers Cookie accessor, adding zero-copy
    ``*AsBytes`` helpers for the raw byte-vector fields.
    """
    @classmethod
    def GetRootAsCookie(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        obj = _CookieGen()
        obj.Init(buf, n + offset)
        return obj

    def _vector_as_bytes(self, voffset):
        # Return a zero-copy memoryview over the [uint8] vector stored at the
        # given vtable offset, or None when the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(voffset))
        if o == 0:
            return None
        start = self._tab.Vector(o)
        length = self._tab.VectorLen(o)
        return memoryview(self._tab.Bytes)[start:start + length]

    def OidAsBytes(self):
        return self._vector_as_bytes(4)

    def AuthenticatedOnNodeAsBytes(self):
        return self._vector_as_bytes(16)

    def AuthenticatedTransportInfoAsBytes(self):
        return self._vector_as_bytes(20)

    def AuthExtraAsBytes(self):
        return self._vector_as_bytes(36)
class Cookie(object):
    """
    Persistent cookies, as used in WAMP-Cookie authentication by router and proxy workers.

    Attributes are decoded lazily from a wrapped flatbuffers object (``from_fbs``)
    on first access; values assigned via the property setters take precedence over
    the decoded values.
    """
    def __init__(self, from_fbs=None):
        """
        :param from_fbs: Optional flatbuffers cookie object to lazily decode
            attribute values from.
        """
        self._from_fbs = from_fbs

        # [uint8] (uuid)
        self._oid = None

        # uint64 (timestamp)
        self._created = None

        # uint64
        self._max_age = None

        # string
        self._name = None

        # string
        self._value = None

        # uint64 (timestamp)
        self._authenticated = None

        # [uint8] (uuid)
        self._authenticated_on_node = None

        # string
        self._authenticated_on_worker = None

        # [uint8] (cbor)
        self._authenticated_transport_info = None

        # uint64
        self._authenticated_session = None

        # uint64 (timestamp)
        self._authenticated_joined_at = None

        # string
        self._authenticated_authmethod = None

        # string
        self._authid = None

        # string
        self._authrole = None

        # string
        self._authmethod = None

        # string
        self._authrealm = None

        # [uint8] (cbor)
        self._authextra = None

    def marshal(self) -> dict:
        """
        Marshal this object into a generic host language object (dict).

        UUID attributes are rendered as raw bytes, timestamps as epoch-ns
        integers (or None when unset).

        :return: Dict representation of this cookie record.
        """
        obj = {
            'oid': self.oid.bytes if self.oid else None,
            'created': int(self.created) if self.created else None,
            'max_age': self.max_age,
            'name': self.name,
            'value': self.value,
            'authenticated': int(self.authenticated) if self.authenticated else None,
            'authenticated_on_node': self.authenticated_on_node.bytes if self.authenticated_on_node else None,
            'authenticated_on_worker': self.authenticated_on_worker,
            'authenticated_transport_info': self.authenticated_transport_info,
            'authenticated_session': self.authenticated_session,
            'authenticated_joined_at':
                int(self.authenticated_joined_at) if self.authenticated_joined_at else None,
            'authenticated_authmethod': self.authenticated_authmethod,
            'authid': self.authid,
            'authrole': self.authrole,
            # FIX: 'authmethod' was previously missing from the marshalled dict,
            # although the attribute exists and is serialized in build().
            'authmethod': self.authmethod,
            'authrealm': self.authrealm,
            'authextra': self.authextra,
        }
        return obj

    def __str__(self):
        return '\n{}\n'.format(pprint.pformat(self.marshal()))

    @property
    def oid(self) -> uuid.UUID:
        """
        Database ID of this cookie record.
        """
        if self._oid is None and self._from_fbs:
            if self._from_fbs.OidLength():
                _oid = self._from_fbs.OidAsBytes()
                self._oid = uuid.UUID(bytes=bytes(_oid))
        return self._oid

    @oid.setter
    def oid(self, value: uuid.UUID):
        assert value is None or isinstance(value, uuid.UUID)
        self._oid = value

    @property
    def created(self) -> np.datetime64:
        """
        Timestamp when the cookie was created. Epoch time in ns.
        """
        if self._created is None and self._from_fbs:
            self._created = np.datetime64(self._from_fbs.Created(), 'ns')
        return self._created

    @created.setter
    def created(self, value: np.datetime64):
        assert value is None or isinstance(value, np.datetime64)
        self._created = value

    @property
    def max_age(self) -> int:
        """
        Cookie maximum age (lifetime of the cookie in seconds, see http://tools.ietf.org/html/rfc6265#page-20),
        e.g. ``604800"``.
        """
        if self._max_age is None and self._from_fbs:
            self._max_age = self._from_fbs.MaxAge()
        return self._max_age

    @max_age.setter
    def max_age(self, value: int):
        assert value is None or type(value) == int
        self._max_age = value

    @property
    def name(self) -> str:
        """
        Cookie name, as set in HTTP header, e.g. ``"cbtid"``.
        """
        if self._name is None and self._from_fbs:
            _name = self._from_fbs.Name()
            if _name:
                self._name = _name.decode('utf8')
        return self._name

    @name.setter
    def name(self, value: str):
        self._name = value

    @property
    def value(self) -> str:
        """
        Cookie value, as set in HTTP header, e.g. ``"gn2ri8fuAYQse50/L6N7jnt2"``.
        """
        if self._value is None and self._from_fbs:
            _value = self._from_fbs.Value()
            if _value:
                self._value = _value.decode('utf8')
        return self._value

    @value.setter
    def value(self, value: str):
        self._value = value

    @property
    def authenticated(self) -> np.datetime64:
        """
        Timestamp when the cookie was authenticated (if any). Epoch time in ns.
        """
        if self._authenticated is None and self._from_fbs:
            self._authenticated = np.datetime64(self._from_fbs.Authenticated(), 'ns')
        return self._authenticated

    @authenticated.setter
    def authenticated(self, value: np.datetime64):
        assert value is None or isinstance(value, np.datetime64)
        self._authenticated = value

    @property
    def authenticated_on_node(self) -> uuid.UUID:
        """
        The Crossbar.io node (within the management domain) the cookie was authenticated on (if any).
        """
        if self._authenticated_on_node is None and self._from_fbs:
            if self._from_fbs.AuthenticatedOnNodeLength():
                _authenticated_on_node = self._from_fbs.AuthenticatedOnNodeAsBytes()
                self._authenticated_on_node = uuid.UUID(bytes=bytes(_authenticated_on_node))
        return self._authenticated_on_node

    @authenticated_on_node.setter
    def authenticated_on_node(self, value: uuid.UUID):
        assert value is None or isinstance(value, uuid.UUID)
        self._authenticated_on_node = value

    @property
    def authenticated_on_worker(self) -> str:
        """
        The Crossbar.io worker (within the node) the cookie was authenticated on (if any).
        """
        if self._authenticated_on_worker is None and self._from_fbs:
            _authenticated_on_worker = self._from_fbs.AuthenticatedOnWorker()
            if _authenticated_on_worker:
                self._authenticated_on_worker = _authenticated_on_worker.decode('utf8')
        return self._authenticated_on_worker

    @authenticated_on_worker.setter
    def authenticated_on_worker(self, value: str):
        self._authenticated_on_worker = value

    @property
    def authenticated_transport_info(self) -> dict:
        """
        The client transport information for the connection the cookie was authenticated in (if any).
        """
        if self._authenticated_transport_info is None and self._from_fbs:
            _authenticated_transport_info = self._from_fbs.AuthenticatedTransportInfoAsBytes()
            if _authenticated_transport_info:
                # stored CBOR-serialized in the flatbuffers byte vector
                self._authenticated_transport_info = cbor2.loads(_authenticated_transport_info)
            else:
                self._authenticated_transport_info = {}
        return self._authenticated_transport_info

    @authenticated_transport_info.setter
    def authenticated_transport_info(self, value: Optional[Dict[str, Any]]):
        assert value is None or type(value) == dict
        self._authenticated_transport_info = value

    @property
    def authenticated_session(self) -> int:
        """
        The WAMP session ID of the original authenticating session.
        """
        if self._authenticated_session is None and self._from_fbs:
            self._authenticated_session = self._from_fbs.AuthenticatedSession()
        return self._authenticated_session

    @authenticated_session.setter
    def authenticated_session(self, value: int):
        assert value is None or type(value) == int
        self._authenticated_session = value

    @property
    def authenticated_joined_at(self) -> np.datetime64:
        """
        Timestamp when the original authenticating session was welcome by the router. Epoch time in ns.
        """
        if self._authenticated_joined_at is None and self._from_fbs:
            self._authenticated_joined_at = np.datetime64(self._from_fbs.AuthenticatedJoinedAt(), 'ns')
        return self._authenticated_joined_at

    @authenticated_joined_at.setter
    def authenticated_joined_at(self, value: np.datetime64):
        assert value is None or isinstance(value, np.datetime64)
        self._authenticated_joined_at = value

    @property
    def authenticated_authmethod(self) -> str:
        """
        The (original) WAMP authentication method, after which the client was authenticated before setting this cookie.
        """
        if self._authenticated_authmethod is None and self._from_fbs:
            _authenticated_authmethod = self._from_fbs.AuthenticatedAuthmethod()
            if _authenticated_authmethod:
                self._authenticated_authmethod = _authenticated_authmethod.decode('utf8')
        return self._authenticated_authmethod

    @authenticated_authmethod.setter
    def authenticated_authmethod(self, value: str):
        self._authenticated_authmethod = value

    @property
    def authid(self) -> str:
        """
        The WAMP authid a cookie-authenticating session is to be assigned.
        """
        if self._authid is None and self._from_fbs:
            _authid = self._from_fbs.Authid()
            if _authid:
                self._authid = _authid.decode('utf8')
        return self._authid

    @authid.setter
    def authid(self, value: str):
        self._authid = value

    @property
    def authrole(self) -> str:
        """
        The WAMP authrole a cookie-authenticating session is to join under.
        """
        if self._authrole is None and self._from_fbs:
            _authrole = self._from_fbs.Authrole()
            if _authrole:
                self._authrole = _authrole.decode('utf8')
        return self._authrole

    @authrole.setter
    def authrole(self, value: str):
        self._authrole = value

    @property
    def authmethod(self) -> str:
        """
        The WAMP authmethod a cookie-authenticating session is to join under.
        """
        if self._authmethod is None and self._from_fbs:
            _authmethod = self._from_fbs.Authmethod()
            if _authmethod:
                self._authmethod = _authmethod.decode('utf8')
        return self._authmethod

    @authmethod.setter
    def authmethod(self, value: str):
        self._authmethod = value

    @property
    def authrealm(self) -> str:
        """
        The WAMP realm a cookie-authenticating session is to join.
        """
        if self._authrealm is None and self._from_fbs:
            _authrealm = self._from_fbs.Authrealm()
            if _authrealm:
                self._authrealm = _authrealm.decode('utf8')
        return self._authrealm

    @authrealm.setter
    def authrealm(self, value: str):
        self._authrealm = value

    @property
    def authextra(self) -> dict:
        """
        The WAMP authentication extra data to be returned to the client performing cookie-based authentication.
        """
        if self._authextra is None and self._from_fbs:
            _authextra = self._from_fbs.AuthExtraAsBytes()
            if _authextra:
                # stored CBOR-serialized in the flatbuffers byte vector
                self._authextra = cbor2.loads(_authextra)
            else:
                self._authextra = {}
        return self._authextra

    @authextra.setter
    def authextra(self, value: Optional[Dict[str, Any]]):
        assert value is None or type(value) == dict
        self._authextra = value

    @staticmethod
    def cast(buf) -> 'Cookie':
        """
        Cast a raw flatbuffers buffer into a :class:`Cookie` (lazily decoded).
        """
        return Cookie(_CookieGen.GetRootAsCookie(buf, 0))

    def build(self, builder):
        """
        Serialize this object into a new flatbuffers object using the given builder.

        :param builder: flatbuffers builder to use.
        :return: Offset of the finished Cookie table in the builder buffer.
        """
        # create string/byte-vector children first - flatbuffers requires all
        # nested objects to exist before the table itself is started
        oid = self.oid.bytes if self.oid else None
        if oid:
            oid = builder.CreateString(oid)

        name = self.name
        if name:
            name = builder.CreateString(name)

        value = self.value
        if value:
            value = builder.CreateString(value)

        authenticated_on_node = self.authenticated_on_node.bytes if self.authenticated_on_node else None
        if authenticated_on_node:
            authenticated_on_node = builder.CreateString(authenticated_on_node)

        authenticated_on_worker = self.authenticated_on_worker
        if authenticated_on_worker:
            authenticated_on_worker = builder.CreateString(authenticated_on_worker)

        # dict-valued fields are CBOR-serialized into byte vectors
        authenticated_transport_info = self.authenticated_transport_info
        if authenticated_transport_info:
            authenticated_transport_info = builder.CreateString(cbor2.dumps(authenticated_transport_info))

        authenticated_authmethod = self.authenticated_authmethod
        if authenticated_authmethod:
            authenticated_authmethod = builder.CreateString(authenticated_authmethod)

        authid = self.authid
        if authid:
            authid = builder.CreateString(authid)

        authrole = self.authrole
        if authrole:
            authrole = builder.CreateString(authrole)

        authmethod = self.authmethod
        if authmethod:
            authmethod = builder.CreateString(authmethod)

        authrealm = self.authrealm
        if authrealm:
            authrealm = builder.CreateString(authrealm)

        authextra = self.authextra
        if authextra:
            authextra = builder.CreateString(cbor2.dumps(authextra))

        CookieGen.CookieStart(builder)

        if oid:
            CookieGen.CookieAddOid(builder, oid)
        if self.created:
            CookieGen.CookieAddCreated(builder, int(self.created))
        if self.max_age:
            CookieGen.CookieAddMaxAge(builder, self.max_age)
        if name:
            CookieGen.CookieAddName(builder, name)
        if value:
            CookieGen.CookieAddValue(builder, value)
        if self.authenticated:
            CookieGen.CookieAddAuthenticated(builder, int(self.authenticated))
        if authenticated_on_node:
            CookieGen.CookieAddAuthenticatedOnNode(builder, authenticated_on_node)
        if authenticated_on_worker:
            CookieGen.CookieAddAuthenticatedOnWorker(builder, authenticated_on_worker)
        if authenticated_transport_info:
            CookieGen.CookieAddAuthenticatedTransportInfo(builder, authenticated_transport_info)
        if self.authenticated_session:
            CookieGen.CookieAddAuthenticatedSession(builder, self.authenticated_session)
        if self.authenticated_joined_at:
            CookieGen.CookieAddAuthenticatedJoinedAt(builder, int(self.authenticated_joined_at))
        if authenticated_authmethod:
            CookieGen.CookieAddAuthenticatedAuthmethod(builder, authenticated_authmethod)
        if authid:
            CookieGen.CookieAddAuthid(builder, authid)
        if authrole:
            CookieGen.CookieAddAuthrole(builder, authrole)
        if authmethod:
            CookieGen.CookieAddAuthmethod(builder, authmethod)
        if authrealm:
            CookieGen.CookieAddAuthrealm(builder, authrealm)
        if authextra:
            CookieGen.CookieAddAuthextra(builder, authextra)

        final = CookieGen.CookieEnd(builder)

        return final
@table('62f8c8c9-c50b-4686-bafe-38b221c64a0c', build=Cookie.build, cast=Cookie.cast)
class Cookies(MapUuidFlatBuffers):
    """
    Persisted cookies table.

    Map :class:`zlmdb.MapUuidFlatBuffers` from ``cookie_oid`` to
    :class:`cfxdb.cookiestore.Cookie`, with records stored as flatbuffers
    (serialized via :meth:`Cookie.build`, deserialized via :meth:`Cookie.cast`).
    """
@table('65e1d8c1-fa8b-459d-ae43-cb320d28cc97')
class IndexCookiesByValue(MapStringUuid):
    """
    Index on the cookies table: ``cookie_value`` -> ``cookie_oid``.

    Allows looking up the cookie record OID from the cookie value as sent
    in the HTTP header.
    """
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import zlmdb | ||
| from cfxdb.cookiestore._cookie import Cookies, IndexCookiesByValue | ||
class CookieStoreSchema(object):
    """
    Persistent cookie store.
    """

    # Cookies persisted in this cookie store.
    cookies: Cookies

    # Index of cookies by cookie value.
    idx_cookies_by_value: IndexCookiesByValue

    def __init__(self, db):
        self.db = db

    @staticmethod
    def attach(db: zlmdb.Database) -> 'CookieStoreSchema':
        """
        Factory to create a schema from attaching to a database. The schema tables
        will be automatically mapped as persistent maps and attached to the
        database slots.
        """
        schema = CookieStoreSchema(db)

        schema.cookies = db.attach_table(Cookies)
        schema.idx_cookies_by_value = db.attach_table(IndexCookiesByValue)

        # maintain the value->oid index automatically on cookie writes
        schema.cookies.attach_index('idx1', schema.idx_cookies_by_value,
                                    lambda rec: rec.value)

        return schema
| # automatically generated by the FlatBuffers compiler, do not modify | ||
| # namespace: cookiestore | ||
| import flatbuffers | ||
| from flatbuffers.compat import import_numpy | ||
| np = import_numpy() | ||
# Persistent cookies, as used in WAMP-Cookie authentication by router and proxy workers.
class Cookie(object):
    """
    Auto-generated flatbuffers accessor for ``Cookie`` table records
    (namespace ``cookiestore``). Field accessors read lazily from the
    underlying buffer via vtable offsets; absent scalar fields return the
    schema default (0), absent string/vector fields return None.
    """
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Cookie()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsCookie(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)

    # Cookie
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # Database ID of this cookie record.
    # Cookie
    def Oid(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # Cookie
    def OidAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0

    # Cookie
    def OidLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Cookie
    def OidIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        return o == 0

    # Timestamp when the cookie was created. Epoch time in ns.
    # Cookie
    def Created(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # Cookie maximum age (lifetime of the cookie in seconds, see http://tools.ietf.org/html/rfc6265#page-20), e.g. ``604800"``.
    # Cookie
    def MaxAge(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # Cookie name, as set in HTTP header, e.g. ``"cbtid"``.
    # Cookie
    def Name(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # Cookie value, as set in HTTP header, e.g. ``"gn2ri8fuAYQse50/L6N7jnt2"``.
    # Cookie
    def Value(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # Timestamp when the cookie was authenticated (if any). Epoch time in ns.
    # Cookie
    def Authenticated(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # The Crossbar.io node (within the management domain) the cookie was authenticated on (if any).
    # Cookie
    def AuthenticatedOnNode(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # Cookie
    def AuthenticatedOnNodeAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0

    # Cookie
    def AuthenticatedOnNodeLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Cookie
    def AuthenticatedOnNodeIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        return o == 0

    # The Crossbar.io worker (within the node) the cookie was authenticated on (if any).
    # Cookie
    def AuthenticatedOnWorker(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # The client transport information for the connection the cookie was authenticated in (if any).
    # Cookie
    def AuthenticatedTransportInfo(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # Cookie
    def AuthenticatedTransportInfoAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0

    # Cookie
    def AuthenticatedTransportInfoLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Cookie
    def AuthenticatedTransportInfoIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
        return o == 0

    # The WAMP session ID of the original authenticating session.
    # Cookie
    def AuthenticatedSession(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # Timestamp when the original authenticating session was welcome by the router. Epoch time in ns.
    # Cookie
    def AuthenticatedJoinedAt(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # The (original) WAMP authentication method, after which the client was authenticated before setting this cookie.
    # Cookie
    def AuthenticatedAuthmethod(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # The WAMP authid a cookie-authenticating session is to be assigned.
    # Cookie
    def Authid(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # The WAMP authrole a cookie-authenticating session is to join under.
    # Cookie
    def Authrole(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # The WAMP authmethod a cookie-authenticating session is to join under.
    # Cookie
    def Authmethod(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # The WAMP realm a cookie-authenticating session is to join.
    # Cookie
    def Authrealm(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # The WAMP authentication extra data to be returned to the client performing cookie-based authentication.
    # Cookie
    def Authextra(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # Cookie
    def AuthextraAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0

    # Cookie
    def AuthextraLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Cookie
    def AuthextraIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
        return o == 0
# Builder helper functions for serializing a Cookie table (17 fields).
# The ``CookieXxx`` variants are the deprecated pre-flatbuffers-2.0 names,
# kept for backward compatibility; each forwards to the corresponding
# ``Xxx`` function.

def Start(builder): builder.StartObject(17)

def CookieStart(builder):
    """This method is deprecated. Please switch to Start."""
    return Start(builder)

def AddOid(builder, oid): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(oid), 0)

def CookieAddOid(builder, oid):
    """This method is deprecated. Please switch to AddOid."""
    return AddOid(builder, oid)

def StartOidVector(builder, numElems): return builder.StartVector(1, numElems, 1)

def CookieStartOidVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartOidVector(builder, numElems)

def AddCreated(builder, created): builder.PrependUint64Slot(1, created, 0)

def CookieAddCreated(builder, created):
    """This method is deprecated. Please switch to AddCreated."""
    return AddCreated(builder, created)

def AddMaxAge(builder, maxAge): builder.PrependUint64Slot(2, maxAge, 0)

def CookieAddMaxAge(builder, maxAge):
    """This method is deprecated. Please switch to AddMaxAge."""
    return AddMaxAge(builder, maxAge)

def AddName(builder, name): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0)

def CookieAddName(builder, name):
    """This method is deprecated. Please switch to AddName."""
    return AddName(builder, name)

def AddValue(builder, value): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(value), 0)

def CookieAddValue(builder, value):
    """This method is deprecated. Please switch to AddValue."""
    return AddValue(builder, value)

def AddAuthenticated(builder, authenticated): builder.PrependUint64Slot(5, authenticated, 0)

def CookieAddAuthenticated(builder, authenticated):
    """This method is deprecated. Please switch to AddAuthenticated."""
    return AddAuthenticated(builder, authenticated)

def AddAuthenticatedOnNode(builder, authenticatedOnNode): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(authenticatedOnNode), 0)

def CookieAddAuthenticatedOnNode(builder, authenticatedOnNode):
    """This method is deprecated. Please switch to AddAuthenticatedOnNode."""
    return AddAuthenticatedOnNode(builder, authenticatedOnNode)

def StartAuthenticatedOnNodeVector(builder, numElems): return builder.StartVector(1, numElems, 1)

def CookieStartAuthenticatedOnNodeVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartAuthenticatedOnNodeVector(builder, numElems)

def AddAuthenticatedOnWorker(builder, authenticatedOnWorker): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(authenticatedOnWorker), 0)

def CookieAddAuthenticatedOnWorker(builder, authenticatedOnWorker):
    """This method is deprecated. Please switch to AddAuthenticatedOnWorker."""
    return AddAuthenticatedOnWorker(builder, authenticatedOnWorker)

def AddAuthenticatedTransportInfo(builder, authenticatedTransportInfo): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(authenticatedTransportInfo), 0)

def CookieAddAuthenticatedTransportInfo(builder, authenticatedTransportInfo):
    """This method is deprecated. Please switch to AddAuthenticatedTransportInfo."""
    return AddAuthenticatedTransportInfo(builder, authenticatedTransportInfo)

def StartAuthenticatedTransportInfoVector(builder, numElems): return builder.StartVector(1, numElems, 1)

def CookieStartAuthenticatedTransportInfoVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartAuthenticatedTransportInfoVector(builder, numElems)

def AddAuthenticatedSession(builder, authenticatedSession): builder.PrependUint64Slot(9, authenticatedSession, 0)

def CookieAddAuthenticatedSession(builder, authenticatedSession):
    """This method is deprecated. Please switch to AddAuthenticatedSession."""
    return AddAuthenticatedSession(builder, authenticatedSession)

def AddAuthenticatedJoinedAt(builder, authenticatedJoinedAt): builder.PrependUint64Slot(10, authenticatedJoinedAt, 0)

def CookieAddAuthenticatedJoinedAt(builder, authenticatedJoinedAt):
    """This method is deprecated. Please switch to AddAuthenticatedJoinedAt."""
    return AddAuthenticatedJoinedAt(builder, authenticatedJoinedAt)

def AddAuthenticatedAuthmethod(builder, authenticatedAuthmethod): builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(authenticatedAuthmethod), 0)

def CookieAddAuthenticatedAuthmethod(builder, authenticatedAuthmethod):
    """This method is deprecated. Please switch to AddAuthenticatedAuthmethod."""
    return AddAuthenticatedAuthmethod(builder, authenticatedAuthmethod)

def AddAuthid(builder, authid): builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(authid), 0)

def CookieAddAuthid(builder, authid):
    """This method is deprecated. Please switch to AddAuthid."""
    return AddAuthid(builder, authid)

def AddAuthrole(builder, authrole): builder.PrependUOffsetTRelativeSlot(13, flatbuffers.number_types.UOffsetTFlags.py_type(authrole), 0)

def CookieAddAuthrole(builder, authrole):
    """This method is deprecated. Please switch to AddAuthrole."""
    return AddAuthrole(builder, authrole)

def AddAuthmethod(builder, authmethod): builder.PrependUOffsetTRelativeSlot(14, flatbuffers.number_types.UOffsetTFlags.py_type(authmethod), 0)

def CookieAddAuthmethod(builder, authmethod):
    """This method is deprecated. Please switch to AddAuthmethod."""
    return AddAuthmethod(builder, authmethod)

def AddAuthrealm(builder, authrealm): builder.PrependUOffsetTRelativeSlot(15, flatbuffers.number_types.UOffsetTFlags.py_type(authrealm), 0)
| def CookieAddAuthrealm(builder, authrealm): | ||
| """This method is deprecated. Please switch to AddAuthrealm.""" | ||
| return AddAuthrealm(builder, authrealm) | ||
| def AddAuthextra(builder, authextra): builder.PrependUOffsetTRelativeSlot(16, flatbuffers.number_types.UOffsetTFlags.py_type(authextra), 0) | ||
| def CookieAddAuthextra(builder, authextra): | ||
| """This method is deprecated. Please switch to AddAuthextra.""" | ||
| return AddAuthextra(builder, authextra) | ||
| def StartAuthextraVector(builder, numElems): return builder.StartVector(1, numElems, 1) | ||
| def CookieStartAuthextraVector(builder, numElems): | ||
| """This method is deprecated. Please switch to Start.""" | ||
| return StartAuthextraVector(builder, numElems) | ||
| def End(builder): return builder.EndObject() | ||
| def CookieEnd(builder): | ||
| """This method is deprecated. Please switch to End.""" | ||
| return End(builder) |
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: realmstore

# The end-to-end encryption algorithm with which the application payload is
# encrypted. FlatBuffers-style enum: plain integer class attributes (wire
# values), not a Python ``enum.Enum``.
class EncAlgo(object):
    # Unset
    NONE = 0
    # WAMP-cryptobox end-to-end encrypted application payload
    CRYPTOBOX = 1
    # MQTT passthrough-mode application payload
    MQTT = 2
    # XBR end-to-end encrypted application payload
    XBR = 3
    # NOTE(review): undocumented in the schema -- presumably an opaque
    # (uninterpreted) application payload; confirm against realmstore.fbs.
    OPAQUE = 4
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: realmstore

# The serializer with which the app payload was serialized before encryption.
# FlatBuffers-style enum: plain integer class attributes (wire values), not a
# Python ``enum.Enum``.
class EncSerializer(object):
    # Unset
    NONE = 0
    # Payload was serialized in JSON
    JSON = 1
    # Payload was serialized in MsgPack
    MSGPACK = 2
    # Payload was serialized in CBOR
    CBOR = 3
    # Payload was serialized in UBJSON
    UBJSON = 4
    # NOTE(review): the two members below are undocumented in the schema --
    # presumably opaque (uninterpreted) and FlatBuffers-serialized payloads.
    OPAQUE = 5
    FLATBUFFERS = 6
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: realmstore

import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()

# This table stores WAMP events dispatched to receivers, under WAMP subscriptions on URIs (or patterns).
# NOTE(review): read-only accessor over a serialized buffer. All accessors
# follow the standard FlatBuffers pattern: a vtable offset of 0 means the
# field is absent, and the schema default (0 / False) is returned instead.
class Event(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        # Read the root table position from the buffer and wrap it.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Event()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsEvent(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)

    # Event
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # Timestamp when the event was sent to the receiver. Epoch time in ns.
    # Event
    def Timestamp(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # The subscription ID this event is dispatched under.
    # Event
    def Subscription(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # The publication ID of the dispatched event.
    # Event
    def Publication(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # The WAMP session ID of the receiver.
    # Event
    def Receiver(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # Whether the message was retained by the broker on the topic, rather than just published.
    # Event
    def Retained(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # Whether this Event was to be acknowledged by the receiver.
    # Event
    def AcknowledgedDelivery(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

# -- Module-level builder API for the ``Event`` table. ``Event*``-prefixed
# wrappers are deprecated aliases kept by the FlatBuffers compiler for
# backward compatibility.
def Start(builder): builder.StartObject(6)
def EventStart(builder):
    """This method is deprecated. Please switch to Start."""
    return Start(builder)
def AddTimestamp(builder, timestamp): builder.PrependUint64Slot(0, timestamp, 0)
def EventAddTimestamp(builder, timestamp):
    """This method is deprecated. Please switch to AddTimestamp."""
    return AddTimestamp(builder, timestamp)
def AddSubscription(builder, subscription): builder.PrependUint64Slot(1, subscription, 0)
def EventAddSubscription(builder, subscription):
    """This method is deprecated. Please switch to AddSubscription."""
    return AddSubscription(builder, subscription)
def AddPublication(builder, publication): builder.PrependUint64Slot(2, publication, 0)
def EventAddPublication(builder, publication):
    """This method is deprecated. Please switch to AddPublication."""
    return AddPublication(builder, publication)
def AddReceiver(builder, receiver): builder.PrependUint64Slot(3, receiver, 0)
def EventAddReceiver(builder, receiver):
    """This method is deprecated. Please switch to AddReceiver."""
    return AddReceiver(builder, receiver)
def AddRetained(builder, retained): builder.PrependBoolSlot(4, retained, 0)
def EventAddRetained(builder, retained):
    """This method is deprecated. Please switch to AddRetained."""
    return AddRetained(builder, retained)
def AddAcknowledgedDelivery(builder, acknowledgedDelivery): builder.PrependBoolSlot(5, acknowledgedDelivery, 0)
def EventAddAcknowledgedDelivery(builder, acknowledgedDelivery):
    """This method is deprecated. Please switch to AddAcknowledgedDelivery."""
    return AddAcknowledgedDelivery(builder, acknowledgedDelivery)
def End(builder): return builder.EndObject()
def EventEnd(builder):
    """This method is deprecated. Please switch to End."""
    return End(builder)
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: realmstore

import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()

# This table stores WAMP publications with configurable amount of details, optionally including application payload.
# NOTE(review): read-only accessor over a serialized buffer. All accessors
# follow the standard FlatBuffers pattern: a vtable offset of 0 means the
# field is absent and the schema default (0 / False / None / "") is returned.
# ``*AsNumpy`` accessors return a numpy view of the vector (zero-copy);
# ``*Length``/``*IsNone`` report vector size/absence without reading elements.
class Publication(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        # Read the root table position from the buffer and wrap it.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Publication()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsPublication(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)

    # Publication
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # Timestamp when the publication was accepted by the broker. Epoch time in ns.
    # Publication
    def Timestamp(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # WAMP publication ID that was assigned by the broker.
    # Publication
    def Publication(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # WAMP session ID of the publisher.
    # Publication
    def Publisher(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # The WAMP or application URI of the PubSub topic the event was published to.
    # Publication
    def Topic(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None

    # Positional values for application-defined event payload.
    # Publication
    def Args(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # Publication
    def ArgsAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0

    # Publication
    def ArgsLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def ArgsIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        return o == 0

    # Keyword values for application-defined event payload.
    # Publication
    def Kwargs(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # Publication
    def KwargsAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0

    # Publication
    def KwargsLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def KwargsIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        return o == 0

    # Alternative, transparent payload. If given, ``args`` and ``kwargs`` must be left unset.
    # Publication
    def Payload(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # Publication
    def PayloadAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0

    # Publication
    def PayloadLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def PayloadIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        return o == 0

    # If ``True``, the broker was asked to acknowledge the publication with a success or error response.
    # Publication
    def Acknowledge(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # If ``True``, the broker was requested to retain this event.
    # Publication
    def Retain(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # If ``True``, the broker was asked to exclude the publisher from receiving the event.
    # Publication
    def ExcludeMe(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # List of WAMP session IDs to exclude from receiving this event.
    # Publication
    def Exclude(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
        return 0

    # Publication
    def ExcludeAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o)
        return 0

    # Publication
    def ExcludeLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def ExcludeIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        return o == 0

    # List of WAMP authids to exclude from receiving this event.
    # Publication
    def ExcludeAuthid(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""

    # Publication
    def ExcludeAuthidLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def ExcludeAuthidIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
        return o == 0

    # List of WAMP authroles to exclude from receiving this event.
    # Publication
    def ExcludeAuthrole(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""

    # Publication
    def ExcludeAuthroleLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def ExcludeAuthroleIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
        return o == 0

    # List of WAMP session IDs eligible to receive this event.
    # Publication
    def Eligible(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
        return 0

    # Publication
    def EligibleAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o)
        return 0

    # Publication
    def EligibleLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def EligibleIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        return o == 0

    # List of WAMP authids eligible to receive this event.
    # Publication
    def EligibleAuthid(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""

    # Publication
    def EligibleAuthidLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def EligibleAuthidIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        return o == 0

    # List of WAMP authroles eligible to receive this event.
    # Publication
    def EligibleAuthrole(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""

    # Publication
    def EligibleAuthroleLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def EligibleAuthroleIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
        return o == 0

    # When using payload transparency, the encoding algorithm that was used to encode the payload.
    # Publication
    def EncAlgo(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
        return 0

    # When using payload transparency with an encryption algorithm, the payload encryption key.
    # Publication
    def EncKey(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0

    # Publication
    def EncKeyAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0

    # Publication
    def EncKeyLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # Publication
    def EncKeyIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        return o == 0

    # When using payload transparency, the payload object serializer that was used encoding the payload.
    # Publication
    def EncSerializer(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(40))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
        return 0
# -- Module-level builder API for the ``Publication`` table. Each
# ``Add<Field>`` call writes one field slot (by slot index, 0..18) into the
# table currently under construction in ``builder``; ``Start*Vector`` begins
# a vector with the given element size/alignment; ``End`` finishes the table.
# The ``Publication*``-prefixed wrappers are deprecated aliases kept by the
# FlatBuffers compiler for backward compatibility.
def Start(builder): builder.StartObject(19)
def PublicationStart(builder):
    """This method is deprecated. Please switch to Start."""
    return Start(builder)
def AddTimestamp(builder, timestamp): builder.PrependUint64Slot(0, timestamp, 0)
def PublicationAddTimestamp(builder, timestamp):
    """This method is deprecated. Please switch to AddTimestamp."""
    return AddTimestamp(builder, timestamp)
def AddPublication(builder, publication): builder.PrependUint64Slot(1, publication, 0)
def PublicationAddPublication(builder, publication):
    """This method is deprecated. Please switch to AddPublication."""
    return AddPublication(builder, publication)
def AddPublisher(builder, publisher): builder.PrependUint64Slot(2, publisher, 0)
def PublicationAddPublisher(builder, publisher):
    """This method is deprecated. Please switch to AddPublisher."""
    return AddPublisher(builder, publisher)
def AddTopic(builder, topic): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(topic), 0)
def PublicationAddTopic(builder, topic):
    """This method is deprecated. Please switch to AddTopic."""
    return AddTopic(builder, topic)
def AddArgs(builder, args): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(args), 0)
def PublicationAddArgs(builder, args):
    """This method is deprecated. Please switch to AddArgs."""
    return AddArgs(builder, args)
def StartArgsVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def PublicationStartArgsVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartArgsVector(builder, numElems)
def AddKwargs(builder, kwargs): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(kwargs), 0)
def PublicationAddKwargs(builder, kwargs):
    """This method is deprecated. Please switch to AddKwargs."""
    return AddKwargs(builder, kwargs)
def StartKwargsVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def PublicationStartKwargsVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartKwargsVector(builder, numElems)
def AddPayload(builder, payload): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(payload), 0)
def PublicationAddPayload(builder, payload):
    """This method is deprecated. Please switch to AddPayload."""
    return AddPayload(builder, payload)
def StartPayloadVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def PublicationStartPayloadVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartPayloadVector(builder, numElems)
def AddAcknowledge(builder, acknowledge): builder.PrependBoolSlot(7, acknowledge, 0)
def PublicationAddAcknowledge(builder, acknowledge):
    """This method is deprecated. Please switch to AddAcknowledge."""
    return AddAcknowledge(builder, acknowledge)
def AddRetain(builder, retain): builder.PrependBoolSlot(8, retain, 0)
def PublicationAddRetain(builder, retain):
    """This method is deprecated. Please switch to AddRetain."""
    return AddRetain(builder, retain)
def AddExcludeMe(builder, excludeMe): builder.PrependBoolSlot(9, excludeMe, 0)
def PublicationAddExcludeMe(builder, excludeMe):
    """This method is deprecated. Please switch to AddExcludeMe."""
    return AddExcludeMe(builder, excludeMe)
def AddExclude(builder, exclude): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(exclude), 0)
def PublicationAddExclude(builder, exclude):
    """This method is deprecated. Please switch to AddExclude."""
    return AddExclude(builder, exclude)
def StartExcludeVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def PublicationStartExcludeVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartExcludeVector(builder, numElems)
def AddExcludeAuthid(builder, excludeAuthid): builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(excludeAuthid), 0)
def PublicationAddExcludeAuthid(builder, excludeAuthid):
    """This method is deprecated. Please switch to AddExcludeAuthid."""
    return AddExcludeAuthid(builder, excludeAuthid)
def StartExcludeAuthidVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def PublicationStartExcludeAuthidVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartExcludeAuthidVector(builder, numElems)
def AddExcludeAuthrole(builder, excludeAuthrole): builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(excludeAuthrole), 0)
def PublicationAddExcludeAuthrole(builder, excludeAuthrole):
    """This method is deprecated. Please switch to AddExcludeAuthrole."""
    return AddExcludeAuthrole(builder, excludeAuthrole)
def StartExcludeAuthroleVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def PublicationStartExcludeAuthroleVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartExcludeAuthroleVector(builder, numElems)
def AddEligible(builder, eligible): builder.PrependUOffsetTRelativeSlot(13, flatbuffers.number_types.UOffsetTFlags.py_type(eligible), 0)
def PublicationAddEligible(builder, eligible):
    """This method is deprecated. Please switch to AddEligible."""
    return AddEligible(builder, eligible)
def StartEligibleVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def PublicationStartEligibleVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartEligibleVector(builder, numElems)
def AddEligibleAuthid(builder, eligibleAuthid): builder.PrependUOffsetTRelativeSlot(14, flatbuffers.number_types.UOffsetTFlags.py_type(eligibleAuthid), 0)
def PublicationAddEligibleAuthid(builder, eligibleAuthid):
    """This method is deprecated. Please switch to AddEligibleAuthid."""
    return AddEligibleAuthid(builder, eligibleAuthid)
def StartEligibleAuthidVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def PublicationStartEligibleAuthidVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartEligibleAuthidVector(builder, numElems)
def AddEligibleAuthrole(builder, eligibleAuthrole): builder.PrependUOffsetTRelativeSlot(15, flatbuffers.number_types.UOffsetTFlags.py_type(eligibleAuthrole), 0)
def PublicationAddEligibleAuthrole(builder, eligibleAuthrole):
    """This method is deprecated. Please switch to AddEligibleAuthrole."""
    return AddEligibleAuthrole(builder, eligibleAuthrole)
def StartEligibleAuthroleVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def PublicationStartEligibleAuthroleVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartEligibleAuthroleVector(builder, numElems)
def AddEncAlgo(builder, encAlgo): builder.PrependUint8Slot(16, encAlgo, 0)
def PublicationAddEncAlgo(builder, encAlgo):
    """This method is deprecated. Please switch to AddEncAlgo."""
    return AddEncAlgo(builder, encAlgo)
def AddEncKey(builder, encKey): builder.PrependUOffsetTRelativeSlot(17, flatbuffers.number_types.UOffsetTFlags.py_type(encKey), 0)
def PublicationAddEncKey(builder, encKey):
    """This method is deprecated. Please switch to AddEncKey."""
    return AddEncKey(builder, encKey)
def StartEncKeyVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def PublicationStartEncKeyVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartEncKeyVector(builder, numElems)
def AddEncSerializer(builder, encSerializer): builder.PrependUint8Slot(18, encSerializer, 0)
def PublicationAddEncSerializer(builder, encSerializer):
    """This method is deprecated. Please switch to AddEncSerializer."""
    return AddEncSerializer(builder, encSerializer)
def End(builder): return builder.EndObject()
def PublicationEnd(builder):
    """This method is deprecated. Please switch to End."""
    return End(builder)
| # automatically generated by the FlatBuffers compiler, do not modify | ||
| # namespace: realmstore | ||
| import flatbuffers | ||
| from flatbuffers.compat import import_numpy | ||
| np = import_numpy() | ||
| # This table stores WAMP sessions and serves as an anchor for all usage related data. | ||
class Session(object):
    """
    FlatBuffers read accessor for a persisted WAMP session record
    (generated by the FlatBuffers compiler - do not modify by hand).

    Each field accessor returns the stored value, or the field's default
    (``0`` for scalars, ``None`` for strings) when the field is absent
    from the underlying buffer.
    """
    __slots__ = ['_tab']
    @classmethod
    def GetRootAs(cls, buf, offset=0):
        # Resolve the root table offset and wrap ``buf`` in an accessor.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Session()
        x.Init(buf, n + offset)
        return x
    @classmethod
    def GetRootAsSession(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    # Session
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)
    # OID of the application realm this session is/was joined on.
    # Session
    def ArealmOid(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0
    # Session
    def ArealmOidAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0
    # Session
    def ArealmOidLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Session
    def ArealmOidIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        return o == 0
    # Unlimited time, globally unique, long-term OID of this session. The pair of WAMP session ID and join time ``(session, joined_at)`` bidirectionally maps to session ``oid``.
    # Session
    def Oid(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0
    # Session
    def OidAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0
    # Session
    def OidLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Session
    def OidIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        return o == 0
    # The WAMP session_id of the session.
    # Session
    def Session(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0
    # Timestamp when the session was joined by the router. Epoch time in ns.
    # Session
    def JoinedAt(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0
    # Timestamp when the session left the router. Epoch time in ns.
    # Session
    def LeftAt(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0
    # OID of the node of the router worker hosting this session.
    # Session
    def NodeOid(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0
    # Session
    def NodeOidAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0
    # Session
    def NodeOidLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Session
    def NodeOidIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        return o == 0
    # Name (management realm WAMP authid) of the node of the router worker hosting this session.
    # Session
    def NodeAuthid(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # Local worker name of the router worker hosting this session.
    # Session
    def WorkerName(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # Local worker PID of the router worker hosting this session.
    # Session
    def WorkerPid(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0
    # Session transport information, the incoming frontend client connection in proxy-router setups. This is also returned as part of authextra to the client.
    # Session
    def Transport(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0
    # Session
    def TransportAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0
    # Session
    def TransportLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Session
    def TransportIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        return o == 0
    # From proxy (in proxy-router cluster setups): OID of the node of the proxy worker hosting this session.
    # Session
    def ProxyNodeOid(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0
    # Session
    def ProxyNodeOidAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0
    # Session
    def ProxyNodeOidLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Session
    def ProxyNodeOidIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        return o == 0
    # From proxy (in proxy-router cluster setups): Name (management realm WAMP authid) of the node of the proxy worker hosting this session.
    # Session
    def ProxyNodeAuthid(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # From proxy (in proxy-router cluster setups): Local worker name of the proxy worker hosting this session.
    # Session
    def ProxyWorkerName(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # From proxy (in proxy-router cluster setups): Local worker PID of the proxy worker hosting this session.
    # Session
    def ProxyWorkerPid(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0
    # From proxy (in proxy-router cluster setups): Session transport information, the transport from the proxy to the backend router.
    # Session
    def ProxyTransport(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0
    # Session
    def ProxyTransportAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0
    # Session
    def ProxyTransportLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Session
    def ProxyTransportIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        return o == 0
    # The WAMP realm (name) the session is/was joined on.
    # Session
    def Realm(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # The WAMP authid the session was authenticated under.
    # Session
    def Authid(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # The WAMP authrole the session was authenticated under.
    # Session
    def Authrole(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # The WAMP authmethod used to authenticate the session.
    # Session
    def Authmethod(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(40))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # The WAMP authprovider that was handling the session authentication.
    # Session
    def Authprovider(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(42))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # The WAMP authextra as provided to the authenticated session.
    # Session
    def Authextra(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(44))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0
    # Session
    def AuthextraAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(44))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0
    # Session
    def AuthextraLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(44))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Session
    def AuthextraIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(44))
        return o == 0
# Module-level builder API for the Session table (generated by flatc):
# call ``Start(builder)``, then any of the ``Add*`` setters, then
# ``End(builder)`` to serialize a Session record into a ``flatbuffers.Builder``.
# The ``Session*``-prefixed aliases are the deprecated pre-"onefile" names,
# kept for backward compatibility.
def Start(builder): builder.StartObject(21)
def SessionStart(builder):
    """This method is deprecated. Please switch to Start."""
    return Start(builder)
def AddArealmOid(builder, arealmOid): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(arealmOid), 0)
def SessionAddArealmOid(builder, arealmOid):
    """This method is deprecated. Please switch to AddArealmOid."""
    return AddArealmOid(builder, arealmOid)
def StartArealmOidVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def SessionStartArealmOidVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartArealmOidVector(builder, numElems)
def AddOid(builder, oid): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(oid), 0)
def SessionAddOid(builder, oid):
    """This method is deprecated. Please switch to AddOid."""
    return AddOid(builder, oid)
def StartOidVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def SessionStartOidVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartOidVector(builder, numElems)
def AddSession(builder, session): builder.PrependUint64Slot(2, session, 0)
def SessionAddSession(builder, session):
    """This method is deprecated. Please switch to AddSession."""
    return AddSession(builder, session)
def AddJoinedAt(builder, joinedAt): builder.PrependUint64Slot(3, joinedAt, 0)
def SessionAddJoinedAt(builder, joinedAt):
    """This method is deprecated. Please switch to AddJoinedAt."""
    return AddJoinedAt(builder, joinedAt)
def AddLeftAt(builder, leftAt): builder.PrependUint64Slot(4, leftAt, 0)
def SessionAddLeftAt(builder, leftAt):
    """This method is deprecated. Please switch to AddLeftAt."""
    return AddLeftAt(builder, leftAt)
def AddNodeOid(builder, nodeOid): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(nodeOid), 0)
def SessionAddNodeOid(builder, nodeOid):
    """This method is deprecated. Please switch to AddNodeOid."""
    return AddNodeOid(builder, nodeOid)
def StartNodeOidVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def SessionStartNodeOidVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartNodeOidVector(builder, numElems)
def AddNodeAuthid(builder, nodeAuthid): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(nodeAuthid), 0)
def SessionAddNodeAuthid(builder, nodeAuthid):
    """This method is deprecated. Please switch to AddNodeAuthid."""
    return AddNodeAuthid(builder, nodeAuthid)
def AddWorkerName(builder, workerName): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(workerName), 0)
def SessionAddWorkerName(builder, workerName):
    """This method is deprecated. Please switch to AddWorkerName."""
    return AddWorkerName(builder, workerName)
def AddWorkerPid(builder, workerPid): builder.PrependInt32Slot(8, workerPid, 0)
def SessionAddWorkerPid(builder, workerPid):
    """This method is deprecated. Please switch to AddWorkerPid."""
    return AddWorkerPid(builder, workerPid)
def AddTransport(builder, transport): builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(transport), 0)
def SessionAddTransport(builder, transport):
    """This method is deprecated. Please switch to AddTransport."""
    return AddTransport(builder, transport)
def StartTransportVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def SessionStartTransportVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartTransportVector(builder, numElems)
def AddProxyNodeOid(builder, proxyNodeOid): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(proxyNodeOid), 0)
def SessionAddProxyNodeOid(builder, proxyNodeOid):
    """This method is deprecated. Please switch to AddProxyNodeOid."""
    return AddProxyNodeOid(builder, proxyNodeOid)
def StartProxyNodeOidVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def SessionStartProxyNodeOidVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartProxyNodeOidVector(builder, numElems)
def AddProxyNodeAuthid(builder, proxyNodeAuthid): builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(proxyNodeAuthid), 0)
def SessionAddProxyNodeAuthid(builder, proxyNodeAuthid):
    """This method is deprecated. Please switch to AddProxyNodeAuthid."""
    return AddProxyNodeAuthid(builder, proxyNodeAuthid)
def AddProxyWorkerName(builder, proxyWorkerName): builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(proxyWorkerName), 0)
def SessionAddProxyWorkerName(builder, proxyWorkerName):
    """This method is deprecated. Please switch to AddProxyWorkerName."""
    return AddProxyWorkerName(builder, proxyWorkerName)
def AddProxyWorkerPid(builder, proxyWorkerPid): builder.PrependInt32Slot(13, proxyWorkerPid, 0)
def SessionAddProxyWorkerPid(builder, proxyWorkerPid):
    """This method is deprecated. Please switch to AddProxyWorkerPid."""
    return AddProxyWorkerPid(builder, proxyWorkerPid)
def AddProxyTransport(builder, proxyTransport): builder.PrependUOffsetTRelativeSlot(14, flatbuffers.number_types.UOffsetTFlags.py_type(proxyTransport), 0)
def SessionAddProxyTransport(builder, proxyTransport):
    """This method is deprecated. Please switch to AddProxyTransport."""
    return AddProxyTransport(builder, proxyTransport)
def StartProxyTransportVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def SessionStartProxyTransportVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartProxyTransportVector(builder, numElems)
def AddRealm(builder, realm): builder.PrependUOffsetTRelativeSlot(15, flatbuffers.number_types.UOffsetTFlags.py_type(realm), 0)
def SessionAddRealm(builder, realm):
    """This method is deprecated. Please switch to AddRealm."""
    return AddRealm(builder, realm)
def AddAuthid(builder, authid): builder.PrependUOffsetTRelativeSlot(16, flatbuffers.number_types.UOffsetTFlags.py_type(authid), 0)
def SessionAddAuthid(builder, authid):
    """This method is deprecated. Please switch to AddAuthid."""
    return AddAuthid(builder, authid)
def AddAuthrole(builder, authrole): builder.PrependUOffsetTRelativeSlot(17, flatbuffers.number_types.UOffsetTFlags.py_type(authrole), 0)
def SessionAddAuthrole(builder, authrole):
    """This method is deprecated. Please switch to AddAuthrole."""
    return AddAuthrole(builder, authrole)
def AddAuthmethod(builder, authmethod): builder.PrependUOffsetTRelativeSlot(18, flatbuffers.number_types.UOffsetTFlags.py_type(authmethod), 0)
def SessionAddAuthmethod(builder, authmethod):
    """This method is deprecated. Please switch to AddAuthmethod."""
    return AddAuthmethod(builder, authmethod)
def AddAuthprovider(builder, authprovider): builder.PrependUOffsetTRelativeSlot(19, flatbuffers.number_types.UOffsetTFlags.py_type(authprovider), 0)
def SessionAddAuthprovider(builder, authprovider):
    """This method is deprecated. Please switch to AddAuthprovider."""
    return AddAuthprovider(builder, authprovider)
def AddAuthextra(builder, authextra): builder.PrependUOffsetTRelativeSlot(20, flatbuffers.number_types.UOffsetTFlags.py_type(authextra), 0)
def SessionAddAuthextra(builder, authextra):
    """This method is deprecated. Please switch to AddAuthextra."""
    return AddAuthextra(builder, authextra)
def StartAuthextraVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def SessionStartAuthextraVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartAuthextraVector(builder, numElems)
def End(builder): return builder.EndObject()
def SessionEnd(builder):
    """This method is deprecated. Please switch to End."""
    return End(builder)
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| from cfxdb.realmstore._session import Session, Sessions, IndexSessionsBySessionId | ||
| from cfxdb.realmstore._event import Event, Events | ||
| from cfxdb.realmstore._publication import Publication, Publications | ||
| from cfxdb.realmstore._schema import RealmStore | ||
# Public API of the ``cfxdb.realmstore`` package (re-exported above).
__all__ = (
    'Session',
    'Sessions',
    'IndexSessionsBySessionId',
    'Event',
    'Events',
    'Publication',
    'Publications',
    'RealmStore',
)
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pprint | ||
| from zlmdb import table, MapOidTimestampFlatBuffers | ||
| from cfxdb.gen.realmstore import Event as EventGen | ||
class Event(object):
    """
    Persisted event database object.

    Either wraps a FlatBuffers ``realmstore.Event`` record (see
    :meth:`Event.cast`), from which attributes are read lazily and cached,
    or represents a fresh object whose attributes are set via the property
    setters and serialized via :meth:`Event.build`.
    """

    # Fix the attribute set and reduce per-instance memory, consistent
    # with the sibling Publication class in this package.
    __slots__ = (
        '_from_fbs',
        '_timestamp',
        '_subscription',
        '_publication',
        '_receiver',
        '_retained',
        '_acknowledged_delivery',
    )

    def __init__(self, from_fbs=None):
        """
        :param from_fbs: Optional FlatBuffers object to lazily read attributes from.
        """
        self._from_fbs = from_fbs
        self._timestamp = None
        self._subscription = None
        self._publication = None
        self._receiver = None
        self._retained = None
        self._acknowledged_delivery = None

    def marshal(self):
        """
        Marshal this object into a plain dict (one key per attribute).

        :returns: dict representation of this event
        :rtype: dict
        """
        obj = {
            'timestamp': self.timestamp,
            'subscription': self.subscription,
            'publication': self.publication,
            'receiver': self.receiver,
            'retained': self.retained,
            'acknowledged_delivery': self.acknowledged_delivery,
        }
        return obj

    def __str__(self):
        return '\n{}\n'.format(pprint.pformat(self.marshal()))

    @property
    def timestamp(self):
        """
        Timestamp when the event was sent to the receiver. Epoch time in ns.

        :returns: Epoch time in ns.
        :rtype: int
        """
        if self._timestamp is None and self._from_fbs:
            self._timestamp = self._from_fbs.Timestamp()
        return self._timestamp

    @timestamp.setter
    def timestamp(self, value):
        assert type(value) == int
        self._timestamp = value

    @property
    def subscription(self):
        """
        The subscription ID this event is dispatched under.

        :returns: The subscription ID.
        :rtype: int
        """
        if self._subscription is None and self._from_fbs:
            self._subscription = self._from_fbs.Subscription()
        return self._subscription

    @subscription.setter
    def subscription(self, value):
        assert type(value) == int
        self._subscription = value

    @property
    def publication(self):
        """
        The publication ID of the dispatched event.

        :returns: The publication ID.
        :rtype: int
        """
        if self._publication is None and self._from_fbs:
            self._publication = self._from_fbs.Publication()
        return self._publication

    @publication.setter
    def publication(self, value):
        assert type(value) == int
        self._publication = value

    @property
    def receiver(self):
        """
        The WAMP session ID of the receiver.

        :returns: The receiver ID.
        :rtype: int
        """
        if self._receiver is None and self._from_fbs:
            self._receiver = self._from_fbs.Receiver()
        return self._receiver

    @receiver.setter
    def receiver(self, value):
        assert type(value) == int
        self._receiver = value

    @property
    def retained(self):
        """
        Whether the message was retained by the broker on the topic, rather than just published.

        :returns: retained flag
        :rtype: bool
        """
        if self._retained is None and self._from_fbs:
            self._retained = self._from_fbs.Retained()
        return self._retained

    @retained.setter
    def retained(self, value):
        assert type(value) == bool
        self._retained = value

    @property
    def acknowledged_delivery(self):
        """
        Whether this Event was to be acknowledged by the receiver.

        :returns: acknowledged delivery flag
        :rtype: bool
        """
        if self._acknowledged_delivery is None and self._from_fbs:
            self._acknowledged_delivery = self._from_fbs.AcknowledgedDelivery()
        return self._acknowledged_delivery

    @acknowledged_delivery.setter
    def acknowledged_delivery(self, value):
        assert type(value) == bool
        self._acknowledged_delivery = value

    @staticmethod
    def cast(buf):
        """
        Cast a raw FlatBuffers-serialized buffer into an :class:`Event` wrapper.
        """
        return Event(EventGen.Event.GetRootAsEvent(buf, 0))

    def build(self, builder):
        """
        Serialize this object into the given FlatBuffers builder.

        :param builder: the ``flatbuffers.Builder`` to serialize into
        :returns: offset of the finished Event table within the builder
        """
        # now start and build a new object ..
        EventGen.EventStart(builder)
        EventGen.EventAddTimestamp(builder, self.timestamp)
        EventGen.EventAddSubscription(builder, self.subscription)
        EventGen.EventAddPublication(builder, self.publication)
        EventGen.EventAddReceiver(builder, self.receiver)
        # optional flags are only written when explicitly set
        if self.retained is not None:
            EventGen.EventAddRetained(builder, self.retained)
        if self.acknowledged_delivery is not None:
            EventGen.EventAddAcknowledgedDelivery(builder, self.acknowledged_delivery)
        # finish the object.
        final = EventGen.EventEnd(builder)
        return final
@table('40a9df31-6065-496f-809f-027a1879654c', build=Event.build, cast=Event.cast)
class Events(MapOidTimestampFlatBuffers):
    """
    Persisted events archive.

    Map :class:`zlmdb.MapOidTimestampFlatBuffers` from ``(oid, timestamp)``
    keys to :class:`cfxdb.realmstore.Event` values.
    """
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pprint | ||
| import cbor2 | ||
| import flatbuffers | ||
| from zlmdb import table, MapOidFlatBuffers | ||
| from cfxdb.gen.realmstore import Publication as PublicationGen | ||
class _Publication(PublicationGen.Publication):
    """
    Expand methods on the class code generated by flatc.

    FIXME: comes up with a PR for flatc to generated this stuff automatically.
    """
    @classmethod
    def GetRootAsPublication(cls, buf, offset):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = _Publication()
        x.Init(buf, n + offset)
        return x

    def _VectorAsBytes(self, voffset):
        """
        Return a zero-copy ``memoryview`` over the ``[ubyte]`` vector field
        at the given vtable offset, or ``None`` if the field is absent.
        """
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(voffset))
        if o != 0:
            _off = self._tab.Vector(o)
            _len = self._tab.VectorLen(o)
            return memoryview(self._tab.Bytes)[_off:_off + _len]
        return None

    # vtable offsets below must match the generated Publication accessors
    def ArgsAsBytes(self):
        return self._VectorAsBytes(12)

    def KwargsAsBytes(self):
        return self._VectorAsBytes(14)

    def PayloadAsBytes(self):
        return self._VectorAsBytes(16)

    def EncKeyAsBytes(self):
        return self._VectorAsBytes(38)
| class Publication(object): | ||
| """ | ||
| Persisted publication database object. | ||
| """ | ||
    # End-to-end payload encryption algorithm identifiers, stored in the
    # ``enc_algo`` attribute (names per WAMP payload-transparency conventions).
    ENC_ALGO_NONE = 0
    ENC_ALGO_CRYPTOBOX = 1
    ENC_ALGO_MQTT = 2
    ENC_ALGO_XBR = 3

    # Payload serializer identifiers, stored in the ``enc_serializer`` attribute.
    ENC_SER_NONE = 0
    ENC_SER_JSON = 1
    ENC_SER_MSGPACK = 2
    ENC_SER_CBOR = 3
    ENC_SER_UBJSON = 4
    ENC_SER_OPAQUE = 5
    ENC_SER_FLATBUFFERS = 6

    # Fixed attribute set: the wrapped FlatBuffers source plus one lazy
    # cache slot per publication attribute.
    __slots__ = (
        '_from_fbs',
        '_timestamp',
        '_publication',
        '_publisher',
        '_topic',
        '_args',
        '_kwargs',
        '_payload',
        '_acknowledge',
        '_retain',
        '_exclude_me',
        '_exclude',
        '_exclude_authid',
        '_exclude_authrole',
        '_eligible',
        '_eligible_authid',
        '_eligible_authrole',
        '_enc_algo',
        '_enc_key',
        '_enc_serializer',
    )
| def __init__(self, from_fbs=None): | ||
| self._from_fbs = from_fbs | ||
| self._timestamp = None | ||
| self._publication = None | ||
| self._publisher = None | ||
| self._topic = None | ||
| self._args = None | ||
| self._kwargs = None | ||
| self._payload = None | ||
| self._acknowledge = None | ||
| self._retain = None | ||
| self._exclude_me = None | ||
| self._exclude = None | ||
| self._exclude_authid = None | ||
| self._exclude_authrole = None | ||
| self._eligible = None | ||
| self._eligible_authid = None | ||
| self._eligible_authrole = None | ||
| self._enc_algo = None | ||
| self._enc_key = None | ||
| self._enc_serializer = None | ||
| def marshal(self): | ||
| obj = { | ||
| 'timestamp': self.timestamp, | ||
| 'publication': self.publication, | ||
| 'publisher': self.publisher, | ||
| 'topic': self.topic, | ||
| 'args': self.args, | ||
| 'kwargs': self.kwargs, | ||
| 'payload': self.payload, | ||
| 'acknowledge': self.acknowledge, | ||
| 'retain': self.retain, | ||
| 'exclude_me': self.exclude_me, | ||
| 'exclude': self.exclude, | ||
| 'exclude_authid': self.exclude_authid, | ||
| 'exclude_authrole': self.exclude_authrole, | ||
| 'eligible': self.eligible, | ||
| 'eligible_authid': self.eligible_authid, | ||
| 'eligible_authrole': self.eligible_authrole, | ||
| 'enc_algo': self.enc_algo, | ||
| 'enc_key': self.enc_key, | ||
| 'enc_serializer': self.enc_serializer, | ||
| } | ||
| return obj | ||
| def __str__(self): | ||
| return '\n{}\n'.format(pprint.pformat(self.marshal())) | ||
| @property | ||
| def timestamp(self): | ||
| """ | ||
| Timestamp when the publication was accepted by the broker. Epoch time in ns. | ||
| :returns: epoch time in ns | ||
| :rtype: int | ||
| """ | ||
| if self._timestamp is None and self._from_fbs: | ||
| self._timestamp = self._from_fbs.Timestamp() | ||
| return self._timestamp | ||
| @timestamp.setter | ||
| def timestamp(self, value): | ||
| assert value is None or type(value) == int | ||
| self._timestamp = value | ||
| @property | ||
| def publication(self): | ||
| """ | ||
| WAMP publication ID that was assigned by the broker. | ||
| :returns: publication ID | ||
| :rtype: int | ||
| """ | ||
| if self._publication is None and self._from_fbs: | ||
| self._publication = self._from_fbs.Publication() | ||
| return self._publication | ||
| @publication.setter | ||
| def publication(self, value): | ||
| assert value is None or type(value) == int | ||
| self._publication = value | ||
| @property | ||
| def publisher(self): | ||
| """ | ||
| WAMP session ID of the publisher. | ||
| :returns: publisher ID | ||
| :rtype: int | ||
| """ | ||
| if self._publisher is None and self._from_fbs: | ||
| self._publisher = self._from_fbs.Publisher() | ||
| return self._publisher | ||
| @publisher.setter | ||
| def publisher(self, value): | ||
| assert value is None or type(value) == int | ||
| self._publisher = value | ||
| @property | ||
| def topic(self): | ||
| """ | ||
| The WAMP or application URI of the PubSub topic the event was published to. | ||
| :returns: topic (URI) published to | ||
| :rtype: str | ||
| """ | ||
| if self._topic is None and self._from_fbs: | ||
| self._topic = self._from_fbs.Topic().decode('utf8') | ||
| return self._topic | ||
| @topic.setter | ||
| def topic(self, value): | ||
| assert value is None or type(value) == str | ||
| self._topic = value | ||
| # | ||
| # args, kwargs, payload | ||
| # | ||
| @property | ||
| def args(self): | ||
| """ | ||
| Positional values for application-defined event payload. | ||
| :returns: positional arguments (app payload) of the event (if any) | ||
| :rtype: None or list | ||
| """ | ||
| if self._args is None and self._from_fbs: | ||
| if self._from_fbs.ArgsLength(): | ||
| self._args = cbor2.loads(bytes(self._from_fbs.ArgsAsBytes())) | ||
| return self._args | ||
| @args.setter | ||
| def args(self, value): | ||
| assert value is None or type(value) == list | ||
| self._args = value | ||
| @property | ||
| def kwargs(self): | ||
| """ | ||
| Keyword values for application-defined event payload. | ||
| :returns: keyword arguments (app payload) of the event (if any) | ||
| :rtype: None or dict | ||
| """ | ||
| if self._kwargs is None and self._from_fbs: | ||
| if self._from_fbs.KwargsLength(): | ||
| self._kwargs = cbor2.loads(bytes(self._from_fbs.KwargsAsBytes())) | ||
| return self._kwargs | ||
| @kwargs.setter | ||
| def kwargs(self, value): | ||
| assert value is None or type(value) == dict | ||
| self._kwargs = value | ||
| @property | ||
| def payload(self): | ||
| """ | ||
| Alternative, transparent payload. If given, ``args`` and ``kwargs`` must be left unset. | ||
| :returns: Transparent binary payload (see ``enc_algo``) if applicable | ||
| :rtype: None or bytes | ||
| """ | ||
| if self._payload is None and self._from_fbs: | ||
| if self._from_fbs.PayloadLength(): | ||
| self._payload = self._from_fbs.PayloadAsBytes() | ||
| return self._payload | ||
| @payload.setter | ||
| def payload(self, value): | ||
| assert value is None or type(value) == bytes | ||
| self._payload = value | ||
| # | ||
| # acknowledge, retain, exclude_me | ||
| # | ||
| @property | ||
| def acknowledge(self): | ||
| """ | ||
| If ``True``, the broker was asked to acknowledge the publication with a success or error response. | ||
| :returns: acknowledge flag | ||
| :rtype: None or bool | ||
| """ | ||
| if self._acknowledge is None and self._from_fbs: | ||
| self._acknowledge = self._from_fbs.Acknowledge() | ||
| return self._acknowledge | ||
| @acknowledge.setter | ||
| def acknowledge(self, value): | ||
| assert value is None or type(value) == bool | ||
| self._acknowledge = value | ||
| @property | ||
| def retain(self): | ||
| """ | ||
| If ``True``, the broker was requested to retain this event. | ||
| :returns: retain flag | ||
| :rtype: None or bool | ||
| """ | ||
| if self._retain is None and self._from_fbs: | ||
| self._retain = self._from_fbs.Retain() | ||
| return self._retain | ||
| @retain.setter | ||
| def retain(self, value): | ||
| assert value is None or type(value) == bool | ||
| self._retain = value | ||
| @property | ||
| def exclude_me(self): | ||
| """ | ||
| If ``True``, the broker was asked to exclude the publisher from receiving the event. | ||
| :returns: exclude_me flag | ||
| :rtype: None or bool | ||
| """ | ||
| if self._exclude_me is None and self._from_fbs: | ||
| self._exclude_me = self._from_fbs.ExcludeMe() | ||
| return self._exclude_me | ||
| @exclude_me.setter | ||
| def exclude_me(self, value): | ||
| assert value is None or type(value) == bool | ||
| self._exclude_me = value | ||
| # | ||
| # exclude, exclude_authid, exclude_authrole | ||
| # | ||
| @property | ||
| def exclude(self): | ||
| """ | ||
| List of WAMP session IDs to exclude from receiving this event. | ||
| :returns: list of excluded session IDs | ||
| :rtype: list[int] | ||
| """ | ||
| if self._exclude is None and self._from_fbs: | ||
| if self._from_fbs.ExcludeLength(): | ||
| exclude = [] | ||
| for j in range(self._from_fbs.ExcludeLength()): | ||
| exclude.append(self._from_fbs.Exclude(j)) | ||
| self._exclude = exclude | ||
| return self._exclude | ||
| @exclude.setter | ||
| def exclude(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == int | ||
| self._exclude = value | ||
| @property | ||
| def exclude_authid(self): | ||
| """ | ||
| List of WAMP authids to exclude from receiving this event. | ||
| :returns: list of excluded authids | ||
| :rtype: list[str] | ||
| """ | ||
| if self._exclude_authid is None and self._from_fbs: | ||
| if self._from_fbs.ExcludeAuthidLength(): | ||
| exclude_authid = [] | ||
| for j in range(self._from_fbs.ExcludeAuthidLength()): | ||
| exclude_authid.append(self._from_fbs.ExcludeAuthid(j).decode('utf8')) | ||
| self._exclude_authid = exclude_authid | ||
| return self._exclude_authid | ||
| @exclude_authid.setter | ||
| def exclude_authid(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == str | ||
| self._exclude_authid = value | ||
| @property | ||
| def exclude_authrole(self): | ||
| """ | ||
| List of WAMP authroles to exclude from receiving this event. | ||
| :returns: list of excluded authroles | ||
| :rtype: list[str] | ||
| """ | ||
| if self._exclude_authrole is None and self._from_fbs: | ||
| if self._from_fbs.ExcludeAuthroleLength(): | ||
| exclude_authrole = [] | ||
| for j in range(self._from_fbs.ExcludeAuthroleLength()): | ||
| exclude_authrole.append(self._from_fbs.ExcludeAuthrole(j).decode('utf8')) | ||
| self._exclude_authrole = exclude_authrole | ||
| return self._exclude_authrole | ||
| @exclude_authrole.setter | ||
| def exclude_authrole(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == str | ||
| self._exclude_authrole = value | ||
| # | ||
| # eligible, eligible_authid, eligible_authrole | ||
| # | ||
| @property | ||
| def eligible(self): | ||
| """ | ||
| List of WAMP session IDs eligible to receive this event. | ||
| :returns: list of eligible session IDs | ||
| :rtype: list[int] | ||
| """ | ||
| if self._eligible is None and self._from_fbs: | ||
| if self._from_fbs.EligibleLength(): | ||
| eligible = [] | ||
| for j in range(self._from_fbs.EligibleLength()): | ||
| eligible.append(self._from_fbs.Eligible(j)) | ||
| self._eligible = eligible | ||
| return self._eligible | ||
| @eligible.setter | ||
| def eligible(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == int | ||
| self._eligible = value | ||
| @property | ||
| def eligible_authid(self): | ||
| """ | ||
| List of WAMP authids eligible to receive this event. | ||
| :returns: list of eligible authids | ||
| :rtype: list[str] | ||
| """ | ||
| if self._eligible_authid is None and self._from_fbs: | ||
| if self._from_fbs.EligibleAuthidLength(): | ||
| eligible_authid = [] | ||
| for j in range(self._from_fbs.EligibleAuthidLength()): | ||
| eligible_authid.append(self._from_fbs.EligibleAuthid(j).decode('utf8')) | ||
| self._eligible_authid = eligible_authid | ||
| return self._eligible_authid | ||
| @eligible_authid.setter | ||
| def eligible_authid(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == str | ||
| self._eligible_authid = value | ||
| @property | ||
| def eligible_authrole(self): | ||
| """ | ||
| List of WAMP authroles eligible to receive this event. | ||
| :returns: list of eligible authroles | ||
| :rtype: list[str] | ||
| """ | ||
| if self._eligible_authrole is None and self._from_fbs: | ||
| if self._from_fbs.EligibleAuthroleLength(): | ||
| eligible_authrole = [] | ||
| for j in range(self._from_fbs.EligibleAuthroleLength()): | ||
| eligible_authrole.append(self._from_fbs.EligibleAuthrole(j).decode('utf8')) | ||
| self._eligible_authrole = eligible_authrole | ||
| return self._eligible_authrole | ||
| @eligible_authrole.setter | ||
| def eligible_authrole(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == str | ||
| self._eligible_authrole = value | ||
| # | ||
| # encryption | ||
| # | ||
| @property | ||
| def enc_algo(self): | ||
| """ | ||
| When using payload transparency, the encoding algorithm that was used to encode the payload. | ||
| :returns: payload encryption algorithm | ||
| :rtype: int | ||
| """ | ||
| if self._enc_algo is None and self._from_fbs: | ||
| self._enc_algo = self._from_fbs.EncAlgo() | ||
| return self._enc_algo | ||
| @enc_algo.setter | ||
| def enc_algo(self, value): | ||
| assert value is None or value in [self.ENC_ALGO_CRYPTOBOX, self.ENC_ALGO_MQTT, self.ENC_ALGO_XBR] | ||
| self._enc_algo = value | ||
| @property | ||
| def enc_key(self): | ||
| """ | ||
| When using payload transparency with an encryption algorithm, the payload encryption key. | ||
| :returns: payload key | ||
| :rtype: None or bytes | ||
| """ | ||
| if self._enc_key is None and self._from_fbs: | ||
| if self._from_fbs.EncKeyLength(): | ||
| self._enc_key = self._from_fbs.EncKeyAsBytes() | ||
| return self._enc_key | ||
| @enc_key.setter | ||
| def enc_key(self, value): | ||
| assert value is None or type(value) == bytes | ||
| self._enc_key = value | ||
| @property | ||
| def enc_serializer(self): | ||
| """ | ||
| When using payload transparency, the payload object serializer that was used encoding the payload. | ||
| :returns: payload serializer | ||
| :rtype: int | ||
| """ | ||
| if self._enc_serializer is None and self._from_fbs: | ||
| self._enc_serializer = self._from_fbs.EncSerializer() | ||
| return self._enc_serializer | ||
| @enc_serializer.setter | ||
| def enc_serializer(self, value): | ||
| assert value is None or value in [ | ||
| self.ENC_SER_JSON, self.ENC_SER_MSGPACK, self.ENC_SER_CBOR, self.ENC_SER_UBJSON | ||
| ] | ||
| self._enc_serializer = value | ||
    @staticmethod
    def cast(buf):
        """
        Parse a ``Publication`` database object from a FlatBuffers buffer.

        :param buf: The raw buffer (eg as stored in the database).
        :returns: New :class:`Publication` instance wrapping the (lazily
            decoded) FlatBuffers object.
        """
        return Publication(_Publication.GetRootAsPublication(buf, 0))
| def build(self, builder): | ||
| args = self.args | ||
| if args: | ||
| args = builder.CreateString(cbor2.dumps(args)) | ||
| kwargs = self.kwargs | ||
| if kwargs: | ||
| kwargs = builder.CreateString(cbor2.dumps(kwargs)) | ||
| payload = self.payload | ||
| if payload: | ||
| payload = builder.CreateString(payload) | ||
| topic = self.topic | ||
| if topic: | ||
| topic = builder.CreateString(topic) | ||
| enc_key = self.enc_key | ||
| if enc_key: | ||
| enc_key = builder.CreateString(enc_key) | ||
| # exclude: [int] | ||
| exclude = self.exclude | ||
| if exclude: | ||
| PublicationGen.PublicationStartExcludeAuthidVector(builder, len(exclude)) | ||
| for session in reversed(exclude): | ||
| builder.PrependUint64(session) | ||
| exclude = builder.EndVector() | ||
| # exclude_authid: [string] | ||
| exclude_authid = self.exclude_authid | ||
| if exclude_authid: | ||
| _exclude_authid = [] | ||
| for authid in exclude_authid: | ||
| _exclude_authid.append(builder.CreateString(authid)) | ||
| PublicationGen.PublicationStartExcludeAuthidVector(builder, len(_exclude_authid)) | ||
| for o in reversed(_exclude_authid): | ||
| builder.PrependUOffsetTRelative(o) | ||
| exclude_authid = builder.EndVector() | ||
| # exclude_authrole: [string] | ||
| exclude_authrole = self.exclude_authrole | ||
| if exclude_authid: | ||
| _exclude_authrole = [] | ||
| for authrole in exclude_authrole: | ||
| _exclude_authrole.append(builder.CreateString(authrole)) | ||
| PublicationGen.PublicationStartExcludeAuthroleVector(builder, len(_exclude_authrole)) | ||
| for o in reversed(_exclude_authrole): | ||
| builder.PrependUOffsetTRelative(o) | ||
| exclude_authrole = builder.EndVector() | ||
| # eligible: [int] | ||
| eligible = self.eligible | ||
| if eligible: | ||
| PublicationGen.PublicationStartEligibleAuthidVector(builder, len(eligible)) | ||
| for session in reversed(eligible): | ||
| builder.PrependUint64(session) | ||
| eligible = builder.EndVector() | ||
| # eligible_authid: [string] | ||
| eligible_authid = self.eligible_authid | ||
| if eligible_authid: | ||
| _eligible_authid = [] | ||
| for authid in eligible_authid: | ||
| _eligible_authid.append(builder.CreateString(authid)) | ||
| PublicationGen.PublicationStartEligibleAuthidVector(builder, len(_eligible_authid)) | ||
| for o in reversed(_eligible_authid): | ||
| builder.PrependUOffsetTRelative(o) | ||
| eligible_authid = builder.EndVector() | ||
| # eligible_authrole: [string] | ||
| eligible_authrole = self.eligible_authrole | ||
| if eligible_authrole: | ||
| _eligible_authrole = [] | ||
| for authrole in eligible_authrole: | ||
| _eligible_authrole.append(builder.CreateString(authrole)) | ||
| PublicationGen.PublicationStartEligibleAuthroleVector(builder, len(_eligible_authrole)) | ||
| for o in reversed(_eligible_authrole): | ||
| builder.PrependUOffsetTRelative(o) | ||
| eligible_authrole = builder.EndVector() | ||
| # now start and build a new object .. | ||
| PublicationGen.PublicationStart(builder) | ||
| if self.timestamp: | ||
| PublicationGen.PublicationAddTimestamp(builder, self.timestamp) | ||
| if self.publication: | ||
| PublicationGen.PublicationAddPublication(builder, self.publication) | ||
| if self.publisher: | ||
| PublicationGen.PublicationAddPublisher(builder, self.publisher) | ||
| if topic: | ||
| PublicationGen.PublicationAddTopic(builder, topic) | ||
| if args: | ||
| PublicationGen.PublicationAddArgs(builder, args) | ||
| if kwargs: | ||
| PublicationGen.PublicationAddKwargs(builder, kwargs) | ||
| if payload is not None: | ||
| PublicationGen.PublicationAddPayload(builder, payload) | ||
| if self.acknowledge is not None: | ||
| PublicationGen.PublicationAddAcknowledge(builder, self.acknowledge) | ||
| if self.retain is not None: | ||
| PublicationGen.PublicationAddRetain(builder, self.retain) | ||
| if self.exclude_me is not None: | ||
| PublicationGen.PublicationAddExcludeMe(builder, self.exclude_me) | ||
| if exclude: | ||
| PublicationGen.PublicationAddExclude(builder, exclude) | ||
| if exclude_authid: | ||
| PublicationGen.PublicationAddExcludeAuthid(builder, exclude_authid) | ||
| if exclude_authrole: | ||
| PublicationGen.PublicationAddExcludeAuthrole(builder, exclude_authrole) | ||
| if eligible: | ||
| PublicationGen.PublicationAddEligible(builder, eligible) | ||
| if eligible_authid: | ||
| PublicationGen.PublicationAddEligibleAuthid(builder, eligible_authid) | ||
| if eligible_authrole: | ||
| PublicationGen.PublicationAddEligibleAuthrole(builder, eligible_authrole) | ||
| if self.enc_algo: | ||
| PublicationGen.PublicationAddEncAlgo(builder, self.enc_algo) | ||
| if enc_key: | ||
| PublicationGen.PublicationAddEncKey(builder, enc_key) | ||
| if self.enc_serializer: | ||
| PublicationGen.PublicationAddEncSerializer(builder, self.enc_serializer) | ||
| # finish the object. | ||
| final = PublicationGen.PublicationEnd(builder) | ||
| return final | ||
# The table UUID identifies this table inside the database and must never change.
@table('dd04931a-753b-4fde-8140-d66b93519c73', build=Publication.build, cast=Publication.cast)
class Publications(MapOidFlatBuffers):
    """
    Persisted publications archive.

    Map :class:`zlmdb.MapOidFlatBuffers` from ``publication`` to :class:`cfxdb.eventstore.Publication`.
    """
    # NOTE(review): the docstring references ``cfxdb.eventstore.Publication``,
    # but this module appears to be imported as ``cfxdb.realmstore._publication``
    # elsewhere — confirm whether the reference should be ``cfxdb.realmstore``.
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| from typing import Optional | ||
| import zlmdb | ||
| from cfxdb.realmstore._session import Sessions, IndexSessionsBySessionId | ||
| from cfxdb.realmstore._publication import Publications | ||
| from cfxdb.realmstore._event import Events | ||
class RealmStore(object):
    """
    Persistent realm store, aggregating the per-realm database tables
    (sessions, publications, events) and their indexes.
    """

    __slots__ = (
        '_db',
        '_sessions',
        '_idx_sessions_by_session_id',
        '_publications',
        '_events',
    )

    def __init__(self, db):
        """
        :param db: The database this realm store is attached to.
        """
        self._db = db

        # tables and indexes are attached in :meth:`RealmStore.attach`
        self._sessions: Optional[Sessions] = None
        self._idx_sessions_by_session_id: Optional[IndexSessionsBySessionId] = None
        self._publications: Optional[Publications] = None
        self._events: Optional[Events] = None

    @property
    def db(self) -> zlmdb.Database:
        """
        The database this schema is attached to.
        """
        return self._db

    @property
    def sessions(self) -> Optional[Sessions]:
        """
        Table of sessions persisted in this realm store.
        """
        return self._sessions

    @property
    def idx_sessions_by_session_id(self) -> Optional[IndexSessionsBySessionId]:
        """
        Index mapping (session, joined_at) -> app_session_oid.
        """
        return self._idx_sessions_by_session_id

    @property
    def publications(self) -> Optional[Publications]:
        """
        Table with the archive of publications.
        """
        return self._publications

    @property
    def events(self) -> Optional[Events]:
        """
        Table with the archive of events.
        """
        return self._events

    @staticmethod
    def attach(db: zlmdb.Database) -> 'RealmStore':
        """
        Attach a new realm store schema to the given database: attaches all
        tables and indexes and returns the resulting schema object.
        """
        store = RealmStore(db)

        store._sessions = db.attach_table(Sessions)
        assert store._sessions is not None

        store._idx_sessions_by_session_id = db.attach_table(IndexSessionsBySessionId)
        store._sessions.attach_index(
            'idx1', store._idx_sessions_by_session_id,
            lambda session: (session.session, session.joined_at))

        store._publications = db.attach_table(Publications)
        store._events = db.attach_table(Events)

        return store
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import uuid | ||
| import cbor2 | ||
| from typing import Optional, Dict, Any | ||
| import pprint | ||
| import flatbuffers | ||
| import numpy as np | ||
| from zlmdb import table, MapUuidFlatBuffers, MapUint64TimestampUuid | ||
| from cfxdb.gen.realmstore import Session as SessionGen | ||
class _SessionGen(SessionGen.Session):
    """
    Expanded FlatBuffers accessor for ``Session``, adding zero-copy
    ``*AsBytes`` accessors for the byte-vector fields of the generated class.
    """
    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        obj = _SessionGen()
        obj.Init(buf, n + offset)
        return obj

    def _bytes_at(self, slot):
        # Return a zero-copy memoryview of the byte vector stored at the
        # given vtable slot, or None when the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(slot))
        if o == 0:
            return None
        start = self._tab.Vector(o)
        length = self._tab.VectorLen(o)
        return memoryview(self._tab.Bytes)[start:start + length]

    def ArealmOidAsBytes(self):
        return self._bytes_at(4)

    def OidAsBytes(self):
        return self._bytes_at(6)

    def NodeOidAsBytes(self):
        return self._bytes_at(14)

    def TransportAsBytes(self):
        return self._bytes_at(22)

    def ProxyNodeOidAsBytes(self):
        return self._bytes_at(24)

    def ProxyTransportAsBytes(self):
        return self._bytes_at(32)

    def AuthextraAsBytes(self):
        return self._bytes_at(44)
| class Session(object): | ||
| """ | ||
| Persisted session database object. | ||
| """ | ||
    # Attribute slots backing the lazily decoding properties below: each
    # ``_<name>`` slot caches the decoded value of the corresponding
    # FlatBuffers field, while ``_from_fbs`` holds the underlying (still
    # encoded) FlatBuffers object, if any.
    __slots__ = (
        '_from_fbs',
        '_arealm_oid',
        '_oid',
        '_session',
        '_joined_at',
        '_left_at',
        '_node_oid',
        '_node_authid',
        '_worker_name',
        '_worker_pid',
        '_transport',
        '_proxy_node_oid',
        '_proxy_node_authid',
        '_proxy_worker_name',
        '_proxy_worker_pid',
        '_proxy_transport',
        '_realm',
        '_authid',
        '_authrole',
        '_authmethod',
        '_authprovider',
        '_authextra',
    )
    def __init__(self, from_fbs: Optional[_SessionGen] = None):
        """
        :param from_fbs: When parsed from a FlatBuffers buffer, the underlying
            (lazily decoded) generated object; the attributes below then act
            as decode caches. When building a new object, leave as ``None``.
        """
        self._from_fbs = from_fbs

        # [uint8] (uuid)
        self._arealm_oid: Optional[uuid.UUID] = None

        # [uint8] (uuid)
        self._oid: Optional[uuid.UUID] = None

        # uint64
        self._session: Optional[int] = None

        # uint64 (timestamp)
        self._joined_at: Optional[np.datetime64] = None

        # uint64 (timestamp)
        self._left_at: Optional[np.datetime64] = None

        # [uint8] (uuid)
        self._node_oid: Optional[uuid.UUID] = None

        # string
        self._node_authid: Optional[str] = None

        # string
        self._worker_name: Optional[str] = None

        # int32
        self._worker_pid: Optional[int] = None

        # [uint8] (cbor)
        self._transport: Optional[Dict[str, Any]] = None

        # [uint8] (uuid)
        self._proxy_node_oid: Optional[uuid.UUID] = None

        # string
        self._proxy_node_authid: Optional[str] = None

        # string
        self._proxy_worker_name: Optional[str] = None

        # int32
        self._proxy_worker_pid: Optional[int] = None

        # [uint8] (cbor)
        self._proxy_transport: Optional[Dict[str, Any]] = None

        # string
        self._realm: Optional[str] = None

        # string
        self._authid: Optional[str] = None

        # string
        self._authrole: Optional[str] = None

        # string
        self._authmethod: Optional[str] = None

        # string
        self._authprovider: Optional[str] = None

        # [uint8] (cbor)
        self._authextra: Optional[Dict[str, Any]] = None
| def marshal(self): | ||
| obj = { | ||
| 'arealm_oid': self.arealm_oid.bytes if self.arealm_oid else None, | ||
| 'oid': self.oid.bytes if self.oid else None, | ||
| 'session': self.session, | ||
| 'joined_at': int(self.joined_at) if self.joined_at else None, | ||
| 'left_at': int(self.left_at) if self.left_at else None, | ||
| 'node_oid': self.node_oid.bytes if self.node_oid else None, | ||
| 'node_authid': self.node_authid, | ||
| 'worker_name': self.worker_name, | ||
| 'worker_pid': self.worker_pid, | ||
| 'transport': self.transport, | ||
| 'proxy_node_oid': self.proxy_node_oid.bytes if self.proxy_node_oid else None, | ||
| 'proxy_node_authid': self.proxy_node_authid, | ||
| 'proxy_worker_name': self.proxy_worker_name, | ||
| 'proxy_worker_pid': self.proxy_worker_pid, | ||
| 'proxy_transport': self.proxy_transport, | ||
| 'realm': self.realm, | ||
| 'authid': self.authid, | ||
| 'authrole': self.authrole, | ||
| 'authmethod': self.authmethod, | ||
| 'authprovider': self.authprovider, | ||
| 'authextra': self.authextra, | ||
| } | ||
| return obj | ||
| def __str__(self): | ||
| return '\n{}\n'.format(pprint.pformat(self.marshal())) | ||
| @property | ||
| def arealm_oid(self) -> Optional[uuid.UUID]: | ||
| """ | ||
| OID of the application realm this session is/was joined on. | ||
| """ | ||
| if self._arealm_oid is None and self._from_fbs: | ||
| if self._from_fbs.ArealmOidLength(): | ||
| _arealm_oid = self._from_fbs.ArealmOidAsBytes() | ||
| self._arealm_oid = uuid.UUID(bytes=bytes(_arealm_oid)) | ||
| return self._arealm_oid | ||
| @arealm_oid.setter | ||
| def arealm_oid(self, value: Optional[uuid.UUID]): | ||
| assert value is None or isinstance(value, uuid.UUID) | ||
| self._arealm_oid = value | ||
| @property | ||
| def oid(self) -> Optional[uuid.UUID]: | ||
| """ | ||
| Unlimited time, globally unique, long-term OID of this session. The pair of WAMP session ID and join time ``(session, joined_at)`` bidirectionally maps to session ``oid``. | ||
| """ | ||
| if self._oid is None and self._from_fbs: | ||
| if self._from_fbs.OidLength(): | ||
| _oid = self._from_fbs.OidAsBytes() | ||
| self._oid = uuid.UUID(bytes=bytes(_oid)) | ||
| return self._oid | ||
| @oid.setter | ||
| def oid(self, value: Optional[uuid.UUID]): | ||
| assert value is None or isinstance(value, uuid.UUID) | ||
| self._oid = value | ||
| @property | ||
| def session(self) -> Optional[int]: | ||
| """ | ||
| The WAMP session_id of the session. | ||
| """ | ||
| if self._session is None and self._from_fbs: | ||
| self._session = self._from_fbs.Session() | ||
| return self._session | ||
| @session.setter | ||
| def session(self, value: Optional[int]): | ||
| assert value is None or type(value) == int | ||
| self._session = value | ||
| @property | ||
| def joined_at(self) -> Optional[np.datetime64]: | ||
| """ | ||
| Timestamp when the session was joined by the router. Epoch time in ns. | ||
| """ | ||
| if self._joined_at is None and self._from_fbs: | ||
| self._joined_at = np.datetime64(self._from_fbs.JoinedAt(), 'ns') | ||
| return self._joined_at | ||
| @joined_at.setter | ||
| def joined_at(self, value: Optional[np.datetime64]): | ||
| assert value is None or isinstance(value, np.datetime64) | ||
| self._joined_at = value | ||
| @property | ||
| def left_at(self) -> Optional[np.datetime64]: | ||
| """ | ||
| Timestamp when the session left the router. Epoch time in ns. | ||
| """ | ||
| if self._left_at is None and self._from_fbs: | ||
| self._left_at = np.datetime64(self._from_fbs.LeftAt(), 'ns') | ||
| return self._left_at | ||
| @left_at.setter | ||
| def left_at(self, value: Optional[np.datetime64]): | ||
| assert value is None or isinstance(value, np.datetime64) | ||
| self._left_at = value | ||
| @property | ||
| def node_oid(self) -> Optional[uuid.UUID]: | ||
| """ | ||
| OID of the node of the router worker hosting this session. | ||
| """ | ||
| if self._node_oid is None and self._from_fbs: | ||
| if self._from_fbs.NodeOidLength(): | ||
| _node_oid = self._from_fbs.NodeOidAsBytes() | ||
| self._node_oid = uuid.UUID(bytes=bytes(_node_oid)) | ||
| return self._node_oid | ||
| @node_oid.setter | ||
| def node_oid(self, value: Optional[uuid.UUID]): | ||
| assert value is None or isinstance(value, uuid.UUID) | ||
| self._node_oid = value | ||
| @property | ||
| def node_authid(self) -> Optional[str]: | ||
| """ | ||
| Name (management realm WAMP authid) of the node of the router worker hosting this session. | ||
| """ | ||
| if self._node_authid is None and self._from_fbs: | ||
| _node_authid = self._from_fbs.NodeAuthid() | ||
| if _node_authid: | ||
| self._node_authid = _node_authid.decode('utf8') | ||
| return self._node_authid | ||
| @node_authid.setter | ||
| def node_authid(self, value: Optional[str]): | ||
| self._node_authid = value | ||
| @property | ||
| def worker_name(self) -> Optional[str]: | ||
| """ | ||
| Local worker name of the router worker hosting this session. | ||
| """ | ||
| if self._worker_name is None and self._from_fbs: | ||
| _worker_name = self._from_fbs.WorkerName() | ||
| if _worker_name: | ||
| self._worker_name = _worker_name.decode('utf8') | ||
| return self._worker_name | ||
| @worker_name.setter | ||
| def worker_name(self, value: Optional[str]): | ||
| self._worker_name = value | ||
| @property | ||
| def worker_pid(self) -> Optional[int]: | ||
| """ | ||
| Local worker PID of the router worker hosting this session. | ||
| """ | ||
| if self._worker_pid is None and self._from_fbs: | ||
| self._worker_pid = self._from_fbs.WorkerPid() | ||
| return self._worker_pid | ||
| @worker_pid.setter | ||
| def worker_pid(self, value: Optional[int]): | ||
| self._worker_pid = value | ||
| @property | ||
| def transport(self) -> Optional[Dict[str, Any]]: | ||
| """ | ||
| Session transport information. | ||
| """ | ||
| if self._transport is None and self._from_fbs: | ||
| _transport = self._from_fbs.TransportAsBytes() | ||
| if _transport: | ||
| self._transport = cbor2.loads(_transport) | ||
| else: | ||
| self._transport = {} | ||
| return self._transport | ||
| @transport.setter | ||
| def transport(self, value: Optional[Dict[str, Any]]): | ||
| assert value is None or type(value) == dict | ||
| self._transport = value | ||
| @property | ||
| def proxy_node_oid(self) -> Optional[uuid.UUID]: | ||
| """ | ||
| From proxy (in proxy-router cluster setups): OID of the node of the proxy worker hosting this session. | ||
| """ | ||
| if self._proxy_node_oid is None and self._from_fbs: | ||
| if self._from_fbs.ProxyNodeOidLength(): | ||
| _proxy_node_oid = self._from_fbs.ProxyNodeOidAsBytes() | ||
| self._proxy_node_oid = uuid.UUID(bytes=bytes(_proxy_node_oid)) | ||
| return self._proxy_node_oid | ||
| @proxy_node_oid.setter | ||
| def proxy_node_oid(self, value: Optional[uuid.UUID]): | ||
| assert value is None or isinstance(value, uuid.UUID) | ||
| self._proxy_node_oid = value | ||
| @property | ||
| def proxy_node_authid(self) -> Optional[str]: | ||
| """ | ||
| From proxy (in proxy-router cluster setups): Name (management realm WAMP authid) of the node of the proxy worker hosting this session. | ||
| """ | ||
| if self._proxy_node_authid is None and self._from_fbs: | ||
| _proxy_node_authid = self._from_fbs.ProxyNodeAuthid() | ||
| if _proxy_node_authid: | ||
| self._proxy_node_authid = _proxy_node_authid.decode('utf8') | ||
| return self._proxy_node_authid | ||
| @proxy_node_authid.setter | ||
| def proxy_node_authid(self, value: Optional[str]): | ||
| self._proxy_node_authid = value | ||
| @property | ||
| def proxy_worker_name(self) -> Optional[str]: | ||
| """ | ||
| From proxy (in proxy-router cluster setups): Local worker name of the proxy worker hosting this session. | ||
| """ | ||
| if self._proxy_worker_name is None and self._from_fbs: | ||
| _proxy_worker_name = self._from_fbs.ProxyWorkerName() | ||
| if _proxy_worker_name: | ||
| self._proxy_worker_name = _proxy_worker_name.decode('utf8') | ||
| return self._proxy_worker_name | ||
| @proxy_worker_name.setter | ||
| def proxy_worker_name(self, value: Optional[str]): | ||
| self._proxy_worker_name = value | ||
| @property | ||
| def proxy_worker_pid(self) -> Optional[int]: | ||
| """ | ||
| From proxy (in proxy-router cluster setups): Local worker PID of the proxy worker hosting this session. | ||
| """ | ||
| if self._proxy_worker_pid is None and self._from_fbs: | ||
| self._proxy_worker_pid = self._from_fbs.ProxyWorkerPid() | ||
| return self._proxy_worker_pid | ||
| @proxy_worker_pid.setter | ||
| def proxy_worker_pid(self, value: Optional[int]): | ||
| self._proxy_worker_pid = value | ||
| @property | ||
| def proxy_transport(self) -> Optional[Dict[str, Any]]: | ||
| """ | ||
| From proxy (in proxy-router cluster setups): Session transport information, the transport from the proxy to the backend router. | ||
| """ | ||
| if self._proxy_transport is None and self._from_fbs: | ||
| _proxy_transport = self._from_fbs.ProxyTransportAsBytes() | ||
| if _proxy_transport: | ||
| self._proxy_transport = cbor2.loads(_proxy_transport) | ||
| else: | ||
| self._proxy_transport = {} | ||
| return self._proxy_transport | ||
| @proxy_transport.setter | ||
| def proxy_transport(self, value: Optional[Dict[str, Any]]): | ||
| assert value is None or type(value) == dict | ||
| self._proxy_transport = value | ||
| @property | ||
| def realm(self) -> Optional[str]: | ||
| """ | ||
| The WAMP realm the session is/was joined on. | ||
| """ | ||
| if self._realm is None and self._from_fbs: | ||
| self._realm = self._from_fbs.Realm().decode('utf8') | ||
| return self._realm | ||
| @realm.setter | ||
| def realm(self, value: Optional[str]): | ||
| assert value is None or type(value) == str | ||
| self._realm = value | ||
| @property | ||
| def authid(self) -> Optional[str]: | ||
| """ | ||
| The WAMP authid the session was authenticated under. | ||
| """ | ||
| if self._authid is None and self._from_fbs: | ||
| _authid = self._from_fbs.Authid() | ||
| if _authid: | ||
| self._authid = _authid.decode('utf8') | ||
| return self._authid | ||
| @authid.setter | ||
| def authid(self, value: Optional[str]): | ||
| self._authid = value | ||
| @property | ||
| def authrole(self) -> Optional[str]: | ||
| """ | ||
| The WAMP authrole the session was authenticated under. | ||
| """ | ||
| if self._authrole is None and self._from_fbs: | ||
| _authrole = self._from_fbs.Authrole() | ||
| if _authrole: | ||
| self._authrole = _authrole.decode('utf8') | ||
| return self._authrole | ||
| @authrole.setter | ||
| def authrole(self, value: Optional[str]): | ||
| self._authrole = value | ||
| @property | ||
| def authmethod(self) -> Optional[str]: | ||
| """ | ||
| The WAMP authmethod uses to authenticate the session. | ||
| """ | ||
| if self._authmethod is None and self._from_fbs: | ||
| _authmethod = self._from_fbs.Authmethod() | ||
| if _authmethod: | ||
| self._authmethod = _authmethod.decode('utf8') | ||
| return self._authmethod | ||
| @authmethod.setter | ||
| def authmethod(self, value: Optional[str]): | ||
| self._authmethod = value | ||
| @property | ||
| def authprovider(self) -> Optional[str]: | ||
| """ | ||
| The WAMP authprovider that was handling the session authentication. | ||
| """ | ||
| if self._authprovider is None and self._from_fbs: | ||
| _authprovider = self._from_fbs.Authprovider() | ||
| if _authprovider: | ||
| self._authprovider = _authprovider.decode('utf8') | ||
| return self._authprovider | ||
| @authprovider.setter | ||
| def authprovider(self, value: Optional[str]): | ||
| self._authprovider = value | ||
| @property | ||
| def authextra(self) -> Optional[Dict[str, Any]]: | ||
| """ | ||
| The WAMP authextra as provided to the authenticated session. | ||
| """ | ||
| if self._authextra is None and self._from_fbs: | ||
| _authextra = self._from_fbs.AuthextraAsBytes() | ||
| if _authextra: | ||
| self._authextra = cbor2.loads(_authextra) | ||
| else: | ||
| self._authextra = {} | ||
| return self._authextra | ||
| @authextra.setter | ||
| def authextra(self, value: Optional[Dict[str, Any]]): | ||
| assert value is None or type(value) == dict | ||
| self._authextra = value | ||
    @staticmethod
    def cast(buf) -> 'Session':
        """
        Parse a FlatBuffers-serialized session from ``buf`` and wrap it in a
        :class:`Session` object (attributes are decoded lazily on access).
        """
        return Session(_SessionGen.GetRootAsSession(buf, 0))
    def build(self, builder):
        """
        Serialize this object into a new FlatBuffers table using ``builder``
        and return the resulting table offset (pass to ``builder.Finish()``).
        """
        # FlatBuffers requires all strings/byte-vectors to be created
        # *before* the table itself is started (no nested construction):
        # UUIDs are stored as their 16 raw bytes, dicts as CBOR blobs.
        arealm_oid = self.arealm_oid.bytes if self.arealm_oid else None
        if arealm_oid:
            arealm_oid = builder.CreateString(arealm_oid)
        oid = self.oid.bytes if self.oid else None
        if oid:
            oid = builder.CreateString(oid)
        node_oid = self.node_oid.bytes if self.node_oid else None
        if node_oid:
            node_oid = builder.CreateString(node_oid)
        node_authid = self.node_authid
        if node_authid:
            node_authid = builder.CreateString(node_authid)
        worker_name = self.worker_name
        if worker_name:
            worker_name = builder.CreateString(worker_name)
        transport = self.transport
        if transport:
            transport = builder.CreateString(cbor2.dumps(transport))
        proxy_node_oid = self.proxy_node_oid.bytes if self.proxy_node_oid else None
        if proxy_node_oid:
            proxy_node_oid = builder.CreateString(proxy_node_oid)
        proxy_node_authid = self.proxy_node_authid
        if proxy_node_authid:
            proxy_node_authid = builder.CreateString(proxy_node_authid)
        proxy_worker_name = self.proxy_worker_name
        if proxy_worker_name:
            proxy_worker_name = builder.CreateString(proxy_worker_name)
        proxy_transport = self.proxy_transport
        if proxy_transport:
            proxy_transport = builder.CreateString(cbor2.dumps(proxy_transport))
        realm = self.realm
        if realm:
            realm = builder.CreateString(realm)
        authid = self.authid
        if authid:
            authid = builder.CreateString(authid)
        authrole = self.authrole
        if authrole:
            authrole = builder.CreateString(authrole)
        authmethod = self.authmethod
        if authmethod:
            authmethod = builder.CreateString(authmethod)
        authprovider = self.authprovider
        if authprovider:
            authprovider = builder.CreateString(authprovider)
        authextra = self.authextra
        if authextra:
            authextra = builder.CreateString(cbor2.dumps(authextra))
        # Start the table and add the prepared fields. Falsy fields are
        # skipped entirely, so FlatBuffers serves its default (0/None) for
        # them on read-back.
        SessionGen.SessionStart(builder)
        if arealm_oid:
            SessionGen.SessionAddArealmOid(builder, arealm_oid)
        if oid:
            SessionGen.SessionAddOid(builder, oid)
        if self.session:
            SessionGen.SessionAddSession(builder, self.session)
        if self.joined_at:
            # int(np.datetime64) yields epoch nanoseconds
            SessionGen.SessionAddJoinedAt(builder, int(self.joined_at))
        if self.left_at:
            SessionGen.SessionAddLeftAt(builder, int(self.left_at))
        if node_oid:
            SessionGen.SessionAddNodeOid(builder, node_oid)
        if node_authid:
            SessionGen.SessionAddNodeAuthid(builder, node_authid)
        if worker_name:
            SessionGen.SessionAddWorkerName(builder, worker_name)
        if self.worker_pid:
            SessionGen.SessionAddWorkerPid(builder, self.worker_pid)
        if transport:
            SessionGen.SessionAddTransport(builder, transport)
        if proxy_node_oid:
            SessionGen.SessionAddProxyNodeOid(builder, proxy_node_oid)
        if proxy_node_authid:
            SessionGen.SessionAddProxyNodeAuthid(builder, proxy_node_authid)
        if proxy_worker_name:
            SessionGen.SessionAddProxyWorkerName(builder, proxy_worker_name)
        if self.proxy_worker_pid:
            SessionGen.SessionAddProxyWorkerPid(builder, self.proxy_worker_pid)
        if proxy_transport:
            SessionGen.SessionAddProxyTransport(builder, proxy_transport)
        if realm:
            SessionGen.SessionAddRealm(builder, realm)
        if authid:
            SessionGen.SessionAddAuthid(builder, authid)
        if authrole:
            SessionGen.SessionAddAuthrole(builder, authrole)
        if authmethod:
            SessionGen.SessionAddAuthmethod(builder, authmethod)
        if authprovider:
            SessionGen.SessionAddAuthprovider(builder, authprovider)
        if authextra:
            SessionGen.SessionAddAuthextra(builder, authextra)
        final = SessionGen.SessionEnd(builder)
        return final
# NOTE(review): the table UUID identifies this table persistently in the
# database and must never change once deployed.
@table('403ecc06-f564-4ea9-92f2-c4c13bd2ba5a', build=Session.build, cast=Session.cast)
class Sessions(MapUuidFlatBuffers):
    """
    Persisted session information table.

    Map :class:`zlmdb.MapUuidFlatBuffers` from ``session_oid`` to :class:`cfxdb.realmstore.Session`
    """
# Secondary index over :class:`Sessions`: looks up the session storage OID
# from the (transient) WAMP session ID plus join timestamp.
@table('0ea1ea1a-45f2-4352-a4a0-1fafff099c96')
class IndexSessionsBySessionId(MapUint64TimestampUuid):
    """
    Index: ``(sessionid, joined_at) -> session_oid``
    """
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## |
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import random | ||
| import timeit | ||
| import uuid | ||
| import txaio | ||
| txaio.use_twisted() # noqa | ||
| from autobahn import util | ||
| import flatbuffers | ||
| import pytest | ||
| import numpy as np | ||
| from txaio import time_ns | ||
| from cfxdb.cookiestore import Cookie | ||
def fill_cookie(cookie):
    """
    Populate every attribute of the given Cookie object with random, but
    shape-correct, test data.
    """
    cookie.oid = uuid.uuid4()
    cookie.created = np.datetime64(time_ns(), 'ns')
    cookie.max_age = random.randint(1, 10**10)
    cookie.name = random.choice(['cbtid1', 'cbtid2', 'cbtid3'])
    # the cookie value itself: a 24-character random ID
    cookie.value = util.newid(24)
    cookie.authenticated = np.datetime64(time_ns(), 'ns')
    cookie.authenticated_on_node = uuid.uuid4()
    cookie.authenticated_on_worker = random.choice(['worker1', 'worker2', 'worker3'])
    cookie.authenticated_transport_info = {'xoo': 'yar', 'zaz': [9, 8, 7]}
    cookie.authenticated_session = util.id()
    cookie.authenticated_joined_at = np.datetime64(time_ns(), 'ns')
    cookie.authenticated_authmethod = random.choice(['meth1', 'meth2', 'meth3'])
    cookie.authid = util.generate_token(4, 3)
    cookie.authrole = random.choice(['role1', 'role2', 'role3'])
    cookie.authmethod = random.choice(['method1', 'method2', 'method3'])
    cookie.authrealm = random.choice(['realm1', 'realm2', 'realm3'])
    cookie.authextra = {'foo': 'bar', 'baz': [1, 2, 3]}
def fill_cookie_empty(cookie):
    """
    Reset every Cookie attribute to None (the all-empty test case).
    """
    for attr in ('oid', 'created', 'max_age', 'name', 'value', 'authenticated',
                 'authenticated_on_node', 'authenticated_on_worker',
                 'authenticated_transport_info', 'authenticated_session',
                 'authenticated_joined_at', 'authenticated_authmethod',
                 'authid', 'authrole', 'authmethod', 'authrealm', 'authextra'):
        setattr(cookie, attr, None)
@pytest.fixture(scope='function')
def cookie():
    # A freshly created Cookie populated with random test data,
    # one per test function.
    _cookie = Cookie()
    fill_cookie(_cookie)
    return _cookie
@pytest.fixture(scope='function')
def builder():
    # A fresh FlatBuffers builder per test function (builders are single-use).
    _builder = flatbuffers.Builder(0)
    return _builder
def test_cookie_roundtrip(cookie, builder):
    """
    Serialize a fully populated Cookie to FlatBuffers bytes, cast the bytes
    back, and check that every attribute survives the round-trip.
    """
    # serialize to bytes (flatbuffers) from python object
    offset = cookie.build(builder)
    builder.Finish(offset)
    data = builder.Output()
    assert len(data) == 360

    # create python object from bytes (flatbuffers)
    restored = Cookie.cast(data)

    for attr in ('oid', 'created', 'max_age', 'name', 'value', 'authenticated',
                 'authenticated_on_node', 'authenticated_on_worker',
                 'authenticated_transport_info', 'authenticated_session',
                 'authenticated_joined_at', 'authenticated_authmethod',
                 'authid', 'authrole', 'authmethod', 'authrealm', 'authextra'):
        assert getattr(restored, attr) == getattr(cookie, attr)
def test_cookie_empty(builder):
    """
    Serialize an all-empty Cookie and check the per-type defaults served on
    read-back: timestamps as epoch zero, ints as 0, dicts as {}, rest None.
    """
    cookie = Cookie()
    fill_cookie_empty(cookie)

    # serialize to bytes (flatbuffers) from python object
    offset = cookie.build(builder)
    builder.Finish(offset)
    data = builder.Output()
    assert len(data) == 12

    # create python object from bytes (flatbuffers)
    restored = Cookie.cast(data)

    unix_zero = np.datetime64(0, 'ns')
    expected = {
        'oid': None,
        'created': unix_zero,
        'max_age': 0,
        'name': None,
        'value': None,
        'authenticated': unix_zero,
        'authenticated_on_node': None,
        'authenticated_on_worker': None,
        'authenticated_transport_info': {},
        'authenticated_session': 0,
        'authenticated_joined_at': unix_zero,
        'authenticated_authmethod': None,
        'authid': None,
        'authrole': None,
        'authmethod': None,
        'authrealm': None,
        'authextra': {},
    }
    for attr, value in expected.items():
        if value is None:
            assert getattr(restored, attr) is None
        else:
            assert getattr(restored, attr) == value
def test_cookie_roundtrip_perf(cookie, builder):
    """
    Measure Cookie cast+verify throughput over N timing runs of M iterations
    each, and assert a minimum median rate of 1000 objects/sec.
    """
    obj = cookie.build(builder)
    builder.Finish(obj)
    data = builder.Output()
    # accumulator consumed at the end so the loop body cannot be
    # optimized away
    scratch = {'value': 0}

    def loop():
        # cast and fully verify one object per iteration
        _cookie = Cookie.cast(data)
        assert _cookie.oid == cookie.oid
        assert _cookie.created == cookie.created
        assert _cookie.max_age == cookie.max_age
        assert _cookie.name == cookie.name
        assert _cookie.value == cookie.value
        assert _cookie.authenticated == cookie.authenticated
        assert _cookie.authenticated_on_node == cookie.authenticated_on_node
        assert _cookie.authenticated_on_worker == cookie.authenticated_on_worker
        assert _cookie.authenticated_transport_info == cookie.authenticated_transport_info
        assert _cookie.authenticated_session == cookie.authenticated_session
        assert _cookie.authenticated_joined_at == cookie.authenticated_joined_at
        assert _cookie.authenticated_authmethod == cookie.authenticated_authmethod
        assert _cookie.authid == cookie.authid
        assert _cookie.authrole == cookie.authrole
        assert _cookie.authmethod == cookie.authmethod
        assert _cookie.authrealm == cookie.authrealm
        assert _cookie.authextra == cookie.authextra
        scratch['value'] += _cookie.authenticated_session

    # N timing samples of M loop iterations each
    N = 7
    M = 20000
    samples = []
    print('measuring:')
    for i in range(N):
        secs = timeit.timeit(loop, number=M)
        ops = round(float(M) / secs, 1)
        samples.append(ops)
        print('{} objects/sec performance'.format(ops))
    # median of the samples is the reported result
    samples = sorted(samples)
    ops50 = samples[int(len(samples) / 2)]
    print('RESULT: {} objects/sec median performance'.format(ops50))
    assert ops50 > 1000
    print(scratch['value'])
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## |
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pytest | ||
| import random | ||
| import timeit | ||
| import flatbuffers | ||
| from txaio import with_twisted # noqa | ||
| from txaio import time_ns | ||
| from autobahn import util | ||
| import zlmdb | ||
| from cfxdb.realmstore import Event | ||
# reset the global zlmdb table registry so importing table classes in
# repeated test runs does not collide on already-registered table UUIDs
# -- NOTE(review): confirm this is why the registry is cleared here
zlmdb.TABLES_BY_UUID = {}
def fill_event(event):
    """
    Populate every attribute of the given Event object with random, but
    shape-correct, test data.
    """
    event.timestamp = time_ns()
    event.subscription = util.id()
    event.publication = util.id()
    event.receiver = util.id()
    event.retained = random.choice([True, False])
    event.acknowledged_delivery = random.choice([True, False])
@pytest.fixture(scope='function')
def builder():
    # A fresh FlatBuffers builder per test function (builders are single-use).
    _builder = flatbuffers.Builder(0)
    return _builder
@pytest.fixture(scope='function')
def event():
    # A freshly created Event populated with random test data,
    # one per test function.
    _event = Event()
    fill_event(_event)
    return _event
def test_event_roundtrip(event, builder):
    """
    Serialize a filled Event to FlatBuffers bytes, cast the bytes back, and
    check that every attribute survives the round-trip.
    """
    offset = event.build(builder)
    builder.Finish(offset)
    data = builder.Output()
    # serialized size varies with which (random) boolean fields are set
    assert len(data) in [64, 56]

    restored = Event.cast(data)
    for attr in ('timestamp', 'subscription', 'publication', 'receiver',
                 'retained', 'acknowledged_delivery'):
        assert getattr(restored, attr) == getattr(event, attr)
def test_event_roundtrip_perf(event, builder):
    """
    Measure Event cast+verify throughput over N timing runs of M iterations
    each, and assert a minimum median rate of 1000 objects/sec.
    """
    obj = event.build(builder)
    builder.Finish(obj)
    data = builder.Output()
    # accumulator consumed at the end so the loop body cannot be
    # optimized away
    scratch = {'timestamp': 0}

    def loop():
        # FIX: removed the vestigial ``if True:`` wrapper around the asserts
        _event = Event.cast(data)
        assert _event.timestamp == event.timestamp
        assert _event.subscription == event.subscription
        assert _event.publication == event.publication
        assert _event.receiver == event.receiver
        assert _event.retained == event.retained
        assert _event.acknowledged_delivery == event.acknowledged_delivery
        # FIX: accumulate from the freshly cast object (not the fixture) so
        # the deserialized value is actually consumed, as in the cookie test
        scratch['timestamp'] += _event.timestamp

    N = 5
    M = 100000
    samples = []
    print('measuring:')
    for i in range(N):
        secs = timeit.timeit(loop, number=M)
        ops = round(float(M) / secs, 1)
        samples.append(ops)
        print('{} objects/sec performance'.format(ops))
    samples = sorted(samples)
    ops50 = samples[int(len(samples) / 2)]
    print('RESULT: {} objects/sec median performance'.format(ops50))
    assert ops50 > 1000
    assert scratch['timestamp'] > 0
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pytest | ||
| import os | ||
| import random | ||
| import uuid | ||
| import timeit | ||
| import platform | ||
| import flatbuffers | ||
| from txaio import with_twisted # noqa | ||
| from txaio import time_ns | ||
| from autobahn import util | ||
| import zlmdb | ||
| from cfxdb.realmstore import Publication | ||
# reset the global zlmdb table registry so importing table classes in
# repeated test runs does not collide on already-registered table UUIDs
# -- NOTE(review): confirm this is why the registry is cleared here
zlmdb.TABLES_BY_UUID = {}
@pytest.fixture(scope='function')
def builder():
    # A fresh FlatBuffers builder per test function (builders are single-use).
    _builder = flatbuffers.Builder(0)
    return _builder
def fill_publication(publication):
    """
    Populate every attribute of the given Publication object with random,
    but shape-correct, test data (including XBR payload-encryption fields).
    """
    publication.timestamp = time_ns()
    publication.publication = util.id()
    publication.publisher = util.id()
    publication.topic = 'com.example.foobar.{}.doit'.format(uuid.uuid4())
    publication.args = [23, 'hello', {'foo': 0.5}]
    publication.kwargs = {'bar': 23, 'baz': [1, 2, 3]}
    publication.payload = os.urandom(32)
    publication.acknowledge = random.choice([True, False])
    publication.retain = random.choice([True, False])
    publication.exclude_me = random.choice([True, False])
    # black/whitelists: random WAMP session IDs plus fixed authid/authrole sets
    i0 = util.id()
    publication.exclude = [i0 + j + 1000 for j in range(5)]
    publication.exclude_authid = ['user1', 'user2', 'user3']
    publication.exclude_authrole = ['roleA', 'roleB', 'roleC']
    i0 = util.id()
    publication.eligible = [i0 + j + 1000 for j in range(5)]
    publication.eligible_authid = ['user4', 'user5', 'user6']
    publication.eligible_authrole = ['roleD', 'roleE', 'roleF']
    publication.enc_algo = Publication.ENC_ALGO_XBR
    publication.enc_key = os.urandom(32)
    publication.enc_serializer = Publication.ENC_SER_CBOR
@pytest.fixture(scope='function')
def publication():
    # A freshly created Publication populated with random test data,
    # one per test function.
    _publication = Publication()
    fill_publication(_publication)
    return _publication
def test_publication_roundtrip(publication, builder):
    """
    Serialize a fully populated Publication to FlatBuffers bytes, cast the
    bytes back, and check that every attribute survives the round-trip.
    """
    offset = publication.build(builder)
    builder.Finish(offset)
    data = builder.Output()
    # serialized size varies with which (random) boolean fields are set
    assert len(data) in [624, 632, 640]

    restored = Publication.cast(data)
    for attr in ('timestamp', 'publication', 'publisher', 'topic', 'args',
                 'kwargs', 'payload', 'acknowledge', 'retain', 'exclude_me',
                 'exclude', 'exclude_authid', 'exclude_authrole', 'eligible',
                 'eligible_authid', 'eligible_authrole', 'enc_algo',
                 'enc_key', 'enc_serializer'):
        assert getattr(restored, attr) == getattr(publication, attr)
def test_publication_roundtrip_perf(publication, builder):
    """
    Measure Publication cast+verify throughput over N timing runs of M
    iterations each, and assert a minimum median rate of 1000 objects/sec.
    """
    obj = publication.build(builder)
    builder.Finish(obj)
    data = builder.Output()
    # accumulator consumed at the end so the loop body cannot be
    # optimized away
    scratch = {'timestamp': 0}

    def loop():
        # FIX: removed the vestigial ``if True:`` wrapper around the asserts
        _publication = Publication.cast(data)
        assert _publication.timestamp == publication.timestamp
        assert _publication.publication == publication.publication
        assert _publication.publisher == publication.publisher
        assert _publication.topic == publication.topic
        assert _publication.args == publication.args
        assert _publication.kwargs == publication.kwargs
        assert _publication.payload == publication.payload
        assert _publication.acknowledge == publication.acknowledge
        assert _publication.retain == publication.retain
        assert _publication.exclude_me == publication.exclude_me
        assert _publication.exclude == publication.exclude
        assert _publication.exclude_authid == publication.exclude_authid
        assert _publication.exclude_authrole == publication.exclude_authrole
        assert _publication.eligible == publication.eligible
        assert _publication.eligible_authid == publication.eligible_authid
        assert _publication.eligible_authrole == publication.eligible_authrole
        assert _publication.enc_algo == publication.enc_algo
        assert _publication.enc_key == publication.enc_key
        assert _publication.enc_serializer == publication.enc_serializer
        # FIX: accumulate from the freshly cast object (not the fixture) so
        # the deserialized value is actually consumed, as in the cookie test
        scratch['timestamp'] += _publication.timestamp

    N = 5
    # PyPy JIT-compiles the loop, so it can afford many more iterations
    if platform.python_implementation() == 'PyPy':
        M = 100000
    else:
        M = 10000
    samples = []
    print('measuring with N={}, M={}:'.format(N, M))
    for i in range(N):
        secs = timeit.timeit(loop, number=M)
        ops = round(float(M) / secs, 1)
        samples.append(ops)
        print('{} objects/sec performance'.format(ops))
    samples = sorted(samples)
    ops50 = samples[int(len(samples) / 2)]
    print('RESULT: {} objects/sec median performance ({} objects total)'.format(ops50, N * M))
    assert ops50 > 1000
    assert scratch['timestamp'] > 0
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pytest | ||
| import random | ||
| import uuid | ||
| import timeit | ||
| import flatbuffers | ||
| import numpy as np | ||
| from txaio import with_twisted, time_ns # noqa | ||
| from autobahn import util | ||
| from autobahn.wamp.types import TransportDetails | ||
| from cfxdb.realmstore import Session | ||
# Captured (real-world) session info of a WAMP client connected through a
# Crossbar.io proxy->router setup: ``transport`` is the raw-socket
# proxy->router leg, while ``authextra['transport']`` carries the original
# client->proxy WebSocket leg.
DATA1 = {
    'authextra': {
        'transport': {
            'channel_framing':
            'websocket',
            'channel_id': {},
            'channel_serializer':
            None,
            'channel_type':
            'tcp',
            'http_cbtid':
            'ch0oFqC4EQMCqpYmj/78bQ5D',
            'http_headers_received': {
                'cache-control': 'no-cache',
                'connection': 'Upgrade',
                'host': 'localhost:8080',
                'pragma': 'no-cache',
                'sec-websocket-extensions': 'permessage-deflate; '
                'client_no_context_takeover; '
                'client_max_window_bits',
                'sec-websocket-key': 'FG9K1Vx44MqEE9c37YgPEw==',
                'sec-websocket-protocol': 'wamp.2.json',
                'sec-websocket-version': '13',
                'upgrade': 'WebSocket',
                'user-agent': 'AutobahnPython/22.4.1.dev7'
            },
            'http_headers_sent': {
                'Set-Cookie': 'cbtid=ch0oFqC4EQMCqpYmj/78bQ5D;max-age=604800'
            },
            'is_secure':
            False,
            'is_server':
            True,
            'own':
            None,
            'own_fd':
            -1,
            'own_pid':
            28806,
            'own_tid':
            28806,
            'peer':
            'tcp4:127.0.0.1:48812',
            'peer_cert':
            None,
            'websocket_extensions_in_use': [{
                'client_max_window_bits': 13,
                'client_no_context_takeover': False,
                'extension': 'permessage-deflate',
                'is_server': True,
                'mem_level': 5,
                'server_max_window_bits': 13,
                'server_no_context_takeover': False
            }],
            'websocket_protocol':
            'wamp.2.json'
        },
        'x_cb_node': 'intel-nuci7-28788',
        'x_cb_peer': 'unix',
        'x_cb_pid': 28797,
        'x_cb_worker': 'test_router1'
    },
    'authid': 'client1',
    'authmethod': 'anonymous-proxy',
    'authprovider': 'static',
    'authrole': 'frontend',
    'session': 941369063710961,
    'transport': {
        'channel_framing': 'rawsocket',
        'channel_id': {},
        'channel_serializer': 'cbor',
        'channel_type': 'tcp',
        'http_cbtid': None,
        'http_headers_received': None,
        'http_headers_sent': None,
        'is_secure': False,
        'is_server': None,
        'own': None,
        'own_fd': -1,
        'own_pid': 28797,
        'own_tid': 28797,
        'peer': 'unix',
        'peer_cert': None,
        'websocket_extensions_in_use': None,
        'websocket_protocol': 'wamp.2.cbor'
    }
}
# Captured transport details of a raw-socket (unix domain socket)
# proxy->router connection; used as the ``proxy_transport`` fixture value.
DATA2 = {
    'channel_framing': 'rawsocket',
    'channel_id': {},
    'channel_serializer': 'cbor',
    'channel_type': 'tcp',
    'http_cbtid': None,
    'http_headers_received': None,
    'http_headers_sent': None,
    'is_secure': False,
    'is_server': None,
    'own': None,
    'own_fd': -1,
    'own_pid': 14017,
    'own_tid': 14017,
    'peer': 'unix',
    'peer_cert': None,
    'websocket_extensions_in_use': None,
    'websocket_protocol': 'wamp.2.cbor'
}
def fill_session(session):
    """
    Populate every attribute of the given Session object with realistic test
    data, reusing the captured transport fixtures DATA1/DATA2.
    """
    # round-trip the captured dicts through TransportDetails to get
    # canonical marshaled form
    _td1 = TransportDetails.parse(DATA1['transport'])
    _td2 = TransportDetails.parse(DATA1['authextra']['transport'])
    _td3 = TransportDetails.parse(DATA2)
    session.arealm_oid = uuid.uuid4()
    session.oid = uuid.uuid4()
    session.session = util.id()
    # joined 723 seconds before "now"
    session.joined_at = np.datetime64(time_ns() - 723 * 10**9, 'ns')
    session.left_at = np.datetime64(time_ns(), 'ns')
    session.node_oid = uuid.uuid4()
    session.node_authid = 'intel-nuci7'
    session.worker_name = 'router1'
    session.worker_pid = 28797
    session.transport = _td1.marshal()
    session.proxy_node_oid = uuid.uuid4()
    session.proxy_node_authid = 'intel-nuci7'
    session.proxy_worker_name = 'proxy1'
    session.proxy_worker_pid = 30992
    session.proxy_transport = _td3.marshal()
    session.realm = 'realm-{}'.format(uuid.uuid4())
    session.authid = util.generate_serial_number()
    session.authrole = random.choice(['admin', 'user*', 'guest', 'anon*'])
    session.authmethod = random.choice(['wampcra', 'cookie', 'anonymous-proxy'])
    session.authprovider = random.choice(['static', 'dynamic'])
    session.authextra = {
        'transport': _td2.marshal(),
        'x_cb_node': DATA1['authextra'].get('x_cb_node', None),
        'x_cb_peer': DATA1['authextra'].get('x_cb_peer', None),
        'x_cb_pid': DATA1['authextra'].get('x_cb_pid', None),
        'x_cb_worker': DATA1['authextra'].get('x_cb_worker', None),
    }
@pytest.fixture(scope='function')
def builder():
    # A fresh FlatBuffers builder per test function (builders are single-use).
    _builder = flatbuffers.Builder(0)
    return _builder
@pytest.fixture(scope='function')
def session():
    # A freshly created Session populated with realistic test data,
    # one per test function.
    _session = Session()
    fill_session(_session)
    return _session
def test_session_roundtrip(session, builder):
    """
    Serialize a fully populated Session to FlatBuffers bytes, cast the bytes
    back, and check that every attribute survives the round-trip.
    """
    # serialize to bytes (flatbuffers) from python object
    offset = session.build(builder)
    builder.Finish(offset)
    data = builder.Output()
    # serialized size varies with the randomly chosen string field values
    assert len(data) in [1944, 1952]

    # create python object from bytes (flatbuffers)
    restored = Session.cast(data)

    for attr in ('arealm_oid', 'oid', 'session', 'joined_at', 'left_at',
                 'node_oid', 'node_authid', 'worker_name', 'worker_pid',
                 'transport', 'proxy_node_oid', 'proxy_node_authid',
                 'proxy_worker_name', 'proxy_worker_pid', 'proxy_transport',
                 'realm', 'authid', 'authrole', 'authmethod', 'authprovider',
                 'authextra'):
        assert getattr(restored, attr) == getattr(session, attr)
def test_session_roundtrip_perf(session, builder):
    """
    Measure Session cast+verify throughput over N timing runs of M iterations
    each, and assert a minimum median rate of 1000 objects/sec.
    """
    obj = session.build(builder)
    builder.Finish(obj)
    data = builder.Output()
    # accumulator consumed at the end so the loop body cannot be
    # optimized away
    scratch = {'session': 0}

    def loop():
        # FIX: removed the vestigial ``if True:`` wrapper around the asserts
        _session = Session.cast(data)
        assert _session.arealm_oid == session.arealm_oid
        assert _session.oid == session.oid
        assert _session.session == session.session
        assert _session.joined_at == session.joined_at
        assert _session.left_at == session.left_at
        assert _session.node_oid == session.node_oid
        assert _session.node_authid == session.node_authid
        assert _session.worker_name == session.worker_name
        assert _session.worker_pid == session.worker_pid
        assert _session.transport == session.transport
        assert _session.proxy_node_oid == session.proxy_node_oid
        assert _session.proxy_node_authid == session.proxy_node_authid
        assert _session.proxy_worker_name == session.proxy_worker_name
        assert _session.proxy_worker_pid == session.proxy_worker_pid
        assert _session.proxy_transport == session.proxy_transport
        assert _session.realm == session.realm
        assert _session.authid == session.authid
        assert _session.authrole == session.authrole
        assert _session.authmethod == session.authmethod
        assert _session.authprovider == session.authprovider
        assert _session.authextra == session.authextra
        # FIX: accumulate from the freshly cast object (not the fixture) so
        # the deserialized value is actually consumed, as in the cookie test
        scratch['session'] += _session.session

    N = 5
    M = 100000
    samples = []
    print('measuring:')
    for i in range(N):
        secs = timeit.timeit(loop, number=M)
        ops = round(float(M) / secs, 1)
        samples.append(ops)
        print('{} objects/sec performance'.format(ops))
    samples = sorted(samples)
    ops50 = samples[int(len(samples) / 2)]
    print('RESULT: {} objects/sec median performance'.format(ops50))
    assert ops50 > 1000
    assert scratch['session'] > 0
| include LICENSE | ||
| include requirements.txt | ||
| include requirements-dev.txt |
| pip>=9.0.1 | ||
| bumpversion>=0.5.3 | ||
| wheel>=0.30.0 | ||
| watchdog>=0.8.3 | ||
| flake8>=3.5.0 | ||
| tox>=2.9.1 | ||
| tox-gh-actions>=2.2.0 | ||
| codecov>=2.0.15 | ||
| sphinx>=1.7.1 | ||
| sphinxcontrib-images | ||
| sphinxcontrib-spelling | ||
| sphinx-autoapi | ||
| sphinx_rtd_theme | ||
| twine>=1.10.0 | ||
| pytest>=3.4.2 | ||
| pytest-runner>=2.11.1 | ||
| humanize>=0.5.1 | ||
| backports.tempfile>=1.0 | ||
| # https://github.com/google/yapf/issues/712 | ||
| yapf==0.29.0 | ||
| pylint>=1.9.2 | ||
| pyyaml>=4.2b4 | ||
| mypy>=0.610; python_version >= '3.4' and platform_python_implementation != 'PyPy' | ||
| twisted>=18.7.0 | ||
| txaio>=22.2.1 | ||
| autobahn[twisted,xbr,serialization]>=22.3.2 | ||
| zlmdb>=22.3.1 | ||
| web3>=5.28.0 | ||
argon2-cffi>=20.1.0
| autobahn[twisted,xbr,serialization]>=22.4.1 | ||
| zlmdb>=22.4.1 | ||
| web3>=5.28.0 | ||
| argon2_cffi>=20.1.0 |
+43
-39
@@ -1,4 +0,4 @@ | ||
| Metadata-Version: 1.2 | ||
| Metadata-Version: 2.1 | ||
| Name: cfxdb | ||
| Version: 22.3.1 | ||
| Version: 22.4.1 | ||
| Summary: Crossbar.io Database, based on zLMDB | ||
@@ -8,39 +8,2 @@ Home-page: https://crossbario.com | ||
| License: MIT License | ||
| Description: Introduction to cfxdb | ||
| ===================== | ||
| | |Version| |Build| |Deploy| |Docs| | ||
| **cfxdb** is a Crossbar.io Python support package with core database access classes | ||
| written in native Python. The package allows direct in-memory data access from | ||
| Python programs (including Jupyter notebooks) to CrossbarFX edge node data: | ||
| * persisted WAMP event history | ||
| * persisted router tracing data | ||
| * XBR market maker transactions database | ||
| * XBR network backend database | ||
| * WAMP session cache | ||
| * custom user, embedded object databases | ||
| -------------- | ||
| *Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| WAMP, Crossbar.io and XBR are trademarks of Crossbar.io Technologies GmbH.* | ||
| .. |Version| image:: https://img.shields.io/pypi/v/cfxdb.svg | ||
| :target: https://pypi.python.org/pypi/cfxdb | ||
| :alt: Version | ||
| .. |Build| image:: https://github.com/crossbario/cfxdb/workflows/main/badge.svg | ||
| :target: https://github.com/crossbario/cfxdb/actions?query=workflow%3Amain | ||
| :alt: Build | ||
| .. |Deploy| image:: https://github.com/crossbario/cfxdb/workflows/deploy/badge.svg | ||
| :target: https://github.com/crossbario/cfxdb/actions?query=workflow%3Adeploy | ||
| :alt: Deploy | ||
| .. |Docs| image:: https://img.shields.io/badge/docs-brightgreen.svg?style=flat | ||
| :target: https://crossbario.com/docs/cfxdb/index.html | ||
| :alt: Docs | ||
| Platform: Any | ||
@@ -69,1 +32,42 @@ Classifier: License :: OSI Approved :: MIT License | ||
| Requires-Python: >=3.7 | ||
| Provides-Extra: dev | ||
| License-File: LICENSE | ||
| Introduction to cfxdb | ||
| ===================== | ||
| | |Version| |Build| |Deploy| |Docs| | ||
| **cfxdb** is a Crossbar.io Python support package with core database access classes | ||
| written in native Python. The package allows direct in-memory data access from | ||
| Python programs (including Jupyter notebooks) to CrossbarFX edge node data: | ||
| * persisted WAMP event history | ||
| * persisted router tracing data | ||
| * XBR market maker transactions database | ||
| * XBR network backend database | ||
| * WAMP session cache | ||
| * custom user, embedded object databases | ||
| -------------- | ||
| *Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| WAMP, Crossbar.io and XBR are trademarks of Crossbar.io Technologies GmbH.* | ||
| .. |Version| image:: https://img.shields.io/pypi/v/cfxdb.svg | ||
| :target: https://pypi.python.org/pypi/cfxdb | ||
| :alt: Version | ||
| .. |Build| image:: https://github.com/crossbario/cfxdb/workflows/main/badge.svg | ||
| :target: https://github.com/crossbario/cfxdb/actions?query=workflow%3Amain | ||
| :alt: Build | ||
| .. |Deploy| image:: https://github.com/crossbario/cfxdb/workflows/deploy/badge.svg | ||
| :target: https://github.com/crossbario/cfxdb/actions?query=workflow%3Adeploy | ||
| :alt: Deploy | ||
| .. |Docs| image:: https://readthedocs.org/projects/cfxdb/badge/?version=latest | ||
| :target: https://cfxdb.readthedocs.io/en/latest/ | ||
| :alt: Docs | ||
@@ -0,4 +1,36 @@ | ||
| autobahn[serialization,twisted,xbr]>=22.4.1 | ||
| zlmdb>=22.4.1 | ||
| web3>=5.28.0 | ||
| argon2_cffi>=20.1.0 | ||
| [dev] | ||
| pip>=9.0.1 | ||
| bumpversion>=0.5.3 | ||
| wheel>=0.30.0 | ||
| watchdog>=0.8.3 | ||
| flake8>=3.5.0 | ||
| tox>=2.9.1 | ||
| tox-gh-actions>=2.2.0 | ||
| codecov>=2.0.15 | ||
| sphinx>=1.7.1 | ||
| sphinxcontrib-images | ||
| sphinxcontrib-spelling | ||
| sphinx-autoapi | ||
| sphinx_rtd_theme | ||
| twine>=1.10.0 | ||
| pytest>=3.4.2 | ||
| pytest-runner>=2.11.1 | ||
| humanize>=0.5.1 | ||
| backports.tempfile>=1.0 | ||
| yapf==0.29.0 | ||
| pylint>=1.9.2 | ||
| pyyaml>=4.2b4 | ||
| twisted>=18.7.0 | ||
| txaio>=22.2.1 | ||
| autobahn[serialization,twisted,xbr]>=22.3.2 | ||
| zlmdb>=22.3.1 | ||
| autobahn[serialization,twisted,xbr]>=22.3.1 | ||
| web3>=5.28.0 | ||
| argon2>=0.1.10 | ||
| [dev:python_version >= "3.4" and platform_python_implementation != "PyPy"] | ||
| mypy>=0.610 |
| LICENSE | ||
| MANIFEST.in | ||
| README.rst | ||
| requirements-dev.txt | ||
| requirements.txt | ||
| setup.cfg | ||
@@ -12,3 +15,2 @@ setup.py | ||
| cfxdb/mrealmschema.py | ||
| cfxdb/schema.py | ||
| cfxdb/tracing.py | ||
@@ -22,7 +24,5 @@ cfxdb/usage.py | ||
| cfxdb.egg-info/zip-safe | ||
| cfxdb/eventstore/__init__.py | ||
| cfxdb/eventstore/event.py | ||
| cfxdb/eventstore/publication.py | ||
| cfxdb/eventstore/schema.py | ||
| cfxdb/eventstore/session.py | ||
| cfxdb/cookiestore/__init__.py | ||
| cfxdb/cookiestore/_cookie.py | ||
| cfxdb/cookiestore/_schema.py | ||
| cfxdb/gen/KeyValue.py | ||
@@ -43,8 +43,4 @@ cfxdb/gen/ObjRef.py | ||
| cfxdb/gen/arealm/__init__.py | ||
| cfxdb/gen/eventstore/EncAlgo.py | ||
| cfxdb/gen/eventstore/EncSerializer.py | ||
| cfxdb/gen/eventstore/Event.py | ||
| cfxdb/gen/eventstore/Publication.py | ||
| cfxdb/gen/eventstore/Session.py | ||
| cfxdb/gen/eventstore/__init__.py | ||
| cfxdb/gen/cookiestore/Cookie.py | ||
| cfxdb/gen/cookiestore/__init__.py | ||
| cfxdb/gen/log/IpVersion.py | ||
@@ -82,2 +78,8 @@ cfxdb/gen/log/MNodeLog.py | ||
| cfxdb/gen/mrealm/__init__.py | ||
| cfxdb/gen/realmstore/EncAlgo.py | ||
| cfxdb/gen/realmstore/EncSerializer.py | ||
| cfxdb/gen/realmstore/Event.py | ||
| cfxdb/gen/realmstore/Publication.py | ||
| cfxdb/gen/realmstore/Session.py | ||
| cfxdb/gen/realmstore/__init__.py | ||
| cfxdb/gen/reflection/BaseType.py | ||
@@ -163,2 +165,7 @@ cfxdb/gen/reflection/Enum.py | ||
| cfxdb/mrealm/web_service.py | ||
| cfxdb/realmstore/__init__.py | ||
| cfxdb/realmstore/_event.py | ||
| cfxdb/realmstore/_publication.py | ||
| cfxdb/realmstore/_schema.py | ||
| cfxdb/realmstore/_session.py | ||
| cfxdb/tests/__init__.py | ||
@@ -170,6 +177,8 @@ cfxdb/tests/_util.py | ||
| cfxdb/tests/test_xbrnetwork.py | ||
| cfxdb/tests/eventstore/__init__.py | ||
| cfxdb/tests/eventstore/test_event.py | ||
| cfxdb/tests/eventstore/test_publication.py | ||
| cfxdb/tests/eventstore/test_session.py | ||
| cfxdb/tests/cookiestore/__init__.py | ||
| cfxdb/tests/cookiestore/test_cookie.py | ||
| cfxdb/tests/realmstore/__init__.py | ||
| cfxdb/tests/realmstore/test_event.py | ||
| cfxdb/tests/realmstore/test_publication.py | ||
| cfxdb/tests/realmstore/test_session.py | ||
| cfxdb/tests/user/__init__.py | ||
@@ -176,0 +185,0 @@ cfxdb/tests/user/test_activation_token.py |
@@ -28,2 +28,3 @@ ############################################################################### | ||
| import txaio | ||
| txaio.use_twisted() | ||
@@ -38,3 +39,3 @@ | ||
| from . import meta, mrealm, xbr, xbrmm, xbrnetwork # noqa | ||
| from . import schema, globalschema, mrealmschema # noqa | ||
| from . import globalschema, mrealmschema, cookiestore, realmstore # noqa | ||
@@ -45,2 +46,4 @@ __all__ = ( | ||
| 'mrealm', | ||
| 'cookiestore', | ||
| 'realmstore', | ||
| 'xbr', | ||
@@ -59,3 +62,2 @@ 'xbrmm', | ||
| 'unpack_uint64', | ||
| 'schema', | ||
| 'globalschema', | ||
@@ -62,0 +64,0 @@ 'mrealmschema', |
@@ -8,2 +8,2 @@ ############################################################################## | ||
| __version__ = '22.3.1' | ||
| __version__ = '22.4.1' |
@@ -8,3 +8,3 @@ ############################################################################## | ||
| from typing import Optional, List | ||
| from typing import Optional, List, Any, Dict | ||
| from uuid import UUID | ||
@@ -63,5 +63,6 @@ import pprint | ||
| webcluster_oid: Optional[UUID] = None, | ||
| datamarket_oid: Optional[UUID] = None, | ||
| changed: Optional[np.datetime64] = None, | ||
| owner_oid: Optional[UUID] = None, | ||
| _unknown=None): | ||
| _unknown: Optional[Any] = None): | ||
| """ | ||
@@ -81,7 +82,13 @@ | ||
| :param workergroup_oid: When running, router cluster worker group this application realm is running on. | ||
| :param workergroup_oid: When running, router cluster worker group this application | ||
| realm is running on. | ||
| :param webcluster_oid: When running, the web cluster to serve as a frontend layer for the application realm. | ||
| :param webcluster_oid: When running, the web cluster to serve as a frontend layer | ||
| for the application realm. | ||
| :param datamarket_oid: When this application realm is to be federated with nodes paired | ||
| to a different management realm (master node) or run by different operators. | ||
| :param changed: Timestamp when the application realm was last changed | ||
| :param owner_oid: Owning user (object ID) | ||
@@ -94,2 +101,3 @@ """ | ||
| self.webcluster_oid = webcluster_oid | ||
| self.datamarket_oid = datamarket_oid | ||
| self.changed = changed | ||
@@ -101,3 +109,3 @@ self.owner_oid = owner_oid | ||
| def __eq__(self, other): | ||
| def __eq__(self, other: Any) -> bool: | ||
| if not isinstance(other, self.__class__): | ||
@@ -115,2 +123,4 @@ return False | ||
| return False | ||
| if other.datamarket_oid != self.datamarket_oid: | ||
| return False | ||
| if other.changed != self.changed: | ||
@@ -122,9 +132,9 @@ return False | ||
| def __ne__(self, other): | ||
| def __ne__(self, other: Any) -> bool: | ||
| return not self.__eq__(other) | ||
| def __str__(self): | ||
| def __str__(self) -> str: | ||
| return pprint.pformat(self.marshal()) | ||
| def copy(self, other, overwrite=False): | ||
| def copy(self, other: 'ApplicationRealm', overwrite: bool = False): | ||
| """ | ||
@@ -134,4 +144,3 @@ Copy over other object. | ||
| :param other: Other application realm to copy data from. | ||
| :type other: instance of :class:`ManagementRealm` | ||
| :return: | ||
| :param overwrite: Overwrite members already set. | ||
| """ | ||
@@ -148,2 +157,4 @@ ConfigurationElement.copy(self, other, overwrite=overwrite) | ||
| self.webcluster_oid = other.webcluster_oid | ||
| if (not self.datamarket_oid and other.datamarket_oid) or overwrite: | ||
| self.datamarket_oid = other.datamarket_oid | ||
| if (not self.changed and other.changed) or overwrite: | ||
@@ -156,7 +167,5 @@ self.changed = other.changed | ||
| def marshal(self): | ||
| def marshal(self) -> Dict[str, Any]: | ||
| """ | ||
| Marshal this object to a generic host language object. | ||
| :return: dict | ||
| """ | ||
@@ -166,9 +175,19 @@ obj = ConfigurationElement.marshal(self) | ||
| obj.update({ | ||
| 'oid': str(self.oid) if self.oid else None, | ||
| 'name': self.name, | ||
| 'status': self.STATUS_BY_CODE.get(self.status, None), | ||
| 'workergroup_oid': str(self.workergroup_oid) if self.workergroup_oid else None, | ||
| 'webcluster_oid': str(self.webcluster_oid) if self.webcluster_oid else None, | ||
| 'changed': int(self.changed) if self.changed else None, | ||
| 'owner_oid': str(self.owner_oid) if self.owner_oid else None, | ||
| 'oid': | ||
| str(self.oid) if self.oid else None, | ||
| 'name': | ||
| self.name, | ||
| 'status': | ||
| ApplicationRealm.STATUS_BY_CODE[self.status] | ||
| if self.status in ApplicationRealm.STATUS_BY_CODE else None, | ||
| 'workergroup_oid': | ||
| str(self.workergroup_oid) if self.workergroup_oid else None, | ||
| 'webcluster_oid': | ||
| str(self.webcluster_oid) if self.webcluster_oid else None, | ||
| 'datamarket_oid': | ||
| str(self.datamarket_oid) if self.datamarket_oid else None, | ||
| 'changed': | ||
| int(self.changed) if self.changed else None, | ||
| 'owner_oid': | ||
| str(self.owner_oid) if self.owner_oid else None, | ||
| }) | ||
@@ -183,3 +202,3 @@ | ||
| @staticmethod | ||
| def parse(data): | ||
| def parse(data: Dict[str, Any]) -> 'ApplicationRealm': | ||
| """ | ||
@@ -189,5 +208,3 @@ Parse generic host language object into an object of this class. | ||
| :param data: Generic host language object | ||
| :type data: dict | ||
| :return: instance of :class:`ApplicationRealm` | ||
| :returns: New instance of this class. | ||
| """ | ||
@@ -212,3 +229,3 @@ assert type(data) == dict | ||
| assert status is None or (type(status) == str) | ||
| status = ApplicationRealm.STATUS_BY_NAME.get(status, None) | ||
| status = ApplicationRealm.STATUS_BY_NAME[status] if status in ApplicationRealm.STATUS_BY_NAME else None | ||
@@ -228,2 +245,8 @@ workergroup_oid = None | ||
| datamarket_oid = None | ||
| if 'datamarket_oid' in data and data['datamarket_oid'] is not None: | ||
| assert type(data['datamarket_oid']) == str, 'datamarket_oid must be a string, but was {}'.format( | ||
| type(data['datamarket_oid'])) | ||
| datamarket_oid = UUID(data['datamarket_oid']) | ||
| owner_oid = None | ||
@@ -247,2 +270,3 @@ if 'owner_oid' in data and data['owner_oid'] is not None: | ||
| webcluster_oid=webcluster_oid, | ||
| datamarket_oid=datamarket_oid, | ||
| status=status, | ||
@@ -249,0 +273,0 @@ owner_oid=owner_oid, |
@@ -19,4 +19,4 @@ ############################################################################## | ||
| """ | ||
| Placement of router worker groups onto router clusters, specifically router | ||
| workers running as part of router worker groups. | ||
| Placement of router worker groups onto router clusters, specifically **router workers** | ||
| and **xbrmm workers** running as part of router worker groups. | ||
| """ | ||
@@ -166,4 +166,4 @@ def __init__(self, | ||
| tcp_listening_port = data.get('tcp_listening_port', None) | ||
| assert tcp_listening_port is None or (type(tcp_listening_port) == int and tcp_listening_port >= 0 | ||
| and tcp_listening_port < 65536) | ||
| assert tcp_listening_port is None or (type(tcp_listening_port) == int | ||
| and 0 <= tcp_listening_port < 65536) | ||
@@ -170,0 +170,0 @@ obj = RouterWorkerGroupClusterPlacement(oid=oid, |
+43
-39
@@ -1,4 +0,4 @@ | ||
| Metadata-Version: 1.2 | ||
| Metadata-Version: 2.1 | ||
| Name: cfxdb | ||
| Version: 22.3.1 | ||
| Version: 22.4.1 | ||
| Summary: Crossbar.io Database, based on zLMDB | ||
@@ -8,39 +8,2 @@ Home-page: https://crossbario.com | ||
| License: MIT License | ||
| Description: Introduction to cfxdb | ||
| ===================== | ||
| | |Version| |Build| |Deploy| |Docs| | ||
| **cfxdb** is a Crossbar.io Python support package with core database access classes | ||
| written in native Python. The package allows direct in-memory data access from | ||
| Python programs (including Jupyter notebooks) to CrossbarFX edge node data: | ||
| * persisted WAMP event history | ||
| * persisted router tracing data | ||
| * XBR market maker transactions database | ||
| * XBR network backend database | ||
| * WAMP session cache | ||
| * custom user, embedded object databases | ||
| -------------- | ||
| *Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| WAMP, Crossbar.io and XBR are trademarks of Crossbar.io Technologies GmbH.* | ||
| .. |Version| image:: https://img.shields.io/pypi/v/cfxdb.svg | ||
| :target: https://pypi.python.org/pypi/cfxdb | ||
| :alt: Version | ||
| .. |Build| image:: https://github.com/crossbario/cfxdb/workflows/main/badge.svg | ||
| :target: https://github.com/crossbario/cfxdb/actions?query=workflow%3Amain | ||
| :alt: Build | ||
| .. |Deploy| image:: https://github.com/crossbario/cfxdb/workflows/deploy/badge.svg | ||
| :target: https://github.com/crossbario/cfxdb/actions?query=workflow%3Adeploy | ||
| :alt: Deploy | ||
| .. |Docs| image:: https://img.shields.io/badge/docs-brightgreen.svg?style=flat | ||
| :target: https://crossbario.com/docs/cfxdb/index.html | ||
| :alt: Docs | ||
| Platform: Any | ||
@@ -69,1 +32,42 @@ Classifier: License :: OSI Approved :: MIT License | ||
| Requires-Python: >=3.7 | ||
| Provides-Extra: dev | ||
| License-File: LICENSE | ||
| Introduction to cfxdb | ||
| ===================== | ||
| | |Version| |Build| |Deploy| |Docs| | ||
| **cfxdb** is a Crossbar.io Python support package with core database access classes | ||
| written in native Python. The package allows direct in-memory data access from | ||
| Python programs (including Jupyter notebooks) to CrossbarFX edge node data: | ||
| * persisted WAMP event history | ||
| * persisted router tracing data | ||
| * XBR market maker transactions database | ||
| * XBR network backend database | ||
| * WAMP session cache | ||
| * custom user, embedded object databases | ||
| -------------- | ||
| *Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| WAMP, Crossbar.io and XBR are trademarks of Crossbar.io Technologies GmbH.* | ||
| .. |Version| image:: https://img.shields.io/pypi/v/cfxdb.svg | ||
| :target: https://pypi.python.org/pypi/cfxdb | ||
| :alt: Version | ||
| .. |Build| image:: https://github.com/crossbario/cfxdb/workflows/main/badge.svg | ||
| :target: https://github.com/crossbario/cfxdb/actions?query=workflow%3Amain | ||
| :alt: Build | ||
| .. |Deploy| image:: https://github.com/crossbario/cfxdb/workflows/deploy/badge.svg | ||
| :target: https://github.com/crossbario/cfxdb/actions?query=workflow%3Adeploy | ||
| :alt: Deploy | ||
| .. |Docs| image:: https://readthedocs.org/projects/cfxdb/badge/?version=latest | ||
| :target: https://cfxdb.readthedocs.io/en/latest/ | ||
| :alt: Docs | ||
+2
-2
@@ -34,4 +34,4 @@ Introduction to cfxdb | ||
| .. |Docs| image:: https://img.shields.io/badge/docs-brightgreen.svg?style=flat | ||
| :target: https://crossbario.com/docs/cfxdb/index.html | ||
| .. |Docs| image:: https://readthedocs.org/projects/cfxdb/badge/?version=latest | ||
| :target: https://cfxdb.readthedocs.io/en/latest/ | ||
| :alt: Docs |
+45
-7
@@ -38,3 +38,38 @@ ############################################################################### | ||
| # we read requirements from requirements*.txt files down below | ||
| install_requires = [] | ||
| extras_require = { | ||
| 'dev': [] | ||
| } | ||
| reqs = 'requirements.txt' | ||
| # https://mike.zwobble.org/2013/05/adding-git-or-hg-or-svn-dependencies-in-setup-py/ | ||
| dependency_links = [] | ||
| with open(reqs) as f: | ||
| for line in f.read().splitlines(): | ||
| line = line.strip() | ||
| if not line.startswith('#'): | ||
| parts = line.strip().split(';') | ||
| if len(parts) > 1: | ||
| parts[0] = parts[0].strip() | ||
| parts[1] = ':{}'.format(parts[1].strip()) | ||
| if parts[1] not in extras_require: | ||
| extras_require[parts[1]] = [] | ||
| extras_require[parts[1]].append(parts[0]) | ||
| else: | ||
| name = parts[0].strip() | ||
| # do NOT (!) touch this! add dependency to either install_requires or dependency_links | ||
| # depending on whether a git+ URL is used or not (eg plain PyPI) | ||
| # https://mike.zwobble.org/2013/05/adding-git-or-hg-or-svn-dependencies-in-setup-py/ | ||
| if name.startswith('git+'): | ||
| dependency_links.append(name) | ||
| elif name != '': | ||
| install_requires.append(name) | ||
| with open('requirements-dev.txt') as f: | ||
| for line in f.read().splitlines(): | ||
| extras_require['dev'].append(line.strip()) | ||
| setup( | ||
@@ -71,12 +106,15 @@ name='cfxdb', | ||
| python_requires='>=3.7', | ||
| install_requires=[ | ||
| 'zlmdb>=22.3.1', | ||
| 'autobahn[twisted,xbr,serialization]>=22.3.1', | ||
| 'web3>=5.28.0', | ||
| 'argon2>=0.1.10' | ||
| ], | ||
| install_requires=install_requires, | ||
| # https://mike.zwobble.org/2013/05/adding-git-or-hg-or-svn-dependencies-in-setup-py/ | ||
| dependency_links=dependency_links, | ||
| extras_require=extras_require, | ||
| packages=find_packages(), | ||
| # this flag will make files from MANIFEST.in go into _source_ distributions only | ||
| include_package_data=True, | ||
| data_files=[('.', ['LICENSE', 'README.rst'])], | ||
| data_files=[('.', ['LICENSE', 'README.rst', 'requirements.txt', 'requirements-dev.txt'])], | ||
| zip_safe=True | ||
| ) |
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| from cfxdb.eventstore.publication import Publication | ||
| from cfxdb.eventstore.session import Session | ||
| from cfxdb.eventstore.event import Event | ||
| from cfxdb.gen.eventstore.EncAlgo import EncAlgo | ||
| from cfxdb.gen.eventstore.EncSerializer import EncSerializer | ||
# Public API of the cfxdb.eventstore package: database wrapper classes
# (Publication, Session, Event) plus the flatc-generated enum helpers.
__all__ = (
    'Publication',
    'Session',
    'Event',
    'EncAlgo',
    'EncSerializer',
)
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pprint | ||
| from cfxdb.gen.eventstore import Event as EventGen | ||
class Event(object):
    """
    Persisted event database object.

    Wraps a flatc-generated ``cfxdb.gen.eventstore.Event`` record with lazy,
    cached attribute access: when constructed from a flatbuffer (see
    :meth:`Event.cast`), each attribute is read from the buffer on first
    access and cached afterwards.
    """
    def __init__(self, from_fbs=None):
        """
        :param from_fbs: Optional flatbuffers-generated object to lazily read
            attribute values from (as produced by :meth:`Event.cast`).
        """
        self._from_fbs = from_fbs

        # cached attribute values; None means "not yet read from buffer / not set"
        self._timestamp = None
        self._subscription = None
        self._publication = None
        self._receiver = None
        self._retained = None
        self._acknowledged_delivery = None

    def marshal(self):
        """
        Marshal this object to a generic host language object.

        :return: dict with all event attributes
        """
        obj = {
            'timestamp': self.timestamp,
            'subscription': self.subscription,
            'publication': self.publication,
            'receiver': self.receiver,
            'retained': self.retained,
            'acknowledged_delivery': self.acknowledged_delivery,
        }
        return obj

    def __str__(self):
        return '\n{}\n'.format(pprint.pformat(self.marshal()))

    @property
    def timestamp(self):
        """
        Timestamp when the event was sent to the receiver. Epoch time in ns.

        :returns: Epoch time in ns.
        :rtype: int
        """
        if self._timestamp is None and self._from_fbs:
            self._timestamp = self._from_fbs.Timestamp()
        return self._timestamp

    @timestamp.setter
    def timestamp(self, value):
        # allow None (unset) for consistency with Publication setters
        assert value is None or type(value) == int
        self._timestamp = value

    @property
    def subscription(self):
        """
        The subscription ID this event is dispatched under.

        :returns: The subscription ID.
        :rtype: int
        """
        if self._subscription is None and self._from_fbs:
            self._subscription = self._from_fbs.Subscription()
        return self._subscription

    @subscription.setter
    def subscription(self, value):
        assert value is None or type(value) == int
        self._subscription = value

    @property
    def publication(self):
        """
        The publication ID of the dispatched event.

        :returns: The publication ID.
        :rtype: int
        """
        if self._publication is None and self._from_fbs:
            self._publication = self._from_fbs.Publication()
        return self._publication

    @publication.setter
    def publication(self, value):
        assert value is None or type(value) == int
        self._publication = value

    @property
    def receiver(self):
        """
        The WAMP session ID of the receiver.

        :returns: The receiver ID.
        :rtype: int
        """
        if self._receiver is None and self._from_fbs:
            self._receiver = self._from_fbs.Receiver()
        return self._receiver

    @receiver.setter
    def receiver(self, value):
        assert value is None or type(value) == int
        self._receiver = value

    @property
    def retained(self):
        """
        Whether the message was retained by the broker on the topic, rather than just published.

        :returns: retained flag
        :rtype: bool
        """
        if self._retained is None and self._from_fbs:
            self._retained = self._from_fbs.Retained()
        return self._retained

    @retained.setter
    def retained(self, value):
        assert value is None or type(value) == bool
        self._retained = value

    @property
    def acknowledged_delivery(self):
        """
        Whether this Event was to be acknowledged by the receiver.

        :returns: acknowledged delivery flag
        :rtype: bool
        """
        if self._acknowledged_delivery is None and self._from_fbs:
            self._acknowledged_delivery = self._from_fbs.AcknowledgedDelivery()
        return self._acknowledged_delivery

    @acknowledged_delivery.setter
    def acknowledged_delivery(self, value):
        assert value is None or type(value) == bool
        self._acknowledged_delivery = value

    @staticmethod
    def cast(buf):
        """
        Cast a raw flatbuffers byte buffer to an :class:`Event` that reads
        lazily from the buffer.

        :param buf: Serialized event record.
        :return: New instance of this class, backed by ``buf``.
        """
        return Event(EventGen.Event.GetRootAsEvent(buf, 0))

    def build(self, builder):
        """
        Serialize this event into the given flatbuffers builder.

        :param builder: Flatbuffers builder to write into.
        :return: Offset of the finished object within the builder buffer.
        """
        # now start and build a new object ..
        EventGen.EventStart(builder)
        EventGen.EventAddTimestamp(builder, self.timestamp)
        EventGen.EventAddSubscription(builder, self.subscription)
        EventGen.EventAddPublication(builder, self.publication)
        EventGen.EventAddReceiver(builder, self.receiver)

        # optional flags: only written when explicitly set
        if self.retained is not None:
            EventGen.EventAddRetained(builder, self.retained)
        if self.acknowledged_delivery is not None:
            EventGen.EventAddAcknowledgedDelivery(builder, self.acknowledged_delivery)

        # finish the object.
        final = EventGen.EventEnd(builder)

        return final
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pprint | ||
| import cbor2 | ||
| import flatbuffers | ||
| from cfxdb.gen.eventstore import Publication as PublicationGen | ||
class _Publication(PublicationGen.Publication):
    """
    Expand methods on the class code generated by flatc.

    Adds zero-copy ``*AsBytes`` accessors that return a memoryview into the
    underlying flatbuffer, instead of the per-element vector accessors flatc
    emits by default.

    FIXME: come up with a PR for flatc to generate this stuff automatically.
    """
    @classmethod
    def GetRootAsPublication(cls, buf, offset):
        # Read the root table offset from the buffer and initialize an
        # instance of this extended class (rather than the generated base).
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = _Publication()
        x.Init(buf, n + offset)
        return x

    def ArgsAsBytes(self):
        # Zero-copy view of the raw serialized ``args`` vector (decoded with
        # cbor2 by the Publication.args property below).
        # NOTE(review): vtable slot 12 is assumed to match the ``args`` field
        # of the generated Publication schema — confirm against the .fbs file.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            _off = self._tab.Vector(o)
            _len = self._tab.VectorLen(o)
            return memoryview(self._tab.Bytes)[_off:_off + _len]
        # field absent in this buffer
        return None

    def KwargsAsBytes(self):
        # Zero-copy view of the raw serialized ``kwargs`` vector (decoded with
        # cbor2 by the Publication.kwargs property below).
        # NOTE(review): vtable slot 14 assumed to be ``kwargs`` — confirm.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            _off = self._tab.Vector(o)
            _len = self._tab.VectorLen(o)
            return memoryview(self._tab.Bytes)[_off:_off + _len]
        # field absent in this buffer
        return None

    def PayloadAsBytes(self):
        # Zero-copy view of the transparent (possibly encrypted) payload.
        # NOTE(review): vtable slot 16 assumed to be ``payload`` — confirm.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
        if o != 0:
            _off = self._tab.Vector(o)
            _len = self._tab.VectorLen(o)
            return memoryview(self._tab.Bytes)[_off:_off + _len]
        # field absent in this buffer
        return None

    def EncKeyAsBytes(self):
        # Zero-copy view of the encryption key material bytes.
        # NOTE(review): vtable slot 38 assumed to be ``enc_key`` — confirm.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        if o != 0:
            _off = self._tab.Vector(o)
            _len = self._tab.VectorLen(o)
            return memoryview(self._tab.Bytes)[_off:_off + _len]
        # field absent in this buffer
        return None
| class Publication(object): | ||
| """ | ||
| Persisted publication database object. | ||
| """ | ||
| ENC_ALGO_NONE = 0 | ||
| ENC_ALGO_CRYPTOBOX = 1 | ||
| ENC_ALGO_MQTT = 2 | ||
| ENC_ALGO_XBR = 3 | ||
| ENC_SER_NONE = 0 | ||
| ENC_SER_JSON = 1 | ||
| ENC_SER_MSGPACK = 2 | ||
| ENC_SER_CBOR = 3 | ||
| ENC_SER_UBJSON = 4 | ||
| ENC_SER_OPAQUE = 5 | ||
| ENC_SER_FLATBUFFERS = 6 | ||
| __slots__ = ( | ||
| '_from_fbs', | ||
| '_timestamp', | ||
| '_publication', | ||
| '_publisher', | ||
| '_topic', | ||
| '_args', | ||
| '_kwargs', | ||
| '_payload', | ||
| '_acknowledge', | ||
| '_retain', | ||
| '_exclude_me', | ||
| '_exclude', | ||
| '_exclude_authid', | ||
| '_exclude_authrole', | ||
| '_eligible', | ||
| '_eligible_authid', | ||
| '_eligible_authrole', | ||
| '_enc_algo', | ||
| '_enc_key', | ||
| '_enc_serializer', | ||
| ) | ||
| def __init__(self, from_fbs=None): | ||
| self._from_fbs = from_fbs | ||
| self._timestamp = None | ||
| self._publication = None | ||
| self._publisher = None | ||
| self._topic = None | ||
| self._args = None | ||
| self._kwargs = None | ||
| self._payload = None | ||
| self._acknowledge = None | ||
| self._retain = None | ||
| self._exclude_me = None | ||
| self._exclude = None | ||
| self._exclude_authid = None | ||
| self._exclude_authrole = None | ||
| self._eligible = None | ||
| self._eligible_authid = None | ||
| self._eligible_authrole = None | ||
| self._enc_algo = None | ||
| self._enc_key = None | ||
| self._enc_serializer = None | ||
| def marshal(self): | ||
| obj = { | ||
| 'timestamp': self.timestamp, | ||
| 'publication': self.publication, | ||
| 'publisher': self.publisher, | ||
| 'topic': self.topic, | ||
| 'args': self.args, | ||
| 'kwargs': self.kwargs, | ||
| 'payload': self.payload, | ||
| 'acknowledge': self.acknowledge, | ||
| 'retain': self.retain, | ||
| 'exclude_me': self.exclude_me, | ||
| 'exclude': self.exclude, | ||
| 'exclude_authid': self.exclude_authid, | ||
| 'exclude_authrole': self.exclude_authrole, | ||
| 'eligible': self.eligible, | ||
| 'eligible_authid': self.eligible_authid, | ||
| 'eligible_authrole': self.eligible_authrole, | ||
| 'enc_algo': self.enc_algo, | ||
| 'enc_key': self.enc_key, | ||
| 'enc_serializer': self.enc_serializer, | ||
| } | ||
| return obj | ||
| def __str__(self): | ||
| return '\n{}\n'.format(pprint.pformat(self.marshal())) | ||
| @property | ||
| def timestamp(self): | ||
| """ | ||
| Timestamp when the publication was accepted by the broker. Epoch time in ns. | ||
| :returns: epoch time in ns | ||
| :rtype: int | ||
| """ | ||
| if self._timestamp is None and self._from_fbs: | ||
| self._timestamp = self._from_fbs.Timestamp() | ||
| return self._timestamp | ||
| @timestamp.setter | ||
| def timestamp(self, value): | ||
| assert value is None or type(value) == int | ||
| self._timestamp = value | ||
| @property | ||
| def publication(self): | ||
| """ | ||
| WAMP publication ID that was assigned by the broker. | ||
| :returns: publication ID | ||
| :rtype: int | ||
| """ | ||
| if self._publication is None and self._from_fbs: | ||
| self._publication = self._from_fbs.Publication() | ||
| return self._publication | ||
| @publication.setter | ||
| def publication(self, value): | ||
| assert value is None or type(value) == int | ||
| self._publication = value | ||
| @property | ||
| def publisher(self): | ||
| """ | ||
| WAMP session ID of the publisher. | ||
| :returns: publisher ID | ||
| :rtype: int | ||
| """ | ||
| if self._publisher is None and self._from_fbs: | ||
| self._publisher = self._from_fbs.Publisher() | ||
| return self._publisher | ||
| @publisher.setter | ||
| def publisher(self, value): | ||
| assert value is None or type(value) == int | ||
| self._publisher = value | ||
| @property | ||
| def topic(self): | ||
| """ | ||
| The WAMP or application URI of the PubSub topic the event was published to. | ||
| :returns: topic (URI) published to | ||
| :rtype: str | ||
| """ | ||
| if self._topic is None and self._from_fbs: | ||
| self._topic = self._from_fbs.Topic().decode('utf8') | ||
| return self._topic | ||
| @topic.setter | ||
| def topic(self, value): | ||
| assert value is None or type(value) == str | ||
| self._topic = value | ||
| # | ||
| # args, kwargs, payload | ||
| # | ||
| @property | ||
| def args(self): | ||
| """ | ||
| Positional values for application-defined event payload. | ||
| :returns: positional arguments (app payload) of the event (if any) | ||
| :rtype: None or list | ||
| """ | ||
| if self._args is None and self._from_fbs: | ||
| if self._from_fbs.ArgsLength(): | ||
| self._args = cbor2.loads(bytes(self._from_fbs.ArgsAsBytes())) | ||
| return self._args | ||
| @args.setter | ||
| def args(self, value): | ||
| assert value is None or type(value) == list | ||
| self._args = value | ||
| @property | ||
| def kwargs(self): | ||
| """ | ||
| Keyword values for application-defined event payload. | ||
| :returns: keyword arguments (app payload) of the event (if any) | ||
| :rtype: None or dict | ||
| """ | ||
| if self._kwargs is None and self._from_fbs: | ||
| if self._from_fbs.KwargsLength(): | ||
| self._kwargs = cbor2.loads(bytes(self._from_fbs.KwargsAsBytes())) | ||
| return self._kwargs | ||
| @kwargs.setter | ||
| def kwargs(self, value): | ||
| assert value is None or type(value) == dict | ||
| self._kwargs = value | ||
| @property | ||
| def payload(self): | ||
| """ | ||
| Alternative, transparent payload. If given, ``args`` and ``kwargs`` must be left unset. | ||
| :returns: Transparent binary payload (see ``enc_algo``) if applicable | ||
| :rtype: None or bytes | ||
| """ | ||
| if self._payload is None and self._from_fbs: | ||
| if self._from_fbs.PayloadLength(): | ||
| self._payload = self._from_fbs.PayloadAsBytes() | ||
| return self._payload | ||
| @payload.setter | ||
| def payload(self, value): | ||
| assert value is None or type(value) == bytes | ||
| self._payload = value | ||
| # | ||
| # acknowledge, retain, exclude_me | ||
| # | ||
| @property | ||
| def acknowledge(self): | ||
| """ | ||
| If ``True``, the broker was asked to acknowledge the publication with a success or error response. | ||
| :returns: acknowledge flag | ||
| :rtype: None or bool | ||
| """ | ||
| if self._acknowledge is None and self._from_fbs: | ||
| self._acknowledge = self._from_fbs.Acknowledge() | ||
| return self._acknowledge | ||
| @acknowledge.setter | ||
| def acknowledge(self, value): | ||
| assert value is None or type(value) == bool | ||
| self._acknowledge = value | ||
| @property | ||
| def retain(self): | ||
| """ | ||
| If ``True``, the broker was requested to retain this event. | ||
| :returns: retain flag | ||
| :rtype: None or bool | ||
| """ | ||
| if self._retain is None and self._from_fbs: | ||
| self._retain = self._from_fbs.Retain() | ||
| return self._retain | ||
| @retain.setter | ||
| def retain(self, value): | ||
| assert value is None or type(value) == bool | ||
| self._retain = value | ||
| @property | ||
| def exclude_me(self): | ||
| """ | ||
| If ``True``, the broker was asked to exclude the publisher from receiving the event. | ||
| :returns: exclude_me flag | ||
| :rtype: None or bool | ||
| """ | ||
| if self._exclude_me is None and self._from_fbs: | ||
| self._exclude_me = self._from_fbs.ExcludeMe() | ||
| return self._exclude_me | ||
| @exclude_me.setter | ||
| def exclude_me(self, value): | ||
| assert value is None or type(value) == bool | ||
| self._exclude_me = value | ||
| # | ||
| # exclude, exclude_authid, exclude_authrole | ||
| # | ||
| @property | ||
| def exclude(self): | ||
| """ | ||
| List of WAMP session IDs to exclude from receiving this event. | ||
| :returns: list of excluded session IDs | ||
| :rtype: list[int] | ||
| """ | ||
| if self._exclude is None and self._from_fbs: | ||
| if self._from_fbs.ExcludeLength(): | ||
| exclude = [] | ||
| for j in range(self._from_fbs.ExcludeLength()): | ||
| exclude.append(self._from_fbs.Exclude(j)) | ||
| self._exclude = exclude | ||
| return self._exclude | ||
| @exclude.setter | ||
| def exclude(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == int | ||
| self._exclude = value | ||
| @property | ||
| def exclude_authid(self): | ||
| """ | ||
| List of WAMP authids to exclude from receiving this event. | ||
| :returns: list of excluded authids | ||
| :rtype: list[str] | ||
| """ | ||
| if self._exclude_authid is None and self._from_fbs: | ||
| if self._from_fbs.ExcludeAuthidLength(): | ||
| exclude_authid = [] | ||
| for j in range(self._from_fbs.ExcludeAuthidLength()): | ||
| exclude_authid.append(self._from_fbs.ExcludeAuthid(j).decode('utf8')) | ||
| self._exclude_authid = exclude_authid | ||
| return self._exclude_authid | ||
| @exclude_authid.setter | ||
| def exclude_authid(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == str | ||
| self._exclude_authid = value | ||
| @property | ||
| def exclude_authrole(self): | ||
| """ | ||
| List of WAMP authroles to exclude from receiving this event. | ||
| :returns: list of excluded authroles | ||
| :rtype: list[str] | ||
| """ | ||
| if self._exclude_authrole is None and self._from_fbs: | ||
| if self._from_fbs.ExcludeAuthroleLength(): | ||
| exclude_authrole = [] | ||
| for j in range(self._from_fbs.ExcludeAuthroleLength()): | ||
| exclude_authrole.append(self._from_fbs.ExcludeAuthrole(j).decode('utf8')) | ||
| self._exclude_authrole = exclude_authrole | ||
| return self._exclude_authrole | ||
| @exclude_authrole.setter | ||
| def exclude_authrole(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == str | ||
| self._exclude_authrole = value | ||
| # | ||
| # eligible, eligible_authid, eligible_authrole | ||
| # | ||
| @property | ||
| def eligible(self): | ||
| """ | ||
| List of WAMP session IDs eligible to receive this event. | ||
| :returns: list of eligible session IDs | ||
| :rtype: list[int] | ||
| """ | ||
| if self._eligible is None and self._from_fbs: | ||
| if self._from_fbs.EligibleLength(): | ||
| eligible = [] | ||
| for j in range(self._from_fbs.EligibleLength()): | ||
| eligible.append(self._from_fbs.Eligible(j)) | ||
| self._eligible = eligible | ||
| return self._eligible | ||
| @eligible.setter | ||
| def eligible(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == int | ||
| self._eligible = value | ||
| @property | ||
| def eligible_authid(self): | ||
| """ | ||
| List of WAMP authids eligible to receive this event. | ||
| :returns: list of eligible authids | ||
| :rtype: list[str] | ||
| """ | ||
| if self._eligible_authid is None and self._from_fbs: | ||
| if self._from_fbs.EligibleAuthidLength(): | ||
| eligible_authid = [] | ||
| for j in range(self._from_fbs.EligibleAuthidLength()): | ||
| eligible_authid.append(self._from_fbs.EligibleAuthid(j).decode('utf8')) | ||
| self._eligible_authid = eligible_authid | ||
| return self._eligible_authid | ||
| @eligible_authid.setter | ||
| def eligible_authid(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == str | ||
| self._eligible_authid = value | ||
| @property | ||
| def eligible_authrole(self): | ||
| """ | ||
| List of WAMP authroles eligible to receive this event. | ||
| :returns: list of eligible authroles | ||
| :rtype: list[str] | ||
| """ | ||
| if self._eligible_authrole is None and self._from_fbs: | ||
| if self._from_fbs.EligibleAuthroleLength(): | ||
| eligible_authrole = [] | ||
| for j in range(self._from_fbs.EligibleAuthroleLength()): | ||
| eligible_authrole.append(self._from_fbs.EligibleAuthrole(j).decode('utf8')) | ||
| self._eligible_authrole = eligible_authrole | ||
| return self._eligible_authrole | ||
| @eligible_authrole.setter | ||
| def eligible_authrole(self, value): | ||
| assert value is None or type(value) == list | ||
| if value: | ||
| for x in value: | ||
| assert type(x) == str | ||
| self._eligible_authrole = value | ||
| # | ||
| # encryption | ||
| # | ||
| @property | ||
| def enc_algo(self): | ||
| """ | ||
| When using payload transparency, the encoding algorithm that was used to encode the payload. | ||
| :returns: payload encryption algorithm | ||
| :rtype: int | ||
| """ | ||
| if self._enc_algo is None and self._from_fbs: | ||
| self._enc_algo = self._from_fbs.EncAlgo() | ||
| return self._enc_algo | ||
| @enc_algo.setter | ||
| def enc_algo(self, value): | ||
| assert value is None or value in [self.ENC_ALGO_CRYPTOBOX, self.ENC_ALGO_MQTT, self.ENC_ALGO_XBR] | ||
| self._enc_algo = value | ||
| @property | ||
| def enc_key(self): | ||
| """ | ||
| When using payload transparency with an encryption algorithm, the payload encryption key. | ||
| :returns: payload key | ||
| :rtype: None or bytes | ||
| """ | ||
| if self._enc_key is None and self._from_fbs: | ||
| if self._from_fbs.EncKeyLength(): | ||
| self._enc_key = self._from_fbs.EncKeyAsBytes() | ||
| return self._enc_key | ||
| @enc_key.setter | ||
| def enc_key(self, value): | ||
| assert value is None or type(value) == bytes | ||
| self._enc_key = value | ||
| @property | ||
| def enc_serializer(self): | ||
| """ | ||
| When using payload transparency, the payload object serializer that was used encoding the payload. | ||
| :returns: payload serializer | ||
| :rtype: int | ||
| """ | ||
| if self._enc_serializer is None and self._from_fbs: | ||
| self._enc_serializer = self._from_fbs.EncSerializer() | ||
| return self._enc_serializer | ||
| @enc_serializer.setter | ||
| def enc_serializer(self, value): | ||
| assert value is None or value in [ | ||
| self.ENC_SER_JSON, self.ENC_SER_MSGPACK, self.ENC_SER_CBOR, self.ENC_SER_UBJSON | ||
| ] | ||
| self._enc_serializer = value | ||
@staticmethod
def cast(buf):
    """
    Parse a FlatBuffers-serialized ``Publication`` from ``buf`` and wrap it in a
    lazily-decoding accessor object (fields are deserialized on first property access).

    :param buf: Serialized FlatBuffers buffer.
    :returns: Wrapped publication object.
    """
    return Publication(_Publication.GetRootAsPublication(buf, 0))
def build(self, builder):
    """
    Serialize this publication into a new FlatBuffers ``Publication`` object.

    :param builder: The FlatBuffers builder to serialize into.
    :type builder: flatbuffers.Builder

    :returns: Offset of the finished ``Publication`` table within the builder buffer.
    :rtype: int
    """

    def _uint64_vector(values, start_vector):
        # Build a [uint64] vector. FlatBuffers vectors are written back-to-front,
        # hence the reversed() iteration.
        start_vector(builder, len(values))
        for value in reversed(values):
            builder.PrependUint64(value)
        return builder.EndVector()

    def _string_vector(values, start_vector):
        # Build a [string] vector: all string offsets must be created *before*
        # the vector is started, then prepended in reverse order.
        offsets = [builder.CreateString(value) for value in values]
        start_vector(builder, len(offsets))
        for offset in reversed(offsets):
            builder.PrependUOffsetTRelative(offset)
        return builder.EndVector()

    # strings and blobs must be created before PublicationStart()
    args = self.args
    if args:
        args = builder.CreateString(cbor2.dumps(args))
    kwargs = self.kwargs
    if kwargs:
        kwargs = builder.CreateString(cbor2.dumps(kwargs))
    payload = self.payload
    if payload:
        payload = builder.CreateString(payload)
    topic = self.topic
    if topic:
        topic = builder.CreateString(topic)
    enc_key = self.enc_key
    if enc_key:
        enc_key = builder.CreateString(enc_key)

    # exclude: [uint64]
    # FIX: previously started with PublicationStartExcludeAuthidVector (a
    # 4-byte string-offset vector), corrupting the uint64 session-ID vector.
    exclude = self.exclude
    if exclude:
        exclude = _uint64_vector(exclude, PublicationGen.PublicationStartExcludeVector)

    # exclude_authid: [string]
    exclude_authid = self.exclude_authid
    if exclude_authid:
        exclude_authid = _string_vector(exclude_authid, PublicationGen.PublicationStartExcludeAuthidVector)

    # exclude_authrole: [string]
    # FIX: previously gated on ``exclude_authid`` (copy/paste error), so this
    # vector was built or skipped depending on the wrong attribute.
    exclude_authrole = self.exclude_authrole
    if exclude_authrole:
        exclude_authrole = _string_vector(exclude_authrole, PublicationGen.PublicationStartExcludeAuthroleVector)

    # eligible: [uint64]
    # FIX: previously started with PublicationStartEligibleAuthidVector (same
    # element-size mismatch as ``exclude`` above).
    eligible = self.eligible
    if eligible:
        eligible = _uint64_vector(eligible, PublicationGen.PublicationStartEligibleVector)

    # eligible_authid: [string]
    eligible_authid = self.eligible_authid
    if eligible_authid:
        eligible_authid = _string_vector(eligible_authid, PublicationGen.PublicationStartEligibleAuthidVector)

    # eligible_authrole: [string]
    eligible_authrole = self.eligible_authrole
    if eligible_authrole:
        eligible_authrole = _string_vector(eligible_authrole, PublicationGen.PublicationStartEligibleAuthroleVector)

    # now start and build a new object ..
    PublicationGen.PublicationStart(builder)

    if self.timestamp:
        PublicationGen.PublicationAddTimestamp(builder, self.timestamp)
    if self.publication:
        PublicationGen.PublicationAddPublication(builder, self.publication)
    if self.publisher:
        PublicationGen.PublicationAddPublisher(builder, self.publisher)
    if topic:
        PublicationGen.PublicationAddTopic(builder, topic)
    if args:
        PublicationGen.PublicationAddArgs(builder, args)
    if kwargs:
        PublicationGen.PublicationAddKwargs(builder, kwargs)
    if payload is not None:
        PublicationGen.PublicationAddPayload(builder, payload)
    if self.acknowledge is not None:
        PublicationGen.PublicationAddAcknowledge(builder, self.acknowledge)
    if self.retain is not None:
        PublicationGen.PublicationAddRetain(builder, self.retain)
    if self.exclude_me is not None:
        PublicationGen.PublicationAddExcludeMe(builder, self.exclude_me)
    if exclude:
        PublicationGen.PublicationAddExclude(builder, exclude)
    if exclude_authid:
        PublicationGen.PublicationAddExcludeAuthid(builder, exclude_authid)
    if exclude_authrole:
        PublicationGen.PublicationAddExcludeAuthrole(builder, exclude_authrole)
    if eligible:
        PublicationGen.PublicationAddEligible(builder, eligible)
    if eligible_authid:
        PublicationGen.PublicationAddEligibleAuthid(builder, eligible_authid)
    if eligible_authrole:
        PublicationGen.PublicationAddEligibleAuthrole(builder, eligible_authrole)
    if self.enc_algo:
        PublicationGen.PublicationAddEncAlgo(builder, self.enc_algo)
    if enc_key:
        PublicationGen.PublicationAddEncKey(builder, enc_key)
    if self.enc_serializer:
        PublicationGen.PublicationAddEncSerializer(builder, self.enc_serializer)

    # finish the object.
    final = PublicationGen.PublicationEnd(builder)

    return final
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| from .event import Event | ||
| from .publication import Publication | ||
| from .session import Session | ||
class Schema(object):
    """
    Event store database schema for ZLMDB.
    """
    def __init__(self, db):
        # the zlmdb.Database this schema is attached to
        self.db = db

    events: Event
    """
    CFC persisted events.
    """

    publications: Publication
    """
    CFC persistent publications.
    """

    sessions: Session
    """
    CFC persisted sessions.
    """

    @staticmethod
    def attach(db):
        """
        Factory to create a schema from attaching to a database. The schema tables
        will be automatically mapped as persistent maps and attached to the
        database slots.

        :param db: zlmdb.Database
        :return: object of Schema
        """
        schema = Schema(db)
        # attach each table type to its slot on the schema instance
        for slot, table_type in (('events', Event),
                                 ('publications', Publication),
                                 ('sessions', Session)):
            setattr(schema, slot, db.attach_table(table_type))
        return schema
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pprint | ||
| from cfxdb.gen.eventstore import Session as SessionGen | ||
class Session(object):
    """
    Persisted session database object.
    """
    def __init__(self, from_fbs=None):
        # wrapped FlatBuffers object to lazily decode from (or None)
        self._from_fbs = from_fbs

        # cached / explicitly assigned field values
        self._session = None
        self._joined_at = None
        self._left_at = None
        self._realm = None
        self._authid = None
        self._authrole = None

    def marshal(self):
        """
        Marshal this object into a plain dict (generic host language object).
        """
        return {
            'session': self.session,
            'joined_at': self.joined_at,
            'left_at': self.left_at,
            'realm': self.realm,
            'authid': self.authid,
            'authrole': self.authrole,
        }

    def __str__(self):
        return '\n{}\n'.format(pprint.pformat(self.marshal()))

    @property
    def session(self):
        """
        The WAMP session ID of the session.

        :returns: session ID
        :rtype: int
        """
        if self._session is not None:
            return self._session
        if self._from_fbs:
            self._session = self._from_fbs.Session()
        return self._session

    @session.setter
    def session(self, value):
        if value is not None:
            assert type(value) == int
        self._session = value

    @property
    def joined_at(self):
        """
        Timestamp when the session was joined by the router. Epoch time in ns.

        :returns: Epoch time in ns when session joined
        :rtype: int
        """
        if self._joined_at is not None:
            return self._joined_at
        if self._from_fbs:
            self._joined_at = self._from_fbs.JoinedAt()
        return self._joined_at

    @joined_at.setter
    def joined_at(self, value):
        if value is not None:
            assert type(value) == int
        self._joined_at = value

    @property
    def left_at(self):
        """
        Timestamp when the session left the router. Epoch time in ns.

        :returns: Epoch time in ns when session left - or 0 when session currently joined
        :rtype: int
        """
        if self._left_at is not None:
            return self._left_at
        if self._from_fbs:
            self._left_at = self._from_fbs.LeftAt()
        return self._left_at

    @left_at.setter
    def left_at(self, value):
        if value is not None:
            assert type(value) == int
        self._left_at = value

    @property
    def realm(self):
        """
        The WAMP realm the session is/was joined on.

        :returns: WAMP realm
        :rtype: str
        """
        if self._realm is not None:
            return self._realm
        if self._from_fbs:
            # stored as bytes in the FlatBuffers object; surface as str
            self._realm = self._from_fbs.Realm().decode('utf8')
        return self._realm

    @realm.setter
    def realm(self, value):
        if value is not None:
            assert type(value) == str
        self._realm = value

    @property
    def authid(self):
        """
        The WAMP authid the session was authenticated under.

        :returns: WAMP authid
        :rtype: str
        """
        if self._authid is not None:
            return self._authid
        if self._from_fbs:
            self._authid = self._from_fbs.Authid().decode('utf8')
        return self._authid

    @authid.setter
    def authid(self, value):
        if value is not None:
            assert type(value) == str
        self._authid = value

    @property
    def authrole(self):
        """
        The WAMP authrole the session was authenticated under.

        :returns: WAMP authrole
        :rtype: str
        """
        if self._authrole is not None:
            return self._authrole
        if self._from_fbs:
            self._authrole = self._from_fbs.Authrole().decode('utf8')
        return self._authrole

    @authrole.setter
    def authrole(self, value):
        if value is not None:
            assert type(value) == str
        self._authrole = value

    @staticmethod
    def cast(buf):
        """
        Parse a FlatBuffers-serialized session from ``buf`` and wrap it
        in a lazily-decoding :class:`Session` object.
        """
        return Session(SessionGen.Session.GetRootAsSession(buf, 0))

    def build(self, builder):
        """
        Serialize this session into a new FlatBuffers object using ``builder``.

        :returns: offset of the finished table within the builder buffer
        :rtype: int
        """
        # strings must be created before SessionStart()
        realm = self.realm
        if realm:
            realm = builder.CreateString(realm)
        authid = self.authid
        if authid:
            authid = builder.CreateString(authid)
        authrole = self.authrole
        if authrole:
            authrole = builder.CreateString(authrole)

        # now start and build a new object ..
        SessionGen.SessionStart(builder)
        SessionGen.SessionAddSession(builder, self.session)
        SessionGen.SessionAddJoinedAt(builder, self.joined_at)
        if self.left_at:
            SessionGen.SessionAddLeftAt(builder, self.left_at)
        if realm:
            SessionGen.SessionAddRealm(builder, realm)
        if authid:
            SessionGen.SessionAddAuthid(builder, authid)
        if authrole:
            SessionGen.SessionAddAuthrole(builder, authrole)

        # finish the object.
        return SessionGen.SessionEnd(builder)
| # automatically generated by the FlatBuffers compiler, do not modify | ||
| # namespace: eventstore | ||
| # The payload end-to-end encryption algorithm the payload is encrypted. | ||
class EncAlgo(object):
    """
    Payload end-to-end encryption algorithm enumeration
    (generated from the FlatBuffers ``eventstore`` schema — do not hand-edit values).
    """
    # Unset
    NONE = 0
    # WAMP-cryptobox end-to-end encrypted application payload
    CRYPTOBOX = 1
    # MQTT passthrough-mode application payload
    MQTT = 2
    # XBR end-to-end encrypted application payload
    XBR = 3
    # NOTE(review): no comment in the generated schema for this value —
    # presumably an opaque/unspecified payload encoding; confirm against the .fbs source.
    OPAQUE = 4
| # automatically generated by the FlatBuffers compiler, do not modify | ||
| # namespace: eventstore | ||
| # The serializer the app payload was serialized with before encryption. | ||
class EncSerializer(object):
    """
    Serializer the application payload was serialized with before encryption
    (generated from the FlatBuffers ``eventstore`` schema — do not hand-edit values).
    """
    # Unset
    NONE = 0
    # Payload was serialized in JSON
    JSON = 1
    # Payload was serialized in MsgPack
    MSGPACK = 2
    # Payload was serialized in CBOR
    CBOR = 3
    # Payload was serialized in UBJSON
    UBJSON = 4
    # NOTE(review): the following two values carry no comment in the generated
    # schema — presumably opaque/raw and FlatBuffers-native payloads; confirm
    # against the .fbs source.
    OPAQUE = 5
    FLATBUFFERS = 6
| # automatically generated by the FlatBuffers compiler, do not modify | ||
| # namespace: eventstore | ||
| import flatbuffers | ||
| from flatbuffers.compat import import_numpy | ||
| np = import_numpy() | ||
# This table store WAMP events dispatched to receivers, under WAMP subscriptions on URIs (or patterns).
# NOTE(review): generated by the FlatBuffers compiler — keep accessor bodies unmodified.
class Event(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        # Read the root uoffset and return an Event view positioned on the root table.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Event()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def GetRootAsEvent(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)

    # Event
    def Init(self, buf, pos):
        # Zero-copy: attach a table view over ``buf`` at ``pos``; no parsing here.
        self._tab = flatbuffers.table.Table(buf, pos)

    # Timestamp when the event was sent to the receiver. Epoch time in ns.
    # Event
    def Timestamp(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        # field absent in buffer -> schema default
        return 0

    # The subscription ID this event is dispatched under.
    # Event
    def Subscription(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # The publication ID of the dispatched event.
    # Event
    def Publication(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # The WAMP session ID of the receiver.
    # Event
    def Receiver(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0

    # Whether the message was retained by the broker on the topic, rather than just published.
    # Event
    def Retained(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # Whether this Event was to be acknowledged by the receiver.
    # Event
    def AcknowledgedDelivery(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False
# Module-level builder helpers for the Event table (new-style flatbuffers API).
# The ``Event``-prefixed variants are deprecated aliases kept for backward compatibility.

def Start(builder): builder.StartObject(6)

def EventStart(builder):
    """This method is deprecated. Please switch to Start."""
    return Start(builder)

def AddTimestamp(builder, timestamp): builder.PrependUint64Slot(0, timestamp, 0)

def EventAddTimestamp(builder, timestamp):
    """This method is deprecated. Please switch to AddTimestamp."""
    return AddTimestamp(builder, timestamp)

def AddSubscription(builder, subscription): builder.PrependUint64Slot(1, subscription, 0)

def EventAddSubscription(builder, subscription):
    """This method is deprecated. Please switch to AddSubscription."""
    return AddSubscription(builder, subscription)

def AddPublication(builder, publication): builder.PrependUint64Slot(2, publication, 0)

def EventAddPublication(builder, publication):
    """This method is deprecated. Please switch to AddPublication."""
    return AddPublication(builder, publication)

def AddReceiver(builder, receiver): builder.PrependUint64Slot(3, receiver, 0)

def EventAddReceiver(builder, receiver):
    """This method is deprecated. Please switch to AddReceiver."""
    return AddReceiver(builder, receiver)

def AddRetained(builder, retained): builder.PrependBoolSlot(4, retained, 0)

def EventAddRetained(builder, retained):
    """This method is deprecated. Please switch to AddRetained."""
    return AddRetained(builder, retained)

def AddAcknowledgedDelivery(builder, acknowledgedDelivery): builder.PrependBoolSlot(5, acknowledgedDelivery, 0)

def EventAddAcknowledgedDelivery(builder, acknowledgedDelivery):
    """This method is deprecated. Please switch to AddAcknowledgedDelivery."""
    return AddAcknowledgedDelivery(builder, acknowledgedDelivery)

def End(builder): return builder.EndObject()

def EventEnd(builder):
    """This method is deprecated. Please switch to End."""
    return End(builder)
| # automatically generated by the FlatBuffers compiler, do not modify | ||
| # namespace: eventstore | ||
| import flatbuffers | ||
| from flatbuffers.compat import import_numpy | ||
| np = import_numpy() | ||
| # This table stores WAMP publications with configurable amount of details, optionally including application payload. | ||
| class Publication(object): | ||
| __slots__ = ['_tab'] | ||
@classmethod
def GetRootAs(cls, buf, offset=0):
    # Read the root uoffset and return a Publication view positioned on the root table.
    n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
    x = Publication()
    x.Init(buf, n + offset)
    return x

@classmethod
def GetRootAsPublication(cls, buf, offset=0):
    """This method is deprecated. Please switch to GetRootAs."""
    return cls.GetRootAs(buf, offset)

# Publication
def Init(self, buf, pos):
    # Zero-copy: attach a table view over ``buf`` at ``pos``; no parsing here.
    self._tab = flatbuffers.table.Table(buf, pos)
# Timestamp when the publication was accepted by the broker. Epoch time in ns.
# Publication
def Timestamp(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
    if o != 0:
        return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
    # field absent in buffer -> schema default
    return 0

# WAMP publication ID that was assigned by the broker.
# Publication
def Publication(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
    if o != 0:
        return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
    return 0

# WAMP session ID of the publisher.
# Publication
def Publisher(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
    if o != 0:
        return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
    return 0

# The WAMP or application URI of the PubSub topic the event was published to.
# Publication
def Topic(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
    if o != 0:
        # returned as bytes; the cfxdb wrapper decodes to str
        return self._tab.String(o + self._tab.Pos)
    return None
# Positional values for application-defined event payload.
# Stored as a [ubyte] vector; the cfxdb wrapper decodes it with CBOR.
# Publication
def Args(self, j):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
    if o != 0:
        a = self._tab.Vector(o)
        return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
    return 0

# Publication
def ArgsAsNumpy(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
    if o != 0:
        return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
    return 0

# Publication
def ArgsLength(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
    if o != 0:
        return self._tab.VectorLen(o)
    return 0

# Publication
def ArgsIsNone(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
    return o == 0

# Keyword values for application-defined event payload.
# Stored as a [ubyte] vector; the cfxdb wrapper decodes it with CBOR.
# Publication
def Kwargs(self, j):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
    if o != 0:
        a = self._tab.Vector(o)
        return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
    return 0

# Publication
def KwargsAsNumpy(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
    if o != 0:
        return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
    return 0

# Publication
def KwargsLength(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
    if o != 0:
        return self._tab.VectorLen(o)
    return 0

# Publication
def KwargsIsNone(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
    return o == 0

# Alternative, transparent payload. If given, ``args`` and ``kwargs`` must be left unset.
# Publication
def Payload(self, j):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
    if o != 0:
        a = self._tab.Vector(o)
        return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
    return 0

# Publication
def PayloadAsNumpy(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
    if o != 0:
        return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
    return 0

# Publication
def PayloadLength(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
    if o != 0:
        return self._tab.VectorLen(o)
    return 0

# Publication
def PayloadIsNone(self):
    o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
    return o == 0
    # If ``True``, the broker was asked to acknowledge the publication with a success or error response.
    # Publication
    def Acknowledge(self):
        # Bool scalar at vtable slot 18; defaults to False when the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False
    # If ``True``, the broker was requested to retain this event.
    # Publication
    def Retain(self):
        # Bool scalar at vtable slot 20; defaults to False when the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False
    # If ``True``, the broker was asked to exclude the publisher from receiving the event.
    # Publication
    def ExcludeMe(self):
        # Bool scalar at vtable slot 22; defaults to False when the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False
    # List of WAMP session IDs to exclude from receiving this event.
    # Publication
    def Exclude(self, j):
        # j-th element of the [uint64] vector at vtable slot 24; 0 if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
        return 0
    # Publication
    def ExcludeAsNumpy(self):
        # Whole exclude vector as a numpy uint64 array; the int 0 if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o)
        return 0
    # Publication
    def ExcludeLength(self):
        # Number of session IDs in the exclude vector; 0 if absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Publication
    def ExcludeIsNone(self):
        # True iff the exclude field was not set in the buffer.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
        return o == 0
    # List of WAMP authids to exclude from receiving this event.
    # Publication
    def ExcludeAuthid(self, j):
        # j-th string of the [string] vector at vtable slot 26; "" if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""
    # Publication
    def ExcludeAuthidLength(self):
        # Number of authids in the vector; 0 if absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Publication
    def ExcludeAuthidIsNone(self):
        # True iff the exclude_authid field was not set in the buffer.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
        return o == 0
    # List of WAMP authroles to exclude from receiving this event.
    # Publication
    def ExcludeAuthrole(self, j):
        # j-th string of the [string] vector at vtable slot 28; "" if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""
    # Publication
    def ExcludeAuthroleLength(self):
        # Number of authroles in the vector; 0 if absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Publication
    def ExcludeAuthroleIsNone(self):
        # True iff the exclude_authrole field was not set in the buffer.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
        return o == 0
    # List of WAMP session IDs eligible to receive this event.
    # Publication
    def Eligible(self, j):
        # j-th element of the [uint64] vector at vtable slot 30; 0 if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
        return 0
    # Publication
    def EligibleAsNumpy(self):
        # Whole eligible vector as a numpy uint64 array; the int 0 if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o)
        return 0
    # Publication
    def EligibleLength(self):
        # Number of session IDs in the eligible vector; 0 if absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Publication
    def EligibleIsNone(self):
        # True iff the eligible field was not set in the buffer.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
        return o == 0
    # List of WAMP authids eligible to receive this event.
    # Publication
    def EligibleAuthid(self, j):
        # j-th string of the [string] vector at vtable slot 32; "" if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""
    # Publication
    def EligibleAuthidLength(self):
        # Number of authids in the vector; 0 if absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Publication
    def EligibleAuthidIsNone(self):
        # True iff the eligible_authid field was not set in the buffer.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
        return o == 0
    # List of WAMP authroles eligible to receive this event.
    # Publication
    def EligibleAuthrole(self, j):
        # j-th string of the [string] vector at vtable slot 34; "" if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
        return ""
    # Publication
    def EligibleAuthroleLength(self):
        # Number of authroles in the vector; 0 if absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Publication
    def EligibleAuthroleIsNone(self):
        # True iff the eligible_authrole field was not set in the buffer.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
        return o == 0
    # When using payload transparency, the encoding algorithm that was used to encode the payload.
    # Publication
    def EncAlgo(self):
        # uint8 scalar at vtable slot 36; defaults to 0 when the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
        return 0
    # When using payload transparency with an encryption algorithm, the payload encryption key.
    # Publication
    def EncKey(self, j):
        # j-th byte of the enc_key [ubyte] vector at vtable slot 38; 0 if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
        return 0
    # Publication
    def EncKeyAsNumpy(self):
        # Whole enc_key vector as a numpy uint8 array; the int 0 if the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
        return 0
    # Publication
    def EncKeyLength(self):
        # Number of bytes in the enc_key vector; 0 if absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0
    # Publication
    def EncKeyIsNone(self):
        # True iff the enc_key field was not set in the buffer.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(38))
        return o == 0
    # When using payload transparency, the payload object serializer that was used encoding the payload.
    # Publication
    def EncSerializer(self):
        # uint8 scalar at vtable slot 40; defaults to 0 when the field is absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(40))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
        return 0
# Module-level builder API for the Publication table (19 vtable slots).
# Usage: Start(builder), then one Add<Field>(builder, value) per field (vectors
# must first be built via Start<Field>Vector / builder.Prepend*), then End(builder).
# The Publication*-prefixed wrappers exist only for backward compatibility with
# older generated code and simply delegate to the short names.
def Start(builder): builder.StartObject(19)
def PublicationStart(builder):
    """This method is deprecated. Please switch to Start."""
    return Start(builder)
def AddTimestamp(builder, timestamp): builder.PrependUint64Slot(0, timestamp, 0)
def PublicationAddTimestamp(builder, timestamp):
    """This method is deprecated. Please switch to AddTimestamp."""
    return AddTimestamp(builder, timestamp)
def AddPublication(builder, publication): builder.PrependUint64Slot(1, publication, 0)
def PublicationAddPublication(builder, publication):
    """This method is deprecated. Please switch to AddPublication."""
    return AddPublication(builder, publication)
def AddPublisher(builder, publisher): builder.PrependUint64Slot(2, publisher, 0)
def PublicationAddPublisher(builder, publisher):
    """This method is deprecated. Please switch to AddPublisher."""
    return AddPublisher(builder, publisher)
def AddTopic(builder, topic): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(topic), 0)
def PublicationAddTopic(builder, topic):
    """This method is deprecated. Please switch to AddTopic."""
    return AddTopic(builder, topic)
def AddArgs(builder, args): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(args), 0)
def PublicationAddArgs(builder, args):
    """This method is deprecated. Please switch to AddArgs."""
    return AddArgs(builder, args)
def StartArgsVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def PublicationStartArgsVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartArgsVector(builder, numElems)
def AddKwargs(builder, kwargs): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(kwargs), 0)
def PublicationAddKwargs(builder, kwargs):
    """This method is deprecated. Please switch to AddKwargs."""
    return AddKwargs(builder, kwargs)
def StartKwargsVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def PublicationStartKwargsVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartKwargsVector(builder, numElems)
def AddPayload(builder, payload): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(payload), 0)
def PublicationAddPayload(builder, payload):
    """This method is deprecated. Please switch to AddPayload."""
    return AddPayload(builder, payload)
def StartPayloadVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def PublicationStartPayloadVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartPayloadVector(builder, numElems)
def AddAcknowledge(builder, acknowledge): builder.PrependBoolSlot(7, acknowledge, 0)
def PublicationAddAcknowledge(builder, acknowledge):
    """This method is deprecated. Please switch to AddAcknowledge."""
    return AddAcknowledge(builder, acknowledge)
def AddRetain(builder, retain): builder.PrependBoolSlot(8, retain, 0)
def PublicationAddRetain(builder, retain):
    """This method is deprecated. Please switch to AddRetain."""
    return AddRetain(builder, retain)
def AddExcludeMe(builder, excludeMe): builder.PrependBoolSlot(9, excludeMe, 0)
def PublicationAddExcludeMe(builder, excludeMe):
    """This method is deprecated. Please switch to AddExcludeMe."""
    return AddExcludeMe(builder, excludeMe)
def AddExclude(builder, exclude): builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(exclude), 0)
def PublicationAddExclude(builder, exclude):
    """This method is deprecated. Please switch to AddExclude."""
    return AddExclude(builder, exclude)
def StartExcludeVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def PublicationStartExcludeVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartExcludeVector(builder, numElems)
def AddExcludeAuthid(builder, excludeAuthid): builder.PrependUOffsetTRelativeSlot(11, flatbuffers.number_types.UOffsetTFlags.py_type(excludeAuthid), 0)
def PublicationAddExcludeAuthid(builder, excludeAuthid):
    """This method is deprecated. Please switch to AddExcludeAuthid."""
    return AddExcludeAuthid(builder, excludeAuthid)
def StartExcludeAuthidVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def PublicationStartExcludeAuthidVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartExcludeAuthidVector(builder, numElems)
def AddExcludeAuthrole(builder, excludeAuthrole): builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(excludeAuthrole), 0)
def PublicationAddExcludeAuthrole(builder, excludeAuthrole):
    """This method is deprecated. Please switch to AddExcludeAuthrole."""
    return AddExcludeAuthrole(builder, excludeAuthrole)
def StartExcludeAuthroleVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def PublicationStartExcludeAuthroleVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartExcludeAuthroleVector(builder, numElems)
def AddEligible(builder, eligible): builder.PrependUOffsetTRelativeSlot(13, flatbuffers.number_types.UOffsetTFlags.py_type(eligible), 0)
def PublicationAddEligible(builder, eligible):
    """This method is deprecated. Please switch to AddEligible."""
    return AddEligible(builder, eligible)
def StartEligibleVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def PublicationStartEligibleVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartEligibleVector(builder, numElems)
def AddEligibleAuthid(builder, eligibleAuthid): builder.PrependUOffsetTRelativeSlot(14, flatbuffers.number_types.UOffsetTFlags.py_type(eligibleAuthid), 0)
def PublicationAddEligibleAuthid(builder, eligibleAuthid):
    """This method is deprecated. Please switch to AddEligibleAuthid."""
    return AddEligibleAuthid(builder, eligibleAuthid)
def StartEligibleAuthidVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def PublicationStartEligibleAuthidVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartEligibleAuthidVector(builder, numElems)
def AddEligibleAuthrole(builder, eligibleAuthrole): builder.PrependUOffsetTRelativeSlot(15, flatbuffers.number_types.UOffsetTFlags.py_type(eligibleAuthrole), 0)
def PublicationAddEligibleAuthrole(builder, eligibleAuthrole):
    """This method is deprecated. Please switch to AddEligibleAuthrole."""
    return AddEligibleAuthrole(builder, eligibleAuthrole)
def StartEligibleAuthroleVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def PublicationStartEligibleAuthroleVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartEligibleAuthroleVector(builder, numElems)
def AddEncAlgo(builder, encAlgo): builder.PrependUint8Slot(16, encAlgo, 0)
def PublicationAddEncAlgo(builder, encAlgo):
    """This method is deprecated. Please switch to AddEncAlgo."""
    return AddEncAlgo(builder, encAlgo)
def AddEncKey(builder, encKey): builder.PrependUOffsetTRelativeSlot(17, flatbuffers.number_types.UOffsetTFlags.py_type(encKey), 0)
def PublicationAddEncKey(builder, encKey):
    """This method is deprecated. Please switch to AddEncKey."""
    return AddEncKey(builder, encKey)
def StartEncKeyVector(builder, numElems): return builder.StartVector(1, numElems, 1)
def PublicationStartEncKeyVector(builder, numElems):
    """This method is deprecated. Please switch to Start."""
    return StartEncKeyVector(builder, numElems)
def AddEncSerializer(builder, encSerializer): builder.PrependUint8Slot(18, encSerializer, 0)
def PublicationAddEncSerializer(builder, encSerializer):
    """This method is deprecated. Please switch to AddEncSerializer."""
    return AddEncSerializer(builder, encSerializer)
def End(builder): return builder.EndObject()
def PublicationEnd(builder):
    """This method is deprecated. Please switch to End."""
    return End(builder)
| # automatically generated by the FlatBuffers compiler, do not modify | ||
| # namespace: eventstore | ||
| import flatbuffers | ||
| from flatbuffers.compat import import_numpy | ||
| np = import_numpy() | ||
# This table stores WAMP sessions and serves as an anchor for all usage related data.
class Session(object):
    """FlatBuffers read accessor (generated) for a WAMP session record."""
    __slots__ = ['_tab']
    @classmethod
    def GetRootAs(cls, buf, offset=0):
        # Resolve the root table offset in *buf* and return an initialized accessor.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Session()
        x.Init(buf, n + offset)
        return x
    @classmethod
    def GetRootAsSession(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    # Session
    def Init(self, buf, pos):
        # Bind this accessor to the table located at *pos* inside *buf*.
        self._tab = flatbuffers.table.Table(buf, pos)
    # The WAMP session_id of the session.
    # Session
    def Session(self):
        # uint64 scalar at vtable slot 4; defaults to 0 when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0
    # Timestamp when the session was joined by the router. Epoch time in ns.
    # Session
    def JoinedAt(self):
        # uint64 scalar at vtable slot 6; defaults to 0 when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0
    # Timestamp when the session left the router. Epoch time in ns.
    # Session
    def LeftAt(self):
        # uint64 scalar at vtable slot 8; defaults to 0 when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
        return 0
    # The WAMP realm the session is/was joined on.
    # Session
    def Realm(self):
        # string at vtable slot 10; None when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # The WAMP authid the session was authenticated under.
    # Session
    def Authid(self):
        # string at vtable slot 12; None when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
    # The WAMP authrole the session was authenticated under.
    # Session
    def Authrole(self):
        # string at vtable slot 14; None when absent.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
        if o != 0:
            return self._tab.String(o + self._tab.Pos)
        return None
# Module-level builder API for the Session table (6 vtable slots).
# Usage: Start(builder), one Add<Field>(builder, value) per field, then End(builder).
# The Session*-prefixed wrappers exist only for backward compatibility and delegate
# to the short names.
def Start(builder): builder.StartObject(6)
def SessionStart(builder):
    """This method is deprecated. Please switch to Start."""
    return Start(builder)
def AddSession(builder, session): builder.PrependUint64Slot(0, session, 0)
def SessionAddSession(builder, session):
    """This method is deprecated. Please switch to AddSession."""
    return AddSession(builder, session)
def AddJoinedAt(builder, joinedAt): builder.PrependUint64Slot(1, joinedAt, 0)
def SessionAddJoinedAt(builder, joinedAt):
    """This method is deprecated. Please switch to AddJoinedAt."""
    return AddJoinedAt(builder, joinedAt)
def AddLeftAt(builder, leftAt): builder.PrependUint64Slot(2, leftAt, 0)
def SessionAddLeftAt(builder, leftAt):
    """This method is deprecated. Please switch to AddLeftAt."""
    return AddLeftAt(builder, leftAt)
def AddRealm(builder, realm): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(realm), 0)
def SessionAddRealm(builder, realm):
    """This method is deprecated. Please switch to AddRealm."""
    return AddRealm(builder, realm)
def AddAuthid(builder, authid): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(authid), 0)
def SessionAddAuthid(builder, authid):
    """This method is deprecated. Please switch to AddAuthid."""
    return AddAuthid(builder, authid)
def AddAuthrole(builder, authrole): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(authrole), 0)
def SessionAddAuthrole(builder, authrole):
    """This method is deprecated. Please switch to AddAuthrole."""
    return AddAuthrole(builder, authrole)
def End(builder): return builder.EndObject()
def SessionEnd(builder):
    """This method is deprecated. Please switch to End."""
    return End(builder)
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| from zlmdb import table, MapOidFlatBuffers, MapOidTimestampFlatBuffers | ||
| import cfxdb.eventstore.session | ||
# Public API of this module: the three persisted eventstore tables plus the schema anchor.
__all__ = (
    'Sessions',
    'Publications',
    'Events',
    'Schema',
)
# The @table decorator registers this map under a fixed UUID and wires the
# Session build/cast functions used for (de)serializing stored values.
@table('a674f707-69b8-4c41-b2a1-df17cec9b095',
       build=cfxdb.eventstore.session.Session.build,
       cast=cfxdb.eventstore.session.Session.cast)
class Sessions(MapOidFlatBuffers):
    """
    Persisted sessions archive.

    Map :class:`zlmdb.MapOidFlatBuffers` from ``session`` to :class:`cfxdb.eventstore.Session`.
    """
# The @table decorator registers this map under a fixed UUID and wires the
# Publication build/cast functions used for (de)serializing stored values.
@table('dd04931a-753b-4fde-8140-d66b93519c73',
       build=cfxdb.eventstore.publication.Publication.build,
       cast=cfxdb.eventstore.publication.Publication.cast)
class Publications(MapOidFlatBuffers):
    """
    Persisted publications archive.

    Map :class:`zlmdb.MapOidFlatBuffers` from ``publication`` to :class:`cfxdb.eventstore.Publication`.
    """
# The @table decorator registers this map under a fixed UUID and wires the
# Event build/cast functions used for (de)serializing stored values.
@table('40a9df31-6065-496f-809f-027a1879654c',
       build=cfxdb.eventstore.event.Event.build,
       cast=cfxdb.eventstore.event.Event.cast)
class Events(MapOidTimestampFlatBuffers):
    """
    Persisted events archive.

    Map :class:`zlmdb.MapOidTimestampFlatBuffers` from ``(subscription, time_ns)`` to :class:`cfxdb.eventstore.Event`.
    """
class Schema(object):
    """
    CFC edge database schema for ZLMDB.
    """
    def __init__(self, db):
        """
        :param db: The database this schema is attached to (zlmdb.Database).
        """
        self.db = db
    # sessions: Sessions
    sessions = None
    """
    Sessions archive.
    """
    # publications: Publications
    publications = None
    """
    Publications archive.
    """
    # events: Events
    events = None
    """
    Events archive.
    """
    @staticmethod
    def attach(db):
        """
        Factory to create a schema from attaching to a database. The schema tables
        will be automatically mapped as persistant maps and attached to the
        database slots.

        :param db: zlmdb.Database
        :return: object of Schema
        """
        schema = Schema(db)
        # attach each table class so its slot in the database is mapped to a persistent map
        schema.sessions = db.attach_table(Sessions)
        schema.publications = db.attach_table(Publications)
        schema.events = db.attach_table(Events)
        return schema
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## |
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pytest | ||
| import random | ||
| import timeit | ||
| import flatbuffers | ||
| from txaio import with_twisted # noqa | ||
| from txaio import time_ns | ||
| from autobahn import util | ||
| import zlmdb | ||
| from cfxdb.eventstore import Event | ||
# NOTE(review): clears zlmdb's module-global table registry before the tests run —
# presumably to avoid duplicate table-UUID registrations across test modules; confirm intent.
zlmdb.TABLES_BY_UUID = {}
def fill_event(event):
    """Populate *event* with randomized test data for all Event attributes."""
    event.timestamp = time_ns()
    for attr in ('subscription', 'publication', 'receiver'):
        setattr(event, attr, util.id())
    for attr in ('retained', 'acknowledged_delivery'):
        setattr(event, attr, random.choice([True, False]))
@pytest.fixture(scope='function')
def builder():
    """Provide a fresh flatbuffers builder per test."""
    return flatbuffers.Builder(0)
@pytest.fixture(scope='function')
def event():
    """Provide a randomly filled Event object per test."""
    evt = Event()
    fill_event(evt)
    return evt
def test_event_roundtrip(event, builder):
    """Serialize an Event to flatbuffers bytes and parse it back unchanged."""
    # serialize to bytes (flatbuffers) from python object
    builder.Finish(event.build(builder))
    data = builder.Output()
    assert len(data) in [64, 56]
    # parse back from bytes and compare every attribute
    parsed = Event.cast(data)
    for attr in ('timestamp', 'subscription', 'publication', 'receiver',
                 'retained', 'acknowledged_delivery'):
        assert getattr(parsed, attr) == getattr(event, attr)
def test_event_roundtrip_perf(event, builder):
    """Measure Event.cast() + full attribute verification throughput."""
    builder.Finish(event.build(builder))
    data = builder.Output()
    # accumulator prevents the JIT from optimizing the loop body away
    scratch = {'timestamp': 0}

    attrs = ('timestamp', 'subscription', 'publication', 'receiver',
             'retained', 'acknowledged_delivery')

    def loop():
        parsed = Event.cast(data)
        for attr in attrs:
            assert getattr(parsed, attr) == getattr(event, attr)
        scratch['timestamp'] += event.timestamp

    n_runs = 5
    n_iters = 100000
    samples = []
    print('measuring:')
    for _ in range(n_runs):
        secs = timeit.timeit(loop, number=n_iters)
        ops = round(float(n_iters) / secs, 1)
        samples.append(ops)
        print('{} objects/sec performance'.format(ops))
    # median of the per-run throughput samples
    ops50 = sorted(samples)[len(samples) // 2]
    print('RESULT: {} objects/sec median performance'.format(ops50))
    assert ops50 > 1000
    assert scratch['timestamp'] > 0
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pytest | ||
| import os | ||
| import random | ||
| import uuid | ||
| import timeit | ||
| import platform | ||
| import flatbuffers | ||
| from txaio import with_twisted # noqa | ||
| from txaio import time_ns | ||
| from autobahn import util | ||
| import zlmdb | ||
| from cfxdb.eventstore import Publication | ||
# NOTE(review): clears zlmdb's module-global table registry before the tests run —
# presumably to avoid duplicate table-UUID registrations across test modules; confirm intent.
zlmdb.TABLES_BY_UUID = {}
@pytest.fixture(scope='function')
def builder():
    """Provide a fresh flatbuffers builder per test."""
    return flatbuffers.Builder(0)
def fill_publication(publication):
    """Populate *publication* with randomized WAMP publish test data (all fields set)."""
    publication.timestamp = time_ns()
    publication.publication = util.id()
    publication.publisher = util.id()
    publication.topic = 'com.example.foobar.{}.doit'.format(uuid.uuid4())
    publication.args = [23, 'hello', {'foo': 0.5}]
    publication.kwargs = {'bar': 23, 'baz': [1, 2, 3]}
    publication.payload = os.urandom(32)
    for flag in ('acknowledge', 'retain', 'exclude_me'):
        setattr(publication, flag, random.choice([True, False]))
    base = util.id()
    publication.exclude = [base + j + 1000 for j in range(5)]
    publication.exclude_authid = ['user1', 'user2', 'user3']
    publication.exclude_authrole = ['roleA', 'roleB', 'roleC']
    base = util.id()
    publication.eligible = [base + j + 1000 for j in range(5)]
    publication.eligible_authid = ['user4', 'user5', 'user6']
    publication.eligible_authrole = ['roleD', 'roleE', 'roleF']
    publication.enc_algo = Publication.ENC_ALGO_XBR
    publication.enc_key = os.urandom(32)
    publication.enc_serializer = Publication.ENC_SER_CBOR
@pytest.fixture(scope='function')
def publication():
    """Provide a randomly filled Publication object per test."""
    pub = Publication()
    fill_publication(pub)
    return pub
def test_publication_roundtrip(publication, builder):
    """Serialize a Publication to flatbuffers bytes and parse it back unchanged."""
    # serialize to bytes (flatbuffers) from python object
    builder.Finish(publication.build(builder))
    data = builder.Output()
    assert len(data) in [624, 632, 640]
    # parse back from bytes and compare every attribute
    parsed = Publication.cast(data)
    for attr in ('timestamp', 'publication', 'publisher', 'topic', 'args', 'kwargs',
                 'payload', 'acknowledge', 'retain', 'exclude_me', 'exclude',
                 'exclude_authid', 'exclude_authrole', 'eligible', 'eligible_authid',
                 'eligible_authrole', 'enc_algo', 'enc_key', 'enc_serializer'):
        assert getattr(parsed, attr) == getattr(publication, attr)
def test_publication_roundtrip_perf(publication, builder):
    """Measure Publication.cast() + full attribute verification throughput."""
    builder.Finish(publication.build(builder))
    data = builder.Output()
    # accumulator prevents the JIT from optimizing the loop body away
    scratch = {'timestamp': 0}

    attrs = ('timestamp', 'publication', 'publisher', 'topic', 'args', 'kwargs',
             'payload', 'acknowledge', 'retain', 'exclude_me', 'exclude',
             'exclude_authid', 'exclude_authrole', 'eligible', 'eligible_authid',
             'eligible_authrole', 'enc_algo', 'enc_key', 'enc_serializer')

    def loop():
        parsed = Publication.cast(data)
        for attr in attrs:
            assert getattr(parsed, attr) == getattr(publication, attr)
        scratch['timestamp'] += publication.timestamp

    n_runs = 5
    # PyPy JIT-compiles the loop, so it can afford 10x the iterations of CPython
    if platform.python_implementation() == 'PyPy':
        n_iters = 100000
    else:
        n_iters = 10000
    samples = []
    print('measuring with N={}, M={}:'.format(n_runs, n_iters))
    for _ in range(n_runs):
        secs = timeit.timeit(loop, number=n_iters)
        ops = round(float(n_iters) / secs, 1)
        samples.append(ops)
        print('{} objects/sec performance'.format(ops))
    # median of the per-run throughput samples
    ops50 = sorted(samples)[len(samples) // 2]
    print('RESULT: {} objects/sec median performance ({} objects total)'.format(ops50, n_runs * n_iters))
    assert ops50 > 1000
    assert scratch['timestamp'] > 0
| ############################################################################## | ||
| # | ||
| # Crossbar.io Database | ||
| # Copyright (c) Crossbar.io Technologies GmbH. Licensed under MIT. | ||
| # | ||
| ############################################################################## | ||
| import pytest | ||
| import random | ||
| import uuid | ||
| import timeit | ||
| import flatbuffers | ||
| from txaio import with_twisted # noqa | ||
| from txaio import time_ns | ||
| from autobahn import util | ||
| import zlmdb | ||
| from cfxdb.eventstore import Session | ||
# Reset zlmdb's global table registry so table UUIDs registered by other
# (previously imported) test modules do not collide with tables defined
# by the schemas exercised in this module.
zlmdb.TABLES_BY_UUID = {}
def fill_session(session):
    """Populate *session* with randomized example values for round-trip tests."""
    _roles = ('admin', 'user*', 'guest', 'anon*')
    session.session = util.id()
    # pretend the session joined ~723 seconds ago and left just now
    session.joined_at = time_ns() - 723 * 10 ** 9
    session.left_at = time_ns()
    session.realm = f'realm-{uuid.uuid4()}'
    session.authid = util.generate_serial_number()
    session.authrole = random.choice(_roles)
@pytest.fixture(scope='function')
def builder():
    """Return a fresh flatbuffers builder for each test."""
    return flatbuffers.Builder(0)
@pytest.fixture(scope='function')
def session():
    """Return a Session object pre-filled with randomized test data."""
    obj = Session()
    fill_session(obj)
    return obj
def test_session_roundtrip(session, builder):
    """Serialize a Session to flatbuffers bytes and parse it back unchanged."""
    # python object -> flatbuffers bytes
    builder.Finish(session.build(builder))
    data = builder.Output()
    assert len(data) == 160

    # flatbuffers bytes -> python object
    restored = Session.cast(data)
    for attr in ('session', 'joined_at', 'left_at', 'realm', 'authid', 'authrole'):
        assert getattr(restored, attr) == getattr(session, attr)
def test_session_roundtrip_perf(session, builder):
    """Measure flatbuffers deserialization + attribute-access throughput.

    Serializes one Session, then repeatedly casts the bytes back into a
    Session and verifies every attribute, asserting a minimum median
    objects/sec rate over N timed runs of M iterations each.
    """
    obj = session.build(builder)
    builder.Finish(obj)
    data = builder.Output()

    # mutable cell written inside loop() so the work cannot be elided
    scratch = {'joined_at': 0}

    def loop():
        _session = Session.cast(data)

        # (removed a redundant "if True:" wrapper around these assertions)
        assert _session.session == session.session
        assert _session.joined_at == session.joined_at
        assert _session.left_at == session.left_at
        assert _session.realm == session.realm
        assert _session.authid == session.authid
        assert _session.authrole == session.authrole

        scratch['joined_at'] += session.joined_at

    N = 5
    M = 100000
    samples = []
    print('measuring:')
    for i in range(N):
        secs = timeit.timeit(loop, number=M)
        ops = round(float(M) / secs, 1)
        samples.append(ops)
        print('{} objects/sec performance'.format(ops))

    samples = sorted(samples)
    ops50 = samples[int(len(samples) / 2)]
    print('RESULT: {} objects/sec median performance'.format(ops50))

    assert ops50 > 1000
    # proves loop() actually executed and accumulated
    assert scratch['joined_at'] > 0
Alert delta unavailable
Currently unable to show alert delta for PyPI packages.
1350825
6.69%230
4.07%31164
6.25%