microgue
Advanced tools
| import datetime | ||
| import json | ||
| import redis.asyncio as redis | ||
| from ...loggers.logger import Logger | ||
logger = Logger()  # module-level logger shared by all definitions in this module
class CacheConnectionFailed(Exception):
    """Raised by AbstractCache.connect when the Redis connection cannot be established."""
    pass
class AbstractCache:
    """Async Redis-backed cache with a client shared at the subclass level.

    Subclasses must set ``host``; ``port``, ``prefix``, ``ttl``,
    ``connection_timeout`` and ``connection_required`` are optional overrides.
    """

    # internal use only - one shared client per concrete subclass
    _cache_client = None

    # extension required
    host = None

    # extension optional
    port = 6379
    prefix = None
    ttl = 900  # default expiry in seconds
    connection_timeout = 1  # socket connect timeout in seconds
    connection_required = True  # when True, connect() raises on failure

    def __init__(self, *args, **kwargs):
        pass

    async def connect(self):
        """Lazily create and ping the shared Redis client.

        Raises:
            CacheConnectionFailed: if the connection fails and
                ``connection_required`` is True.
        """
        logger.debug(f"{self.__class__.__name__}.connect", priority=2)
        # share the cache client at the __class__ level
        if self.__class__._cache_client is None:
            try:
                logger.debug("connecting to redis", priority=3)
                logger.debug(f"host: {self.host}")
                logger.debug(f"port: {self.port}")
                # redis.asyncio.Redis is awaitable - awaiting runs initialize()
                self.__class__._cache_client = await redis.Redis(
                    host=self.host,
                    port=self.port,
                    socket_connect_timeout=self.connection_timeout,
                )
                await self.__class__._cache_client.ping()
            except Exception as e:
                logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
                logger.error(f"{e.__class__.__name__}: {str(e)}")
                if self.connection_required:
                    raise CacheConnectionFailed(str(e)) from e
                else:
                    # best-effort mode: leave the client unset so operations no-op
                    self.__class__._cache_client = None
        else:
            logger.debug("using existing connection to redis", priority=3)

    def _prefix_key(self, key):
        """Return key with the optional class-level prefix applied."""
        if self.prefix:
            return f"{self.prefix}-{key}"
        else:
            return key

    async def get(self, key):
        """Return the cached value for key.

        The stored bytes are decoded as JSON when possible, then as ascii
        text, otherwise returned raw. Returns False when no connection is
        available.
        """
        await self.connect()
        if not self._cache_client:
            return False
        prefixed_key = self._prefix_key(key)
        value = await self._cache_client.get(prefixed_key)
        logger.debug(f"{self.__class__.__name__}.get", priority=2)
        logger.debug(f"key: {prefixed_key}")
        # prefer JSON decoding, then ascii text, then the raw value
        # (TypeError covers value=None on a cache miss)
        try:
            return json.loads(value)
        except (TypeError, ValueError):
            pass
        try:
            return value.decode("ascii")
        except (AttributeError, UnicodeDecodeError):
            pass
        return value

    async def set(self, key, value, ttl=None):
        """Store value under key with a TTL in seconds (defaults to ``self.ttl``).

        Non-string values are JSON-encoded. Returns True on success, False
        when no connection is available.
        """
        await self.connect()
        if not self._cache_client:
            return False
        value = value if isinstance(value, str) else json.dumps(value)
        prefixed_key = self._prefix_key(key)
        ttl = ttl or self.ttl
        logger.debug(f"{self.__class__.__name__}.set", priority=2)
        logger.debug(f"key: {prefixed_key}")
        await self._cache_client.set(prefixed_key, value, ex=ttl)
        return True

    async def delete(self, key):
        """Delete key; returns True if a key was removed, False otherwise."""
        await self.connect()
        if not self._cache_client:
            return False
        prefixed_key = self._prefix_key(key)
        logger.debug(f"{self.__class__.__name__}.delete", priority=2)
        logger.debug(f"key: {prefixed_key}")
        return bool(await self._cache_client.delete(prefixed_key))

    async def expires_at(self, key):
        """Return the remaining TTL of key formatted as H:MM:SS.

        NOTE(review): redis TTL returns -1/-2 for missing expiry/key, which
        formats as a negative timedelta string - callers should be aware.
        """
        await self.connect()
        if not self._cache_client:
            return False
        prefixed_key = self._prefix_key(key)
        expire_time = await self._cache_client.ttl(prefixed_key)
        return str(datetime.timedelta(seconds=expire_time))

    async def clear(self):
        """Flush the entire current Redis database; returns True on success."""
        await self.connect()
        if not self._cache_client:
            return False
        logger.debug(f"{self.__class__.__name__}.clear", priority=2)
        return bool(await self._cache_client.flushdb())
| import json | ||
| import aioboto3 | ||
| from ...loggers.logger import Logger | ||
logger = Logger()  # module-level logger shared by all definitions in this module
class EventBusConnectionFailed(Exception):
    """Raised by AbstractEventBus.connect when the EventBridge client cannot be created."""
    pass
class PublishFailed(Exception):
    """Raised by AbstractEventBus.publish on a client error or a non-200 response."""
    pass
class AbstractEventBus:
    """Async EventBridge publisher with a single globally shared client.

    Subclasses must set ``event_bus_name``, ``event_bus_region`` and
    ``event_source``.
    """

    # internal use only - shared across all subclasses
    _event_bus_client = None

    # extension required
    event_bus_name = None
    event_bus_region = None
    event_source = None

    def __init__(self, *args, **kwargs):
        pass

    async def connect(self):
        """Lazily create the shared EventBridge client.

        Raises:
            EventBusConnectionFailed: if the client cannot be created.
        """
        logger.debug(f"{self.__class__.__name__}.connect", priority=2)
        # share the event bus client at the global level
        if AbstractEventBus._event_bus_client is None:
            try:
                logger.debug("connecting to eventbridge", priority=3)
                logger.debug(f"event_bus_name: {self.event_bus_name}")
                logger.debug(f"event_bus_region: {self.event_bus_region}")
                # __aenter__ is called directly, so the client is never closed;
                # it intentionally lives for the life of the process
                AbstractEventBus._event_bus_client = await aioboto3.Session().client(
                    "events", region_name=self.event_bus_region
                ).__aenter__()
            except Exception as e:
                logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
                logger.error(f"{e.__class__.__name__}: {str(e)}")
                raise EventBusConnectionFailed(str(e)) from e
        else:
            # consistency with the other connect() implementations in this package
            logger.debug("using existing connection to eventbridge", priority=3)

    async def publish(self, event_type, event_data=None):
        """Publish one event to the bus; returns True on HTTP 200.

        Args:
            event_type: used as the event's DetailType.
            event_data: JSON-serializable detail payload (defaults to {}).

        Raises:
            PublishFailed: on a client error or a non-200 response.
        """
        await self.connect()
        # defaults
        event_data = {} if event_data is None else event_data
        logger.debug(f"{self.__class__.__name__}.publish", priority=2)
        logger.debug(f"event_type: {event_type}")
        try:
            response = await self._event_bus_client.put_events(
                Entries=[
                    {
                        "Source": self.event_source,
                        "DetailType": event_type,
                        "Detail": json.dumps(event_data),
                        "EventBusName": self.event_bus_name,
                    }
                ]
            )
        except Exception as e:
            logger.error(f"{self.__class__.__name__}.publish - error", priority=3)
            logger.error(f"{e.__class__.__name__}: {str(e)}")
            raise PublishFailed(str(e)) from e
        if response.get("ResponseMetadata", {}).get("HTTPStatusCode") == 200:
            logger.debug(f"{self.__class__.__name__}.publish - success", priority=3)
            return True
        else:
            logger.critical(f"{self.__class__.__name__}.publish - error", priority=3)
            logger.critical(f"response: {response}")
            raise PublishFailed(response)
| import json | ||
| import aioboto3 | ||
| from ...loggers.logger import Logger | ||
logger = Logger()  # module-level logger shared by all definitions in this module
class DeleteFailed(Exception):
    """Raised by AbstractQueue.delete when an SQS message cannot be deleted."""
    pass
class ReceiveFailed(Exception):
    """Raised by AbstractQueue.receive when SQS messages cannot be received."""
    pass
class SendFailed(Exception):
    """Raised by AbstractQueue.send when an SQS message cannot be sent."""
    pass
class QueueConnectionFailed(Exception):
    """Raised by AbstractQueue.connect when the SQS client cannot be created."""
    pass
class AbstractQueue:
    """Async SQS queue wrapper with a single globally shared client.

    Subclasses must set ``queue_url``.
    """

    # internal use only - shared across all subclasses
    _queue_client = None

    # extension required
    queue_url = None

    class Message:
        """Lightweight wrapper around one received SQS message."""

        def __init__(self, message_data):
            # the receipt handle doubles as the id used for deletion
            self.id = message_data["ReceiptHandle"]
            # assumes the message body is JSON - non-JSON bodies raise here
            self.data = json.loads(message_data["Body"])

    def __init__(self, *args, **kwargs):
        pass

    async def connect(self):
        """Lazily create the shared SQS client.

        Raises:
            QueueConnectionFailed: if the client cannot be created.
        """
        logger.debug(f"{self.__class__.__name__}.connect", priority=2)
        # share the queue client at the global level
        if AbstractQueue._queue_client is None:
            try:
                logger.debug("connecting to sqs", priority=3)
                # __aenter__ is called directly; the client lives for the process
                AbstractQueue._queue_client = await aioboto3.Session().client("sqs").__aenter__()
            except Exception as e:
                logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
                logger.error(f"{e.__class__.__name__}: {str(e)}")
                raise QueueConnectionFailed(str(e)) from e
        else:
            logger.debug("using existing connection to sqs", priority=3)

    async def send(self, message):
        """Send message to the queue; dicts are JSON-encoded first.

        Returns True on success.

        Raises:
            SendFailed: if the send fails.
        """
        await self.connect()
        logger.debug(f"{self.__class__.__name__}.send", priority=2)
        if isinstance(message, dict):
            message = json.dumps(message)
        try:
            await self._queue_client.send_message(
                QueueUrl=self.queue_url,
                MessageBody=message,
            )
            logger.debug(f"{self.__class__.__name__}.send - success", priority=3)
        except Exception as e:
            logger.error(f"{self.__class__.__name__}.send - error", priority=3)
            logger.error(f"{e.__class__.__name__}: {str(e)}")
            raise SendFailed(str(e)) from e
        return True

    async def receive(self, max_number_of_messages=1, visibility_timeout=1, wait_time=1):
        """Receive up to max_number_of_messages, wrapped as Message objects.

        Args:
            max_number_of_messages: SQS MaxNumberOfMessages.
            visibility_timeout: seconds the messages stay hidden from others.
            wait_time: long-poll wait in seconds.

        Raises:
            ReceiveFailed: if the receive fails.
        """
        await self.connect()
        logger.debug(f"{self.__class__.__name__}.receive", priority=2)
        logger.debug(f"max_number_of_messages: {max_number_of_messages}")
        logger.debug(f"visibility_timeout: {visibility_timeout}")
        logger.debug(f"wait_time: {wait_time}")
        try:
            response = await self._queue_client.receive_message(
                QueueUrl=self.queue_url,
                MaxNumberOfMessages=max_number_of_messages,
                VisibilityTimeout=visibility_timeout,
                WaitTimeSeconds=wait_time,
            )
        except Exception as e:
            logger.error(f"{self.__class__.__name__}.receive - error", priority=3)
            logger.error(f"{e.__class__.__name__}: {str(e)}")
            raise ReceiveFailed(str(e)) from e
        response_messages = response.get("Messages", [])
        messages = [self.Message(msg) for msg in response_messages]
        logger.debug(f"number_of_messages_received: {len(messages)}")
        return messages

    async def delete(self, message):
        """Delete a previously received Message from the queue; returns True.

        Raises:
            DeleteFailed: if the delete fails.
        """
        await self.connect()
        logger.debug(f"{self.__class__.__name__}.delete", priority=2)
        logger.debug(f"message.id: {message.id}")
        try:
            await self._queue_client.delete_message(
                QueueUrl=self.queue_url,
                ReceiptHandle=message.id,
            )
        except Exception as e:
            logger.error(f"{self.__class__.__name__}.delete - error", priority=3)
            logger.error(f"{e.__class__.__name__}: {str(e)}")
            raise DeleteFailed(str(e)) from e
        return True
| import json | ||
| import aioboto3 | ||
| from ...loggers.logger import Logger | ||
logger = Logger()  # module-level logger shared by all definitions in this module
class GetSecretFailed(Exception):
    """Raised by Secrets.get when a secret value cannot be retrieved."""
    pass
class SecretsConnectionFailed(Exception):
    """Raised by Secrets.connect when the Secrets Manager client cannot be created."""
    pass
class Secrets:
    """Async AWS Secrets Manager accessor with a single globally shared client."""

    # internal use only - shared across all instances
    _secrets_client = None

    def __init__(self, *args, **kwargs):
        pass

    async def connect(self):
        """Lazily create the shared Secrets Manager client.

        Raises:
            SecretsConnectionFailed: if the client cannot be created.
        """
        # share the secrets client at the global level
        if Secrets._secrets_client is None:
            try:
                # __aenter__ is called directly; the client lives for the process
                Secrets._secrets_client = await aioboto3.Session().client("secretsmanager").__aenter__()
            except Exception as e:
                # fix: original used self.__name__, which does not exist on
                # instances and raised AttributeError while handling the error
                logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
                logger.error(f"{e.__class__.__name__}: {str(e)}")
                raise SecretsConnectionFailed(str(e)) from e

    async def get(self, secret_name):
        """Return the secret value, JSON-decoded when possible, else the raw string.

        Raises:
            GetSecretFailed: if the secret cannot be retrieved.
        """
        await self.connect()
        logger.debug(f"{self.__class__.__name__}.get", priority=2)
        logger.debug(f"secret_name: {secret_name}")
        try:
            get_secret_value_response = await self._secrets_client.get_secret_value(
                SecretId=secret_name,
            )
        except Exception as e:
            logger.error(f"{self.__class__.__name__}.get - error", priority=3)
            logger.error(f"{e.__class__.__name__}: {str(e)}")
            raise GetSecretFailed(str(e)) from e
        secret_string = get_secret_value_response["SecretString"]
        try:
            return json.loads(secret_string)
        except ValueError:
            # not JSON - return the raw secret string
            return secret_string
| import traceback | ||
| from collections import OrderedDict | ||
| import httpx | ||
| from ...constants.error_constants import ErrorConstants | ||
| from ...loggers.logger import Logger | ||
| from ...services.service import Service as _Service | ||
logger = Logger()  # module-level logger shared by all definitions in this module
class Service(_Service):
    """Async variant of Service that performs HTTP requests with httpx.AsyncClient."""

    async def request(self, *args, **kwargs) -> _Service.Response:  # type: ignore
        """Build a Request from the given arguments and invoke it."""
        return await self.invoke(self.Request(*args, **kwargs))

    async def invoke(self, request: _Service.Request) -> _Service.Response:  # type: ignore
        """Send the request and return a Response.

        Any error while sending or processing the response is logged and
        converted into a 500 Response rather than propagating.
        """
        logger.debug(f"{self.__class__.__name__}.invoke", priority=2)
        self.log_request(request)
        # open all files before sending them
        opened_request_files = OrderedDict()
        try:
            for key, file in request.files.items():
                opened_request_files[key] = open(file, "rb")
            try:
                async with httpx.AsyncClient() as client:
                    http_response = await client.request(
                        url=self.request_base_url + request.url,
                        params=request.parameters,
                        method=request.method,
                        headers=request.headers,
                        cookies=request.cookies,
                        data=request.data,
                        json=request.json,
                        files=opened_request_files,
                    )
                response_status_code = http_response.status_code
                response_headers = dict(http_response.headers)
                response_cookies = dict(http_response.cookies)
                # prefer a JSON body, fall back to the raw text
                try:
                    response_data = http_response.json()
                except Exception:  # non-JSON body
                    response_data = http_response.text
                response = self.Response(
                    status_code=response_status_code,
                    headers=response_headers,
                    cookies=response_cookies,
                    data=response_data,
                )
            except Exception as e:
                logger.error(f"{self.__class__.__name__}.invoke - error", priority=3)
                logger.error(f"{e.__class__.__name__}: {str(e)}")
                logger.error(traceback.format_exc())
                response = self.Response(
                    status_code=500,
                    headers={},
                    cookies={},
                    data={
                        "error": ErrorConstants.Services.INTERNAL_SERVER_ERROR,
                    },
                )
        finally:
            # fix: the original never closed these handles (resource leak)
            for opened_file in opened_request_files.values():
                try:
                    opened_file.close()
                except Exception:
                    pass
        self.log_response(response)
        return response
| import os | ||
| import uuid | ||
| from pathlib import Path | ||
| import aioboto3 | ||
| from ...loggers.logger import Logger | ||
logger = Logger()  # module-level logger shared by all definitions in this module
class DeleteFailed(Exception):
    """Raised by AbstractStorage.delete when a remote object cannot be deleted."""
    pass
class DownloadFailed(Exception):
    """Raised by AbstractStorage.download when a remote file cannot be downloaded."""
    pass
class StorageConnectionFailed(Exception):
    """Raised by AbstractStorage.connect when the S3 client cannot be created."""
    pass
class UploadFailed(Exception):
    """Raised by AbstractStorage.upload when a local file cannot be uploaded."""
    pass
class AbstractStorage:
    """Async S3 storage wrapper with a single globally shared client.

    Subclasses must set ``bucket_name`` and ``bucket_public_url``.
    """

    # internal use only - shared across all subclasses
    _storage_client = None

    # extension required
    bucket_name = None
    bucket_public_url = None

    class File:
        """Describes a file's remote path, local path, and public URL."""

        def __init__(self, remote_path=None, local_path=None, url=None):
            self.remote_path = remote_path
            self.local_path = local_path
            self.url = url

    def __init__(self, *args, **kwargs):
        pass

    async def connect(self):
        """Lazily create the shared S3 client.

        Raises:
            StorageConnectionFailed: if the client cannot be created.
        """
        logger.debug(f"{self.__class__.__name__}.connect", priority=2)
        # share the storage client at the global level
        if AbstractStorage._storage_client is None:
            try:
                logger.debug("connecting to s3", priority=3)
                logger.debug(f"bucket_name: {self.bucket_name}")
                session = aioboto3.Session()
                # __aenter__ is called directly; the client lives for the process
                AbstractStorage._storage_client = await session.client("s3").__aenter__()
            except Exception as e:
                logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
                logger.error(f"{e.__class__.__name__}: {str(e)}")
                raise StorageConnectionFailed(str(e)) from e
        else:
            # consistency with the other connect() implementations in this package
            logger.debug("using existing connection to s3", priority=3)

    async def upload(self, local_file_path, remote_file_path=None):
        """Upload a local file to the bucket and return a File descriptor.

        When remote_file_path is omitted, a UUID-prefixed basename is used to
        avoid collisions.

        Raises:
            UploadFailed: if the upload fails.
        """
        await self.connect()
        if remote_file_path is None:
            remote_file_path = f"{uuid.uuid4()}-{os.path.basename(local_file_path)}"
        logger.debug(f"{self.__class__.__name__}.upload", priority=2)
        logger.debug(f"local_file_path: {local_file_path}")
        logger.debug(f"remote_file_path: {remote_file_path}")
        try:
            await self._storage_client.upload_file(local_file_path, self.bucket_name, remote_file_path)
        except Exception as e:
            logger.error(f"{self.__class__.__name__}.upload - error", priority=3)
            logger.error(f"{e.__class__.__name__}: {str(e)}")
            raise UploadFailed(str(e)) from e
        return self.File(
            remote_path=remote_file_path,
            local_path=local_file_path,
            url=f"{self.bucket_public_url}/{remote_file_path}",
        )

    async def download(self, remote_file_path, local_file_path=None):
        """Download a remote file and return a File descriptor.

        When local_file_path is omitted, the file is placed under the current
        working directory mirroring the remote path.

        Raises:
            DownloadFailed: if the download fails.
        """
        await self.connect()
        if local_file_path is None:
            local_file_path = os.path.join(os.getcwd(), remote_file_path)
        # ensure local_file_path directories exist
        Path(os.path.dirname(local_file_path)).mkdir(parents=True, exist_ok=True)
        logger.debug(f"{self.__class__.__name__}.download", priority=2)
        logger.debug(f"remote_file_path: {remote_file_path}")
        logger.debug(f"local_file_path: {local_file_path}")
        try:
            await self._storage_client.download_file(self.bucket_name, remote_file_path, local_file_path)
        except Exception as e:
            logger.error(f"{self.__class__.__name__}.download - error", priority=3)
            logger.error(f"{e.__class__.__name__}: {str(e)}")
            raise DownloadFailed(str(e)) from e
        return self.File(
            remote_path=remote_file_path,
            local_path=local_file_path,
            url=f"{self.bucket_public_url}/{remote_file_path}",
        )

    async def delete(self, remote_file_path):
        """Delete a remote object from the bucket; returns True.

        Raises:
            DeleteFailed: if the delete fails.
        """
        await self.connect()
        logger.debug(f"{self.__class__.__name__}.delete", priority=2)
        logger.debug(f"remote_file_path: {remote_file_path}")
        try:
            await self._storage_client.delete_object(Bucket=self.bucket_name, Key=remote_file_path)
        except Exception as e:
            logger.error(f"{self.__class__.__name__}.delete - error", priority=3)
            logger.error(f"{e.__class__.__name__}: {str(e)}")
            raise DeleteFailed(str(e)) from e
        return True
| Metadata-Version: 2.2 | ||
| Name: microgue | ||
| Version: 4.1.0 | ||
| Version: 4.2.0 | ||
| Summary: This project contains bootstrap code to speed up the development of AWS based microservices | ||
@@ -5,0 +5,0 @@ Author: Michael Hudelson |
@@ -11,2 +11,15 @@ README.md | ||
| microgue.egg-info/top_level.txt | ||
| microgue/asynchronous/__init__.py | ||
| microgue/asynchronous/caches/__init__.py | ||
| microgue/asynchronous/caches/abstract_cache.py | ||
| microgue/asynchronous/events/__init__.py | ||
| microgue/asynchronous/events/abstract_event_bus.py | ||
| microgue/asynchronous/queues/__init__.py | ||
| microgue/asynchronous/queues/abstract_queue.py | ||
| microgue/asynchronous/secrets/__init__.py | ||
| microgue/asynchronous/secrets/secrets.py | ||
| microgue/asynchronous/services/__init__.py | ||
| microgue/asynchronous/services/service.py | ||
| microgue/asynchronous/storages/__init__.py | ||
| microgue/asynchronous/storages/abstract_storage.py | ||
| microgue/caches/__init__.py | ||
@@ -13,0 +26,0 @@ microgue/caches/abstract_cache.py |
+1
-1
| Metadata-Version: 2.2 | ||
| Name: microgue | ||
| Version: 4.1.0 | ||
| Version: 4.2.0 | ||
| Summary: This project contains bootstrap code to speed up the development of AWS based microservices | ||
@@ -5,0 +5,0 @@ Author: Michael Hudelson |
+8
-1
@@ -8,3 +8,3 @@ from setuptools import setup | ||
| name="microgue", | ||
| version="4.1.0", | ||
| version="4.2.0", | ||
| author="Michael Hudelson", | ||
@@ -18,2 +18,9 @@ author_email="michaelhudelson@gmail.com", | ||
| "microgue", | ||
| "microgue.asynchronous", | ||
| "microgue.asynchronous.caches", | ||
| "microgue.asynchronous.events", | ||
| "microgue.asynchronous.queues", | ||
| "microgue.asynchronous.secrets", | ||
| "microgue.asynchronous.services", | ||
| "microgue.asynchronous.storages", | ||
| "microgue.caches", | ||
@@ -20,0 +27,0 @@ "microgue.events", |
Alert delta unavailable
Currently unable to show alert delta for PyPI packages.
92937
25.11%48
37.14%1935
28.74%