Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a Demo | Install | Sign in
Socket

microgue

Package Overview
Dependencies
Maintainers
1
Versions
83
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

microgue - npm Package Compare versions

Comparing version
4.2.1
to
4.2.2
+1
-1
microgue.egg-info/PKG-INFO
Metadata-Version: 2.2
Name: microgue
Version: 4.2.1
Version: 4.2.2
Summary: This project contains bootstrap code to speed up the development of AWS based microservices

@@ -5,0 +5,0 @@ Author: Michael Hudelson

@@ -34,25 +34,19 @@ import datetime

logger.debug(f"{self.__class__.__name__}.connect", priority=2)
try:
logger.debug("connecting to redis", priority=3)
logger.debug(f"host: {self.host}")
logger.debug(f"port: {self.port}")
self._cache_client = await redis.Redis(
host=self.host,
port=self.port,
socket_connect_timeout=self.connection_timeout,
)
except Exception as e:
logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
if self.connection_required:
raise CacheConnectionFailed(str(e))
else:
self._cache_client = None
# share the cache client at the __class__ level
if self.__class__._cache_client is None:
try:
logger.debug("connecting to redis", priority=3)
logger.debug(f"host: {self.host}")
logger.debug(f"port: {self.port}")
self.__class__._cache_client = await redis.Redis(
host=self.host,
port=self.port,
socket_connect_timeout=self.connection_timeout,
)
await self.__class__._cache_client.ping()
except Exception as e:
logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
if self.connection_required:
raise CacheConnectionFailed(str(e))
else:
self.__class__._cache_client = None
else:
logger.debug("using existing connection to redis", priority=3)
def _prefix_key(self, key):

@@ -59,0 +53,0 @@ if self.prefix:

@@ -10,6 +10,2 @@ import json

class EventBusConnectionFailed(Exception):
pass
class PublishFailed(Exception):

@@ -20,5 +16,2 @@ pass

class AbstractEventBus:
# internal use only
_event_bus_client = None
# extension required

@@ -30,50 +23,35 @@ event_bus_name = None

def __init__(self, *args, **kwargs):
pass
logger.debug("connecting to eventbridge", priority=3)
logger.debug(f"event_bus_name: {self.event_bus_name}")
logger.debug(f"event_bus_region: {self.event_bus_region}")
async def connect(self):
logger.debug(f"{self.__class__.__name__}.connect", priority=2)
# share the event bus client at the global level
if AbstractEventBus._event_bus_client is None:
try:
logger.debug("connecting to eventbridge", priority=3)
logger.debug(f"event_bus_name: {self.event_bus_name}")
logger.debug(f"event_bus_region: {self.event_bus_region}")
AbstractEventBus._event_bus_client = await aioboto3.Session().client("events", region_name=self.event_bus_region).__aenter__()
except Exception as e:
logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise EventBusConnectionFailed(str(e))
async def publish(self, event_type, event_data=None):
await self.connect()
# defaults
event_data = {} if event_data is None else event_data
logger.debug(f"{self.__class__.__name__}.publish", priority=2)
logger.debug(f"event_type: {event_type}")
try:
response = await self._event_bus_client.put_events(
Entries=[
{
"Source": self.event_source,
"DetailType": event_type,
"Detail": json.dumps(event_data),
"EventBusName": self.event_bus_name,
}
]
)
except Exception as e:
logger.error(f"{self.__class__.__name__}.publish - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise PublishFailed(str(e))
async with aioboto3.Session().client("events", region_name=self.event_bus_region) as event_bus_client:
try:
response = await event_bus_client.put_events(
Entries=[
{
"Source": self.event_source,
"DetailType": event_type,
"Detail": json.dumps(event_data),
"EventBusName": self.event_bus_name,
}
]
)
except Exception as e:
logger.error(f"{self.__class__.__name__}.publish - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise PublishFailed(str(e))
if response.get("ResponseMetadata", {}).get("HTTPStatusCode") == 200:
logger.debug(f"{self.__class__.__name__}.publish - success", priority=3)
return True
else:
logger.critical(f"{self.__class__.__name__}.publish - error", priority=3)
logger.critical(f"response: {response}")
raise PublishFailed(response)
if response.get("ResponseMetadata", {}).get("HTTPStatusCode") == 200:
logger.debug(f"{self.__class__.__name__}.publish - success", priority=3)
return True
else:
logger.critical(f"{self.__class__.__name__}.publish - error", priority=3)
logger.critical(f"response: {response}")
raise PublishFailed(response)

@@ -22,10 +22,3 @@ import json

class QueueConnectionFailed(Exception):
pass
class AbstractQueue:
# internal use only
_queue_client = None
# extension required

@@ -40,42 +33,23 @@ queue_url = None

def __init__(self, *args, **kwargs):
pass
logger.debug("connecting to sqs", priority=3)
async def connect(self):
logger.debug(f"{self.__class__.__name__}.connect", priority=2)
# share the queue client at the global level
if AbstractQueue._queue_client is None:
async def send(self, message):
logger.debug(f"{self.__class__.__name__}.send", priority=2)
async with aioboto3.Session().client("sqs") as queue_client:
if isinstance(message, dict):
message = json.dumps(message)
try:
logger.debug("connecting to sqs", priority=3)
AbstractQueue._queue_client = await aioboto3.Session().client("sqs").__aenter__()
await queue_client.send_message(
QueueUrl=self.queue_url,
MessageBody=message,
)
logger.debug(f"{self.__class__.__name__}.send - success", priority=3)
except Exception as e:
logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
logger.error(f"{self.__class__.__name__}.send - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise QueueConnectionFailed(str(e))
else:
logger.debug("using existing connection to sqs", priority=3)
raise SendFailed(str(e))
async def send(self, message):
await self.connect()
return True
logger.debug(f"{self.__class__.__name__}.send", priority=2)
if isinstance(message, dict):
message = json.dumps(message)
try:
await self._queue_client.send_message(
QueueUrl=self.queue_url,
MessageBody=message,
)
logger.debug(f"{self.__class__.__name__}.send - success", priority=3)
except Exception as e:
logger.error(f"{self.__class__.__name__}.send - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise SendFailed(str(e))
return True
async def receive(self, max_number_of_messages=1, visibility_timeout=1, wait_time=1):
await self.connect()
logger.debug(f"{self.__class__.__name__}.receive", priority=2)

@@ -85,37 +59,35 @@ logger.debug(f"max_number_of_messages: {max_number_of_messages}")

logger.debug(f"wait_time: {wait_time}")
async with aioboto3.Session().client("sqs") as queue_client:
try:
response = await queue_client.receive_message(
QueueUrl=self.queue_url,
MaxNumberOfMessages=max_number_of_messages,
VisibilityTimeout=visibility_timeout,
WaitTimeSeconds=wait_time,
)
except Exception as e:
logger.error(f"{self.__class__.__name__}.receive - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise ReceiveFailed(str(e))
try:
response = await self._queue_client.receive_message(
QueueUrl=self.queue_url,
MaxNumberOfMessages=max_number_of_messages,
VisibilityTimeout=visibility_timeout,
WaitTimeSeconds=wait_time,
)
except Exception as e:
logger.error(f"{self.__class__.__name__}.receive - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise ReceiveFailed(str(e))
response_messages = response.get("Messages", [])
messages = [self.Message(msg) for msg in response_messages]
response_messages = response.get("Messages", [])
messages = [self.Message(msg) for msg in response_messages]
logger.debug(f"number_of_messages_received: {len(messages)}")
return messages
logger.debug(f"number_of_messages_received: {len(messages)}")
return messages
async def delete(self, message):
await self.connect()
logger.debug(f"{self.__class__.__name__}.delete", priority=2)
logger.debug(f"message.id: {message.id}")
async with aioboto3.Session().client("sqs") as queue_client:
try:
await queue_client.delete_message(
QueueUrl=self.queue_url,
ReceiptHandle=message.id,
)
except Exception as e:
logger.error(f"{self.__class__.__name__}.delete - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise DeleteFailed(str(e))
try:
await self._queue_client.delete_message(
QueueUrl=self.queue_url,
ReceiptHandle=message.id,
)
except Exception as e:
logger.error(f"{self.__class__.__name__}.delete - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise DeleteFailed(str(e))
return True
return True

@@ -14,41 +14,22 @@ import json

class SecretsConnectionFailed(Exception):
pass
class Secrets:
# internal use only
_secrets_client = None
def __init__(self, *args, **kwargs):
pass
async def connect(self):
# share the secrets client at the global level
if Secrets._secrets_client is None:
async def get(self, secret_name):
logger.debug(f"{self.__class__.__name__}.get", priority=2)
logger.debug(f"secret_name: {secret_name}")
async with aioboto3.Session().client("secretsmanager") as secrets_client:
try:
Secrets._secrets_client = await aioboto3.Session().client("secretsmanager").__aenter__()
get_secret_value_response = await secrets_client.get_secret_value(
SecretId=secret_name,
)
except Exception as e:
logger.error(f"{self.__name__}.connect - error", priority=3)
logger.error(f"{self.__class__.__name__}.get - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise SecretsConnectionFailed(str(e))
raise GetSecretFailed(str(e))
async def get(self, secret_name):
await self.connect()
logger.debug(f"{self.__class__.__name__}.get", priority=2)
logger.debug(f"secret_name: {secret_name}")
try:
get_secret_value_response = await self._secrets_client.get_secret_value(
SecretId=secret_name,
)
except Exception as e:
logger.error(f"{self.__class__.__name__}.get - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise GetSecretFailed(str(e))
try:
return json.loads(get_secret_value_response["SecretString"])
except: # noqa
return get_secret_value_response["SecretString"]
try:
return json.loads(get_secret_value_response["SecretString"])
except: # noqa
return get_secret_value_response["SecretString"]

@@ -20,6 +20,2 @@ import os

class StorageConnectionFailed(Exception):
pass
class UploadFailed(Exception):

@@ -30,5 +26,2 @@ pass

class AbstractStorage:
# internal use only
_storage_client = None
# extension required

@@ -45,81 +38,63 @@ bucket_name = None

def __init__(self, *args, **kwargs):
pass
logger.debug("connecting to s3", priority=3)
logger.debug(f"bucket_name: {self.bucket_name}")
async def connect(self):
logger.debug(f"{self.__class__.__name__}.connect", priority=2)
async def upload(self, local_file_path, remote_file_path=None):
async with aioboto3.Session().client("s3") as storage_client:
if remote_file_path is None:
remote_file_path = f"{uuid.uuid4()}-{os.path.basename(local_file_path)}"
# share the storage client at the global level
if AbstractStorage._storage_client is None:
logger.debug(f"{self.__class__.__name__}.upload", priority=2)
logger.debug(f"local_file_path: {local_file_path}")
logger.debug(f"remote_file_path: {remote_file_path}")
try:
logger.debug("connecting to s3", priority=3)
logger.debug(f"bucket_name: {self.bucket_name}")
session = aioboto3.Session()
AbstractStorage._storage_client = await session.client("s3").__aenter__()
await storage_client.upload_file(local_file_path, self.bucket_name, remote_file_path)
except Exception as e:
logger.error(f"{self.__class__.__name__}.connect - error", priority=3)
logger.error(f"{self.__class__.__name__}.upload - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise StorageConnectionFailed(str(e))
raise UploadFailed(str(e))
async def upload(self, local_file_path, remote_file_path=None):
await self.connect()
return self.File(
remote_path=remote_file_path,
local_path=local_file_path,
url=f"{self.bucket_public_url}/{remote_file_path}",
)
if remote_file_path is None:
remote_file_path = f"{uuid.uuid4()}-{os.path.basename(local_file_path)}"
logger.debug(f"{self.__class__.__name__}.upload", priority=2)
logger.debug(f"local_file_path: {local_file_path}")
logger.debug(f"remote_file_path: {remote_file_path}")
try:
await self._storage_client.upload_file(local_file_path, self.bucket_name, remote_file_path)
except Exception as e:
logger.error(f"{self.__class__.__name__}.upload - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise UploadFailed(str(e))
return self.File(
remote_path=remote_file_path,
local_path=local_file_path,
url=f"{self.bucket_public_url}/{remote_file_path}",
)
async def download(self, remote_file_path, local_file_path=None):
await self.connect()
async with aioboto3.Session().client("s3") as storage_client:
if local_file_path is None:
local_file_path = os.path.join(os.getcwd(), remote_file_path)
if local_file_path is None:
local_file_path = os.path.join(os.getcwd(), remote_file_path)
# ensure local_file_path directories exist
Path(os.path.dirname(local_file_path)).mkdir(parents=True, exist_ok=True)
# ensure local_file_path directories exist
Path(os.path.dirname(local_file_path)).mkdir(parents=True, exist_ok=True)
logger.debug(f"{self.__class__.__name__}.download", priority=2)
logger.debug(f"remote_file_path: {remote_file_path}")
logger.debug(f"local_file_path: {local_file_path}")
logger.debug(f"{self.__class__.__name__}.download", priority=2)
logger.debug(f"remote_file_path: {remote_file_path}")
logger.debug(f"local_file_path: {local_file_path}")
try:
await storage_client.download_file(self.bucket_name, remote_file_path, local_file_path)
except Exception as e:
logger.error(f"{self.__class__.__name__}.download - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise DownloadFailed(str(e))
try:
await self._storage_client.download_file(self.bucket_name, remote_file_path, local_file_path)
except Exception as e:
logger.error(f"{self.__class__.__name__}.download - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise DownloadFailed(str(e))
return self.File(
remote_path=remote_file_path,
local_path=local_file_path,
url=f"{self.bucket_public_url}/{remote_file_path}",
)
return self.File(
remote_path=remote_file_path,
local_path=local_file_path,
url=f"{self.bucket_public_url}/{remote_file_path}",
)
async def delete(self, remote_file_path):
await self.connect()
logger.debug(f"{self.__class__.__name__}.delete", priority=2)
logger.debug(f"remote_file_path: {remote_file_path}")
async with aioboto3.Session().client("s3") as storage_client:
try:
await storage_client.delete_object(Bucket=self.bucket_name, Key=remote_file_path)
except Exception as e:
logger.error(f"{self.__class__.__name__}.delete - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise DeleteFailed(str(e))
try:
await self._storage_client.delete_object(Bucket=self.bucket_name, Key=remote_file_path)
except Exception as e:
logger.error(f"{self.__class__.__name__}.delete - error", priority=3)
logger.error(f"{e.__class__.__name__}: {str(e)}")
raise DeleteFailed(str(e))
return True
return True
Metadata-Version: 2.2
Name: microgue
Version: 4.2.1
Version: 4.2.2
Summary: This project contains bootstrap code to speed up the development of AWS based microservices

@@ -5,0 +5,0 @@ Author: Michael Hudelson

@@ -8,3 +8,3 @@ from setuptools import setup

name="microgue",
version="4.2.1",
version="4.2.2",
author="Michael Hudelson",

@@ -11,0 +11,0 @@ author_email="michaelhudelson@gmail.com",