Latest Threat Research (SANDWORM_MODE): Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a Demo · Install · Sign in
Socket

karton-core

Package Overview
Dependencies
Maintainers
1
Versions
31
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

karton-core - npm Package Compare versions

Comparing version
5.3.4
to
5.4.0
+1
karton_core-5.4.0-nspkg.pth
import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('karton',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('karton', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('karton', [os.path.dirname(p)])));m = m or sys.modules.setdefault('karton', types.ModuleType('karton'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)
+1
-1

@@ -1,1 +0,1 @@

__version__ = "5.3.4"
__version__ = "5.4.0"

@@ -452,5 +452,3 @@ import dataclasses

return [
Task.unserialize(task_data, backend=self)
if parse_resources
else Task.unserialize(task_data, parse_resources=False)
Task.unserialize(task_data, backend=self, parse_resources=parse_resources)
for chunk in keys

@@ -469,5 +467,5 @@ for task_data in self.redis.mget(chunk)

yield from (
Task.unserialize(task_data, backend=self)
if parse_resources
else Task.unserialize(task_data, parse_resources=False)
Task.unserialize(
task_data, backend=self, parse_resources=parse_resources
)
for task_data in self.redis.mget(chunk)

@@ -539,2 +537,54 @@ if task_data is not None

def _iter_legacy_task_tree(
    self, root_uid: str, chunk_size: int = 1000, parse_resources: bool = True
) -> Iterator[Task]:
    """
    Yields tasks created by Karton <5.4.0 (unrouted tasks from old
    producers, or tasks that existed before the upgrade) belonging to
    the given task tree.

    Used internally by iter_task_tree.

    :param root_uid: Root identifier of the task tree to filter on
    :param chunk_size: Size of chunks passed to the Redis SCAN and MGET commands
    :param parse_resources: If set to False, resources are not parsed,
        which speeds up deserialization
    :return: Iterator with task objects
    """
    # Legacy task keys lack the "{root_uid}" prefix introduced in 5.4.0,
    # so match every key whose uid part does not start with "{"
    legacy_keys = self.redis.scan_iter(
        match=f"{KARTON_TASK_NAMESPACE}:[^{{]*", count=chunk_size
    )
    for key_chunk in chunks_iter(legacy_keys, chunk_size):
        for task_data in self.redis.mget(key_chunk):
            # Keys may expire between SCAN and MGET
            if task_data is None:
                continue
            task = Task.unserialize(
                task_data, backend=self, parse_resources=parse_resources
            )
            # Old-format keys don't encode root_uid, so filter after parsing
            if task.root_uid == root_uid:
                yield task
def iter_task_tree(
    self, root_uid: str, chunk_size: int = 1000, parse_resources: bool = True
) -> Iterator[Task]:
    """
    Iterates all tasks that belong to the same analysis task tree
    and have the same root_uid

    :param root_uid: Root identifier of task tree
    :param chunk_size: Size of chunks passed to the Redis SCAN and MGET command
    :param parse_resources: If set to False, resources are not parsed.
        It speeds up deserialization. Read :py:meth:`Task.unserialize`
        documentation to learn more.
    :return: Iterator with task objects
    """
    # First emit old-format tasks (<5.4.0 producers or tasks that
    # existed before the upgrade); they need per-task filtering
    yield from self._iter_legacy_task_tree(
        root_uid, chunk_size=chunk_size, parse_resources=parse_resources
    )
    # New-format (>=5.4.0) keys embed the root_uid as a "{root_uid}:"
    # prefix, so a pattern scan selects the tree directly
    new_format_keys = self.redis.scan_iter(
        match=f"{KARTON_TASK_NAMESPACE}:{{{root_uid}}}:*", count=chunk_size
    )
    yield from self._iter_tasks(
        new_format_keys, chunk_size=chunk_size, parse_resources=parse_resources
    )
def register_task(self, task: Task, pipe: Optional[Pipeline] = None) -> None:

@@ -541,0 +591,0 @@ """

from collections import defaultdict
from typing import Dict, List
from typing import Dict, List, Optional

@@ -12,5 +12,5 @@ from .backend import KartonBackend, KartonBind

:param bind: :py:meth:`KartonBind` object representing the queue bind
:param bind: :class:`KartonBind` object representing the queue bind
:param tasks: List of tasks currently in queue
:param state: :py:meth:`KartonBackend` object to be used
:param state: :class:`KartonState` object to be used
"""

@@ -52,3 +52,3 @@

:param tasks: List of tasks
:param state: :py:meth:`KartonBackend` object to be used
:param state: :class:`KartonState` object to be used
"""

@@ -94,3 +94,3 @@

:param tasks: Task objects to group
:param state: :py:meth:`KartonBackend` to bind to created queues
:param state: :class:`KartonState` object to be used
:return: A dictionary containing the queue names and lists of tasks

@@ -125,28 +125,66 @@ """

def __init__(self, backend: KartonBackend) -> None:
def __init__(self, backend: KartonBackend, parse_resources: bool = False) -> None:
self.backend = backend
self.binds = {bind.identity: bind for bind in backend.get_binds()}
self.replicas = backend.get_online_consumers()
self.tasks = backend.get_all_tasks()
self.pending_tasks = [
task for task in self.tasks if task.status != TaskState.FINISHED
]
self.parse_resources = parse_resources
# Tasks grouped by root_uid
tasks_per_analysis = defaultdict(list)
self._tasks: Optional[List[Task]] = None
self._pending_tasks: Optional[List[Task]] = None
self._analyses: Optional[Dict[str, KartonAnalysis]] = None
self._queues: Optional[Dict[str, KartonQueue]] = None
for task in self.pending_tasks:
tasks_per_analysis[task.root_uid].append(task)
@property
def tasks(self) -> List[Task]:
if self._tasks is None:
self._tasks = self.backend.get_all_tasks(
parse_resources=self.parse_resources
)
return self._tasks
self.analyses = {
root_uid: KartonAnalysis(root_uid=root_uid, tasks=tasks, state=self)
for root_uid, tasks in tasks_per_analysis.items()
}
queues = get_queues_for_tasks(self.pending_tasks, self)
# Present registered queues without tasks
for bind_name, bind in self.binds.items():
if bind_name not in queues:
queues[bind_name] = KartonQueue(
bind=self.binds[bind_name], tasks=[], state=self
@property
def pending_tasks(self) -> List[Task]:
if self._pending_tasks is None:
self._pending_tasks = [
task for task in self.tasks if task.status != TaskState.FINISHED
]
return self._pending_tasks
@property
def analyses(self) -> Dict[str, KartonAnalysis]:
if self._analyses is None:
# Tasks grouped by root_uid
tasks_per_analysis = defaultdict(list)
for task in self.pending_tasks:
tasks_per_analysis[task.root_uid].append(task)
self._analyses = {
root_uid: KartonAnalysis(root_uid=root_uid, tasks=tasks, state=self)
for root_uid, tasks in tasks_per_analysis.items()
}
return self._analyses
@property
def queues(self) -> Dict[str, KartonQueue]:
if self._queues is None:
queues = get_queues_for_tasks(self.pending_tasks, self)
# Present registered queues without tasks
for bind_name, bind in self.binds.items():
if bind_name not in queues:
queues[bind_name] = KartonQueue(
bind=self.binds[bind_name], tasks=[], state=self
)
self._queues = queues
return self._queues
def get_analysis(self, root_uid: str) -> KartonAnalysis:
return KartonAnalysis(
root_uid=root_uid,
tasks=list(
self.backend.iter_task_tree(
root_uid, parse_resources=self.parse_resources
)
self.queues = queues
),
state=self,
)

@@ -109,9 +109,14 @@ import enum

if uid is None:
self.uid = str(uuid.uuid4())
task_uid = str(uuid.uuid4())
if root_uid is None:
self.root_uid = task_uid
else:
self.root_uid = root_uid
# New-style UID format introduced in v5.4.0
# {12345678-1234-1234-1234-12345678abcd}:12345678-1234-1234-1234-12345678abcd
self.uid = f"{{{self.root_uid}}}:{task_uid}"
else:
self.uid = uid
if root_uid is None:
self.root_uid = self.uid
else:
if root_uid is None:
raise ValueError("root_uid cannot be None when uid is not None")
self.root_uid = root_uid

@@ -141,2 +146,17 @@

@property
def task_uid(self) -> str:
    """
    Task-local identifier: the part of ``self.uid`` following the
    ``{root_uid}:`` prefix, or the whole uid for legacy (pre-5.4.0)
    tasks that carry no prefix.
    """
    return self.fquid_to_uid(self.uid)
@staticmethod
def fquid_to_uid(fquid: str) -> str:
"""
Gets task uid from fully-qualified fquid ({root_uid}:task_uid)
:return: Task uid
"""
if ":" not in fquid:
return fquid
return fquid.split(":")[-1]
def fork_task(self) -> "Task":

@@ -439,5 +459,3 @@ """

When resource deserialization is turned off, Task.unserialize will try
to use faster 3rd-party JSON parser (orjson) if it's installed. It's not
added as a required dependency but can speed up things if you need to check
status of multiple tasks at once.
to use faster 3rd-party JSON parser (orjson).
:return: Unserialized Task object

@@ -463,7 +481,3 @@

else:
try:
task_data = orjson.loads(data)
except orjson.JSONDecodeError:
# fallback, in case orjson raises exception during loading
task_data = json.loads(data, object_hook=unserialize_resources)
task_data = orjson.loads(data)

@@ -470,0 +484,0 @@ # Compatibility with Karton <5.2.0

@@ -169,3 +169,3 @@ import argparse

# Performs routing of task
self.log.info("[%s] Processing task %s", task.root_uid, task.uid)
self.log.info("[%s] Processing task %s", task.root_uid, task.task_uid)
# store the producer-task relationship in redis for task tracking

@@ -172,0 +172,0 @@ self.backend.log_identity_output(

Metadata-Version: 2.1
Name: karton-core
Version: 5.3.4
Version: 5.4.0
Summary: Distributed malware analysis orchestration framework

@@ -5,0 +5,0 @@ Home-page: https://github.com/CERT-Polska/karton

+13
-13

@@ -1,9 +0,9 @@

karton_core-5.3.4-nspkg.pth,sha256=vHa-jm6pBTeInFrmnsHMg9AOeD88czzQy-6QCFbpRcM,539
karton_core-5.4.0-nspkg.pth,sha256=vHa-jm6pBTeInFrmnsHMg9AOeD88czzQy-6QCFbpRcM,539
karton/core/__init__.py,sha256=QuT0BWZyp799eY90tK3H1OD2hwuusqMJq8vQwpB3kG4,337
karton/core/__version__.py,sha256=dAiy67tSM_yYFR8Us_fQT-TDbfV34VpASYNKXfGmEnQ,22
karton/core/backend.py,sha256=evOzlz1v1sxWusc8VojGAYyeyi9fcbVoEPm6WoNT1Xs,34696
karton/core/__version__.py,sha256=xjYaBGUFGg0kGZj_WhuoFyPD8NILPsr79SaMwmYQGSg,22
karton/core/backend.py,sha256=-sQG7utnaWLJOEcafeSwEDLnkflPqtSCwg_mn_nnFhg,36727
karton/core/base.py,sha256=C6Lco3E0XCsxvEjeVOLR9fxh_IWJ1vjC9BqUYsQyewE,8083
karton/core/config.py,sha256=7oKchitq6pWzPuXRfjBXqVT_BgGIz2p-CDo1RGaNJQg,8118
karton/core/exceptions.py,sha256=8i9WVzi4PinNlX10Cb-lQQC35Hl-JB5R_UKXa9AUKoQ,153
karton/core/inspect.py,sha256=rIa0u4u12vG_RudPfc9UAS4RZD56W8qbUa8n1dDIkX0,4868
karton/core/inspect.py,sha256=aIJQEOEkD5q2xLlV8nhxY5qL5zqcnprP-2DdP6ecKlE,6150
karton/core/karton.py,sha256=9SOAviG42kSsPqc3EuaHzWtA_KywMtc01hmU6FaJpHo,15007

@@ -14,3 +14,3 @@ karton/core/logger.py,sha256=J3XAyG88U0cwYC9zR6E3QD1uJenrQh7zS9-HgxhqeAs,2040

karton/core/resource.py,sha256=tA3y_38H9HVKIrCeAU70zHUkQUv0BuCQWMC470JLxxc,20321
karton/core/task.py,sha256=WYXzVopg8VlWOc7ncEscHVKivsXHfZc5zWLHW_mxBwY,21000
karton/core/task.py,sha256=diwg8uUl57NEYNRjT1l5CPiNw3EQcU11BnrLul33fx0,21350
karton/core/test.py,sha256=tms-YM7sUKQDHN0vm2_W7DIvHnO_ld_VPsWHnsbKSfk,9102

@@ -20,9 +20,9 @@ karton/core/utils.py,sha256=sEVqGdVPyYswWuVn8wYXBQmln8Az826N_2HgC__pmW8,4090

karton/system/__main__.py,sha256=QJkwIlSwaPRdzwKlNmCAL41HtDAa73db9MZKWmOfxGM,56
karton/system/system.py,sha256=65c8j1Ayra_CrXA1AQPBm00bTnpQiW91iZlTTRfJJI8,13787
karton_core-5.3.4.dist-info/LICENSE,sha256=o8h7hYhn7BJC_-DmrfqWwLjaR_Gbe0TZOOQJuN2ca3I,1519
karton_core-5.3.4.dist-info/METADATA,sha256=w15YiCEMDZrAF_429FAx98NH3tif38vyDxI9g9heY7E,6847
karton_core-5.3.4.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
karton_core-5.3.4.dist-info/entry_points.txt,sha256=FJj5EZuvFP0LkagjX_dLbRGBUnuLjgBiSyiFfq4c86U,99
karton_core-5.3.4.dist-info/namespace_packages.txt,sha256=X8SslCPsqXDCnGZqrYYolzT3xPzJMq1r-ZQSc0jfAEA,7
karton_core-5.3.4.dist-info/top_level.txt,sha256=X8SslCPsqXDCnGZqrYYolzT3xPzJMq1r-ZQSc0jfAEA,7
karton_core-5.3.4.dist-info/RECORD,,
karton/system/system.py,sha256=yF_d71a8w7JYA7IXUt63d5_QBH6x1QplB-xcrzQTXL4,13792
karton_core-5.4.0.dist-info/LICENSE,sha256=o8h7hYhn7BJC_-DmrfqWwLjaR_Gbe0TZOOQJuN2ca3I,1519
karton_core-5.4.0.dist-info/METADATA,sha256=kopeYFCI9EoFQbc7J7woZWjI_5egy29-lYUW7UzEQ2I,6847
karton_core-5.4.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
karton_core-5.4.0.dist-info/entry_points.txt,sha256=FJj5EZuvFP0LkagjX_dLbRGBUnuLjgBiSyiFfq4c86U,99
karton_core-5.4.0.dist-info/namespace_packages.txt,sha256=X8SslCPsqXDCnGZqrYYolzT3xPzJMq1r-ZQSc0jfAEA,7
karton_core-5.4.0.dist-info/top_level.txt,sha256=X8SslCPsqXDCnGZqrYYolzT3xPzJMq1r-ZQSc0jfAEA,7
karton_core-5.4.0.dist-info/RECORD,,
import sys, types, os;has_mfs = sys.version_info > (3, 5);p = os.path.join(sys._getframe(1).f_locals['sitedir'], *('karton',));importlib = has_mfs and __import__('importlib.util');has_mfs and __import__('importlib.machinery');m = has_mfs and sys.modules.setdefault('karton', importlib.util.module_from_spec(importlib.machinery.PathFinder.find_spec('karton', [os.path.dirname(p)])));m = m or sys.modules.setdefault('karton', types.ModuleType('karton'));mp = (m or []) and m.__dict__.setdefault('__path__',[]);(p not in mp) and mp.append(p)