New Research: Supply Chain Attack on Axios Pulls Malicious Dependency from npm. Details →
Socket
Book a Demo · Sign in
Socket

executorlib

Package Overview
Dependencies
Maintainers
2
Versions
45
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

executorlib - pypi Package Compare versions

Comparing version
1.8.0
to
1.8.1
+1
-1
PKG-INFO
Metadata-Version: 2.4
Name: executorlib
Version: 1.8.0
Version: 1.8.1
Summary: Up-scale python functions for high performance computing (HPC) with executorlib.

@@ -5,0 +5,0 @@ Project-URL: Homepage, https://github.com/pyiron/executorlib

@@ -31,5 +31,5 @@ # file generated by setuptools-scm

__version__ = version = '1.8.0'
__version_tuple__ = version_tuple = (1, 8, 0)
__version__ = version = '1.8.1'
__version_tuple__ = version_tuple = (1, 8, 1)
__commit_id__ = commit_id = None

@@ -115,2 +115,3 @@ import os

os.makedirs(self._cwd, exist_ok=True)
set_current_directory_in_environment()
self._process = subprocess.Popen(

@@ -199,1 +200,13 @@ args=self.generate_command(command_lst=command_lst),

return command_prepend_lst
def set_current_directory_in_environment():
    """
    Prepend the current working directory to the PYTHONPATH environment
    variable so that spawned processes can import local Python modules.

    If PYTHONPATH is unset it is created with the current directory as its
    only entry; if the current directory is already listed, nothing changes.
    """
    environment = os.environ
    current_path = os.getcwd()
    existing = environment.get("PYTHONPATH")
    if existing is None:
        environment["PYTHONPATH"] = current_path
    # Compare against the individual PYTHONPATH entries rather than using a
    # plain substring test, which would wrongly match path prefixes
    # (e.g. "/home/user" inside "/home/userx"). os.pathsep keeps the
    # separator portable (":" on POSIX, ";" on Windows).
    elif current_path not in existing.split(os.pathsep):
        environment["PYTHONPATH"] = current_path + os.pathsep + existing

@@ -93,35 +93,26 @@ import contextlib

while len(memory_dict) > 0:
memory_dict = {
key: _check_task_output(
task_key=key,
future_obj=value,
cache_directory=cache_dir_dict[key],
)
for key, value in memory_dict.items()
if not value.done()
}
memory_dict = _refresh_memory_dict(
memory_dict=memory_dict,
cache_dir_dict=cache_dir_dict,
process_dict=process_dict,
terminate_function=terminate_function,
pysqa_config_directory=pysqa_config_directory,
backend=backend,
)
if not task_dict["cancel_futures"] and wait:
if (
terminate_function is not None
and terminate_function == terminate_subprocess
):
for task in process_dict.values():
terminate_function(task=task)
elif terminate_function is not None:
for queue_id in process_dict.values():
terminate_function(
queue_id=queue_id,
config_directory=pysqa_config_directory,
backend=backend,
)
_cancel_processes(
process_dict=process_dict,
terminate_function=terminate_function,
pysqa_config_directory=pysqa_config_directory,
backend=backend,
)
else:
memory_dict = {
key: _check_task_output(
task_key=key,
future_obj=value,
cache_directory=cache_dir_dict[key],
)
for key, value in memory_dict.items()
if not value.done()
}
memory_dict = _refresh_memory_dict(
memory_dict=memory_dict,
cache_dir_dict=cache_dir_dict,
process_dict=process_dict,
terminate_function=terminate_function,
pysqa_config_directory=pysqa_config_directory,
backend=backend,
)
for value in memory_dict.values():

@@ -197,11 +188,10 @@ if not value.done():

else:
memory_dict = {
key: _check_task_output(
task_key=key,
future_obj=value,
cache_directory=cache_dir_dict[key],
)
for key, value in memory_dict.items()
if not value.done()
}
memory_dict = _refresh_memory_dict(
memory_dict=memory_dict,
cache_dir_dict=cache_dir_dict,
process_dict=process_dict,
terminate_function=terminate_function,
pysqa_config_directory=pysqa_config_directory,
backend=backend,
)

@@ -280,1 +270,70 @@

return task_args, task_kwargs, future_wait_key_lst
def _refresh_memory_dict(
    memory_dict: dict,
    cache_dir_dict: dict,
    process_dict: dict,
    terminate_function: Optional[Callable] = None,
    pysqa_config_directory: Optional[str] = None,
    backend: Optional[str] = None,
) -> dict:
    """
    Refresh memory dictionary

    Terminate the processes belonging to cancelled futures, then rebuild the
    mapping from the tasks whose futures are still pending.

    Args:
        memory_dict (dict): dictionary with task keys and future objects
        cache_dir_dict (dict): dictionary with task keys and cache directories
        process_dict (dict): dictionary with task keys and process reference.
        terminate_function (callable): The function to terminate the tasks.
        pysqa_config_directory (str): path to the pysqa config directory (only for pysqa based backend).
        backend (str): name of the backend used to spawn tasks.

    Returns:
        dict: Updated memory dictionary
    """
    # Collect the keys of futures that finished because they were cancelled.
    cancelled_keys = {
        task_key
        for task_key, future_obj in memory_dict.items()
        if future_obj.done() and future_obj.cancelled()
    }
    _cancel_processes(
        process_dict={
            task_key: process
            for task_key, process in process_dict.items()
            if task_key in cancelled_keys
        },
        terminate_function=terminate_function,
        pysqa_config_directory=pysqa_config_directory,
        backend=backend,
    )
    # Keep only the tasks that are not finished yet, refreshing each one
    # against its cached output on disk.
    refreshed_dict = {}
    for task_key, future_obj in memory_dict.items():
        if future_obj.done():
            continue
        refreshed_dict[task_key] = _check_task_output(
            task_key=task_key,
            future_obj=future_obj,
            cache_directory=cache_dir_dict[task_key],
        )
    return refreshed_dict
def _cancel_processes(
process_dict: dict,
terminate_function: Optional[Callable] = None,
pysqa_config_directory: Optional[str] = None,
backend: Optional[str] = None,
):
"""
Cancel processes
Args:
process_dict (dict): dictionary with task keys and process reference.
terminate_function (callable): The function to terminate the tasks.
pysqa_config_directory (str): path to the pysqa config directory (only for pysqa based backend).
backend (str): name of the backend used to spawn tasks.
"""
if terminate_function is not None and terminate_function == terminate_subprocess:
for task in process_dict.values():
terminate_function(task=task)
elif terminate_function is not None and backend is not None:
for queue_id in process_dict.values():
terminate_function(
queue_id=queue_id,
config_directory=pysqa_config_directory,
backend=backend,
)

@@ -8,2 +8,5 @@ import os

from executorlib.standalone.inputcheck import check_file_exists
from executorlib.standalone.interactive.spawner import (
set_current_directory_in_environment,
)
from executorlib.standalone.scheduler import pysqa_execute_command, terminate_with_pysqa

@@ -89,2 +92,3 @@

submit_kwargs.update(resource_dict)
set_current_directory_in_environment()
queue_id = qa.submit_job(**submit_kwargs)

@@ -91,0 +95,0 @@ dump(file_name=file_name, data_dict={"queue_id": queue_id})

@@ -8,2 +8,5 @@ import os

from executorlib.standalone.inputcheck import check_file_exists
from executorlib.standalone.interactive.spawner import (
set_current_directory_in_environment,
)

@@ -57,7 +60,8 @@

raise ValueError("backend parameter is not supported for subprocess spawner.")
if resource_dict is None:
resource_dict = {}
cwd = resource_dict.get("cwd", cache_directory)
cwd = _get_working_directory(
cache_directory=cache_directory, resource_dict=resource_dict
)
if cwd is not None:
os.makedirs(cwd, exist_ok=True)
set_current_directory_in_environment()
return subprocess.Popen(command, universal_newlines=True, cwd=cwd)

@@ -76,1 +80,12 @@

time.sleep(0.1)
def _get_working_directory(
cache_directory: Optional[str] = None, resource_dict: Optional[dict] = None
):
if resource_dict is None:
resource_dict = {}
if "cwd" in resource_dict and resource_dict["cwd"] is not None:
return resource_dict["cwd"]
else:
return cache_directory

@@ -329,3 +329,3 @@ import inspect

elif "object at" in node_value_str:
short_name = node_value_str[1:-1].split()[0].split(".")[-1] + "()"
short_name = node_value_str[1:-1].split(maxsplit=1)[0].split(".")[-1] + "()"
elif "<function" in node_value_str:

@@ -336,3 +336,3 @@ short_name = node_value_str.split()[1] + "()"

elif "(" in node_value_str and ")" in node_value_str:
short_name = node_value_str.split("(")[0] + "()"
short_name = node_value_str.split("(", maxsplit=1)[0] + "()"
elif len(node_value_str) > 20:

@@ -339,0 +339,0 @@ short_name = node_value_str[:21] + "..."

@@ -8,3 +8,6 @@ import contextlib

from executorlib.standalone.interactive.spawner import BaseSpawner
from executorlib.standalone.interactive.spawner import (
BaseSpawner,
set_current_directory_in_environment,
)

@@ -122,2 +125,3 @@

)
set_current_directory_in_environment()
jobspec.environment = dict(os.environ)

@@ -124,0 +128,0 @@ if self._pmi_mode is not None:

@@ -9,3 +9,6 @@ import hashlib

from executorlib.standalone.inputcheck import validate_number_of_cores
from executorlib.standalone.interactive.spawner import BaseSpawner
from executorlib.standalone.interactive.spawner import (
BaseSpawner,
set_current_directory_in_environment,
)
from executorlib.standalone.scheduler import pysqa_execute_command, terminate_with_pysqa

@@ -187,2 +190,3 @@ from executorlib.task_scheduler.interactive.blockallocation import (

working_directory = os.path.abspath(hash)
set_current_directory_in_environment()
return queue_adapter.submit_job(

@@ -189,0 +193,0 @@ command=" ".join(self.generate_command(command_lst=command_lst)),