Latest Threat Research:SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains.Details
Socket
Book a DemoInstallSign in
Socket

bptk-py

Package Overview
Dependencies
Maintainers
1
Versions
109
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

bptk-py - npm Package Compare versions

Comparing version
1.9.2
to
1.9.5
+20
-3
bptk_py.egg-info/PKG-INFO

@@ -1,4 +0,4 @@

Metadata-Version: 2.1
Metadata-Version: 2.2
Name: bptk-py
Version: 1.9.2
Version: 1.9.5
Summary: A python simulation engine for System Dynamics & Agent based models

@@ -22,3 +22,2 @@ Author-email: transentis <support@transentis.com>

Requires-Dist: ipywidgets==8.1.5
Requires-Dist: pyyaml==6.0.1
Requires-Dist: xlsxwriter==3.1.9

@@ -80,2 +79,20 @@ Requires-Dist: parsimonious==0.10.0

### 1.9.5
* Fix publication issues
### 1.9.4
* Fix publication issues
### 1.9.3
* Fix agent.py serialize method
* Removed to_string method of agent.py
* Fix csv_datacollector.py
* Removed kinesis_datacollector.py, yaml_model_parser.py and serializer.py
* Fix model.py reset method
* Adjusted model.py configure_properties method (only dict-values allowed)
* added unittests
### 1.9.2

@@ -82,0 +99,0 @@

+0
-1

@@ -6,3 +6,2 @@ pandas==2.2.3

ipywidgets==8.1.5
pyyaml==6.0.1
xlsxwriter==3.1.9

@@ -9,0 +8,0 @@ parsimonious==0.10.0

@@ -26,3 +26,2 @@ LICENSE

BPTK_Py/modeling/datacollectors/csv_datacollector.py
BPTK_Py/modeling/datacollectors/kinesis_datacollector.py
BPTK_Py/modelmonitor/__init__.py

@@ -35,3 +34,2 @@ BPTK_Py/modelmonitor/file_monitor.py

BPTK_Py/modelparser/parser_factory.py
BPTK_Py/modelparser/yaml_model_parser.py
BPTK_Py/scenariomanager/__init__.py

@@ -89,3 +87,2 @@ BPTK_Py/scenariomanager/scenario.py

BPTK_Py/util/lookup_data.py
BPTK_Py/util/serializer.py
BPTK_Py/util/statecompression.py

@@ -92,0 +89,0 @@ BPTK_Py/visualizations/__init__.py

import BPTK_Py.sddsl.functions as sd_functions
from importlib.metadata import version
from .modeling import Event, DelayedEvent, Agent, DataCollector, Model, Scheduler, SimultaneousScheduler
from .modeling import Event, DelayedEvent, Agent, DataCollector, Model, Scheduler, SimultaneousScheduler, CSVDataCollector, AgentDataCollector
from .sddsl import Module

@@ -5,0 +5,0 @@ from .bptk import bptk, conf

@@ -71,3 +71,3 @@ # /`-

for key, value in self.properties.items():
output[key] = self.properties[value]['value']
output[key] = value['value']

@@ -355,7 +355,2 @@ output['id'] = self.id

def to_string(self):
#TODO might want to rename this or just remove it ...
return self.state
@staticmethod

@@ -362,0 +357,0 @@ def is_event_relevant(threshold):

### Package for additional Data Collectors for Agent based simulations ###
from .csv_datacollector import CSVDataCollector
#from .kinesis_datacollector import KinesisDataCollector
from .agent_datacollector import AgentDataCollector
from .agent_datacollector import AgentDataCollector

@@ -40,8 +40,11 @@ # /`-

self.column_names = None
self.cache = {}
self.observed_ids = []
#Probably not necessary
self.headlines = None
self.column_names = None

@@ -64,8 +67,8 @@

self.cache = {}
def reset(self):
self.agent_statistics = {}
self.event_statistics = {}
self.cache = {}
self.observed_ids = []
def collect_agent_statistics(self, sim_time, agents):

@@ -101,9 +104,11 @@ """

if not id in self.observed_ids:
self.observed_ids[id] = ""
outfile.write(";".join(stats.keys()))
self.observed_ids.append(id)
if filename not in self.cache:
self.cache[filename] = []
self.cache[filename] += [stats.values()]
self.cache[filename] += [stats.values()]
outfile.write(";".join(stats.keys()))
outfile.write("\n" + ";".join([str(x) for x in stats.values()]))
def statistics(self):

@@ -110,0 +115,0 @@ """

@@ -134,5 +134,2 @@ # /`-

self.data_collector.agent_statistics = {}
self.data_collector.event_statistics = {}
self.reset_cache()

@@ -555,4 +552,2 @@

Agent type that is to receive the event
num_agents: Integer.
Number of random agents that should receive the event
event_factory: Function.

@@ -577,29 +572,11 @@ The factory (typically a lambda function) that generates the desired event for a given target agent type. The function receives the agent_id as its parameter.

if type(properties) == list:
for property in properties:
try:
prop_name = property["name"]
prop_val = property["value"]
prop_type = property["type"]
except KeyError as e:
prop_name = list(property.keys())[0]
prop_val = property[prop_name]["value"]
prop_type = property[prop_name]["type"]
for name, property in properties.items():
self.set_property(name, property)
self.set_property(prop_name,prop_val)
#Lookup properties need to be added to the point dictionary also, for compatibility with SD models
if prop_type == "Lookup":
self.points[prop_name] = prop_val
if property["type"] == "Lookup":
self.points[name] = property["value"]
else:
for name, property in properties.items():
self.set_property(name, property)
#Lookup properties need to be added to the point dictionary also, for compatibility with SD models
if property["type"] == "Lookup":
self.points[name] = property["value"]
def configure_agents(self,config):

@@ -606,0 +583,0 @@ """

@@ -1,2 +0,1 @@

from .yaml_model_parser import YAMLModelParser
from .meta_model_creator import ModelCreator

@@ -3,0 +2,0 @@ from .meta_model_creator import serializable_agent

@@ -80,9 +80,2 @@ # /`-

def import_class(name):
components = name.split('.')
mod = __import__(components[0])
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
from copy import deepcopy

@@ -89,0 +82,0 @@ model_to_dump = deepcopy(self)

@@ -13,6 +13,5 @@ # /`-

from ..logger import log
from .yaml_model_parser import YAMLModelParser
from .json_model_parser import JSONModelParser
parsers = {"yml": YAMLModelParser, "yaml": YAMLModelParser,"json": JSONModelParser}
parsers = {"json": JSONModelParser}

@@ -19,0 +18,0 @@ def ParserFactory(filename):

@@ -123,18 +123,13 @@ # /`-

for constant, value in self.constants.items():
try:
if type(value) == str:
self.model.equations[constant] = eval("lambda t : " + value)
log("[INFO] {}, {}: Changed constant {} to {}".format(self.scenario_manager, self.name, constant,
if type(value) == str:
self.model.equations[constant] = eval("lambda t : " + value)
log("[INFO] {}, {}: Changed constant {} to {}".format(self.scenario_manager, self.name, constant,
str(value)))
elif type(value) == int or type(value) == float:
self.model.equations[constant] = eval("lambda t: " + str(value))
log("[INFO] {}, {}: Changed constant {} to {}".format(self.scenario_manager, self.name, constant,
elif type(value) == int or type(value) == float:
self.model.equations[constant] = eval("lambda t: " + str(value))
log("[INFO] {}, {}: Changed constant {} to {}".format(self.scenario_manager, self.name, constant,
str(value)))
else:
log("[ERROR] Invalid type for constant {}: {}".format(constant, str(value)))
else:
log("[ERROR] Invalid type for constant {}: {}".format(constant, str(value)))
except ValueError as e:
log("[ERROR] Attempted to evaluate an expression that I cannot evaluate. Error message: {}".format(
str(e)))
else:

@@ -152,17 +147,12 @@ log(

if self.model is not None:
for name, value in self.points.items():
try:
if type(value) == str:
self.model.points[name] = eval(value)
log("[INFO] {}, {}: Changed points {} to {}".format(self.scenario_manager, self.name, name, str(value)))
elif type(value) == list:
self.model.points[name] = value
log("[INFO] {}, {}: Changed points {} to {}".format(self.scenario_manager, self.name, name, str(value)))
else:
log("[ERROR] Invalid type for points {}: {}".format(name, str(value)))
if type(value) == str:
self.model.points[name] = eval(value)
log("[INFO] {}, {}: Changed points {} to {}".format(self.scenario_manager, self.name, name, str(value)))
elif type(value) == list:
self.model.points[name] = value
log("[INFO] {}, {}: Changed points {} to {}".format(self.scenario_manager, self.name, name, str(value)))
else:
log("[ERROR] Invalid type for points {}: {}".format(name, str(value)))
except ValueError as e:
log("[ERROR] Attempted to evaluate an expression that I cannot evaluate. Error message: {}".format(
str(e)))

@@ -169,0 +159,0 @@ else:

@@ -1,4 +0,4 @@

Metadata-Version: 2.1
Metadata-Version: 2.2
Name: bptk-py
Version: 1.9.2
Version: 1.9.5
Summary: A python simulation engine for System Dynamics & Agent based models

@@ -22,3 +22,2 @@ Author-email: transentis <support@transentis.com>

Requires-Dist: ipywidgets==8.1.5
Requires-Dist: pyyaml==6.0.1
Requires-Dist: xlsxwriter==3.1.9

@@ -80,2 +79,20 @@ Requires-Dist: parsimonious==0.10.0

### 1.9.5
* Fix publication issues
### 1.9.4
* Fix publication issues
### 1.9.3
* Fix agent.py serialize method
* Removed to_string method of agent.py
* Fix csv_datacollector.py
* Removed kinesis_datacollector.py, yaml_model_parser.py and serializer.py
* Fix model.py reset method
* Adjusted model.py configure_properties method (only dict-values allowed)
* added unittests
### 1.9.2

@@ -82,0 +99,0 @@

@@ -14,3 +14,2 @@ [build-system]

"ipywidgets==8.1.5",
"pyyaml==6.0.1",
"xlsxwriter==3.1.9",

@@ -17,0 +16,0 @@ "parsimonious==0.10.0",

@@ -44,2 +44,20 @@ # Business Prototyping Toolkit for Python

### 1.9.5
* Fix publication issues
### 1.9.4
* Fix publication issues
### 1.9.3
* Fix agent.py serialize method
* Removed to_string method of agent.py
* Fix csv_datacollector.py
* Removed kinesis_datacollector.py, yaml_model_parser.py and serializer.py
* Fix model.py reset method
* Adjusted model.py configure_properties method (only dict-values allowed)
* added unittests
### 1.9.2

@@ -46,0 +64,0 @@

@@ -15,3 +15,3 @@ from setuptools import setup

def get_version():
return '1.9.2'
return '1.9.5'

@@ -18,0 +18,0 @@ setup(version=get_version(),

# /`-
# _ _ _ /####`-
# | | | | (_) /########`-
# | |_ _ __ __ _ _ __ ___ ___ _ __ | |_ _ ___ /###########`-
# | __| '__/ _` | '_ \/ __|/ _ \ '_ \| __| / __| ____ -###########/
# | |_| | | (_| | | | \__ \ __/ | | | |_| \__ \ | | `-#######/
# \__|_| \__,_|_| |_|___/\___|_| |_|\__|_|___/ |____| `- # /
#
# Copyright (c) 2018 transentis labs GmbH
# MIT License
#########################
## DATACOLLECTOR CLASS ##
#########################
class KinesisDataCollector:
    """
    A datacollector for the agent based simulation.

    Collects the output data of each agent/event and pushes agent statistics
    to an AWS Kinesis stream (batches of 100 records, one batch per 1/10th
    second). For now it only outputs the agent statistics, not the event
    statistics.
    """

    def __init__(self, target_streams=None, region="eu-west-1"):
        """
        :param target_streams: List of streams to fire the events into
            (defaults to ["BPTK-Demo"]).
        :param region: AWS region where the stream(s) are located.
        """
        # Avoid a shared mutable default argument: build the default per call.
        if target_streams is None:
            target_streams = ["BPTK-Demo"]
        self.agent_statistics = {}
        self.event_statistics = {}
        self.client = kinesisProducer(stream_names=target_streams, region=region)

    def record_event(self, time, event):
        """
        Record an event.

        :param time: t (int)
        :param event: event instance
        :return: None
        """
        if time not in self.event_statistics:
            self.event_statistics[time] = {}
        if event.name not in self.event_statistics[time]:
            self.event_statistics[time][event.name] = 0
        self.event_statistics[time][event.name] += 1

    def reset(self):
        """
        Clear all collected statistics.

        Fix: previously only agent_statistics was cleared (while the docstring
        claimed the method had no effect); event_statistics is now reset as
        well, matching the other data collectors' reset() behavior.

        :return: None
        """
        self.agent_statistics = {}
        self.event_statistics = {}

    def collect_agent_statistics(self, sim_time, agents):
        """
        Collect agent statistics from agent(s) and ship them to Kinesis.

        :param sim_time: t (int)
        :param agents: list of Agent
        :return: None
        """
        for agent in agents:
            stats = {"id": agent.id, "time": sim_time}
            for agent_property_name, agent_property_value in agent.properties.items():
                stats[agent_property_name] = agent_property_value['value']
            self.client.send_data(stats)

    def statistics(self):
        """
        Get the statistics collected. This collector streams data out
        immediately instead of keeping it locally.

        :return: Dictionary (always empty)
        """
        return {}
## You will need boto for accessing AWS.
try:
    from boto import kinesis
except ModuleNotFoundError:
    # Fixed typo in the user-facing message ("acessing" -> "accessing");
    # the bound exception variable was unused, so it is no longer captured.
    print("Module boto not available. This is required for accessing your AWS account. Please install using pip.")
import json
from time import sleep
from time import time
class kinesisProducer():
    """
    Thin wrapper around boto's Kinesis client that buffers records locally
    and flushes them to one or more streams in batches of 100 (one batch
    per 1/10th second).
    """

    def __init__(self, region="eu-west-1", stream_names=None):
        """
        Please make sure you used "aws configure" and have access to the stream(s) given.

        :param region: AWS region where the streams(s) are located
        :param stream_names: List of stream(s) to output the data to
            (defaults to ["BPTK-Demo"]).
        """
        # Avoid a shared mutable default argument: build the default per call.
        if stream_names is None:
            stream_names = ["BPTK-Demo"]
        ## Create stream if not already existing
        self.kinesis = kinesis.connect_to_region(region)
        self.stream_names = stream_names
        for stream_name in stream_names:
            if not stream_name in self.kinesis.list_streams()['StreamNames']:
                print("Target Stream does not yet exist. Attempting to create.")
                self.kinesis.create_stream(stream_name, shard_count=8)
                self.kinesis.describe_stream(stream_name)
                # Give AWS a moment to finish provisioning the stream.
                sleep(3)
        self.records = []

    def send_data(self, data=None):
        '''
        Send data to the AWS Kinesis stream(s).

        The record is buffered locally; every time 100 records have
        accumulated they are flushed to each configured stream.

        :param data: Dictionary of agent statistics (must contain an "id" key)
        :return:
        '''
        if data:
            data["timestamp"] = time()
            record = {'Data': json.dumps(data), 'PartitionKey': str(hash(data["id"]))}
            self.records.append(record)
            ## Only send bunches of 100 records to Kinesis
            if len(self.records) == 100:
                for stream_name in self.stream_names:
                    self.kinesis.put_records(self.records, stream_name)
                self.records = []
                sleep(0.1)
# /`-
# _ _ _ /####`-
# | | | | (_) /########`-
# | |_ _ __ __ _ _ __ ___ ___ _ __ | |_ _ ___ /###########`-
# | __| '__/ _` | '_ \/ __|/ _ \ '_ \| __| / __| ____ -###########/
# | |_| | | (_| | | | \__ \ __/ | | | |_| \__ \ | | `-#######/
# \__|_| \__,_|_| |_|___/\___|_| |_|\__|_|___/ |____| `- # /
#
# Copyright (c) 2018 transentis labs GmbH
#
import copy
from .meta_model_creator import serializable_agent
from .meta_model_creator import ModelCreator
from BPTK_Py.logger import log
class YAMLModelParser():
    """
    Parser for YAML/YML files. Returns a ModelCreator instance and the instantiated Model object.
    """

    # Maps Python property types to the type names used by the modeling framework.
    PROPMAP = {float: "Double", str: "String", int: "Integer", dict: "Dictionary"}

    def parse_model(self, filename, silent=False):
        """
        Parse the given YAML model definition and build the model from it.

        :param filename: path to the YAML/YML model file
        :param silent: if True, suppress output while instantiating agents/models
        :return: ModelCreator describing the parsed model
        :raises KeyError: re-raised when an agent/node lacks a type declaration
        """

        def import_class(name):
            # Resolve a dotted path such as "package.module.Class" to the class object.
            components = name.split('.')
            mod = __import__(components[0])
            for comp in components[1:]:
                mod = getattr(mod, comp)
            return mod

        # NOTE(review): the previously-defined nested helper `fullname` was
        # never used anywhere in this method and has been removed.
        import yaml
        with open(filename, 'r') as stream:
            model = yaml.load(stream, Loader=yaml.FullLoader)["Model"]

        # IN CASE, USER DID NOT WRITE THE MODEL DESTINATION
        if "type" not in model.keys():
            model["type"] = "abm"

        ## HANDLE SD MODELS
        if model["type"] == "sd":
            return ModelCreator(type="sd", name="unimportant", model="unimportant", silent=silent, json_dict=model)

        if "model" not in model.keys():
            model["model"] = model["name"].lower() + "." + model["name"]

        job = ModelCreator(name=model["name"], model=model["model"], silent=silent)
        datacollector = None if "datacollector" not in model.keys() else import_class(model["datacollector"])()

        for scenario in model["scenarios"]:
            scenario_name = list(scenario.keys())[0]
            params = scenario[scenario_name]
            starttime = params["starttime"]
            duration = params["duration"]
            dt = params["dt"]
            scenario_properties = {} if "properties" not in scenario[scenario_name].keys() else scenario[scenario_name]["properties"]

            # Let's keep it generic: "nodes" is accepted as an alias for "agents".
            if "nodes" in params.keys():
                params["agents"] = copy.deepcopy(params["nodes"])
                params["nodes"] = {}

            agents = {} if not "agents" in params.keys() else params["agents"]
            job.add_scenario(name=scenario_name, starttime=starttime, stoptime=duration, dt=dt, properties=scenario_properties, datacollector=datacollector)

            if agents:
                for agent in agents:
                    name = list(agent.keys())[0]
                    try:
                        agent_type = "" if "type" not in agent[name].keys() else agent[name]["type"]
                    except KeyError:
                        log("[ERROR] Missing type declaration for node {}".format(name))
                        # Bare raise keeps the original traceback intact.
                        raise
                    count = 1 if "count" not in agent[name].keys() else agent[name]["count"]
                    step = 1 if "step" not in agent[name].keys() else agent[name]["step"]
                    properties = [key for key in agent[name].keys() if
                                  key.lower() not in ["type", "count", "step", "classname"]]
                    try:
                        agent_class = import_class(agent_type)
                        agent_obj = agent_class(name=name, count=count, step=step, silent=silent)
                    except Exception:
                        # Fall back to the generic serializable agent driven by the "classname" directive.
                        agent_class = serializable_agent
                        log("[WARN] Error instantiating model. Trying the classname directive")
                        agent_obj = agent_class(name=name, count=count, step=step, silent=silent,
                                                classname=agent[name]["classname"])
                    job.add_agent(scenario=scenario_name, agent=agent_obj)
                    # Local renamed from `property` to avoid shadowing the builtin.
                    for prop_key in properties:
                        prop_val = str(agent[name][prop_key]) if type(agent[name][prop_key]) == dict else agent[name][prop_key]
                        if type(prop_val) == str:
                            # Dictionaries may be encoded as strings; try to detect them with eval().
                            # SECURITY NOTE: eval() executes arbitrary code from the model file —
                            # only parse trusted model files.
                            try:
                                prop_type = YAMLModelParser.PROPMAP[type(eval(prop_val))]
                            except Exception:
                                # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt propagate.
                                prop_type = YAMLModelParser.PROPMAP[type(prop_val)]
                        else:
                            prop_type = YAMLModelParser.PROPMAP[type(prop_val)]
                        agent_obj.set_property(prop_key, prop_type, prop_val)
        return job
# /`-
# _ _ _ /####`-
#| | | | (_) /########`-
#| |_ _ __ __ _ _ __ ___ ___ _ __ | |_ _ ___ /###########`-
#| __| '__/ _` | '_ \/ __|/ _ \ '_ \| __| / __| ____ -###########/
#| |_| | | (_| | | | \__ \ __/ | | | |_| \__ \ | | `-#######/
# \__|_| \__,_|_| |_|___/\___|_| |_|\__|_|___/ |____| `- # /
#
# Copyright (c) 2018 transentis labs GmbH
# MIT License
import json
class serializer():
    """
    Simple serializer that walks an object graph and renders it as
    JSON-compatible dictionaries, lists and strings.
    """

    def __init__(self):
        """Start with an empty list of already-visited object addresses."""
        self.addresses = []

    def serialize_to_json(self, obj, filename):
        """Serialize *obj* and write the result to *filename* as pretty-printed JSON."""
        self.addresses = []
        serialized = self.serialize(obj)
        with open(filename, "w") as output_file:
            output_file.write(json.dumps(serialized, indent=4))

    def serialize(self, obj):
        """Recursively convert *obj* into dicts, lists and strings."""
        kind = type(obj)
        if kind is dict:
            return {key: self.serialize(entry) for key, entry in obj.items()}
        if kind is list:
            return self.serialize_list(obj)
        if kind in (int, float, str, bool):
            # Primitive values are rendered as their string form.
            return str(obj)
        if obj is None:
            return "None"
        # Ordinary object: record its identity and type, then serialize its vars().
        rendered = {
            "__address__": hex(id(obj)),
            "__type__": str(kind).replace("<class", "").replace("\'", "").replace(">", "").replace(" ", ""),
        }
        if rendered["__address__"] not in self.addresses:
            # Only descend into objects not serialized before, to avoid endless
            # recursion; repeat visits keep just the address for later rebuilding.
            self.addresses += [rendered["__address__"]]
            for attr_name, attr_value in vars(obj).items():
                rendered[attr_name] = self.serialize(attr_value)
        return rendered

    def serialize_list(self, lis):
        """Serialize each list element, keeping primitive values unchanged."""
        rendered = []
        for element in lis:
            if type(element) in (int, float, str, bool):
                rendered.append(element)
            elif type(element) is list:
                rendered.append(self.serialize_list(element))
            else:
                # Dicts and arbitrary objects both go through serialize().
                rendered.append(self.serialize(element))
        return rendered
def deserialize(self,dictionary):
obj = None
## First trials deserialize
# Beim deserialisieren müsste ich also immer auf nen Key "__address__" checken
# und daraufhin entweder das Objekt dranhängen, das ich schon kenne, oder es suchen und dann weiter machen
# Oder zwei Schritte: erst alle Objekte bauen
# Dann links fixen, wenn fehlende gefunden
# Lambda funktionen sind komplizierter