You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a Demo · Sign in
Socket

isitfit

Package Overview
Dependencies
Maintainers
1
Versions
73
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

isitfit — PyPI package: compare versions

Comparing version
0.20.4
to
0.20.5
+1
-1
isitfit.egg-info/PKG-INFO
Metadata-Version: 2.1
Name: isitfit
Version: 0.20.4
Version: 0.20.5
Summary: Command-line tool to calculate excess AWS cloud resource capacity

@@ -5,0 +5,0 @@ Home-page: https://gitlab.com/autofitcloud/isitfit

@@ -1,1 +0,1 @@

isitfit_version='0.20.4'
isitfit_version='0.20.5'

@@ -30,3 +30,3 @@ from .. import isitfit_version

@datadog.command(help="Dump raw data from datadog for a day of an EC2 ID", cls=IsitfitCommand)
@click.argument('date')
@click.argument('date', type=click.DateTime(formats=["%Y-%m-%d"]))
@click.argument('aws_id')

@@ -43,3 +43,7 @@ @click.pass_context

df = ddgL1.get_metrics_all(aws_id)
# drop nhours column since useless here
del df['nhours']
print("Daily usage")
print(df)
print("")

@@ -52,7 +56,7 @@ # convert aws ID to datadog hostname

SECONDS_PER_POINT = 60*10 # 60*60 # *24
query = 'system.cpu.idle{host:%s}.rollup(min,%i)'%(dd_hostname, SECONDS_PER_POINT)
import datetime as dt
dt_start="2020-01-24 07:00:00"
dt_end="2020-01-24 09:59:59"
date_str = date.strftime("%Y-%m-%d")
dt_start="%s 00:00:00"%date_str
dt_end="%s 23:59:59"%date_str
dt_start = dt.datetime.strptime(dt_start, "%Y-%m-%d %H:%M:%S")

@@ -72,17 +76,31 @@ dt_end = dt.datetime.strptime(dt_end, "%Y-%m-%d %H:%M:%S")

# repeat as dataframe
col_i = 'cpu_idle_min'
metric_name = 'system.cpu.idle'
from isitfit.cost.metrics_datadog import DatadogApiWrap
apiwrap = DatadogApiWrap()
df = apiwrap.metric_query(
dd_hostname=dd_hostname,
start=ue_start,
end=ue_end,
query=query,
metric_name=metric_name,
dfcol_name=col_i
)
print(df)
df_all = []
metric_all = [
('system.cpu.idle', 'cpu_idle_min', 'system.cpu.idle{host:%s}.rollup(min,%i)'),
('system.mem.free', 'mem_free_min', 'system.mem.free{host:%s}.rollup(min,%i)'),
('system.cpu.idle', 'cpu_idle_max', 'system.cpu.idle{host:%s}.rollup(max,%i)'),
('system.mem.free', 'mem_free_max', 'system.mem.free{host:%s}.rollup(max,%i)')
]
for metric_name, col_name, query_t in metric_all:
query_v = query_t%(dd_hostname, SECONDS_PER_POINT)
df_i = apiwrap.metric_query(
dd_hostname=dd_hostname,
start=ue_start,
end=ue_end,
query=query_v,
metric_name=metric_name,
dfcol_name=col_name
)
df_i.set_index('ts_dt', inplace=True)
df_all.append(df_i)
# concat all
import pandas as pd
df_all = pd.concat(df_all, axis=1)
pd.set_option("display.max_rows", None)
print("Datadog details")
print(df_all)
# memory_total = ddgL1._get_meta()['memory_total']

@@ -89,0 +107,0 @@ # df['ram_free_min'] = df.ram_free_min / memory_total * 100

Metadata-Version: 2.1
Name: isitfit
Version: 0.20.4
Version: 0.20.5
Summary: Command-line tool to calculate excess AWS cloud resource capacity

@@ -5,0 +5,0 @@ Home-page: https://gitlab.com/autofitcloud/isitfit