Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a Demo · Install · Sign in
Socket

impdar

Package Overview
Dependencies
Maintainers
1
Versions
26
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

impdar - PyPI Package Compare versions

Comparing version
1.2.0
to
1.2.1
+3
-2
PKG-INFO

@@ -1,4 +0,4 @@

Metadata-Version: 2.1
Metadata-Version: 2.4
Name: impdar
Version: 1.2.0
Version: 1.2.1
Summary: Scripts for impulse radar

@@ -15,2 +15,3 @@ Author-email: David Lilien <dlilien@iu.edu>

Requires-Dist: segyio
Dynamic: license-file

@@ -17,0 +18,0 @@ # ImpDAR: an impulse radar processor

+1
-1
[project]
name = "impdar"
version = "1.2.0"
version = "1.2.1"
description = "Scripts for impulse radar"

@@ -5,0 +5,0 @@ readme = "README.md"

@@ -1,4 +0,4 @@

Metadata-Version: 2.1
Metadata-Version: 2.4
Name: impdar
Version: 1.2.0
Version: 1.2.1
Summary: Scripts for impulse radar

@@ -15,2 +15,3 @@ Author-email: David Lilien <dlilien@iu.edu>

Requires-Dist: segyio
Dynamic: license-file

@@ -17,0 +18,0 @@ # ImpDAR: an impulse radar processor

@@ -28,3 +28,3 @@ #! /usr/bin/env python

help='File(s) to load')
parser_load.add_argument('-channel', type=int, default=1,
parser_load.add_argument('-channel', type=str, default="processed",
help='Receiver channel to load this is primarily for the St. Olaf HF data.')

@@ -31,0 +31,0 @@ parser_load.add_argument('-gps_offset',

@@ -67,3 +67,3 @@ #! /usr/bin/env python

"""
outmat = {att: (getattr(self, att) if getattr(self, att) is not None else np.NaN) for att in self.attrs}
outmat = {att: (getattr(self, att) if getattr(self, att) is not None else np.nan) for att in self.attrs}
return outmat

@@ -137,3 +137,3 @@

"""
outmat = {att: (getattr(self, att) if getattr(self, att) is not None else np.NaN) for att in self.attrs}
outmat = {att: (getattr(self, att) if getattr(self, att) is not None else np.nan) for att in self.attrs}
return outmat

@@ -140,0 +140,0 @@

@@ -68,4 +68,8 @@ #! /usr/bin/env python

out_cs = osr.SpatialReference()
out_cs.SetFromUserInput(t_srs)
try:
out_cs.SetFromUserInput(t_srs)
except TypeError:
out_cs.SetFromUserInput(t_srs[0])
try:
out_cs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)

@@ -386,3 +390,3 @@ except AttributeError:

else:
fill_value = np.NaN
fill_value = np.nan

@@ -389,0 +393,0 @@ if type(dats) not in [list, tuple]:

@@ -126,3 +126,3 @@ #! /usr/bin/env python

for fn in fns_in:
dat += load_UoA.load_UoA_h5(fn, gps_offset=gps_offset)
dat += load_UoA.load_UoA_h5(fn, gps_offset=gps_offset, channel=channel)
else:

@@ -129,0 +129,0 @@ raise ImportError('You need h5py for UoA')

@@ -79,4 +79,3 @@ #! /usr/bin/env python

h5_data.tnum = len(list(dset.keys()))
h5_data.snum = len(
dset['location_0']['datacapture_0']['echogram_0'])
h5_data.snum = len(dset['location_0']['datacapture_0']['echogram_0'])
lat = np.zeros((h5_data.tnum,))

@@ -115,3 +114,3 @@ lon = np.zeros((h5_data.tnum,))

if type(dset['location_0']['datacapture_'+ch]['echogram_'+ch].attrs[dig_meta_str]) == str:
if type(dset['location_0']['datacapture_'+ch]['echogram_'+ch].attrs[dig_meta_str]) is str:
digitizer_data = dset['location_0']['datacapture_'+ch][

@@ -128,3 +127,3 @@ 'echogram_'+ch].attrs[dig_meta_str]

h5_data.snum = nsamps
h5_data.data[:, location_num] = dset[
h5_data.data[:nsamps, location_num] = dset[
'location_{:d}'.format(location_num)][

@@ -134,3 +133,3 @@ 'datacapture_'+ch]['echogram_'+ch]

'datacapture_'+ch]['echogram_'+ch].attrs[
gps_cluster_str]) == str:
gps_cluster_str]) is str:
gps_data = dset['location_{:d}'.format(location_num)][

@@ -157,4 +156,10 @@ 'datacapture_'+ch]['echogram_'+ch].attrs[

lon[location_num] = np.nan
time[location_num] = float(_xmlGetVal(gps_data, gps_timestamp_str))
h5_data.elev[location_num] = float(_xmlGetVal(gps_data, alt_asl))
try:
time[location_num] = float(_xmlGetVal(gps_data, gps_timestamp_str))
except ValueError:
time[location_num] = np.nan
try:
h5_data.elev[location_num] = float(_xmlGetVal(gps_data, alt_asl))
except ValueError:
h5_data.elev[location_num] = np.nan
else:

@@ -161,0 +166,0 @@ lat[location_num] = np.nan

@@ -39,3 +39,3 @@ #! /usr/bin/env python

self.bytes_per_point = np.zeros((1, tnum))
self.n_stackes = np.zeros((1, tnum))
self.n_stacks = np.zeros((1, tnum))
self.time_window = np.zeros((1, tnum))

@@ -61,3 +61,3 @@ self.pos = np.zeros((3, tnum))

self.bytes_per_point[0, self.header_index] = header[5]
self.n_stackes[0, self.header_index] = header[7]
self.n_stacks[0, self.header_index] = header[7]
self.time_window[0, self.header_index] = header[8]

@@ -177,6 +177,5 @@ self.pos[0, self.header_index] = header[9]

try:
strtypes = (unicode, str)
unicode
openmode_unicode = 'rU'
except NameError:
strtypes = (str, )
openmode_unicode = 'r'

@@ -194,3 +193,2 @@

fin.seek(0)
print(pe_data.version)
for i, line in enumerate(fin):

@@ -211,7 +209,13 @@ if 'TRACES' in line or 'NUMBER OF TRACES' in line:

doy = (int(line[:4]), int(line[5:7]), int(line[8:10]))
if i == 2 and pe_data.version != '1.0':
doy = (int(line[6:10]), int(line[:2]), int(line[3:5]))
day_offset = datetime.datetime(doy[0], doy[1], doy[2], 0, 0, 0)
elif i == 2 and float(pe_data.version) <= 1.5:
try:
doy = (int(line[6:10]), int(line[:2]), int(line[3:5]))
except ValueError:
doy = (int(line[28:32]), int(line[34:36]), int(line[36:38]))
day_offset = datetime.datetime(doy[0], doy[1], doy[2], 0, 0, 0)
elif i == 2 and float(pe_data.version) > 1.5:
fmt = "%Y-%b-%d\nT%H:%M:%S"
day_offset = datetime.datetime.strptime(line + "T00:00:00", fmt)
day_offset = datetime.datetime(doy[0], doy[1], doy[2], 0, 0, 0)
if pe_data.version == '1.0':

@@ -218,0 +222,0 @@ pe_data.data = np.zeros((pe_data.snum, pe_data.tnum), dtype=np.int16)

@@ -86,3 +86,3 @@ #! /usr/bin/env python

def load_UoA_h5(fn, gps_offset=0.0):
def load_UoA_h5(fn, gps_offset=0.0, channel="processed"):
"""Load MCoRDS data in .mat format downloaded from the CReSIS ftp client

@@ -99,3 +99,3 @@

raise ValueError('Can only unpack MultiChannel UoA data')
if 'processed' in fin:
if channel == "processed" and 'processed' in fin:
for name in fin['processed'].keys():

@@ -109,17 +109,28 @@ for integrator in fin['processed'][name].keys():

data_list.append(UoA_data)
else:
print('No processed data found, reading channels')
for i in range(8):
if f'channel_{i}' in fin:
for integrator in fin[f'channel_{i}'].keys():
grp = fin[f'channel_{i}'][integrator]
UoA_data = RadarData(None)
UoA_data.fn = fn[:-3] + f'_ch{i}_Int' + integrator[-1]
if f'channel_{channel}' in fin:
for integrator in fin[f'channel_{channel}'].keys():
grp = fin[f'channel_{channel}'][integrator]
UoA_data = RadarData(None)
UoA_data.fn = fn[:-3] + f'_ch{channel}_Int' + integrator[-1]
UoA_data.chan = i
_load_group(UoA_data, grp, gps_offset)
UoA_data.chan = channel
_load_group(UoA_data, grp, gps_offset)
data_list.append(UoA_data)
data_list.append(UoA_data)
else:
print('No processed data found, reading channels')
for i in range(8):
if f'channel_{i}' in fin:
for integrator in fin[f'channel_{i}'].keys():
grp = fin[f'channel_{i}'][integrator]
UoA_data = RadarData(None)
UoA_data.fn = fn[:-3] + f'_ch{i}_Int' + integrator[-1]
UoA_data.chan = i
_load_group(UoA_data, grp, gps_offset)
data_list.append(UoA_data)
return data_list

@@ -148,5 +159,3 @@

nminfo.elev = np.zeros_like(nminfo.lat)
print(nminfo.lat.shape)
print(nminfo.lon.shape)
print(nminfo.elev.shape)
print(nminfo.lat.shape, UoA_data.data.shape[1])

@@ -153,0 +162,0 @@ if nminfo.lat.shape[0] > UoA_data.tnum:

@@ -252,3 +252,3 @@ #! /usr/bin/env python

for attr in pick_attrs:
setattr(out.picks, attr, np.zeros((len(all_picks), out.tnum)) * np.NaN)
setattr(out.picks, attr, np.zeros((len(all_picks), out.tnum)) * np.nan)
start_ind = 0

@@ -255,0 +255,0 @@ for dat in radar_data:

@@ -344,2 +344,4 @@ #! /usr/bin/env python

transform, self.t_srs = gpslib.get_conversion(t_srs=t_srs)
elif self.t_srs is not None:
transform, _ = gpslib.get_conversion(t_srs=self.t_srs)
else:

@@ -346,0 +348,0 @@ transform, self.t_srs = gpslib.get_utm_conversion(np.nanmean(self.lat), np.nanmean(self.long))

@@ -13,5 +13,3 @@ #! /usr/bin/env python

import os
import unittest
import numpy as np
from impdar.lib.ApresData import ApresFlags, TimeDiffFlags, QuadPolFlags

@@ -18,0 +16,0 @@

@@ -91,3 +91,3 @@ #! /usr/bin/env python

dat = NoInitRadarData(big=True)
dat.decday[10] = np.NaN
dat.decday[10] = np.nan

@@ -94,0 +94,0 @@ gpslib.kinematic_gps_control(dat, np.arange(0, 2.0, 0.1), np.arange(40, 60, 1), np.arange(0, 2000, 100), np.arange(0, 20, 1), guess_offset=False)

@@ -135,5 +135,5 @@ #! /usr/bin/env python

thatdata.picks.samp1[:, :] = np.NaN
thatdata.picks.samp2[:, :] = np.NaN
thatdata.picks.samp3[:, :] = np.NaN
thatdata.picks.samp1[:, :] = np.nan
thatdata.picks.samp2[:, :] = np.nan
thatdata.picks.samp3[:, :] = np.nan
tnum, sn = picklib.get_intersection(thisdata, thatdata, multiple_int=False, return_nans=True)

@@ -140,0 +140,0 @@ self.assertTrue(len(sn) == len(thisdata.picks.picknums))

@@ -71,3 +71,3 @@ #! /usr/bin/env python

def test_smooth(self):
# first, no NaNs
# first, no nans
data = RadarData(os.path.join(THIS_DIR, 'input_data', 'small_data_picks.mat'))

@@ -81,8 +81,8 @@ cache_val = data.picks.samp1.copy()

# NaNs ends only
# nans ends only
data = RadarData(os.path.join(THIS_DIR, 'input_data', 'small_data_picks.mat'))
for attr in ['samp1', 'samp2', 'samp3', 'power']:
val = getattr(data.picks, attr)
val[:, -1] = np.NaN
val[:, 0] = np.NaN
val[:, -1] = np.nan
val[:, 0] = np.nan
setattr(data.picks, attr, val)

@@ -95,5 +95,5 @@ data.picks.smooth(4, units='tnum')

val = getattr(data.picks, attr)
val[:, -1] = np.NaN
val[:, 0] = np.NaN
val[:, 5] = np.NaN
val[:, -1] = np.nan
val[:, 0] = np.nan
val[:, 5] = np.nan
setattr(data.picks, attr, val)

@@ -106,3 +106,3 @@ data.picks.smooth(4, units='tnum')

val = getattr(data.picks, attr)
val[0, :] = np.NaN
val[0, :] = np.nan
setattr(data.picks, attr, val)

@@ -116,5 +116,5 @@ data.picks.smooth(4, units='tnum')

val = getattr(data.picks, attr)
val[:, -1] = np.NaN
val[:, 0] = np.NaN
val[:, 5] = np.NaN
val[:, -1] = np.nan
val[:, 0] = np.nan
val[:, 5] = np.nan
setattr(data.picks, attr, val)

@@ -121,0 +121,0 @@ data.picks.smooth(4, units='dist')

@@ -286,3 +286,3 @@ #! /usr/bin/env python

dat.picks.samp2[:, 1] = np.NaN # make sure no bugs if this is at the bottom
dat.picks.samp2[:, 1] = np.nan # make sure no bugs if this is at the bottom
fig, ax = plot.plot_radargram(dat, flatten_layer=10)

@@ -289,0 +289,0 @@

@@ -155,3 +155,3 @@ #! /usr/bin/env python

# First, export with NaNs, both with normal field (depth) and elev
# First, export with nans, both with normal field (depth) and elev
rd.picks.samp2[:] = np.nan

@@ -169,3 +169,3 @@ with warnings.catch_warnings(record=True) as w:

# Fill in NaNs
# Fill in nans
rd.picks.samp2[:] = 1

@@ -204,3 +204,3 @@ with warnings.catch_warnings(record=True) as w:

# First, export with NaNs
# First, export with nans
rd.picks.samp2[:] = np.nan

@@ -220,3 +220,3 @@ rd.output_csv(os.path.join(THIS_DIR, 'input_data', 'test.csv'))

# Fill in NaNs
# Fill in nans
rd.picks.samp2[:] = 1

@@ -223,0 +223,0 @@ rd.output_csv(os.path.join(THIS_DIR, 'input_data', 'test.csv'))