New Research: Supply Chain Attack on Axios Pulls Malicious Dependency from npm.Details
Socket
Book a DemoSign in
Socket

derivative

Package Overview
Dependencies
Maintainers
2
Versions
13
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

derivative - pypi Package Compare versions

Comparing version
0.4.2
to
0.5.3
+1
-1
derivative/__init__.py

@@ -1,2 +0,2 @@

from .differentiation import dxdt, methods
from .differentiation import dxdt, smooth_x, methods
from .dglobal import Spectral, Spline, TrendFiltered, Kalman

@@ -3,0 +3,0 @@ from .dlocal import FiniteDifference, SavitzkyGolay

@@ -1,1 +0,1 @@

__version__: str = '0.4.2'
__version__: str = '0.5.3'
from .differentiation import Derivative, register
from .utils import deriv, integ
from .utils import deriv, integ, _memoize_arrays

@@ -7,3 +7,2 @@ import numpy as np

from scipy import interpolate, sparse
from scipy.sparse import linalg as splinalg
from scipy.special import legendre

@@ -203,12 +202,7 @@ from sklearn.linear_model import Lasso

"""
self._t = None
self._x = None
self._xdot_hat = None
self._x_hat = None
self.alpha = alpha
def _global(self, t, x, alpha):
self._t = t
self._x = x
@_memoize_arrays(1)
def _global(self, t, z, alpha):
delta_times = t[1:]-t[:-1]

@@ -228,15 +222,25 @@ n = len(t)

A = sparse.vstack((H, G.T @ Qinv @ G))
b = np.vstack((x.reshape((-1,1)), np.zeros((2*n, 1))))
sol = np.linalg.solve((A.T @ A).todense(), A.T @ b)
self._x_hat = (H @ sol).flatten()
self._xdot_hat = (H[:, list(range(1,2*n))+ [0]] @ sol).flatten()
rhs = H.T @ z.reshape((-1,1))
lhs = H.T @ H + G.T @ Qinv @ G
sol = np.linalg.solve(lhs.toarray(), rhs)
x_hat = (H @ sol).flatten()
x_dot_hat = (H[:, list(range(1,2*n))+ [0]] @ sol).flatten()
return x_hat, x_dot_hat
def compute(self, t, x, i):
self._global(t, x, self.alpha)
return self._xdot_hat[i]
x_dot_hat = self._global(t, x, self.alpha)[1]
return x_dot_hat[i]
def compute_for(self, t, x, indices):
self._global(t, x, self.alpha)
x_dot_hat = self._global(t, x, self.alpha)[1]
for i in indices:
yield self._xdot_hat[i]
yield x_dot_hat[i]
def compute_x(self, t, x, i):
x_hat = self._global(t, x, self.alpha)[0]
return x_hat[i]
def compute_x_for(self, t, x, indices):
x_hat = self._global(t, x, self.alpha)[0]
for i in indices:
yield x_hat[i]
import abc
import numpy as np
from .utils import _memoize_arrays

@@ -17,2 +18,7 @@ methods = {}

@_memoize_arrays()
def _gen_method(x, t, kind, axis, **kwargs):
    """Instantiate (and memoize) the Derivative implementation named ``kind``.

    The body ignores x, t, and axis; they appear in the signature so that
    the memoizing decorator keys the cached instance on the data as well,
    letting repeated calls on the same arrays reuse one method object.
    """
    method_class = methods.get(kind)
    return method_class(**kwargs)
def dxdt(x, t, kind=None, axis=1, **kwargs):

@@ -46,9 +52,45 @@ """

if kind is None:
method = methods.get(default[0])
return method(**default[1]).d(x, t, axis=axis)
method = _gen_method(x, t, default[0], axis, **default[1])
return method.d(x, t, axis=axis)
else:
method = methods.get(kind)
return method(**kwargs).d(x, t, axis=axis)
method = _gen_method(x, t, kind, axis, **kwargs)
return method.d(x, t, axis=axis)
def smooth_x(x, t, kind=None, axis=1, **kwargs):
    """
    Compute the smoothed version of x given t along axis using the numerical
    derivative specified by "kind". This is the functional interface of
    the Derivative class's x() method.

    This function requires that x and t have equal length along axis. If
    x.shape[axis] == 1, then the smoothing cannot be computed in a
    reasonable way and x is returned.

    The implementation 'kind', an instance of the Derivative class, is
    responsible for determining the behavior.

    Args:
        x (:obj:`ndarray` of float): Ordered measurement values.
        t (:obj:`ndarray` of float): Ordered measurement times.
        kind (string): Derivative method name (see available kinds).
        axis ({0,1}): Axis of x along which to smooth. Default 1.
        **kwargs: Keyword arguments for the derivative method "kind".

    Available kinds
        - finite_difference. Required kwargs: k (symmetric window size as index).
        - savitzky_golay. Required kwargs: order (of a fit polynomial), left, right (window size).
        - spectral. Required kwargs: None.
        - spline. Required kwargs: s (smoothing).
        - trend_filtered. Required kwargs: order (of a fit polynomial), alpha (regularization).

    Returns:
        :obj:`ndarray` of float: Smoothed x along axis.
    """
    if kind is None:
        # Fall back to the module-level default method name and kwargs.
        method = _gen_method(x, t, default[0], axis, **default[1])
    else:
        method = _gen_method(x, t, kind, axis, **kwargs)
    return method.x(x, t, axis=axis)
class Derivative(abc.ABC):

@@ -97,2 +139,41 @@ """ Interface for computing numerical derivatives. """

def compute_x(self, t, x, i):
    """
    Compute smoothed values of one-dimensional data x at the index i of x.
    Overload this if subclass actually smooths values; this base
    implementation performs no smoothing and simply returns x[i].

    This requires that x and t have equal lengths >= 2, and that the index i is a valid index.

    For each implementation, any exceptions raised by a valid input should either be handled or denoted in the
    implementation docstring. For example, some implementations may raise an exception when x and t have length 2.

    Args:
        t (:obj:`ndarray` of float): Ordered measurement times.
        x (:obj:`ndarray` of float): Ordered measurement values.
        i (int): Index at which to return the smoothed value.

    Returns:
        float: Smoothed value of x at index i (here, x[i] unchanged).
    """
    return x[i]
def compute_x_for(self, t, x, indices):
    """
    Compute smoothed values of x at each i in indices. Overload
    this if desiring a more efficient computation over a list of
    indices.

    This function requires that x and t have equal length along axis, and that all of the indices are valid.

    Args:
        t (:obj:`ndarray` of float): Ordered measurement times.
        x (:obj:`ndarray` of float): Ordered measurement values.
        indices (:obj:`ndarray` of int): Indices i at which to compute the smoothed values.

    Returns:
        Generator[float]: yields the smoothed value of x at each i in indices
        (delegates per-index work to ``self.compute_x``).
    """
    for i in indices:
        yield self.compute_x(t, x, i)
def d(self, X, t, axis=1):

@@ -106,4 +187,4 @@ """

Args:
X (:obj:`ndarray` of float): Ordered measurements values. Multiple measurements allowed.
t (:obj:`ndarray` of float): Ordered measurement times.
X (:obj:`ndarray` of float): Ordered measurements values. Multiple measurements allowed.
axis ({0,1}). axis of X along which to differentiate. default 1.

@@ -117,34 +198,64 @@

"""
# Cast
X = np.array(X)
if not X.size:
return np.array([])
X, flat = _align_axes(X, t, axis)
flat = False
# Check shape and axis
if len(X.shape) == 1:
X = X.reshape(1, -1)
flat = True
elif len(X.shape) == 2:
if axis == 0:
X = X.T
elif axis == 1:
pass
else:
raise ValueError("Invalid axis.")
if X.shape[1] == 1:
dX = X
else:
raise ValueError("Invalid shape of X.")
dX = np.array([list(self.compute_for(t, x, np.arange(len(t)))) for x in X])
if X.shape[1] != len(t):
raise ValueError("Desired X axis size does not match t size.")
return _restore_axes(dX, axis, flat)
# Differentiate if 2 or more points along axis
def x(self, X, t, axis=1):
    """
    Compute the smoothed X values from measurements X taken at times t.

    Not all methods perform smoothing when calculating derivatives. In
    these cases, X is returned unmodified.

    Args:
        X (:obj:`ndarray` of float): Ordered measurements values. Multiple measurements allowed.
        t (:obj:`ndarray` of float): Ordered measurement times.
        axis ({0,1}). axis of X along which to smooth. default 1.

    Returns:
        :obj:`ndarray` of float: Smoothed X along axis.
    """
    # Normalize input so samples run along rows; remember original layout.
    X, flat = _align_axes(X, t, axis)
    if X.shape[1] == 1:
        # A single sample per row cannot be smoothed; return it unchanged.
        dX = X
    else:
        dX = np.array([list(self.compute_x_for(t, x, np.arange(len(t)))) for x in X])
    return _restore_axes(dX, axis, flat)
def _align_axes(X, t, axis):
# Cast
X = np.array(X)
flat = False
# Check shape and axis
if len(X.shape) == 1:
X = X.reshape(1, -1)
flat = True
elif len(X.shape) == 2:
if axis == 0:
X = X.T
elif axis == 1:
pass
else:
return dX if axis == 1 else dX.T
raise ValueError("Invalid axis.")
else:
raise ValueError("Invalid shape of X.")
if X.shape[1] != len(t):
raise ValueError("Desired X axis size does not match t size.")
return X, flat
def _restore_axes(dX, axis, flat):
if flat:
return dX.flatten()
else:
return dX if axis == 1 else dX.T

@@ -0,1 +1,3 @@

from functools import _CacheInfo as CacheInfo, wraps
from collections import OrderedDict, Counter
import numpy as np

@@ -56,1 +58,73 @@ from scipy.special import binom

raise ValueError('Bad size of {}'.format(n))
def _memoize_arrays(maxsize=128):
"""A cache wrapper for functions that accept numpy arrays.
Cannot directly use any memoization from functools on these
functions because they require hashable types.
"""
def memoizing_decorator(wrapped_func):
class ArrayKey(int):
pass
def make_key(*args, **kwargs):
def arrs_to_keys(*args, **kwargs):
new_args = []
new_kwargs = {}
for arg in args:
if not isinstance(arg, np.ndarray):
new_args.append(arg)
continue
key = ArrayKey(hash(arg.tobytes()))
new_args.append(key)
for k, v in kwargs.items():
if not isinstance(v, np.ndarray):
new_kwargs[k] = v
continue
key = ArrayKey(hash(v.tobytes()))
new_kwargs[k] = key
return new_args, new_kwargs
new_args, new_kwargs = arrs_to_keys(*args, **kwargs)
new_args_dict = {k: v for k, v in enumerate(new_args)}
return (
tuple(sorted(new_args_dict.items()))
+ tuple(sorted(new_kwargs.items()))
)
arg_dict = OrderedDict()
hits = 0
misses = 0
@wraps(wrapped_func)
def wrapper_func(*args, **kwargs):
nonlocal arg_dict, hits, misses
cache_key = make_key(*args, **kwargs)
try:
result = arg_dict[cache_key]
arg_dict.move_to_end(cache_key)
hits += 1
return result
except KeyError: pass
misses += 1
result = wrapped_func(*args, **kwargs)
if maxsize > 0:
arg_dict[cache_key] = result
if len(arg_dict) > maxsize:
arg_dict.popitem(last=False)
return result
def cache_clear():
nonlocal arg_dict, hits, misses
arg_dict = OrderedDict()
hits = 0
misses = 0
wrapper_func.cache_clear = cache_clear
def cache_info():
return CacheInfo(hits, misses, maxsize, arg_dict.__len__())
wrapper_func.cache_info = cache_info
return wrapper_func
return memoizing_decorator
Metadata-Version: 2.1
Name: derivative
Version: 0.4.2
Version: 0.5.3
Summary: Numerical differentiation in python.

@@ -13,4 +13,6 @@ Home-page: https://github.com/andgoldschmidt/derivative

Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Provides-Extra: dev

@@ -25,4 +27,4 @@ Provides-Extra: docs

Requires-Dist: pytest (>=3.6.4,<4.0.0); extra == "dev"
Requires-Dist: scikit-learn (>=1.0.0,<1.1.0)
Requires-Dist: scipy (>=1.4.1,<2.0.0)
Requires-Dist: sklearn (>=0.0,<0.1)
Requires-Dist: sphinx (>=4.0.2,<5.0.0); extra == "docs"

@@ -34,3 +36,3 @@ Requires-Dist: sphinx_rtd_theme (>=0.4.3,<0.5.0); extra == "docs"

|RTD| |PyPI| |LIC|
|RTD| |PyPI| |Zenodo| |LIC|

@@ -105,2 +107,25 @@ Numerical differentiation of noisy time series data in python

Citing derivative:
------------------
The **derivative** package is a contribution to `PySINDy <https://github.com/dynamicslab/pysindy/>`_; this work has been published in the Journal of Open Source Software (JOSS). If you use **derivative** in your work, please cite it using the following reference:
Kaptanoglu et al., (2022). PySINDy: A comprehensive Python package for robust sparse system identification. Journal of Open Source Software, 7(69), 3994, https://doi.org/10.21105/joss.03994
.. code-block:: text
@article{kaptanoglu2022pysindy,
doi = {10.21105/joss.03994},
url = {https://doi.org/10.21105/joss.03994},
year = {2022},
publisher = {The Open Journal},
volume = {7},
number = {69},
pages = {3994},
author = {Alan A. Kaptanoglu and Brian M. de Silva and Urban Fasel and Kadierdan Kaheman and Andy J. Goldschmidt and Jared Callaham and Charles B. Delahunt and Zachary G. Nicolaou and Kathleen Champion and Jean-Christophe Loiseau and J. Nathan Kutz and Steven L. Brunton},
title = {PySINDy: A comprehensive Python package for robust sparse system identification},
journal = {Journal of Open Source Software}
}
.. |RTD| image:: https://readthedocs.org/projects/derivative/badge/?version=latest

@@ -117,2 +142,4 @@ :target: https://derivative.readthedocs.io/en/latest/?badge=latest

.. |Zenodo| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.6617446.svg
:target: https://doi.org/10.5281/zenodo.6617446
[tool.poetry]
name = "derivative"
version = "0.4.2"
version = "0.5.3"
description = "Numerical differentiation in python."

@@ -17,3 +17,3 @@ repository = "https://github.com/andgoldschmidt/derivative"

scipy = "^1.4.1"
sklearn = "^0.0"
scikit-learn = "~1.0.0"

@@ -37,3 +37,3 @@ # docs

[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

@@ -1,2 +0,2 @@

|RTD| |PyPI| |LIC|
|RTD| |PyPI| |Zenodo| |LIC|

@@ -71,2 +71,25 @@ Numerical differentiation of noisy time series data in python

Citing derivative:
------------------
The **derivative** package is a contribution to `PySINDy <https://github.com/dynamicslab/pysindy/>`_; this work has been published in the Journal of Open Source Software (JOSS). If you use **derivative** in your work, please cite it using the following reference:
Kaptanoglu et al., (2022). PySINDy: A comprehensive Python package for robust sparse system identification. Journal of Open Source Software, 7(69), 3994, https://doi.org/10.21105/joss.03994
.. code-block:: text
@article{kaptanoglu2022pysindy,
doi = {10.21105/joss.03994},
url = {https://doi.org/10.21105/joss.03994},
year = {2022},
publisher = {The Open Journal},
volume = {7},
number = {69},
pages = {3994},
author = {Alan A. Kaptanoglu and Brian M. de Silva and Urban Fasel and Kadierdan Kaheman and Andy J. Goldschmidt and Jared Callaham and Charles B. Delahunt and Zachary G. Nicolaou and Kathleen Champion and Jean-Christophe Loiseau and J. Nathan Kutz and Steven L. Brunton},
title = {PySINDy: A comprehensive Python package for robust sparse system identification},
journal = {Journal of Open Source Software}
}
.. |RTD| image:: https://readthedocs.org/projects/derivative/badge/?version=latest

@@ -83,1 +106,3 @@ :target: https://derivative.readthedocs.io/en/latest/?badge=latest

.. |Zenodo| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.6617446.svg
:target: https://doi.org/10.5281/zenodo.6617446

@@ -11,3 +11,3 @@ # -*- coding: utf-8 -*-

install_requires = \
['numpy>=1.18.3,<2.0.0', 'scipy>=1.4.1,<2.0.0', 'sklearn>=0.0,<0.1']
['numpy>=1.18.3,<2.0.0', 'scikit-learn>=1.0.0,<1.1.0', 'scipy>=1.4.1,<2.0.0']

@@ -26,5 +26,5 @@ extras_require = \

'name': 'derivative',
'version': '0.4.2',
'version': '0.5.3',
'description': 'Numerical differentiation in python.',
'long_description': '|RTD| |PyPI| |LIC|\n\nNumerical differentiation of noisy time series data in python\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n**derivative** is a Python package for differentiating noisy data. The package showcases a variety of improvements that can be made over finite differences when data is not clean.\n\nWant to see an example of how **derivative** can help? This package is part of **PySINDy** (`github.com/dynamicslab/pysindy <https://github.com/dynamicslab/pysindy/>`_), a sparse-regression framework for discovering nonlinear dynamical systems from data.\n\nThis package binds common differentiation methods to a single easily implemented differentiation interface to encourage user adaptation.\nNumerical differentiation methods for noisy time series data in python includes:\n\n1. Symmetric finite difference schemes using arbitrary window size.\n\n2. Savitzky-Galoy derivatives (aka polynomial-filtered derivatives) of any polynomial order with independent left and right window parameters.\n\n3. Spectral derivatives with optional filter.\n\n4. Spline derivatives of any order.\n\n5. Polynomial-trend-filtered derivatives generalizing methods like total variational derivatives.\n\n6. Kalman derivatives find the maximum likelihood estimator for a derivative described by a Brownian motion.\n\n.. code-block:: python\n\n from derivative import dxdt\n import numpy as np\n\n t = np.linspace(0,2*np.pi,50)\n x = np.sin(x)\n\n # 1. Finite differences with central differencing using 3 points.\n result1 = dxdt(x, t, kind="finite_difference", k=1)\n\n # 2. Savitzky-Golay using cubic polynomials to fit in a centered window of length 1\n result2 = dxdt(x, t, kind="savitzky_golay", left=.5, right=.5, order=3)\n\n # 3. Spectral derivative\n result3 = dxdt(x, t, kind="spectral")\n\n # 4. Spline derivative with smoothing set to 0.01\n result4 = dxdt(x, t, kind="spline", s=1e-2)\n\n # 5. 
Total variational derivative with regularization set to 0.01\n result5 = dxdt(x, t, kind="trend_filtered", order=0, alpha=1e-2)\n\n # 6. Kalman derivative with smoothing set to 1\n result6 = dxdt(x, t, kind="kalman", alpha=1)\n\n\nContributors:\n-------------\nThanks to the members of the community who have contributed!\n\n+-----------------------------------------------------------------+----------------------------------------------------------------------------------+\n| `Jacob Stevens-Haas <https://github.com/Jacob-Stevens-Haas>`_ | Kalman derivatives `#12 <https://github.com/andgoldschmidt/derivative/pull/12>`_ | \n+-----------------------------------------------------------------+----------------------------------------------------------------------------------+\n\n\nReferences:\n-----------\n\n[1] Numerical differentiation of experimental data: local versus global methods- K. Ahnert and M. Abel\n\n[2] Numerical Differentiation of Noisy, Nonsmooth Data- Rick Chartrand\n\n[3] The Solution Path of the Generalized LASSO- R.J. Tibshirani and J. Taylor\n\n\n.. |RTD| image:: https://readthedocs.org/projects/derivative/badge/?version=latest\n :target: https://derivative.readthedocs.io/en/latest/?badge=latest\n :alt: Documentation Status\n \n.. |LIC| image:: https://img.shields.io/badge/License-MIT-blue.svg\n :target: https://derivative.readthedocs.io/en/latest/license.html\n :alt: MIT License\n\n.. |PyPI| image:: https://badge.fury.io/py/derivative.svg\n :target: https://pypi.org/project/derivative/\n\n',
'long_description': '|RTD| |PyPI| |Zenodo| |LIC|\n\nNumerical differentiation of noisy time series data in python\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n**derivative** is a Python package for differentiating noisy data. The package showcases a variety of improvements that can be made over finite differences when data is not clean.\n\nWant to see an example of how **derivative** can help? This package is part of **PySINDy** (`github.com/dynamicslab/pysindy <https://github.com/dynamicslab/pysindy/>`_), a sparse-regression framework for discovering nonlinear dynamical systems from data.\n\nThis package binds common differentiation methods to a single easily implemented differentiation interface to encourage user adaptation.\nNumerical differentiation methods for noisy time series data in python includes:\n\n1. Symmetric finite difference schemes using arbitrary window size.\n\n2. Savitzky-Galoy derivatives (aka polynomial-filtered derivatives) of any polynomial order with independent left and right window parameters.\n\n3. Spectral derivatives with optional filter.\n\n4. Spline derivatives of any order.\n\n5. Polynomial-trend-filtered derivatives generalizing methods like total variational derivatives.\n\n6. Kalman derivatives find the maximum likelihood estimator for a derivative described by a Brownian motion.\n\n.. code-block:: python\n\n from derivative import dxdt\n import numpy as np\n\n t = np.linspace(0,2*np.pi,50)\n x = np.sin(x)\n\n # 1. Finite differences with central differencing using 3 points.\n result1 = dxdt(x, t, kind="finite_difference", k=1)\n\n # 2. Savitzky-Golay using cubic polynomials to fit in a centered window of length 1\n result2 = dxdt(x, t, kind="savitzky_golay", left=.5, right=.5, order=3)\n\n # 3. Spectral derivative\n result3 = dxdt(x, t, kind="spectral")\n\n # 4. Spline derivative with smoothing set to 0.01\n result4 = dxdt(x, t, kind="spline", s=1e-2)\n\n # 5. 
Total variational derivative with regularization set to 0.01\n result5 = dxdt(x, t, kind="trend_filtered", order=0, alpha=1e-2)\n\n # 6. Kalman derivative with smoothing set to 1\n result6 = dxdt(x, t, kind="kalman", alpha=1)\n\n\nContributors:\n-------------\nThanks to the members of the community who have contributed!\n\n+-----------------------------------------------------------------+----------------------------------------------------------------------------------+\n| `Jacob Stevens-Haas <https://github.com/Jacob-Stevens-Haas>`_ | Kalman derivatives `#12 <https://github.com/andgoldschmidt/derivative/pull/12>`_ | \n+-----------------------------------------------------------------+----------------------------------------------------------------------------------+\n\n\nReferences:\n-----------\n\n[1] Numerical differentiation of experimental data: local versus global methods- K. Ahnert and M. Abel\n\n[2] Numerical Differentiation of Noisy, Nonsmooth Data- Rick Chartrand\n\n[3] The Solution Path of the Generalized LASSO- R.J. Tibshirani and J. Taylor\n\n\n\nCiting derivative:\n------------------\nThe **derivative** package is a contribution to `PySINDy <https://github.com/dynamicslab/pysindy/>`_; this work has been published in the Journal of Open Source Software (JOSS). If you use **derivative** in your work, please cite it using the following reference:\n\nKaptanoglu et al., (2022). PySINDy: A comprehensive Python package for robust sparse system identification. Journal of Open Source Software, 7(69), 3994, https://doi.org/10.21105/joss.03994\n\n.. code-block:: text\n\n @article{kaptanoglu2022pysindy,\n \tdoi = {10.21105/joss.03994},\n \turl = {https://doi.org/10.21105/joss.03994},\n \tyear = {2022},\n \tpublisher = {The Open Journal},\n \tvolume = {7},\n \tnumber = {69},\n \tpages = {3994},\n \tauthor = {Alan A. Kaptanoglu and Brian M. de Silva and Urban Fasel and Kadierdan Kaheman and Andy J. Goldschmidt and Jared Callaham and Charles B. 
Delahunt and Zachary G. Nicolaou and Kathleen Champion and Jean-Christophe Loiseau and J. Nathan Kutz and Steven L. Brunton},\n \ttitle = {PySINDy: A comprehensive Python package for robust sparse system identification},\n \tjournal = {Journal of Open Source Software}\n\t}\n \n\n.. |RTD| image:: https://readthedocs.org/projects/derivative/badge/?version=latest\n :target: https://derivative.readthedocs.io/en/latest/?badge=latest\n :alt: Documentation Status\n \n.. |LIC| image:: https://img.shields.io/badge/License-MIT-blue.svg\n :target: https://derivative.readthedocs.io/en/latest/license.html\n :alt: MIT License\n\n.. |PyPI| image:: https://badge.fury.io/py/derivative.svg\n :target: https://pypi.org/project/derivative/\n\n.. |Zenodo| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.6617446.svg\n :target: https://doi.org/10.5281/zenodo.6617446\n',
'author': 'Andy Goldschmidt',

@@ -31,0 +31,0 @@ 'author_email': 'andygold@uw.edu',