Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a DemoInstallSign in
Socket

asdf

Package Overview
Dependencies
Maintainers
8
Versions
74
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

asdf - npm Package Compare versions

Comparing version
4.5.0
to
5.0.0
+4
-3
.github/workflows/benchmark.yml

@@ -36,3 +36,3 @@ name: Benchmarks

- name: Setup Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:

@@ -42,5 +42,6 @@ python-version: "3.13"

run: pip install pytest-codspeed .[benchmark]
- uses: CodSpeedHQ/action@v3
- uses: CodSpeedHQ/action@v4
with:
run: pytest benchmarks/ --codspeed
run: pytest benchmarks/ --codspeed --codspeed-mode instrumentation
mode: instrumentation
token: ${{ secrets.CODSPEED_TOKEN }}

@@ -21,3 +21,3 @@ name: changelog

steps:
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
with:

@@ -24,0 +24,0 @@ python-version: 3

@@ -35,3 +35,3 @@ name: CI

- uses: actions/checkout@v5
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
- uses: pre-commit/action@v3.0.1

@@ -38,0 +38,0 @@ core:

@@ -12,4 +12,1 @@ [asdf.extensions]

asdftool = asdf._commands.main:main
[pytest11]
asdf_schema_tester = pytest_asdf.plugin
Metadata-Version: 2.4
Name: asdf
Version: 4.5.0
Version: 5.0.0
Summary: Python implementation of the ASDF Standard

@@ -5,0 +5,0 @@ Author-email: The ASDF Developers <help@stsci.edu>

@@ -901,4 +901,2 @@ CHANGES.rst

licenses/JSONSCHEMA_LICENSE
licenses/JSON_LICENSE
pytest_asdf/__init__.py
pytest_asdf/plugin.py
licenses/JSON_LICENSE
asdf
pytest_asdf

@@ -103,3 +103,3 @@ import copy

if custom_schema is not None:
self._custom_schema = schema._load_schema_cached(custom_schema, None, True)
self._custom_schema = schema._load_schema_cached(custom_schema, True)
else:

@@ -1125,3 +1125,3 @@ self._custom_schema = None

def schema_info(self, key="description", path=None, preserve_list=True, refresh_extension_manager=NotSet):
def schema_info(self, key="description", path=None, preserve_list=True):
"""

@@ -1145,10 +1145,3 @@ Get a nested dictionary of the schema information for a given key, relative to the path.

If True, then lists are preserved. Otherwise, they are turned into dicts.
refresh_extension_manager : bool
If `True`, refresh the extension manager before looking up the
key. This is useful if you want to make sure that the schema
data for a given key is up to date.
"""
if refresh_extension_manager is not NotSet:
warnings.warn("refresh_extension_manager is deprecated", DeprecationWarning)
if isinstance(path, AsdfSearchResult):

@@ -1158,3 +1151,2 @@ return path.schema_info(

preserve_list=preserve_list,
refresh_extension_manager=refresh_extension_manager,
)

@@ -1167,3 +1159,2 @@

preserve_list=preserve_list,
refresh_extension_manager=refresh_extension_manager,
extension_manager=self.extension_manager,

@@ -1177,3 +1168,2 @@ )

show_values=display.DEFAULT_SHOW_VALUES,
refresh_extension_manager=NotSet,
):

@@ -1203,5 +1193,2 @@ """

"""
if refresh_extension_manager is not NotSet:
warnings.warn("refresh_extension_manager is deprecated", DeprecationWarning)
lines = display.render_tree(

@@ -1213,3 +1200,2 @@ self.tree,

identifier="root",
refresh_extension_manager=refresh_extension_manager,
extension_manager=self.extension_manager,

@@ -1216,0 +1202,0 @@ )

@@ -1,3 +0,1 @@

import warnings
from asdf.extension import Validator

@@ -7,19 +5,7 @@ from asdf.tags.core.ndarray import validate_datatype, validate_max_ndim, validate_ndim

def _warn_if_not_array(node, schema_property):
# warn here for non-ndarray tags, in a major version bump we can
# remove this and update the tags below to only match ndarrays
if not getattr(node, "_tag", "").startswith("tag:stsci.edu:asdf/core/ndarray-"):
warnings.warn(
f"Use of the {schema_property} validator with non-ndarray tags is deprecated. "
"Please define a custom validator for your tag",
DeprecationWarning,
)
class NdimValidator(Validator):
schema_property = "ndim"
tags = ["**"]
tags = ["tag:stsci.edu:asdf/core/ndarray-*"]
def validate(self, expected_ndim, node, schema):
_warn_if_not_array(node, self.schema_property)
yield from validate_ndim(None, expected_ndim, node, schema)

@@ -30,6 +16,5 @@

schema_property = "max_ndim"
tags = ["**"]
tags = ["tag:stsci.edu:asdf/core/ndarray-*"]
def validate(self, max_ndim, node, schema):
_warn_if_not_array(node, self.schema_property)
yield from validate_max_ndim(None, max_ndim, node, schema)

@@ -40,6 +25,5 @@

schema_property = "datatype"
tags = ["**"]
tags = ["tag:stsci.edu:asdf/core/ndarray-*"]
def validate(self, expected_datatype, node, schema):
_warn_if_not_array(node, self.schema_property)
yield from validate_datatype(None, expected_datatype, node, schema)

@@ -14,3 +14,2 @@ """

from ._node_info import create_tree
from .util import NotSet

@@ -37,3 +36,2 @@ __all__ = [

identifier="root",
refresh_extension_manager=NotSet,
extension_manager=None,

@@ -49,3 +47,2 @@ ):

filters=[] if filters is None else filters,
refresh_extension_manager=refresh_extension_manager,
extension_manager=extension_manager,

@@ -52,0 +49,0 @@ )

@@ -6,3 +6,2 @@ import re

from .treeutil import get_children, is_container
from .util import NotSet

@@ -143,3 +142,3 @@

def create_tree(key, node, identifier="root", filters=None, refresh_extension_manager=NotSet, extension_manager=None):
def create_tree(key, node, identifier="root", filters=None, extension_manager=None):
"""

@@ -158,7 +157,2 @@ Create a `NodeSchemaInfo` tree which can be filtered from a base node.

If True, then lists are preserved. Otherwise, they are turned into dicts.
refresh_extension_manager : bool
DEPRECATED
If `True`, refresh the extension manager before looking up the
key. This is useful if you want to make sure that the schema
data for a given key is up to date.
"""

@@ -171,3 +165,2 @@ filters = [] if filters is None else filters

node,
refresh_extension_manager=refresh_extension_manager,
extension_manager=extension_manager,

@@ -189,3 +182,2 @@ )

preserve_list=True,
refresh_extension_manager=NotSet,
extension_manager=None,

@@ -209,7 +201,2 @@ ):

If True, then lists are preserved. Otherwise, they are turned into dicts.
refresh_extension_manager : bool
DEPRECATED
If `True`, refresh the extension manager before looking up the
key. This is useful if you want to make sure that the schema
data for a given key is up to date.
"""

@@ -222,3 +209,2 @@

filters=[] if filters is None else filters,
refresh_extension_manager=refresh_extension_manager,
extension_manager=extension_manager,

@@ -239,14 +225,6 @@ )

def _get_extension_manager(refresh_extension_manager):
from ._asdf import AsdfFile, get_config
from .extension import ExtensionManager
def _get_extension_manager():
from ._asdf import AsdfFile
if refresh_extension_manager is NotSet:
refresh_extension_manager = False
af = AsdfFile()
if refresh_extension_manager:
config = get_config()
af._extension_manager = ExtensionManager(config.extensions)
return af.extension_manager

@@ -375,5 +353,3 @@

@classmethod
def from_root_node(
cls, key, root_identifier, root_node, schema=None, refresh_extension_manager=NotSet, extension_manager=None
):
def from_root_node(cls, key, root_identifier, root_node, schema=None, extension_manager=None):
"""

@@ -384,3 +360,3 @@ Build a NodeSchemaInfo tree from the given ASDF root node.

"""
extension_manager = extension_manager or _get_extension_manager(refresh_extension_manager)
extension_manager = extension_manager or _get_extension_manager()

@@ -387,0 +363,0 @@ current_nodes = [(None, root_identifier, root_node)]

@@ -7,3 +7,2 @@ import io

import urllib.request as urllib_request
from contextlib import nullcontext

@@ -16,3 +15,2 @@ import numpy as np

from asdf.config import config_context
from asdf.exceptions import AsdfDeprecationWarning

@@ -22,22 +20,2 @@ from . import _helpers as helpers

@pytest.fixture(params=[True, False])
def has_fsspec(request, monkeypatch):
if request.param:
yield True
else:
pytest.importorskip("fsspec")
monkeypatch.setitem(sys.modules, "fsspec", None)
yield False
@pytest.fixture()
def warn_no_fsspec(has_fsspec):
if has_fsspec:
yield nullcontext()
else:
yield pytest.warns(
AsdfDeprecationWarning, match=r"Opening http urls without fsspec is deprecated. Please install fsspec"
)
def _roundtrip(tree, get_write_fd, get_read_fd, write_options=None, read_options=None):

@@ -281,3 +259,3 @@ write_options = {} if write_options is None else write_options

@pytest.mark.remote_data()
def test_http_connection(tree, httpserver, warn_no_fsspec):
def test_http_connection(tree, httpserver):
path = os.path.join(httpserver.tmpdir, "test.asdf")

@@ -297,6 +275,5 @@

with warn_no_fsspec:
with _roundtrip(tree, get_write_fd, get_read_fd) as ff:
assert len(ff._blocks.blocks) == 2
assert (ff.tree["science_data"] == tree["science_data"]).all()
with _roundtrip(tree, get_write_fd, get_read_fd) as ff:
assert len(ff._blocks.blocks) == 2
assert (ff.tree["science_data"] == tree["science_data"]).all()

@@ -338,3 +315,3 @@

@pytest.mark.remote_data()
def test_exploded_http(tree, httpserver, warn_no_fsspec):
def test_exploded_http(tree, httpserver):
path = os.path.join(httpserver.tmpdir, "test.asdf")

@@ -348,5 +325,4 @@

with warn_no_fsspec:
with _roundtrip(tree, get_write_fd, get_read_fd, write_options={"all_array_storage": "external"}) as ff:
assert len(list(ff._blocks.blocks)) == 0
with _roundtrip(tree, get_write_fd, get_read_fd, write_options={"all_array_storage": "external"}) as ff:
assert len(list(ff._blocks.blocks)) == 0

@@ -406,3 +382,3 @@

def test_invalid_obj(tmp_path, has_fsspec):
def test_invalid_obj(tmp_path):
with pytest.raises(ValueError, match=r"Can't handle .* as a file for mode 'r'"):

@@ -423,7 +399,3 @@ generic_io.get_file(42)

mode = "w"
if has_fsspec:
raises_ctx = pytest.raises(ValueError, match=f"Unable to open {url} with mode {mode}")
else:
raises_ctx = pytest.raises(ValueError, match=r"HTTP connections can not be opened for writing")
with raises_ctx:
with pytest.raises(ValueError, match=f"Unable to open {url} with mode {mode}"):
generic_io.get_file(url, mode)

@@ -430,0 +402,0 @@

@@ -12,3 +12,3 @@ import contextlib

from asdf import tagged, treeutil, yamlutil
from asdf.exceptions import AsdfConversionWarning, AsdfDeprecationWarning, AsdfSerializationError
from asdf.exceptions import AsdfSerializationError
from asdf.testing.helpers import yaml_to_asdf

@@ -267,19 +267,2 @@

def test_ndarray_subclass_conversion(tmp_path):
class MyNDArray(np.ndarray):
pass
fn = tmp_path / "test.asdf"
af = asdf.AsdfFile()
af["a"] = MyNDArray([1, 2, 3])
with pytest.raises(AsdfSerializationError, match=r".*is not serializable by asdf.*"):
af.write_to(fn)
with asdf.config.config_context() as cfg:
with pytest.warns(AsdfDeprecationWarning, match=r"convert_unknown_ndarray_subclasses"):
cfg.convert_unknown_ndarray_subclasses = True
with pytest.warns(AsdfConversionWarning, match=r"A ndarray subclass .*"):
af.write_to(fn)
@pytest.mark.parametrize(

@@ -286,0 +269,0 @@ "payload",

@@ -31,5 +31,5 @@ # file generated by setuptools-scm

__version__ = version = '4.5.0'
__version_tuple__ = version_tuple = (4, 5, 0)
__version__ = version = '5.0.0'
__version_tuple__ = version_tuple = (5, 0, 0)
__commit_id__ = commit_id = 'g3ab5f2ac2'
__commit_id__ = commit_id = 'g10f536301'

@@ -9,3 +9,2 @@ """

import threading
import warnings
from contextlib import contextmanager

@@ -15,3 +14,2 @@

from ._helpers import validate_version
from .exceptions import AsdfDeprecationWarning
from .extension import ExtensionProxy

@@ -32,3 +30,2 @@ from .resource import ResourceManager, ResourceMappingProxy

DEFAULT_DEFAULT_ARRAY_SAVE_BASE = True
DEFAULT_CONVERT_UNKNOWN_NDARRAY_SUBCLASSES = False
DEFAULT_LAZY_TREE = False

@@ -57,3 +54,2 @@

self._default_array_save_base = DEFAULT_DEFAULT_ARRAY_SAVE_BASE
self._convert_unknown_ndarray_subclasses = DEFAULT_CONVERT_UNKNOWN_NDARRAY_SUBCLASSES
self._lazy_tree = DEFAULT_LAZY_TREE

@@ -446,33 +442,2 @@

@property
def convert_unknown_ndarray_subclasses(self):
"""
Get configuration that controls if ndarray subclasses
(subclasses that aren't otherwise handled by a specific
converter) are serialized as ndarray. If `True`, instances
of these subclasses will appear in ASDF files as ndarrays
and when loaded, will load as ndarrays.
Note that these conversions will result in an
AsdfConversionWarning being issued as this support for
converting subclasses will be removed in a future version
of ASDF.
Returns
-------
bool
"""
return self._convert_unknown_ndarray_subclasses
@convert_unknown_ndarray_subclasses.setter
def convert_unknown_ndarray_subclasses(self, value):
if value:
msg = (
"Enabling convert_unknown_ndarray_subclasses is deprecated. "
"Please add a Converter or install an extension that supports "
"the ndarray subclass you'd like to convert"
)
warnings.warn(msg, AsdfDeprecationWarning)
self._convert_unknown_ndarray_subclasses = value
@property
def lazy_tree(self):

@@ -503,3 +468,2 @@ """

f" default_array_save_base: {self.default_array_save_base}\n"
f" convert_unknown_ndarray_subclasses: {self.convert_unknown_ndarray_subclasses}\n"
f" default_version: {self.default_version}\n"

@@ -506,0 +470,0 @@ f" io_block_size: {self.io_block_size}\n"

@@ -16,6 +16,4 @@ """

import sys
import tempfile
import warnings
from os import SEEK_CUR, SEEK_END, SEEK_SET
from urllib.request import url2pathname, urlopen
from urllib.request import url2pathname

@@ -26,3 +24,3 @@ import numpy as np

from ._extern import atomicfile
from .exceptions import AsdfDeprecationWarning, DelimiterNotFoundError
from .exceptions import DelimiterNotFoundError
from .util import _patched_urllib_parse

@@ -976,50 +974,2 @@

def _http_to_temp(init, mode, uri=None):
"""
Stream the content of an http or https URL to a temporary file.
Parameters
----------
init : str
HTTP or HTTPS URL.
mode : str
ASDF file mode. The temporary file will always be opened
in w+b mode, but the resulting GenericFile will report itself
writable based on this value.
uri : str, optional
URI against which relative paths within the file are
resolved. If None, the init value will be used.
Returns
-------
RealFile
Temporary file.
"""
from asdf import get_config
fd = tempfile.NamedTemporaryFile("w+b")
block_size = get_config().io_block_size
if block_size == -1:
try:
block_size = os.fstat(fd.fileno()).st_blksize
except Exception:
block_size = io.DEFAULT_BUFFER_SIZE
try:
# This method is only called with http and https schemes:
with urlopen(init) as response: # nosec
chunk = response.read(block_size)
while len(chunk) > 0:
fd.write(chunk)
chunk = response.read(block_size)
fd.seek(0)
except Exception:
fd.close()
raise
return RealFile(fd, mode, close=True, uri=uri or init)
def get_uri(file_obj):

@@ -1160,12 +1110,2 @@ """

# finally, allow the legacy http code to handle this (with a warning)
if parsed.scheme in ["http", "https"]:
if "w" in mode:
msg = "HTTP connections can not be opened for writing"
raise ValueError(msg)
msg = "Opening http urls without fsspec is deprecated. Please install fsspec[http]"
warnings.warn(msg, AsdfDeprecationWarning)
return _http_to_temp(init, mode, uri=uri)
if isinstance(init, io.BytesIO):

@@ -1172,0 +1112,0 @@ return MemoryIO(init, mode, uri=uri)

@@ -52,6 +52,2 @@ import copy

def _default_resolver(uri):
return uri
def validate_tag(validator, tag_pattern, instance, schema):

@@ -237,3 +233,3 @@ """

def _create_validator(validators=YAML_VALIDATORS, visit_repeat_nodes=False):
meta_schema = _load_schema_cached(YAML_SCHEMA_METASCHEMA_ID, None, False)
meta_schema = _load_schema_cached(YAML_SCHEMA_METASCHEMA_ID, False)

@@ -322,2 +318,5 @@ type_checker = mvalidators.Draft4Validator.TYPE_CHECKER.redefine_many(

def _load_schema(url):
# handle case where the provided url is a non-str (Path)
url = str(url)
if url.startswith("http://") or url.startswith("https://") or url.startswith("asdf://"):

@@ -338,6 +337,3 @@ msg = f"Unable to fetch schema from non-file URL: {url}"

def _make_schema_loader(resolver):
if resolver is None:
resolver = _default_resolver
def _make_schema_loader():
def load_schema(url):

@@ -348,8 +344,2 @@ # Check if this is a URI provided by the new

if url not in resource_manager:
# Allow the resolvers to do their thing, in case they know
# how to turn this string into a URI that the resource manager
# recognizes.
url = resolver(str(url))
if url in resource_manager:

@@ -371,5 +361,5 @@ content = resource_manager[url]

def _make_jsonschema_refresolver(url_mapping):
def _make_jsonschema_refresolver():
handlers = {}
schema_loader = _make_schema_loader(url_mapping)
schema_loader = _make_schema_loader()

@@ -399,3 +389,3 @@ def get_schema(url):

def load_schema(url, resolver=None, resolve_references=False):
def load_schema(url, resolve_references=False):
"""

@@ -409,10 +399,2 @@ Load a schema from the given URL.

resolver : callable, optional
DEPRECATED arbitrary mapping of uris is no longer supported
Please register all required resources with the resource manager.
A callback function used to map URIs to other URIs. The
callable must take a string and return a string or `None`.
This is useful, for example, when a remote resource has a
mirror on the local filesystem that you wish to use.
resolve_references : bool, optional

@@ -422,11 +404,9 @@ If ``True``, resolve all ``$ref`` references.

"""
if resolver is not None:
warnings.warn("resolver is deprecated, arbitrary mapping of uris is no longer supported", DeprecationWarning)
# We want to cache the work that went into constructing the schema, but returning
# the same object is treacherous, because users who mutate the result will not
# expect that they're changing the schema everywhere.
return copy.deepcopy(_load_schema_cached(url, resolver, resolve_references))
return copy.deepcopy(_load_schema_cached(url, resolve_references))
def _safe_resolve(resolver, json_id, uri):
def _safe_resolve(json_id, uri):
"""

@@ -445,4 +425,2 @@ This function handles the tricky task of resolving a schema URI

"""
if resolver is None:
resolver = _default_resolver
# We can't use urllib.parse here because tag: URIs don't

@@ -454,8 +432,2 @@ # parse correctly.

# The generic_io.resolve_uri method cannot operate on tag: URIs.
# New-style extensions don't support $ref with a tag URI target anyway,
# so it's safe to feed this through the resolver right away.
if base.startswith("tag:"):
base = resolver(base)
# Resolve relative URIs (e.g., #foo/bar, ../foo/bar) against

@@ -465,7 +437,2 @@ # the current schema id.

# Use the resolver object only if the URI does not belong to one
# of the new-style extensions.
if base not in get_config().resource_manager:
base = resolver(base)
return base, fragment

@@ -475,4 +442,4 @@

@lru_cache
def _load_schema_cached(url, resolver, resolve_references):
loader = _make_schema_loader(resolver)
def _load_schema_cached(url, resolve_references):
loader = _make_schema_loader()
schema, url = loader(url)

@@ -487,3 +454,3 @@

if isinstance(node, dict) and "$ref" in node:
suburl_base, suburl_fragment = _safe_resolve(resolver, json_id, node["$ref"])
suburl_base, suburl_fragment = _safe_resolve(json_id, node["$ref"])

@@ -494,3 +461,3 @@ if suburl_base == url or suburl_base == schema.get("id"):

else:
subschema = load_schema(suburl_base, resolver, True)
subschema = load_schema(suburl_base, True)

@@ -510,3 +477,2 @@ return reference.resolve_fragment(subschema, suburl_fragment)

validators=None,
url_mapping=None,
*args,

@@ -520,4 +486,4 @@ _visit_repeat_nodes=False,

The additional *args and **kwargs are passed along to
`~jsonschema.protocols.Validator.validate`.
The additional *args and **kwargs are passed to the
constructor of the returned ``Validator``.

@@ -537,7 +503,2 @@ Parameters

url_mapping : callable, optional
DEPRECATED
A callable that takes one string argument and returns a string
to convert remote URLs into local ones.
_visit_repeat_nodes : bool, optional

@@ -554,5 +515,2 @@ Force the validator to visit nodes that it has already

"""
if url_mapping is not None:
warnings.warn("url_mapping is deprecated, arbitrary mapping of uris is no longer supported", DeprecationWarning)
if ctx is None:

@@ -570,3 +528,3 @@ from ._asdf import AsdfFile

kwargs["resolver"] = _make_jsonschema_refresolver(url_mapping)
kwargs["resolver"] = _make_jsonschema_refresolver()

@@ -670,3 +628,3 @@ # We don't just call validators.validate() directly here, because

schema = ctx._custom_schema
validator = get_validator({} if schema is None else schema, ctx, validators, None, *args, **kwargs)
validator = get_validator({} if schema is None else schema, ctx, validators, *args, **kwargs)
validator.validate(instance)

@@ -761,5 +719,5 @@

meta_schema_id = schema.get("$schema", YAML_SCHEMA_METASCHEMA_ID)
meta_schema = _load_schema_cached(meta_schema_id, None, False)
meta_schema = _load_schema_cached(meta_schema_id, False)
resolver = _make_jsonschema_refresolver(_default_resolver)
resolver = _make_jsonschema_refresolver()

@@ -766,0 +724,0 @@ cls = mvalidators.create(

@@ -9,3 +9,2 @@ """

import typing
import warnings

@@ -331,3 +330,3 @@ from ._display import DEFAULT_MAX_COLS, DEFAULT_MAX_ROWS, DEFAULT_SHOW_VALUES, render_tree

def schema_info(self, key="description", preserve_list=True, refresh_extension_manager=NotSet):
def schema_info(self, key="description", preserve_list=True):
"""

@@ -343,11 +342,3 @@ Get a nested dictionary of the schema information for a given key, relative to this search result.

If True, then lists are preserved. Otherwise, they are turned into dicts.
refresh_extension_manager : bool
DEPRECATED
If `True`, refresh the extension manager before looking up the
key. This is useful if you want to make sure that the schema
data for a given key is up to date.
"""
if refresh_extension_manager is not NotSet:
warnings.warn("refresh_extension_manager is deprecated", DeprecationWarning)
return collect_schema_info(

@@ -359,3 +350,2 @@ key,

preserve_list=preserve_list,
refresh_extension_manager=refresh_extension_manager,
)

@@ -362,0 +352,0 @@

@@ -8,3 +8,3 @@ import warnings

from . import config, schema, tagged, treeutil, util
from . import schema, tagged, treeutil, util
from .constants import STSCI_SCHEMA_TAG_BASE, YAML_TAG_PREFIX

@@ -293,4 +293,2 @@ from .exceptions import AsdfConversionWarning, AsdfSerializationError

cfg = config.get_config()
convert_ndarray_subclasses = cfg.convert_unknown_ndarray_subclasses
converters_cache = {}

@@ -306,12 +304,2 @@

return _convert_obj(obj, converter)
if convert_ndarray_subclasses and isinstance(obj, np.ndarray):
warnings.warn(
f"A ndarray subclass ({type(obj)}) was converted as a ndarray. "
"This behavior will be removed from a future version of ASDF. "
"See https://asdf.readthedocs.io/en/latest/asdf/config.html#convert-unknown-ndarray-subclasses",
AsdfConversionWarning,
)
converter = extension_manager.get_converter_for_type(np.ndarray)
converters_cache[typ] = lambda obj, _converter=converter: _convert_obj(obj, _converter)
return _convert_obj(obj, converter)

@@ -318,0 +306,0 @@ converters_cache[typ] = lambda obj: obj

@@ -0,1 +1,18 @@

5.0.0 (2025-09-10)
==================
Removal
-------
- Remove deprecated refresh_extension_manager argument to
``AsdfFile.schema_info`` and ``AsdfFile.info``.
Remove deprecated url_mapping argument to ``get_validator``.
Remove deprecated resolver argument to ``load_schema``.
Remove deprecated ``AsdfConfig.convert_unknown_ndarray_subclasses``.
Remove deprecated application of ndarray specific validators to non-ndarrays.
Remove deprecated opening of "http://" uris (unless fsspec is installed).
Remove deprecated pytest_asdf plugin. (`#1970
<https://github.com/asdf-format/asdf/pull/1970>`_)
4.5.0 (2025-09-04)

@@ -2,0 +19,0 @@ ==================

@@ -43,3 +43,2 @@ .. currentmodule:: asdf.config

default_array_save_base: True
convert_unknown_ndarray_subclasses: False
default_version: 1.6.0

@@ -70,3 +69,2 @@ io_block_size: -1

default_array_save_base: True
convert_unknown_ndarray_subclasses: False
default_version: 1.6.0

@@ -85,3 +83,2 @@ io_block_size: -1

default_array_save_base: True
convert_unknown_ndarray_subclasses: False
default_version: 1.6.0

@@ -163,27 +160,2 @@ io_block_size: -1

.. _convert_unknown_ndarray_subclasses:
convert_unknown_ndarray_subclasses
----------------------------------
Convert otherwise unhandled instances of subclasses of ndarray into
ndarrays prior to serialization.
Previous extension code allowed AsdfTypes to convert instances of subclasses
of supported types. Internally, the handling of ndarrays has been moved
from an AsdfType to a Converter which does not support converting
instances of subclasses unless they are explicitly listed. This means
that code that previously relied on asdf converting instances of subclasses
of ndarray into an ndarray will need to be updated to define a Converter
for the ndarray subclass or to request that support be added directly
in asdf (for subclasses in existing asdf dependencies).
With this setting enabled, asdf will continue to convert instances
of subclasses of ndarray but will issue a warning when an instance is
converted. This currently defaults to ``False`` and issues
a deprecation warning if enabled. In a future version of asdf
this setting will be removed.
Defaults to ``False``.
default_version

@@ -190,0 +162,0 @@ ---------------

@@ -9,2 +9,25 @@ .. currentmodule:: asdf

.. _whats_new_5.0.0:
5.0.0
=====
Asdf 5.0.0 removes some deprecated API.
Removed API
-----------
- The ``refresh_extension_manager`` argument is no longer supported for
`AsdfFile.schema_info` and `AsdfFile.info`. There should be no need
for forced refreshing.
- The ``url_mapping`` argument to ``asdf.schema.get_validator`` was removed.
This was a leftover from the legacy extension API and is no longer needed.
- The ``resolver`` argument to `asdf.schema.load_schema` was removed.
This was a leftover from the legacy extension API and is no longer needed.
- ``AsdfConfig.convert_unknown_ndarray_subclasses`` is removed. Please
implement an `asdf.extension.Converter` for any subclasses.
- Opening of "http://" and "https://" uris now requires fsspec to be installed.
- The bundled ``pytest_asdf`` plugin was removed. Please install
``pytest-asdf-plugin`` for a replacement.
.. _whats_new_4.0.0:

@@ -129,3 +152,3 @@

In asdf 3.0.0 a config attribute was added
`asdf.config.AsdfConfig.convert_unknown_ndarray_subclasses` that
``asdf.config.AsdfConfig.convert_unknown_ndarray_subclasses`` that
was enabled by default (to retain the behavior of the removed legacy

@@ -138,3 +161,3 @@ extension that handled ndarrays).

See :ref:`convert_unknown_ndarray_subclasses` for more details.
See ``convert_unknown_ndarray_subclasses`` for more details.

@@ -141,0 +164,0 @@ 3.0.0

Metadata-Version: 2.4
Name: asdf
Version: 4.5.0
Version: 5.0.0
Summary: Python implementation of the ASDF Standard

@@ -5,0 +5,0 @@ Author-email: The ASDF Developers <help@stsci.edu>

@@ -69,3 +69,2 @@ [project]

console_scripts = {asdftool = 'asdf._commands.main:main'}
pytest11 = {asdf_schema_tester = 'pytest_asdf.plugin'}

@@ -81,3 +80,3 @@ [build-system]

[tool.setuptools.packages.find]
include = ['asdf*', 'pytest_asdf*']
include = ['asdf*']
exclude = ['asdf/_jsonschema/json/*']

@@ -84,0 +83,0 @@

import importlib.util  # explicit submodule import: `import importlib` alone does not guarantee `importlib.util` is bound
import os
import pathlib
import warnings
from dataclasses import dataclass

import pytest
import yaml

# Avoid all imports of asdf at this level in order to avoid circular imports

# True when the standalone replacement plugin ("pytest_asdf_plugin") is
# installed; this legacy bundled plugin then disables itself entirely so the
# same options/collectors are not registered twice.
HAS_NEW_PLUGIN = importlib.util.find_spec("pytest_asdf_plugin") is not None
def pytest_addoption(parser):
    """Register the legacy ASDF schema-test ini options and CLI flag.

    Does nothing when the standalone ``pytest_asdf_plugin`` package is
    installed, so the same options are not registered twice.
    """
    if HAS_NEW_PLUGIN:
        return

    # Plain string-valued ini options.
    string_options = (
        ("asdf_schema_root", "Root path indicating where schemas are stored"),
        ("asdf_schema_skip_names", "Base names of files to skip in schema tests"),
        (
            "asdf_schema_skip_tests",
            "List of tests to skip, one per line, in format <schema path suffix>::<test name>",
        ),
        (
            "asdf_schema_xfail_tests",
            "List of tests to xfail, one per line, in format <schema path suffix>::<test name>",
        ),
        ("asdf_schema_skip_examples", "Base names of schemas whose examples should not be tested"),
    )
    for ini_name, ini_help in string_options:
        parser.addini(ini_name, ini_help)

    # Boolean ini options with explicit defaults.
    bool_options = (
        ("asdf_schema_tests_enabled", "Controls whether schema tests are enabled by default", False),
        ("asdf_schema_validate_default", "Set to true to enable validation of the schema 'default' property", True),
        ("asdf_schema_ignore_unrecognized_tag", "Set to true to disable warnings when tag serializers are missing", False),
    )
    for ini_name, ini_help, ini_default in bool_options:
        parser.addini(ini_name, ini_help, type="bool", default=ini_default)

    parser.addoption("--asdf-tests", action="store_true", help="Enable ASDF schema tests")
class AsdfSchemaFile(pytest.File):
    """pytest collector for a single ASDF schema ``.yaml`` file.

    Yields one ``test_schema`` item validating the schema itself, plus one
    ``test_example_<i>`` item per example found in the schema body (unless
    examples are skipped for this file).
    """

    @classmethod
    def from_parent(
        cls,
        parent,
        *,
        fspath,
        skip_examples=False,
        validate_default=True,
        ignore_unrecognized_tag=False,
        skip_tests=None,
        xfail_tests=None,
        **kwargs,
    ):
        """Build the collector, attaching per-file test configuration.

        Uses the modern pytest ``from_parent`` protocol when available and
        falls back to direct construction for pytest versions that predate it.
        """
        path = pathlib.Path(fspath)
        kwargs["path"] = path
        if hasattr(super(), "from_parent"):
            result = super().from_parent(parent, **kwargs)
        else:
            result = AsdfSchemaFile(path, parent)
        result.skip_examples = skip_examples
        result.validate_default = validate_default
        result.ignore_unrecognized_tag = ignore_unrecognized_tag
        result.skip_tests = [] if skip_tests is None else skip_tests
        result.xfail_tests = [] if xfail_tests is None else xfail_tests
        return result

    def _set_markers(self, item):
        # A "*" entry in the configured list applies the marker to every
        # test collected from this file.
        if item.name in self.skip_tests or "*" in self.skip_tests:
            item.add_marker(pytest.mark.skip)
        if item.name in self.xfail_tests or "*" in self.xfail_tests:
            item.add_marker(pytest.mark.xfail)

    def collect(self):
        # The schema-validity test is always collected.
        item = AsdfSchemaItem.from_parent(self, self.fspath, validate_default=self.validate_default, name="test_schema")
        self._set_markers(item)
        yield item
        if not self.skip_examples:
            for index, example in enumerate(self.find_examples_in_schema()):
                name = f"test_example_{index}"
                item = AsdfSchemaExampleItem.from_parent(
                    self,
                    self.fspath,
                    example,
                    index,
                    ignore_unrecognized_tag=self.ignore_unrecognized_tag,
                    name=name,
                )
                self._set_markers(item)
                yield item

    def find_examples_in_schema(self):
        """Yield every entry of every ``examples`` list found in the schema tree."""
        # Import here to avoid importing asdf at module level (circular imports).
        from asdf import treeutil

        with open(str(self.fspath), "rb") as fd:
            schema_tree = yaml.safe_load(fd)
        for node in treeutil.iter_tree(schema_tree):
            if isinstance(node, dict) and "examples" in node and isinstance(node["examples"], list):
                yield from node["examples"]
class AsdfSchemaItem(pytest.Item):
    """pytest item that checks a single schema file is itself a valid schema."""

    @classmethod
    def from_parent(cls, parent, schema_path, validate_default=True, **kwargs):
        # Modern pytest ``from_parent`` protocol, with a fallback to direct
        # construction for pytest versions that predate it.
        if hasattr(super(), "from_parent"):
            result = super().from_parent(parent, **kwargs)
        else:
            name = kwargs.pop("name")
            result = AsdfSchemaItem(name, parent, **kwargs)
        result.schema_path = schema_path
        result.validate_default = validate_default
        return result

    def runtest(self):
        # Import here to avoid importing asdf at module level (circular imports).
        from asdf import schema

        # warn inside test, we don't do this yet to allow time for downstream packages to adopt pytest-asdf-plugin
        warnings.warn("pytest_asdf is deprecated, install pytest_asdf_plugin instead", DeprecationWarning)
        # Make sure that each schema itself is valid.
        schema_tree = schema.load_schema(
            self.schema_path,
            resolve_references=True,
        )
        schema.check_schema(schema_tree, validate_default=self.validate_default)

    def reportinfo(self):
        return self.fspath, 0, ""
@dataclass
class SchemaExample:
    """A single example extracted from a schema's ``examples`` list.

    Attributes
    ----------
    description : str
        Human-readable description of the example ("" if none was given).
    example : str
        The example YAML snippet itself.
    _version : str
        Optional "asdf-standard-X.Y.Z" string naming the standard version
        the example targets; None means "use the default version".
    other : any
        Any extra entries after the first three (None if absent).
    """

    description: str
    example: str
    _version: str = None
    other: any = None

    @classmethod
    def from_schema(cls, example: list):
        """Build a SchemaExample from a raw ``examples`` list entry.

        Accepted shapes: [example], [description, example], or
        [description, version, example, *other].

        Raises
        ------
        RuntimeError
            If ``example`` is empty.
        """
        # Defaults hoisted here so the 1-element form does not hit an
        # unbound-local error at the return below.
        _version = None
        _other = None
        if len(example) == 1:
            _description = ""
            _example = example[0]
        elif len(example) == 2:
            _description = example[0]
            _example = example[1]
        elif len(example) > 2:
            _description = example[0]
            _example = example[2]
            _version = example[1]
            _other = example[3:] if len(example) > 3 else None
        else:
            msg = "Invalid example"
            raise RuntimeError(msg)
        return cls(_description, _example, _version, _other)

    @property
    def version(self):
        """Resolve the targeted ASDF standard version (default if unset)."""
        # Import here to avoid importing asdf at module level (circular imports).
        from asdf import versioning

        if self._version is None:
            return versioning.default_version
        version = self._version.lower().split("asdf-standard-")[1]
        return versioning.AsdfVersion(version)
class AsdfSchemaExampleItem(pytest.Item):
    """pytest item that round-trips one schema example and validates it."""

    @classmethod
    def from_parent(
        cls,
        parent,
        schema_path,
        example,
        example_index,
        ignore_unrecognized_tag=False,
        **kwargs,
    ):
        # Modern pytest ``from_parent`` protocol, with a fallback to direct
        # construction for pytest versions that predate it.
        if hasattr(super(), "from_parent"):
            result = super().from_parent(parent, **kwargs)
        else:
            name = kwargs.pop("name")
            result = AsdfSchemaExampleItem(name, parent, **kwargs)
        result.filename = str(schema_path)
        result.example = SchemaExample.from_schema(example)
        result.ignore_unrecognized_tag = ignore_unrecognized_tag
        return result

    def runtest(self):
        # Import here to avoid importing asdf at module level (circular imports).
        import asdf
        from asdf.testing.helpers import yaml_to_asdf

        # warn inside test, we don't do this yet to allow time for downstream packages to adopt pytest-asdf-plugin
        warnings.warn("pytest_asdf is deprecated, install pytest_asdf_plugin instead", DeprecationWarning)
        # check the example is valid
        buff = yaml_to_asdf("example: " + self.example.example.strip(), version=self.example.version)
        tagged_tree = asdf.util.load_yaml(buff, tagged=True)
        instance = asdf.AsdfFile(version=self.example.version)
        asdf.schema.validate(tagged_tree, instance, reading=True)

    def reportinfo(self):
        return self.fspath, 0, ""
def _parse_test_list(content):
result = {}
if isinstance(content, str):
content = content.split("\n")
for line in content:
line_ = line.strip()
if len(line_) > 0:
parts = line_.split("::", 1)
path_suffix = pathlib.Path(parts[0]).as_posix()
name = "*" if len(parts) == 1 else parts[-1]
if path_suffix not in result:
result[path_suffix] = []
result[path_suffix].append(name)
return result
def pytest_collect_file(file_path, parent):
    """Collect ``.yaml`` schema files found under the configured schema roots.

    Returns an :class:`AsdfSchemaFile` collector for the first schema root
    that contains ``file_path`` (unless the file's stem is in the skip list),
    or None when schema tests are disabled, no roots are configured, the new
    standalone plugin is installed, or the file does not match.
    """
    if HAS_NEW_PLUGIN:
        return None

    config = parent.config
    if not (config.getini("asdf_schema_tests_enabled") or config.getoption("asdf_tests")):
        return None

    schema_roots = config.getini("asdf_schema_root").split()
    if not schema_roots:
        return None

    skip_names = config.getini("asdf_schema_skip_names")
    skip_examples = config.getini("asdf_schema_skip_examples")
    validate_default = config.getini("asdf_schema_validate_default")
    ignore_unrecognized_tag = config.getini("asdf_schema_ignore_unrecognized_tag")
    skip_tests = _parse_test_list(config.getini("asdf_schema_skip_tests"))
    xfail_tests = _parse_test_list(config.getini("asdf_schema_xfail_tests"))

    # Resolve each configured root relative to the pytest rootdir.
    schema_roots = [os.path.join(str(config.rootpath), os.path.normpath(root)) for root in schema_roots]

    if file_path.suffix != ".yaml":
        return None

    for root in schema_roots:
        if not (str(file_path).startswith(root) and file_path.stem not in skip_names):
            continue
        posix_path = pathlib.Path(file_path).as_posix()
        # Gather per-file skip/xfail names from every matching path suffix.
        schema_skip_tests = [
            name for suffix, names in skip_tests.items() if posix_path.endswith(suffix) for name in names
        ]
        schema_xfail_tests = [
            name for suffix, names in xfail_tests.items() if posix_path.endswith(suffix) for name in names
        ]
        return AsdfSchemaFile.from_parent(
            parent,
            fspath=file_path,
            skip_examples=(file_path.stem in skip_examples),
            validate_default=validate_default,
            ignore_unrecognized_tag=ignore_unrecognized_tag,
            skip_tests=schema_skip_tests,
            xfail_tests=schema_xfail_tests,
        )
    return None