Latest Threat Research:SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains.Details
Socket
Book a DemoInstallSign in
Socket

asdf

Package Overview
Dependencies
Maintainers
8
Versions
74
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

asdf - npm Package Compare versions

Comparing version
5.0.0
to
5.1.0
+133
asdf/_tests/_regtests/test_1948.py
"""
Test that warn_on_failed_conversion works as expected for:
- lazy and non-lazy trees
- nested objects
- generator producing (yielding) converters
Broken out into a regtest since there is significant
test setup.
https://github.com/asdf-format/asdf/issues/1948
"""
import pytest
import asdf
# Tag URIs for the three custom test types.  These are module-level so the
# converter classes, the extension, and the tests can all refer to them.
test_dict_tag_uri = "asdf://somewhere.org/tags/test_dict-1.0.0"
failing_dict_tag_uri = "asdf://somewhere.org/tags/failing_dict-1.0.0"
failing_yield_dict_tag_uri = "asdf://somewhere.org/tags/failing_yield_dict-1.0.0"
class MyDict:
    """Trivial wrapper holding an arbitrary payload under ``.data``."""

    def __init__(self, data):
        # Store the payload as-is; converters below round-trip it.
        self.data = data
class FailingDict(MyDict):
    """Subclass whose registered converter always fails on read."""
class FailingYieldDict(MyDict):
    """Subclass whose registered converter fails via a generator on read."""
class MyDictConverter:
    """Converter that round-trips ``MyDict`` instances successfully."""

    tags = [test_dict_tag_uri]
    types = [MyDict]
    lazy = True

    def to_yaml_tree(self, obj, tag, ctx):
        # Serialize by emitting the wrapped payload directly.
        return obj.data

    def from_yaml_tree(self, node, tag, ctx):
        # Deserialize by re-wrapping the node.
        return MyDict(node)
class FailingDictConverter:
    """Converter whose read path always raises, to exercise failure handling."""

    tags = [failing_dict_tag_uri]
    types = [FailingDict]
    lazy = True

    def to_yaml_tree(self, obj, tag, ctx):
        return obj.data

    def from_yaml_tree(self, node, tag, ctx):
        # Reading a failing_dict tag is made to fail unconditionally so the
        # tests can observe warn_on_failed_conversion behavior.
        raise Exception("FailingDict failed")
class FailingYieldDictConverter:
    # Converter whose ``from_yaml_tree`` is a *generator function* — the
    # (unreachable) ``yield`` below is what makes it one.  Calling it returns
    # a generator, and the ``raise`` fires on the first advance.  This lets
    # the tests cover failure handling for generator-producing converters,
    # which asdf processes differently from plain converters.
    tags = [failing_yield_dict_tag_uri]
    types = [FailingYieldDict]
    lazy = True

    def to_yaml_tree(self, obj, tag, ctx):
        return obj.data

    def from_yaml_tree(self, node, tag, ctx):
        raise Exception("FailingYieldDict failed")
        # Unreachable on purpose: its presence turns this function into a
        # generator function.  Do not remove.
        yield {}
class TestExtension:
    """Minimal asdf extension bundling the three test converters and tags."""

    extension_uri = "asdf://somewhere.org/extensions/minimum-1.0.0"
    converters = [MyDictConverter(), FailingDictConverter(), FailingYieldDictConverter()]
    tags = [test_dict_tag_uri, failing_dict_tag_uri, failing_yield_dict_tag_uri]
@pytest.fixture()
def enable_test_extension():
    """Register ``TestExtension`` inside an isolated asdf config context."""
    with asdf.config_context() as config:
        config.add_extension(TestExtension())
        # Keep the config context (and thus the extension) active for the
        # duration of the test.
        yield
def test_failed_conversion_warns(enable_test_extension):
    """Failed conversions warn and leave raw tagged nodes when enabled.

    With ``warn_on_failed_conversion`` on, nodes whose converters raise are
    returned as ``TaggedDict`` objects instead of aborting the load.
    """
    tree = {
        "test_dict": MyDict({"failing_dict": FailingDict({"a": 1})}),
        "failing_yield_dict": FailingYieldDict({"b": 2}),
    }
    serialized = asdf.dumps(tree)
    with asdf.config_context() as config:
        config.warn_on_failed_conversion = True
        with (
            pytest.warns(asdf.exceptions.AsdfConversionWarning, match="FailingDict failed"),
            pytest.warns(asdf.exceptions.AsdfConversionWarning, match="FailingYieldDict failed"),
        ):
            loaded = asdf.loads(serialized)
        # The outer object converts normally ...
        outer = loaded["test_dict"]
        assert isinstance(outer, MyDict)
        # ... while the failing nested node is left as a raw tagged dict
        # whose tag and contents are preserved.
        nested = loaded["test_dict"].data["failing_dict"]
        assert isinstance(nested, asdf.tagged.TaggedDict)
        assert nested._tag == failing_dict_tag_uri
        assert nested["a"] == 1
        # The generator-based failure is likewise left tagged.
        yielded = loaded["failing_yield_dict"]
        assert isinstance(yielded, asdf.tagged.TaggedDict)
        assert yielded._tag == failing_yield_dict_tag_uri
        assert yielded["b"] == 2
def test_lazy_failed_conversion_warns(enable_test_extension, tmp_path):
    """With a lazy tree, conversion-failure warnings fire at node access time.

    Same scenario as ``test_failed_conversion_warns`` but with
    ``lazy_tree`` enabled, so each warning is raised only when the failing
    node is first accessed.
    """
    file_path = tmp_path / "test.asdf"
    tree = {
        "test_dict": MyDict({"failing_dict": FailingDict({"a": 1})}),
        "failing_yield_dict": FailingYieldDict({"b": 2}),
    }
    asdf.dump(tree, file_path)
    with asdf.config_context() as config:
        config.warn_on_failed_conversion = True
        config.lazy_tree = True
        with asdf.open(file_path) as handle:
            outer = handle["test_dict"]
            assert isinstance(outer, MyDict)
            # Lazy conversion: the warning is emitted when the failing node
            # is accessed, not when the file is opened.
            with pytest.warns(asdf.exceptions.AsdfConversionWarning, match="FailingDict failed"):
                nested = handle["test_dict"].data["failing_dict"]
            with pytest.warns(asdf.exceptions.AsdfConversionWarning, match="FailingYieldDict failed"):
                yielded = handle["failing_yield_dict"]
            assert isinstance(nested, asdf.tagged.TaggedDict)
            assert nested._tag == failing_dict_tag_uri
            assert nested["a"] == 1
            assert isinstance(yielded, asdf.tagged.TaggedDict)
            assert yielded._tag == failing_yield_dict_tag_uri
            assert yielded["b"] == 2
+2
-2
Metadata-Version: 2.4
Name: asdf
Version: 5.0.0
Version: 5.1.0
Summary: Python implementation of the ASDF Standard
Author-email: The ASDF Developers <help@stsci.edu>
Author: The ASDF Developers
Project-URL: documentation, https://asdf.readthedocs.io/en/stable/

@@ -7,0 +7,0 @@ Project-URL: repository, https://github.com/asdf-format/asdf

@@ -772,2 +772,3 @@ CHANGES.rst

asdf/_tests/_regtests/test_1738.py
asdf/_tests/_regtests/test_1948.py
asdf/_tests/commands/__init__.py

@@ -774,0 +775,0 @@ asdf/_tests/commands/test_defragment.py

@@ -240,3 +240,7 @@ import copy

"""
if "history" not in tree or not isinstance(tree["history"], dict) or "extensions" not in tree["history"]:
if (
"history" not in tree
or not isinstance(tree["history"], (dict, lazy_nodes.AsdfDictNode))
or "extensions" not in tree["history"]
):
return

@@ -243,0 +247,0 @@

@@ -68,4 +68,4 @@ """

path = util._patched_urllib_parse.urlparse(uri).path
dirname, filename = os.path.split(path)
_, filename = os.path.split(path)
filename = os.path.splitext(filename)[0] + f"{index:04d}.asdf"
return filename

@@ -70,3 +70,3 @@ """

if output is None:
base, ext = os.path.splitext(input_)
base, _ = os.path.splitext(input_)
output = base + "_all" + ".asdf"

@@ -125,5 +125,5 @@ with asdf.open(input_) as ff:

if output is None:
base, ext = os.path.splitext(input_)
base, _ = os.path.splitext(input_)
output = base + "_exploded" + ".asdf"
with asdf.open(input_) as ff:
ff.write_to(output, all_array_storage="external")

@@ -53,3 +53,3 @@ import argparse

def main_from_args(args):
parser, subparsers = make_argparser()
parser, _ = make_argparser()

@@ -56,0 +56,0 @@ args = parser.parse_args(args)

@@ -10,2 +10,3 @@ import numpy as np

"tag:stsci.edu:asdf/core/integer-1.1.0",
"tag:stsci.edu:asdf/core/integer-1.2.0",
]

@@ -12,0 +13,0 @@ types = ["asdf.tags.core.integer.IntegerType"]

@@ -10,2 +10,3 @@ import numpy as np

"tag:stsci.edu:asdf/core/ndarray-1.1.0",
"tag:stsci.edu:asdf/core/ndarray-1.2.0",
]

@@ -12,0 +13,0 @@ types = [

from asdf.extension import ManifestExtension
from asdf.versioning import get_supported_core_schema_versions

@@ -41,12 +42,5 @@ from ._converters.complex import ComplexConverter

MANIFEST_URIS = [
"asdf://asdf-format.org/core/manifests/core-1.0.0",
"asdf://asdf-format.org/core/manifests/core-1.1.0",
"asdf://asdf-format.org/core/manifests/core-1.2.0",
"asdf://asdf-format.org/core/manifests/core-1.3.0",
"asdf://asdf-format.org/core/manifests/core-1.4.0",
"asdf://asdf-format.org/core/manifests/core-1.5.0",
"asdf://asdf-format.org/core/manifests/core-1.6.0",
f"asdf://asdf-format.org/core/manifests/core-{version}" for version in get_supported_core_schema_versions()
]
EXTENSIONS = [

@@ -53,0 +47,0 @@ ManifestExtension.from_uri(

@@ -364,3 +364,3 @@ import re

# but instead using a different node for traversal.
t_node, traversable, from_converter = _make_traversable(node, extension_manager)
t_node, traversable, _ = _make_traversable(node, extension_manager)
if (is_container(node) or traversable) and id(node) in seen:

@@ -367,0 +367,0 @@ info = NodeSchemaInfo(

@@ -172,3 +172,3 @@ import io

fd = generic_io.get_file(raw_fd, mode="rw")
with pytest.raises(RuntimeError, match="Block used size.*"):
with pytest.raises(RuntimeError, match=r"Block used size.*"):
bio.write_block(fd, data, allocated_size=0)

@@ -194,3 +194,3 @@ assert fd.tell() == 0

with pytest.raises(ValueError, match="write_block received offset.*"):
with pytest.raises(ValueError, match=r"write_block received offset.*"):
bio.write_block(fd, data, offset=0)

@@ -228,3 +228,3 @@

_, _, _, callback = bio.read_block(fd, offset=0, lazy_load=True)
with pytest.raises(OSError, match="ASDF file has already been closed. Can not get the data."):
with pytest.raises(OSError, match=r"ASDF file has already been closed\. Can not get the data\."):
callback()

@@ -236,3 +236,3 @@

fd = generic_io.get_file(io.BytesIO(), mode="rw")
with pytest.raises(ValueError, match="Data must be of.*"):
with pytest.raises(ValueError, match=r"Data must be of.*"):
bio.write_block(fd, data, stream=True)

@@ -344,3 +344,3 @@

fd.seek(len(constants.INDEX_HEADER))
with pytest.raises(BlockIndexError, match="Failed to read block index.*"):
with pytest.raises(BlockIndexError, match=r"Failed to read block index.*"):
assert bio.read_block_index(fd) == values

@@ -347,0 +347,0 @@

@@ -71,3 +71,3 @@ import copy

def test_invalid_storage_type_init(invalid_storage):
with pytest.raises(ValueError, match="array_storage must be one of.*"):
with pytest.raises(ValueError, match=r"array_storage must be one of.*"):
Options(invalid_storage)

@@ -79,3 +79,3 @@

o = Options("internal")
with pytest.raises(ValueError, match="array_storage must be one of.*"):
with pytest.raises(ValueError, match=r"array_storage must be one of.*"):
o.storage_type = invalid_storage

@@ -87,3 +87,3 @@

o = Options("internal")
with pytest.raises(ValueError, match="Invalid compression.*"):
with pytest.raises(ValueError, match=r"Invalid compression.*"):
o.compression = invalid_compression

@@ -94,3 +94,3 @@

def test_invalid_compression_init(invalid_compression):
with pytest.raises(ValueError, match="Invalid compression.*"):
with pytest.raises(ValueError, match=r"Invalid compression.*"):
Options("internal", invalid_compression)

@@ -97,0 +97,0 @@

@@ -143,3 +143,3 @@ import contextlib

else:
with pytest.raises(ValueError, match="Header size.*"):
with pytest.raises(ValueError, match=r"Header size.*"):
check(read_blocks(fd, lazy_load=True))

@@ -172,3 +172,3 @@

fn = tmp_path / "test.bin"
with gen_blocks(fn=fn, with_index=True) as (fd, check):
with gen_blocks(fn=fn, with_index=True) as (fd, _):
blocks = read_blocks(fd, lazy_load=True)

@@ -195,5 +195,5 @@ blk = blocks[1]

if validate_checksums:
with pytest.raises(ValueError, match=".* does not match given checksum"):
with pytest.raises(ValueError, match=r".* does not match given checksum"):
read_blocks(fd, lazy_load=False, validate_checksums=validate_checksums)[0].data
else:
read_blocks(fd, lazy_load=False, validate_checksums=validate_checksums)[0].data

@@ -20,4 +20,4 @@ import io

f.close()
with pytest.raises(IOError, match="I/O operation on closed file."):
with pytest.raises(IOError, match=r"I/O operation on closed file\."):
f.read_into_array(10)
assert b.tell() == 0

@@ -99,2 +99,2 @@ import importlib.resources

cfg.lazy_tree = request.param
yield
yield cfg.lazy_tree

@@ -20,2 +20,10 @@ import contextlib

@pytest.fixture
def ndarray_tag():
af = asdf.AsdfFile()
cvt = af.extension_manager.get_converter_for_type(np.ndarray)
full_tag = cvt.select_tag(np.zeros(0), af)
return full_tag.removeprefix("tag:stsci.edu:asdf/")
# These custom types and the custom extension are here purely for the purpose

@@ -160,3 +168,3 @@ # of testing NDArray objects and making sure that they can be validated as part

def test_byteorder(tmp_path):
def test_byteorder():
tree = {

@@ -212,3 +220,3 @@ "bigendian": np.arange(0, 10, dtype=">f8"),

def test_table_inline(tmp_path):
def test_table_inline():
table = np.array(

@@ -290,5 +298,5 @@ [(0, 1, (2, 3)), (4, 5, (6, 7))],

def test_copy_inline():
yaml = """
x0: !core/ndarray-1.1.0
def test_copy_inline(ndarray_tag):
yaml = f"""
x0: !{ndarray_tag}
data: [-1.0, 1.0]

@@ -305,3 +313,3 @@ """

def test_table(tmp_path):
def test_table():
table = np.array([(0, 1, (2, 3)), (4, 5, (6, 7))], dtype=[("MINE", np.int8), ("", "<f8"), ("arr", ">i4", (2,))])

@@ -329,3 +337,3 @@

def test_table_nested_fields(tmp_path):
def test_table_nested_fields():
table = np.array(

@@ -383,4 +391,4 @@ [(0, (1, 2)), (4, (5, 6)), (7, (8, 9))],

def test_inline_bare():
content = "arr: !core/ndarray-1.1.0 [[1, 2, 3, 4], [5, 6, 7, 8]]"
def test_inline_bare(ndarray_tag):
content = f"arr: !{ndarray_tag} [[1, 2, 3, 4], [5, 6, 7, 8]]"
buff = helpers.yaml_to_asdf(content)

@@ -400,3 +408,3 @@

)
def test_mask_roundtrip(mask, tmp_path):
def test_mask_roundtrip(mask):
array = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 0]])

@@ -417,3 +425,3 @@ tree = {

def test_len_roundtrip(tmp_path):
def test_len_roundtrip():
sequence = np.arange(0, 10, dtype=int)

@@ -427,5 +435,5 @@ tree = {"sequence": sequence}

def test_mask_arbitrary():
content = """
arr: !core/ndarray-1.1.0
def test_mask_arbitrary(ndarray_tag):
content = f"""
arr: !{ndarray_tag}
data: [[1, 2, 3, 1234], [5, 6, 7, 8]]

@@ -440,5 +448,5 @@ mask: 1234

def test_mask_nan():
content = """
arr: !core/ndarray-1.1.0
def test_mask_nan(ndarray_tag):
content = f"""
arr: !{ndarray_tag}
data: [[1, 2, 3, .NaN], [5, 6, 7, 8]]

@@ -453,3 +461,3 @@ mask: .NaN

def test_string(tmp_path):
def test_string():
tree = {

@@ -465,3 +473,3 @@ "ascii": np.array([b"foo", b"bar", b"baz"]),

def test_string_table(tmp_path):
def test_string_table():
tree = {"table": np.array([(b"foo", "სამეცნიერო", "42", "53.0")])}

@@ -474,4 +482,4 @@

def test_inline_string():
content = "arr: !core/ndarray-1.1.0 ['a', 'b', 'c']"
def test_inline_string(ndarray_tag):
content = f"arr: !{ndarray_tag} ['a', 'b', 'c']"
buff = helpers.yaml_to_asdf(content)

@@ -483,5 +491,5 @@

def test_inline_structured():
content = """
arr: !core/ndarray-1.1.0
def test_inline_structured(ndarray_tag):
content = f"""
arr: !{ndarray_tag}
datatype: [['ascii', 4], uint16, uint16, ['ascii', 4]]

@@ -526,3 +534,3 @@ data: [[M110, 110, 205, And],

def test_unicode_to_list(tmp_path):
def test_unicode_to_list():
arr = np.array(["", "𐀠"], dtype="<U")

@@ -643,8 +651,8 @@ tree = {"unicode": arr}

def test_mask_datatype(tmp_path):
content = """
arr: !core/ndarray-1.1.0
def test_mask_datatype(ndarray_tag):
content = f"""
arr: !{ndarray_tag}
data: [1, 2, 3]
dtype: int32
mask: !core/ndarray-1.1.0
mask: !{ndarray_tag}
data: [true, true, false]

@@ -658,8 +666,8 @@ """

def test_invalid_mask_datatype(tmp_path):
content = """
arr: !core/ndarray-1.1.0
def test_invalid_mask_datatype(ndarray_tag):
content = f"""
arr: !{ndarray_tag}
data: [1, 2, 3]
dtype: int32
mask: !core/ndarray-1.1.0
mask: !{ndarray_tag}
data: ['a', 'b', 'c']

@@ -679,6 +687,6 @@ """

@with_custom_extension()
def test_ndim_validation(tmp_path):
content = """
def test_ndim_validation(ndarray_tag):
content = f"""
obj: !<tag:nowhere.org:custom/ndim-1.0.0>
a: !core/ndarray-1.1.0
a: !{ndarray_tag}
data: [1, 2, 3]

@@ -696,5 +704,5 @@ """

content = """
content = f"""
obj: !<tag:nowhere.org:custom/ndim-1.0.0>
a: !core/ndarray-1.1.0
a: !{ndarray_tag}
data: [[1, 2, 3]]

@@ -707,5 +715,5 @@ """

content = """
content = f"""
obj: !<tag:nowhere.org:custom/ndim-1.0.0>
a: !core/ndarray-1.1.0
a: !{ndarray_tag}
shape: [1, 3]

@@ -719,5 +727,5 @@ data: [[1, 2, 3]]

content = """
content = f"""
obj: !<tag:nowhere.org:custom/ndim-1.0.0>
b: !core/ndarray-1.1.0
b: !{ndarray_tag}
data: [1, 2, 3]

@@ -730,5 +738,5 @@ """

content = """
content = f"""
obj: !<tag:nowhere.org:custom/ndim-1.0.0>
b: !core/ndarray-1.1.0
b: !{ndarray_tag}
data: [[1, 2, 3]]

@@ -741,5 +749,5 @@ """

content = """
content = f"""
obj: !<tag:nowhere.org:custom/ndim-1.0.0>
b: !core/ndarray-1.1.0
b: !{ndarray_tag}
data: [[[1, 2, 3]]]

@@ -759,6 +767,6 @@ """

@with_custom_extension()
def test_datatype_validation(tmp_path):
content = """
def test_datatype_validation(ndarray_tag):
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
a: !core/ndarray-1.1.0
a: !{ndarray_tag}
data: [1, 2, 3]

@@ -772,5 +780,5 @@ datatype: float32

content = """
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
a: !core/ndarray-1.1.0
a: !{ndarray_tag}
data: [1, 2, 3]

@@ -789,5 +797,5 @@ datatype: float64

content = """
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
a: !core/ndarray-1.1.0
a: !{ndarray_tag}
data: [1, 2, 3]

@@ -801,5 +809,5 @@ datatype: int16

content = """
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
b: !core/ndarray-1.1.0
b: !{ndarray_tag}
data: [1, 2, 3]

@@ -818,5 +826,5 @@ datatype: int16

content = """
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
a: !core/ndarray-1.1.0
a: !{ndarray_tag}
data: [[1, 'a'], [2, 'b'], [3, 'c']]

@@ -850,6 +858,6 @@ datatype:

@with_custom_extension()
def test_structured_datatype_validation(tmp_path):
content = """
def test_structured_datatype_validation(ndarray_tag):
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
c: !core/ndarray-1.1.0
c: !{ndarray_tag}
data: [[1, 'a'], [2, 'b'], [3, 'c']]

@@ -868,5 +876,5 @@ datatype:

content = """
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
c: !core/ndarray-1.1.0
c: !{ndarray_tag}
data: [[1, 'a'], [2, 'b'], [3, 'c']]

@@ -890,5 +898,5 @@ datatype:

content = """
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
c: !core/ndarray-1.1.0
c: !{ndarray_tag}
data: [[1, 'a', 0], [2, 'b', 1], [3, 'c', 2]]

@@ -913,5 +921,5 @@ datatype:

content = """
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
c: !core/ndarray-1.1.0
c: !{ndarray_tag}
data: [1, 2, 3]

@@ -929,5 +937,5 @@ """

content = """
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
d: !core/ndarray-1.1.0
d: !{ndarray_tag}
data: [[1, 'a'], [2, 'b'], [3, 'c']]

@@ -950,5 +958,5 @@ datatype:

content = """
content = f"""
obj: !<tag:nowhere.org:custom/datatype-1.0.0>
d: !core/ndarray-1.1.0
d: !{ndarray_tag}
data: [[1, 'a'], [2, 'b'], [3, 'c']]

@@ -975,5 +983,5 @@ datatype:

def test_inline_shape_mismatch():
content = """
arr: !core/ndarray-1.1.0
def test_inline_shape_mismatch(ndarray_tag):
content = f"""
arr: !{ndarray_tag}
data: [1, 2, 3]

@@ -989,3 +997,3 @@ shape: [2]

def test_broadcasted_array(tmp_path):
def test_broadcasted_array():
attrs = np.broadcast_arrays(np.array([10, 20]), np.array(10), np.array(10))

@@ -997,3 +1005,3 @@ tree = {"one": attrs[1]} # , 'two': attrs[1], 'three': attrs[2]}

def test_broadcasted_offset_array(tmp_path):
def test_broadcasted_offset_array():
base = np.arange(10)

@@ -1007,3 +1015,3 @@ offset = base[5:]

def test_non_contiguous_base_array(tmp_path):
def test_non_contiguous_base_array():
base = np.arange(60).reshape(5, 4, 3).transpose(2, 0, 1) * 1

@@ -1016,3 +1024,3 @@ contiguous = base.transpose(1, 2, 0)

def test_fortran_order(tmp_path):
def test_fortran_order():
array = np.array([[11, 12, 13], [21, 22, 23]], order="F", dtype=np.int64)

@@ -1019,0 +1027,0 @@ tree = {"data": array}

@@ -0,1 +1,2 @@

import contextlib
import copy

@@ -283,2 +284,3 @@ import getpass

@pytest.mark.parametrize("strict", [True, False])
@pytest.mark.parametrize(

@@ -295,3 +297,3 @@ ("installed", "extension", "warns"),

)
def test_extension_version_check(installed, extension, warns):
def test_extension_version_check(installed, extension, warns, strict, with_lazy_tree, tmp_path):
class FooExtension:

@@ -302,29 +304,33 @@ extension_uri = "asdf://somewhere.org/extensions/foo-1.0.0"

with config_context() as config:
if installed is not None:
config.add_extension(proxy)
af = asdf.AsdfFile()
test_filename = tmp_path / "test.asdf"
tree = {
"history": {
"extensions": [
asdf.tags.core.ExtensionMetadata(
extension_uri=FooExtension.extension_uri,
software=asdf.tags.core.Software(name="foo", version=extension),
),
],
asdf.dump(
{
"history": {
"extensions": [
asdf.tags.core.ExtensionMetadata(
extension_class="something",
extension_uri=FooExtension.extension_uri,
software=asdf.tags.core.Software(name="foo", version=extension),
),
],
},
},
}
test_filename,
)
if warns:
with pytest.warns(AsdfPackageVersionWarning, match=r"File was created with"):
af._check_extensions(tree)
if strict:
ctx = pytest.raises(RuntimeError, match=r"was created with extension")
else:
ctx = pytest.warns(AsdfPackageVersionWarning, match=r"was created with extension")
else:
ctx = contextlib.nullcontext()
with pytest.raises(RuntimeError, match=r"^File was created with"):
af._check_extensions(tree, strict=True)
with config_context() as config:
if installed is not None:
config.add_extension(proxy)
with ctx, asdf.open(test_filename, strict_extension_check=strict):
pass
else:
af._check_extensions(tree)
@pytest.mark.parametrize(

@@ -331,0 +337,0 @@ ("installed", "extension", "warns"),

@@ -1040,3 +1040,3 @@ import collections

# as the module hasn't been loaded, the converter shouldn't be found
with pytest.raises(KeyError, match="No support available for Python type 'mailbox.Mailbox'"):
with pytest.raises(KeyError, match=r"No support available for Python type 'mailbox\.Mailbox'"):
extension_manager.get_converter_for_type(typ)

@@ -1043,0 +1043,0 @@

@@ -105,3 +105,3 @@ import datetime

ff = asdf.AsdfFile()
ff = asdf.AsdfFile(version="1.6.0")
ff.write_to(file_path)

@@ -108,0 +108,0 @@

@@ -740,3 +740,3 @@ import contextlib

with tag_reference_extension():
buff = yaml_to_asdf(yaml)
buff = yaml_to_asdf(yaml, version="1.6.0")
with asdf.open(buff) as ff:

@@ -802,3 +802,3 @@ custom = ff.tree["custom"]

buff = yaml_to_asdf(yaml)
buff = yaml_to_asdf(yaml, version="1.6.0")
with asdf.open(buff) as ff:

@@ -805,0 +805,0 @@ a = ff.tree["custom"].a

@@ -14,14 +14,15 @@ import pytest

yaml = """
undefined_tag = "tag:nowhere.org:custom/also_undefined-1.3.0"
yaml = f"""
undefined_data:
!<tag:nowhere.org:custom/undefined_tag-1.0.0>
- 5
- {'message': 'there is no tag'}
- 'message': 'there is no tag'
- !core/ndarray-1.1.0
[[1, 2, 3], [4, 5, 6]]
- !<tag:nowhere.org:custom/also_undefined-1.3.0>
- !<{undefined_tag}>
- !core/ndarray-1.1.0 [[7],[8],[9],[10]]
- !core/complex-1.0.0 3.14j
"""
buff = yaml_to_asdf(yaml)
buff = yaml_to_asdf(yaml, version="1.6.0")
with pytest.warns(Warning) as warning:

@@ -34,2 +35,6 @@ afile = asdf.open(buff)

assert missing[1] == {"message": "there is no tag"}
assert isinstance(missing[3], asdf.tagged.TaggedList)
assert missing[3]._tag == undefined_tag
if with_lazy_tree:
assert isinstance(missing[3].data, asdf.lazy_nodes.AsdfListNode)
assert (missing[2] == array([[1, 2, 3], [4, 5, 6]])).all()

@@ -36,0 +41,0 @@ assert (missing[3][0] == array([[7], [8], [9], [10]])).all()

@@ -5,3 +5,2 @@ from itertools import combinations

AsdfVersion,
asdf_standard_development_version,
default_version,

@@ -16,6 +15,2 @@ supported_versions,

def test_development_is_not_default():
assert default_version != asdf_standard_development_version
def test_version_constructor():

@@ -22,0 +17,0 @@ ver0 = AsdfVersion("1.0.0")

@@ -31,5 +31,5 @@ # file generated by setuptools-scm

__version__ = version = '5.0.0'
__version_tuple__ = version_tuple = (5, 0, 0)
__version__ = version = '5.1.0'
__version_tuple__ = version_tuple = (5, 1, 0)
__commit_id__ = commit_id = 'g10f536301'
__commit_id__ = commit_id = 'g740c807b2'

@@ -29,2 +29,3 @@ """

DEFAULT_LAZY_TREE = False
DEFAULT_WARN_ON_FAILED_CONVERSION = False

@@ -53,2 +54,3 @@

self._lazy_tree = DEFAULT_LAZY_TREE
self._warn_on_failed_conversion = DEFAULT_WARN_ON_FAILED_CONVERSION

@@ -457,2 +459,21 @@ self._lock = threading.RLock()

@property
def warn_on_failed_conversion(self):
"""
Get configuration that controls if errors during
conversion are converted to warnings.
Enabling this can be helpful when opening old
files that contain tags that are no longer supported.
Returns
-------
bool
"""
return self._warn_on_failed_conversion
@warn_on_failed_conversion.setter
def warn_on_failed_conversion(self, value):
self._warn_on_failed_conversion = value
def __repr__(self):

@@ -471,2 +492,3 @@ return (

f" lazy_tree: {self.lazy_tree}\n"
f" warn_on_failed_conversion: {self.warn_on_failed_conversion}\n"
">"

@@ -473,0 +495,0 @@ )

@@ -12,2 +12,3 @@ """

from . import tagged, treeutil, yamlutil
from .config import get_config
from .exceptions import AsdfConversionWarning, AsdfLazyReferenceError

@@ -113,3 +114,7 @@ from .extension._serialization_context import BlockAccess

"""
if isinstance(node, list):
if isinstance(node, tagged.TaggedList):
return tagged.TaggedList(data=_to_lazy_node(node.data, af_ref), tag=node._tag)
elif isinstance(node, tagged.TaggedDict):
return tagged.TaggedDict(data=_to_lazy_node(node.data, af_ref), tag=node._tag)
elif isinstance(node, list):
return AsdfListNode(node, af_ref)

@@ -184,2 +189,4 @@ elif isinstance(node, collections.OrderedDict):

return value
if isinstance(value, (tagged.TaggedDict, tagged.TaggedList)) and isinstance(value.data, _AsdfNode):
return value
if not isinstance(value, tagged.Tagged) and type(value) not in _base_type_to_node_map:

@@ -213,3 +220,10 @@ return value

sctx = af._create_serialization_context(BlockAccess.READ)
obj = converter.from_yaml_tree(data, tag, sctx)
try:
obj = converter.from_yaml_tree(data, tag, sctx)
except Exception as err:
if get_config().warn_on_failed_conversion:
warnings.warn(f"A node failed to convert with: {err}", AsdfConversionWarning)
obj = _to_lazy_node(value, self._af_ref)
else:
raise
sctx.assign_object(obj)

@@ -216,0 +230,0 @@ sctx.assign_blocks()

@@ -11,2 +11,10 @@ """

try:
from asdf_standard._versioning import get_supported_core_schema_versions
except ImportError:
def get_supported_core_schema_versions():
return ("1.0.0", "1.1.0", "1.2.0", "1.3.0", "1.4.0", "1.5.0", "1.6.0")
_yaml_base_loader = yaml.CSafeLoader if getattr(yaml, "__with_libyaml__", None) else yaml.SafeLoader

@@ -87,20 +95,7 @@

supported_versions = [
AsdfVersion("1.0.0"),
AsdfVersion("1.1.0"),
AsdfVersion("1.2.0"),
AsdfVersion("1.3.0"),
AsdfVersion("1.4.0"),
AsdfVersion("1.5.0"),
AsdfVersion("1.6.0"),
]
supported_versions = tuple(AsdfVersion(version) for version in get_supported_core_schema_versions())
default_version = supported_versions[-1]
default_version = AsdfVersion("1.6.0")
# This is the ASDF core schemas version that is currently in development
# it is possible that breaking changes will be made to this version.
asdf_standard_development_version = AsdfVersion("1.7.0")
# This is the ASDF core schemas version at which the format of the history

@@ -107,0 +102,0 @@ # field changed to include extension metadata.

@@ -9,2 +9,3 @@ import warnings

from . import schema, tagged, treeutil, util
from .config import get_config
from .constants import STSCI_SCHEMA_TAG_BASE, YAML_TAG_PREFIX

@@ -326,2 +327,3 @@ from .exceptions import AsdfConversionWarning, AsdfSerializationError

extension_manager = _serialization_context.extension_manager
cfg = get_config()

@@ -338,6 +340,24 @@ def _walker(node):

converter = extension_manager.get_converter_for_tag(tag)
obj = converter.from_yaml_tree(node.data, tag, _serialization_context)
try:
obj = converter.from_yaml_tree(node.data, tag, _serialization_context)
except Exception as err:
if cfg.warn_on_failed_conversion:
warnings.warn(f"A node failed to convert with: {err}", AsdfConversionWarning)
obj = node
else:
raise
_serialization_context.assign_object(obj)
_serialization_context.assign_blocks()
_serialization_context._mark_extension_used(converter.extension)
if isinstance(obj, GeneratorType) and cfg.warn_on_failed_conversion:
# wrap the generator to catch any errors
def wrapped_generator(generator, node):
try:
yield from generator
except Exception as err:
warnings.warn(f"A node failed to convert with: {err}", AsdfConversionWarning)
yield node
return wrapped_generator(obj, node)
return obj

@@ -344,0 +364,0 @@

@@ -0,1 +1,26 @@

5.1.0 (2025-11-06)
==================
Bugfix
------
- Fix bug preventing extension checking when opening a file with lazy_tree
enabled. (`#1979 <https://github.com/asdf-format/asdf/pull/1979>`_)
Feature
-------
- Add support for registering unstable/development versions of core extensions
if the ASDF_UNSTABLE_CORE_SCHEMAS environment variable is set.
Writing files with unstable/development extensions is discouraged as schema
changes may make these files unreadable in the future. (`#1962
<https://github.com/asdf-format/asdf/pull/1962>`_)
- Add ``warn_on_failed_conversion`` to ``AsdfConfig``.
Enabling this option will convert any exceptions raised during conversion to
warnings.
This can be helpful when opening old files with unsupported tags. (`#1983
<https://github.com/asdf-format/asdf/pull/1983>`_)
5.0.0 (2025-09-10)

@@ -2,0 +27,0 @@ ==================

@@ -36,3 +36,3 @@ .. currentmodule:: asdf.config

>>> import asdf
>>> asdf.get_config()
>>> asdf.get_config() # doctest: +ELLIPSIS
<AsdfConfig

@@ -44,3 +44,3 @@ array_inline_threshold: None

default_array_save_base: True
default_version: 1.6.0
default_version: ...
io_block_size: -1

@@ -50,2 +50,3 @@ legacy_fill_schema_defaults: True

lazy_tree: False
warn_on_failed_conversion: False
>

@@ -61,3 +62,3 @@

>>> import asdf
>>> with asdf.config_context() as config:
>>> with asdf.config_context() as config: # doctest: +ELLIPSIS
... config.validate_on_read = False

@@ -72,3 +73,3 @@ ... asdf.get_config()

default_array_save_base: True
default_version: 1.6.0
default_version: ...
io_block_size: -1

@@ -78,4 +79,5 @@ legacy_fill_schema_defaults: True

lazy_tree: False
warn_on_failed_conversion: False
>
>>> asdf.get_config()
>>> asdf.get_config() # doctest: +ELLIPSIS
<AsdfConfig

@@ -87,3 +89,3 @@ array_inline_threshold: None

default_array_save_base: True
default_version: 1.6.0
default_version: ...
io_block_size: -1

@@ -93,2 +95,3 @@ legacy_fill_schema_defaults: True

lazy_tree: False
warn_on_failed_conversion: False
>

@@ -206,2 +209,16 @@

lazy_tree
---------
Flag to control if the tree is "lazy". See the ``lazy_tree`` argument to
`asdf.open` for more details.
warn_on_failed_conversion
-------------------------
Flag to control if any errors raised during conversion of a tagged object to
a custom object are caught and turned into warnings. It may be helpful to
enable this option when opening old files with tags that are no longer supported
in the current environment.
Additional AsdfConfig features

@@ -208,0 +225,0 @@ ==============================

@@ -31,3 +31,3 @@ import datetime

project = configuration["name"]
author = f"{configuration['authors'][0]['name']} <{configuration['authors'][0]['email']}>"
author = configuration["authors"][0]["name"]
copyright = f"{datetime.datetime.now().year}, {author}"

@@ -34,0 +34,0 @@

Metadata-Version: 2.4
Name: asdf
Version: 5.0.0
Version: 5.1.0
Summary: Python implementation of the ASDF Standard
Author-email: The ASDF Developers <help@stsci.edu>
Author: The ASDF Developers
Project-URL: documentation, https://asdf.readthedocs.io/en/stable/

@@ -7,0 +7,0 @@ Project-URL: repository, https://github.com/asdf-format/asdf

@@ -6,3 +6,3 @@ [project]

license-files = ['LICENSE']
authors = [{ name = 'The ASDF Developers', email = 'help@stsci.edu' }]
authors = [{ name = 'The ASDF Developers' }]
requires-python = '>=3.9'

@@ -174,3 +174,3 @@ classifiers = [

[tool.ruff]
target-version = "py38"
target-version = "py39"
line-length = 120

@@ -191,2 +191,3 @@ extend-exclude = ["asdf/_extern/*", "asdf/_jsonschema/*", "docs/*"]

"S310", # URL open for permitted schemes
"RUF005", # prefer concatenate over add for collections
"RUF012", # mutable-class-default (typing related)

@@ -193,0 +194,0 @@ ]

@@ -21,2 +21,3 @@ [tox]

devdeps: PIP_EXTRA_INDEX_URL = https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
devdeps: ASDF_UNSTABLE_CORE_SCHEMAS = 1
deps =

@@ -47,8 +48,11 @@ compatibility: virtualenv

pip freeze
# print out the default core schemas version
python -c "import asdf; print(f'Core schemas default version: {asdf.get_config().default_version}')"
commands =
# coverage run must be used because the pytest-asdf plugin will interfere
# with proper coverage measurement due to the order pytest loads its
# entry points.
commands =
coverage: coverage run --source=asdf --rcfile={tox_root}/pyproject.toml -m \
pytest \
devdeps: -W "ignore::asdf_standard.exceptions.UnstableCoreSchemasWarning"
compatibility: integration_tests/compatibility/ \

@@ -55,0 +59,0 @@ mocks3: integration_tests/mocks3/ \