Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a DemoInstallSign in
Socket

asdf

Package Overview
Dependencies
Maintainers
8
Versions
74
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

asdf - npm Package Compare versions

Comparing version
4.0.0
to
4.1.0
+9
docs/_static/css/globalnav.css
/* Top Banner Navigation
-------------------------------------------------- */
.announcement-content a {
padding-right: 1em;
}
.announcement-content a:hover {
color: fuchsia;
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

******************
asdf.config Module
******************
.. currentmodule:: asdf
.. automodapi:: asdf.config
************
asdf Package
************
.. currentmodule:: asdf
.. automodapi:: asdf
:include-all-objects:
:inherited-members:
:no-inheritance-diagram:
:skip: ValidationError
:skip: Stream
******************
asdf.search Module
******************
.. currentmodule:: asdf
.. automodapi:: asdf.search
.. _user_api:
********
User API
********
.. toctree::
:maxdepth: 2
:hidden:
asdf_package.rst
asdf_search.rst
asdf_config.rst
* :doc:`asdf Package <asdf_package>`
* :doc:`asdf.search Module <asdf_search>`
* :doc:`asdf.config Module <asdf_config>`
+4
-5

@@ -32,3 +32,3 @@ name: Downstream

# Any env name which does not start with `pyXY` will use this Python version.
default_python: '3.10'
default_python: '3.12'
envs: |

@@ -46,3 +46,3 @@ - linux: asdf-wcs-schemas

# Any env name which does not start with `pyXY` will use this Python version.
default_python: '3.10'
default_python: '3.12'
envs: |

@@ -58,3 +58,3 @@ - linux: asdf-astropy

# Any env name which does not start with `pyXY` will use this Python version.
default_python: '3.10'
default_python: '3.12'
envs: |

@@ -74,7 +74,6 @@ - linux: astrocut

# Any env name which does not start with `pyXY` will use this Python version.
default_python: '3.10'
default_python: '3.12'
envs: |
- linux: weldx
- linux: sunpy
- linux: dkist
- linux: abacusutils

@@ -1,4 +0,4 @@

Metadata-Version: 2.1
Metadata-Version: 2.2
Name: asdf
Version: 4.0.0
Version: 4.1.0
Summary: Python implementation of the ASDF Standard

@@ -36,3 +36,3 @@ Author-email: The ASDF Developers <help@stsci.edu>

Project-URL: documentation, https://asdf.readthedocs.io/en/stable
Project-URL: documentation, https://asdf.readthedocs.io/en/stable/
Project-URL: repository, https://github.com/asdf-format/asdf

@@ -67,2 +67,3 @@ Project-URL: tracker, https://github.com/asdf-format/asdf/issues

Requires-Dist: tomli; python_version < "3.11" and extra == "docs"
Requires-Dist: furo; extra == "docs"
Provides-Extra: tests

@@ -69,0 +70,0 @@ Requires-Dist: fsspec[http]>=2022.8.2; extra == "tests"

@@ -20,2 +20,3 @@ asdf-standard>=1.1.0

sphinx-inline-tabs
furo

@@ -22,0 +23,0 @@ [docs:python_version < "3.11"]

@@ -856,5 +856,6 @@ CHANGES.rst

docs/rtd_environment.yaml
docs/_static/logo.ico
docs/_static/logo.pdf
docs/_static/logo.png
docs/_static/css/globalnav.css
docs/_static/images/favicon.ico
docs/_static/images/logo-dark-mode.png
docs/_static/images/logo-light-mode.png
docs/asdf/CODE_OF_CONDUCT.rst

@@ -874,3 +875,2 @@ docs/asdf/arrays.rst

docs/asdf/release_and_support.rst
docs/asdf/user_api.rst
docs/asdf/using_extensions.rst

@@ -887,2 +887,6 @@ docs/asdf/whats_new.rst

docs/asdf/extending/validators.rst
docs/asdf/user_api/asdf_config.rst
docs/asdf/user_api/asdf_package.rst
docs/asdf/user_api/asdf_search.rst
docs/asdf/user_api/index.rst
licenses/JSONSCHEMA_LICENSE

@@ -889,0 +893,0 @@ licenses/JSON_LICENSE

@@ -1381,2 +1381,6 @@ import copy

This method will only return unambiguous info. If a property is subject to multiple
subschemas or contains ambiguous entries (multiple titles), no result will be returned
for that property.
Parameters

@@ -1412,2 +1416,3 @@ ----------

refresh_extension_manager=refresh_extension_manager,
extension_manager=self.extension_manager,
)

@@ -1453,2 +1458,3 @@

refresh_extension_manager=refresh_extension_manager,
extension_manager=self.extension_manager,
)

@@ -1455,0 +1461,0 @@ print("\n".join(lines))

@@ -10,3 +10,3 @@ """

from ._asdf import AsdfFile, open_asdf
from ._display import DEFAULT_MAX_COLS, DEFAULT_MAX_ROWS, DEFAULT_SHOW_VALUES, render_tree
from ._display import DEFAULT_MAX_COLS, DEFAULT_MAX_ROWS, DEFAULT_SHOW_VALUES

@@ -45,4 +45,3 @@ __all__ = ["info"]

with _manage_node(node_or_path) as node:
lines = render_tree(node, max_rows=max_rows, max_cols=max_cols, show_values=show_values, identifier="root")
print("\n".join(lines))
node.info(max_rows=max_rows, max_cols=max_cols, show_values=show_values)

@@ -54,8 +53,8 @@

with open_asdf(node_or_path) as af:
yield af.tree
yield af
elif isinstance(node_or_path, AsdfFile):
yield node_or_path.tree
yield node_or_path
else:
yield node_or_path
yield AsdfFile(node_or_path)

@@ -193,1 +193,4 @@ import numpy as np

raise TypeError(msg)
def to_info(self, obj):
return {"shape": obj.shape, "dtype": obj.dtype}

@@ -11,6 +11,5 @@ """

import numpy as np
import sys
from ._node_info import create_tree
from .tags.core.ndarray import NDArrayType

@@ -22,5 +21,2 @@ __all__ = [

"render_tree",
"format_bold",
"format_faint",
"format_italic",
]

@@ -33,5 +29,3 @@

EXTENSION_MANAGER = None
def render_tree(

@@ -45,2 +39,3 @@ node,

refresh_extension_manager=False,
extension_manager=None,
):

@@ -56,2 +51,3 @@ """

refresh_extension_manager=refresh_extension_manager,
extension_manager=extension_manager,
)

@@ -69,27 +65,2 @@ if info is None:

def format_bold(value):
"""
Wrap the input value in the ANSI escape sequence for increased intensity.
"""
return _format_code(value, 1)
def format_faint(value):
"""
Wrap the input value in the ANSI escape sequence for decreased intensity.
"""
return _format_code(value, 2)
def format_italic(value):
"""
Wrap the input value in the ANSI escape sequence for italic.
"""
return _format_code(value, 3)
def _format_code(value, code):
return f"\x1B[{code}m{value}\x1B[0m"
class _TreeRenderer:

@@ -104,3 +75,27 @@ """

self._show_values = show_values
self._isatty = hasattr(sys.stdout, "isatty") and sys.stdout.isatty()
def format_bold(self, value):
"""
Wrap the input value in the ANSI escape sequence for increased intensity.
"""
return self._format_code(value, 1)
def format_faint(self, value):
"""
Wrap the input value in the ANSI escape sequence for decreased intensity.
"""
return self._format_code(value, 2)
def format_italic(self, value):
"""
Wrap the input value in the ANSI escape sequence for italic.
"""
return self._format_code(value, 3)
def _format_code(self, value, code):
if not self._isatty:
return f"{value}"
return f"\x1B[{code}m{value}\x1B[0m"
def render(self, info):

@@ -112,3 +107,3 @@ self._mark_visible(info)

if elided:
lines.append(format_faint(format_italic("Some nodes not shown.")))
lines.append(self.format_faint(self.format_italic("Some nodes not shown.")))

@@ -231,3 +226,3 @@ return lines

prefix = self._make_prefix(info.depth + 1, active_depths, True)
message = format_faint(format_italic(str(hidden_count) + " not shown"))
message = self.format_faint(self.format_italic(str(hidden_count) + " not shown"))
lines.append(f"{prefix}{message}")

@@ -242,19 +237,19 @@

line = (
f"{prefix}[{format_bold(info.identifier)}] {value}"
f"{prefix}[{self.format_bold(info.identifier)}] {value}"
if isinstance(info.parent_node, (list, tuple))
else f"{prefix}{format_bold(info.identifier)} {value}"
else f"{prefix}{self.format_bold(info.identifier)} {value}"
)
if info.info is not None:
line = line + format_faint(format_italic(" # " + info.info))
line = line + self.format_faint(self.format_italic(" # " + info.info))
visible_children = info.visible_children
if len(visible_children) == 0 and len(info.children) > 0:
line = line + format_italic(" ...")
line = line + self.format_italic(" ...")
if info.recursive:
line = line + " " + format_faint(format_italic("(recursive reference)"))
line = line + " " + self.format_faint(self.format_italic("(recursive reference)"))
if self._max_cols is not None and len(line) > self._max_cols:
message = " (truncated)"
line = line[0 : (self._max_cols - len(message))] + format_faint(format_italic(message))
line = line[0 : (self._max_cols - len(message))] + self.format_faint(self.format_italic(message))

@@ -266,5 +261,2 @@ return line

if isinstance(info.node, (NDArrayType, np.ndarray)):
return f"({rendered_type}): shape={info.node.shape}, dtype={info.node.dtype.name}"
if not info.children and self._show_values:

@@ -301,2 +293,2 @@ try:

return format_faint(prefix)
return self.format_faint(prefix)

@@ -5,3 +5,3 @@ import re

from .schema import load_schema
from .treeutil import get_children
from .treeutil import get_children, is_container

@@ -24,4 +24,122 @@

def create_tree(key, node, identifier="root", filters=None, refresh_extension_manager=False):
def _get_matching_schema_property(schema, property_name):
"""
Extract a property subschema for a given property_name.
This function does not descend into the schema (beyond
looking for a "properties" key) and does not support
schema combiners.
Parameters
----------
schema : dict
A dictionary containing a JSONSCHEMA
property_name : str
The name of the property to extract
Returns
-------
dict or None
The property subschema at the provided name or
``None`` if the property doesn't exist.
"""
if "properties" in schema:
props = schema["properties"]
if property_name in props:
return props[property_name]
if "patternProperties" in props:
patterns = props["patternProperties"]
for regex in patterns:
if re.search(regex, property_name):
return patterns[regex]
return None
def _get_subschema_for_property(schema, property_name):
    """
    Resolve the subschema describing ``property_name`` within ``schema``.

    Unlike ``_get_matching_schema_property`` this also walks the schema
    combiners and refuses to return an ambiguous result.

    Parameters
    ----------
    schema : dict
        A dictionary containing a JSONSCHEMA
    property_name : str
        The name of the property to extract

    Returns
    -------
    dict or None
        The property subschema at the provided name or ``None`` if the
        property doesn't exist or is ambiguous (has more than one
        subschema or is nested in a not).
    """
    # "$ref" is not handled here; the schema is expected to have been
    # loaded with resolve_references=True.
    candidates = []

    # Direct match through properties / patternProperties.
    direct = _get_matching_schema_property(schema, property_name)
    if direct is not None:
        candidates.append(direct)

    # A match found under "not" cannot be used: resolving it would
    # require inverting the subschema, so treat it as unresolvable.
    if "not" in schema and _get_subschema_for_property(schema["not"], property_name) is not None:
        return None

    # Collect any matches reachable through the combiners.
    for keyword in ("allOf", "oneOf", "anyOf"):
        for combined in schema.get(keyword, []):
            found = _get_subschema_for_property(combined, property_name)
            if found is not None:
                candidates.append(found)

    # Only an unambiguous (exactly one) candidate is returned.
    return candidates[0] if len(candidates) == 1 else None
def _get_schema_key(schema, key):
"""
Extract a subschema at a given key.
This function will attempt to consider schema combiners
(allOf, oneOf, anyOf) and will return None on an
ambiguous result (where more than 1 match is found).
Parameters
----------
schema : dict
A dictionary containing a JSONSCHEMA
key : str
The key under which the subschema is stored
Returns
-------
dict or None
The subschema at the provided key or
``None`` if the key doesn't exist or is ambiguous.
"""
applicable = []
if key in schema:
applicable.append(schema[key])
# Here we don't consider any subschema under "not" to avoid
# false positives for keys like "type" etc.
for combiner in ("allOf", "oneOf", "anyOf"):
for combined_schema in schema.get(combiner, []):
possible = _get_schema_key(combined_schema, key)
if possible is not None:
applicable.append(possible)
# only return the property if we found exactly 1 applicable
if len(applicable) == 1:
return applicable[0]
return None
def create_tree(key, node, identifier="root", filters=None, refresh_extension_manager=False, extension_manager=None):
"""
Create a `NodeSchemaInfo` tree which can be filtered from a base node.

@@ -51,2 +169,3 @@

refresh_extension_manager=refresh_extension_manager,
extension_manager=extension_manager,
)

@@ -68,2 +187,3 @@

refresh_extension_manager=False,
extension_manager=None,
):

@@ -98,2 +218,3 @@ """

refresh_extension_manager=refresh_extension_manager,
extension_manager=extension_manager,
)

@@ -125,2 +246,11 @@

def _make_traversable(node, extension_manager):
if hasattr(node, "__asdf_traverse__"):
return node.__asdf_traverse__(), True, False
node_type = type(node)
if not extension_manager.handles_type(node_type):
return node, False, False
return extension_manager.get_converter_for_type(node_type).to_info(node), False, True
_SchemaInfo = namedtuple("SchemaInfo", ["info", "value"])

@@ -190,3 +320,3 @@

def __init__(self, key, parent, identifier, node, depth, recursive=False, visible=True):
def __init__(self, key, parent, identifier, node, depth, recursive=False, visible=True, extension_manager=None):
self.key = key

@@ -201,12 +331,4 @@ self.parent = parent

self.schema = None
self.extension_manager = extension_manager or _get_extension_manager()
@classmethod
def traversable(cls, node):
"""
This method determines if the node is an instance of a class that
supports introspection by the info machinery. This determined by
the presence of a __asdf_traverse__ method.
"""
return hasattr(node, "__asdf_traverse__")
@property

@@ -225,19 +347,9 @@ def visible_children(self):

def info(self):
if self.schema is not None:
return self.schema.get(self.key, None)
if self.schema is None:
return None
return _get_schema_key(self.schema, self.key)
return None
def get_schema_for_property(self, identifier):
subschema = self.schema.get("properties", {}).get(identifier, None)
if subschema is not None:
return subschema
return _get_subschema_for_property(self.schema, identifier) or {}
subschema = self.schema.get("properties", {}).get("patternProperties", None)
if subschema:
for key in subschema:
if re.search(key, identifier):
return subschema[key]
return {}
def set_schema_for_property(self, parent, identifier):

@@ -253,3 +365,3 @@ """Extract a subschema from the parent for the identified property"""

schema_uri = tag_def.schema_uris[0]
schema = load_schema(schema_uri)
schema = load_schema(schema_uri, resolve_references=True)

@@ -259,3 +371,5 @@ self.schema = schema

@classmethod
def from_root_node(cls, key, root_identifier, root_node, schema=None, refresh_extension_manager=False):
def from_root_node(
cls, key, root_identifier, root_node, schema=None, refresh_extension_manager=False, extension_manager=None
):
"""

@@ -266,3 +380,3 @@ Build a NodeSchemaInfo tree from the given ASDF root node.

"""
extension_manager = _get_extension_manager(refresh_extension_manager)
extension_manager = extension_manager or _get_extension_manager(refresh_extension_manager)

@@ -277,22 +391,46 @@ current_nodes = [(None, root_identifier, root_node)]

for parent, identifier, node in current_nodes:
if (isinstance(node, (dict, tuple)) or cls.traversable(node)) and id(node) in seen:
info = NodeSchemaInfo(key, parent, identifier, node, current_depth, recursive=True)
# node is the item in the tree
# We might sometimes not want to use that node directly
# but instead using a different node for traversal.
t_node, traversable, from_converter = _make_traversable(node, extension_manager)
if (is_container(node) or traversable) and id(node) in seen:
info = NodeSchemaInfo(
key,
parent,
identifier,
node,
current_depth,
recursive=True,
extension_manager=extension_manager,
)
parent.children.append(info)
else:
info = NodeSchemaInfo(key, parent, identifier, node, current_depth)
info = NodeSchemaInfo(
key, parent, identifier, node, current_depth, extension_manager=extension_manager
)
# If this is the first node keep a reference so we can return it.
if root_info is None:
root_info = info
if parent is None:
info.schema = schema
if parent is not None:
if parent.schema is not None and not cls.traversable(node):
if parent.schema is not None:
# descend within the schema of the parent
info.set_schema_for_property(parent, identifier)
# track that this node is a child of the parent
parent.children.append(info)
# Track which nodes have been seen to avoid an infinite
# loop and to find recursive references
# This is tree wide but should be per-branch.
seen.add(id(node))
if cls.traversable(node):
t_node = node.__asdf_traverse__()
# if the node has __asdf_traverse__ and a _tag attribute
# that is a valid tag, load its schema
if traversable:
if hasattr(node, "_tag") and isinstance(node._tag, str):

@@ -308,8 +446,3 @@ try:

else:
t_node = node
if parent is None:
info.schema = schema
# add children to queue
for child_identifier, child_node in get_children(t_node):

@@ -316,0 +449,0 @@ next_nodes.append((info, child_identifier, child_node))

import io
import sys

@@ -8,2 +9,11 @@ import pytest

@pytest.fixture(autouse=True)
def force_isatty(monkeypatch):
def _isatty():
return True
monkeypatch.setattr(sys.stdout, "isatty", _isatty)
yield
def _assert_diffs_equal(test_data_path, filenames, result_file, minimal=False, ignore=None):

@@ -10,0 +20,0 @@ iostream = io.StringIO()

@@ -0,1 +1,2 @@

import contextlib
import os

@@ -7,5 +8,6 @@ import pathlib

import numpy as np
import pytest
import asdf
from asdf.extension import ExtensionManager, ExtensionProxy, ManifestExtension
from asdf.extension import ExtensionProxy, ManifestExtension
from asdf.resource import DirectoryResourceMapping

@@ -134,2 +136,3 @@

@contextlib.contextmanager
def manifest_extension(tmp_path):

@@ -173,4 +176,4 @@ foo_manifest = """%YAML 1.1

archive_catalog:
datatype: int
destination: [ScienceCommon.silly]
datatype: int
destination: [ScienceCommon.silly]
clown:

@@ -237,4 +240,4 @@ title: clown name

archive_catalog:
datatype: str
destination: [ScienceCommon.attribute1]
datatype: str
destination: [ScienceCommon.attribute1]
attribute2:

@@ -244,4 +247,4 @@ title: Attribute2 Title

archive_catalog:
datatype: str
destination: [ScienceCommon.attribute2]
datatype: str
destination: [ScienceCommon.attribute2]
...

@@ -259,2 +262,11 @@ """

description: object description
allOf:
- $ref: drink_ref-1.0.0
...
"""
drink_ref_schema = """
%YAML 1.1
---
$schema: "asdf://stsci.edu/schemas/asdf/asdf-schema-1.1.0"
id: "asdf://somewhere.org/asdf/schemas/drink_ref-1.0.0"
properties:

@@ -266,9 +278,10 @@ attributeOne:

archive_catalog:
datatype: str
destination: [ScienceCommon.attributeOne]
datatype: str
destination: [ScienceCommon.attributeOne]
attributeTwo:
title: AttributeTwo Title
description: AttributeTwo description
type: string
archive_catalog:
allOf:
- title: AttributeTwo Title
description: AttributeTwo description
type: string
archive_catalog:
datatype: str

@@ -288,2 +301,5 @@ destination: [ScienceCommon.attributeTwo]

fschema.write(drink_schema)
spath = tmp_path / "schemas" / "drink_ref-1.0.0.yaml"
with open(spath, "w") as fschema:
fschema.write(drink_ref_schema)
os.mkdir(tmp_path / "manifests")

@@ -293,9 +309,2 @@ mpath = str(tmp_path / "manifests" / "foo_manifest-1.0.yaml")

fmanifest.write(foo_manifest)
config = asdf.get_config()
config.add_resource_mapping(
DirectoryResourceMapping(str(tmp_path / "manifests"), "asdf://somewhere.org/asdf/manifests/"),
)
config.add_resource_mapping(
DirectoryResourceMapping(str(tmp_path / "schemas"), "asdf://somewhere.org/asdf/schemas/"),
)

@@ -355,9 +364,16 @@ class FooConverter:

extension = ManifestExtension.from_uri(
"asdf://somewhere.org/asdf/manifests/foo_manifest-1.0",
converters=[converter1, converter2, converter3],
)
config = asdf.get_config()
proxy = ExtensionProxy(extension)
config.add_extension(proxy)
with asdf.config_context() as config:
config.add_resource_mapping(
DirectoryResourceMapping(str(tmp_path / "manifests"), "asdf://somewhere.org/asdf/manifests/"),
)
config.add_resource_mapping(
DirectoryResourceMapping(str(tmp_path / "schemas"), "asdf://somewhere.org/asdf/schemas/"),
)
extension = ManifestExtension.from_uri(
"asdf://somewhere.org/asdf/manifests/foo_manifest-1.0",
converters=[converter1, converter2, converter3],
)
proxy = ExtensionProxy(extension)
config.add_extension(proxy)
yield config

@@ -385,10 +401,122 @@

def test_schema_info_support(tmp_path):
manifest_extension(tmp_path)
config = asdf.get_config()
af = asdf.AsdfFile()
af._extension_manager = ExtensionManager(config.extensions)
af.tree = create_tree()
with manifest_extension(tmp_path):
af = asdf.AsdfFile()
af.tree = create_tree()
assert af.schema_info("title", refresh_extension_manager=True) == {
"list_of_stuff": [
assert af.schema_info("title") == {
"list_of_stuff": [
{
"attributeOne": {
"title": ("AttributeOne Title", "v1"),
},
"attributeTwo": {
"title": ("AttributeTwo Title", "v2"),
},
"title": ("object with info support 3 title", af.tree["list_of_stuff"][0]),
},
{
"attributeOne": {
"title": ("AttributeOne Title", "x1"),
},
"attributeTwo": {
"title": ("AttributeTwo Title", "x2"),
},
"title": ("object with info support 3 title", af.tree["list_of_stuff"][1]),
},
],
"object": {
"I_example": {"title": ("integer pattern property", 1)},
"S_example": {"title": ("string pattern property", "beep")},
"allof_attribute": {"title": ("allOf example attribute", "good")},
"anyof_attribute": {
"attribute1": {
"title": ("Attribute1 Title", "VAL1"),
},
"attribute2": {
"title": ("Attribute2 Title", "VAL2"),
},
"title": ("object with info support 2 title", af.tree["object"].anyof),
},
"clown": {"title": ("clown name", "Bozo")},
"oneof_attribute": {"title": ("oneOf example attribute", 20)},
"the_meaning_of_life_the_universe_and_everything": {"title": ("Some silly title", 42)},
"title": ("object with info support title", af.tree["object"]),
},
}
assert af.schema_info("archive_catalog") == {
"list_of_stuff": [
{
"attributeOne": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeOne"]}, "v1"),
},
"attributeTwo": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeTwo"]}, "v2"),
},
},
{
"attributeOne": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeOne"]}, "x1"),
},
"attributeTwo": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeTwo"]}, "x2"),
},
},
],
"object": {
"anyof_attribute": {
"attribute1": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attribute1"]}, "VAL1"),
},
"attribute2": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attribute2"]}, "VAL2"),
},
},
"clown": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.clown"]}, "Bozo"),
},
"the_meaning_of_life_the_universe_and_everything": {
"archive_catalog": ({"datatype": "int", "destination": ["ScienceCommon.silly"]}, 42),
},
},
}
assert af.schema_info("archive_catalog", preserve_list=False) == {
"list_of_stuff": {
0: {
"attributeOne": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeOne"]}, "v1"),
},
"attributeTwo": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeTwo"]}, "v2"),
},
},
1: {
"attributeOne": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeOne"]}, "x1"),
},
"attributeTwo": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeTwo"]}, "x2"),
},
},
},
"object": {
"anyof_attribute": {
"attribute1": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attribute1"]}, "VAL1"),
},
"attribute2": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attribute2"]}, "VAL2"),
},
},
"clown": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.clown"]}, "Bozo"),
},
"the_meaning_of_life_the_universe_and_everything": {
"archive_catalog": ({"datatype": "int", "destination": ["ScienceCommon.silly"]}, 42),
},
},
}
assert af.schema_info("title", "list_of_stuff") == [
{

@@ -412,4 +540,5 @@ "attributeOne": {

},
],
"object": {
]
assert af.schema_info("title", "object") == {
"I_example": {"title": ("integer pattern property", 1)},

@@ -431,105 +560,5 @@ "S_example": {"title": ("string pattern property", "beep")},

"title": ("object with info support title", af.tree["object"]),
},
}
}
assert af.schema_info("archive_catalog", refresh_extension_manager=True) == {
"list_of_stuff": [
{
"attributeOne": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeOne"]}, "v1"),
},
"attributeTwo": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeTwo"]}, "v2"),
},
},
{
"attributeOne": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeOne"]}, "x1"),
},
"attributeTwo": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeTwo"]}, "x2"),
},
},
],
"object": {
"anyof_attribute": {
"attribute1": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attribute1"]}, "VAL1"),
},
"attribute2": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attribute2"]}, "VAL2"),
},
},
"clown": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.clown"]}, "Bozo"),
},
"the_meaning_of_life_the_universe_and_everything": {
"archive_catalog": ({"datatype": "int", "destination": ["ScienceCommon.silly"]}, 42),
},
},
}
assert af.schema_info("archive_catalog", preserve_list=False, refresh_extension_manager=True) == {
"list_of_stuff": {
0: {
"attributeOne": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeOne"]}, "v1"),
},
"attributeTwo": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeTwo"]}, "v2"),
},
},
1: {
"attributeOne": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeOne"]}, "x1"),
},
"attributeTwo": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attributeTwo"]}, "x2"),
},
},
},
"object": {
"anyof_attribute": {
"attribute1": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attribute1"]}, "VAL1"),
},
"attribute2": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.attribute2"]}, "VAL2"),
},
},
"clown": {
"archive_catalog": ({"datatype": "str", "destination": ["ScienceCommon.clown"]}, "Bozo"),
},
"the_meaning_of_life_the_universe_and_everything": {
"archive_catalog": ({"datatype": "int", "destination": ["ScienceCommon.silly"]}, 42),
},
},
}
assert af.schema_info("title", "list_of_stuff", refresh_extension_manager=True) == [
{
"attributeOne": {
"title": ("AttributeOne Title", "v1"),
},
"attributeTwo": {
"title": ("AttributeTwo Title", "v2"),
},
"title": ("object with info support 3 title", af.tree["list_of_stuff"][0]),
},
{
"attributeOne": {
"title": ("AttributeOne Title", "x1"),
},
"attributeTwo": {
"title": ("AttributeTwo Title", "x2"),
},
"title": ("object with info support 3 title", af.tree["list_of_stuff"][1]),
},
]
assert af.schema_info("title", "object", refresh_extension_manager=True) == {
"I_example": {"title": ("integer pattern property", 1)},
"S_example": {"title": ("string pattern property", "beep")},
"allof_attribute": {"title": ("allOf example attribute", "good")},
"anyof_attribute": {
assert af.schema_info("title", "object.anyof_attribute") == {
"attribute1": {

@@ -542,97 +571,77 @@ "title": ("Attribute1 Title", "VAL1"),

"title": ("object with info support 2 title", af.tree["object"].anyof),
},
"clown": {"title": ("clown name", "Bozo")},
"oneof_attribute": {"title": ("oneOf example attribute", 20)},
"the_meaning_of_life_the_universe_and_everything": {"title": ("Some silly title", 42)},
"title": ("object with info support title", af.tree["object"]),
}
}
assert af.schema_info("title", "object.anyof_attribute", refresh_extension_manager=True) == {
"attribute1": {
"title": ("Attribute1 Title", "VAL1"),
},
"attribute2": {
assert af.schema_info("title", "object.anyof_attribute.attribute2") == {
"title": ("Attribute2 Title", "VAL2"),
},
"title": ("object with info support 2 title", af.tree["object"].anyof),
}
}
assert af.schema_info("title", "object.anyof_attribute.attribute2", refresh_extension_manager=True) == {
"title": ("Attribute2 Title", "VAL2"),
}
# Test printing the schema_info
assert af.schema_info("title", "object.anyof_attribute.attribute2").__repr__() == "{'title': Attribute2 Title}"
# Test printing the schema_info
assert (
af.schema_info("title", "object.anyof_attribute.attribute2", refresh_extension_manager=True).__repr__()
== "{'title': Attribute2 Title}"
)
assert af.schema_info("title", "object.anyof_attribute.attribute2.foo") is None
assert af.schema_info("title", "object.anyof_attribute.attribute2.foo", refresh_extension_manager=True) is None
assert af.schema_info(refresh_extension_manager=True) == {
"list_of_stuff": [
{
"attributeOne": {"description": ("AttributeOne description", "v1")},
"attributeTwo": {"description": ("AttributeTwo description", "v2")},
"description": ("object description", af.tree["list_of_stuff"][0]),
assert af.schema_info() == {
"list_of_stuff": [
{
"attributeOne": {"description": ("AttributeOne description", "v1")},
"attributeTwo": {"description": ("AttributeTwo description", "v2")},
"description": ("object description", af.tree["list_of_stuff"][0]),
},
{
"attributeOne": {"description": ("AttributeOne description", "x1")},
"attributeTwo": {"description": ("AttributeTwo description", "x2")},
"description": ("object description", af.tree["list_of_stuff"][1]),
},
],
"object": {
"allof_attribute": {
"description": ("allOf description", "good"),
},
"clown": {
"description": ("clown description", "Bozo"),
},
"description": ("object with info support description", af.tree["object"]),
"oneof_attribute": {
"description": ("oneOf description", 20),
},
"the_meaning_of_life_the_universe_and_everything": {
"description": ("Some silly description", 42),
},
},
{
"attributeOne": {"description": ("AttributeOne description", "x1")},
"attributeTwo": {"description": ("AttributeTwo description", "x2")},
"description": ("object description", af.tree["list_of_stuff"][1]),
},
],
"object": {
"allof_attribute": {
"description": ("allOf description", "good"),
},
"clown": {
"description": ("clown description", "Bozo"),
},
"description": ("object with info support description", af.tree["object"]),
"oneof_attribute": {
"description": ("oneOf description", 20),
},
"the_meaning_of_life_the_universe_and_everything": {
"description": ("Some silly description", 42),
},
},
}
}
# Test using a search result
search = af.search("clown")
assert af.schema_info("description", search, refresh_extension_manager=True) == {
"object": {
"clown": {
"description": ("clown description", "Bozo"),
# Test using a search result
search = af.search("clown")
assert af.schema_info("description", search) == {
"object": {
"clown": {
"description": ("clown description", "Bozo"),
},
"description": ("object with info support description", af.tree["object"]),
},
"description": ("object with info support description", af.tree["object"]),
},
}
}
def test_info_object_support(capsys, tmp_path):
manifest_extension(tmp_path)
config = asdf.get_config()
af = asdf.AsdfFile()
af._extension_manager = ExtensionManager(config.extensions)
af.tree = create_tree()
af.info(refresh_extension_manager=True)
with manifest_extension(tmp_path):
af = asdf.AsdfFile()
af.tree = create_tree()
af.info()
captured = capsys.readouterr()
captured = capsys.readouterr()
assert "the_meaning_of_life_the_universe_and_everything" in captured.out
assert "clown" in captured.out
assert "42" in captured.out
assert "Bozo" in captured.out
assert "clown name" in captured.out
assert "silly" in captured.out
assert "info support 2" in captured.out
assert "Attribute2 Title" in captured.out
assert "allOf example attribute" in captured.out
assert "oneOf example attribute" in captured.out
assert "string pattern property" in captured.out
assert "integer pattern property" in captured.out
assert "AttributeOne" in captured.out
assert "AttributeTwo" in captured.out
assert "the_meaning_of_life_the_universe_and_everything" in captured.out
assert "clown" in captured.out
assert "42" in captured.out
assert "Bozo" in captured.out
assert "clown name" in captured.out
assert "silly" in captured.out
assert "info support 2" in captured.out
assert "Attribute2 Title" in captured.out
assert "allOf example attribute" in captured.out
assert "oneOf example attribute" in captured.out
assert "string pattern property" in captured.out
assert "integer pattern property" in captured.out
assert "AttributeOne" in captured.out
assert "AttributeTwo" in captured.out

@@ -656,21 +665,19 @@

tempdir = pathlib.Path(tempfile.mkdtemp())
manifest_extension(tempdir)
config = asdf.get_config()
af = asdf.AsdfFile()
af._extension_manager = ExtensionManager(config.extensions)
with manifest_extension(tempdir):
af = asdf.AsdfFile()
recursive_obj = RecursiveObjectWithInfoSupport()
recursive_obj.recursive = recursive_obj
tree = {"random": 3.14159, "rtest": recursive_obj}
af = asdf.AsdfFile()
# we need to do this to avoid validation against the
# manifest (generated in manifest_extension) which is
# now supported with the default asdf standard 1.6.0
# I'm not sure why the manifest has this restriction
# and prior to switching to the default 1.6.0 was ignored
# which allowed this test to pass.
af._tree = tree
af.info(refresh_extension_manager=True)
captured = capsys.readouterr()
assert "recursive reference" in captured.out
recursive_obj = RecursiveObjectWithInfoSupport()
recursive_obj.recursive = recursive_obj
tree = {"random": 3.14159, "rtest": recursive_obj}
af = asdf.AsdfFile()
# we need to do this to avoid validation against the
# manifest (generated in manifest_extension) which is
# now supported with the default asdf standard 1.6.0
# I'm not sure why the manifest has this restriction
# and prior to switching to the default 1.6.0 was ignored
# which allowed this test to pass.
af._tree = tree
af.info()
captured = capsys.readouterr()
assert "recursive reference" in captured.out

@@ -720,1 +727,124 @@

assert "(NiceStr): nice\n" in captured.out
@pytest.mark.parametrize(
    "schema, expected",
    # A bare "properties" schema and the same schema wrapped in each
    # single-entry combiner should all resolve the "foo" subschema.
    [({"properties": {"foo": {"type": "object"}}}, {"type": "object"})]
    + [
        ({combiner: [{"properties": {"foo": {"type": "object"}}}]}, {"type": "object"})
        for combiner in ("allOf", "oneOf", "anyOf")
    ],
)
def test_node_property(schema, expected):
    """``get_schema_for_property`` resolves a property through one combiner level."""
    node_info = asdf._node_info.NodeSchemaInfo.from_root_node("title", "root", {}, schema)
    assert node_info.get_schema_for_property("foo") == expected
@pytest.mark.parametrize(
    "schema",
    # "not" never contributes a property schema ...
    [{"not": {"properties": {"foo": {"type": "object"}}}}]
    # ... and a property defined both directly and under a combiner,
    # or under two different combiners, is ambiguous.
    + [
        {
            "properties": {"foo": {"type": "object"}},
            combiner: [{"properties": {"foo": {"type": "object"}}}],
        }
        for combiner in ("allOf", "anyOf", "oneOf")
    ]
    + [
        {
            first: [{"properties": {"foo": {"type": "object"}}}],
            second: [{"properties": {"foo": {"type": "object"}}}],
        }
        for first, second in (("allOf", "anyOf"), ("anyOf", "oneOf"), ("oneOf", "allOf"))
    ],
)
def test_node_property_error(schema):
    """Negated or ambiguous schemas yield an empty property schema."""
    node_info = asdf._node_info.NodeSchemaInfo.from_root_node("title", "root", {}, schema)
    assert node_info.get_schema_for_property("foo") == {}
@pytest.mark.parametrize(
    "schema, expected",
    [
        # a title resolves directly or through a single-entry combiner
        ({"title": "foo"}, "foo"),
        ({"allOf": [{"title": "foo"}]}, "foo"),
        ({"oneOf": [{"title": "foo"}]}, "foo"),
        ({"anyOf": [{"title": "foo"}]}, "foo"),
        # a negated title is not usable
        ({"not": {"title": "foo"}}, None),
        # conflicting titles under a combiner are ambiguous -> no info
        # NOTE: a duplicate of the allOf case was removed here; it ran the
        # same assertion twice under a pytest-generated suffixed test id.
        ({"allOf": [{"title": "foo"}, {"title": "bar"}]}, None),
        ({"oneOf": [{"title": "foo"}, {"title": "bar"}]}, None),
        ({"anyOf": [{"title": "foo"}, {"title": "bar"}]}, None),
    ],
)
def test_node_info(schema, expected):
    """``NodeSchemaInfo.info`` exposes the schema title (or None when ambiguous)."""
    ni = asdf._node_info.NodeSchemaInfo.from_root_node("title", "root", {}, schema)
    assert ni.info == expected
def test_info_with_custom_extension(capsys):
    """``AsdfFile.info`` shows schema titles supplied by a custom extension.

    A minimal extension registers one tag whose schema carries titles at
    three nesting levels; all three titles must appear in the info output.

    NOTE(review): the source this was recovered from had lost all
    indentation, including inside the YAML schema literal below; the YAML
    nesting here is reconstructed and should be confirmed against the
    upstream test.
    """
    MY_TAG_URI = "asdf://somewhere.org/tags/foo-1.0.0"
    # NOTE(review): the schema URI reuses the tag URI (".../tags/...") --
    # self-consistent within this test, but confirm it is intentional.
    MY_SCHEMA_URI = "asdf://somewhere.org/tags/foo-1.0.0"
    schema_bytes = f"""%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: {MY_SCHEMA_URI}
title: top_title
properties:
  foo:
    title: foo_title
    type: object
    properties:
      bar:
        title: bar_title
""".encode(
        "ascii"
    )

    class MyExtension:
        extension_uri = "asdf://somewhere.org/extensions/foo-1.0.0"
        tags = [
            asdf.extension.TagDefinition(
                MY_TAG_URI,
                schema_uris=[MY_SCHEMA_URI],
            )
        ]

    class FooThing:
        def __asdf_traverse__(self):
            return {"bar": 1}

    class Thing:
        _tag = MY_TAG_URI

        def __asdf_traverse__(self):
            return {"foo": FooThing()}

    with asdf.config_context() as cfg:
        cfg.add_resource_mapping({MY_SCHEMA_URI: schema_bytes})
        ext = MyExtension()
        af = asdf.AsdfFile({"t": Thing()}, extensions=[ext])
        af.info(max_cols=None)
        captured = capsys.readouterr()
        assert "top_title" in captured.out
        assert "foo_title" in captured.out
        assert "bar_title" in captured.out
def test_info_no_infinite_loop(capsys):
    """Verify a self-referencing list does not hang ``AsdfFile.info``.

    Providing a recursive list used to cause an infinite loop; the node
    should instead be reported as a recursive reference.
    """
    af = asdf.AsdfFile()
    # build a list that contains itself, stored in the tree
    af["l"] = []
    af["l"].append(af["l"])
    af.info()
    assert "recursive" in capsys.readouterr().out

@@ -15,3 +15,3 @@ # file generated by setuptools_scm

__version__ = version = '4.0.0'
__version_tuple__ = version_tuple = (4, 0, 0)
__version__ = version = '4.1.0'
__version_tuple__ = version_tuple = (4, 1, 0)

@@ -12,23 +12,2 @@ """

try:
# Provides cross-platform color support
import colorama
colorama.init()
RED = colorama.Fore.RED
GREEN = colorama.Fore.GREEN
RESET = colorama.Style.RESET_ALL
except ImportError:
from sys import platform
# These platforms should support ansi color codes
if platform.startswith("linux") or platform.startswith("darwin"):
RED = "\x1b[31m"
GREEN = "\x1b[32m"
RESET = "\x1b[0m"
else:
RED = ""
GREEN = ""
RESET = ""
import asdf

@@ -43,7 +22,3 @@ from asdf.extension._serialization_context import BlockAccess

RESET_NEWLINE = RESET + "\n"
NDARRAY_TAG = "core/ndarray"
LIST_MARKER = "-"
THIS_MARKER = GREEN + "> "
THAT_MARKER = RED + "< "

@@ -168,19 +143,33 @@

if hasattr(sys.stdout, "isatty") and sys.stdout.isatty():
RED = "\x1b[31m"
GREEN = "\x1b[32m"
RESET = "\x1b[0m"
else:
RED = ""
GREEN = ""
RESET = ""
self.RESET_NEWLINE = RESET + "\n"
self.LIST_MARKER = "-"
self.THIS_MARKER = GREEN + "> "
self.THAT_MARKER = RED + "< "
def print_tree_context(diff_ctx, node_list, other, use_marker, last_was_list):
"""Print context information indicating location in ASDF tree."""
prefix = ""
marker = THAT_MARKER if other else THIS_MARKER
marker = diff_ctx.THAT_MARKER if other else diff_ctx.THIS_MARKER
for node in diff_ctx.print_tree.get_print_list(node_list):
if node is not None:
node_ = LIST_MARKER if isinstance(node, ArrayNode) else node + ":"
node_ = diff_ctx.LIST_MARKER if isinstance(node, ArrayNode) else node + ":"
# All of this logic is just to make the display of arrays prettier
if use_marker:
line_prefix = " " if last_was_list else marker + prefix[2:]
line_suffix = "" if node_ == LIST_MARKER else RESET_NEWLINE
line_suffix = "" if node_ == diff_ctx.LIST_MARKER else diff_ctx.RESET_NEWLINE
else:
line_prefix = prefix
line_suffix = RESET_NEWLINE
line_suffix = diff_ctx.RESET_NEWLINE
diff_ctx.iostream.write(line_prefix + node_ + line_suffix)
last_was_list = node_ == LIST_MARKER
last_was_list = node_ == diff_ctx.LIST_MARKER
prefix += " "

@@ -222,5 +211,5 @@ diff_ctx.print_tree[node_list] = True

use_marker = not last_was_list or ignore_lwl
marker = THAT_MARKER if other else THIS_MARKER
marker = diff_ctx.THAT_MARKER if other else diff_ctx.THIS_MARKER
prefix = marker + " " * len(node_list) if use_marker else " "
diff_ctx.iostream.write(prefix + str(thing) + RESET_NEWLINE)
diff_ctx.iostream.write(prefix + str(thing) + diff_ctx.RESET_NEWLINE)
last_was_list = False

@@ -227,0 +216,0 @@ return last_was_list

@@ -5,3 +5,3 @@ """

from asdf import _convenience as convenience
import asdf

@@ -40,2 +40,3 @@ from .main import Command

def info(filename, max_rows, max_cols, show_values):
convenience.info(filename, max_rows=max_rows, max_cols=max_cols, show_values=show_values)
with asdf.open(filename) as af:
af.info(max_rows, max_cols, show_values)

@@ -40,2 +40,7 @@ """

`from_yaml_tree`.
The ``to_info`` method is optional. If implemented it must
accept one parameter ``obj`` which is a tree node/custom
object and return a container (list, tuple, dict) containing
information about that object to display during ``AsdfFile.info``.
"""

@@ -296,2 +301,23 @@

def to_info(self, obj):
"""
Convert an object to a container with items further
defining information about this node. This method
is used for "info" and not used for serialization.
Parameters
----------
obj : object
Instance of a custom type to get "info" for.
Returns
-------
object
Must be a container (list, tuple, dict) with
items providing "info" about ``obj``.
"""
if not hasattr(self._delegate, "to_info"):
return obj
return self._delegate.to_info(obj)
@property

@@ -298,0 +324,0 @@ def delegate(self):

@@ -10,4 +10,4 @@ """

from ._display import DEFAULT_MAX_COLS, DEFAULT_MAX_ROWS, DEFAULT_SHOW_VALUES, format_faint, format_italic, render_tree
from ._node_info import NodeSchemaInfo, collect_schema_info
from ._display import DEFAULT_MAX_COLS, DEFAULT_MAX_ROWS, DEFAULT_SHOW_VALUES, render_tree
from ._node_info import collect_schema_info
from .treeutil import get_children, is_container

@@ -326,3 +326,3 @@ from .util import NotSet

if len(lines) == 0:
return format_faint(format_italic("No results found."))
return "No results found."

@@ -358,4 +358,4 @@ return "\n".join(lines)

def __getitem__(self, key):
if isinstance(self._node, (dict, list, tuple)) or NodeSchemaInfo.traversable(self._node):
child = self._node.__asdf_traverse__()[key] if NodeSchemaInfo.traversable(self._node) else self._node[key]
if isinstance(self._node, (dict, list, tuple)) or hasattr(self._node, "__asdf_traverse__"):
child = self._node.__asdf_traverse__()[key] if hasattr(self._node, "__asdf_traverse__") else self._node[key]
else:

@@ -387,5 +387,5 @@ msg = "This node cannot be indexed"

for identifiers, parent, node in current_nodes:
if (isinstance(node, (dict, list, tuple)) or NodeSchemaInfo.traversable(node)) and id(node) in seen:
if (isinstance(node, (dict, list, tuple)) or hasattr(node, "__asdf_traverse__")) and id(node) in seen:
continue
tnode = node.__asdf_traverse__() if NodeSchemaInfo.traversable(node) else node
tnode = node.__asdf_traverse__() if hasattr(node, "__asdf_traverse__") else node
children = get_children(tnode)

@@ -392,0 +392,0 @@ callback(identifiers, parent, node, [c for _, c in children])

@@ -0,1 +1,37 @@

4.1.0 (2025-01-31)
==================
Bugfix
------
- Improve ``schema_info`` handling of schemas with combiners (allOf, anyOf,
etc). (`#1875 <https://github.com/asdf-format/asdf/pull/1875>`_)
- While walking schema for info/search/schema_info walk into nodes with
__asdf_traverse__
if the parent node has a schema. (`#1884
<https://github.com/asdf-format/asdf/pull/1884>`_)
- Don't infinitely loop on recursive lists during info/search/schema_info.
(`#1884 <https://github.com/asdf-format/asdf/pull/1884>`_)
- Use extension_manager of associated AsdfFile in info/search/schema_info.
(`#1884 <https://github.com/asdf-format/asdf/pull/1884>`_)
- Only use ANSI format codes when supported by stdout. (`#1884
<https://github.com/asdf-format/asdf/pull/1884>`_)
Doc
---
- Fix typos in search documentation. (`#1880
<https://github.com/asdf-format/asdf/pull/1880>`_)
- Update docs theme to be consistent with asdf subprojects. (`#1897
<https://github.com/asdf-format/asdf/pull/1897>`_)
Feature
-------
- Add ``Converter.to_info`` to allow customizing ``info`` output. (`#1884
<https://github.com/asdf-format/asdf/pull/1884>`_)
4.0.0 (2024-11-19)

@@ -2,0 +38,0 @@ ==================

.. currentmodule:: asdf
**********
Array Data
**********
Saving arrays
-------------
=============

@@ -32,4 +37,5 @@ Beyond the basic data types of dictionaries, lists, strings and numbers, the

Sharing of data
---------------
===============

@@ -64,3 +70,3 @@ Arrays that are views on the same data automatically share the same

Saving inline arrays
--------------------
====================

@@ -109,3 +115,3 @@ For small arrays, you may not care about the efficiency of a binary

Saving external arrays
----------------------
======================

@@ -158,3 +164,3 @@ ASDF files may also be saved in "exploded form", which creates multiple files

Streaming array data
--------------------
====================

@@ -231,3 +237,3 @@ In certain scenarios, you may want to stream data to disk, rather than

Compression
-----------
===========

@@ -279,3 +285,3 @@ Individual blocks in an ASDF file may be compressed.

Memory mapping
--------------
==============

@@ -282,0 +288,0 @@ By default, all internal array data is memory mapped using `numpy.memmap`. This

@@ -225,4 +225,2 @@ .. currentmodule:: asdf.extension

When an object is converted to YAML, the resulting YAML tree is stored in the
If the object produced by the extension supports a class method

@@ -234,2 +232,8 @@ ``.__asdf_traverse__`` then it can be used by those tools to expose the contents

Similarly a `Converter` can implement a method ``to_info`` which converts
an instance of one of the supported types to a dict, tuple or list of
items to show during ``info`` and ``search``. This can be useful when
the supported type cannot be easily updated to add an ``__asdf_traverse__``
method.
.. _extending_extensions_installing:

@@ -236,0 +240,0 @@

@@ -7,2 +7,3 @@ .. currentmodule:: asdf

This section discusses the core features of the ASDF data format, and provides

@@ -559,6 +560,6 @@ examples and use cases that are specific to the Python implementation.

>>> af.search("foo") # Find nodes with key containing the string 'foo' # doctest: +SKIP
>>> af.search(type=int) # Find nodes that are instances of int # doctest: +SKIP
>>> af.search(type_=int) # Find nodes that are instances of int # doctest: +SKIP
>>> af.search(value=10) # Find nodes whose value is equal to 10 # doctest: +SKIP
>>> af.search(
... "foo", type=int, value=10
... "foo", type_=int, value=10
... ) # Find the intersection of the above # doctest: +SKIP

@@ -576,4 +577,4 @@

>>> af.search() # See an overview of the entire ASDF tree # doctest: +SKIP
>>> af.search().search(type="NDArrayType") # Find only ndarrays # doctest: +SKIP
>>> af.search().search(type="NDArrayType").search(
>>> af.search().search(type_="NDArrayType") # Find only ndarrays # doctest: +SKIP
>>> af.search().search(type_="NDArrayType").search(
... "err"

@@ -592,3 +593,3 @@ ... ) # Only ndarrays with 'err' in the key # doctest: +SKIP

>>> af.search()["data"].search(
... type=int
... type_=int
... ) # Find integer descendants of 'data' # doctest: +SKIP

@@ -614,3 +615,3 @@

When the ``type`` argument is a string, the search compares against the fully-qualified
When the ``type_`` argument is a string, the search compares against the fully-qualified
class name of each node:

@@ -621,5 +622,5 @@

>>> af.search(
... type="asdf.tags.core.Software"
... type_="asdf.tags.core.Software"
... ) # Find instances of ASDF's Software type # doctest: +SKIP
>>> af.search(type="^asdf\.") # Find all ASDF objects # doctest: +SKIP
>>> af.search(type_="^asdf\.") # Find all ASDF objects # doctest: +SKIP

@@ -638,4 +639,4 @@ When the ``value`` argument is a string, the search compares against the string

If ``key``, ``type``, and ``value`` aren't sufficient, we can also provide a callback
function to search by arbitrary criteria. The ``filter`` parameter accepts
If ``key``, ``type_``, and ``value`` aren't sufficient, we can also provide a callback
function to search by arbitrary criteria. The ``filter_`` parameter accepts
a callable that receives the node under consideration, and returns ``True``

@@ -647,3 +648,3 @@ to keep it or ``False`` to reject it from the search results. For example,

>>> af.search(type="NDArrayType", filter=lambda n: n.shape[0] == 1024) # doctest: +SKIP
>>> af.search(type_="NDArrayType", filter_=lambda n: n.shape[0] == 1024) # doctest: +SKIP

@@ -661,4 +662,4 @@ Formatting search results

>>> af.search(type=float) # Displays limited rows # doctest: +SKIP
>>> af.search(type=float).format(max_rows=None) # Show all matching rows # doctest: +SKIP
>>> af.search(type_=float) # Displays limited rows # doctest: +SKIP
>>> af.search(type_=float).format(max_rows=None) # Show all matching rows # doctest: +SKIP

@@ -669,3 +670,3 @@ Like `AsdfSearchResult.search`, calls to format may be chained:

>>> af.search("time").format(max_rows=10).search(type=str).format(
>>> af.search("time").format(max_rows=10).search(type_=str).format(
... max_rows=None

@@ -672,0 +673,0 @@ ... ) # doctest: +SKIP

.. currentmodule:: asdf
****************
Using Extensions
****************
The built-in extension
----------------------
======================

@@ -24,3 +29,3 @@ The ability to serialize the following types is provided by `asdf`'s built-in

Custom types
------------
============

@@ -47,3 +52,3 @@ For the purposes of this documentation, a "custom type" is any data type that

Extensions
----------
==========

@@ -63,3 +68,3 @@ In order for the converters and schemas to be used by `asdf`, they must be

Writing custom types to files
*****************************
-----------------------------

@@ -74,3 +79,3 @@ `asdf` is not capable of serializing any custom type unless an extension is

Reading files with custom types
*******************************
-------------------------------

@@ -100,3 +105,3 @@ The `asdf` software is capable of reading files that contain custom data types

Custom types, extensions, and versioning
----------------------------------------
========================================

@@ -116,3 +121,3 @@ Tags and schemas that follow best practices are versioned. This allows changes

Reading files
*************
-------------

@@ -130,3 +135,3 @@ When `asdf` encounters a tagged object in a file, it will compare the URI of

Writing files
*************
-------------

@@ -145,3 +150,3 @@ When writing a object to a file, `asdf` compares the object's type to the list

Extensions from other packages
------------------------------
==============================

@@ -174,3 +179,3 @@ Some external packages may define extensions that allow `asdf` to recognize some

Explicit use of extensions
--------------------------
==========================

@@ -219,3 +224,3 @@ Sometimes no packaged extensions are provided for the types you wish to

Extension checking
------------------
==================

@@ -222,0 +227,0 @@ When writing ASDF files using this software, metadata about the extensions that

@@ -31,3 +31,3 @@ import sys

author = f"{configuration['authors'][0]['name']} <{configuration['authors'][0]['email']}>"
copyright = f"{datetime.datetime.now().year}, {configuration['authors'][0]['name']}"
copyright = f"{datetime.datetime.now().year}, {author}"

@@ -67,2 +67,71 @@ release = distribution(configuration["name"]).version

extensions += ["sphinx_inline_tabs"]
# Docs are hosted as a "subproject" under the main project's domain: https://www.asdf-format.org/projects
# This requires including links to main project (asdf-website) and the other asdf subprojects
# See https://docs.readthedocs.io/en/stable/guides/intersphinx.html#using-intersphinx
subprojects = {
# main project
"asdf-website": ("https://www.asdf-format.org/en/latest", None),
# other subprojects
"asdf-standard": ("https://www.asdf-format.org/projects/asdf-standard/en/latest/", None),
"asdf-coordinates-schemas": ("https://www.asdf-format.org/projects/asdf-coordinates-schemas/en/latest/", None),
"asdf-transform-schemas": ("https://www.asdf-format.org/projects/asdf-transform-schemas/en/latest/", None),
"asdf-wcs-schemas": ("https://www.asdf-format.org/projects/asdf-wcs-schemas/en/latest/", None),
}
intersphinx_mapping.update(subprojects) # noqa: F405
extensions += ["sphinx_inline_tabs", "sphinx.ext.intersphinx", "sphinx.ext.extlinks"] # noqa: F405
html_theme = "furo"
html_static_path = ["_static"]
# Override default settings from sphinx_asdf / sphinx_astropy (incompatible with furo)
html_sidebars = {}
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "_static/images/favicon.ico"
html_logo = ""
globalnavlinks = {
"ASDF Projects": "https://www.asdf-format.org",
"Tutorials": "https://www.asdf-format.org/en/latest/tutorials/index.html",
"Community": "https://www.asdf-format.org/en/latest/community/index.html",
}
topbanner = ""
for text, link in globalnavlinks.items():
topbanner += f"<a href={link}>{text}</a>"
html_theme_options = {
"light_logo": "images/logo-light-mode.png",
"dark_logo": "images/logo-dark-mode.png",
"announcement": topbanner,
}
pygments_style = "monokai"
# NB Dark style pygments is furo-specific at this time
pygments_dark_style = "monokai"
# Render inheritance diagrams in SVG
graphviz_output_format = "svg"
graphviz_dot_args = [
"-Nfontsize=10",
"-Nfontname=Helvetica Neue, Helvetica, Arial, sans-serif",
"-Efontsize=10",
"-Efontname=Helvetica Neue, Helvetica, Arial, sans-serif",
"-Gbgcolor=white",
"-Gfontsize=10",
"-Gfontname=Helvetica Neue, Helvetica, Arial, sans-serif",
]
# -- Options for LaTeX output --------------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [("index", project + ".tex", project + " Documentation", author, "manual")]
latex_logo = "_static/images/logo-light-mode.png"
def setup(app):
app.add_css_file("css/globalnav.css")

@@ -70,3 +70,3 @@ .. _asdf:

asdf/user_api
asdf/user_api/index
asdf/developer_api

@@ -112,2 +112,1 @@

* :ref:`modindex`
* :ref:`search`

@@ -9,3 +9,3 @@ name: rtd311

- graphviz
- sphinx_rtd_theme>1.2.0
- furo
- towncrier

@@ -1,4 +0,4 @@

Metadata-Version: 2.1
Metadata-Version: 2.2
Name: asdf
Version: 4.0.0
Version: 4.1.0
Summary: Python implementation of the ASDF Standard

@@ -36,3 +36,3 @@ Author-email: The ASDF Developers <help@stsci.edu>

Project-URL: documentation, https://asdf.readthedocs.io/en/stable
Project-URL: documentation, https://asdf.readthedocs.io/en/stable/
Project-URL: repository, https://github.com/asdf-format/asdf

@@ -67,2 +67,3 @@ Project-URL: tracker, https://github.com/asdf-format/asdf/issues

Requires-Dist: tomli; python_version < "3.11" and extra == "docs"
Requires-Dist: furo; extra == "docs"
Provides-Extra: tests

@@ -69,0 +70,0 @@ Requires-Dist: fsspec[http]>=2022.8.2; extra == "tests"

@@ -43,2 +43,3 @@ [project]

'tomli; python_version < "3.11"',
"furo",
]

@@ -53,3 +54,3 @@ tests = [

[project.urls]
'documentation' = 'https://asdf.readthedocs.io/en/stable'
'documentation' = 'https://asdf.readthedocs.io/en/stable/'
'repository' = 'https://github.com/asdf-format/asdf'

@@ -68,3 +69,3 @@ 'tracker' = 'https://github.com/asdf-format/asdf/issues'

"setuptools>=60",
"setuptools_scm[toml]>=3.4",
"setuptools_scm[toml]>=8",
"wheel",

@@ -71,0 +72,0 @@ ]

+6
-12

@@ -316,13 +316,9 @@ [tox]

commands_pre =
bash -c "pip freeze -q | grep 'asdf @' > {env_tmp_dir}/requirements.txt"
git clone https://github.com/DKISTDC/dkist.git
pip install -e dkist[tests]
pip install -r {env_tmp_dir}/requirements.txt
git clone https://github.com/DKISTDC/dkist.git .
bash -c "pip freeze -q | grep 'asdf @' > {env_tmp_dir}/asdf_requirement.txt"
pip install -e ".[tests]"
pip install -r {env_tmp_dir}/asdf_requirement.txt
pip freeze
commands =
# the AsdfManifestURIMismatchWarning filter can be removed when a new sunpy
# is released which contains the fixed manifests:
# https://github.com/sunpy/sunpy/pull/7432
pytest dkist --benchmark-skip \
-W "ignore::asdf.exceptions.AsdfManifestURIMismatchWarning"
pytest --benchmark-skip

@@ -338,5 +334,3 @@ [testenv:abacusutils]

git clone https://github.com/abacusorg/abacusutils.git
pip install -vU setuptools wheel scipy Cython 'numpy<2' # for classy and corrfunc
pip install --no-build-isolation classy corrfunc
pip install -e abacusutils[test]
pip install -e ./abacusutils pytest
pip install -r {env_tmp_dir}/requirements.txt

@@ -343,0 +337,0 @@ # make an empty pytest.ini to prevent pytest from crawling up

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

.. _user_api:
********
User API
********
.. automodapi:: asdf
:include-all-objects:
:inherited-members:
:no-inheritance-diagram:
:skip: ValidationError
:skip: Stream
.. automodapi:: asdf.search
.. automodapi:: asdf.config