typeid-python
Advanced tools
+314
| # Created by https://www.toptal.com/developers/gitignore/api/venv,python,visualstudiocode,pycharm | ||
| # Edit at https://www.toptal.com/developers/gitignore?templates=venv,python,visualstudiocode,pycharm | ||
| ### PyCharm ### | ||
| # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider | ||
| # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 | ||
| # User-specific stuff | ||
| .idea/**/workspace.xml | ||
| .idea/**/tasks.xml | ||
| .idea/**/usage.statistics.xml | ||
| .idea/**/dictionaries | ||
| .idea/**/shelf | ||
| # AWS User-specific | ||
| .idea/**/aws.xml | ||
| # Generated files | ||
| .idea/**/contentModel.xml | ||
| # Sensitive or high-churn files | ||
| .idea/**/dataSources/ | ||
| .idea/**/dataSources.ids | ||
| .idea/**/dataSources.local.xml | ||
| .idea/**/sqlDataSources.xml | ||
| .idea/**/dynamic.xml | ||
| .idea/**/uiDesigner.xml | ||
| .idea/**/dbnavigator.xml | ||
| # Gradle | ||
| .idea/**/gradle.xml | ||
| .idea/**/libraries | ||
| # Gradle and Maven with auto-import | ||
| # When using Gradle or Maven with auto-import, you should exclude module files, | ||
| # since they will be recreated, and may cause churn. Uncomment if using | ||
| # auto-import. | ||
| # .idea/artifacts | ||
| # .idea/compiler.xml | ||
| # .idea/jarRepositories.xml | ||
| # .idea/modules.xml | ||
| # .idea/*.iml | ||
| # .idea/modules | ||
| # *.iml | ||
| # *.ipr | ||
| # CMake | ||
| cmake-build-*/ | ||
| # Mongo Explorer plugin | ||
| .idea/**/mongoSettings.xml | ||
| # File-based project format | ||
| *.iws | ||
| # IntelliJ | ||
| out/ | ||
| # mpeltonen/sbt-idea plugin | ||
| .idea_modules/ | ||
| # JIRA plugin | ||
| atlassian-ide-plugin.xml | ||
| # Cursive Clojure plugin | ||
| .idea/replstate.xml | ||
| # SonarLint plugin | ||
| .idea/sonarlint/ | ||
| # Crashlytics plugin (for Android Studio and IntelliJ) | ||
| com_crashlytics_export_strings.xml | ||
| crashlytics.properties | ||
| crashlytics-build.properties | ||
| fabric.properties | ||
| # Editor-based Rest Client | ||
| .idea/httpRequests | ||
| # Android studio 3.1+ serialized cache file | ||
| .idea/caches/build_file_checksums.ser | ||
| ### PyCharm Patch ### | ||
| # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 | ||
| # *.iml | ||
| # modules.xml | ||
| # .idea/misc.xml | ||
| # *.ipr | ||
| # Sonarlint plugin | ||
| # https://plugins.jetbrains.com/plugin/7973-sonarlint | ||
| .idea/**/sonarlint/ | ||
| # SonarQube Plugin | ||
| # https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin | ||
| .idea/**/sonarIssues.xml | ||
| # Markdown Navigator plugin | ||
| # https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced | ||
| .idea/**/markdown-navigator.xml | ||
| .idea/**/markdown-navigator-enh.xml | ||
| .idea/**/markdown-navigator/ | ||
| # Cache file creation bug | ||
| # See https://youtrack.jetbrains.com/issue/JBR-2257 | ||
| .idea/$CACHE_FILE$ | ||
| # CodeStream plugin | ||
| # https://plugins.jetbrains.com/plugin/12206-codestream | ||
| .idea/codestream.xml | ||
| # Azure Toolkit for IntelliJ plugin | ||
| # https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij | ||
| .idea/**/azureSettings.xml | ||
| ### Python ### | ||
| # Byte-compiled / optimized / DLL files | ||
| __pycache__/ | ||
| *.py[cod] | ||
| *$py.class | ||
| # C extensions | ||
| *.so | ||
| # Distribution / packaging | ||
| .Python | ||
| build/ | ||
| develop-eggs/ | ||
| dist/ | ||
| downloads/ | ||
| eggs/ | ||
| .eggs/ | ||
| lib/ | ||
| lib64/ | ||
| parts/ | ||
| sdist/ | ||
| var/ | ||
| wheels/ | ||
| share/python-wheels/ | ||
| *.egg-info/ | ||
| .installed.cfg | ||
| *.egg | ||
| MANIFEST | ||
| # PyInstaller | ||
| # Usually these files are written by a python script from a template | ||
| # before PyInstaller builds the exe, so as to inject date/other infos into it. | ||
| *.manifest | ||
| *.spec | ||
| # Installer logs | ||
| pip-log.txt | ||
| pip-delete-this-directory.txt | ||
| # Unit test / coverage reports | ||
| htmlcov/ | ||
| .tox/ | ||
| .nox/ | ||
| .coverage | ||
| .coverage.* | ||
| .cache | ||
| nosetests.xml | ||
| coverage.xml | ||
| *.cover | ||
| *.py,cover | ||
| .hypothesis/ | ||
| .pytest_cache/ | ||
| cover/ | ||
| # Translations | ||
| *.mo | ||
| *.pot | ||
| # Django stuff: | ||
| *.log | ||
| local_settings.py | ||
| db.sqlite3 | ||
| db.sqlite3-journal | ||
| # Flask stuff: | ||
| instance/ | ||
| .webassets-cache | ||
| # Scrapy stuff: | ||
| .scrapy | ||
| # Sphinx documentation | ||
| docs/_build/ | ||
| # PyBuilder | ||
| .pybuilder/ | ||
| target/ | ||
| # Jupyter Notebook | ||
| .ipynb_checkpoints | ||
| # IPython | ||
| profile_default/ | ||
| ipython_config.py | ||
| # pyenv | ||
| # For a library or package, you might want to ignore these files since the code is | ||
| # intended to run in multiple environments; otherwise, check them in: | ||
| # .python-version | ||
| # pipenv | ||
| # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. | ||
| # However, in case of collaboration, if having platform-specific dependencies or dependencies | ||
| # having no cross-platform support, pipenv may install dependencies that don't work, or not | ||
| # install all needed dependencies. | ||
| #Pipfile.lock | ||
| # poetry | ||
| # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. | ||
| # This is especially recommended for binary packages to ensure reproducibility, and is more | ||
| # commonly ignored for libraries. | ||
| # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control | ||
| #poetry.lock | ||
| # pdm | ||
| # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. | ||
| #pdm.lock | ||
| # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it | ||
| # in version control. | ||
| # https://pdm.fming.dev/#use-with-ide | ||
| .pdm.toml | ||
| # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm | ||
| __pypackages__/ | ||
| # Celery stuff | ||
| celerybeat-schedule | ||
| celerybeat.pid | ||
| # SageMath parsed files | ||
| *.sage.py | ||
| # Environments | ||
| .env | ||
| .venv | ||
| env/ | ||
| venv/ | ||
| ENV/ | ||
| env.bak/ | ||
| venv.bak/ | ||
| # Spyder project settings | ||
| .spyderproject | ||
| .spyproject | ||
| # Rope project settings | ||
| .ropeproject | ||
| # mkdocs documentation | ||
| /site | ||
| # mypy | ||
| .mypy_cache/ | ||
| .dmypy.json | ||
| dmypy.json | ||
| # Pyre type checker | ||
| .pyre/ | ||
| # pytype static type analyzer | ||
| .pytype/ | ||
| # Cython debug symbols | ||
| cython_debug/ | ||
| # PyCharm | ||
| # JetBrains specific template is maintained in a separate JetBrains.gitignore that can | ||
| # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore | ||
| # and can be added to the global gitignore or merged into this file. For a more nuclear | ||
| # option (not recommended) you can uncomment the following to ignore the entire idea folder. | ||
| #.idea/ | ||
| ### Python Patch ### | ||
| # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration | ||
| poetry.toml | ||
| # ruff | ||
| .ruff_cache/ | ||
| ### venv ### | ||
| # Virtualenv | ||
| # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ | ||
| [Bb]in | ||
| [Ii]nclude | ||
| [Ll]ib | ||
| [Ll]ib64 | ||
| [Ll]ocal | ||
| [Ss]cripts | ||
| pyvenv.cfg | ||
| pip-selfcheck.json | ||
| ### VisualStudioCode ### | ||
| .vscode/ | ||
| # Local History for Visual Studio Code | ||
| .history/ | ||
| # Built Visual Studio Code Extensions | ||
| *.vsix | ||
| ### VisualStudioCode Patch ### | ||
| # Ignore all local history of files | ||
| .history | ||
| .ionide | ||
| # End of https://www.toptal.com/developers/gitignore/api/venv,python,visualstudiocode,pycharm | ||
| .DS_Store |
| """ | ||
| Explain subsystem for TypeID. | ||
| This package provides a high-level, non-breaking API and CLI support | ||
| for answering the question: | ||
| "What is this TypeID?" | ||
| It is intentionally: | ||
| - additive (no changes to existing TypeID semantics), | ||
| - schema-optional (works fully offline), | ||
| - safe by default (read-only, no side effects). | ||
| Public API: | ||
| explain(id_str, schema_path=None, **options) -> Explanation | ||
| """ | ||
| from pathlib import Path | ||
| from typing import Optional | ||
| from .discovery import discover_schema_path | ||
| from .engine import explain as _explain_engine | ||
| from .model import Explanation | ||
| from .registry import load_registry, make_lookup | ||
| __all__ = [ | ||
| "explain", | ||
| "Explanation", | ||
| ] | ||
def explain(
    id_str: str,
    *,
    schema_path: Optional[str | Path] = None,
    enable_schema: bool = True,
    enable_links: bool = True,
) -> Explanation:
    """
    High-level convenience API for explaining a TypeID.

    Parses/validates the TypeID, resolves a schema file (explicit path or
    discovery rules), and runs the explain engine. Normal user errors never
    raise; they are reported inside the returned Explanation.

    Args:
        id_str: TypeID string to explain.
        schema_path: Optional explicit path to schema file.
            If None, discovery rules are applied.
        enable_schema: Disable schema usage entirely if False.
        enable_links: Disable link rendering if False.

    Returns:
        Explanation object.
    """
    lookup = None
    if enable_schema:
        if schema_path is not None:
            resolved = Path(schema_path).expanduser()
        else:
            resolved = discover_schema_path().path
        if resolved is not None:
            loaded = load_registry(resolved)
            # Load errors are deliberately not raised here; the CLI layer
            # may surface them as warnings if desired.
            if loaded.registry is not None:
                lookup = make_lookup(loaded.registry)
    return _explain_engine(
        id_str,
        schema_lookup=lookup,
        enable_schema=enable_schema,
        enable_links=enable_links,
    )
| """ | ||
| Schema discovery for `typeid explain`. | ||
| This module implements a conservative, non-breaking discovery mechanism: | ||
| - If nothing is found, callers proceed without schema (feature still works). | ||
| - No new mandatory dependencies. | ||
| - Paths are resolved deterministically with clear precedence. | ||
| Precedence (first match wins): | ||
| 1) explicit CLI arg: --schema PATH (handled by caller; use discover_schema only if not provided) | ||
| 2) environment variable: TYPEID_SCHEMA | ||
| 3) current working directory: | ||
| - typeid.schema.json | ||
| - typeid.schema.yaml / typeid.schema.yml | ||
| 4) user config directory: | ||
| - <config_dir>/typeid/schema.json | ||
| - <config_dir>/typeid/schema.yaml / schema.yml | ||
| """ | ||
| import os | ||
| from dataclasses import dataclass | ||
| from pathlib import Path | ||
| from typing import Iterable, Optional | ||
# Schema file names probed in the current working directory, in order.
DEFAULT_CWD_CANDIDATES = (
    "typeid.schema.json",
    "typeid.schema.yaml",
    "typeid.schema.yml",
)
# File names probed under <config_dir>/typeid/, in order.
DEFAULT_USER_CANDIDATES = (
    "schema.json",
    "schema.yaml",
    "schema.yml",
)
@dataclass(frozen=True, slots=True)
class DiscoveryResult:
    """Result of schema discovery."""
    # Discovered schema file path, or None when nothing was found.
    path: Optional[Path]
    # Human-readable origin of the result.
    source: str  # e.g., "env:TYPEID_SCHEMA", "cwd", "user_config", "none"
def discover_schema_path(
    *,
    env_var: str = "TYPEID_SCHEMA",
    cwd: Optional[Path] = None,
) -> DiscoveryResult:
    """
    Locate a schema file using the configured precedence rules.

    Order: environment variable, then current working directory, then the
    per-user config directory. The first existing file wins.

    Args:
        env_var: environment variable name to check first.
        cwd: optional cwd override (useful for tests).

    Returns:
        DiscoveryResult with found path or None.
    """
    # 1) Environment variable takes absolute precedence. When it is set but
    #    points nowhere, we intentionally stop (caller may warn) instead of
    #    falling through to the other locations.
    configured = os.environ.get(env_var)
    if configured:
        candidate = Path(configured).expanduser()
        if candidate.is_file():
            return DiscoveryResult(path=candidate, source=f"env:{env_var}")
        return DiscoveryResult(path=None, source=f"env:{env_var} (not found)")

    # 2) Files in the working directory.
    base_dir = cwd or Path.cwd()
    for filename in DEFAULT_CWD_CANDIDATES:
        candidate = base_dir / filename
        if candidate.is_file():
            return DiscoveryResult(path=candidate, source="cwd")

    # 3) Files under the user's config directory.
    config_dir = _user_config_dir()
    if config_dir is not None:
        typeid_dir = config_dir / "typeid"
        for filename in DEFAULT_USER_CANDIDATES:
            candidate = typeid_dir / filename
            if candidate.is_file():
                return DiscoveryResult(path=candidate, source="user_config")

    return DiscoveryResult(path=None, source="none")
def _user_config_dir() -> Optional[Path]:
    """
    Return OS-appropriate user config directory.

    - Windows: %APPDATA%
    - Linux/macOS: $XDG_CONFIG_HOME, falling back to ~/.config

    Returns:
        Config directory Path, or None if it cannot be determined.
    """
    # Windows: APPDATA is the typical location for roaming config
    appdata = os.environ.get("APPDATA")
    if appdata:
        return Path(appdata).expanduser()
    # XDG on Linux, also often present on macOS
    xdg = os.environ.get("XDG_CONFIG_HOME")
    if xdg:
        return Path(xdg).expanduser()
    # Fallback to ~/.config. Note: Path.home() never returns a falsy value;
    # it raises RuntimeError when the home directory cannot be resolved, so
    # the previous `if home:` check was dead code and the function could
    # raise despite its Optional contract. Catch the error instead.
    try:
        return Path.home() / ".config"
    except RuntimeError:
        return None
def iter_default_candidate_paths(*, cwd: Optional[Path] = None) -> Iterable[Path]:
    """
    Yield every candidate schema path in discovery order (env var excluded).

    Useful for debugging or `typeid explain --debug-discovery` style features.
    """
    base_dir = cwd or Path.cwd()
    yield from (base_dir / filename for filename in DEFAULT_CWD_CANDIDATES)
    config_dir = _user_config_dir()
    if config_dir is not None:
        typeid_dir = config_dir / "typeid"
        yield from (typeid_dir / filename for filename in DEFAULT_USER_CANDIDATES)
| """ | ||
| Explain engine for the `typeid explain` feature. | ||
| This module is intentionally: | ||
| - Additive (doesn't change existing TypeID behavior) | ||
| - Defensive (never crashes on normal user input) | ||
| - Dependency-light (stdlib only) | ||
| It builds an Explanation by combining: | ||
| 1) parsed + derived facts from the ID (always available if parsable) | ||
| 2) optional schema (registry) data looked up by prefix | ||
| 3) optional rendered links (from schema templates) | ||
| """ | ||
| from dataclasses import replace | ||
| from datetime import datetime, timezone | ||
| from typing import Any, Callable, Dict, Optional | ||
| from typeid import TypeID | ||
| from typeid.errors import TypeIDException | ||
| from .model import Explanation, ParsedTypeID, ParseError, Provenance, TypeSchema | ||
| SchemaLookup = Callable[[str], Optional[TypeSchema]] | ||
def explain(
    id_str: str,
    *,
    schema_lookup: Optional[SchemaLookup] = None,
    enable_schema: bool = True,
    enable_links: bool = True,
) -> Explanation:
    """
    Produce an Explanation for a TypeID string.

    Args:
        id_str: The TypeID string to explain.
        schema_lookup: Optional callable to fetch TypeSchema by prefix.
            If provided and enable_schema=True, we will look up schema.
        enable_schema: If False, do not attempt schema lookup (offline mode).
        enable_links: If True, render link templates from schema (if any).

    Returns:
        Explanation (always returned; valid=False if parse/validation fails).
    """
    parsed = _parse_typeid(id_str)
    # Start building explanation; keep it useful even if invalid.
    exp = Explanation(
        id=id_str,
        valid=parsed.valid,
        parsed=parsed,
        schema=None,
        derived={},
        links={},
        provenance={},
        warnings=[],
        errors=list(parsed.errors),  # copy so parsed.errors stays untouched
    )
    # If parse failed, nothing more we can deterministically derive.
    if not parsed.valid or parsed.prefix is None or parsed.suffix is None:
        return exp
    # Schema lookup (optional)
    schema: Optional[TypeSchema] = None
    if enable_schema and schema_lookup is not None and parsed.prefix:
        try:
            schema = schema_lookup(parsed.prefix)
        except Exception as e:  # never let schema backend break explain
            exp.warnings.append(f"Schema lookup failed: {e!s}")
            schema = None
    if schema is not None:
        # Explanation is frozen, so the schema is attached via replace().
        # The new instance shares the same mutable dicts/lists as the old
        # one, which is why the in-place updates below still land on the
        # object we return.
        exp = replace(exp, schema=schema)
        _apply_schema_provenance(exp)
    # Render links (optional)
    if enable_links and schema is not None and schema.links:
        rendered, warnings = _render_links(schema.links, exp)
        exp.links.update(rendered)
        exp.warnings.extend(warnings)
        for k in rendered.keys():
            # Link provenance keys are namespaced as "links.<name>".
            exp.provenance.setdefault(f"links.{k}", Provenance.SCHEMA)
    # Derived facts provenance
    _apply_derived_provenance(exp)
    return exp
def _parse_typeid(id_str: str) -> ParsedTypeID:
    """
    Parse and validate a TypeID using the library's existing logic.

    We delegate to TypeID.from_string() so validation matches what existing
    users see. On failure we still best-effort split the input so callers
    can display something helpful (without promising correctness).
    """

    def failure(code: str, message: str) -> ParsedTypeID:
        # Shared shape for both error paths below.
        guessed_prefix, guessed_suffix = _best_effort_split(id_str)
        return ParsedTypeID(
            raw=id_str,
            prefix=guessed_prefix,
            suffix=guessed_suffix,
            valid=False,
            errors=[ParseError(code=code, message=message)],
            uuid=None,
            created_at=None,
            sortable=None,
        )

    try:
        tid = TypeID.from_string(id_str)
    except TypeIDException as exc:
        return failure("invalid_typeid", str(exc))
    except Exception as exc:
        return failure("parse_error", f"Unexpected error: {exc!s}")

    # Derived facts from the validated TypeID.
    uuid_value = tid.uuid  # library returns a UUID object (uuid6.UUID)
    return ParsedTypeID(
        raw=id_str,
        prefix=tid.prefix,
        suffix=tid.suffix,
        valid=True,
        errors=[],
        uuid=str(uuid_value),
        created_at=_uuid7_created_at(uuid_value),
        sortable=True,  # UUIDv7 is time-ordered by design
    )
def _best_effort_split(id_str: str) -> tuple[Optional[str], Optional[str]]:
    """
    Best-effort split of a TypeID string into (prefix, suffix).

    TypeID prefixes may themselves contain underscores, so the split happens
    on the LAST underscore. Returns (None, None) when the string cannot be
    split into two non-empty parts.
    """
    prefix, underscore, suffix = id_str.rpartition("_")
    if not underscore or not prefix or not suffix:
        return None, None
    return prefix, suffix
def _uuid7_created_at(uuid_obj: Any) -> Optional[datetime]:
    """
    Extract created_at from a UUIDv7.

    UUIDv7 layout: the top 48 bits are unix epoch time in milliseconds.
    Python's uuid.UUID.int is a 128-bit integer with the most significant
    bits first, so unix_ms = int >> 80 (128-48).

    Returns:
        UTC datetime, or None when the value is not a UUIDv7 or extraction
        fails.
    """
    try:
        # Fix: only UUIDv7 carries a timestamp in its top 48 bits; the old
        # code happily decoded garbage datetimes from any UUID version.
        # Some UUID implementations may not expose .version, so an unknown
        # version is tolerated to preserve best-effort behavior.
        version = getattr(uuid_obj, "version", None)
        if version is not None and version != 7:
            return None
        # uuid_obj is likely uuid6.UUID, but supports .int like uuid.UUID
        unix_ms = int(uuid_obj.int) >> 80
        return datetime.fromtimestamp(unix_ms / 1000.0, tz=timezone.utc)
    except Exception:
        return None
class _SafeFormatDict(dict):
    """Format mapping that echoes unknown placeholders instead of raising."""

    def __missing__(self, key: str) -> str:
        # Re-wrap the key so "{unknown}" survives format_map untouched.
        return "{" + key + "}"
def _render_links(templates: Dict[str, str], exp: Explanation) -> tuple[Dict[str, str], list[str]]:
    """
    Render schema link templates against the explanation's known values.

    Supported placeholders:
        {id}, {prefix}, {suffix}, {uuid}
        {created_at} (ISO8601 if available)

    Unknown placeholders remain unchanged; a bad template never raises.

    Returns:
        (rendered links, warning messages)
    """
    parsed = exp.parsed
    values = _SafeFormatDict(
        id=exp.id,
        prefix=parsed.prefix or "",
        suffix=parsed.suffix or "",
        uuid=parsed.uuid or "",
        created_at=parsed.created_at.isoformat() if parsed.created_at else "",
    )
    links: Dict[str, str] = {}
    problems: list[str] = []
    for name, template in templates.items():
        if not isinstance(template, str):
            problems.append(f"Link template '{name}' is not a string; skipping.")
            continue
        try:
            links[name] = template.format_map(values)
        except Exception as e:
            problems.append(f"Failed to render link '{name}': {e!s}")
    return links, problems
def _apply_schema_provenance(exp: Explanation) -> None:
    """
    Tag the common normalized schema fields as schema-sourced.

    Kept intentionally small; schema.raw stays schema-sourced by definition.
    """
    schema = exp.schema
    if schema is None:
        return
    for field_name in ("name", "description", "owner_team", "pii", "retention"):
        if getattr(schema, field_name, None) is not None:
            exp.provenance.setdefault(field_name, Provenance.SCHEMA)
def _apply_derived_provenance(exp: Explanation) -> None:
    """Tag parsed-derived fields as coming from the ID string itself."""
    parsed = exp.parsed
    for field_name in ("prefix", "suffix", "uuid", "created_at", "sortable"):
        if getattr(parsed, field_name) is not None:
            exp.provenance.setdefault(field_name, Provenance.DERIVED_FROM_ID)
| """ | ||
| Formatting helpers for `typeid explain`. | ||
| This module is intentionally small and dependency-free. | ||
| It supports: | ||
| - YAML-ish pretty output (human-friendly) | ||
| - JSON output via Explanation.to_dict() (machine-friendly) | ||
| It also provides a minimal "safe formatter" for link templates | ||
| (kept here so CLI and engine can share behavior if needed). | ||
| Note: This file does NOT require PyYAML. We output YAML-like text | ||
| without claiming it's strict YAML. | ||
| """ | ||
| import json | ||
| from datetime import datetime | ||
| from typing import Any, Dict, List, Mapping, Optional | ||
| from .model import Explanation, Provenance | ||
def format_explanation_pretty(exp: Explanation) -> str:
    """
    Render an Explanation as readable YAML-ish text.

    We intentionally keep it stable-ish and human-friendly:
    - predictable section ordering
    - indentation
    - lists rendered as "- item"

    This is NOT guaranteed to be strict YAML; it is "YAML-like".
    For strict machine consumption, use JSON output.
    """
    # NOTE(review): the literal indent strings below appear collapsed to a
    # single space (likely an extraction artifact) — confirm the intended
    # widths against the upstream file before relying on exact output.
    lines: List[str] = []

    def add(line: str = "") -> None:
        # Tiny helper so each section reads as a sequence of add() calls;
        # add() with no argument emits a blank separator line.
        lines.append(line)

    # Header: id + validity always come first.
    add(f"id: {exp.id}")
    add(f"valid: {str(exp.valid).lower()}")
    if exp.errors:
        add("errors:")
        for e in exp.errors:
            add(f" - code: {e.code}")
            add(f" message: {_quote_if_needed(e.message)}")
    add()
    # Parsed facts are emitted even when fields are None (shown as null).
    add("parsed:")
    _emit_kv(lines, " ", "prefix", exp.parsed.prefix)
    _emit_kv(lines, " ", "suffix", exp.parsed.suffix)
    _emit_kv(lines, " ", "uuid", exp.parsed.uuid)
    _emit_kv(lines, " ", "created_at", _iso(exp.parsed.created_at))
    _emit_kv(lines, " ", "sortable", exp.parsed.sortable)
    # Schema section
    add()
    add("schema:")
    if exp.schema is None:
        add(" found: false")
    else:
        add(" found: true")
        _emit_kv(lines, " ", "prefix", exp.schema.prefix)
        _emit_kv(lines, " ", "name", exp.schema.name)
        _emit_kv(lines, " ", "description", exp.schema.description)
        _emit_kv(lines, " ", "owner_team", exp.schema.owner_team)
        _emit_kv(lines, " ", "pii", exp.schema.pii)
        _emit_kv(lines, " ", "retention", exp.schema.retention)
        # Show extra raw keys (optional, but helpful)
        extra = _schema_extras(exp.schema.raw)
        if extra:
            add(" extra:")
            for k in sorted(extra.keys()):
                _emit_any(lines, " ", k, extra[k])
    # Derived — omitted entirely when empty (unlike links below).
    if exp.derived:
        add()
        add("derived:")
        for k in sorted(exp.derived.keys()):
            _emit_any(lines, " ", k, exp.derived[k])
    # Links — always printed; "{}" marks the empty case.
    add()
    add("links:")
    if not exp.links:
        add(" {}")
    else:
        for k in sorted(exp.links.keys()):
            _emit_kv(lines, " ", k, exp.links[k])
    # Provenance — values may be Provenance enums or plain strings.
    if exp.provenance:
        add()
        add("provenance:")
        for k in sorted(exp.provenance.keys()):
            prov = exp.provenance[k]
            add(f" {k}: {prov.value if isinstance(prov, Provenance) else str(prov)}")
    # Warnings
    if exp.warnings:
        add()
        add("warnings:")
        for w in exp.warnings:
            add(f" - {_quote_if_needed(w)}")
    # Normalize trailing blank lines to exactly one newline.
    return "\n".join(lines).rstrip() + "\n"
def format_explanation_json(exp: Explanation, *, indent: int = 2) -> str:
    """Serialize an Explanation as a JSON string (trailing newline included)."""
    payload = exp.to_dict()
    return json.dumps(payload, indent=indent, ensure_ascii=False) + "\n"
class SafeFormatDict(dict):
    """Format mapping that leaves unknown placeholders intact (no KeyError)."""

    def __missing__(self, key: str) -> str:
        # Return the placeholder itself so format_map passes it through.
        return "{" + key + "}"
def render_template(template: str, mapping: Mapping[str, Any]) -> str:
    """
    Render a template via str.format_map with SafeFormatDict.

    Values are stringified first; unknown placeholders remain unchanged.
    """
    stringified = {key: _stringify(value) for key, value in mapping.items()}
    return template.format_map(SafeFormatDict(stringified))
def _iso(dt: Optional[datetime]) -> Optional[str]:
    """ISO8601-format *dt*, or None when absent."""
    if dt is None:
        return None
    return dt.isoformat()
def _emit_kv(lines: List[str], indent: str, key: str, value: Any) -> None:
    """Append one "key: value" line in YAML-ish scalar form."""
    if value is None:
        rendered = "null"
    elif isinstance(value, bool):
        # bool is checked before int/float because bool subclasses int.
        rendered = str(value).lower()
    elif isinstance(value, (int, float)):
        rendered = str(value)
    else:
        rendered = _quote_if_needed(str(value))
    lines.append(f"{indent}{key}: {rendered}")
def _emit_any(lines: List[str], indent: str, key: str, value: Any) -> None:
    """
    Emit arbitrary JSON-y values in YAML-ish style (scalars, lists, dicts).
    """
    if value is None or isinstance(value, (str, bool, int, float)):
        _emit_kv(lines, indent, key, value)
        return
    if isinstance(value, list):
        lines.append(f"{indent}{key}:")
        if not value:
            lines.append(f"{indent} []")
            return
        for element in value:
            if element is None or isinstance(element, (str, int, float, bool)):
                text = _stringify(element)
            else:
                # Nested complex item: fall back to inline JSON.
                text = json.dumps(element, ensure_ascii=False)
            lines.append(f"{indent} - {_quote_if_needed(text)}")
        return
    if isinstance(value, dict):
        lines.append(f"{indent}{key}:")
        if not value:
            lines.append(f"{indent} {{}}")
            return
        for child_key in sorted(value.keys(), key=lambda x: str(x)):
            _emit_any(lines, indent + " ", str(child_key), value[child_key])
        return
    # Anything else: stringify and emit as a scalar.
    _emit_kv(lines, indent, key, _stringify(value))
def _stringify(v: Any) -> str:
    """Convert a scalar to its YAML-ish textual form."""
    if v is None:
        return "null"
    if isinstance(v, bool):
        # Checked before the numeric case because bool subclasses int.
        return "true" if v else "false"
    if isinstance(v, (int, float)):
        return str(v)
    if isinstance(v, datetime):
        return v.isoformat()
    return str(v)
def _quote_if_needed(s: str) -> str:
    """
    Add quotes if the string contains characters that could confuse YAML-ish output.

    Triggers: empty string, YAML-significant punctuation, quote/backslash
    characters, surrounding whitespace, or boolean/null lookalikes.
    Minimal quoting rules for readability; not strict YAML.
    """
    if s == "":
        return '""'
    # Fix: '"' and '\\' are included in the trigger set so strings that
    # contain them get quoted AND escaped; previously such strings passed
    # through unquoted with raw quotes/backslashes, producing ambiguous
    # output (the escape logic below was unreachable for them).
    special = [":", "#", "{", "}", "[", "]", ",", "\n", "\r", "\t", '"', "\\"]
    needs = any(ch in s for ch in special)
    if s.strip() != s:
        needs = True
    if s.lower() in {"true", "false", "null", "none"}:
        needs = True
    if not needs:
        return s
    escaped = s.replace("\\", "\\\\").replace('"', '\\"')
    return f'"{escaped}"'
def _schema_extras(raw: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return schema keys excluding the ones already printed as normalized fields.
    """
    normalized = frozenset(
        ("name", "description", "owner_team", "pii", "retention", "links")
    )
    return {key: value for key, value in raw.items() if key not in normalized}
| """ | ||
| Data models for the `typeid explain` feature. | ||
| Design goals: | ||
| - Additive, non-breaking: does not modify existing TypeID behavior. | ||
| - Stable-ish: callers can rely on these dataclasses, but we keep flexibility | ||
| by storing schema/derived sections as dicts (schema evolves without breaking). | ||
| - Provenance: every top-level field can be tagged by where it came from. | ||
| """ | ||
| from dataclasses import dataclass, field | ||
| from datetime import datetime | ||
| from enum import Enum | ||
| from typing import Any, Dict, List, Optional | ||
class Provenance(str, Enum):
    """Where a piece of information came from."""
    # Computed directly from the TypeID string (prefix, suffix, uuid, ...).
    DERIVED_FROM_ID = "derived_from_id"
    # Supplied by a schema/registry entry looked up by prefix.
    SCHEMA = "schema"
    # From an external source (not produced by this module's engine).
    EXTERNAL = "external"
    # Origin could not be determined.
    UNKNOWN = "unknown"
@dataclass(frozen=True, slots=True)
class ParseError:
    """Represents a recoverable parse/validation error."""
    # Stable machine-readable identifier, e.g. "invalid_typeid".
    code: str
    # Human-readable description of what went wrong.
    message: str
@dataclass(frozen=True, slots=True)
class ParsedTypeID:
    """
    Facts extracted from the TypeID string without any schema lookup.

    Notes:
    - `prefix` is the full prefix as per TypeID spec (may contain underscores).
    - `suffix` is the encoded UUIDv7 portion (base32 string).
    - `uuid` and `created_at` are *derived* from suffix if possible.
    """
    # Original input string, verbatim.
    raw: str
    # None when the input could not even be best-effort split.
    prefix: Optional[str]
    suffix: Optional[str]
    # True only when full TypeID validation succeeded.
    valid: bool
    errors: List[ParseError] = field(default_factory=list)
    # Derived (best-effort)
    uuid: Optional[str] = None  # keep as string to avoid uuid/uuid6 typing bleed
    created_at: Optional[datetime] = None  # UTC, decoded from UUIDv7 timestamp bits
    sortable: Optional[bool] = None  # TypeIDs w/ UUIDv7 are typically sortable
@dataclass(frozen=True, slots=True)
class TypeSchema:
    """
    Schema info for a given prefix, loaded from a registry file.

    This is intentionally flexible to keep the schema format evolving without
    breaking the Python API: we store raw dict and also normalize a few
    commonly-used fields for nicer UX.
    """
    # The TypeID prefix this schema entry describes.
    prefix: str
    # Full, unnormalized schema entry as loaded from the registry file.
    raw: Dict[str, Any] = field(default_factory=dict)
    # Common optional fields (convenience)
    name: Optional[str] = None
    description: Optional[str] = None
    owner_team: Optional[str] = None
    pii: Optional[bool] = None
    retention: Optional[str] = None
    # Link templates (e.g. {"logs": "https://...q={id}"})
    links: Dict[str, str] = field(default_factory=dict)
@dataclass(frozen=True, slots=True)
class Explanation:
    """
    Final explanation object produced by the explain engine.

    Sections:
        - parsed: always present (even if invalid; fields may be None)
        - schema: may be None if no schema found or schema loading disabled
        - derived: small dict for extra derived facts (extensible)
        - links: rendered links (from schema templates), safe for display
        - provenance: per-field provenance labels for transparency
    """

    # The original input string, verbatim.
    id: str
    # Overall validity verdict for the ID.
    valid: bool
    # Parse-level facts (always present, even for invalid input).
    parsed: ParsedTypeID
    # Schema info for the prefix; None when unavailable or disabled.
    schema: Optional[TypeSchema] = None
    # Additional derived facts that aren't worth dedicated fields yet
    derived: Dict[str, Any] = field(default_factory=dict)
    # Rendered (not templates) links
    links: Dict[str, str] = field(default_factory=dict)
    # Field -> provenance label; keep keys simple (e.g. "created_at", "retention")
    provenance: Dict[str, Provenance] = field(default_factory=dict)
    # Non-fatal warnings (e.g. schema loaded but link template failed)
    warnings: List[str] = field(default_factory=list)
    # Errors copied from parsed.errors for convenience (and future external errors)
    errors: List[ParseError] = field(default_factory=list)

    @staticmethod
    def _serialize_error(error: ParseError) -> Dict[str, str]:
        """
        Serialize a ParseError into a plain dict.

        FIX: ParseError is declared with ``slots=True``, so its instances have
        no ``__dict__``; the previous ``error.__dict__`` access raised
        AttributeError whenever errors were present. Explicit attribute access
        is correct and keeps the JSON shape stable.
        """
        return {"code": error.code, "message": error.message}

    def to_dict(self) -> Dict[str, Any]:
        """
        Convert to a JSON-serializable dict.

        We avoid serializing complex objects directly (datetime, Enums) without
        conversion to keep `--json` output stable and easy to consume.
        """
        parsed = {
            "raw": self.parsed.raw,
            "prefix": self.parsed.prefix,
            "suffix": self.parsed.suffix,
            "valid": self.parsed.valid,
            "errors": [self._serialize_error(e) for e in self.parsed.errors],
            "uuid": self.parsed.uuid,
            # Datetimes are rendered as ISO-8601 strings for JSON friendliness.
            "created_at": self.parsed.created_at.isoformat() if self.parsed.created_at else None,
            "sortable": self.parsed.sortable,
        }
        schema = None
        if self.schema is not None:
            schema = {
                "prefix": self.schema.prefix,
                "name": self.schema.name,
                "description": self.schema.description,
                "owner_team": self.schema.owner_team,
                "pii": self.schema.pii,
                "retention": self.schema.retention,
                # Copy mappings so callers cannot mutate our internal state.
                "links": dict(self.schema.links),
                "raw": dict(self.schema.raw),
            }
        return {
            "id": self.id,
            "valid": self.valid,
            "parsed": parsed,
            "derived": dict(self.derived),
            "schema": schema,
            "links": dict(self.links),
            # Enum members are rendered as their underlying string values.
            "provenance": {k: str(v.value) for k, v in self.provenance.items()},
            "warnings": list(self.warnings),
            "errors": [self._serialize_error(e) for e in self.errors],
        }
| """ | ||
| Schema registry loader for `typeid explain`. | ||
| This module loads a schema file (JSON by default, YAML optionally) and exposes | ||
| a lookup function: prefix -> TypeSchema. | ||
| Goals: | ||
| - Non-breaking: schema is optional; failures are handled gracefully. | ||
| - Minimal dependencies: JSON uses stdlib; YAML support is optional. | ||
| - Future-proof: schema versioning with a light validation layer. | ||
| Schema shape (v1) - JSON/YAML: | ||
| { | ||
| "schema_version": 1, | ||
| "types": { | ||
| "usr": { | ||
| "name": "User", | ||
| "description": "...", | ||
| "owner_team": "...", | ||
| "pii": true, | ||
| "retention": "7y", | ||
| "links": { | ||
| "logs": "https://...q={id}", | ||
| "trace": "https://...?id={id}" | ||
| } | ||
| } | ||
| } | ||
| } | ||
| """ | ||
| import json | ||
| from dataclasses import dataclass | ||
| from pathlib import Path | ||
| from typing import Any, Dict, Optional, Tuple | ||
| from .model import TypeSchema | ||
@dataclass(frozen=True, slots=True)
class RegistryLoadError:
    """Describes why a schema registry failed to load."""

    # Machine-readable code (e.g. "read_failed", "invalid_schema").
    code: str
    # Human-readable description suitable for display to the user.
    message: str
@dataclass(frozen=True, slots=True)
class RegistryLoadResult:
    """
    Outcome of load_registry().

    Exactly one field is meaningful: `registry` is set on success,
    `error` is set on failure.
    """

    registry: Optional["SchemaRegistry"]
    error: Optional[RegistryLoadError] = None
class SchemaRegistry:
    """
    In-memory registry of TypeSchema objects loaded from a schema file.

    Lookup is by full TypeID prefix (which may contain underscores).
    """

    def __init__(self, *, schema_version: int, types: Dict[str, TypeSchema], source_path: Path):
        # Version and origin stay public for diagnostics; the mapping itself
        # is private so all lookups funnel through get() / ``in``.
        self.schema_version = schema_version
        self.source_path = source_path
        self._types = types

    def get(self, prefix: str) -> Optional[TypeSchema]:
        """Return the TypeSchema registered for *prefix*, or None if unknown."""
        return self._types.get(prefix)

    def __contains__(self, prefix: str) -> bool:
        """Support ``prefix in registry`` membership tests."""
        return prefix in self._types

    def __len__(self) -> int:
        """Return the number of registered prefixes."""
        return len(self._types)
def _load_failure(code: str, message: str) -> RegistryLoadResult:
    """Build a RegistryLoadResult describing a failed load."""
    return RegistryLoadResult(registry=None, error=RegistryLoadError(code=code, message=message))


def load_registry(path: Path) -> RegistryLoadResult:
    """
    Load a schema registry from the given path.

    Returns RegistryLoadResult:
        - registry != None on success
        - error != None on failure (never raises for normal user mistakes)
    """
    try:
        data, _fmt = _read_schema_file(path)
    except Exception as e:
        return _load_failure("read_failed", f"Failed to read schema: {e!s}")
    if not isinstance(data, dict):
        return _load_failure("invalid_schema", "Schema root must be an object/map.")

    schema_version = data.get("schema_version")
    if schema_version is None:
        return _load_failure("missing_schema_version", "Schema missing 'schema_version'.")
    # FIX: bool is a subclass of int (`isinstance(True, int)` is True and
    # `True == 1`), so without the explicit bool exclusion a schema with
    # `"schema_version": true` would be silently accepted as version 1.
    if isinstance(schema_version, bool) or not isinstance(schema_version, int):
        return _load_failure("invalid_schema_version", "'schema_version' must be an integer.")
    if schema_version != 1:
        return _load_failure(
            "unsupported_schema_version",
            f"Unsupported schema_version={schema_version}. Supported: 1.",
        )

    types_raw = data.get("types")
    if types_raw is None:
        return _load_failure("missing_types", "Schema missing 'types' map.")
    if not isinstance(types_raw, dict):
        return _load_failure("invalid_types", "'types' must be an object/map.")

    types: Dict[str, TypeSchema] = {}
    for prefix, spec in types_raw.items():
        # Tolerate malformed entries: skip them rather than failing the
        # entire load (best-effort semantics).
        if not isinstance(prefix, str) or not prefix:
            continue
        if not isinstance(spec, dict):
            continue
        types[prefix] = _to_type_schema(prefix, spec)
    return RegistryLoadResult(registry=SchemaRegistry(schema_version=schema_version, types=types, source_path=path))
def make_lookup(registry: Optional[SchemaRegistry]):
    """
    Convenience helper to make a schema_lookup callable for engine.explain().

    Example:
        reg = load_registry(path).registry
        lookup = make_lookup(reg)
        explanation = explain(id, schema_lookup=lookup)
    """

    def _lookup(prefix: str) -> Optional[TypeSchema]:
        # A missing registry behaves like an empty one: every lookup misses.
        return None if registry is None else registry.get(prefix)

    return _lookup
| def _read_schema_file(path: Path) -> Tuple[Dict[str, Any], str]: | ||
| """ | ||
| Read schema file and parse it into a dict. | ||
| Returns: | ||
| (data, format) where format is 'json' or 'yaml' | ||
| JSON is always supported. | ||
| YAML is supported only if PyYAML is installed. | ||
| """ | ||
| suffix = path.suffix.lower() | ||
| raw = path.read_text(encoding="utf-8") | ||
| if suffix == ".json": | ||
| return json.loads(raw), "json" | ||
| if suffix in (".yaml", ".yml"): | ||
| # Optional dependency | ||
| try: | ||
| import yaml # type: ignore | ||
| except Exception as e: | ||
| raise RuntimeError( | ||
| "YAML schema requires optional dependency. " | ||
| "Install PyYAML (or `typeid[yaml]` if you provide extras)." | ||
| ) from e | ||
| data = yaml.safe_load(raw) | ||
| return data, "yaml" | ||
| # If extension unknown, try JSON first for convenience. | ||
| try: | ||
| return json.loads(raw), "json" | ||
| except Exception as e: | ||
| raise RuntimeError( | ||
| f"Unsupported schema file extension: {path.suffix!s} (supported: .json, .yaml, .yml)" | ||
| ) from e | ||
def _to_type_schema(prefix: str, spec: Dict[str, Any]) -> TypeSchema:
    """
    Normalize a raw type spec into TypeSchema.

    We keep `raw` for forward-compatibility but also extract a few common
    fields for nicer UX. Fields with unexpected types become None rather
    than raising.
    """

    def _typed(key: str, expected: type) -> Optional[Any]:
        # Pull spec[key] only when it has the expected type; else drop it.
        value = spec.get(key)
        return value if isinstance(value, expected) else None

    raw_links = spec.get("links")
    if not isinstance(raw_links, dict):
        raw_links = {}
    # Keep only str -> str pairs so link templates are safe to render.
    safe_links = {
        key: value
        for key, value in raw_links.items()
        if isinstance(key, str) and isinstance(value, str)
    }
    return TypeSchema(
        prefix=prefix,
        raw=dict(spec),
        name=_typed("name", str),
        description=_typed("description", str),
        owner_team=_typed("owner_team", str),
        pii=_typed("pii", bool),
        retention=_typed("retention", str),
        links=safe_links,
    )
| from dataclasses import dataclass | ||
| from functools import lru_cache | ||
| from typing import Callable | ||
| from .typeid import TypeID | ||
@dataclass(frozen=True, slots=True)
class TypeIDFactory:
    """
    Callable object that generates TypeIDs with a fixed prefix.

    Instances are immutable (frozen), so a single factory is safe to share,
    e.g. as a dataclass field's ``default_factory``.

    Example:
        user_id = TypeIDFactory("user")()
    """

    # Prefix applied to every TypeID this factory produces.
    prefix: str

    def __call__(self) -> TypeID:
        """Generate a fresh TypeID carrying this factory's prefix."""
        return TypeID(self.prefix)
def typeid_factory(prefix: str) -> Callable[[], TypeID]:
    """
    Return a zero-argument callable that generates TypeIDs with a fixed prefix.

    Example:
        user_id = typeid_factory("user")()
    """
    # TypeIDFactory instances are themselves the zero-argument callables.
    factory = TypeIDFactory(prefix)
    return factory
@lru_cache(maxsize=256)
def cached_typeid_factory(prefix: str) -> Callable[[], TypeID]:
    """
    Same as typeid_factory, but caches factories by prefix.

    Use this if you create factories repeatedly at runtime; repeated calls
    with the same prefix return the same factory object.
    """
    # Delegate so the construction logic lives in exactly one place.
    return typeid_factory(prefix)
+224
-18
| Metadata-Version: 2.4 | ||
| Name: typeid-python | ||
| Version: 0.3.3 | ||
| Version: 0.3.4 | ||
| Summary: Python implementation of TypeIDs: type-safe, K-sortable, and globally unique identifiers inspired by Stripe IDs | ||
| License: MIT | ||
| Project-URL: Homepage, https://github.com/akhundMurad/typeid-python | ||
| Project-URL: Repository, https://github.com/akhundMurad/typeid-python | ||
| Project-URL: Bug Tracker, https://github.com/akhundMurad/typeid-python/issues | ||
| Author-email: Murad Akhundov <akhundov1murad@gmail.com> | ||
| License-Expression: MIT | ||
| License-File: LICENSE | ||
| Keywords: typeid,uuid,uuid6,guid | ||
| Author: Murad Akhundov | ||
| Author-email: akhundov1murad@gmail.com | ||
| Requires-Python: >=3.10,<4 | ||
| Keywords: guid,typeid,uuid,uuid6 | ||
| Classifier: Development Status :: 3 - Alpha | ||
| Classifier: License :: OSI Approved :: MIT License | ||
| Classifier: Operating System :: OS Independent | ||
| Classifier: Programming Language :: Python :: 3 | ||
| Classifier: Programming Language :: Python :: 3.10 | ||
@@ -20,5 +20,8 @@ Classifier: Programming Language :: Python :: 3.11 | ||
| Classifier: Programming Language :: Python :: 3.14 | ||
| Requires-Python: <4,>=3.10 | ||
| Requires-Dist: uuid6<2026.0.0,>=2024.7.10 | ||
| Provides-Extra: cli | ||
| Requires-Dist: uuid6 (>=2024.7.10,<2026.0.0) | ||
| Project-URL: Repository, https://github.com/akhundMurad/typeid-python | ||
| Requires-Dist: click; extra == 'cli' | ||
| Provides-Extra: yaml | ||
| Requires-Dist: pyyaml; extra == 'yaml' | ||
| Description-Content-Type: text/markdown | ||
@@ -52,3 +55,3 @@ | ||
| - PyPI: | ||
| - Pip: | ||
@@ -59,2 +62,8 @@ ```console | ||
| - Uv: | ||
| ```console | ||
| uv add typeid-python | ||
| ``` | ||
| - Poetry: | ||
@@ -66,2 +75,15 @@ | ||
| ### Optional dependencies | ||
| TypeID supports schema-based ID explanations using JSON (always available) and | ||
| YAML (optional). | ||
| To enable YAML support: | ||
| ```console | ||
| pip install typeid-python[yaml] | ||
| ``` | ||
| If the extra is not installed, JSON schemas will still work. | ||
| ## Usage | ||
@@ -76,11 +98,14 @@ | ||
| # Default TypeID (no prefix) | ||
| typeid = TypeID() | ||
| print(typeid.prefix) # "" | ||
| print(typeid.suffix) # "01h45ytscbebyvny4gc8cr8ma2" (encoded uuid7 instance) | ||
| assert typeid.prefix == "" | ||
| assert isinstance(typeid.suffix, str) | ||
| assert len(typeid.suffix) > 0 # encoded UUIDv7 | ||
| # TypeID with prefix | ||
| typeid = TypeID(prefix="user") | ||
| print(typeid.prefix) # "user" | ||
| print(str(typeid)) # "user_01h45ytscbebyvny4gc8cr8ma2" | ||
| assert typeid.prefix == "user" | ||
| assert str(typeid).startswith("user_") | ||
| ``` | ||
@@ -93,5 +118,7 @@ | ||
| typeid = TypeID.from_string("user_01h45ytscbebyvny4gc8cr8ma2") | ||
| value = "user_01h45ytscbebyvny4gc8cr8ma2" | ||
| typeid = TypeID.from_string(value) | ||
| print(str(typeid)) # "user_01h45ytscbebyvny4gc8cr8ma2" | ||
| assert str(typeid) == value | ||
| assert typeid.prefix == "user" | ||
| ``` | ||
@@ -105,3 +132,3 @@ | ||
| uuid = uuid7() # UUID('01890bf0-846f-7762-8605-5a3abb40e0e5') | ||
| uuid = uuid7() | ||
| prefix = "user" | ||
@@ -111,5 +138,30 @@ | ||
| print(str(typeid)) # "user_01h45z113fexh8c1at7axm1r75" | ||
| assert typeid.prefix == prefix | ||
| assert str(typeid).startswith(f"{prefix}_") | ||
| ``` | ||
| - Use pre-defined prefix: | ||
| ```python | ||
| from dataclasses import dataclass, field | ||
| from typing import Literal | ||
| from typeid import TypeID, typeid_factory | ||
| UserID = TypeID[Literal["user"]] | ||
| gen_user_id = typeid_factory("user") | ||
| @dataclass | ||
| class UserDTO: | ||
| user_id: UserID = field(default_factory=gen_user_id) | ||
| full_name: str = "A J" | ||
| age: int = 18 | ||
| user = UserDTO() | ||
| assert str(user.user_id).startswith("user_") | ||
| ``` | ||
| ### CLI-tool | ||
@@ -145,1 +197,155 @@ | ||
| ## ✨ NEW: `typeid explain` — “What is this ID?” | ||
| TypeID can now **explain a TypeID** in a human-readable way. | ||
| This is useful when: | ||
| * debugging logs | ||
| * inspecting database records | ||
| * reviewing production incidents | ||
| * understanding IDs shared via Slack, tickets, or dashboards | ||
| ### Basic usage (no schema required) | ||
| ```console | ||
| $ typeid explain user_01h45ytscbebyvny4gc8cr8ma2 | ||
| ``` | ||
| Example output: | ||
| ```yaml | ||
| id: user_01h45ytscbebyvny4gc8cr8ma2 | ||
| valid: true | ||
| parsed: | ||
| prefix: user | ||
| suffix: 01h45ytscbebyvny4gc8cr8ma2 | ||
| uuid: 01890bf0-846f-7762-8605-5a3abb40e0e5 | ||
| created_at: 2023-06-30T10:55:24Z | ||
| sortable: true | ||
| schema: | ||
| found: false | ||
| ``` | ||
| Even without configuration, `typeid explain` can: | ||
| * validate the ID | ||
| * extract the UUID | ||
| * derive creation time (UUIDv7) | ||
| * determine sortability | ||
| ## Schema-based explanations | ||
| To make explanations richer, you can define a **TypeID schema** describing what each | ||
| prefix represents. | ||
| ### Example schema (`typeid.schema.json`) | ||
| ```json | ||
| { | ||
| "schema_version": 1, | ||
| "types": { | ||
| "user": { | ||
| "name": "User", | ||
| "description": "End-user account", | ||
| "owner_team": "identity-platform", | ||
| "pii": true, | ||
| "retention": "7y", | ||
| "links": { | ||
| "logs": "https://logs.company/search?q={id}", | ||
| "trace": "https://traces.company/?id={id}" | ||
| } | ||
| } | ||
| } | ||
| } | ||
| ``` | ||
| ### Explain using schema | ||
| ```console | ||
| $ typeid explain user_01h45ytscbebyvny4gc8cr8ma2 | ||
| ``` | ||
| Output (excerpt): | ||
| ```yaml | ||
| schema: | ||
| found: true | ||
| name: User | ||
| owner_team: identity-platform | ||
| pii: true | ||
| retention: 7y | ||
| links: | ||
| logs: https://logs.company/search?q=user_01h45ytscbebyvny4gc8cr8ma2 | ||
| ``` | ||
| ## Schema discovery rules | ||
| If `--schema` is not provided, TypeID looks for a schema in the following order: | ||
| 1. Environment variable: | ||
| ```console | ||
| TYPEID_SCHEMA=/path/to/schema.json | ||
| ``` | ||
| 2. Current directory: | ||
| * `typeid.schema.json` | ||
| * `typeid.schema.yaml` | ||
| 3. User config directory: | ||
| * `~/.config/typeid/schema.json` | ||
| * `~/.config/typeid/schema.yaml` | ||
| If no schema is found, the command still works with derived information only. | ||
| ## YAML schemas (optional) | ||
| YAML schemas are supported if the optional dependency is installed: | ||
| ```console | ||
| pip install typeid-python[yaml] | ||
| ``` | ||
| Example (`typeid.schema.yaml`): | ||
| ```yaml | ||
| schema_version: 1 | ||
| types: | ||
| user: | ||
| name: User | ||
| owner_team: identity-platform | ||
| links: | ||
| logs: "https://logs.company/search?q={id}" | ||
| ``` | ||
| ## JSON output (machine-readable) | ||
| ```console | ||
| $ typeid explain user_01h45ytscbebyvny4gc8cr8ma2 --json | ||
| ``` | ||
| Useful for: | ||
| * scripts | ||
| * CI pipelines | ||
| * IDE integrations | ||
| ## Design principles | ||
| * **Non-breaking**: existing APIs and CLI commands remain unchanged | ||
| * **Schema-optional**: works fully offline | ||
| * **Read-only**: no side effects or external mutations | ||
| * **Declarative**: meaning is defined by users, not inferred by the tool | ||
| You can think of `typeid explain` as: | ||
| > **OpenAPI — but for identifiers instead of HTTP endpoints** | ||
| ## License | ||
| MIT | ||
+51
-71
@@ -1,85 +0,65 @@ | ||
| [tool.poetry] | ||
| [project] | ||
| name = "typeid-python" | ||
| version = "0.3.3" | ||
| version = "0.3.4" | ||
| description = "Python implementation of TypeIDs: type-safe, K-sortable, and globally unique identifiers inspired by Stripe IDs" | ||
| authors = ["Murad Akhundov <akhundov1murad@gmail.com>"] | ||
| authors = [{ name = "Murad Akhundov", email = "akhundov1murad@gmail.com" }] | ||
| requires-python = ">=3.10,<4" | ||
| readme = "README.md" | ||
| license = "MIT" | ||
| readme = "README.md" | ||
| repository = "https://github.com/akhundMurad/typeid-python" | ||
| keywords = [ | ||
| "typeid", | ||
| "uuid", | ||
| "uuid6", | ||
| "guid", | ||
| ] | ||
| classifiers = [ | ||
| "Development Status :: 3 - Alpha", | ||
| "License :: OSI Approved :: MIT License", | ||
| "Programming Language :: Python :: 3.10", | ||
| "Programming Language :: Python :: 3.11", | ||
| "Programming Language :: Python :: 3.12", | ||
| "Programming Language :: Python :: 3.13", | ||
| "Programming Language :: Python :: 3.14", | ||
| "Operating System :: OS Independent", | ||
| "Development Status :: 3 - Alpha", | ||
| "License :: OSI Approved :: MIT License", | ||
| "Programming Language :: Python :: 3.10", | ||
| "Programming Language :: Python :: 3.11", | ||
| "Programming Language :: Python :: 3.12", | ||
| "Programming Language :: Python :: 3.13", | ||
| "Programming Language :: Python :: 3.14", | ||
| "Operating System :: OS Independent", | ||
| ] | ||
| keywords = ["typeid", "uuid", "uuid6", "guid"] | ||
| packages = [{ include = "typeid" }] | ||
| dependencies = ["uuid6>=2024.7.10,<2026.0.0"] | ||
| [project.optional-dependencies] | ||
| cli = ["click"] | ||
| yaml = ["PyYAML"] | ||
| [tool.hatch.build.targets.sdist] | ||
| exclude = [ | ||
| "/.github", | ||
| "/docs", | ||
| "/examples", | ||
| "/deps", | ||
| "/htmlcov", | ||
| "/tests", | ||
| "mkdocs-plugins.code-workspace", | ||
| "Makefile", | ||
| "CODE_OF_CONDUCT.md", | ||
| ".isort.cfg", | ||
| ".gitignore", | ||
| ".flake8", | ||
| "junit", | ||
| "requirements.txt", | ||
| "mypy.ini", | ||
| "pytest.ini", | ||
| ".flake8", | ||
| "pytest.ini", | ||
| ] | ||
| [project.urls] | ||
| Homepage = "https://github.com/akhundMurad/typeid-python" | ||
| Repository = "https://github.com/akhundMurad/typeid-python" | ||
| "Bug Tracker" = "https://github.com/akhundMurad/typeid-python/issues" | ||
| [tool.poetry.dependencies] | ||
| python = ">=3.10,<4" | ||
| uuid6 = ">=2024.7.10,<2026.0.0" | ||
| [tool.poetry.group.dev.dependencies] | ||
| pytest = "^7.3.2" | ||
| black = "^23.3.0" | ||
| mypy = "^1.3.0" | ||
| requests = "^2.31.0" | ||
| pyyaml = "^6.0" | ||
| ruff = "^0.14.5" | ||
| twine = "^6.2.0" | ||
| [tool.poetry.extras] | ||
| cli = ["click"] | ||
| [tool.poetry.scripts] | ||
| [project.scripts] | ||
| typeid = "typeid.cli:cli" | ||
| [dependency-groups] | ||
| dev = [ | ||
| "pytest>=7.3.2,<8", | ||
| "black>=23.3.0,<24", | ||
| "mypy>=1.3.0,<2", | ||
| "requests>=2.31.0,<3", | ||
| "ruff>=0.14.5,<0.15", | ||
| "twine>=6.2.0,<7", | ||
| "pyyaml>=6.0", | ||
| "mkdocs-material>=9.7.1", | ||
| "mkdocstrings[python]>=1.0.0", | ||
| "mkdocs-git-revision-date-localized-plugin>=1.5.0", | ||
| "mkdocs-gen-files>=0.6.0", | ||
| "mkdocs-literate-nav>=0.6.2", | ||
| "mkdocs-section-index>=0.3.10", | ||
| "pytest-markdown-docs>=0.9.0", | ||
| ] | ||
| [tool.pylint] | ||
| disable = ["C0111", "C0116", "C0114", "R0903"] | ||
| [tool.hatch.build.targets.sdist] | ||
| include = ["typeid"] | ||
| [tool.ruff] | ||
| line-length = 119 | ||
| target-version = "py310" | ||
| src = ["typeid", "tests"] | ||
| [tool.hatch.build.targets.wheel] | ||
| include = ["typeid"] | ||
| [tool.ruff.lint] | ||
| select = ["E", "F", "W", "B", "I"] | ||
| ignore = ["E203", "B028"] | ||
| [tool.ruff.lint.isort] | ||
| known-first-party = ["typeid"] | ||
| [build-system] | ||
| requires = ["poetry-core"] | ||
| build-backend = "poetry.core.masonry.api" | ||
| requires = ["hatchling"] | ||
| build-backend = "hatchling.build" |
+213
-9
@@ -27,3 +27,3 @@ # TypeID Python | ||
| - PyPI: | ||
| - Pip: | ||
@@ -34,2 +34,8 @@ ```console | ||
| - Uv: | ||
| ```console | ||
| uv add typeid-python | ||
| ``` | ||
| - Poetry: | ||
@@ -41,2 +47,15 @@ | ||
| ### Optional dependencies | ||
| TypeID supports schema-based ID explanations using JSON (always available) and | ||
| YAML (optional). | ||
| To enable YAML support: | ||
| ```console | ||
| pip install typeid-python[yaml] | ||
| ``` | ||
| If the extra is not installed, JSON schemas will still work. | ||
| ## Usage | ||
@@ -51,11 +70,14 @@ | ||
| # Default TypeID (no prefix) | ||
| typeid = TypeID() | ||
| print(typeid.prefix) # "" | ||
| print(typeid.suffix) # "01h45ytscbebyvny4gc8cr8ma2" (encoded uuid7 instance) | ||
| assert typeid.prefix == "" | ||
| assert isinstance(typeid.suffix, str) | ||
| assert len(typeid.suffix) > 0 # encoded UUIDv7 | ||
| # TypeID with prefix | ||
| typeid = TypeID(prefix="user") | ||
| print(typeid.prefix) # "user" | ||
| print(str(typeid)) # "user_01h45ytscbebyvny4gc8cr8ma2" | ||
| assert typeid.prefix == "user" | ||
| assert str(typeid).startswith("user_") | ||
| ``` | ||
@@ -68,5 +90,7 @@ | ||
| typeid = TypeID.from_string("user_01h45ytscbebyvny4gc8cr8ma2") | ||
| value = "user_01h45ytscbebyvny4gc8cr8ma2" | ||
| typeid = TypeID.from_string(value) | ||
| print(str(typeid)) # "user_01h45ytscbebyvny4gc8cr8ma2" | ||
| assert str(typeid) == value | ||
| assert typeid.prefix == "user" | ||
| ``` | ||
@@ -80,3 +104,3 @@ | ||
| uuid = uuid7() # UUID('01890bf0-846f-7762-8605-5a3abb40e0e5') | ||
| uuid = uuid7() | ||
| prefix = "user" | ||
@@ -86,5 +110,30 @@ | ||
| print(str(typeid)) # "user_01h45z113fexh8c1at7axm1r75" | ||
| assert typeid.prefix == prefix | ||
| assert str(typeid).startswith(f"{prefix}_") | ||
| ``` | ||
| - Use pre-defined prefix: | ||
| ```python | ||
| from dataclasses import dataclass, field | ||
| from typing import Literal | ||
| from typeid import TypeID, typeid_factory | ||
| UserID = TypeID[Literal["user"]] | ||
| gen_user_id = typeid_factory("user") | ||
| @dataclass | ||
| class UserDTO: | ||
| user_id: UserID = field(default_factory=gen_user_id) | ||
| full_name: str = "A J" | ||
| age: int = 18 | ||
| user = UserDTO() | ||
| assert str(user.user_id).startswith("user_") | ||
| ``` | ||
| ### CLI-tool | ||
@@ -119,1 +168,156 @@ | ||
| ``` | ||
| ## ✨ NEW: `typeid explain` — “What is this ID?” | ||
| TypeID can now **explain a TypeID** in a human-readable way. | ||
| This is useful when: | ||
| * debugging logs | ||
| * inspecting database records | ||
| * reviewing production incidents | ||
| * understanding IDs shared via Slack, tickets, or dashboards | ||
| ### Basic usage (no schema required) | ||
| ```console | ||
| $ typeid explain user_01h45ytscbebyvny4gc8cr8ma2 | ||
| ``` | ||
| Example output: | ||
| ```yaml | ||
| id: user_01h45ytscbebyvny4gc8cr8ma2 | ||
| valid: true | ||
| parsed: | ||
| prefix: user | ||
| suffix: 01h45ytscbebyvny4gc8cr8ma2 | ||
| uuid: 01890bf0-846f-7762-8605-5a3abb40e0e5 | ||
| created_at: 2023-06-30T10:55:24Z | ||
| sortable: true | ||
| schema: | ||
| found: false | ||
| ``` | ||
| Even without configuration, `typeid explain` can: | ||
| * validate the ID | ||
| * extract the UUID | ||
| * derive creation time (UUIDv7) | ||
| * determine sortability | ||
| ## Schema-based explanations | ||
| To make explanations richer, you can define a **TypeID schema** describing what each | ||
| prefix represents. | ||
| ### Example schema (`typeid.schema.json`) | ||
| ```json | ||
| { | ||
| "schema_version": 1, | ||
| "types": { | ||
| "user": { | ||
| "name": "User", | ||
| "description": "End-user account", | ||
| "owner_team": "identity-platform", | ||
| "pii": true, | ||
| "retention": "7y", | ||
| "links": { | ||
| "logs": "https://logs.company/search?q={id}", | ||
| "trace": "https://traces.company/?id={id}" | ||
| } | ||
| } | ||
| } | ||
| } | ||
| ``` | ||
| ### Explain using schema | ||
| ```console | ||
| $ typeid explain user_01h45ytscbebyvny4gc8cr8ma2 | ||
| ``` | ||
| Output (excerpt): | ||
| ```yaml | ||
| schema: | ||
| found: true | ||
| name: User | ||
| owner_team: identity-platform | ||
| pii: true | ||
| retention: 7y | ||
| links: | ||
| logs: https://logs.company/search?q=user_01h45ytscbebyvny4gc8cr8ma2 | ||
| ``` | ||
| ## Schema discovery rules | ||
| If `--schema` is not provided, TypeID looks for a schema in the following order: | ||
| 1. Environment variable: | ||
| ```console | ||
| TYPEID_SCHEMA=/path/to/schema.json | ||
| ``` | ||
| 2. Current directory: | ||
| * `typeid.schema.json` | ||
| * `typeid.schema.yaml` | ||
| 3. User config directory: | ||
| * `~/.config/typeid/schema.json` | ||
| * `~/.config/typeid/schema.yaml` | ||
| If no schema is found, the command still works with derived information only. | ||
| ## YAML schemas (optional) | ||
| YAML schemas are supported if the optional dependency is installed: | ||
| ```console | ||
| pip install typeid-python[yaml] | ||
| ``` | ||
| Example (`typeid.schema.yaml`): | ||
| ```yaml | ||
| schema_version: 1 | ||
| types: | ||
| user: | ||
| name: User | ||
| owner_team: identity-platform | ||
| links: | ||
| logs: "https://logs.company/search?q={id}" | ||
| ``` | ||
| ## JSON output (machine-readable) | ||
| ```console | ||
| $ typeid explain user_01h45ytscbebyvny4gc8cr8ma2 --json | ||
| ``` | ||
| Useful for: | ||
| * scripts | ||
| * CI pipelines | ||
| * IDE integrations | ||
| ## Design principles | ||
| * **Non-breaking**: existing APIs and CLI commands remain unchanged | ||
| * **Schema-optional**: works fully offline | ||
| * **Read-only**: no side effects or external mutations | ||
| * **Declarative**: meaning is defined by users, not inferred by the tool | ||
| You can think of `typeid explain` as: | ||
| > **OpenAPI — but for identifiers instead of HTTP endpoints** | ||
| ## License | ||
| MIT | ||
+10
-1
@@ -0,3 +1,12 @@ | ||
| from .factory import TypeIDFactory, cached_typeid_factory, typeid_factory | ||
| from .typeid import TypeID, from_string, from_uuid, get_prefix_and_suffix | ||
| __all__ = ("TypeID", "from_string", "from_uuid", "get_prefix_and_suffix") | ||
| __all__ = ( | ||
| "TypeID", | ||
| "from_string", | ||
| "from_uuid", | ||
| "get_prefix_and_suffix", | ||
| "TypeIDFactory", | ||
| "typeid_factory", | ||
| "cached_typeid_factory", | ||
| ) |
+117
-2
@@ -0,1 +1,2 @@ | ||
| from pathlib import Path | ||
| from typing import Optional | ||
@@ -7,2 +8,6 @@ | ||
| from typeid import TypeID, base32, from_uuid, get_prefix_and_suffix | ||
| from typeid.explain.discovery import discover_schema_path | ||
| from typeid.explain.engine import explain as explain_engine | ||
| from typeid.explain.formatters import format_explanation_json, format_explanation_pretty | ||
| from typeid.explain.registry import load_registry, make_lookup | ||
@@ -12,2 +17,4 @@ | ||
| def cli(): | ||
| # Root CLI command group. | ||
| # This acts as the entry point for all subcommands. | ||
| pass | ||
@@ -19,2 +26,8 @@ | ||
| def new(prefix: Optional[str] = None) -> None: | ||
| """ | ||
| Generate a new TypeID. | ||
| If a prefix is provided, it will be validated and included in the output. | ||
| If no prefix is provided, a prefix-less TypeID is generated. | ||
| """ | ||
| typeid = TypeID(prefix=prefix) | ||
@@ -28,3 +41,11 @@ click.echo(str(typeid)) | ||
| def encode(uuid: str, prefix: Optional[str] = None) -> None: | ||
| typeid = from_uuid(suffix=UUID(uuid), prefix=prefix) | ||
| """ | ||
| Encode an existing UUID into a TypeID. | ||
| This command is intended for cases where UUIDs already exist | ||
| (e.g. stored in a database) and need to be represented as TypeIDs. | ||
| """ | ||
| uuid_obj = UUID(uuid) | ||
| typeid = from_uuid(suffix=uuid_obj, prefix=prefix) | ||
| click.echo(str(typeid)) | ||
@@ -36,5 +57,15 @@ | ||
| def decode(encoded: str) -> None: | ||
| """ | ||
| Decode a TypeID into its components. | ||
| This extracts: | ||
| - the prefix (if any) | ||
| - the underlying UUID | ||
| This command is primarily intended for inspection and debugging. | ||
| """ | ||
| prefix, suffix = get_prefix_and_suffix(encoded) | ||
| decoded_bytes = bytes(base32.decode(suffix)) | ||
| decoded_bytes = bytes(base32.decode(suffix)) | ||
| uuid = UUID(bytes=decoded_bytes) | ||
@@ -46,3 +77,87 @@ | ||
| @cli.command() | ||
| @click.argument("encoded") | ||
| @click.option( | ||
| "--schema", | ||
| "schema_path", | ||
| type=click.Path(exists=True, dir_okay=False, path_type=str), | ||
| required=False, | ||
| help="Path to TypeID schema file (JSON, or YAML if PyYAML is installed). " | ||
| "If omitted, TypeID will try to discover a schema automatically.", | ||
| ) | ||
| @click.option( | ||
| "--json", | ||
| "as_json", | ||
| is_flag=True, | ||
| help="Output machine-readable JSON.", | ||
| ) | ||
| @click.option( | ||
| "--no-schema", | ||
| is_flag=True, | ||
| help="Disable schema lookup (derived facts only).", | ||
| ) | ||
| @click.option( | ||
| "--no-links", | ||
| is_flag=True, | ||
| help="Disable link template rendering.", | ||
| ) | ||
| def explain( | ||
| encoded: str, | ||
| schema_path: Optional[str], | ||
| as_json: bool, | ||
| no_schema: bool, | ||
| no_links: bool, | ||
| ) -> None: | ||
| """ | ||
| Explain a TypeID: parse/validate it, derive facts (uuid, created_at), | ||
| and optionally enrich explanation from a user-provided schema. | ||
| """ | ||
| enable_schema = not no_schema | ||
| enable_links = not no_links | ||
| schema_lookup = None | ||
| warnings: list[str] = [] | ||
| # Load schema (optional) | ||
| if enable_schema: | ||
| resolved_path = None | ||
| if schema_path: | ||
| resolved_path = schema_path | ||
| else: | ||
| discovery = discover_schema_path() | ||
| if discovery.path is not None: | ||
| resolved_path = str(discovery.path) | ||
| # If env var was set but invalid, discovery returns source info; | ||
| # we keep CLI robust and simply proceed without schema. | ||
| if resolved_path: | ||
| result = load_registry(Path(resolved_path)) | ||
| if result.registry is not None: | ||
| schema_lookup = make_lookup(result.registry) | ||
| else: | ||
| if result.error is not None: | ||
| warnings.append(f"Schema load failed: {result.error.message}") | ||
| # Build explanation (never raises on normal errors) | ||
| exp = explain_engine( | ||
| encoded, | ||
| schema_lookup=schema_lookup, | ||
| enable_schema=enable_schema, | ||
| enable_links=enable_links, | ||
| ) | ||
| # Surface schema-load warnings (if any) | ||
| if warnings: | ||
| exp.warnings.extend(warnings) | ||
| if as_json: | ||
| click.echo(format_explanation_json(exp)) | ||
| else: | ||
| click.echo(format_explanation_pretty(exp)) | ||
| if __name__ == "__main__": | ||
| cli() |
+150
-11
| import uuid | ||
| import warnings | ||
| from typing import Optional | ||
| from typing import Generic, Optional, TypeVar | ||
@@ -11,15 +11,78 @@ import uuid6 | ||
| PrefixT = TypeVar("PrefixT", bound=str) | ||
| class TypeID: | ||
| def __init__(self, prefix: Optional[str] = None, suffix: Optional[str] = None) -> None: | ||
| class TypeID(Generic[PrefixT]): | ||
| """ | ||
| A TypeID is a human-meaningful, UUID-backed identifier. | ||
| A TypeID is rendered as: | ||
| <prefix>_<suffix> or just <suffix> (when prefix is None/empty) | ||
| - **prefix**: optional semantic label (e.g. "user", "order"). It is *not* part of the UUID. | ||
| Prefixes are validated for allowed characters/shape (see `validate_prefix`). | ||
| - **suffix**: a compact, URL-safe Base32 encoding of a UUID (UUIDv7 by default). | ||
| Suffixes are validated structurally (see `validate_suffix`). | ||
| Design notes: | ||
| - A TypeID is intended to be safe to store as a string (e.g. in logs / URLs). | ||
| - The underlying UUID can always be recovered via `.uuid`. | ||
| - Ordering (`>`, `>=`) is based on lexicographic order of the string representation, | ||
| which corresponds to time-ordering if the UUID version is time-sortable (UUIDv7). | ||
| Type parameters: | ||
| PrefixT: a type-level constraint for the prefix (often `str` or a Literal). | ||
| """ | ||
| def __init__(self, prefix: Optional[PrefixT] = None, suffix: Optional[str] = None) -> None: | ||
| """ | ||
| Create a new TypeID. | ||
| If `suffix` is not provided, a new UUIDv7 is generated and encoded as Base32. | ||
| If `prefix` is provided, it is validated. | ||
| Args: | ||
| prefix: Optional prefix. If None, the TypeID has no prefix and its string | ||
| form will be just the suffix. If provided, it must pass `validate_prefix`. | ||
| suffix: Optional Base32-encoded UUID string. If None, a new UUIDv7 is generated. | ||
| Raises: | ||
| InvalidTypeIDStringException (or another project-specific exception): | ||
| If `suffix` is invalid, or if `prefix` is invalid. | ||
| """ | ||
| # If no suffix is provided, generate a new UUIDv7 and encode it as Base32. | ||
| suffix = _convert_uuid_to_b32(uuid6.uuid7()) if not suffix else suffix | ||
| # Ensure the suffix is a valid encoded UUID representation. | ||
| validate_suffix(suffix=suffix) | ||
| if prefix: | ||
| # Prefix is optional; when present it must satisfy the project's prefix rules. | ||
| if prefix is not None: | ||
| validate_prefix(prefix=prefix) | ||
| self._prefix = prefix or "" | ||
| self._suffix = suffix | ||
| # Keep prefix as Optional internally. String rendering decides whether to show it. | ||
| self._prefix: Optional[PrefixT] = prefix | ||
| self._suffix: str = suffix | ||
| @classmethod | ||
| def from_string(cls, string: str): | ||
| def from_string(cls, string: str) -> "TypeID": | ||
| """ | ||
| Parse a TypeID from its string form. | ||
| The input can be either: | ||
| - "<prefix>_<suffix>" | ||
| - "<suffix>" (prefix-less) | ||
| Args: | ||
| string: String representation of a TypeID. | ||
| Returns: | ||
| A `TypeID` instance. | ||
| Raises: | ||
| InvalidTypeIDStringException (or another project-specific exception): | ||
| If the string cannot be split/parsed or if the extracted parts are invalid. | ||
| """ | ||
| # Split into (prefix, suffix) according to project rules. | ||
| prefix, suffix = get_prefix_and_suffix(string=string) | ||
@@ -29,3 +92,17 @@ return cls(suffix=suffix, prefix=prefix) | ||
| @classmethod | ||
| def from_uuid(cls, suffix: uuid.UUID, prefix: Optional[str] = None): | ||
| def from_uuid(cls, suffix: uuid.UUID, prefix: Optional[PrefixT] = None) -> "TypeID": | ||
| """ | ||
| Construct a TypeID from an existing UUID. | ||
| This is useful when you store UUIDs in a database but want to expose | ||
| TypeIDs at the application boundary. | ||
| Args: | ||
| suffix: UUID value to encode into the TypeID suffix. | ||
| prefix: Optional prefix to attach (validated if provided). | ||
| Returns: | ||
| A `TypeID` whose `.uuid` equals the provided UUID. | ||
| """ | ||
| # Encode the UUID into the canonical Base32 suffix representation. | ||
| suffix_str = _convert_uuid_to_b32(suffix) | ||
@@ -36,2 +113,9 @@ return cls(suffix=suffix_str, prefix=prefix) | ||
| def suffix(self) -> str: | ||
| """ | ||
| The Base32-encoded UUID portion of the TypeID (always present). | ||
| Notes: | ||
| - This is the identity-carrying part. | ||
| - It is validated at construction time. | ||
| """ | ||
| return self._suffix | ||
@@ -41,9 +125,35 @@ | ||
| def prefix(self) -> str: | ||
| return self._prefix | ||
| """ | ||
| The prefix portion of the TypeID, as a string. | ||
| Returns: | ||
| The configured prefix, or "" if the TypeID is prefix-less. | ||
| Notes: | ||
| - Empty string is the *presentation* of "no prefix". Internally, `_prefix` | ||
| remains Optional to preserve the distinction between None and a real value. | ||
| """ | ||
| return self._prefix or "" | ||
| @property | ||
| def uuid(self) -> uuid6.UUID: | ||
| """ | ||
| The UUID represented by this TypeID. | ||
| Returns: | ||
| The decoded UUID value. | ||
| Notes: | ||
| - This decodes `self.suffix` each time it is accessed. | ||
| - The UUID type here follows `uuid6.UUID` used by the project. | ||
| """ | ||
| return _convert_b32_to_uuid(self.suffix) | ||
| def __str__(self) -> str: | ||
| """ | ||
| Render the TypeID into its canonical string representation. | ||
| Returns: | ||
| "<prefix>_<suffix>" if prefix is present, otherwise "<suffix>". | ||
| """ | ||
| value = "" | ||
@@ -56,5 +166,17 @@ if self.prefix: | ||
| def __repr__(self): | ||
| """ | ||
| Developer-friendly representation. | ||
| Uses a constructor-like form to make debugging and copy/paste easier. | ||
| """ | ||
| return "%s.from_string(%r)" % (self.__class__.__name__, str(self)) | ||
| def __eq__(self, value: object) -> bool: | ||
| """ | ||
| Equality based on prefix and suffix. | ||
| Notes: | ||
| - Two TypeIDs are considered equal if both their string components match. | ||
| - This is stricter than "same UUID" because prefix is part of the public ID. | ||
| """ | ||
| if not isinstance(value, TypeID): | ||
@@ -64,3 +186,12 @@ return False | ||
| def __gt__(self, other): | ||
| def __gt__(self, other) -> bool: | ||
| """ | ||
| Compare TypeIDs by lexicographic order of their string form. | ||
| This is useful because TypeID suffixes based on UUIDv7 are time-sortable, | ||
| so string order typically corresponds to creation time order (within a prefix). | ||
| Returns: | ||
| True/False if `other` is a TypeID, otherwise NotImplemented. | ||
| """ | ||
| if isinstance(other, TypeID): | ||
@@ -70,3 +201,8 @@ return str(self) > str(other) | ||
| def __ge__(self, other): | ||
| def __ge__(self, other) -> bool: | ||
| """ | ||
| Compare TypeIDs by lexicographic order of their string form (>=). | ||
| See `__gt__` for rationale and notes. | ||
| """ | ||
| if isinstance(other, TypeID): | ||
@@ -77,2 +213,5 @@ return str(self) >= str(other) | ||
| def __hash__(self) -> int: | ||
| """ | ||
| Hash based on (prefix, suffix), allowing TypeIDs to be used as dict keys / set members. | ||
| """ | ||
| return hash((self.prefix, self.suffix)) | ||
@@ -79,0 +218,0 @@ |
Alert delta unavailable
Currently unable to show alert delta for PyPI packages.
78231
256.68%20
66.67%1592
228.25%