@ark/schema - npm package version comparison

Comparing version 0.39.0 to 0.40.0


out/config.d.ts
import type { ArkRegistry, requireKeys, show } from "@ark/util";
import type { intrinsic } from "./intrinsic.ts";
import type { nodesByRegisteredId } from "./parse.ts";
import type { ActualWriter, ArkErrorCode, ExpectedWriter, MessageWriter, ProblemWriter } from "./shared/errors.ts";
import type { ActualConfig, ArkErrorCode, ArkErrors, ExpectedConfig, MessageConfig, ProblemConfig } from "./shared/errors.ts";
import { type DescriptionWriter, type NodeKind } from "./shared/implement.ts";

@@ -9,3 +9,3 @@ import type { UndeclaredKeyBehavior } from "./structure/structure.ts";

intrinsic: typeof intrinsic;
config: ArkConfig;
config: ArkSchemaConfig;
defaultConfig: ResolvedConfig;

@@ -18,6 +18,6 @@ resolvedConfig: ResolvedConfig;

} & (kind extends ArkErrorCode ? {
expected?: ExpectedWriter<kind>;
actual?: ActualWriter<kind>;
problem?: ProblemWriter<kind>;
message?: MessageWriter<kind>;
expected?: ExpectedConfig<kind>;
actual?: ActualConfig<kind>;
problem?: ProblemConfig<kind>;
message?: MessageConfig<kind>;
} : {})>>;

@@ -28,26 +28,27 @@ type NodeConfigsByKind = {

export type NodeConfig<kind extends NodeKind = NodeKind> = NodeConfigsByKind[kind];
export interface UnknownErrorWriters {
expected?: ExpectedWriter;
actual?: ActualWriter;
problem?: ProblemWriter;
message?: MessageWriter;
export interface UnknownErrorConfigs {
expected?: ExpectedConfig;
actual?: ActualConfig;
problem?: ProblemConfig;
message?: MessageConfig;
}
interface UnknownNodeConfig extends UnknownErrorWriters {
interface UnknownNodeConfig extends UnknownErrorConfigs {
description?: DescriptionWriter;
}
export type ResolvedUnknownNodeConfig = requireKeys<UnknownNodeConfig, "description">;
export declare const configure: (config: ArkConfig) => ArkConfig;
export declare const mergeConfigs: <base extends ArkConfig>(base: base, extensions: ArkConfig | undefined) => base;
export declare const configureSchema: (config: ArkSchemaConfig) => ArkSchemaConfig;
export declare const mergeConfigs: <base extends ArkSchemaConfig>(base: base, extensions: ArkSchemaConfig | undefined) => base;
export type CloneImplementation = <original extends object>(original: original) => original;
export interface ArkConfig extends Partial<Readonly<NodeConfigsByKind>> {
jitless?: boolean;
clone?: boolean | CloneImplementation;
onUndeclaredKey?: UndeclaredKeyBehavior;
numberAllowsNaN?: boolean;
dateAllowsInvalid?: boolean;
export interface ArkSchemaConfig extends Partial<Readonly<NodeConfigsByKind>> {
readonly jitless?: boolean;
readonly clone?: boolean | CloneImplementation;
readonly onUndeclaredKey?: UndeclaredKeyBehavior;
readonly numberAllowsNaN?: boolean;
readonly dateAllowsInvalid?: boolean;
readonly onFail?: ArkErrors.Handler | null;
}
export type resolveConfig<config extends ArkConfig> = show<{
[k in keyof ArkConfig]-?: k extends NodeKind ? Required<config[k]> : k extends "clone" ? CloneImplementation | false : config[k];
} & Omit<config, keyof ArkConfig>>;
export type ResolvedConfig = resolveConfig<ArkConfig>;
export type resolveConfig<config extends ArkSchemaConfig> = show<{
[k in keyof ArkSchemaConfig]-?: k extends NodeKind ? Required<config[k]> : k extends "clone" ? CloneImplementation | false : config[k];
} & Omit<config, keyof ArkSchemaConfig>>;
export type ResolvedConfig = resolveConfig<ArkSchemaConfig>;
export {};
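
The renames above (configure to configureSchema, ArkConfig to ArkSchemaConfig) and the new onFail option are the user-facing part of this file's change. A minimal usage sketch, assuming configureSchema and ArkSchemaConfig are re-exported from the package root:

import { configureSchema, type ArkSchemaConfig } from "@ark/schema"

// sketch only: enable the new onFail hook so failed traversals throw
// (mirrors the errors => errors.throw() handler used by assert later in this diff)
const config: ArkSchemaConfig = {
	numberAllowsNaN: false,
	onFail: errors => errors.throw()
}

configureSchema(config)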

@@ -6,3 +6,3 @@ import { isNodeKind } from "./shared/implement.js";

$ark.config ??= {};
export const configure = (config) => {
export const configureSchema = (config) => {
const result = Object.assign($ark.config, mergeConfigs($ark.config, config));

@@ -9,0 +9,0 @@ $ark.resolvedConfig &&= mergeConfigs($ark.resolvedConfig, result);

@@ -64,5 +64,5 @@ import { type array, type describe, type listable, type satisfy } from "@ark/util";

export type intersectConstraintKinds<l extends ConstraintKind, r extends ConstraintKind> = nodeOfKind<l | r | "unit" | "union"> | Disjoint | null;
export declare const throwInvalidOperandError: (kind: "minLength" | "maxLength" | "pattern" | "required" | "divisor" | "exactLength" | "max" | "min" | "before" | "after" | "optional" | "index" | "sequence" | "structure" | "predicate", expected: BaseRoot<import("./roots/root.ts").InternalRootDeclaration>, actual: BaseRoot<import("./roots/root.ts").InternalRootDeclaration>) => never;
export declare const throwInvalidOperandError: (...args: Parameters<typeof writeInvalidOperandMessage>) => never;
export declare const writeInvalidOperandMessage: <kind extends ConstraintKind, expected extends BaseRoot, actual extends BaseRoot>(kind: kind, expected: expected, actual: actual) => string;
export type writeInvalidOperandMessage<kind extends ConstraintKind, actual> = `${Capitalize<kind>} operand must be ${describe<Prerequisite<kind>>} (was ${describe<Exclude<actual, Prerequisite<kind>>>})`;
export {};

@@ -5,5 +5,3 @@ export declare const intrinsic: {

jsonObject: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
jsonArray: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
jsonData: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
json: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
integer: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;

@@ -23,6 +21,6 @@ lengthBoundable: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;

Date: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
unknown: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
false: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
never: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
true: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
unknown: import("./index.ts").BaseRoot<import("./index.ts").InternalRootDeclaration>;
};

@@ -48,8 +48,3 @@ import { node, schemaScope } from "./scope.js";

},
jsonArray: {
proto: Array,
sequence: "$jsonData"
},
jsonData: ["$jsonPrimitive", "$jsonObject", "$jsonArray"],
json: ["$jsonObject", "$jsonArray"]
jsonData: ["$jsonPrimitive", "$jsonObject"]
}, { prereducedAliases: true }).export();

@@ -56,0 +51,0 @@ export const intrinsic = {

@@ -46,3 +46,4 @@ import { deepClone, envHasCsp, flatMorph, withAlphabetizedKeys } from "@ark/util";

numberAllowsNaN: false,
dateAllowsInvalid: false
dateAllowsInvalid: false,
onFail: null
}));

@@ -49,0 +50,0 @@ $ark.resolvedConfig = mergeConfigs($ark.defaultConfig, $ark.config);

@@ -10,3 +10,3 @@ import { Callable, type GuardablePredicate, type JsonStructure, type Key, type array, type conform, type listable, type mutable } from "@ark/util";

import type { NodeCompiler } from "./shared/compile.ts";
import type { BaseNodeDeclaration, MetaSchema, attachmentsOf } from "./shared/declare.ts";
import type { BaseMeta, BaseNodeDeclaration, MetaSchema, attachmentsOf } from "./shared/declare.ts";
import type { ArkErrors } from "./shared/errors.ts";

@@ -20,6 +20,21 @@ import { type BasisKind, type NodeKind, type OpenNodeKind, type RefinementKind, type StructuralKind, type UnknownAttachments } from "./shared/implement.ts";

* @ts-ignore allow instantiation assignment to the base type */
out d extends BaseNodeDeclaration = BaseNodeDeclaration> extends Callable<(data: d["prerequisite"], ctx?: Traversal) => unknown, attachmentsOf<d>> {
out d extends BaseNodeDeclaration = BaseNodeDeclaration> extends Callable<(data: d["prerequisite"], ctx?: Traversal, onFail?: ArkErrors.Handler | null) => unknown, attachmentsOf<d>> {
attachments: UnknownAttachments;
$: BaseScope;
onFail: ArkErrors.Handler | null;
includesTransform: boolean;
includesContextualPredicate: boolean;
isCyclic: boolean;
allowsRequiresContext: boolean;
rootApplyStrategy: "allows" | "contextual" | "optimistic" | "branchedOptimistic";
contextFreeMorph: ((data: unknown) => unknown) | undefined;
rootApply: (data: unknown, onFail: ArkErrors.Handler | null) => unknown;
referencesById: Record<string, BaseNode>;
shallowReferences: BaseNode[];
flatRefs: FlatRef[];
flatMorphs: FlatRef<Morph.Node>[];
allows: (data: d["prerequisite"]) => boolean;
get shallowMorphs(): array<Morph>;
constructor(attachments: UnknownAttachments, $: BaseScope);
protected createRootApply(): this["rootApply"];
withMeta(meta: ArkEnv.meta | ((currentMeta: ArkEnv.meta) => ArkEnv.meta)): this;

@@ -30,7 +45,2 @@ abstract traverseAllows: TraverseAllows<d["prerequisite"]>;

abstract compile(js: NodeCompiler): void;
readonly includesMorph: boolean;
readonly hasContextualPredicate: boolean;
readonly isCyclic: boolean;
readonly allowsRequiresContext: boolean;
readonly referencesById: Record<string, BaseNode>;
readonly compiledMeta: string;

@@ -40,9 +50,6 @@ protected cacheGetter<name extends keyof this>(name: name, value: this[name]): this[name];

get references(): BaseNode[];
get shallowReferences(): BaseNode[];
get shallowMorphs(): Morph.Node[];
get flatRefs(): array<FlatRef>;
readonly precedence: number;
precompilation: string | undefined;
allows: (data: d["prerequisite"]) => boolean;
traverse(data: d["prerequisite"]): ArkErrors | {} | null | undefined;
assert: (data: d["prerequisite"], pipedFromCtx?: Traversal) => unknown;
traverse(data: d["prerequisite"], pipedFromCtx?: Traversal): ArkErrors | {} | null | undefined;
get in(): this extends {

@@ -110,2 +117,4 @@ [arkKind]: "root";

}
export type DeepNodeTransformation = <kind extends NodeKind>(kind: kind, inner: Inner<kind>, ctx: DeepNodeTransformContext) => NormalizedSchema<kind> | null;
export type DeepNodeTransformation = <kind extends NodeKind>(kind: kind, innerWithMeta: Inner<kind> & {
meta: BaseMeta;
}, ctx: DeepNodeTransformContext) => NormalizedSchema<kind> | null;
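
The transform mapper now receives the node's meta alongside its inner. A hypothetical mapper using the widened parameter (the merge-into-meta pattern mirrors the configureShallowDescendants change further down in this diff):

// sketch only: spread innerWithMeta through and merge a description into
// the node's existing meta rather than replacing it
const withNote = (kind, innerWithMeta, ctx) => ({
	...innerWithMeta,
	meta: { ...innerWithMeta.meta, description: "transformed" }
})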

@@ -1,2 +0,2 @@

import { Callable, appendUnique, flatMorph, includes, isArray, isEmptyObject, stringifyPath, throwError } from "@ark/util";
import { Callable, appendUnique, flatMorph, includes, isArray, isEmptyObject, stringifyPath, throwError, throwInternalError } from "@ark/util";
import { basisKinds, constraintKinds, precedenceOfKind, refinementKinds, rootKinds, structuralKinds } from "./shared/implement.js";

@@ -9,8 +9,23 @@ import { $ark } from "./shared/registry.js";

$;
onFail;
includesTransform;
// if a predicate accepts exactly one arg, we can safely skip passing context
// technically, a predicate could be written like `(data, ...[ctx]) => ctx.mustBe("malicious")`
// that would break here, but it feels like a pathological case and is better to let people optimize
includesContextualPredicate;
isCyclic;
allowsRequiresContext;
rootApplyStrategy;
contextFreeMorph;
rootApply;
referencesById;
shallowReferences;
flatRefs;
flatMorphs;
allows;
get shallowMorphs() {
return [];
}
constructor(attachments, $) {
super((data, pipedFromCtx) => {
if (!this.includesMorph &&
!this.allowsRequiresContext &&
this.allows(data))
return data;
super((data, pipedFromCtx, onFail = this.onFail) => {
if (pipedFromCtx) {

@@ -22,9 +37,108 @@ this.traverseApply(data, pipedFromCtx);

}
const ctx = new Traversal(data, this.$.resolvedConfig);
this.traverseApply(data, ctx);
return ctx.finalize();
return this.rootApply(data, onFail);
}, { attach: attachments });
this.attachments = attachments;
this.$ = $;
this.onFail = this.meta.onFail ?? this.$.resolvedConfig.onFail;
this.includesTransform =
this.hasKind("morph") ||
(this.hasKind("structure") && this.structuralMorph !== undefined);
// if a predicate accepts exactly one arg, we can safely skip passing context
// technically, a predicate could be written like `(data, ...[ctx]) => ctx.mustBe("malicious")`
// that would break here, but it feels like a pathological case and is better to let people optimize
this.includesContextualPredicate =
this.hasKind("predicate") && this.inner.predicate.length !== 1;
this.isCyclic = this.kind === "alias";
this.referencesById = { [this.id]: this };
this.shallowReferences =
this.hasKind("structure") ?
[this, ...this.children]
: this.children.reduce((acc, child) => appendUniqueNodes(acc, child.shallowReferences), [this]);
const isStructural = this.isStructural();
this.flatRefs = [];
this.flatMorphs = [];
for (let i = 0; i < this.children.length; i++) {
this.includesTransform ||= this.children[i].includesTransform;
this.includesContextualPredicate ||=
this.children[i].includesContextualPredicate;
this.isCyclic ||= this.children[i].isCyclic;
if (!isStructural) {
const childFlatRefs = this.children[i].flatRefs;
for (let j = 0; j < childFlatRefs.length; j++) {
const childRef = childFlatRefs[j];
if (!this.flatRefs.some(existing => flatRefsAreEqual(existing, childRef))) {
this.flatRefs.push(childRef);
for (const branch of childRef.node.branches) {
if (branch.hasKind("morph")) {
this.flatMorphs.push({
path: childRef.path,
propString: childRef.propString,
node: branch
});
}
}
}
}
}
Object.assign(this.referencesById, this.children[i].referencesById);
}
this.flatRefs.sort((l, r) => l.path.length > r.path.length ? 1
: l.path.length < r.path.length ? -1
: l.propString > r.propString ? 1
: l.propString < r.propString ? -1
: l.node.expression < r.node.expression ? -1
: 1);
this.allowsRequiresContext =
this.includesContextualPredicate || this.isCyclic;
this.rootApplyStrategy =
!this.allowsRequiresContext && this.flatMorphs.length === 0 ?
this.shallowMorphs.length === 0 ? "allows"
: this.shallowMorphs.every(morph => morph.length === 1) ?
this.hasKind("union") ?
// multiple morphs not yet supported for optimistic compilation
this.branches.some(branch => branch.shallowMorphs.length > 1) ?
"contextual"
: "branchedOptimistic"
: this.shallowMorphs.length > 1 ? "contextual"
: "optimistic"
: "contextual"
: "contextual";
this.rootApply = this.createRootApply();
this.allows =
this.allowsRequiresContext ?
data => this.traverseAllows(data, new Traversal(data, this.$.resolvedConfig))
: data => this.traverseAllows(data);
}
createRootApply() {
switch (this.rootApplyStrategy) {
case "allows":
return (data, onFail) => {
if (this.allows(data))
return data;
const ctx = new Traversal(data, this.$.resolvedConfig);
this.traverseApply(data, ctx);
return ctx.finalize(onFail);
};
case "contextual":
return (data, onFail) => {
const ctx = new Traversal(data, this.$.resolvedConfig);
this.traverseApply(data, ctx);
return ctx.finalize(onFail);
};
case "optimistic":
this.contextFreeMorph = this.shallowMorphs[0];
return (data, onFail) => {
if (this.allows(data))
return this.contextFreeMorph(data);
const ctx = new Traversal(data, this.$.resolvedConfig);
this.traverseApply(data, ctx);
return ctx.finalize(onFail);
};
case "branchedOptimistic":
return this.createBranchedOptimisticRootApply();
default:
this.rootApplyStrategy;
return throwInternalError(`Unexpected rootApplyStrategy ${this.rootApplyStrategy}`);
}
}
withMeta(meta) {

@@ -36,14 +150,2 @@ return this.$.node(this.kind, {

}
includesMorph = this.kind === "morph" ||
(this.hasKind("optional") && this.hasDefault()) ||
(this.hasKind("sequence") && this.includesDefaultable()) ||
(this.hasKind("structure") && this.inner.undeclared === "delete") ||
this.children.some(child => child.includesMorph);
hasContextualPredicate =
// if a predicate accepts exactly one arg, we can safely skip passing context
(this.hasKind("predicate") && this.inner.predicate.length !== 1) ||
this.children.some(child => child.hasContextualPredicate);
isCyclic = this.kind === "alias" || this.children.some(child => child.isCyclic);
allowsRequiresContext = this.hasContextualPredicate || this.isCyclic;
referencesById = this.children.reduce((result, child) => Object.assign(result, child.referencesById), { [this.id]: this });
compiledMeta = JSON.stringify(this.metaJson);

@@ -63,33 +165,9 @@ cacheGetter(name, value) {

}
get shallowReferences() {
return this.cacheGetter("shallowReferences", this.hasKind("structure") ?
[this, ...this.children]
: this.children.reduce((acc, child) => appendUniqueNodes(acc, child.shallowReferences), [this]));
}
get shallowMorphs() {
return this.cacheGetter("shallowMorphs", this.shallowReferences
.filter(n => n.hasKind("morph"))
.sort((l, r) => (l.expression < r.expression ? -1 : 1)));
}
// overridden by structural kinds so that only the root at each path is added
get flatRefs() {
return this.cacheGetter("flatRefs", this.children
.reduce((acc, child) => appendUniqueFlatRefs(acc, child.flatRefs), [])
.sort((l, r) => l.path.length > r.path.length ? 1
: l.path.length < r.path.length ? -1
: l.propString > r.propString ? 1
: l.propString < r.propString ? -1
: l.node.expression < r.node.expression ? -1
: 1));
}
precedence = precedenceOfKind(this.kind);
precompilation;
allows = (data) => {
if (this.allowsRequiresContext) {
return this.traverseAllows(data, new Traversal(data, this.$.resolvedConfig));
}
return this.traverseAllows(data);
};
traverse(data) {
return this(data);
// defined as an arrow function since it is often detached, e.g. when passing to tRPC
// otherwise, would run into issues with this binding
assert = (data, pipedFromCtx) => this(data, pipedFromCtx, errors => errors.throw());
traverse(data, pipedFromCtx) {
return this(data, pipedFromCtx, null);
}

@@ -105,3 +183,3 @@ get in() {

getIo(ioKind) {
if (!this.includesMorph)
if (!this.includesTransform)
return this;

@@ -129,3 +207,3 @@ const ioInner = {};

toString() {
return this.expression;
return `Type<${this.expression}>`;
}

@@ -246,3 +324,6 @@ equals(r) {

delete ctx.seen[this.id];
const transformedInner = mapper(this.kind, innerWithTransformedChildren, ctx);
const innerWithMeta = Object.assign(innerWithTransformedChildren, {
meta: this.meta
});
const transformedInner = mapper(this.kind, innerWithMeta, ctx);
if (transformedInner === null)

@@ -252,3 +333,6 @@ return null;

return (transformedNode = transformedInner);
if (isEmptyObject(transformedInner) &&
const transformedKeys = Object.keys(transformedInner);
const hasNoTypedKeys = transformedKeys.length === 0 ||
(transformedKeys.length === 1 && transformedKeys[0] === "meta");
if (hasNoTypedKeys &&
// if inner was previously an empty object (e.g. unknown) ensure it is not pruned

@@ -273,3 +357,4 @@ !isEmptyObject(this.inner))

configureShallowDescendants(meta) {
return this.$.finalize(this.transform((kind, inner) => ({ ...inner, meta }), {
const newMeta = typeof meta === "string" ? { description: meta } : meta;
return this.$.finalize(this.transform((kind, inner) => ({ ...inner, meta: { ...inner.meta, ...newMeta } }), {
shouldTransform: node => node.kind !== "structure"

@@ -276,0 +361,0 @@ }));

@@ -36,3 +36,3 @@ import { type Brand, type dict } from "@ark/util";

}
export declare const schemaKindOf: <kind extends RootKind = "morph" | "intersection" | "alias" | "union" | "unit" | "proto" | "domain">(schema: unknown, allowedKinds?: readonly kind[]) => kind;
export declare const schemaKindOf: <kind extends RootKind = RootKind>(schema: unknown, allowedKinds?: readonly kind[]) => kind;
export declare const writeInvalidSchemaMessage: (schema: unknown) => string;

@@ -44,4 +44,12 @@ export type NodeId = Brand<string, "NodeId">;

export declare const parseNode: (ctx: NodeParseContext) => BaseNode;
export declare const createNode: (id: NodeId, kind: NodeKind, inner: dict, meta: BaseMeta, $: BaseScope, ignoreCache?: true) => BaseNode;
export type CreateNodeInput = {
id: NodeId;
kind: NodeKind;
inner: dict;
meta: BaseMeta;
$: BaseScope;
ignoreCache?: true;
};
export declare const createNode: ({ id, kind, inner, meta, $, ignoreCache }: CreateNodeInput) => BaseNode;
export declare const withId: <node extends BaseNode>(node: node, id: NodeId) => node;
export declare const withMeta: <node extends BaseNode>(node: node, meta: ArkEnv.meta, id?: NodeId) => node;
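
createNode now takes a single CreateNodeInput object instead of positional arguments. A hypothetical call site under the new signature (someId and someScope are placeholders, not values from this diff):

// sketch only: object-style input replaces the old positional form
const node = createNode({
	id: someId,
	kind: "unit",
	inner: { unit: 5 },
	meta: { description: "the literal 5" },
	$: someScope,
	ignoreCache: true
})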

@@ -98,6 +98,12 @@ import { entriesOf, flatMorph, hasDomain, isArray, isEmptyObject, printable, throwInternalError, throwParseError, unset } from "@ark/util";

}
const node = createNode(ctx.id, ctx.kind, inner, meta, ctx.$);
const node = createNode({
id: ctx.id,
kind: ctx.kind,
inner,
meta,
$: ctx.$
});
return node;
};
export const createNode = (id, kind, inner, meta, $, ignoreCache) => {
export const createNode = ({ id, kind, inner, meta, $, ignoreCache }) => {
const impl = nodeImplementationsByKind[kind];

@@ -171,3 +177,10 @@ const innerEntries = entriesOf(inner);

// have to ignore cache to force creation of new potentially cyclic id
return createNode(id, node.kind, node.inner, node.meta, node.$, true);
return createNode({
id,
kind: node.kind,
inner: node.inner,
meta: node.meta,
$: node.$,
ignoreCache: true
});
};

@@ -177,3 +190,9 @@ export const withMeta = (node, meta, id) => {

throwInternalError(`Unexpected attempt to overwrite node id ${id}`);
return createNode(id ?? registerNodeId(meta.alias ?? node.kind), node.kind, node.inner, meta, node.$);
return createNode({
id: id ?? registerNodeId(meta.alias ?? node.kind),
kind: node.kind,
inner: node.inner,
meta,
$: node.$
});
};

@@ -180,0 +199,0 @@ const possiblyCollapse = (json, toKey, allowPrimitive) => {

@@ -34,2 +34,6 @@ import { append, domainDescriptions, printable, throwInternalError, throwParseError } from "@ark/util";

return r;
if (r.isBasis() && !r.overlaps($ark.intrinsic.object)) {
// could be made more robust as part of https://github.com/arktypeio/arktype/issues/1026
return Disjoint.init("assignability", $ark.intrinsic.object, r);
}
return ctx.$.lazilyResolve(() => neverIfDisjoint(intersectOrPipeNodes(l.resolution, r, ctx)), `${l.reference}${ctx.pipe ? "=>" : "&"}${r.id}`);

@@ -36,0 +40,0 @@ })

@@ -13,2 +13,3 @@ import { type array, type listable, type show } from "@ark/util";

import type { Domain } from "./domain.ts";
import type { Morph } from "./morph.ts";
import type { Proto } from "./proto.ts";

@@ -60,2 +61,3 @@ import { BaseRoot } from "./root.ts";

expression: string;
get shallowMorphs(): array<Morph>;
get shortDescription(): string;

@@ -62,0 +64,0 @@ protected innerToJsonSchema(): JsonSchema;

@@ -156,3 +156,8 @@ import { flatMorph, hasDomain, isEmptyObject, isKeyOf, throwParseError } from "@ark/util";

structure = this.inner.structure;
expression = describeIntersection(this);
expression = writeIntersectionExpression(this);
get shallowMorphs() {
return this.inner.structure?.structuralMorph ?
[this.inner.structure.structuralMorph]
: [];
}
get shortDescription() {

@@ -242,5 +247,5 @@ return this.basis?.shortDescription ?? "present";

};
const describeIntersection = (node) => {
const writeIntersectionExpression = (node) => {
let expression = node.structure?.expression ||
`${node.basis ? node.basis.nestableExpression + " " : ""}${node.refinements.join(" & ")}` ||
`${node.basis ? node.basis.nestableExpression + " " : ""}${node.refinements.map(n => n.expression).join(" & ")}` ||
"unknown";

@@ -247,0 +252,0 @@ if (expression === "Array == 0")

@@ -31,2 +31,4 @@ import { type array, type listable } from "@ark/util";

type Node = MorphNode;
type In<morph extends Morph> = morph extends Morph<infer i> ? i : never;
type Out<morph extends Morph> = morph extends Morph<never, infer o> ? o : never;
}

@@ -41,2 +43,3 @@ export type Morph<i = any, o = unknown> = (In: i, ctx: Traversal) => o;

introspectableOut: BaseRoot | undefined;
get shallowMorphs(): array<Morph>;
get in(): BaseRoot;

@@ -43,0 +46,0 @@ get out(): BaseRoot;

@@ -90,2 +90,8 @@ import { arrayEquals, liftArray, throwParseError } from "@ark/util";

: undefined;
get shallowMorphs() {
// if the morph input is a union, it should not contain any other shallow morphs
return Array.isArray(this.inner.in?.shallowMorphs) ?
[...this.inner.in.shallowMorphs, ...this.morphs]
: this.morphs;
}
get in() {

@@ -92,0 +98,0 @@ return (this.declaredIn ?? this.inner.in?.in ?? $ark.intrinsic.unknown.internal);

import { inferred, type array } from "@ark/util";
import { type Constraint } from "../constraint.ts";
import type { NodeSchema, nodeOfKind, reducibleKindOf } from "../kinds.ts";
import { BaseNode, type FlatRef, type GettableKeyOrNode, type KeyOrKeyNode } from "../node.ts";
import { BaseNode, type GettableKeyOrNode, type KeyOrKeyNode } from "../node.ts";
import type { Predicate } from "../predicate.ts";

@@ -30,3 +30,2 @@ import type { Divisor } from "../refinements/divisor.ts";

constructor(attachments: UnknownAttachments, $: BaseScope);
assert: (data: unknown) => unknown;
get internal(): this;

@@ -80,3 +79,2 @@ get "~standard"(): StandardSchemaV1.ArkTypeProps;

private pipeOnce;
get flatMorphs(): array<FlatRef<Morph.Node>>;
narrow(predicate: Predicate): BaseRoot;

@@ -83,0 +81,0 @@ constrain<kind extends Constraint.PrimitiveKind>(kind: kind, schema: NodeSchema<kind>): BaseRoot;

import { includes, inferred, omit, throwInternalError, throwParseError } from "@ark/util";
import { throwInvalidOperandError } from "../constraint.js";
import { BaseNode, appendUniqueFlatRefs } from "../node.js";
import { BaseNode } from "../node.js";
import { Disjoint, writeUnsatisfiableExpressionError } from "../shared/disjoint.js";

@@ -17,6 +17,2 @@ import { ArkErrors } from "../shared/errors.js";

}
assert = (data) => {
const result = this.traverse(data);
return result instanceof ArkErrors ? result.throw() : result;
};
get internal() {

@@ -165,6 +161,8 @@ return this;

array() {
return this.$.schema({
proto: Array,
sequence: this
}, { prereduced: true });
return this.$.schema(this.isUnknown() ?
{ proto: Array }
: {
proto: Array,
sequence: this
}, { prereduced: true });
}

@@ -209,6 +207,7 @@ overlaps(r) {

_pipe(...morphs) {
return morphs.reduce((acc, morph) => acc.pipeOnce(morph), this);
const result = morphs.reduce((acc, morph) => acc.pipeOnce(morph), this);
return this.$.finalize(result);
}
tryPipe(...morphs) {
return morphs.reduce((acc, morph) => acc.pipeOnce(hasArkKind(morph, "root") ? morph : ((In, ctx) => {
const result = morphs.reduce((acc, morph) => acc.pipeOnce(hasArkKind(morph, "root") ? morph : ((In, ctx) => {
try {

@@ -225,2 +224,3 @@ return morph(In, ctx);

})), this);
return this.$.finalize(result);
}

@@ -252,14 +252,2 @@ pipe = Object.assign(this._pipe.bind(this), {

}
get flatMorphs() {
return this.cacheGetter("flatMorphs", this.flatRefs.reduce((branches, ref) => appendUniqueFlatRefs(branches, ref.node.hasKind("union") ?
ref.node.branches
.filter(b => b.hasKind("morph"))
.map(branch => ({
path: ref.path,
propString: ref.propString,
node: branch
}))
: ref.node.hasKind("morph") ? ref
: []), []));
}
narrow(predicate) {

@@ -266,0 +254,0 @@ return this.constrainOut("predicate", predicate);

@@ -1,3 +0,4 @@

import { type JsonStructure, type Key, type SerializedPrimitive, type array, type show } from "@ark/util";
import { type JsonStructure, type SerializedPrimitive, type array, type show } from "@ark/util";
import type { NodeSchema, RootSchema, nodeOfKind } from "../kinds.ts";
import type { BaseNode } from "../node.ts";
import { type NodeCompiler } from "../shared/compile.ts";

@@ -10,4 +11,5 @@ import type { BaseErrorContext, BaseNormalizedSchema, declareNode } from "../shared/declare.ts";

import { type RegisteredReference } from "../shared/registry.ts";
import type { TraverseAllows, TraverseApply } from "../shared/traversal.ts";
import { type TraverseAllows, type TraverseApply } from "../shared/traversal.ts";
import type { Domain } from "./domain.ts";
import type { Morph } from "./morph.ts";
import { BaseRoot } from "./root.ts";

@@ -49,2 +51,4 @@ export declare namespace Union {

expression: string;
createBranchedOptimisticRootApply(): BaseNode["rootApply"];
get shallowMorphs(): array<Morph>;
get shortDescription(): string;

@@ -54,2 +58,3 @@ protected innerToJsonSchema(): JsonSchema;

traverseApply: TraverseApply;
traverseOptimistic: (data: unknown) => unknown;
compile(js: NodeCompiler): void;

@@ -67,10 +72,15 @@ private compileIndiscriminable;

export type CaseKey<kind extends DiscriminantKind = DiscriminantKind> = DiscriminantKind extends kind ? string : DiscriminantKinds[kind] | "default";
type DiscriminantContext<kind extends DiscriminantKind = DiscriminantKind> = {
path: Key[];
type DiscriminantLocation<kind extends DiscriminantKind = DiscriminantKind> = {
path: PropertyKey[];
optionallyChainedPropString: string;
kind: kind;
};
export interface Discriminant<kind extends DiscriminantKind = DiscriminantKind> extends DiscriminantContext<kind> {
export interface Discriminant<kind extends DiscriminantKind = DiscriminantKind> extends DiscriminantLocation<kind> {
cases: DiscriminatedCases<kind>;
}
export type CaseContext = {
branchIndices: number[];
condition: nodeOfKind<DiscriminantKind> | Domain.Enumerable;
};
export type CaseDiscriminant = nodeOfKind<DiscriminantKind> | Domain.Enumerable;
export type DiscriminatedCases<kind extends DiscriminantKind = DiscriminantKind> = {

@@ -84,5 +94,5 @@ [caseKey in CaseKey<kind>]: BaseRoot | true;

export type DiscriminantKind = show<keyof DiscriminantKinds>;
export declare const pruneDiscriminant: (discriminantBranch: BaseRoot, discriminantCtx: DiscriminantContext) => BaseRoot | null;
export declare const pruneDiscriminant: (discriminantBranch: BaseRoot, discriminantCtx: DiscriminantLocation) => BaseRoot | null;
export declare const writeIndiscriminableMorphMessage: (lDescription: string, rDescription: string) => string;
export declare const writeOrderedIntersectionMessage: (lDescription: string, rDescription: string) => string;
export {};

@@ -1,2 +0,2 @@

import { appendUnique, arrayEquals, domainDescriptions, flatMorph, groupBy, isArray, jsTypeOfDescriptions, printable, throwParseError } from "@ark/util";
import { appendUnique, arrayEquals, domainDescriptions, flatMorph, groupBy, isArray, jsTypeOfDescriptions, printable, range, throwParseError, unset } from "@ark/util";
import { compileLiteralPropAccess, compileSerializedValue } from "../shared/compile.js";

@@ -7,2 +7,3 @@ import { Disjoint } from "../shared/disjoint.js";

import { $ark, registeredReference } from "../shared/registry.js";
import { Traversal } from "../shared/traversal.js";
import { hasArkKind } from "../shared/utils.js";

@@ -144,2 +145,15 @@ import { BaseRoot } from "./root.js";

expression = this.distribute(n => n.nestableExpression, expressBranches);
createBranchedOptimisticRootApply() {
return (data, onFail) => {
const optimisticResult = this.traverseOptimistic(data);
if (optimisticResult !== unset)
return optimisticResult;
const ctx = new Traversal(data, this.$.resolvedConfig);
this.traverseApply(data, ctx);
return ctx.finalize(onFail);
};
}
get shallowMorphs() {
return this.branches.reduce((morphs, branch) => appendUnique(morphs, branch.shallowMorphs), []);
}
get shortDescription() {

@@ -165,3 +179,3 @@ return this.distribute(branch => branch.shortDescription, describeBranches);

if (!ctx.hasError()) {
if (this.branches[i].includesMorph)
if (this.branches[i].includesTransform)
return ctx.queuedMorphs.push(...ctx.popBranch().queuedMorphs);

@@ -174,2 +188,15 @@ return ctx.popBranch();

};
traverseOptimistic = (data) => {
for (let i = 0; i < this.branches.length; i++) {
const branch = this.branches[i];
if (branch.traverseAllows(data)) {
if (branch.contextFreeMorph)
return branch.contextFreeMorph(data);
// if we're calling this function and the matching branch didn't have
// a context-free morph, it shouldn't have morphs at all
return data;
}
}
return unset;
};
compile(js) {

@@ -188,2 +215,5 @@ if (!this.discriminant ||

const caseKeys = Object.keys(cases);
const { optimistic } = js;
// only the first layer can be optimistic
js.optimistic = false;
js.block(`switch(${condition})`, () => {

@@ -193,3 +223,8 @@ for (const k in cases) {

const caseCondition = k === "default" ? k : `case ${k}`;
js.line(`${caseCondition}: return ${v === true ? v : js.invoke(v)}`);
js.line(`${caseCondition}: return ${v === true ?
optimistic ? js.data
: v
: optimistic ?
`${js.invoke(v)} ? ${v.contextFreeMorph ? `${registeredReference(v.contextFreeMorph)}(${js.data})` : js.data} : "${unset}"`
: js.invoke(v)}`);
}

@@ -199,3 +234,3 @@ return js;

if (js.traversalKind === "Allows") {
js.return(false);
js.return(optimistic ? `"${unset}"` : false);
return;

@@ -211,3 +246,3 @@ }

: caseKeys);
const serializedPathSegments = this.discriminant.path.map(k => typeof k === "string" ? JSON.stringify(k) : registeredReference(k));
const serializedPathSegments = this.discriminant.path.map(k => typeof k === "symbol" ? registeredReference(k) : JSON.stringify(k));
const serializedExpected = JSON.stringify(expected);

@@ -232,3 +267,3 @@ const serializedActual = this.discriminant.kind === "domain" ?

.line(js.invoke(branch))
.if("!ctx.hasError()", () => js.return(branch.includesMorph ?
.if("!ctx.hasError()", () => js.return(branch.includesTransform ?
"ctx.queuedMorphs.push(...ctx.popBranch().queuedMorphs)"

@@ -240,4 +275,11 @@ : "ctx.popBranch()"))

else {
this.branches.forEach(branch => js.if(`${js.invoke(branch)}`, () => js.return(true)));
js.return(false);
const { optimistic } = js;
// only the first layer can be optimistic
js.optimistic = false;
this.branches.forEach(branch => js.if(`${js.invoke(branch)}`, () => js.return(optimistic ?
branch.contextFreeMorph ?
`${registeredReference(branch.contextFreeMorph)}(${js.data})`
: js.data
: true)));
js.return(optimistic ? `"${unset}"` : false);
}

@@ -295,4 +337,10 @@ }

cases: {
[lSerialized]: [l],
[rSerialized]: [r]
[lSerialized]: {
branchIndices: [lIndex],
condition: entry.l
},
[rSerialized]: {
branchIndices: [rIndex],
condition: entry.r
}
},

@@ -303,4 +351,20 @@ path: entry.path

else {
matching.cases[lSerialized] = appendUnique(matching.cases[lSerialized], l);
matching.cases[rSerialized] = appendUnique(matching.cases[rSerialized], r);
if (matching.cases[lSerialized]) {
matching.cases[lSerialized].branchIndices = appendUnique(matching.cases[lSerialized].branchIndices, lIndex);
}
else {
matching.cases[lSerialized] ??= {
branchIndices: [lIndex],
condition: entry.l
};
}
if (matching.cases[rSerialized]) {
matching.cases[rSerialized].branchIndices = appendUnique(matching.cases[rSerialized].branchIndices, rIndex);
}
else {
matching.cases[rSerialized] ??= {
branchIndices: [rIndex],
condition: entry.r
};
}
}

@@ -310,32 +374,33 @@ }

}
const best = candidates
.sort((l, r) => Object.keys(l.cases).length - Object.keys(r.cases).length)
.at(-1);
if (!best)
const orderedCandidates = this.ordered ? orderCandidates(candidates, this.branches) : candidates;
if (!orderedCandidates.length)
return null;
let defaultBranches = [...this.branches];
const bestCtx = {
kind: best.kind,
path: best.path,
optionallyChainedPropString: optionallyChainPropString(best.path)
};
const cases = flatMorph(best.cases, (k, caseBranches) => {
const prunedBranches = [];
defaultBranches = defaultBranches.filter(n => !caseBranches.includes(n));
for (const branch of caseBranches) {
const pruned = pruneDiscriminant(branch, bestCtx);
// if any branch of the union has no constraints (i.e. is unknown)
// return it right away
if (pruned === null)
return [k, true];
prunedBranches.push(pruned);
const ctx = createCaseResolutionContext(orderedCandidates, this);
const cases = {};
for (const k in ctx.best.cases) {
const resolution = resolveCase(ctx, k);
if (resolution === null) {
cases[k] = true;
continue;
}
const caseNode = prunedBranches.length === 1 ?
prunedBranches[0]
: this.$.node("union", prunedBranches);
// if all the branches ended up back in pruned, we'd loop if we continued
// so just bail out- nothing left to discriminate
if (resolution.length === this.branches.length)
return null;
if (this.ordered) {
// ensure the original order of the pruned branches is preserved
resolution.sort((l, r) => l.originalIndex - r.originalIndex);
}
const branches = resolution.map(entry => entry.branch);
const caseNode = branches.length === 1 ?
branches[0]
: this.$.node("union", this.ordered ? { branches, ordered: true } : branches);
Object.assign(this.referencesById, caseNode.referencesById);
return [k, caseNode];
});
if (defaultBranches.length) {
cases.default = this.$.node("union", defaultBranches, {
cases[k] = caseNode;
}
if (ctx.defaultEntries.length) {
// we don't have to worry about order here as it is always preserved
// within defaultEntries
const branches = ctx.defaultEntries.map(entry => entry.branch);
cases.default = this.$.node("union", this.ordered ? { branches, ordered: true } : branches, {
prereduced: true

@@ -345,3 +410,3 @@ });

}
return Object.assign(bestCtx, {
return Object.assign(ctx.location, {
cases

@@ -351,2 +416,113 @@ });

}
const createCaseResolutionContext = (orderedCandidates, node) => {
const best = orderedCandidates.sort((l, r) => Object.keys(r.cases).length - Object.keys(l.cases).length)[0];
const location = {
kind: best.kind,
path: best.path,
optionallyChainedPropString: optionallyChainPropString(best.path)
};
const defaultEntries = node.branches.map((branch, originalIndex) => ({
originalIndex,
branch
}));
return {
best,
location,
defaultEntries,
node
};
};
const resolveCase = (ctx, key) => {
const caseCtx = ctx.best.cases[key];
const discriminantNode = discriminantCaseToNode(caseCtx.condition, ctx.location.path, ctx.node.$);
let resolvedEntries = [];
const nextDefaults = [];
for (let i = 0; i < ctx.defaultEntries.length; i++) {
const entry = ctx.defaultEntries[i];
if (caseCtx.branchIndices.includes(entry.originalIndex)) {
const pruned = pruneDiscriminant(ctx.node.branches[entry.originalIndex], ctx.location);
if (pruned === null) {
// if any branch of the union has no constraints (i.e. is
// unknown), the others won't affect the resolution type, but could still
// remove additional cases from defaultEntries
resolvedEntries = null;
}
else {
resolvedEntries?.push({
originalIndex: entry.originalIndex,
branch: pruned
});
}
}
else if (
// we shouldn't need a special case for alias to avoid the below
// once alias resolution issues are improved:
// https://github.com/arktypeio/arktype/issues/1026
entry.branch.hasKind("alias") &&
discriminantNode.hasKind("domain") &&
discriminantNode.domain === "object")
resolvedEntries?.push(entry);
else {
if (entry.branch.in.overlaps(discriminantNode)) {
// include cases where an object not including the
// discriminant path might have that value present as an undeclared key
const overlapping = pruneDiscriminant(entry.branch, ctx.location);
resolvedEntries?.push({
originalIndex: entry.originalIndex,
branch: overlapping
});
}
nextDefaults.push(entry);
}
}
ctx.defaultEntries = nextDefaults;
return resolvedEntries;
};
const orderCandidates = (candidates, originalBranches) => {
const viableCandidates = candidates.filter(candidate => {
const caseGroups = Object.values(candidate.cases).map(caseCtx => caseCtx.branchIndices);
// compare each group against all subsequent groups.
for (let i = 0; i < caseGroups.length - 1; i++) {
const currentGroup = caseGroups[i];
for (let j = i + 1; j < caseGroups.length; j++) {
const nextGroup = caseGroups[j];
// for each group pair, check for branches whose order was reversed
for (const currentIndex of currentGroup) {
for (const nextIndex of nextGroup) {
if (currentIndex > nextIndex) {
if (originalBranches[currentIndex].overlaps(originalBranches[nextIndex])) {
// if the order was not preserved and the branches overlap,
// this is not a viable discriminant as it cannot guarantee the same behavior
return false;
}
}
}
}
}
}
// branch groups preserve order for all non-disjoint pairs, so this candidate is viable
return true;
});
return viableCandidates;
};
const discriminantCaseToNode = (caseDiscriminant, path, $) => {
let node = caseDiscriminant === "undefined" ? $.node("unit", { unit: undefined })
: caseDiscriminant === "null" ? $.node("unit", { unit: null })
: caseDiscriminant === "boolean" ? $.units([true, false])
: caseDiscriminant;
for (let i = path.length - 1; i >= 0; i--) {
const key = path[i];
node = $.node("intersection", typeof key === "number" ?
{
proto: "Array",
// create unknown for preceding elements (could be optimized with safe imports)
sequence: [...range(key).map(_ => ({})), node]
}
: {
domain: "object",
required: [{ key, value: node }]
});
}
return node;
};
const optionallyChainPropString = (path) => path.reduce((acc, k) => acc + compileLiteralPropAccess(k, true), "data");

@@ -482,11 +658,12 @@ const serializedTypeOfDescriptions = registeredReference(jsTypeOfDescriptions);

const assertDeterminateOverlap = (l, r) => {
if ((l.includesMorph || r.includesMorph) &&
(!arrayEquals(l.shallowMorphs, r.shallowMorphs, {
isEqual: (l, r) => l.hasEqualMorphs(r)
}) ||
!arrayEquals(l.flatMorphs, r.flatMorphs, {
isEqual: (l, r) => l.propString === r.propString && l.node.hasEqualMorphs(r.node)
}))) {
if (!l.includesTransform && !r.includesTransform)
return;
if (!arrayEquals(l.shallowMorphs, r.shallowMorphs)) {
throwParseError(writeIndiscriminableMorphMessage(l.expression, r.expression));
}
if (!arrayEquals(l.flatMorphs, r.flatMorphs, {
isEqual: (l, r) => l.propString === r.propString && l.node.hasEqualMorphs(r.node)
})) {
throwParseError(writeIndiscriminableMorphMessage(l.expression, r.expression));
}
};

@@ -493,0 +670,0 @@ export const pruneDiscriminant = (discriminantBranch, discriminantCtx) => discriminantBranch.transform((nodeKind, inner) => {

@@ -1,3 +0,3 @@

import { ParseError, type JsonStructure, type anyOrNever, type array, type conform, type flattenListable, type listable, type noSuggest, type satisfy } from "@ark/util";
import { type ArkConfig, type ResolvedConfig } from "./config.ts";
import { ParseError, type JsonStructure, type anyOrNever, type array, type conform, type flattenListable, type intersectUnion, type listable, type noSuggest, type satisfy, type show } from "@ark/util";
import { type ArkSchemaConfig, type ResolvedConfig } from "./config.ts";
import { GenericRoot, type GenericRootParser } from "./generic.ts";

@@ -19,2 +19,19 @@ import { type NodeSchema, type RootSchema, type nodeOfKind, type reducibleKindOf } from "./kinds.ts";

export type resolveReference<reference extends resolvableReferenceIn<$>, $> = reference extends keyof $ ? $[reference] : $[`#${reference}` & keyof $];
export type flatResolutionsOf<$> = show<intersectUnion<resolvableReferenceIn<$> extends infer k ? k extends keyof $ & string ? resolutionsOfReference<k, $[k]> : unknown : unknown>>;
type resolutionsOfReference<k extends string, v> = [
v
] extends [{
[arkKind]: "module";
}] ? [
v
] extends [anyOrNever] ? {
[_ in k]: v;
} : prefixKeys<flatResolutionsOf<v>, k> & {
[innerKey in keyof v as innerKey extends "root" ? k : never]: v[innerKey];
} : {
[_ in k]: v;
};
type prefixKeys<o, prefix extends string> = {
[k in keyof o & string as `${prefix}.${k}`]: o[k];
} & unknown;
export type PrivateDeclaration<key extends string = string> = `#${key}`;

@@ -33,8 +50,8 @@ export type InternalResolution = BaseRoot | GenericRoot | InternalModule;

export type AliasDefEntry = [name: string, defValue: unknown];
export type GlobalOnlyConfigOptionName = satisfy<keyof ArkConfig, "dateAllowsInvalid" | "numberAllowsNaN" | "onUndeclaredKey">;
export type GlobalOnlyConfigOptionName = satisfy<keyof ArkSchemaConfig, "dateAllowsInvalid" | "numberAllowsNaN" | "onUndeclaredKey">;
export interface ScopeOnlyConfigOptions {
ambient?: boolean | string;
name?: string;
prereducedAliases?: boolean;
}
export interface ArkScopeConfig extends Omit<ArkConfig, GlobalOnlyConfigOptionName>, ScopeOnlyConfigOptions {
export interface ArkSchemaScopeConfig extends Omit<ArkSchemaConfig, GlobalOnlyConfigOptionName>, ScopeOnlyConfigOptions {
}

@@ -44,5 +61,5 @@ export interface ResolvedScopeConfig extends ResolvedConfig, ScopeOnlyConfigOptions {

export declare abstract class BaseScope<$ extends {} = {}> {
readonly config: ArkScopeConfig;
readonly config: ArkSchemaScopeConfig;
readonly resolvedConfig: ResolvedScopeConfig;
readonly id: string;
readonly name: string;
get [arkKind](): "scope";

@@ -63,3 +80,3 @@ readonly referencesById: {

* corresponding definitions.**/
def: Record<string, unknown>, config?: ArkScopeConfig);
def: Record<string, unknown>, config?: ArkSchemaScopeConfig);
protected cacheGetter<name extends keyof this>(name: name, value: this[name]): this[name];

@@ -122,3 +139,3 @@ get internal(): this;

[k in keyof aliases]: conform<aliases[k], RootSchema | PreparsedNodeResolution>;
}, config?: ArkScopeConfig) => BaseScope<instantiateAliases<aliases>>;
}, config?: ArkSchemaScopeConfig) => BaseScope<instantiateAliases<aliases>>;
export declare const schemaScope: SchemaScopeParser;

@@ -125,0 +142,0 @@ export type InternalSchemaParser = (schema: RootSchema, opts?: BaseParseOptions) => BaseRoot;

@@ -17,9 +17,53 @@ import { ParseError, flatMorph, hasDomain, isArray, isThunk, printable, throwInternalError, throwParseError } from "@ark/util";

export const writeDuplicateAliasError = (alias) => `#${alias} duplicates public alias ${alias}`;
const scopesById = {};
const scopesByName = {};
$ark.ambient ??= {};
let rawUnknownUnion;
const precompile = (references) => bindPrecompilation(references, writePrecompilation(references));
const bindPrecompilation = (references, precompilation) => {
const compiledTraversals = instantiatePrecompilation(precompilation);
for (const node of references) {
if (node.precompilation) {
// if node has already been bound to another scope or anonymous type, don't rebind it
continue;
}
node.traverseAllows =
compiledTraversals[`${node.id}Allows`].bind(compiledTraversals);
if (node.isRoot() && !node.allowsRequiresContext) {
// if the reference doesn't require context, we can assign over
// it directly to avoid having to initialize it
node.allows = node.traverseAllows;
}
node.traverseApply =
compiledTraversals[`${node.id}Apply`].bind(compiledTraversals);
if (compiledTraversals[`${node.id}Optimistic`]) {
;
node.traverseOptimistic =
compiledTraversals[`${node.id}Optimistic`].bind(compiledTraversals);
}
node.precompilation = precompilation;
}
};
const instantiatePrecompilation = (precompilation) => new CompiledFunction().return(precompilation).compile()();
const writePrecompilation = (references) => references.reduce((js, node) => {
const allowsCompiler = new NodeCompiler({ kind: "Allows" }).indent();
node.compile(allowsCompiler);
const allowsJs = allowsCompiler.write(`${node.id}Allows`);
const applyCompiler = new NodeCompiler({ kind: "Apply" }).indent();
node.compile(applyCompiler);
const applyJs = applyCompiler.write(`${node.id}Apply`);
const result = `${js}${allowsJs},\n${applyJs},\n`;
if (!node.hasKind("union"))
return result;
const optimisticCompiler = new NodeCompiler({
kind: "Allows",
optimistic: true
}).indent();
node.compile(optimisticCompiler);
const optimisticJs = optimisticCompiler.write(`${node.id}Optimistic`);
return `${result}${optimisticJs},\n`;
}, "{\n") + "}";
export class BaseScope {
config;
resolvedConfig;
id = `${Object.keys(scopesById).length}$`;
name;
get [arkKind]() {

@@ -41,2 +85,8 @@ return "scope";

this.resolvedConfig = mergeConfigs($ark.resolvedConfig, config);
this.name =
this.resolvedConfig.name ??
`anonymousScope${Object.keys(scopesByName).length}`;
if (this.name in scopesByName)
throwParseError(`A Scope named ${this.name} already exists`);
scopesByName[this.name] = this;
const aliasEntries = Object.entries(def).map(entry => this.preparseOwnAliasEntry(...entry));

@@ -83,3 +133,2 @@ aliasEntries.forEach(([k, v]) => {

this.nodesByHash[rawUnknownUnion.hash] = this.node("intersection", {}, { prereduced: true });
scopesById[this.id] = this;
}

@@ -280,13 +329,2 @@ cacheGetter(name, value) {

this.lazyResolutions.forEach(node => node.resolution);
if (this.resolvedConfig.ambient === true)
// spread all exports to ambient
Object.assign($ark.ambient, this._exports);
else if (typeof this.resolvedConfig.ambient === "string") {
// add exports as a subscope with the config value as a name
Object.assign($ark.ambient, {
[this.resolvedConfig.ambient]: new RootModule({
...this._exports
})
});
}
this._exportedResolutions = resolutionsOfModule(this, this._exports);

@@ -431,32 +469,2 @@ this._json = resolutionsToJson(this._exportedResolutions);

export const writeMissingSubmoduleAccessMessage = (name) => `Reference to submodule '${name}' must specify an alias`;
const precompile = (references) => bindPrecompilation(references, writePrecompilation(references));
const bindPrecompilation = (references, precompilation) => {
const compiledTraversals = instantiatePrecompilation(precompilation);
for (const node of references) {
if (node.precompilation) {
// if node has already been bound to another scope or anonymous type, don't rebind it
continue;
}
node.traverseAllows =
compiledTraversals[`${node.id}Allows`].bind(compiledTraversals);
if (node.isRoot() && !node.allowsRequiresContext) {
// if the reference doesn't require context, we can assign over
// it directly to avoid having to initialize it
node.allows = node.traverseAllows;
}
node.traverseApply =
compiledTraversals[`${node.id}Apply`].bind(compiledTraversals);
node.precompilation = precompilation;
}
};
const instantiatePrecompilation = (precompilation) => new CompiledFunction().return(precompilation).compile()();
const writePrecompilation = (references) => references.reduce((js, node) => {
const allowsCompiler = new NodeCompiler("Allows").indent();
node.compile(allowsCompiler);
const allowsJs = allowsCompiler.write(`${node.id}Allows`);
const applyCompiler = new NodeCompiler("Apply").indent();
node.compile(applyCompiler);
const applyJs = applyCompiler.write(`${node.id}Apply`);
return `${js}${allowsJs},\n${applyJs},\n`;
}, "{\n") + "}";
// ensure the scope is resolved so JIT will be applied to future types

@@ -463,0 +471,0 @@ rootSchemaScope.export();

import { CastableBase } from "@ark/util";
import type { BaseNode } from "../node.ts";
import type { NodeId } from "../parse.ts";
import type { Discriminant } from "../roots/union.ts";
import type { TraversalKind } from "./traversal.ts";

@@ -47,7 +46,12 @@ export type CoercibleValue = string | number | boolean | null | undefined;

}
export declare namespace NodeCompiler {
interface Context {
kind: TraversalKind;
optimistic?: true;
}
}
export declare class NodeCompiler extends CompiledFunction<["data", "ctx"]> {
path: string[];
discriminants: Discriminant[];
traversalKind: TraversalKind;
constructor(traversalKind: TraversalKind);
optimistic: boolean;
constructor(ctx: NodeCompiler.Context);
invoke(node: BaseNode | NodeId, opts?: InvokeOptions): string;

@@ -54,0 +58,0 @@ referenceToId(id: NodeId, opts?: ReferenceOptions): string;

@@ -93,8 +93,8 @@ import { CastableBase, DynamicFunction, hasDomain, isDotAccessible, serializePrimitive } from "@ark/util";

export class NodeCompiler extends CompiledFunction {
path = [];
discriminants = [];
traversalKind;
constructor(traversalKind) {
optimistic;
constructor(ctx) {
super("data", "ctx");
this.traversalKind = traversalKind;
this.traversalKind = ctx.kind;
this.optimistic = ctx.optimistic === true;
}

@@ -101,0 +101,0 @@ invoke(node, opts) {

import type { merge, show } from "@ark/util";
import type { UnknownErrorWriters } from "../config.ts";
import type { UnknownErrorConfigs } from "../config.ts";
import type { nodeOfKind, reducibleKindOf } from "../kinds.ts";
import type { Disjoint } from "./disjoint.ts";
import type { ArkErrors } from "./errors.ts";
import type { NarrowedAttachments, NodeKind } from "./implement.ts";

@@ -10,11 +11,16 @@ import type { JsonSchema } from "./jsonSchema.ts";

};
export interface BaseMeta extends JsonSchema.Meta, UnknownErrorWriters {
export interface DefaultArkEnv {
meta(): {};
onFail(errors: ArkErrors): ArkErrors;
}
export interface BaseMeta extends JsonSchema.Meta, UnknownErrorConfigs {
alias?: string;
onFail?: ArkErrors.Handler;
}
declare global {
export interface ArkEnv {
meta(): {};
export interface ArkEnv extends DefaultArkEnv {
}
export namespace ArkEnv {
type meta = show<BaseMeta & ReturnType<ArkEnv["meta"]>>;
type onFail = ReturnType<ArkEnv["onFail"]>;
}
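
With onFail now part of DefaultArkEnv, its return type can be customized through the same global declaration-merging hook used for meta. A hedged sketch, assuming the standard ArkEnv augmentation pattern and that ArkErrors is importable from the package root:

import type { ArkErrors } from "@ark/schema"

declare global {
	export interface ArkEnv {
		// hypothetical override: declare that validation failures throw,
		// so ArkEnv.onFail (ReturnType<ArkEnv["onFail"]>) resolves to never
		onFail(errors: ArkErrors): never
	}
}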

@@ -21,0 +27,0 @@ }

@@ -35,3 +35,3 @@ import { type Key } from "@ark/util";

invert(): Disjoint;
withPrefixKey(key: string | symbol, kind: Prop.Kind): Disjoint;
withPrefixKey(key: PropertyKey, kind: Prop.Kind): Disjoint;
toNeverIfDisjoint(): BaseRoot;

@@ -38,0 +38,0 @@ }

@@ -27,2 +27,5 @@ import { CastableBase, ReadonlyArray, ReadonlyPath, type array, type merge, type propwiseXor, type show } from "@ark/util";

}
export declare namespace ArkErrors {
type Handler<returns = unknown> = (errors: ArkErrors) => returns;
}
/**

@@ -111,7 +114,11 @@ * A ReadonlyArray of `ArkError`s returned by a Type on invalid input.

export type ArkErrorInput = string | ArkErrorContextInput | CustomErrorInput;
export type ProblemConfig<code extends ArkErrorCode = ArkErrorCode> = string | ProblemWriter<code>;
export type ProblemWriter<code extends ArkErrorCode = ArkErrorCode> = (context: ProblemContext<code>) => string;
export type MessageConfig<code extends ArkErrorCode = ArkErrorCode> = string | MessageWriter<code>;
export type MessageWriter<code extends ArkErrorCode = ArkErrorCode> = (context: MessageContext<code>) => string;
export type getAssociatedDataForError<code extends ArkErrorCode> = code extends NodeKind ? Prerequisite<code> : unknown;
export type ExpectedConfig<code extends ArkErrorCode = ArkErrorCode> = string | ExpectedWriter<code>;
export type ExpectedWriter<code extends ArkErrorCode = ArkErrorCode> = (source: errorContext<code>) => string;
export type ActualConfig<code extends ArkErrorCode = ArkErrorCode> = string | ActualWriter<code>;
export type ActualWriter<code extends ArkErrorCode = ArkErrorCode> = (data: getAssociatedDataForError<code>) => string;
export {};
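
Each writer type now has a corresponding *Config union that also accepts a plain string, as the reworked getters below show. A hypothetical per-kind error config under the new types:

// sketch only: with ExpectedConfig / ProblemConfig, a fixed string and a
// writer function are both accepted, e.g. for a "divisor" node's errors
const divisorErrors = {
	expected: "an even number",
	problem: ctx => `must be ${ctx.expected} (was ${ctx.actual})`
}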

@@ -56,20 +56,24 @@ import { CastableBase, ReadonlyArray, ReadonlyPath, append, conflatenateAll, defineProperties, stringifyPath } from "@ark/util";

get expected() {
return (this.input.expected ??
this.meta?.expected?.(this.input) ??
this.nodeConfig.expected?.(this.input));
if (this.input.expected)
return this.input.expected;
const config = this.meta?.expected ?? this.nodeConfig.expected;
return typeof config === "function" ? config(this.input) : config;
}
get actual() {
return (this.input.actual ??
this.meta?.actual?.(this.data) ??
this.nodeConfig.actual?.(this.data));
if (this.input.actual)
return this.input.actual;
const config = this.meta?.actual ?? this.nodeConfig.actual;
return typeof config === "function" ? config(this.data) : config;
}
get problem() {
return (this.input.problem ??
this.meta?.problem?.(this) ??
this.nodeConfig.problem(this));
if (this.input.problem)
return this.input.problem;
const config = this.meta?.problem ?? this.nodeConfig.problem;
return typeof config === "function" ? config(this) : config;
}
get message() {
return (this.input.message ??
this.meta?.message?.(this) ??
this.nodeConfig.message(this));
if (this.input.message)
return this.input.message;
const config = this.meta?.message ?? this.nodeConfig.message;
return typeof config === "function" ? config(this) : config;
}

@@ -76,0 +80,0 @@ toString() {

@@ -81,3 +81,3 @@ import { type Entry, type Json, type JsonStructure, type KeySet, type arrayIndexOf, type keySetOf, type listable, type requireKeys, type show } from "@ark/util";

export declare const schemaKindsRightOf: <kind extends RootKind>(kind: kind) => schemaKindRightOf<kind>[];
export declare const unionChildKinds: readonly [...("morph" | "intersection" | "unit" | "proto" | "domain")[], "alias"];
export declare const unionChildKinds: readonly [...("intersection" | "morph" | "unit" | "proto" | "domain")[], "alias"];
export type UnionChildKind = (typeof unionChildKinds)[number];

@@ -134,3 +134,2 @@ export declare const morphChildKinds: readonly [...("intersection" | "unit" | "proto" | "domain")[], "alias"];

export interface UnknownAttachments {
alias?: string;
readonly kind: NodeKind;

@@ -137,0 +136,0 @@ readonly impl: UnknownNodeImplementation;

@@ -33,3 +33,3 @@ import { Disjoint } from "./disjoint.js";

}
const isPureIntersection = !ctx.pipe || (!l.includesMorph && !r.includesMorph);
const isPureIntersection = !ctx.pipe || (!l.includesTransform && !r.includesTransform);
if (isPureIntersection && l.equals(r))

@@ -71,3 +71,3 @@ return l;

};
const _pipeNodes = (l, r, ctx) => l.includesMorph || r.includesMorph ?
const _pipeNodes = (l, r, ctx) => l.includesTransform || r.includesTransform ?
ctx.invert ?

@@ -74,0 +74,0 @@ pipeMorphed(r, l, ctx)

@@ -89,3 +89,3 @@ import { ReadonlyPath, type array } from "@ark/util";

queueMorphs(morphs: array<Morph>): void;
finalize(): unknown;
finalize(onFail?: ArkErrors.Handler | null): unknown;
get currentErrorCount(): number;

@@ -92,0 +92,0 @@ get failFast(): boolean;

@@ -103,11 +103,13 @@ import { ReadonlyPath, stringifyPath } from "@ark/util";

}
finalize() {
if (!this.queuedMorphs.length)
return this.hasError() ? this.errors : this.root;
if (typeof this.root === "object" &&
this.root !== null &&
this.config.clone)
this.root = this.config.clone(this.root);
this.applyQueuedMorphs();
return this.hasError() ? this.errors : this.root;
finalize(onFail) {
if (this.queuedMorphs.length) {
if (typeof this.root === "object" &&
this.root !== null &&
this.config.clone)
this.root = this.config.clone(this.root);
this.applyQueuedMorphs();
}
if (this.hasError())
return onFail ? onFail(this.errors) : this.errors;
return this.root;
}
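
With this change, finalize accepts an optional onFail handler and, when errors were collected, returns onFail(errors) instead of the raw errors object. A sketch of a handler a caller might pass (throwOnFail is hypothetical; it assumes, per the signature above, that a handler is simply a function receiving the collected errors):

    // Hypothetical onFail handler: throw instead of returning the errors
    // object from finalize.
    const throwOnFail = (errors: object): never => {
        throw new Error(`validation failed: ${errors}`)
    }

    // ctx.finalize(throwOnFail)  // would throw on invalid data
    // ctx.finalize()             // keeps returning the errors object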

@@ -114,0 +116,0 @@ get currentErrorCount() {

import { BaseConstraint } from "../constraint.ts";
import type { RootSchema, nodeOfKind } from "../kinds.ts";
import { type BaseNode, type DeepNodeTransformContext, type DeepNodeTransformation, type FlatRef } from "../node.ts";
import { type BaseNode, type DeepNodeTransformContext, type DeepNodeTransformation } from "../node.ts";
import type { BaseRoot } from "../roots/root.ts";

@@ -34,6 +34,6 @@ import type { BaseNormalizedSchema, declareNode } from "../shared/declare.ts";

expression: string;
flatRefs: import("../node.ts").FlatRef<BaseRoot<import("../roots/root.ts").InternalRootDeclaration>>[];
traverseAllows: TraverseAllows<object>;
traverseApply: TraverseApply<object>;
protected _transform(mapper: DeepNodeTransformation, ctx: DeepNodeTransformContext): BaseNode | null;
get flatRefs(): FlatRef[];
compile(): void;

@@ -40,0 +40,0 @@ }

@@ -60,2 +60,3 @@ import { append, printable, stringAndSymbolicEntriesOf, throwParseError } from "@ark/util";

expression = `[${this.signature.expression}]: ${this.value.expression}`;
flatRefs = append(this.value.flatRefs.map(ref => flatRef([this.signature, ...ref.path], ref.node)), flatRef([this.signature], this.value));
traverseAllows = (data, ctx) => stringAndSymbolicEntriesOf(data).every(entry => {

@@ -78,5 +79,2 @@ if (this.signature.traverseAllows(entry[0], ctx)) {

}
get flatRefs() {
return append(this.value.flatRefs.map(ref => flatRef([this.signature, ...ref.path], ref.node)), flatRef([this.signature], this.value));
}
compile() {

@@ -83,0 +81,0 @@ // this is currently handled by StructureNode

@@ -0,1 +1,2 @@

import { type requireKeys } from "@ark/util";
import type { Morph } from "../roots/morph.ts";

@@ -19,2 +20,5 @@ import type { BaseRoot } from "../roots/root.ts";

type Node = OptionalNode;
namespace Node {
type withDefault = requireKeys<Node, "default" | "defaultValueMorph" | "defaultValueMorphRef">;
}
}

@@ -25,4 +29,4 @@ export declare class OptionalNode extends BaseProp<"optional"> {

expression: string;
defaultValueMorphs: Morph[];
defaultValueMorphsReference: `$ark.${string}` | `$ark0.${string}` | `$ark1.${string}` | `$ark2.${string}` | `$ark9.${string}` | `$ark4.${string}` | `$ark3.${string}` | `$ark5.${string}` | `$ark6.${string}` | `$ark7.${string}` | `$ark8.${string}` | `$ark${`1${string}` & `${bigint}`}.${string}` | `$ark${`2${string}` & `${bigint}`}.${string}` | `$ark${`9${string}` & `${bigint}`}.${string}` | `$ark${`4${string}` & `${bigint}`}.${string}` | `$ark${`3${string}` & `${bigint}`}.${string}` | `$ark${`5${string}` & `${bigint}`}.${string}` | `$ark${`6${string}` & `${bigint}`}.${string}` | `$ark${`7${string}` & `${bigint}`}.${string}` | `$ark${`8${string}` & `${bigint}`}.${string}`;
defaultValueMorph: Morph | undefined;
defaultValueMorphRef: string | undefined;
}

@@ -43,5 +47,5 @@ export declare const Optional: {

};
export declare const computeDefaultValueMorphs: (key: PropertyKey, value: BaseRoot, defaultInput: unknown) => Morph[];
export declare const computeDefaultValueMorph: (key: PropertyKey, value: BaseRoot, defaultInput: unknown) => Morph;
export declare const assertDefaultValueAssignability: (node: BaseRoot, value: unknown, key: PropertyKey | null) => unknown;
export type writeUnassignableDefaultValueMessage<baseDef extends string, defaultValue extends string> = `Default value ${defaultValue} must be assignable to ${baseDef}`;
export declare const writeNonPrimitiveNonFunctionDefaultValueMessage: (key: PropertyKey | null) => string;
import { hasDomain, isThunk, printable, throwParseError } from "@ark/util";
import { compileSerializedValue } from "../shared/compile.js";
import { ArkErrors } from "../shared/errors.js";
import { implementNode } from "../shared/implement.js";
import { defaultValueSerializer, implementNode } from "../shared/implement.js";
import { registeredReference } from "../shared/registry.js";

@@ -45,6 +45,4 @@ import { traverseKey } from "../shared/traversal.js";

: `${this.compiledKey}?: ${this.value.expression}`;
defaultValueMorphs = this.hasDefault() ?
computeDefaultValueMorphs(this.key, this.value, this.default)
: [];
defaultValueMorphsReference = registeredReference(this.defaultValueMorphs);
defaultValueMorph = getDefaultableMorph(this);
defaultValueMorphRef = this.defaultValueMorph && registeredReference(this.defaultValueMorph);
}

@@ -55,32 +53,35 @@ export const Optional = {

};
export const computeDefaultValueMorphs = (key, value, defaultInput) => {
const defaultableMorphCache = {};
const getDefaultableMorph = (node) => {
if (!node.hasDefault())
return;
const cacheKey = `{${node.compiledKey}: ${node.value.id} = ${defaultValueSerializer(node.default)}}`;
return (defaultableMorphCache[cacheKey] ??= computeDefaultValueMorph(node.key, node.value, node.default));
};
export const computeDefaultValueMorph = (key, value, defaultInput) => {
if (typeof defaultInput === "function") {
return [
// if the value has a morph, pipe context through it
value.includesMorph ?
(data, ctx) => {
traverseKey(key, () => value((data[key] = defaultInput()), ctx), ctx);
return data;
}
: data => {
data[key] = defaultInput();
return data;
}
];
}
// non-functional defaults can be safely cached as long as the morph is
// guaranteed to be pure and the output is primitive
const precomputedMorphedDefault = value.includesMorph ? value.assert(defaultInput) : defaultInput;
return [
hasDomain(precomputedMorphedDefault, "object") ?
// the type signature only allows this if the value was morphed
// if the value has a morph, pipe context through it
return value.includesTransform ?
(data, ctx) => {
traverseKey(key, () => value((data[key] = defaultInput), ctx), ctx);
traverseKey(key, () => value((data[key] = defaultInput()), ctx), ctx);
return data;
}
: data => {
data[key] = precomputedMorphedDefault;
data[key] = defaultInput();
return data;
}
];
};
}
// non-functional defaults can be safely cached as long as the morph is
// guaranteed to be pure and the output is primitive
const precomputedMorphedDefault = value.includesTransform ? value.assert(defaultInput) : defaultInput;
return hasDomain(precomputedMorphedDefault, "object") ?
// the type signature only allows this if the value was morphed
(data, ctx) => {
traverseKey(key, () => value((data[key] = defaultInput), ctx), ctx);
return data;
}
: data => {
data[key] = precomputedMorphedDefault;
return data;
};
};
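
The rewrite above replaces the per-node computeDefaultValueMorphs array with a single cached defaultValueMorph: defaults that serialize identically (same compiled key, same value node id, same serialized default) reuse one morph via defaultableMorphCache. A rough sketch of that memoization pattern in isolation (names are illustrative, not the package's internals):

    // Illustrative memoization by a serialized cache key, mirroring the
    // defaultableMorphCache pattern above: equal keys reuse the same function.
    const cache: Record<string, (data: Record<PropertyKey, unknown>) => unknown> = {}

    const getCachedDefaultMorph = (key: string, defaultValue: unknown) => {
        const cacheKey = `{${key} = ${JSON.stringify(defaultValue)}}`
        return (cache[cacheKey] ??= data => {
            data[key] = defaultValue
            return data
        })
    }

    // getCachedDefaultMorph("count", 0) === getCachedDefaultMorph("count", 0)  // true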

@@ -87,0 +88,0 @@ export const assertDefaultValueAssignability = (node, value, key) => {

@@ -38,7 +38,5 @@ import { type Key } from "@ark/util";

compiledKey: string;
get flatRefs(): FlatRef[];
flatRefs: FlatRef[];
protected _transform(mapper: DeepNodeTransformation, ctx: DeepNodeTransformContext): BaseNode | null;
hasDefault(): this is Optional.Node & {
default: unknown;
};
hasDefault(): this is Optional.Node.withDefault;
traverseAllows: TraverseAllows<object>;

@@ -45,0 +43,0 @@ traverseApply: TraverseApply<object>;

@@ -50,5 +50,3 @@ import { append, printable, throwParseError, unset } from "@ark/util";

compiledKey = typeof this.key === "string" ? this.key : this.serializedKey;
get flatRefs() {
return append(this.value.flatRefs.map(ref => flatRef([this.key, ...ref.path], ref.node)), flatRef([this.key], this.value));
}
flatRefs = append(this.value.flatRefs.map(ref => flatRef([this.key, ...ref.path], ref.node)), flatRef([this.key], this.value));
_transform(mapper, ctx) {

@@ -76,4 +74,2 @@ ctx.path.push(this.key);

ctx.errorFromNodeContext(this.errorContext);
else if (this.hasDefault())
ctx.queueMorphs(this.defaultValueMorphs);
};

@@ -91,5 +87,2 @@ compile(js) {

}
else if (js.traversalKind === "Apply" && this.hasDefault()) {
js.else(() => js.line(`ctx.queueMorphs(${this.defaultValueMorphsReference})`));
}
if (js.traversalKind === "Allows")

@@ -96,0 +89,0 @@ js.return(true);

@@ -54,4 +54,7 @@ import { type array, type satisfy } from "@ark/util";

postfixLength: number;
defaultablesAndOptionals: BaseRoot[];
prevariadic: array<PrevariadicSequenceElement>;
variadicOrPostfix: array<BaseRoot>;
flatRefs: FlatRef[];
protected addFlatRefs(): FlatRef[];
isVariadicOnly: boolean;

@@ -64,9 +67,7 @@ minVariadicLength: number;

impliedSiblings: array<MaxLengthNode | MinLengthNode | ExactLengthNode>;
defaultValueMorphs: Morph[][];
defaultValueMorphsReference: `$ark.${string}` | `$ark0.${string}` | `$ark1.${string}` | `$ark2.${string}` | `$ark9.${string}` | `$ark4.${string}` | `$ark3.${string}` | `$ark5.${string}` | `$ark6.${string}` | `$ark7.${string}` | `$ark8.${string}` | `$ark${`1${string}` & `${bigint}`}.${string}` | `$ark${`2${string}` & `${bigint}`}.${string}` | `$ark${`9${string}` & `${bigint}`}.${string}` | `$ark${`4${string}` & `${bigint}`}.${string}` | `$ark${`3${string}` & `${bigint}`}.${string}` | `$ark${`5${string}` & `${bigint}`}.${string}` | `$ark${`6${string}` & `${bigint}`}.${string}` | `$ark${`7${string}` & `${bigint}`}.${string}` | `$ark${`8${string}` & `${bigint}`}.${string}`;
includesDefaultable(): boolean;
defaultValueMorphs: Morph[];
defaultValueMorphsReference: `$ark.${string}` | `$ark0.${string}` | `$ark${`2${string}` & `${bigint}`}.${string}` | `$ark${`1${string}` & `${bigint}`}.${string}` | `$ark${`3${string}` & `${bigint}`}.${string}` | `$ark${`4${string}` & `${bigint}`}.${string}` | `$ark${`5${string}` & `${bigint}`}.${string}` | `$ark${`6${string}` & `${bigint}`}.${string}` | `$ark${`7${string}` & `${bigint}`}.${string}` | `$ark${`8${string}` & `${bigint}`}.${string}` | `$ark${`9${string}` & `${bigint}`}.${string}` | undefined;
protected elementAtIndex(data: array, index: number): SequenceElement;
traverseAllows: TraverseAllows<array>;
traverseApply: TraverseApply<array>;
get flatRefs(): FlatRef[];
get element(): BaseRoot;

@@ -73,0 +74,0 @@ compile(js: NodeCompiler): void;

@@ -10,3 +10,3 @@ import { append, conflatenate, printable, throwInternalError, throwParseError } from "@ark/util";

import { traverseKey } from "../shared/traversal.js";
import { assertDefaultValueAssignability, computeDefaultValueMorphs } from "./optional.js";
import { assertDefaultValueAssignability, computeDefaultValueMorph } from "./optional.js";
import { writeDefaultIntersectionMessage } from "./prop.js";

@@ -195,6 +195,22 @@ const implementation = implementNode({

postfixLength = this.postfix?.length ?? 0;
prevariadic = this.tuple.filter(el => el.kind === "prefix" ||
el.kind === "defaultables" ||
el.kind === "optionals");
defaultablesAndOptionals = [];
prevariadic = this.tuple.filter((el) => {
if (el.kind === "defaultables" || el.kind === "optionals") {
// populate defaultablesAndOptionals while filtering prevariadic
this.defaultablesAndOptionals.push(el.node);
return true;
}
return el.kind === "prefix";
});
variadicOrPostfix = conflatenate(this.variadic && [this.variadic], this.postfix);
// have to wait until prevariadic and variadicOrPostfix are set to calculate
flatRefs = this.addFlatRefs();
addFlatRefs() {
appendUniqueFlatRefs(this.flatRefs, this.prevariadic.flatMap((element, i) => append(element.node.flatRefs.map(ref => flatRef([`${i}`, ...ref.path], ref.node)), flatRef([`${i}`], element.node))));
appendUniqueFlatRefs(this.flatRefs, this.variadicOrPostfix.flatMap(element =>
// a postfix index can't be directly represented as a type
// key, so we just use the same matcher for variadic
append(element.flatRefs.map(ref => flatRef([$ark.intrinsic.nonNegativeIntegerString.internal, ...ref.path], ref.node)), flatRef([$ark.intrinsic.nonNegativeIntegerString.internal], element))));
return this.flatRefs;
}
isVariadicOnly = this.prevariadic.length + this.postfixLength === 0;

@@ -216,8 +232,6 @@ minVariadicLength = this.inner.minVariadicLength ?? 0;

: [];
defaultValueMorphs = this.defaultables?.map(([node, defaultValue], i) => computeDefaultValueMorphs(this.prefixLength + i, node, defaultValue)) ?? [];
defaultValueMorphsReference = registeredReference(this.defaultValueMorphs);
includesDefaultable() {
// this is called before initialization so must not reference node properties
return this.inner.defaultables !== undefined;
}
defaultValueMorphs = getDefaultableMorphs(this);
defaultValueMorphsReference = this.defaultValueMorphs.length ?
registeredReference(this.defaultValueMorphs)
: undefined;
elementAtIndex(data, index) {

@@ -248,14 +262,3 @@ if (index < this.prevariadic.length)

}
for (; i < this.prefixLength + this.defaultablesLength; i++)
ctx.queueMorphs(this.defaultValueMorphs[i - this.prefixLength]);
};
get flatRefs() {
const refs = [];
appendUniqueFlatRefs(refs, this.prevariadic.flatMap((element, i) => append(element.node.flatRefs.map(ref => flatRef([`${i}`, ...ref.path], ref.node)), flatRef([`${i}`], element.node))));
appendUniqueFlatRefs(refs, this.variadicOrPostfix.flatMap(element =>
// a postfix index can't be directly represented as a type
// key, so we just use the same matcher for variadic
append(element.flatRefs.map(ref => flatRef([$ark.intrinsic.nonNegativeIntegerString.internal, ...ref.path], ref.node)), flatRef([$ark.intrinsic.nonNegativeIntegerString.internal], element))));
return refs;
}
get element() {

@@ -267,11 +270,4 @@ return this.cacheGetter("element", this.$.node("union", this.children));

this.prefix?.forEach((node, i) => js.traverseKey(`${i}`, `data[${i}]`, node));
this.defaultables?.forEach((node, i) => {
this.defaultablesAndOptionals.forEach((node, i) => {
const dataIndex = `${i + this.prefixLength}`;
js.if(`${dataIndex} >= ${js.data}.length`, () => js.traversalKind === "Allows" ?
js.return(true)
: js.return(`ctx.queueMorphs(${this.defaultValueMorphsReference}[${i}])`));
js.traverseKey(dataIndex, `data[${dataIndex}]`, node[0]);
});
this.optionals?.forEach((node, i) => {
const dataIndex = `${i + this.prefixLength + this.defaultablesLength}`;
js.if(`${dataIndex} >= ${js.data}.length`, () => js.traversalKind === "Allows" ? js.return(true) : js.return());

@@ -329,2 +325,17 @@ js.traverseKey(dataIndex, `data[${dataIndex}]`, node);

}
const defaultableMorphsCache = {};
const getDefaultableMorphs = (node) => {
if (!node.defaultables)
return [];
const morphs = [];
let cacheKey = "[";
const lastDefaultableIndex = node.prefixLength + node.defaultablesLength - 1;
for (let i = node.prefixLength; i <= lastDefaultableIndex; i++) {
const [elementNode, defaultValue] = node.defaultables[i - node.prefixLength];
morphs.push(computeDefaultValueMorph(i, elementNode, defaultValue));
cacheKey += `${i}: ${elementNode.id} = ${defaultValueSerializer(defaultValue)}, `;
}
cacheKey += "]";
return (defaultableMorphsCache[cacheKey] ??= morphs);
};
export const Sequence = {

@@ -401,3 +412,3 @@ implementation,

// but not trivial to serialize postfix elements as keys
kind === "prefix" ? `${s.result.length}` : `-${lTail.length + 1}`, "required"));
kind === "prefix" ? s.result.length : `-${lTail.length + 1}`, "required"));
s.result = [...s.result, { kind, node: $ark.intrinsic.never.internal }];

@@ -404,0 +415,0 @@ }

import { type array, type Key, type listable } from "@ark/util";
import { BaseConstraint } from "../constraint.ts";
import type { GettableKeyOrNode, KeyOrKeyNode } from "../node.ts";
import type { Morph } from "../roots/morph.ts";
import { type BaseRoot } from "../roots/root.ts";

@@ -78,3 +79,8 @@ import type { BaseScope } from "../scope.ts";

protected _traverse: (traversalKind: TraversalKind, data: object, ctx: InternalTraversal) => boolean;
compile(js: NodeCompiler): void;
get defaultable(): Optional.Node.withDefault[];
declaresKey: (k: Key) => boolean;
declaresKeyRef: RegisteredReference;
get structuralMorph(): Morph | undefined;
structuralMorphRef: RegisteredReference | undefined;
compile(js: NodeCompiler): unknown;
protected compileExhaustiveEntry(js: NodeCompiler): NodeCompiler;

@@ -81,0 +87,0 @@ reduceJsonSchema(schema: JsonSchema.Structure): JsonSchema.Structure;

import { append, conflatenate, flatMorph, printable, spliterate, throwParseError } from "@ark/util";
import { BaseConstraint, constraintKeyParser, flattenConstraints, intersectConstraints } from "../constraint.js";
import { intrinsic } from "../intrinsic.js";
import { typeOrTermExtends } from "../roots/root.js";

@@ -12,7 +13,6 @@ import { Disjoint } from "../shared/disjoint.js";

import { Optional } from "./optional.js";
import { arrayIndexMatcherReference } from "./shared.js";
const createStructuralWriter = (childStringProp) => (node) => {
if (node.props.length || node.index) {
const parts = node.index?.map(String) ?? [];
node.props.forEach(node => parts.push(node[childStringProp]));
const parts = node.index?.map(index => index[childStringProp]) ?? [];
node.props.forEach(prop => parts.push(prop[childStringProp]));
if (node.undeclared)

@@ -351,3 +351,3 @@ parts.push(`+ (undeclared): ${node.undeclared}`);

}
if (!requireExhasutiveTraversal(this, traversalKind))
if (!this.index && this.undeclared !== "reject")
return true;

@@ -358,3 +358,2 @@ const keys = Object.keys(data);

const k = keys[i];
let matched = false;
if (this.index) {

@@ -373,40 +372,36 @@ for (const node of this.index) {

}
matched = true;
}
}
}
if (this.undeclared) {
matched ||= k in this.propsByKey;
matched ||=
this.sequence !== undefined &&
typeof k === "string" &&
$ark.intrinsic.nonNegativeIntegerString.allows(k);
if (!matched) {
if (traversalKind === "Allows")
return false;
if (this.undeclared === "reject") {
ctx.errorFromNodeContext({
// TODO: this should have its own error code
code: "predicate",
expected: "removed",
actual: "",
relativePath: [k],
meta: this.meta
});
}
else {
ctx.queueMorphs([
data => {
delete data[k];
return data;
}
]);
}
if (ctx.failFast)
return false;
}
if (this.undeclared === "reject" && !this.declaresKey(k)) {
if (traversalKind === "Allows")
return false;
ctx.errorFromNodeContext({
// TODO: this should have its own error code
code: "predicate",
expected: "removed",
actual: "",
relativePath: [k],
meta: this.meta
});
if (ctx.failFast)
return false;
}
}
if (this.structuralMorph && !ctx.hasError())
ctx.queueMorphs([this.structuralMorph]);
return true;
};
get defaultable() {
return this.cacheGetter("defaultable", this.optional?.filter(o => o.hasDefault()) ?? []);
}
declaresKey = (k) => k in this.propsByKey ||
this.index?.some(n => n.signature.allows(k)) ||
(this.sequence !== undefined &&
$ark.intrinsic.nonNegativeIntegerString.allows(k));
declaresKeyRef = registeredReference(this.declaresKey);
get structuralMorph() {
return this.cacheGetter("structuralMorph", getPossibleMorph(this));
}
structuralMorphRef = this.structuralMorph && registeredReference(this.structuralMorph);
compile(js) {

@@ -425,3 +420,3 @@ if (js.traversalKind === "Apply")

}
if (requireExhasutiveTraversal(this, js.traversalKind)) {
if (this.index || this.undeclared === "reject") {
js.const("keys", "Object.keys(data)");

@@ -432,32 +427,22 @@ js.line("keys.push(...Object.getOwnPropertySymbols(data))");

if (js.traversalKind === "Allows")
js.return(true);
return js.return(true);
// always queue deleteUndeclared on valid traversal for "delete"
if (this.structuralMorphRef) {
js.if("!ctx.hasError()", () => js.line(`ctx.queueMorphs([${this.structuralMorphRef}])`));
}
}
compileExhaustiveEntry(js) {
js.const("k", "keys[i]");
if (this.undeclared)
js.let("matched", false);
this.index?.forEach(node => {
js.if(`${js.invoke(node.signature, { arg: "k", kind: "Allows" })}`, () => {
js.traverseKey("k", "data[k]", node.value);
if (this.undeclared)
js.set("matched", true);
return js;
});
js.if(`${js.invoke(node.signature, { arg: "k", kind: "Allows" })}`, () => js.traverseKey("k", "data[k]", node.value));
});
if (this.undeclared) {
if (this.props?.length !== 0)
js.line(`matched ||= k in ${this.propsByKeyReference}`);
if (this.sequence) {
js.line(`matched ||= typeof k === "string" && ${arrayIndexMatcherReference}.test(k)`);
}
js.if("!matched", () => {
if (this.undeclared === "reject") {
js.if(`!${this.declaresKeyRef}(k)`, () => {
if (js.traversalKind === "Allows")
return js.return(false);
return this.undeclared === "reject" ?
js
.line(
// TODO: should have its own error code
`ctx.errorFromNodeContext({ code: "predicate", expected: "removed", actual: "", relativePath: [k], meta: ${this.compiledMeta} })`)
.if("ctx.failFast", () => js.return())
: js.line(`ctx.queueMorphs([data => { delete data[k]; return data }])`);
return js
.line(
// TODO: should have its own error code
`ctx.errorFromNodeContext({ code: "predicate", expected: "removed", actual: "", relativePath: [k], meta: ${this.compiledMeta} })`)
.if("ctx.failFast", () => js.return());
});

@@ -513,2 +498,48 @@ }

}
const defaultableMorphsCache = {};
const getPossibleMorph = (
// important to only use attached props since this is referenced from
// its own super class constructor (defaultable + keyof are declared the same way and safe)
node) => {
let cacheKey = "";
for (let i = 0; i < node.defaultable.length; i++)
cacheKey += node.defaultable[i].defaultValueMorphRef;
if (node.sequence?.defaultValueMorphsReference)
cacheKey += node.sequence?.defaultValueMorphsReference;
if (node.undeclared === "delete") {
cacheKey += "delete !(";
node.required?.forEach(n => (cacheKey += n.compiledKey + " | "));
node.optional?.forEach(n => (cacheKey += n.compiledKey + " | "));
node.index?.forEach(index => (cacheKey += index.signature.id + " | "));
if (node.sequence) {
if (node.sequence.maxLength === null)
// for an arbitrary length array, the breakdown of
// optional, required variadic etc. elements doesn't matter-
// no index will be deleted
cacheKey += intrinsic.nonNegativeIntegerString.id;
// otherwise, add keys based on length
else {
cacheKey += node.sequence.tuple.forEach((_, i) => (cacheKey += i + " | "));
}
}
cacheKey += ")";
}
if (!cacheKey)
return undefined;
return (defaultableMorphsCache[cacheKey] ??= (data, ctx) => {
for (let i = 0; i < node.defaultable.length; i++) {
if (!(node.defaultable[i].key in data))
node.defaultable[i].defaultValueMorph(data, ctx);
}
if (node.sequence?.defaultables) {
for (let i = data.length - node.sequence.prefixLength; i < node.sequence.defaultables.length; i++)
node.sequence.defaultValueMorphs[i](data, ctx);
}
if (node.undeclared === "delete")
for (const k in data)
if (!node.declaresKey(k))
delete data[k];
return data;
});
};
export const Structure = {
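
getPossibleMorph above consolidates what were previously separate queued morphs (per-key defaults, sequence defaults, undeclared-key deletion) into a single structuralMorph per structure node, cached by a composite string key and registered for compiled traversal via registeredReference. A condensed sketch of that single-pass shape (hypothetical helper and field names, not the actual node API):

    // Hypothetical single-pass structural morph: apply missing defaults, then
    // strip keys the node doesn't declare, mirroring the consolidated logic above.
    interface StructureLike {
        defaults: Record<string, unknown>
        declaresKey: (k: string) => boolean
        deleteUndeclared: boolean
    }

    const structuralMorphFor =
        (node: StructureLike) => (data: Record<string, unknown>) => {
            for (const k in node.defaults)
                if (!(k in data)) data[k] = node.defaults[k]
            if (node.deleteUndeclared)
                for (const k in data) if (!node.declaresKey(k)) delete data[k]
            return data
        }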

@@ -518,11 +549,2 @@ implementation,

};
const requireExhasutiveTraversal = (node, traversalKind) => {
if (node.index || node.undeclared === "reject")
return true;
// when applying key deletion, we must queue morphs for all undeclared keys
// when checking whether an input is allowed, they are irrelevant because it always will be
if (node.undeclared === "delete" && traversalKind === "Apply")
return true;
return false;
};
const indexerToKey = (indexable) => {

@@ -529,0 +551,0 @@ if (hasArkKind(indexable, "root") && indexable.hasKind("unit"))

{
"name": "@ark/schema",
"version": "0.39.0",
"version": "0.40.0",
"license": "MIT",

@@ -32,3 +32,3 @@ "author": {

"dependencies": {
"@ark/util": "0.39.0"
"@ark/util": "0.40.0"
},

@@ -35,0 +35,0 @@ "publishConfig": {

@@ -12,3 +12,3 @@ # @ark/schema

- Basis: this is the base type to which refinements like props are applied. It is one of three things, getting narrower as you move down the list:
- Domain: `"string" | "number" | "bigint" | "object" | "symbol"` parallels built-in TS keywords for non-enumerable value sets
- Domain: `"string" | "number" | "bigint" | "object" | "symbol"` parallels builtin TS keywords for non-enumerable value sets
- Proto: Must be an `instanceof` some class (implies domain `"object"`)

@@ -15,0 +15,0 @@ - Unit: Must `===` some value (can be intersected with any other constraint and reduced to itself or a disjoint)
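
The README excerpt above describes the three basis kinds a schema can be built on. As a plain-TypeScript illustration of the distinction (not the @ark/schema node API), the three checks correspond roughly to:

    // Rough illustration of the three basis kinds described in the README diff:
    const matchesDomain = (data: unknown) => typeof data === "string"  // Domain
    const matchesProto = (data: unknown) => data instanceof Date       // Proto (implies domain "object")
    const matchesUnit = (data: unknown) => data === 42                 // Unit: exact === comparison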
