@effect/schema
Comparing version 0.0.0-snapshot-c0ae728e57df2c572ea803e1bb7121088cd67b49 to 0.0.0-snapshot-e1ebaaa379fd8da009fc70115350394e6295b9d4
@@ -6,12 +6,11 @@ "use strict"; | ||
}); | ||
exports.stringConstraints = exports.numberConstraints = exports.make = exports.integerConstraints = exports.getConstraints = exports.combineConstraints = exports.bigintConstraints = exports.arrayConstraints = exports.arbitrary = exports.ArbitraryHookId = void 0; | ||
exports.makeLazy = exports.make = exports.getConstraints = exports.combineConstraints = exports.arbitrary = exports.StringConstraints = exports.NumberConstraints = exports.IntegerConstraints = exports.BigIntConstraints = exports.ArrayConstraints = exports.ArbitraryHookId = void 0; | ||
var Arr = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Array")); | ||
var Option = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Option")); | ||
var Predicate = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Predicate")); | ||
var ReadonlyArray = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/ReadonlyArray")); | ||
var AST = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./AST.js")); | ||
var Internal = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/ast.js")); | ||
var filters = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/filters.js")); | ||
var hooks = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/hooks.js")); | ||
var InternalSchema = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/schema.js")); | ||
var Parser = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./Parser.js")); | ||
var FastCheck = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./FastCheck.js")); | ||
var errors_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/errors.js")); | ||
var filters_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/filters.js")); | ||
var util_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/util.js")); | ||
function _getRequireWildcardCache(e) { | ||
@@ -36,3 +35,3 @@ if ("function" != typeof WeakMap) return null; | ||
a = Object.defineProperty && Object.getOwnPropertyDescriptor; | ||
for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { | ||
for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { | ||
var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; | ||
@@ -51,3 +50,3 @@ i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; | ||
*/ | ||
const ArbitraryHookId = exports.ArbitraryHookId = hooks.ArbitraryHookId; | ||
const ArbitraryHookId = exports.ArbitraryHookId = /*#__PURE__*/Symbol.for("@effect/schema/ArbitraryHookId"); | ||
/** | ||
@@ -57,4 +56,14 @@ * @category annotations | ||
*/ | ||
const arbitrary = handler => self => InternalSchema.make(AST.setAnnotation(self.ast, ArbitraryHookId, handler)); | ||
const arbitrary = handler => self => self.annotations({ | ||
[ArbitraryHookId]: handler | ||
}); | ||
/** | ||
* Returns a LazyArbitrary for the `A` type of the provided schema. | ||
* | ||
* @category arbitrary | ||
* @since 1.0.0 | ||
*/ | ||
exports.arbitrary = arbitrary; | ||
const makeLazy = schema => go(schema.ast, {}, []); | ||
/** | ||
* Returns a fast-check Arbitrary for the `A` type of the provided schema. | ||
@@ -65,4 +74,4 @@ * | ||
*/ | ||
exports.arbitrary = arbitrary; | ||
const make = schema => go(schema.ast, {}); | ||
exports.makeLazy = makeLazy; | ||
const make = schema => makeLazy(schema)(FastCheck); | ||
exports.make = make; | ||
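// Added usage sketch (not part of the compiled diff above): `makeLazy` now returns a
// LazyArbitrary — a function from the fast-check module to an Arbitrary — and `make`
// is simply `makeLazy(schema)(FastCheck)`, as the compiled definitions above show.
// `Schema.NumberFromString` is assumed to exist in this snapshot's Schema module; the
// other names come from exports visible in this diff.
const { Schema, Arbitrary, FastCheck } = require("@effect/schema")

const lazy = Arbitrary.makeLazy(Schema.NumberFromString) // (fc) => Arbitrary<number>
const arb = lazy(FastCheck)                              // same Arbitrary that make() builds
FastCheck.sample(arb, 3)                                 // e.g. [0.5, -3, 42]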
@@ -85,3 +94,3 @@ const depthSize = 1; | ||
const getHook = /*#__PURE__*/AST.getAnnotation(ArbitraryHookId); | ||
const getRefinementFromArbitrary = (ast, options) => { | ||
const getRefinementFromArbitrary = (ast, options, path) => { | ||
const constraints = combineConstraints(options.constraints, getConstraints(ast)); | ||
@@ -91,5 +100,6 @@ return go(ast.from, constraints ? { | ||
constraints | ||
} : options); | ||
} : options, path); | ||
}; | ||
const go = (ast, options) => { | ||
const getArbitraryErrorMessage = (message, path) => errors_.getErrorMessageWithPath(`cannot build an Arbitrary for ${message}`, path); | ||
const go = (ast, options, path) => { | ||
const hook = getHook(ast); | ||
@@ -99,5 +109,5 @@ if (Option.isSome(hook)) { | ||
case "Declaration": | ||
return hook.value(...ast.typeParameters.map(p => go(p, options))); | ||
return hook.value(...ast.typeParameters.map(p => go(p, options, path))); | ||
case "Refinement": | ||
return hook.value(getRefinementFromArbitrary(ast, options)); | ||
return hook.value(getRefinementFromArbitrary(ast, options, path)); | ||
default: | ||
@@ -110,3 +120,3 @@ return hook.value(); | ||
{ | ||
throw new Error(`cannot build an Arbitrary for a declaration without annotations (${AST.format(ast)})`); | ||
throw new Error(getArbitraryErrorMessage(`a declaration without annotations (${ast})`, path)); | ||
} | ||
@@ -122,3 +132,3 @@ case "Literal": | ||
return () => { | ||
throw new Error("cannot build an Arbitrary for `never`"); | ||
throw new Error(getArbitraryErrorMessage("`never`", path)); | ||
}; | ||
@@ -187,8 +197,9 @@ case "UnknownKeyword": | ||
} | ||
case "Tuple": | ||
case "TupleType": | ||
{ | ||
const elements = []; | ||
let hasOptionals = false; | ||
let i = 0; | ||
for (const element of ast.elements) { | ||
elements.push(go(element.type, options)); | ||
elements.push(go(element.type, options, path.concat(i++))); | ||
if (element.isOptional) { | ||
@@ -198,3 +209,3 @@ hasOptionals = true; | ||
} | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(e => go(e, options))); | ||
const rest = ast.rest.map(e => go(e, options, path)); | ||
return fc => { | ||
@@ -219,4 +230,4 @@ // --------------------------------------------- | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const [head, ...tail] = rest.value; | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest; | ||
const arb = head(fc); | ||
@@ -250,4 +261,4 @@ const constraints = options.constraints; | ||
{ | ||
const propertySignaturesTypes = ast.propertySignatures.map(f => go(f.type, options)); | ||
const indexSignatures = ast.indexSignatures.map(is => [go(is.parameter, options), go(is.type, options)]); | ||
const propertySignaturesTypes = ast.propertySignatures.map(ps => go(ps.type, options, path.concat(ps.name))); | ||
const indexSignatures = ast.indexSignatures.map(is => [go(is.parameter, options, path), go(is.type, options, path)]); | ||
return fc => { | ||
@@ -288,3 +299,3 @@ const arbs = {}; | ||
{ | ||
const types = ast.types.map(t => go(t, options)); | ||
const types = ast.types.map(t => go(t, options, path)); | ||
return fc => fc.oneof({ | ||
@@ -297,3 +308,3 @@ depthSize | ||
if (ast.enums.length === 0) { | ||
throw new Error("cannot build an Arbitrary for an empty enum"); | ||
throw new Error(getArbitraryErrorMessage("an empty enum", path)); | ||
} | ||
@@ -304,64 +315,99 @@ return fc => fc.oneof(...ast.enums.map(([_, value]) => fc.constant(value))); | ||
{ | ||
const from = getRefinementFromArbitrary(ast, options); | ||
return fc => from(fc).filter(a => Option.isNone(ast.filter(a, Parser.defaultParseOption, ast))); | ||
const from = getRefinementFromArbitrary(ast, options, path); | ||
return fc => from(fc).filter(a => Option.isNone(ast.filter(a, AST.defaultParseOption, ast))); | ||
} | ||
case "Suspend": | ||
{ | ||
const get = Internal.memoizeThunk(() => go(ast.f(), { | ||
const get = util_.memoizeThunk(() => go(ast.f(), { | ||
...options, | ||
isSuspend: true | ||
})); | ||
}, path)); | ||
return fc => fc.constant(null).chain(() => get()(fc)); | ||
} | ||
case "Transform": | ||
return go(ast.to, options); | ||
case "Transformation": | ||
return go(ast.to, options, path); | ||
} | ||
}; | ||
/** @internal */ | ||
const numberConstraints = constraints => { | ||
if (Predicate.isNumber(constraints.min)) { | ||
constraints.min = Math.fround(constraints.min); | ||
class NumberConstraints { | ||
_tag = "NumberConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isNumber(options.min)) { | ||
this.constraints.min = Math.fround(options.min); | ||
} | ||
if (Predicate.isNumber(options.max)) { | ||
this.constraints.max = Math.fround(options.max); | ||
} | ||
if (Predicate.isBoolean(options.noNaN)) { | ||
this.constraints.noNaN = options.noNaN; | ||
} | ||
if (Predicate.isBoolean(options.noDefaultInfinity)) { | ||
this.constraints.noDefaultInfinity = options.noDefaultInfinity; | ||
} | ||
} | ||
if (Predicate.isNumber(constraints.max)) { | ||
constraints.max = Math.fround(constraints.max); | ||
} | ||
/** @internal */ | ||
exports.NumberConstraints = NumberConstraints; | ||
class StringConstraints { | ||
_tag = "StringConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isNumber(options.minLength)) { | ||
this.constraints.minLength = options.minLength; | ||
} | ||
if (Predicate.isNumber(options.maxLength)) { | ||
this.constraints.maxLength = options.maxLength; | ||
} | ||
} | ||
return { | ||
_tag: "NumberConstraints", | ||
constraints | ||
}; | ||
}; | ||
} | ||
/** @internal */ | ||
exports.numberConstraints = numberConstraints; | ||
const stringConstraints = constraints => { | ||
return { | ||
_tag: "StringConstraints", | ||
constraints | ||
}; | ||
}; | ||
exports.StringConstraints = StringConstraints; | ||
class IntegerConstraints { | ||
_tag = "IntegerConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isNumber(options.min)) { | ||
this.constraints.min = options.min; | ||
} | ||
if (Predicate.isNumber(options.max)) { | ||
this.constraints.max = options.max; | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
exports.stringConstraints = stringConstraints; | ||
const integerConstraints = constraints => { | ||
return { | ||
_tag: "IntegerConstraints", | ||
constraints | ||
}; | ||
}; | ||
exports.IntegerConstraints = IntegerConstraints; | ||
class ArrayConstraints { | ||
_tag = "ArrayConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isNumber(options.minLength)) { | ||
this.constraints.minLength = options.minLength; | ||
} | ||
if (Predicate.isNumber(options.maxLength)) { | ||
this.constraints.maxLength = options.maxLength; | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
exports.integerConstraints = integerConstraints; | ||
const arrayConstraints = constraints => { | ||
return { | ||
_tag: "ArrayConstraints", | ||
constraints | ||
}; | ||
}; | ||
exports.ArrayConstraints = ArrayConstraints; | ||
class BigIntConstraints { | ||
_tag = "BigIntConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isBigInt(options.min)) { | ||
this.constraints.min = options.min; | ||
} | ||
if (Predicate.isBigInt(options.max)) { | ||
this.constraints.max = options.max; | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
exports.arrayConstraints = arrayConstraints; | ||
const bigintConstraints = constraints => { | ||
return { | ||
_tag: "BigIntConstraints", | ||
constraints | ||
}; | ||
}; | ||
/** @internal */ | ||
exports.bigintConstraints = bigintConstraints; | ||
exports.BigIntConstraints = BigIntConstraints; | ||
const getConstraints = ast => { | ||
@@ -371,84 +417,35 @@ const TypeAnnotationId = ast.annotations[AST.TypeAnnotationId]; | ||
switch (TypeAnnotationId) { | ||
// int | ||
case filters_.IntTypeId: | ||
return new IntegerConstraints({}); | ||
// number | ||
case filters.GreaterThanTypeId: | ||
case filters.GreaterThanOrEqualToTypeId: | ||
return numberConstraints({ | ||
min: jsonSchema.exclusiveMinimum ?? jsonSchema.minimum | ||
}); | ||
case filters.LessThanTypeId: | ||
case filters.LessThanOrEqualToTypeId: | ||
return numberConstraints({ | ||
case filters_.GreaterThanTypeId: | ||
case filters_.GreaterThanOrEqualToTypeId: | ||
case filters_.LessThanTypeId: | ||
case filters_.LessThanOrEqualToTypeId: | ||
case filters_.BetweenTypeId: | ||
return new NumberConstraints({ | ||
min: jsonSchema.exclusiveMinimum ?? jsonSchema.minimum, | ||
max: jsonSchema.exclusiveMaximum ?? jsonSchema.maximum | ||
}); | ||
case filters.IntTypeId: | ||
return integerConstraints({}); | ||
case filters.BetweenTypeId: | ||
{ | ||
const min = jsonSchema.minimum; | ||
const max = jsonSchema.maximum; | ||
const constraints = {}; | ||
if (Predicate.isNumber(min)) { | ||
constraints.min = min; | ||
} | ||
if (Predicate.isNumber(max)) { | ||
constraints.max = max; | ||
} | ||
return numberConstraints(constraints); | ||
} | ||
// bigint | ||
case filters.GreaterThanBigintTypeId: | ||
case filters.GreaterThanOrEqualToBigintTypeId: | ||
case filters_.GreaterThanBigintTypeId: | ||
case filters_.GreaterThanOrEqualToBigIntTypeId: | ||
case filters_.LessThanBigIntTypeId: | ||
case filters_.LessThanOrEqualToBigIntTypeId: | ||
case filters_.BetweenBigintTypeId: | ||
{ | ||
const params = ast.annotations[TypeAnnotationId]; | ||
return bigintConstraints({ | ||
min: params.min | ||
}); | ||
const constraints = ast.annotations[TypeAnnotationId]; | ||
return new BigIntConstraints(constraints); | ||
} | ||
case filters.LessThanBigintTypeId: | ||
case filters.LessThanOrEqualToBigintTypeId: | ||
{ | ||
const params = ast.annotations[TypeAnnotationId]; | ||
return bigintConstraints({ | ||
max: params.max | ||
}); | ||
} | ||
case filters.BetweenBigintTypeId: | ||
{ | ||
const params = ast.annotations[TypeAnnotationId]; | ||
const min = params.min; | ||
const max = params.max; | ||
const constraints = {}; | ||
if (Predicate.isBigInt(min)) { | ||
constraints.min = min; | ||
} | ||
if (Predicate.isBigInt(max)) { | ||
constraints.max = max; | ||
} | ||
return bigintConstraints(constraints); | ||
} | ||
// string | ||
case filters.MinLengthTypeId: | ||
return stringConstraints({ | ||
minLength: jsonSchema.minLength | ||
}); | ||
case filters.MaxLengthTypeId: | ||
return stringConstraints({ | ||
maxLength: jsonSchema.maxLength | ||
}); | ||
case filters.LengthTypeId: | ||
return stringConstraints({ | ||
minLength: jsonSchema.minLength, | ||
maxLength: jsonSchema.maxLength | ||
}); | ||
case filters_.MinLengthTypeId: | ||
case filters_.MaxLengthTypeId: | ||
case filters_.LengthTypeId: | ||
return new StringConstraints(jsonSchema); | ||
// array | ||
case filters.MinItemsTypeId: | ||
return arrayConstraints({ | ||
minLength: jsonSchema.minItems | ||
}); | ||
case filters.MaxItemsTypeId: | ||
return arrayConstraints({ | ||
maxLength: jsonSchema.maxItems | ||
}); | ||
case filters.ItemsCountTypeId: | ||
return arrayConstraints({ | ||
case filters_.MinItemsTypeId: | ||
case filters_.MaxItemsTypeId: | ||
case filters_.ItemsCountTypeId: | ||
return new ArrayConstraints({ | ||
minLength: jsonSchema.minItems, | ||
@@ -473,17 +470,6 @@ maxLength: jsonSchema.maxItems | ||
case "ArrayConstraints": | ||
{ | ||
const c = { | ||
...c1.constraints, | ||
...c2.constraints | ||
}; | ||
const minLength = getMax(c1.constraints.minLength, c2.constraints.minLength); | ||
if (Predicate.isNumber(minLength)) { | ||
c.minLength = minLength; | ||
} | ||
const maxLength = getMin(c1.constraints.maxLength, c2.constraints.maxLength); | ||
if (Predicate.isNumber(maxLength)) { | ||
c.maxLength = maxLength; | ||
} | ||
return arrayConstraints(c); | ||
} | ||
return new ArrayConstraints({ | ||
minLength: getMax(c1.constraints.minLength, c2.constraints.minLength), | ||
maxLength: getMin(c1.constraints.maxLength, c2.constraints.maxLength) | ||
}); | ||
} | ||
@@ -496,32 +482,13 @@ break; | ||
case "NumberConstraints": | ||
{ | ||
const c = { | ||
...c1.constraints, | ||
...c2.constraints | ||
}; | ||
const min = getMax(c1.constraints.min, c2.constraints.min); | ||
if (Predicate.isNumber(min)) { | ||
c.min = min; | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max); | ||
if (Predicate.isNumber(max)) { | ||
c.max = max; | ||
} | ||
return numberConstraints(c); | ||
} | ||
return new NumberConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max), | ||
noNaN: getOr(c1.constraints.noNaN, c2.constraints.noNaN), | ||
noDefaultInfinity: getOr(c1.constraints.noDefaultInfinity, c2.constraints.noDefaultInfinity) | ||
}); | ||
case "IntegerConstraints": | ||
{ | ||
const c = { | ||
...c2.constraints | ||
}; | ||
const min = getMax(c1.constraints.min, c2.constraints.min); | ||
if (Predicate.isNumber(min)) { | ||
c.min = min; | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max); | ||
if (Predicate.isNumber(max)) { | ||
c.max = max; | ||
} | ||
return integerConstraints(c); | ||
} | ||
return new IntegerConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max) | ||
}); | ||
} | ||
@@ -534,17 +501,6 @@ break; | ||
case "BigIntConstraints": | ||
{ | ||
const c = { | ||
...c1.constraints, | ||
...c2.constraints | ||
}; | ||
const min = getMax(c1.constraints.min, c2.constraints.min); | ||
if (Predicate.isBigInt(min)) { | ||
c.min = min; | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max); | ||
if (Predicate.isBigInt(max)) { | ||
c.max = max; | ||
} | ||
return bigintConstraints(c); | ||
} | ||
return new BigIntConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max) | ||
}); | ||
} | ||
@@ -557,17 +513,6 @@ break; | ||
case "StringConstraints": | ||
{ | ||
const c = { | ||
...c1.constraints, | ||
...c2.constraints | ||
}; | ||
const minLength = getMax(c1.constraints.minLength, c2.constraints.minLength); | ||
if (Predicate.isNumber(minLength)) { | ||
c.minLength = minLength; | ||
} | ||
const maxLength = getMin(c1.constraints.maxLength, c2.constraints.maxLength); | ||
if (Predicate.isNumber(maxLength)) { | ||
c.maxLength = maxLength; | ||
} | ||
return stringConstraints(c); | ||
} | ||
return new StringConstraints({ | ||
minLength: getMax(c1.constraints.minLength, c2.constraints.minLength), | ||
maxLength: getMin(c1.constraints.maxLength, c2.constraints.maxLength) | ||
}); | ||
} | ||
@@ -582,14 +527,6 @@ break; | ||
{ | ||
const c = { | ||
...c1.constraints | ||
}; | ||
const min = getMax(c1.constraints.min, c2.constraints.min); | ||
if (Predicate.isNumber(min)) { | ||
c.min = min; | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max); | ||
if (Predicate.isNumber(max)) { | ||
c.max = max; | ||
} | ||
return integerConstraints(c); | ||
return new IntegerConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max) | ||
}); | ||
} | ||
@@ -602,2 +539,5 @@ } | ||
exports.combineConstraints = combineConstraints; | ||
const getOr = (a, b) => { | ||
return a === undefined ? b : b === undefined ? a : a || b; | ||
}; | ||
function getMax(n1, n2) { | ||
@@ -604,0 +544,0 @@ return n1 === undefined ? n2 : n2 === undefined ? n1 : n1 <= n2 ? n2 : n1; |
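// Added sketch: the constraint classes above (NumberConstraints, IntegerConstraints, ...)
// translate filter annotations such as `between` into fast-check constraints, so generated
// values land inside the schema's bounds before the final Refinement filter runs. The
// filtered NumberFromString below is an assumed example schema; `Schema.between` and
// `.pipe` are assumed to be available in this snapshot.
const { Schema, Arbitrary, FastCheck } = require("@effect/schema")

// A refined schema: decodes a string, then requires 1 <= n <= 10.
const Bounded = Schema.NumberFromString.pipe(Schema.between(1, 10))

// getConstraints maps the BetweenTypeId annotation to a NumberConstraints,
// so sampled values already respect the range.
FastCheck.sample(Arbitrary.make(Bounded), 5).every((n) => n >= 1 && n <= 10) // true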
@@ -6,6 +6,5 @@ "use strict"; | ||
}); | ||
exports.formatIssues = exports.formatIssue = exports.formatError = void 0; | ||
var Option = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Option")); | ||
var ReadonlyArray = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/ReadonlyArray")); | ||
var AST = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./AST.js")); | ||
exports.formatIssueSync = exports.formatIssue = exports.formatErrorSync = exports.formatError = void 0; | ||
var Arr = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Array")); | ||
var Effect = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Effect")); | ||
var TreeFormatter = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./TreeFormatter.js")); | ||
@@ -31,3 +30,3 @@ function _getRequireWildcardCache(e) { | ||
a = Object.defineProperty && Object.getOwnPropertyDescriptor; | ||
for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { | ||
for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { | ||
var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; | ||
@@ -42,2 +41,36 @@ i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
const formatIssue = issue => go(issue); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
exports.formatIssue = formatIssue; | ||
const formatIssueSync = issue => Effect.runSync(formatIssue(issue)); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
exports.formatIssueSync = formatIssueSync; | ||
const formatError = error => formatIssue(error.error); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
exports.formatError = formatError; | ||
const formatErrorSync = error => formatIssueSync(error.error); | ||
exports.formatErrorSync = formatErrorSync; | ||
const succeed = issue => Effect.succeed([issue]); | ||
const getArray = (issue, path, onFailure) => Effect.matchEffect(TreeFormatter.getMessage(issue), { | ||
onFailure, | ||
onSuccess: message => succeed({ | ||
_tag: issue._tag, | ||
path, | ||
message | ||
}) | ||
}); | ||
const flatten = eff => Effect.map(eff, Arr.flatten); | ||
const go = (e, path = []) => { | ||
@@ -47,106 +80,44 @@ const _tag = e._tag; | ||
case "Type": | ||
return [{ | ||
return Effect.map(TreeFormatter.formatTypeMessage(e), message => [{ | ||
_tag, | ||
path, | ||
message: TreeFormatter.formatTypeMessage(e) | ||
}]; | ||
message | ||
}]); | ||
case "Forbidden": | ||
return [{ | ||
return succeed({ | ||
_tag, | ||
path, | ||
message: TreeFormatter.formatForbiddenMessage(e) | ||
}]; | ||
}); | ||
case "Unexpected": | ||
return [{ | ||
return succeed({ | ||
_tag, | ||
path, | ||
message: `is unexpected, expected ${AST.format(e.ast, true)}` | ||
}]; | ||
message: `is unexpected, expected ${e.ast.toString(true)}` | ||
}); | ||
case "Missing": | ||
return [{ | ||
return succeed({ | ||
_tag, | ||
path, | ||
message: "is missing" | ||
}]; | ||
}); | ||
case "Union": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => ReadonlyArray.flatMap(e.errors, e => { | ||
switch (e._tag) { | ||
case "Member": | ||
return go(e.error, path); | ||
default: | ||
return go(e, path); | ||
} | ||
}), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
case "Tuple": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => ReadonlyArray.flatMap(e.errors, index => go(index.error, [...path, index.index])), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
return getArray(e, path, () => flatten(Effect.forEach(e.errors, e => { | ||
switch (e._tag) { | ||
case "Member": | ||
return go(e.error, path); | ||
default: | ||
return go(e, path); | ||
} | ||
}))); | ||
case "TupleType": | ||
return getArray(e, path, () => flatten(Effect.forEach(e.errors, index => go(index.error, path.concat(index.index))))); | ||
case "TypeLiteral": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => ReadonlyArray.flatMap(e.errors, key => go(key.error, [...path, key.key])), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
case "Transform": | ||
return Option.match(TreeFormatter.getTransformMessage(e, e.actual), { | ||
onNone: () => go(e.error, path), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
return getArray(e, path, () => flatten(Effect.forEach(e.errors, key => go(key.error, path.concat(key.key))))); | ||
case "Declaration": | ||
case "Refinement": | ||
return Option.match(TreeFormatter.getRefinementMessage(e, e.actual), { | ||
onNone: () => go(e.error, path), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
case "Declaration": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => go(e.error, path), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
case "Transformation": | ||
return getArray(e, path, () => go(e.error, path)); | ||
} | ||
}; | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
const formatIssues = issues => ReadonlyArray.flatMap(issues, e => go(e)); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
exports.formatIssues = formatIssues; | ||
const formatIssue = error => formatIssues([error]); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
exports.formatIssue = formatIssue; | ||
const formatError = error => formatIssue(error.error); | ||
exports.formatError = formatError; | ||
//# sourceMappingURL=ArrayFormatter.js.map |
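// Added sketch: ArrayFormatter is now effectful — `formatIssue`/`formatError` return an
// Effect producing the issue array, and the new `formatIssueSync`/`formatErrorSync` run it
// with `Effect.runSync`, matching the compiled definitions above. The decode call assumes
// `Schema.decodeUnknownEither` and `Schema.NumberFromString` from this snapshot's Schema module.
const { Schema, ArrayFormatter } = require("@effect/schema")
const Either = require("effect/Either")

const result = Schema.decodeUnknownEither(Schema.NumberFromString)("not a number")
if (Either.isLeft(result)) {
  // e.g. [{ _tag: "Transformation", path: [], message: "..." }]
  console.log(ArrayFormatter.formatErrorSync(result.left))
}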
@@ -7,11 +7,11 @@ "use strict"; | ||
exports.make = exports.equivalence = exports.EquivalenceHookId = void 0; | ||
var Arr = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Array")); | ||
var Equal = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Equal")); | ||
var Equivalence = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Equivalence")); | ||
var Option = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Option")); | ||
var Predicate = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Predicate")); | ||
var ReadonlyArray = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/ReadonlyArray")); | ||
var AST = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./AST.js")); | ||
var Internal = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/ast.js")); | ||
var hooks = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/hooks.js")); | ||
var InternalSchema = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/schema.js")); | ||
var Parser = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./Parser.js")); | ||
var errors_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/errors.js")); | ||
var util_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/util.js")); | ||
var ParseResult = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./ParseResult.js")); | ||
function _getRequireWildcardCache(e) { | ||
@@ -36,3 +36,3 @@ if ("function" != typeof WeakMap) return null; | ||
a = Object.defineProperty && Object.getOwnPropertyDescriptor; | ||
for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { | ||
for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { | ||
var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; | ||
@@ -51,3 +51,3 @@ i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; | ||
*/ | ||
const EquivalenceHookId = exports.EquivalenceHookId = hooks.EquivalenceHookId; | ||
const EquivalenceHookId = exports.EquivalenceHookId = /*#__PURE__*/Symbol.for("@effect/schema/EquivalenceHookId"); | ||
/** | ||
@@ -57,3 +57,5 @@ * @category annotations | ||
*/ | ||
const equivalence = handler => self => InternalSchema.make(AST.setAnnotation(self.ast, EquivalenceHookId, handler)); | ||
const equivalence = handler => self => self.annotations({ | ||
[EquivalenceHookId]: handler | ||
}); | ||
/** | ||
@@ -64,6 +66,7 @@ * @category Equivalence | ||
exports.equivalence = equivalence; | ||
const make = schema => go(schema.ast); | ||
const make = schema => go(schema.ast, []); | ||
exports.make = make; | ||
const getHook = /*#__PURE__*/AST.getAnnotation(EquivalenceHookId); | ||
const go = ast => { | ||
const getEquivalenceErrorMessage = (message, path) => errors_.getErrorMessageWithPath(`cannot build an Equivalence for ${message}`, path); | ||
const go = (ast, path) => { | ||
const hook = getHook(ast); | ||
@@ -73,5 +76,5 @@ if (Option.isSome(hook)) { | ||
case "Declaration": | ||
return hook.value(...ast.typeParameters.map(go)); | ||
return hook.value(...ast.typeParameters.map(tp => go(tp, path))); | ||
case "Refinement": | ||
return hook.value(go(ast.from)); | ||
return hook.value(go(ast.from, path)); | ||
default: | ||
@@ -83,5 +86,5 @@ return hook.value(); | ||
case "NeverKeyword": | ||
throw new Error("cannot build an Equivalence for `never`"); | ||
case "Transform": | ||
return go(ast.to); | ||
throw new Error(getEquivalenceErrorMessage("`never`", path)); | ||
case "Transformation": | ||
return go(ast.to, path); | ||
case "Declaration": | ||
@@ -102,14 +105,14 @@ case "Literal": | ||
case "ObjectKeyword": | ||
return Equivalence.strict(); | ||
return Equal.equals; | ||
case "Refinement": | ||
return go(ast.from); | ||
return go(ast.from, path); | ||
case "Suspend": | ||
{ | ||
const get = Internal.memoizeThunk(() => go(ast.f())); | ||
const get = util_.memoizeThunk(() => go(ast.f(), path)); | ||
return (a, b) => get()(a, b); | ||
} | ||
case "Tuple": | ||
case "TupleType": | ||
{ | ||
const elements = ast.elements.map(element => go(element.type)); | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(go)); | ||
const elements = ast.elements.map((element, i) => go(element.type, path.concat(i))); | ||
const rest = ast.rest.map(ast => go(ast, path)); | ||
return Equivalence.make((a, b) => { | ||
@@ -132,4 +135,4 @@ const len = a.length; | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const [head, ...tail] = rest.value; | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest; | ||
for (; i < len - tail.length; i++) { | ||
@@ -156,6 +159,6 @@ if (!head(a[i], b[i])) { | ||
if (ast.propertySignatures.length === 0 && ast.indexSignatures.length === 0) { | ||
return Equivalence.strict(); | ||
return Equal.equals; | ||
} | ||
const propertySignatures = ast.propertySignatures.map(ps => go(ps.type)); | ||
const indexSignatures = ast.indexSignatures.map(is => go(is.type)); | ||
const propertySignatures = ast.propertySignatures.map(ps => go(ps.type, path.concat(ps.name))); | ||
const indexSignatures = ast.indexSignatures.map(is => go(is.type, path)); | ||
return Equivalence.make((a, b) => { | ||
@@ -214,4 +217,4 @@ const aStringKeys = Object.keys(a); | ||
{ | ||
const searchTree = Parser.getSearchTree(ast.types, true); | ||
const ownKeys = Internal.ownKeys(searchTree.keys); | ||
const searchTree = ParseResult.getSearchTree(ast.types, true); | ||
const ownKeys = util_.ownKeys(searchTree.keys); | ||
const len = ownKeys.length; | ||
@@ -235,3 +238,5 @@ return Equivalence.make((a, b) => { | ||
} | ||
const tuples = candidates.map(ast => [go(ast), Parser.is(InternalSchema.make(ast))]); | ||
const tuples = candidates.map(ast => [go(ast, path), ParseResult.is({ | ||
ast | ||
})]); | ||
for (let i = 0; i < tuples.length; i++) { | ||
@@ -238,0 +243,0 @@ const [equivalence, is] = tuples[i]; |
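// Added sketch: `Equivalence.make` now threads a `path` for error reporting, derives member
// equivalences structurally, and falls back to `Equal.equals` (instead of `Equivalence.strict()`)
// for object-like nodes, as the diff above shows. `Schema.NumberFromString` is assumed as a
// neutral example schema value.
const { Schema, Equivalence } = require("@effect/schema")

const eq = Equivalence.make(Schema.NumberFromString) // Equivalence over the decoded number type
eq(1, 1) // true
eq(1, 2) // false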
@@ -6,3 +6,3 @@ "use strict"; | ||
}); | ||
exports.TreeFormatter = exports.Serializable = exports.Schema = exports.Pretty = exports.Parser = exports.ParseResult = exports.JSONSchema = exports.Format = exports.Equivalence = exports.ArrayFormatter = exports.Arbitrary = exports.AST = void 0; | ||
exports.TreeFormatter = exports.Serializable = exports.Schema = exports.Pretty = exports.ParseResult = exports.JSONSchema = exports.FastCheck = exports.Equivalence = exports.ArrayFormatter = exports.Arbitrary = exports.AST = void 0; | ||
var _AST = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./AST.js")); | ||
@@ -16,4 +16,4 @@ exports.AST = _AST; | ||
exports.Equivalence = _Equivalence; | ||
var _Format = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./Format.js")); | ||
exports.Format = _Format; | ||
var _FastCheck = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./FastCheck.js")); | ||
exports.FastCheck = _FastCheck; | ||
var _JSONSchema = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./JSONSchema.js")); | ||
@@ -23,4 +23,2 @@ exports.JSONSchema = _JSONSchema; | ||
exports.ParseResult = _ParseResult; | ||
var _Parser = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./Parser.js")); | ||
exports.Parser = _Parser; | ||
var _Pretty = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./Pretty.js")); | ||
@@ -53,3 +51,3 @@ exports.Pretty = _Pretty; | ||
a = Object.defineProperty && Object.getOwnPropertyDescriptor; | ||
for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { | ||
for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { | ||
var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; | ||
@@ -56,0 +54,0 @@ i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; |
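// Added note: the package index now re-exports `FastCheck` and drops the `Format` and `Parser`
// namespaces (Parser's functions move into `ParseResult`, as the later hunks show). A minimal
// import sketch using only exports visible in this diff:
const { FastCheck, ParseResult } = require("@effect/schema")
FastCheck.sample(FastCheck.integer(), 2)        // the re-exported fast-check module
typeof ParseResult.decodeUnknownEither          // "function"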
@@ -6,3 +6,3 @@ "use strict"; | ||
}); | ||
exports.MinLengthTypeId = exports.MinItemsTypeId = exports.MaxLengthTypeId = exports.MaxItemsTypeId = exports.LessThanTypeId = exports.LessThanOrEqualToTypeId = exports.LessThanOrEqualToBigintTypeId = exports.LessThanBigintTypeId = exports.LengthTypeId = exports.ItemsCountTypeId = exports.IntTypeId = exports.GreaterThanTypeId = exports.GreaterThanOrEqualToTypeId = exports.GreaterThanOrEqualToBigintTypeId = exports.GreaterThanBigintTypeId = exports.BetweenTypeId = exports.BetweenBigintTypeId = void 0; | ||
exports.MinLengthTypeId = exports.MinItemsTypeId = exports.MaxLengthTypeId = exports.MaxItemsTypeId = exports.LessThanTypeId = exports.LessThanOrEqualToTypeId = exports.LessThanOrEqualToBigIntTypeId = exports.LessThanBigIntTypeId = exports.LengthTypeId = exports.ItemsCountTypeId = exports.IntTypeId = exports.GreaterThanTypeId = exports.GreaterThanOrEqualToTypeId = exports.GreaterThanOrEqualToBigIntTypeId = exports.GreaterThanBigintTypeId = exports.BetweenTypeId = exports.BetweenBigintTypeId = void 0; | ||
/** @internal */ | ||
@@ -23,7 +23,7 @@ const GreaterThanTypeId = exports.GreaterThanTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/GreaterThan"); | ||
/** @internal */ | ||
const GreaterThanOrEqualToBigintTypeId = exports.GreaterThanOrEqualToBigintTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/GreaterThanOrEqualToBigint"); | ||
const GreaterThanOrEqualToBigIntTypeId = exports.GreaterThanOrEqualToBigIntTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/GreaterThanOrEqualToBigint"); | ||
/** @internal */ | ||
const LessThanBigintTypeId = exports.LessThanBigintTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/LessThanBigint"); | ||
const LessThanBigIntTypeId = exports.LessThanBigIntTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/LessThanBigint"); | ||
/** @internal */ | ||
const LessThanOrEqualToBigintTypeId = exports.LessThanOrEqualToBigintTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/LessThanOrEqualToBigint"); | ||
const LessThanOrEqualToBigIntTypeId = exports.LessThanOrEqualToBigIntTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/LessThanOrEqualToBigint"); | ||
/** @internal */ | ||
@@ -30,0 +30,0 @@ const BetweenBigintTypeId = exports.BetweenBigintTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/BetweenBigint"); |
@@ -6,8 +6,8 @@ "use strict"; | ||
}); | ||
exports.make = exports.goRoot = exports.DEFINITION_PREFIX = void 0; | ||
exports.make = exports.DEFINITION_PREFIX = void 0; | ||
var Option = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Option")); | ||
var Predicate = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Predicate")); | ||
var ReadonlyArray = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/ReadonlyArray")); | ||
var ReadonlyRecord = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/ReadonlyRecord")); | ||
var Record = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Record")); | ||
var AST = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./AST.js")); | ||
var errors_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/errors.js")); | ||
function _getRequireWildcardCache(e) { | ||
@@ -32,3 +32,3 @@ if ("function" != typeof WeakMap) return null; | ||
a = Object.defineProperty && Object.getOwnPropertyDescriptor; | ||
for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { | ||
for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { | ||
var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; | ||
@@ -47,3 +47,20 @@ i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; | ||
*/ | ||
const make = schema => goRoot(schema.ast); | ||
const make = schema => { | ||
const $defs = {}; | ||
const jsonSchema = go(schema.ast, $defs, true, []); | ||
const out = { | ||
$schema, | ||
...jsonSchema | ||
}; | ||
// clean up self-referencing entries | ||
for (const id in $defs) { | ||
if ($defs[id]["$ref"] === get$ref(id)) { | ||
delete $defs[id]; | ||
} | ||
} | ||
if (!Record.isEmptyRecord($defs)) { | ||
out.$defs = $defs; | ||
} | ||
return out; | ||
}; | ||
exports.make = make; | ||
@@ -64,3 +81,3 @@ const anyJsonSchema = { | ||
}; | ||
const emptyJsonSchema = { | ||
const empty = () => ({ | ||
"$id": "/schemas/{}", | ||
@@ -72,35 +89,5 @@ "oneOf": [{ | ||
}] | ||
}; | ||
}); | ||
const $schema = "http://json-schema.org/draft-07/schema#"; | ||
/** @internal */ | ||
const goRoot = ast => { | ||
const $defs = {}; | ||
const jsonSchema = goWithIdentifier(ast, $defs); | ||
const out = { | ||
$schema, | ||
...jsonSchema | ||
}; | ||
if (!ReadonlyRecord.isEmptyRecord($defs)) { | ||
out.$defs = $defs; | ||
} | ||
return out; | ||
}; | ||
exports.goRoot = goRoot; | ||
const goWithIdentifier = (ast, $defs) => { | ||
const identifier = AST.getIdentifierAnnotation(ast); | ||
return Option.match(identifier, { | ||
onNone: () => goWithMetaData(ast, $defs), | ||
onSome: id => { | ||
const out = { | ||
$ref: `${DEFINITION_PREFIX}${id}` | ||
}; | ||
if (!ReadonlyRecord.has($defs, id)) { | ||
$defs[id] = out; | ||
$defs[id] = goWithMetaData(ast, $defs); | ||
} | ||
return out; | ||
} | ||
}); | ||
}; | ||
const getMetaData = annotated => ReadonlyRecord.getSomes({ | ||
const getJsonSchemaAnnotations = annotated => Record.getSomes({ | ||
description: AST.getDescriptionAnnotation(annotated), | ||
@@ -111,27 +98,72 @@ title: AST.getTitleAnnotation(annotated), | ||
}); | ||
const goWithMetaData = (ast, $defs) => { | ||
return { | ||
...go(ast, $defs), | ||
...getMetaData(ast) | ||
}; | ||
const pruneUndefinedKeyword = ps => { | ||
const type = ps.type; | ||
if (ps.isOptional && AST.isUnion(type) && Option.isNone(AST.getJSONSchemaAnnotation(type))) { | ||
return AST.Union.make(type.types.filter(type => !AST.isUndefinedKeyword(type)), type.annotations); | ||
} | ||
return type; | ||
}; | ||
const getMissingAnnotationErrorMessage = (name, path) => errors_.getErrorMessageWithPath(`cannot build a JSON Schema for ${name} without a JSON Schema annotation`, path); | ||
const getUnsupportedIndexSignatureParameterErrorMessage = (parameter, path) => errors_.getErrorMessageWithPath(`unsupported index signature parameter (${parameter})`, path); | ||
/** @internal */ | ||
const DEFINITION_PREFIX = exports.DEFINITION_PREFIX = "#/$defs/"; | ||
const go = (ast, $defs) => { | ||
const get$ref = id => `${DEFINITION_PREFIX}${id}`; | ||
const hasTransformation = ast => { | ||
switch (ast.from._tag) { | ||
case "Transformation": | ||
return true; | ||
case "Refinement": | ||
return hasTransformation(ast.from); | ||
case "Suspend": | ||
{ | ||
const from = ast.from.f(); | ||
if (AST.isRefinement(from)) { | ||
return hasTransformation(from); | ||
} | ||
} | ||
break; | ||
} | ||
return false; | ||
}; | ||
const go = (ast, $defs, handleIdentifier, path) => { | ||
const hook = AST.getJSONSchemaAnnotation(ast); | ||
if (Option.isSome(hook)) { | ||
switch (ast._tag) { | ||
case "Refinement": | ||
const handler = hook.value; | ||
if (AST.isRefinement(ast) && !hasTransformation(ast)) { | ||
try { | ||
return { | ||
...goWithIdentifier(ast.from, $defs), | ||
...hook.value | ||
...go(ast.from, $defs, true, path), | ||
...getJsonSchemaAnnotations(ast), | ||
...handler | ||
}; | ||
} catch (e) { | ||
return { | ||
...getJsonSchemaAnnotations(ast), | ||
...handler | ||
}; | ||
} | ||
} | ||
return hook.value; | ||
return handler; | ||
} | ||
const surrogate = AST.getSurrogateAnnotation(ast); | ||
if (Option.isSome(surrogate)) { | ||
return go(surrogate.value, $defs, handleIdentifier, path); | ||
} | ||
if (handleIdentifier && !AST.isTransformation(ast)) { | ||
const identifier = AST.getJSONIdentifier(ast); | ||
if (Option.isSome(identifier)) { | ||
const id = identifier.value; | ||
const out = { | ||
$ref: get$ref(id) | ||
}; | ||
if (!Record.has($defs, id)) { | ||
$defs[id] = out; | ||
$defs[id] = go(ast, $defs, false, path); | ||
} | ||
return out; | ||
} | ||
} | ||
switch (ast._tag) { | ||
case "Declaration": | ||
{ | ||
throw new Error("cannot build a JSON Schema for a declaration without a JSON Schema annotation"); | ||
} | ||
throw new Error(getMissingAnnotationErrorMessage("a declaration", path)); | ||
case "Literal": | ||
@@ -142,59 +174,70 @@ { | ||
return { | ||
const: null | ||
const: null, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} else if (Predicate.isString(literal)) { | ||
return { | ||
const: literal | ||
const: literal, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} else if (Predicate.isNumber(literal)) { | ||
return { | ||
const: literal | ||
const: literal, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} else if (Predicate.isBoolean(literal)) { | ||
return { | ||
const: literal | ||
const: literal, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} | ||
throw new Error("cannot build a JSON Schema for a bigint literal without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("a bigint literal", path)); | ||
} | ||
case "UniqueSymbol": | ||
throw new Error("cannot build a JSON Schema for a unique symbol without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("a unique symbol", path)); | ||
case "UndefinedKeyword": | ||
throw new Error("cannot build a JSON Schema for `undefined` without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("`undefined`", path)); | ||
case "VoidKeyword": | ||
throw new Error("cannot build a JSON Schema for `void` without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("`void`", path)); | ||
case "NeverKeyword": | ||
throw new Error("cannot build a JSON Schema for `never` without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("`never`", path)); | ||
case "UnknownKeyword": | ||
return { | ||
...unknownJsonSchema | ||
...unknownJsonSchema, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "AnyKeyword": | ||
return { | ||
...anyJsonSchema | ||
...anyJsonSchema, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "ObjectKeyword": | ||
return { | ||
...objectJsonSchema | ||
...objectJsonSchema, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "StringKeyword": | ||
return { | ||
type: "string" | ||
type: "string", | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "NumberKeyword": | ||
return { | ||
type: "number" | ||
type: "number", | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "BooleanKeyword": | ||
return { | ||
type: "boolean" | ||
type: "boolean", | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "BigIntKeyword": | ||
throw new Error("cannot build a JSON Schema for `bigint` without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("`bigint`", path)); | ||
case "SymbolKeyword": | ||
throw new Error("cannot build a JSON Schema for `symbol` without a JSON Schema annotation"); | ||
case "Tuple": | ||
throw new Error(getMissingAnnotationErrorMessage("`symbol`", path)); | ||
case "TupleType": | ||
{ | ||
const elements = ast.elements.map(e => goWithIdentifier(e.type, $defs)); | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(ast => goWithIdentifier(ast, $defs))); | ||
const len = ast.elements.length; | ||
const elements = ast.elements.map((e, i) => go(e.type, $defs, true, path.concat(i))); | ||
const rest = ast.rest.map(ast => go(ast, $defs, true, path)); | ||
const output = { | ||
@@ -206,3 +249,2 @@ type: "array" | ||
// --------------------------------------------- | ||
const len = elements.length; | ||
if (len > 0) { | ||
@@ -215,4 +257,4 @@ output.minItems = len - ast.elements.filter(element => element.isOptional).length; | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const head = rest.value[0]; | ||
if (rest.length > 0) { | ||
const head = rest[0]; | ||
if (len > 0) { | ||
@@ -226,4 +268,4 @@ output.additionalItems = head; | ||
// --------------------------------------------- | ||
if (rest.value.length > 1) { | ||
throw new Error("Generating a JSON Schema for post-rest elements is not currently supported. You're welcome to contribute by submitting a Pull Request."); | ||
if (rest.length > 1) { | ||
throw new Error(errors_.getErrorMessageWithPath("Generating a JSON Schema for post-rest elements is not currently supported. You're welcome to contribute by submitting a Pull Request.", path)); | ||
} | ||
@@ -237,3 +279,6 @@ } else { | ||
} | ||
return output; | ||
return { | ||
...output, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} | ||
@@ -244,3 +289,4 @@ case "TypeLiteral": | ||
return { | ||
...emptyJsonSchema | ||
...empty(), | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -255,3 +301,3 @@ } | ||
{ | ||
additionalProperties = goWithIdentifier(is.type, $defs); | ||
additionalProperties = go(is.type, $defs, true, path); | ||
break; | ||
@@ -262,3 +308,3 @@ } | ||
patternProperties = { | ||
[AST.getTemplateLiteralRegex(parameter).source]: goWithIdentifier(is.type, $defs) | ||
[AST.getTemplateLiteralRegExp(parameter).source]: go(is.type, $defs, true, path) | ||
}; | ||
@@ -272,10 +318,10 @@ break; | ||
patternProperties = { | ||
[hook.value.pattern]: goWithIdentifier(is.type, $defs) | ||
[hook.value.pattern]: go(is.type, $defs, true, path) | ||
}; | ||
break; | ||
} | ||
throw new Error(`Unsupported index signature parameter (${AST.format(parameter)})`); | ||
throw new Error(getUnsupportedIndexSignatureParameterErrorMessage(parameter, path)); | ||
} | ||
case "SymbolKeyword": | ||
throw new Error(`Unsupported index signature parameter (${AST.format(parameter)})`); | ||
throw new Error(getUnsupportedIndexSignatureParameterErrorMessage(parameter, path)); | ||
} | ||
@@ -285,4 +331,4 @@ } | ||
return { | ||
...goWithIdentifier(ps.type, $defs), | ||
...getMetaData(ps) | ||
...go(pruneUndefinedKeyword(ps), $defs, true, path.concat(ps.name)), | ||
...getJsonSchemaAnnotations(ps) | ||
}; | ||
@@ -301,3 +347,3 @@ }); | ||
const name = ast.propertySignatures[i].name; | ||
if (typeof name === "string") { | ||
if (Predicate.isString(name)) { | ||
output.properties[name] = propertySignatures[i]; | ||
@@ -311,3 +357,3 @@ // --------------------------------------------- | ||
} else { | ||
throw new Error(`Cannot encode ${String(name)} key to JSON Schema`); | ||
throw new Error(errors_.getErrorMessageWithPath(`cannot encode ${String(name)} key to JSON Schema`, path)); | ||
} | ||
@@ -324,3 +370,6 @@ } | ||
} | ||
return output; | ||
return { | ||
...output, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} | ||
@@ -332,3 +381,3 @@ case "Union": | ||
for (const type of ast.types) { | ||
const schema = goWithIdentifier(type, $defs); | ||
const schema = go(type, $defs, true, path); | ||
if ("const" in schema) { | ||
@@ -347,7 +396,9 @@ if (Object.keys(schema).length > 1) { | ||
return { | ||
const: enums[0] | ||
const: enums[0], | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} else { | ||
return { | ||
enum: enums | ||
enum: enums, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -366,3 +417,4 @@ } | ||
return { | ||
anyOf | ||
anyOf, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -378,3 +430,4 @@ } | ||
const: e[1] | ||
})) | ||
})), | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -384,11 +437,12 @@ } | ||
{ | ||
throw new Error("cannot build a JSON Schema for a refinement without a JSON Schema annotation"); | ||
throw new Error(errors_.getErrorMessageWithPath("cannot build a JSON Schema for a refinement without a JSON Schema annotation", path)); | ||
} | ||
case "TemplateLiteral": | ||
{ | ||
const regex = AST.getTemplateLiteralRegex(ast); | ||
const regex = AST.getTemplateLiteralRegExp(ast); | ||
return { | ||
type: "string", | ||
description: "a template literal", | ||
pattern: regex.source | ||
pattern: regex.source, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -398,12 +452,12 @@ } | ||
{ | ||
const identifier = Option.orElse(AST.getIdentifierAnnotation(ast), () => AST.getIdentifierAnnotation(ast.f())); | ||
const identifier = Option.orElse(AST.getJSONIdentifier(ast), () => AST.getJSONIdentifier(ast.f())); | ||
if (Option.isNone(identifier)) { | ||
throw new Error("Generating a JSON Schema for suspended schemas requires an identifier annotation"); | ||
throw new Error(errors_.getErrorMessageWithPath("Generating a JSON Schema for suspended schemas requires an identifier annotation", path)); | ||
} | ||
return goWithIdentifier(ast.f(), $defs); | ||
return go(ast.f(), $defs, true, path); | ||
} | ||
case "Transform": | ||
return goWithIdentifier(ast.to, $defs); | ||
case "Transformation": | ||
return go(ast.from, $defs, true, path); | ||
} | ||
}; | ||
//# sourceMappingURL=JSONSchema.js.map |
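// Added sketch: `JSONSchema.make` now builds the `$defs` record inline, prunes self-referencing
// definitions, and — per the `Transformation` case above — describes the *encoded* side
// (`ast.from`) rather than the decoded one. `Schema.NumberFromString` is assumed from this
// snapshot's Schema module.
const { Schema, JSONSchema } = require("@effect/schema")

// NumberFromString is a string -> number transformation, so its JSON Schema now reflects the
// encoded (string) side and carries the draft-07 `$schema` marker added by `make`.
JSONSchema.make(Schema.NumberFromString)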
@@ -6,65 +6,4 @@ "use strict"; | ||
}); | ||
exports.declaration = exports.ParseError = void 0; | ||
Object.defineProperty(exports, "decodeUnknown", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.decodeUnknown; | ||
} | ||
}); | ||
Object.defineProperty(exports, "decodeUnknownEither", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.decodeUnknownEither; | ||
} | ||
}); | ||
Object.defineProperty(exports, "decodeUnknownOption", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.decodeUnknownOption; | ||
} | ||
}); | ||
Object.defineProperty(exports, "decodeUnknownPromise", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.decodeUnknownPromise; | ||
} | ||
}); | ||
Object.defineProperty(exports, "decodeUnknownSync", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.decodeUnknownSync; | ||
} | ||
}); | ||
exports.eitherOrUndefined = void 0; | ||
Object.defineProperty(exports, "encodeUnknown", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.encodeUnknown; | ||
} | ||
}); | ||
Object.defineProperty(exports, "encodeUnknownEither", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.encodeUnknownEither; | ||
} | ||
}); | ||
Object.defineProperty(exports, "encodeUnknownOption", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.encodeUnknownOption; | ||
} | ||
}); | ||
Object.defineProperty(exports, "encodeUnknownPromise", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.encodeUnknownPromise; | ||
} | ||
}); | ||
Object.defineProperty(exports, "encodeUnknownSync", { | ||
enumerable: true, | ||
get: function () { | ||
return _Parser.encodeUnknownSync; | ||
} | ||
}); | ||
exports.union = exports.unexpected = exports.typeLiteral = exports.type = exports.tuple = exports.try = exports.transform = exports.succeed = exports.refinement = exports.parseError = exports.orElse = exports.missing = exports.member = exports.mapError = exports.mapBoth = exports.map = exports.key = exports.index = exports.forbidden = exports.flatMap = exports.fail = void 0; | ||
exports.validateSync = exports.validatePromise = exports.validateOption = exports.validateEither = exports.validate = exports.try = exports.succeed = exports.parseError = exports.orElse = exports.missing = exports.mergeParseOptions = exports.mapError = exports.mapBoth = exports.map = exports.is = exports.getSearchTree = exports.getLiterals = exports.getFinalTransformation = exports.fromOption = exports.flatMap = exports.fail = exports.encodeUnknownSync = exports.encodeUnknownPromise = exports.encodeUnknownOption = exports.encodeUnknownEither = exports.encodeUnknown = exports.encodeSync = exports.encodePromise = exports.encodeOption = exports.encodeEither = exports.encode = exports.eitherOrUndefined = exports.decodeUnknownSync = exports.decodeUnknownPromise = exports.decodeUnknownOption = exports.decodeUnknownEither = exports.decodeUnknown = exports.decodeSync = exports.decodePromise = exports.decodeOption = exports.decodeEither = exports.decode = exports.asserts = exports.Union = exports.Unexpected = exports.TypeLiteral = exports.Type = exports.TupleType = exports.Transformation = exports.Refinement = exports.ParseError = exports.Missing = exports.Member = exports.Key = exports.Index = exports.Forbidden = exports.Declaration = void 0; | ||
var Arr = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Array")); | ||
var _Data = /*#__PURE__*/require("effect/Data"); | ||
@@ -74,6 +13,9 @@ var Effect = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Effect")); | ||
var _Function = /*#__PURE__*/require("effect/Function"); | ||
var _GlobalValue = /*#__PURE__*/require("effect/GlobalValue"); | ||
var Inspectable = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Inspectable")); | ||
var InternalParser = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/parser.js")); | ||
var Option = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Option")); | ||
var Predicate = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Predicate")); | ||
var AST = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./AST.js")); | ||
var util_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/util.js")); | ||
var TreeFormatter = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./TreeFormatter.js")); | ||
var _Parser = /*#__PURE__*/require("./Parser.js"); | ||
function _getRequireWildcardCache(e) { | ||
@@ -98,3 +40,3 @@ if ("function" != typeof WeakMap) return null; | ||
a = Object.defineProperty && Object.getOwnPropertyDescriptor; | ||
for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { | ||
for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { | ||
var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; | ||
@@ -110,75 +52,230 @@ i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; | ||
/** | ||
* Error that occurs when a declaration has an error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
class ParseError extends (0, _Data.TaggedError)("ParseError") { | ||
get message() { | ||
return this.toString(); | ||
} | ||
class Declaration { | ||
ast; | ||
actual; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString() { | ||
return TreeFormatter.formatIssue(this.error); | ||
_tag = "Declaration"; | ||
constructor(ast, actual, error) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* Error that occurs when a refinement has an error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
exports.Declaration = Declaration; | ||
class Refinement { | ||
ast; | ||
actual; | ||
kind; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_id: "ParseError", | ||
message: this.toString() | ||
}; | ||
_tag = "Refinement"; | ||
constructor(ast, actual, kind, error) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.kind = kind; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* Error that occurs when an array or tuple has an error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
exports.Refinement = Refinement; | ||
class TupleType { | ||
ast; | ||
actual; | ||
errors; | ||
output; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
[Inspectable.NodeInspectSymbol]() { | ||
return this.toJSON(); | ||
_tag = "TupleType"; | ||
constructor(ast, actual, errors, output = []) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.errors = errors; | ||
this.output = output; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Index` error indicates that there was an error at a specific index in an array or tuple. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
exports.ParseError = ParseError; | ||
const parseError = issue => new ParseError({ | ||
error: issue | ||
}); | ||
exports.TupleType = TupleType; | ||
class Index { | ||
index; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Index"; | ||
constructor(index, error) { | ||
this.index = index; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* Error that occurs when a type literal or record has an error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
exports.parseError = parseError; | ||
const succeed = exports.succeed = Either.right; | ||
exports.Index = Index; | ||
class TypeLiteral { | ||
ast; | ||
actual; | ||
errors; | ||
output; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "TypeLiteral"; | ||
constructor(ast, actual, errors, output = {}) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.errors = errors; | ||
this.output = output; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Key` variant of the `ParseIssue` type represents an error that occurs when a key in a type literal or record is invalid. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
const fail = exports.fail = Either.left; | ||
const _try = exports.try = Either.try; | ||
exports.TypeLiteral = TypeLiteral; | ||
class Key { | ||
key; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Key"; | ||
constructor(key, error) { | ||
this.key = key; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* Error that occurs when an unexpected key or index is present. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
const transform = exports.transform = InternalParser.transform; | ||
exports.Key = Key; | ||
class Unexpected { | ||
ast; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Unexpected"; | ||
constructor(ast) { | ||
this.ast = ast; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* Error that occurs when a transformation has an error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
const type = exports.type = InternalParser.type; | ||
exports.Unexpected = Unexpected; | ||
class Transformation { | ||
ast; | ||
actual; | ||
kind; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Transformation"; | ||
constructor(ast, actual, kind, error) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.kind = kind; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Type` variant of the `ParseIssue` type represents an error that occurs when the `actual` value is not of the expected type. | ||
* The `ast` field specifies the expected type, and the `actual` field contains the value that caused the error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
const forbidden = exports.forbidden = InternalParser.forbidden; | ||
exports.Transformation = Transformation; | ||
class Type { | ||
ast; | ||
actual; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Type"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
message; | ||
constructor(ast, actual, message) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.message = Option.fromNullable(message); | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Forbidden` variant of the `ParseIssue` type represents a forbidden operation, such as when encountering an Effect that is not allowed to execute (e.g., using `runSync`). | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
const declaration = exports.declaration = InternalParser.declaration; | ||
exports.Type = Type; | ||
class Forbidden { | ||
ast; | ||
actual; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Forbidden"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
message; | ||
constructor(ast, actual, message) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.message = Option.fromNullable(message); | ||
} | ||
} | ||
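/** | ||
 * Illustrative note (editor's sketch, not part of the published source): a | ||
 * `Forbidden` issue is what the synchronous APIs surface when the parser | ||
 * returns an Effect that performs async work and therefore cannot be resolved | ||
 * with `Effect.runSync`. A hedged example of the resulting value, using | ||
 * hypothetical `someAst` / `someInput` placeholders: | ||
 * | ||
 *   const issue = new Forbidden(someAst, someInput, "cannot be resolved synchronously") | ||
 *   issue._tag      // "Forbidden" | ||
 *   issue.message   // Option.some("cannot be resolved synchronously") | ||
 */ | ||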
/** | ||
* @category constructors | ||
* Error that occurs when a required key or index is missing. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
const refinement = exports.refinement = InternalParser.refinement; | ||
exports.Forbidden = Forbidden; | ||
class Missing { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Missing"; | ||
} | ||
/** | ||
@@ -188,24 +285,73 @@ * @category constructors | ||
*/ | ||
const tuple = (ast, actual, errors) => ({ | ||
_tag: "Tuple", | ||
ast, | ||
actual, | ||
errors | ||
}); | ||
exports.Missing = Missing; | ||
const missing = exports.missing = /*#__PURE__*/new Missing(); | ||
/** | ||
* @category constructors | ||
* Error that occurs when a member in a union has an error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
exports.tuple = tuple; | ||
const typeLiteral = exports.typeLiteral = InternalParser.typeLiteral; | ||
class Member { | ||
ast; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Member"; | ||
constructor(ast, error) { | ||
this.ast = ast; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* Error that occurs when a union has an error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
const index = exports.index = InternalParser.index; | ||
exports.Member = Member; | ||
class Union { | ||
ast; | ||
actual; | ||
errors; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Union"; | ||
constructor(ast, actual, errors) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.errors = errors; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
const key = exports.key = InternalParser.key; | ||
exports.Union = Union; | ||
class ParseError extends (0, _Data.TaggedError)("ParseError") { | ||
get message() { | ||
return this.toString(); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString() { | ||
return TreeFormatter.formatIssueSync(this.error); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_id: "ParseError", | ||
message: this.toString() | ||
}; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
[Inspectable.NodeInspectSymbol]() { | ||
return this.toJSON(); | ||
} | ||
} | ||
/** | ||
@@ -215,3 +361,6 @@ * @category constructors | ||
*/ | ||
const missing = exports.missing = InternalParser.missing; | ||
exports.ParseError = ParseError; | ||
const parseError = issue => new ParseError({ | ||
error: issue | ||
}); | ||
/** | ||
@@ -221,3 +370,4 @@ * @category constructors | ||
*/ | ||
const unexpected = exports.unexpected = InternalParser.unexpected; | ||
exports.parseError = parseError; | ||
const succeed = exports.succeed = Either.right; | ||
/** | ||
@@ -227,3 +377,4 @@ * @category constructors | ||
*/ | ||
const union = exports.union = InternalParser.union; | ||
const fail = exports.fail = Either.left; | ||
const _try = exports.try = Either.try; | ||
/** | ||
@@ -233,3 +384,3 @@ * @category constructors | ||
*/ | ||
const member = exports.member = InternalParser.member; | ||
const fromOption = exports.fromOption = Either.fromOption; | ||
/** | ||
@@ -239,3 +390,12 @@ * @category optimisation | ||
*/ | ||
const eitherOrUndefined = exports.eitherOrUndefined = InternalParser.eitherOrUndefined; | ||
const flatMap = exports.flatMap = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => { | ||
const s = self; | ||
if (s["_tag"] === "Left") { | ||
return s; | ||
} | ||
if (s["_tag"] === "Right") { | ||
return f(s.right); | ||
} | ||
return Effect.flatMap(self, f); | ||
}); | ||
/** | ||
@@ -245,3 +405,12 @@ * @category optimisation | ||
*/ | ||
const flatMap = exports.flatMap = InternalParser.flatMap; | ||
const map = exports.map = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => { | ||
const s = self; | ||
if (s["_tag"] === "Left") { | ||
return s; | ||
} | ||
if (s["_tag"] === "Right") { | ||
return Either.right(f(s.right)); | ||
} | ||
return Effect.map(self, f); | ||
}); | ||
/** | ||
@@ -251,3 +420,12 @@ * @category optimisation | ||
*/ | ||
const map = exports.map = InternalParser.map; | ||
const mapError = exports.mapError = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => { | ||
const s = self; | ||
if (s["_tag"] === "Left") { | ||
return Either.left(f(s.left)); | ||
} | ||
if (s["_tag"] === "Right") { | ||
return s; | ||
} | ||
return Effect.mapError(self, f); | ||
}); | ||
/** | ||
@@ -257,3 +435,8 @@ * @category optimisation | ||
*/ | ||
const mapError = exports.mapError = InternalParser.mapError; | ||
const eitherOrUndefined = self => { | ||
const s = self; | ||
if (s["_tag"] === "Left" || s["_tag"] === "Right") { | ||
return s; | ||
} | ||
}; | ||
/** | ||
@@ -263,2 +446,3 @@ * @category optimisation | ||
*/ | ||
exports.eitherOrUndefined = eitherOrUndefined; | ||
const mapBoth = exports.mapBoth = /*#__PURE__*/(0, _Function.dual)(2, (self, options) => { | ||
@@ -288,3 +472,956 @@ const s = self; | ||
}); | ||
/* c8 ignore start */ | ||
/** @internal */ | ||
const mergeParseOptions = (options, overrideOptions) => { | ||
if (overrideOptions === undefined || Predicate.isNumber(overrideOptions)) { | ||
return options; | ||
} | ||
if (options === undefined) { | ||
return overrideOptions; | ||
} | ||
const out = {}; | ||
out.errors = overrideOptions.errors ?? options.errors; | ||
out.onExcessProperty = overrideOptions.onExcessProperty ?? options.onExcessProperty; | ||
return out; | ||
}; | ||
exports.mergeParseOptions = mergeParseOptions; | ||
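/** | ||
 * Illustrative note (editor's sketch, not part of the published source): | ||
 * per-call override options win field-by-field over the options captured when | ||
 * the parser was created. | ||
 * | ||
 *   mergeParseOptions({ errors: "all" }, { onExcessProperty: "error" }) | ||
 *   // => { errors: "all", onExcessProperty: "error" } | ||
 *   mergeParseOptions({ errors: "all" }, { errors: "first" }) | ||
 *   // => { errors: "first", onExcessProperty: undefined } | ||
 *   mergeParseOptions({ errors: "all" }, undefined) | ||
 *   // => { errors: "all" } (the original options object is returned unchanged) | ||
 */ | ||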
const getEither = (ast, isDecoding, options) => { | ||
const parser = goMemo(ast, isDecoding); | ||
return (u, overrideOptions) => parser(u, mergeParseOptions(options, overrideOptions)); | ||
}; | ||
const getSync = (ast, isDecoding, options) => { | ||
const parser = getEither(ast, isDecoding, options); | ||
return (input, overrideOptions) => Either.getOrThrowWith(parser(input, overrideOptions), issue => new Error(TreeFormatter.formatIssueSync(issue), { | ||
cause: issue | ||
})); | ||
}; | ||
const getOption = (ast, isDecoding, options) => { | ||
const parser = getEither(ast, isDecoding, options); | ||
return (input, overrideOptions) => Option.getRight(parser(input, overrideOptions)); | ||
}; | ||
const getEffect = (ast, isDecoding, options) => { | ||
const parser = goMemo(ast, isDecoding); | ||
return (input, overrideOptions) => parser(input, { | ||
...mergeParseOptions(options, overrideOptions), | ||
isEffectAllowed: true | ||
}); | ||
}; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
const decodeUnknownSync = (schema, options) => getSync(schema.ast, true, options); | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.decodeUnknownSync = decodeUnknownSync; | ||
const decodeUnknownOption = (schema, options) => getOption(schema.ast, true, options); | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.decodeUnknownOption = decodeUnknownOption; | ||
const decodeUnknownEither = (schema, options) => getEither(schema.ast, true, options); | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.decodeUnknownEither = decodeUnknownEither; | ||
const decodeUnknownPromise = (schema, options) => { | ||
const parser = decodeUnknown(schema, options); | ||
return (u, overrideOptions) => Effect.runPromise(parser(u, overrideOptions)); | ||
}; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.decodeUnknownPromise = decodeUnknownPromise; | ||
const decodeUnknown = (schema, options) => getEffect(schema.ast, true, options); | ||
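/** | ||
 * Usage sketch (editor's note; `schema` and `input` are illustrative, not part | ||
 * of the published source). The decoding variants differ only in how the | ||
 * result is surfaced: | ||
 * | ||
 *   decodeUnknownEither(schema)(input)   // Either<A, ParseIssue> | ||
 *   decodeUnknownOption(schema)(input)   // Option<A> | ||
 *   decodeUnknownSync(schema)(input)     // A, or throws an Error built from the formatted issue | ||
 *   decodeUnknownPromise(schema)(input)  // Promise<A> | ||
 *   decodeUnknown(schema)(input)         // Effect<A, ParseIssue>, with effectful steps allowed | ||
 */ | ||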
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.decodeUnknown = decodeUnknown; | ||
const encodeUnknownSync = (schema, options) => getSync(schema.ast, false, options); | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.encodeUnknownSync = encodeUnknownSync; | ||
const encodeUnknownOption = (schema, options) => getOption(schema.ast, false, options); | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.encodeUnknownOption = encodeUnknownOption; | ||
const encodeUnknownEither = (schema, options) => getEither(schema.ast, false, options); | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.encodeUnknownEither = encodeUnknownEither; | ||
const encodeUnknownPromise = (schema, options) => { | ||
const parser = encodeUnknown(schema, options); | ||
return (u, overrideOptions) => Effect.runPromise(parser(u, overrideOptions)); | ||
}; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.encodeUnknownPromise = encodeUnknownPromise; | ||
const encodeUnknown = (schema, options) => getEffect(schema.ast, false, options); | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.encodeUnknown = encodeUnknown; | ||
const decodeSync = exports.decodeSync = decodeUnknownSync; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
const decodeOption = exports.decodeOption = decodeUnknownOption; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
const decodeEither = exports.decodeEither = decodeUnknownEither; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
const decodePromise = exports.decodePromise = decodeUnknownPromise; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
const decode = exports.decode = decodeUnknown; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
const validateSync = (schema, options) => getSync(AST.typeAST(schema.ast), true, options); | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
exports.validateSync = validateSync; | ||
const validateOption = (schema, options) => getOption(AST.typeAST(schema.ast), true, options); | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
exports.validateOption = validateOption; | ||
const validateEither = (schema, options) => getEither(AST.typeAST(schema.ast), true, options); | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
exports.validateEither = validateEither; | ||
const validatePromise = (schema, options) => { | ||
const parser = validate(schema, options); | ||
return (u, overrideOptions) => Effect.runPromise(parser(u, overrideOptions)); | ||
}; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
exports.validatePromise = validatePromise; | ||
const validate = (schema, options) => getEffect(AST.typeAST(schema.ast), true, options); | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
exports.validate = validate; | ||
const is = (schema, options) => { | ||
const parser = goMemo(AST.typeAST(schema.ast), true); | ||
return (u, overrideOptions) => Either.isRight(parser(u, { | ||
...mergeParseOptions(options, overrideOptions), | ||
isExact: true | ||
})); | ||
}; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
exports.is = is; | ||
const asserts = (schema, options) => { | ||
const parser = goMemo(AST.typeAST(schema.ast), true); | ||
return (u, overrideOptions) => { | ||
const result = parser(u, { | ||
...mergeParseOptions(options, overrideOptions), | ||
isExact: true | ||
}); | ||
if (Either.isLeft(result)) { | ||
throw new Error(TreeFormatter.formatIssueSync(result.left), { | ||
cause: result.left | ||
}); | ||
} | ||
}; | ||
}; | ||
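/** | ||
 * Usage sketch (editor's note; `schema`, `check` and `assertIsA` are | ||
 * illustrative names, assuming a schema whose type accepts strings): | ||
 * | ||
 *   const check = is(schema) | ||
 *   check("hello")       // true | ||
 *   check(42)            // false | ||
 * | ||
 *   const assertIsA = asserts(schema) | ||
 *   assertIsA("hello")   // returns undefined | ||
 *   assertIsA(42)        // throws an Error whose message is the formatted issue | ||
 */ | ||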
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
exports.asserts = asserts; | ||
const encodeSync = exports.encodeSync = encodeUnknownSync; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
const encodeOption = exports.encodeOption = encodeUnknownOption; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
const encodeEither = exports.encodeEither = encodeUnknownEither; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
const encodePromise = exports.encodePromise = encodeUnknownPromise; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
const encode = exports.encode = encodeUnknown; | ||
const decodeMemoMap = /*#__PURE__*/(0, _GlobalValue.globalValue)( /*#__PURE__*/Symbol.for("@effect/schema/Parser/decodeMemoMap"), () => new WeakMap()); | ||
const encodeMemoMap = /*#__PURE__*/(0, _GlobalValue.globalValue)( /*#__PURE__*/Symbol.for("@effect/schema/Parser/encodeMemoMap"), () => new WeakMap()); | ||
const goMemo = (ast, isDecoding) => { | ||
const memoMap = isDecoding ? decodeMemoMap : encodeMemoMap; | ||
const memo = memoMap.get(ast); | ||
if (memo) { | ||
return memo; | ||
} | ||
const parser = go(ast, isDecoding); | ||
memoMap.set(ast, parser); | ||
return parser; | ||
}; | ||
const getConcurrency = ast => Option.getOrUndefined(AST.getConcurrencyAnnotation(ast)); | ||
const getBatching = ast => Option.getOrUndefined(AST.getBatchingAnnotation(ast)); | ||
const go = (ast, isDecoding) => { | ||
switch (ast._tag) { | ||
case "Refinement": | ||
{ | ||
if (isDecoding) { | ||
const from = goMemo(ast.from, true); | ||
return (i, options) => handleForbidden(flatMap(mapError(from(i, options), e => new Refinement(ast, i, "From", e)), a => Option.match(ast.filter(a, options ?? AST.defaultParseOption, ast), { | ||
onNone: () => Either.right(a), | ||
onSome: e => Either.left(new Refinement(ast, i, "Predicate", e)) | ||
})), ast, i, options); | ||
} else { | ||
const from = goMemo(AST.typeAST(ast), true); | ||
const to = goMemo(dropRightRefinement(ast.from), false); | ||
return (i, options) => handleForbidden(flatMap(from(i, options), a => to(a, options)), ast, i, options); | ||
} | ||
} | ||
case "Transformation": | ||
{ | ||
const transform = getFinalTransformation(ast.transformation, isDecoding); | ||
const from = isDecoding ? goMemo(ast.from, true) : goMemo(ast.to, false); | ||
const to = isDecoding ? goMemo(ast.to, true) : goMemo(ast.from, false); | ||
return (i1, options) => handleForbidden(flatMap(mapError(from(i1, options), e => new Transformation(ast, i1, isDecoding ? "Encoded" : "Type", e)), a => flatMap(mapError(transform(a, options ?? AST.defaultParseOption, ast), e => new Transformation(ast, i1, "Transformation", e)), i2 => mapError(to(i2, options), e => new Transformation(ast, i1, isDecoding ? "Type" : "Encoded", e)))), ast, i1, options); | ||
} | ||
case "Declaration": | ||
{ | ||
const parse = isDecoding ? ast.decodeUnknown(...ast.typeParameters) : ast.encodeUnknown(...ast.typeParameters); | ||
return (i, options) => handleForbidden(mapError(parse(i, options ?? AST.defaultParseOption, ast), e => new Declaration(ast, i, e)), ast, i, options); | ||
} | ||
case "Literal": | ||
return fromRefinement(ast, u => u === ast.literal); | ||
case "UniqueSymbol": | ||
return fromRefinement(ast, u => u === ast.symbol); | ||
case "UndefinedKeyword": | ||
return fromRefinement(ast, Predicate.isUndefined); | ||
case "VoidKeyword": | ||
return fromRefinement(ast, Predicate.isUndefined); | ||
case "NeverKeyword": | ||
return fromRefinement(ast, Predicate.isNever); | ||
case "UnknownKeyword": | ||
case "AnyKeyword": | ||
return Either.right; | ||
case "StringKeyword": | ||
return fromRefinement(ast, Predicate.isString); | ||
case "NumberKeyword": | ||
return fromRefinement(ast, Predicate.isNumber); | ||
case "BooleanKeyword": | ||
return fromRefinement(ast, Predicate.isBoolean); | ||
case "BigIntKeyword": | ||
return fromRefinement(ast, Predicate.isBigInt); | ||
case "SymbolKeyword": | ||
return fromRefinement(ast, Predicate.isSymbol); | ||
case "ObjectKeyword": | ||
return fromRefinement(ast, Predicate.isObject); | ||
case "Enums": | ||
return fromRefinement(ast, u => ast.enums.some(([_, value]) => value === u)); | ||
case "TemplateLiteral": | ||
{ | ||
const regex = AST.getTemplateLiteralRegExp(ast); | ||
return fromRefinement(ast, u => Predicate.isString(u) && regex.test(u)); | ||
} | ||
case "TupleType": | ||
{ | ||
const elements = ast.elements.map(e => goMemo(e.type, isDecoding)); | ||
const rest = ast.rest.map(ast => goMemo(ast, isDecoding)); | ||
let requiredLen = ast.elements.filter(e => !e.isOptional).length; | ||
if (ast.rest.length > 0) { | ||
requiredLen += ast.rest.length - 1; | ||
} | ||
const expectedAST = AST.Union.make(ast.elements.map((_, i) => new AST.Literal(i))); | ||
const concurrency = getConcurrency(ast); | ||
const batching = getBatching(ast); | ||
return (input, options) => { | ||
if (!Arr.isArray(input)) { | ||
return Either.left(new Type(ast, input)); | ||
} | ||
const allErrors = options?.errors === "all"; | ||
const es = []; | ||
let stepKey = 0; | ||
// --------------------------------------------- | ||
// handle missing indexes | ||
// --------------------------------------------- | ||
const len = input.length; | ||
for (let i = len; i <= requiredLen - 1; i++) { | ||
const e = new Index(i, missing); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e])); | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle excess indexes | ||
// --------------------------------------------- | ||
if (ast.rest.length === 0) { | ||
for (let i = ast.elements.length; i <= len - 1; i++) { | ||
const e = new Index(i, new Unexpected(expectedAST)); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e])); | ||
} | ||
} | ||
} | ||
const output = []; | ||
let i = 0; | ||
let queue = undefined; | ||
// --------------------------------------------- | ||
// handle elements | ||
// --------------------------------------------- | ||
for (; i < elements.length; i++) { | ||
if (len < i + 1) { | ||
if (ast.elements[i].isOptional) { | ||
// the input element is missing | ||
continue; | ||
} | ||
} else { | ||
const parser = elements[i]; | ||
const te = parser(input[i], options); | ||
const eu = eitherOrUndefined(te); | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
// the input element is present but is not valid | ||
const e = new Index(i, eu.left); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} | ||
output.push([stepKey++, eu.right]); | ||
} else { | ||
const nk = stepKey++; | ||
const index = i; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(te), t => { | ||
if (Either.isLeft(t)) { | ||
// the input element is present but is not valid | ||
const e = new Index(index, t.left); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} | ||
output.push([nk, t.right]); | ||
return Effect.void; | ||
})); | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle rest element | ||
// --------------------------------------------- | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest; | ||
for (; i < len - tail.length; i++) { | ||
const te = head(input[i], options); | ||
const eu = eitherOrUndefined(te); | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
const e = new Index(i, eu.left); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} else { | ||
output.push([stepKey++, eu.right]); | ||
} | ||
} else { | ||
const nk = stepKey++; | ||
const index = i; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(te), t => { | ||
if (Either.isLeft(t)) { | ||
const e = new Index(index, t.left); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} else { | ||
output.push([nk, t.right]); | ||
return Effect.void; | ||
} | ||
})); | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle post rest elements | ||
// --------------------------------------------- | ||
for (let j = 0; j < tail.length; j++) { | ||
i += j; | ||
if (len < i + 1) { | ||
continue; | ||
} else { | ||
const te = tail[j](input[i], options); | ||
const eu = eitherOrUndefined(te); | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
// the input element is present but is not valid | ||
const e = new Index(i, eu.left); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} | ||
output.push([stepKey++, eu.right]); | ||
} else { | ||
const nk = stepKey++; | ||
const index = i; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(te), t => { | ||
if (Either.isLeft(t)) { | ||
// the input element is present but is not valid | ||
const e = new Index(index, t.left); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} | ||
output.push([nk, t.right]); | ||
return Effect.void; | ||
})); | ||
} | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// compute result | ||
// --------------------------------------------- | ||
const computeResult = ({ | ||
es, | ||
output | ||
}) => Arr.isNonEmptyArray(es) ? Either.left(new TupleType(ast, input, sortByIndex(es), sortByIndex(output))) : Either.right(sortByIndex(output)); | ||
if (queue && queue.length > 0) { | ||
const cqueue = queue; | ||
return Effect.suspend(() => { | ||
const state = { | ||
es: Arr.copy(es), | ||
output: Arr.copy(output) | ||
}; | ||
return Effect.flatMap(Effect.forEach(cqueue, f => f(state), { | ||
concurrency, | ||
batching, | ||
discard: true | ||
}), () => computeResult(state)); | ||
}); | ||
} | ||
return computeResult({ | ||
output, | ||
es | ||
}); | ||
}; | ||
} | ||
case "TypeLiteral": | ||
{ | ||
if (ast.propertySignatures.length === 0 && ast.indexSignatures.length === 0) { | ||
return fromRefinement(ast, Predicate.isNotNullable); | ||
} | ||
const propertySignatures = []; | ||
const expectedKeys = {}; | ||
for (const ps of ast.propertySignatures) { | ||
propertySignatures.push([goMemo(ps.type, isDecoding), ps]); | ||
expectedKeys[ps.name] = null; | ||
} | ||
const indexSignatures = ast.indexSignatures.map(is => [goMemo(is.parameter, isDecoding), goMemo(is.type, isDecoding), is.parameter]); | ||
const expectedAST = AST.Union.make(ast.indexSignatures.map(is => is.parameter).concat(util_.ownKeys(expectedKeys).map(key => Predicate.isSymbol(key) ? new AST.UniqueSymbol(key) : new AST.Literal(key)))); | ||
const expected = goMemo(expectedAST, isDecoding); | ||
const concurrency = getConcurrency(ast); | ||
const batching = getBatching(ast); | ||
return (input, options) => { | ||
if (!Predicate.isRecord(input)) { | ||
return Either.left(new Type(ast, input)); | ||
} | ||
const allErrors = options?.errors === "all"; | ||
const es = []; | ||
let stepKey = 0; | ||
// --------------------------------------------- | ||
// handle excess properties | ||
// --------------------------------------------- | ||
const onExcessPropertyError = options?.onExcessProperty === "error"; | ||
const onExcessPropertyPreserve = options?.onExcessProperty === "preserve"; | ||
const output = {}; | ||
if (onExcessPropertyError || onExcessPropertyPreserve) { | ||
for (const key of util_.ownKeys(input)) { | ||
const eu = eitherOrUndefined(expected(key, options)); | ||
if (Either.isLeft(eu)) { | ||
// key is unexpected | ||
if (onExcessPropertyError) { | ||
const e = new Key(key, new Unexpected(expectedAST)); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} else { | ||
// preserve key | ||
output[key] = input[key]; | ||
} | ||
} | ||
} | ||
} | ||
let queue = undefined; | ||
const isExact = options?.isExact === true; | ||
for (let i = 0; i < propertySignatures.length; i++) { | ||
const ps = propertySignatures[i][1]; | ||
const name = ps.name; | ||
const hasKey = Object.prototype.hasOwnProperty.call(input, name); | ||
if (!hasKey) { | ||
if (ps.isOptional) { | ||
continue; | ||
} else if (isExact) { | ||
const e = new Key(name, missing); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} | ||
} | ||
const parser = propertySignatures[i][0]; | ||
const te = parser(input[name], options); | ||
const eu = eitherOrUndefined(te); | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
const e = new Key(name, hasKey ? eu.left : missing); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} | ||
output[name] = eu.right; | ||
} else { | ||
const nk = stepKey++; | ||
const index = name; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(te), t => { | ||
if (Either.isLeft(t)) { | ||
const e = new Key(index, hasKey ? t.left : missing); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} | ||
output[index] = t.right; | ||
return Effect.void; | ||
})); | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle index signatures | ||
// --------------------------------------------- | ||
for (let i = 0; i < indexSignatures.length; i++) { | ||
const indexSignature = indexSignatures[i]; | ||
const parameter = indexSignature[0]; | ||
const type = indexSignature[1]; | ||
const keys = util_.getKeysForIndexSignature(input, indexSignature[2]); | ||
for (const key of keys) { | ||
// --------------------------------------------- | ||
// handle keys | ||
// --------------------------------------------- | ||
const keu = eitherOrUndefined(parameter(key, options)); | ||
if (keu && Either.isRight(keu)) { | ||
// --------------------------------------------- | ||
// handle values | ||
// --------------------------------------------- | ||
const vpr = type(input[key], options); | ||
const veu = eitherOrUndefined(vpr); | ||
if (veu) { | ||
if (Either.isLeft(veu)) { | ||
const e = new Key(key, veu.left); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} else { | ||
if (!Object.prototype.hasOwnProperty.call(expectedKeys, key)) { | ||
output[key] = veu.right; | ||
} | ||
} | ||
} else { | ||
const nk = stepKey++; | ||
const index = key; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(vpr), tv => { | ||
if (Either.isLeft(tv)) { | ||
const e = new Key(index, tv.left); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} else { | ||
if (!Object.prototype.hasOwnProperty.call(expectedKeys, key)) { | ||
output[key] = tv.right; | ||
} | ||
return Effect.void; | ||
} | ||
})); | ||
} | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// compute result | ||
// --------------------------------------------- | ||
const computeResult = ({ | ||
es, | ||
output | ||
}) => Arr.isNonEmptyArray(es) ? Either.left(new TypeLiteral(ast, input, sortByIndex(es), output)) : Either.right(output); | ||
if (queue && queue.length > 0) { | ||
const cqueue = queue; | ||
return Effect.suspend(() => { | ||
const state = { | ||
es: Arr.copy(es), | ||
output: Object.assign({}, output) | ||
}; | ||
return Effect.flatMap(Effect.forEach(cqueue, f => f(state), { | ||
concurrency, | ||
batching, | ||
discard: true | ||
}), () => computeResult(state)); | ||
}); | ||
} | ||
return computeResult({ | ||
es, | ||
output | ||
}); | ||
}; | ||
} | ||
case "Union": | ||
{ | ||
const searchTree = getSearchTree(ast.types, isDecoding); | ||
const ownKeys = util_.ownKeys(searchTree.keys); | ||
const len = ownKeys.length; | ||
const map = new Map(); | ||
for (let i = 0; i < ast.types.length; i++) { | ||
map.set(ast.types[i], goMemo(ast.types[i], isDecoding)); | ||
} | ||
const concurrency = getConcurrency(ast) ?? 1; | ||
const batching = getBatching(ast); | ||
return (input, options) => { | ||
const es = []; | ||
let stepKey = 0; | ||
let candidates = []; | ||
if (len > 0) { | ||
// if there is at least one key then input must be an object | ||
if (Predicate.isRecord(input)) { | ||
for (let i = 0; i < len; i++) { | ||
const name = ownKeys[i]; | ||
const buckets = searchTree.keys[name].buckets; | ||
// for each property that should contain a literal, check if the input contains that property | ||
if (Object.prototype.hasOwnProperty.call(input, name)) { | ||
const literal = String(input[name]); | ||
// check that the value obtained from the input for the property corresponds to an existing bucket | ||
if (Object.prototype.hasOwnProperty.call(buckets, literal)) { | ||
// retrieve the minimal set of candidates for decoding | ||
candidates = candidates.concat(buckets[literal]); | ||
} else { | ||
const literals = AST.Union.make(searchTree.keys[name].literals); | ||
es.push([stepKey++, new TypeLiteral(new AST.TypeLiteral([new AST.PropertySignature(name, literals, false, true)], []), input, [new Key(name, new Type(literals, input[name]))])]); | ||
} | ||
} else { | ||
const literals = AST.Union.make(searchTree.keys[name].literals); | ||
es.push([stepKey++, new TypeLiteral(new AST.TypeLiteral([new AST.PropertySignature(name, literals, false, true)], []), input, [new Key(name, missing)])]); | ||
} | ||
} | ||
} else { | ||
es.push([stepKey++, new Type(ast, input)]); | ||
} | ||
} | ||
if (searchTree.otherwise.length > 0) { | ||
candidates = candidates.concat(searchTree.otherwise); | ||
} | ||
let queue = undefined; | ||
for (let i = 0; i < candidates.length; i++) { | ||
const candidate = candidates[i]; | ||
const pr = map.get(candidate)(input, options); | ||
// the members of a union are ordered based on which one should be decoded first, | ||
// therefore if one member has added a task, all subsequent members must | ||
// also add a task to the queue even if they are synchronous | ||
const eu = !queue || queue.length === 0 ? eitherOrUndefined(pr) : undefined; | ||
if (eu) { | ||
if (Either.isRight(eu)) { | ||
return Either.right(eu.right); | ||
} else { | ||
es.push([stepKey++, new Member(candidate, eu.left)]); | ||
} | ||
} else { | ||
const nk = stepKey++; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(state => Effect.suspend(() => { | ||
if ("finalResult" in state) { | ||
return Effect.void; | ||
} else { | ||
return Effect.flatMap(Effect.either(pr), t => { | ||
if (Either.isRight(t)) { | ||
state.finalResult = Either.right(t.right); | ||
} else { | ||
state.es.push([nk, new Member(candidate, t.left)]); | ||
} | ||
return Effect.void; | ||
}); | ||
} | ||
})); | ||
} | ||
} | ||
// --------------------------------------------- | ||
// compute result | ||
// --------------------------------------------- | ||
const computeResult = es => Arr.isNonEmptyArray(es) ? es.length === 1 && es[0][1]._tag === "Type" ? Either.left(es[0][1]) : Either.left(new Union(ast, input, sortByIndex(es))) : | ||
// this should never happen | ||
Either.left(new Type(AST.neverKeyword, input)); | ||
if (queue && queue.length > 0) { | ||
const cqueue = queue; | ||
return Effect.suspend(() => { | ||
const state = { | ||
es: Arr.copy(es) | ||
}; | ||
return Effect.flatMap(Effect.forEach(cqueue, f => f(state), { | ||
concurrency, | ||
batching, | ||
discard: true | ||
}), () => { | ||
if ("finalResult" in state) { | ||
return state.finalResult; | ||
} | ||
return computeResult(state.es); | ||
}); | ||
}); | ||
} | ||
return computeResult(es); | ||
}; | ||
} | ||
case "Suspend": | ||
{ | ||
const get = util_.memoizeThunk(() => goMemo(AST.annotations(ast.f(), ast.annotations), isDecoding)); | ||
return (a, options) => get()(a, options); | ||
} | ||
} | ||
}; | ||
const fromRefinement = (ast, refinement) => u => refinement(u) ? Either.right(u) : Either.left(new Type(ast, u)); | ||
/** @internal */ | ||
const getLiterals = (ast, isDecoding) => { | ||
switch (ast._tag) { | ||
case "Declaration": | ||
{ | ||
const annotation = AST.getSurrogateAnnotation(ast); | ||
if (Option.isSome(annotation)) { | ||
return getLiterals(annotation.value, isDecoding); | ||
} | ||
break; | ||
} | ||
case "TypeLiteral": | ||
{ | ||
const out = []; | ||
for (let i = 0; i < ast.propertySignatures.length; i++) { | ||
const propertySignature = ast.propertySignatures[i]; | ||
const type = isDecoding ? AST.encodedAST(propertySignature.type) : AST.typeAST(propertySignature.type); | ||
if (AST.isLiteral(type) && !propertySignature.isOptional) { | ||
out.push([propertySignature.name, type]); | ||
} | ||
} | ||
return out; | ||
} | ||
case "Refinement": | ||
return getLiterals(ast.from, isDecoding); | ||
case "Suspend": | ||
return getLiterals(ast.f(), isDecoding); | ||
case "Transformation": | ||
return getLiterals(isDecoding ? ast.from : ast.to, isDecoding); | ||
} | ||
return []; | ||
}; | ||
/** | ||
* The purpose of the algorithm is to narrow down the pool of possible candidates for decoding as much as possible. | ||
* | ||
* This function separates the schemas into two groups, `keys` and `otherwise`: | ||
* | ||
* - `keys`: the schema has at least one property with a literal value | ||
* - `otherwise`: the schema has no properties with a literal value | ||
* | ||
* If a schema has at least one property with a literal value (and therefore ends up in `keys`), a namespace is first | ||
* created for the name of the property containing the literal, and then within this namespace a "bucket" is created | ||
* for the literal value, in which all the schemas that share the same property and literal value are stored. | ||
* | ||
* @internal | ||
*/ | ||
exports.getLiterals = getLiterals; | ||
const getSearchTree = (members, isDecoding) => { | ||
const keys = {}; | ||
const otherwise = []; | ||
for (let i = 0; i < members.length; i++) { | ||
const member = members[i]; | ||
const tags = getLiterals(member, isDecoding); | ||
if (tags.length > 0) { | ||
for (let j = 0; j < tags.length; j++) { | ||
const [key, literal] = tags[j]; | ||
const hash = String(literal.literal); | ||
keys[key] = keys[key] || { | ||
buckets: {}, | ||
literals: [] | ||
}; | ||
const buckets = keys[key].buckets; | ||
if (Object.prototype.hasOwnProperty.call(buckets, hash)) { | ||
if (j < tags.length - 1) { | ||
continue; | ||
} | ||
buckets[hash].push(member); | ||
keys[key].literals.push(literal); | ||
} else { | ||
buckets[hash] = [member]; | ||
keys[key].literals.push(literal); | ||
break; | ||
} | ||
} | ||
} else { | ||
otherwise.push(member); | ||
} | ||
} | ||
return { | ||
keys, | ||
otherwise | ||
}; | ||
}; | ||
exports.getSearchTree = getSearchTree; | ||
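/** | ||
 * Illustrative note (editor's sketch with hypothetical members, not part of the | ||
 * published source): for a union of two tagged structs | ||
 * | ||
 *   { _tag: "Circle", radius: number } | { _tag: "Square", side: number } | ||
 * | ||
 * `getSearchTree` produces roughly | ||
 * | ||
 *   { | ||
 *     keys: { | ||
 *       _tag: { | ||
 *         buckets: { Circle: [CircleAST], Square: [SquareAST] }, | ||
 *         literals: [Literal("Circle"), Literal("Square")] | ||
 *       } | ||
 *     }, | ||
 *     otherwise: []   // members without any literal property would be collected here | ||
 *   } | ||
 * | ||
 * so the Union parser only needs to try the single candidate stored in the | ||
 * bucket matching the input's `_tag` value. | ||
 */ | ||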
const dropRightRefinement = ast => AST.isRefinement(ast) ? dropRightRefinement(ast.from) : ast; | ||
const handleForbidden = (effect, ast, actual, options) => { | ||
const eu = eitherOrUndefined(effect); | ||
if (eu) { | ||
return eu; | ||
} | ||
if (options?.isEffectAllowed === true) { | ||
return effect; | ||
} | ||
try { | ||
return Effect.runSync(Effect.either(effect)); | ||
} catch (e) { | ||
return Either.left(new Forbidden(ast, actual, "cannot be resolved synchronously, this is caused by using runSync on an effect that performs async work")); | ||
} | ||
}; | ||
function sortByIndex(es) { | ||
return es.sort(([a], [b]) => a > b ? 1 : a < b ? -1 : 0).map(([_, a]) => a); | ||
} | ||
// ------------------------------------------------------------------------------------- | ||
// transformations interpreter | ||
// ------------------------------------------------------------------------------------- | ||
/** @internal */ | ||
const getFinalTransformation = (transformation, isDecoding) => { | ||
switch (transformation._tag) { | ||
case "FinalTransformation": | ||
return isDecoding ? transformation.decode : transformation.encode; | ||
case "ComposeTransformation": | ||
return Either.right; | ||
case "TypeLiteralTransformation": | ||
return input => { | ||
let out = Either.right(input); | ||
// --------------------------------------------- | ||
// handle property signature transformations | ||
// --------------------------------------------- | ||
for (const pst of transformation.propertySignatureTransformations) { | ||
const [from, to] = isDecoding ? [pst.from, pst.to] : [pst.to, pst.from]; | ||
const transformation = isDecoding ? pst.decode : pst.encode; | ||
const f = input => { | ||
const o = transformation(Object.prototype.hasOwnProperty.call(input, from) ? Option.some(input[from]) : Option.none()); | ||
delete input[from]; | ||
if (Option.isSome(o)) { | ||
input[to] = o.value; | ||
} | ||
return input; | ||
}; | ||
out = map(out, f); | ||
} | ||
return out; | ||
}; | ||
} | ||
}; | ||
exports.getFinalTransformation = getFinalTransformation; | ||
//# sourceMappingURL=ParseResult.js.map |
@@ -7,9 +7,8 @@ "use strict"; | ||
exports.pretty = exports.match = exports.make = exports.PrettyHookId = void 0; | ||
var Arr = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Array")); | ||
var Option = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Option")); | ||
var ReadonlyArray = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/ReadonlyArray")); | ||
var AST = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./AST.js")); | ||
var Internal = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/ast.js")); | ||
var hooks = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/hooks.js")); | ||
var InternalSchema = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/schema.js")); | ||
var Parser = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./Parser.js")); | ||
var errors_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/errors.js")); | ||
var util_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/util.js")); | ||
var ParseResult = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./ParseResult.js")); | ||
function _getRequireWildcardCache(e) { | ||
@@ -34,3 +33,3 @@ if ("function" != typeof WeakMap) return null; | ||
a = Object.defineProperty && Object.getOwnPropertyDescriptor; | ||
for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { | ||
for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { | ||
var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; | ||
@@ -49,3 +48,3 @@ i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; | ||
*/ | ||
const PrettyHookId = exports.PrettyHookId = hooks.PrettyHookId; | ||
const PrettyHookId = exports.PrettyHookId = /*#__PURE__*/Symbol.for("@effect/schema/PrettyHookId"); | ||
/** | ||
@@ -55,3 +54,5 @@ * @category annotations | ||
*/ | ||
const pretty = handler => self => InternalSchema.make(AST.setAnnotation(self.ast, PrettyHookId, handler)); | ||
const pretty = handler => self => self.annotations({ | ||
[PrettyHookId]: handler | ||
}); | ||
/** | ||
@@ -62,3 +63,3 @@ * @category prettify | ||
exports.pretty = pretty; | ||
const make = schema => compile(schema.ast); | ||
const make = schema => compile(schema.ast, []); | ||
exports.make = make; | ||
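/** | ||
 * Usage sketch (editor's note; `schema` is illustrative, assumed to describe a | ||
 * struct with a single string field `name`): | ||
 * | ||
 *   const print = make(schema) | ||
 *   print({ name: "Alice" })   // '{ "name": "Alice" }' | ||
 */ | ||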
@@ -72,3 +73,4 @@ const getHook = /*#__PURE__*/AST.getAnnotation(PrettyHookId); | ||
const stringify = /*#__PURE__*/getMatcher(a => JSON.stringify(a)); | ||
const formatUnknown = /*#__PURE__*/getMatcher(AST.formatUnknown); | ||
const formatUnknown = /*#__PURE__*/getMatcher(util_.formatUnknown); | ||
const getPrettyErrorMessage = (message, path) => errors_.getErrorMessageWithPath(`cannot build a Pretty for ${message}`, path); | ||
/** | ||
@@ -78,8 +80,8 @@ * @since 1.0.0 | ||
const match = exports.match = { | ||
"Declaration": (ast, go) => { | ||
"Declaration": (ast, go, path) => { | ||
const hook = getHook(ast); | ||
if (Option.isSome(hook)) { | ||
return hook.value(...ast.typeParameters.map(go)); | ||
return hook.value(...ast.typeParameters.map(tp => go(tp, path))); | ||
} | ||
throw new Error(`cannot build a Pretty for a declaration without annotations (${AST.format(ast)})`); | ||
throw new Error(getPrettyErrorMessage(`a declaration without annotations (${ast})`, path)); | ||
}, | ||
@@ -103,3 +105,3 @@ "VoidKeyword": /*#__PURE__*/getMatcher(() => "void(0)"), | ||
"Enums": stringify, | ||
"Tuple": (ast, go) => { | ||
"TupleType": (ast, go, path) => { | ||
const hook = getHook(ast); | ||
@@ -109,4 +111,4 @@ if (Option.isSome(hook)) { | ||
} | ||
const elements = ast.elements.map(e => go(e.type)); | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(go)); | ||
const elements = ast.elements.map((e, i) => go(e.type, path.concat(i))); | ||
const rest = ast.rest.map(ast => go(ast, path)); | ||
return input => { | ||
@@ -130,4 +132,4 @@ const output = []; | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const [head, ...tail] = rest.value; | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest; | ||
for (; i < input.length - tail.length; i++) { | ||
@@ -147,3 +149,3 @@ output.push(head(input[i])); | ||
}, | ||
"TypeLiteral": (ast, go) => { | ||
"TypeLiteral": (ast, go, path) => { | ||
const hook = getHook(ast); | ||
@@ -153,4 +155,4 @@ if (Option.isSome(hook)) { | ||
} | ||
const propertySignaturesTypes = ast.propertySignatures.map(f => go(f.type)); | ||
const indexSignatureTypes = ast.indexSignatures.map(is => go(is.type)); | ||
const propertySignaturesTypes = ast.propertySignatures.map(ps => go(ps.type, path.concat(ps.name))); | ||
const indexSignatureTypes = ast.indexSignatures.map(is => go(is.type, path)); | ||
const expectedKeys = {}; | ||
@@ -171,3 +173,3 @@ for (let i = 0; i < propertySignaturesTypes.length; i++) { | ||
} | ||
output.push(`${getPrettyPropertyKey(name)}: ${propertySignaturesTypes[i](input[name])}`); | ||
output.push(`${util_.formatPropertyKey(name)}: ${propertySignaturesTypes[i](input[name])}`); | ||
} | ||
@@ -180,3 +182,3 @@ // --------------------------------------------- | ||
const type = indexSignatureTypes[i]; | ||
const keys = Internal.getKeysForIndexSignature(input, ast.indexSignatures[i].parameter); | ||
const keys = util_.getKeysForIndexSignature(input, ast.indexSignatures[i].parameter); | ||
for (const key of keys) { | ||
@@ -186,10 +188,10 @@ if (Object.prototype.hasOwnProperty.call(expectedKeys, key)) { | ||
} | ||
output.push(`${getPrettyPropertyKey(key)}: ${type(input[key])}`); | ||
output.push(`${util_.formatPropertyKey(key)}: ${type(input[key])}`); | ||
} | ||
} | ||
} | ||
return ReadonlyArray.isNonEmptyReadonlyArray(output) ? "{ " + output.join(", ") + " }" : "{}"; | ||
return Arr.isNonEmptyReadonlyArray(output) ? "{ " + output.join(", ") + " }" : "{}"; | ||
}; | ||
}, | ||
"Union": (ast, go) => { | ||
"Union": (ast, go, path) => { | ||
const hook = getHook(ast); | ||
@@ -199,3 +201,5 @@ if (Option.isSome(hook)) { | ||
} | ||
const types = ast.types.map(ast => [Parser.is(InternalSchema.make(ast)), go(ast)]); | ||
const types = ast.types.map(ast => [ParseResult.is({ | ||
ast | ||
}), go(ast, path)]); | ||
return a => { | ||
@@ -206,6 +210,6 @@ const index = types.findIndex(([is]) => is(a)); | ||
}, | ||
"Suspend": (ast, go) => { | ||
"Suspend": (ast, go, path) => { | ||
return Option.match(getHook(ast), { | ||
onNone: () => { | ||
const get = Internal.memoizeThunk(() => go(ast.f())); | ||
const get = util_.memoizeThunk(() => go(ast.f(), path)); | ||
return a => get()(a); | ||
@@ -216,11 +220,11 @@ }, | ||
}, | ||
"Refinement": (ast, go) => { | ||
"Refinement": (ast, go, path) => { | ||
return Option.match(getHook(ast), { | ||
onNone: () => go(ast.from), | ||
onNone: () => go(ast.from, path), | ||
onSome: handler => handler() | ||
}); | ||
}, | ||
"Transform": (ast, go) => { | ||
"Transformation": (ast, go, path) => { | ||
return Option.match(getHook(ast), { | ||
onNone: () => go(ast.to), | ||
onNone: () => go(ast.to, path), | ||
onSome: handler => handler() | ||
@@ -231,3 +235,2 @@ }); | ||
const compile = /*#__PURE__*/AST.getCompiler(match); | ||
const getPrettyPropertyKey = name => typeof name === "string" ? JSON.stringify(name) : String(name); | ||
//# sourceMappingURL=Pretty.js.map |
@@ -9,3 +9,3 @@ "use strict"; | ||
var _GlobalValue = /*#__PURE__*/require("effect/GlobalValue"); | ||
var Internal = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/serializable.js")); | ||
var serializable_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/serializable.js")); | ||
var Schema = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./Schema.js")); | ||
@@ -31,3 +31,3 @@ function _getRequireWildcardCache(e) { | ||
a = Object.defineProperty && Object.getOwnPropertyDescriptor; | ||
for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { | ||
for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { | ||
var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; | ||
@@ -42,3 +42,3 @@ i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; | ||
*/ | ||
const symbol = exports.symbol = Internal.symbol; | ||
const symbol = exports.symbol = serializable_.symbol; | ||
/** | ||
@@ -54,3 +54,3 @@ * @since 1.0.0 | ||
exports.selfSchema = selfSchema; | ||
const symbolResult = exports.symbolResult = Internal.symbolResult; | ||
const symbolResult = exports.symbolResult = serializable_.symbolResult; | ||
/** | ||
@@ -76,7 +76,13 @@ * @since 1.0.0 | ||
if (!(symbolResult in proto)) { | ||
return Schema.exit(failureSchema(self), successSchema(self)); | ||
return Schema.Exit({ | ||
failure: failureSchema(self), | ||
success: successSchema(self) | ||
}); | ||
} | ||
let schema = exitSchemaCache.get(proto); | ||
if (schema === undefined) { | ||
schema = Schema.exit(failureSchema(self), successSchema(self)); | ||
schema = Schema.Exit({ | ||
failure: failureSchema(self), | ||
success: successSchema(self) | ||
}); | ||
exitSchemaCache.set(proto, schema); | ||
@@ -83,0 +89,0 @@ } |
@@ -6,5 +6,8 @@ "use strict"; | ||
}); | ||
exports.getTransformMessage = exports.getRefinementMessage = exports.getMessage = exports.formatTypeMessage = exports.formatIssues = exports.formatIssue = exports.formatForbiddenMessage = exports.formatError = void 0; | ||
exports.getMessage = exports.formatTypeMessage = exports.formatIssueSync = exports.formatIssue = exports.formatForbiddenMessage = exports.formatErrorSync = exports.formatError = void 0; | ||
var Effect = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Effect")); | ||
var Option = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Option")); | ||
var Predicate = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("effect/Predicate")); | ||
var AST = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./AST.js")); | ||
var util_ = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("./internal/util.js")); | ||
function _getRequireWildcardCache(e) { | ||
@@ -29,3 +32,3 @@ if ("function" != typeof WeakMap) return null; | ||
a = Object.defineProperty && Object.getOwnPropertyDescriptor; | ||
for (var u in e) if ("default" !== u && Object.prototype.hasOwnProperty.call(e, u)) { | ||
for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { | ||
var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; | ||
@@ -48,6 +51,3 @@ i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; | ||
*/ | ||
const formatIssues = issues => { | ||
const forest = issues.map(go); | ||
return drawTree(forest.length === 1 ? forest[0] : make(`error(s) found`, issues.map(go))); | ||
}; | ||
const formatIssue = issue => Effect.map(go(issue), tree => drawTree(tree)); | ||
/** | ||
@@ -57,4 +57,4 @@ * @category formatting | ||
*/ | ||
exports.formatIssues = formatIssues; | ||
const formatIssue = issue => formatIssues([issue]); | ||
exports.formatIssue = formatIssue; | ||
const formatIssueSync = issue => Effect.runSync(formatIssue(issue)); | ||
/** | ||
@@ -64,5 +64,11 @@ * @category formatting | ||
*/ | ||
exports.formatIssue = formatIssue; | ||
exports.formatIssueSync = formatIssueSync; | ||
const formatError = error => formatIssue(error.error); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
exports.formatError = formatError; | ||
const formatErrorSync = error => formatIssueSync(error.error); | ||
exports.formatErrorSync = formatErrorSync; | ||
const drawTree = tree => tree.value + draw("\n", tree.forest); | ||
@@ -83,8 +89,8 @@ const draw = (indentation, forest) => { | ||
switch (kind) { | ||
case "From": | ||
return "From side transformation failure"; | ||
case "Encoded": | ||
return "Encoded side transformation failure"; | ||
case "Transformation": | ||
return "Transformation process failure"; | ||
case "To": | ||
return "To side transformation failure"; | ||
case "Type": | ||
return "Type side transformation failure"; | ||
} | ||
@@ -100,90 +106,89 @@ }; | ||
}; | ||
const getInnerMessage = issue => { | ||
switch (issue._tag) { | ||
case "Refinement": | ||
{ | ||
if (issue.kind === "From") { | ||
return getMessage(issue.error); | ||
} | ||
break; | ||
} | ||
case "Transformation": | ||
{ | ||
return getMessage(issue.error); | ||
} | ||
} | ||
return Option.none(); | ||
}; | ||
const getCurrentMessage = issue => AST.getMessageAnnotation(issue.ast).pipe(Effect.flatMap(annotation => { | ||
const out = annotation(issue); | ||
return Predicate.isString(out) ? Effect.succeed({ | ||
message: out, | ||
override: false | ||
}) : Effect.isEffect(out) ? Effect.map(out, message => ({ | ||
message, | ||
override: false | ||
})) : Predicate.isString(out.message) ? Effect.succeed({ | ||
message: out.message, | ||
override: out.override | ||
}) : Effect.map(out.message, message => ({ | ||
message, | ||
override: out.override | ||
})); | ||
})); | ||
/** @internal */ | ||
const getMessage = (ast, actual) => { | ||
return AST.getMessageAnnotation(ast).pipe(Option.map(annotation => annotation(actual))); | ||
const getMessage = issue => { | ||
const current = getCurrentMessage(issue); | ||
return getInnerMessage(issue).pipe(Effect.flatMap(inner => Effect.map(current, current => current.override ? current.message : inner)), Effect.catchAll(() => Effect.flatMap(current, current => { | ||
if (!current.override && (issue._tag === "Refinement" && issue.kind !== "Predicate" || issue._tag === "Transformation" && issue.kind !== "Transformation")) { | ||
return Option.none(); | ||
} | ||
return Effect.succeed(current.message); | ||
}))); | ||
}; | ||
/** @internal */ | ||
exports.getMessage = getMessage; | ||
const formatTypeMessage = e => getMessage(e.ast, e.actual).pipe(Option.orElse(() => e.message), Option.getOrElse(() => `Expected ${AST.format(e.ast, true)}, actual ${AST.formatUnknown(e.actual)}`)); | ||
const getParseIssueTitleAnnotation = issue => Option.filterMap(AST.getParseIssueTitleAnnotation(issue.ast), annotation => Option.fromNullable(annotation(issue))); | ||
/** @internal */ | ||
const formatTypeMessage = e => getMessage(e).pipe(Effect.orElse(() => getParseIssueTitleAnnotation(e)), Effect.orElse(() => e.message), Effect.catchAll(() => Effect.succeed(`Expected ${e.ast.toString(true)}, actual ${util_.formatUnknown(e.actual)}`))); | ||
exports.formatTypeMessage = formatTypeMessage; | ||
const getParseIssueTitle = issue => Option.getOrElse(getParseIssueTitleAnnotation(issue), () => String(issue.ast)); | ||
/** @internal */ | ||
const formatForbiddenMessage = e => Option.getOrElse(e.message, () => "is forbidden"); | ||
exports.formatForbiddenMessage = formatForbiddenMessage; | ||
const getParseIsssueMessage = (issue, orElse) => { | ||
switch (issue._tag) { | ||
case "Refinement": | ||
return Option.orElse(getRefinementMessage(issue, issue.actual), orElse); | ||
case "Transform": | ||
return Option.orElse(getTransformMessage(issue, issue.actual), orElse); | ||
case "Tuple": | ||
case "TypeLiteral": | ||
case "Union": | ||
case "Type": | ||
return Option.orElse(getMessage(issue.ast, issue.actual), orElse); | ||
} | ||
return orElse(); | ||
}; | ||
/** @internal */ | ||
const getRefinementMessage = (e, actual) => { | ||
if (e.kind === "From") { | ||
return getParseIsssueMessage(e.error, () => getMessage(e.ast, actual)); | ||
} | ||
return getMessage(e.ast, actual); | ||
}; | ||
/** @internal */ | ||
exports.getRefinementMessage = getRefinementMessage; | ||
const getTransformMessage = (e, actual) => { | ||
return getParseIsssueMessage(e.error, () => getMessage(e.ast, actual)); | ||
}; | ||
exports.getTransformMessage = getTransformMessage; | ||
const getTree = (issue, onFailure) => Effect.matchEffect(getMessage(issue), { | ||
onFailure, | ||
onSuccess: message => Effect.succeed(make(message)) | ||
}); | ||
const go = e => { | ||
switch (e._tag) { | ||
case "Type": | ||
return make(formatTypeMessage(e)); | ||
return Effect.map(formatTypeMessage(e), make); | ||
case "Forbidden": | ||
return make(AST.format(e.ast), [make(formatForbiddenMessage(e))]); | ||
return Effect.succeed(make(getParseIssueTitle(e), [make(formatForbiddenMessage(e))])); | ||
case "Unexpected": | ||
return make(`is unexpected, expected ${AST.format(e.ast, true)}`); | ||
return Effect.succeed(make(`is unexpected, expected ${e.ast.toString(true)}`)); | ||
case "Missing": | ||
return make("is missing"); | ||
return Effect.succeed(make("is missing")); | ||
case "Union": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => make(AST.format(e.ast), e.errors.map(e => { | ||
switch (e._tag) { | ||
case "Member": | ||
return make(`Union member`, [go(e.error)]); | ||
default: | ||
return go(e); | ||
} | ||
})), | ||
onSome: make | ||
}); | ||
case "Tuple": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => make(AST.format(e.ast), e.errors.map(index => make(`[${index.index}]`, [go(index.error)]))), | ||
onSome: make | ||
}); | ||
return getTree(e, () => Effect.map(Effect.forEach(e.errors, e => { | ||
switch (e._tag) { | ||
case "Member": | ||
return Effect.map(go(e.error), tree => make(`Union member`, [tree])); | ||
default: | ||
return go(e); | ||
} | ||
}), forest => make(getParseIssueTitle(e), forest))); | ||
case "TupleType": | ||
return getTree(e, () => Effect.map(Effect.forEach(e.errors, index => Effect.map(go(index.error), tree => make(`[${util_.formatPropertyKey(index.index)}]`, [tree]))), forest => make(getParseIssueTitle(e), forest))); | ||
case "TypeLiteral": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => make(AST.format(e.ast), e.errors.map(key => make(`[${AST.formatUnknown(key.key)}]`, [go(key.error)]))), | ||
onSome: make | ||
}); | ||
case "Transform": | ||
return Option.match(getTransformMessage(e, e.actual), { | ||
onNone: () => make(AST.format(e.ast), [make(formatTransformationKind(e.kind), [go(e.error)])]), | ||
onSome: make | ||
}); | ||
return getTree(e, () => Effect.map(Effect.forEach(e.errors, key => Effect.map(go(key.error), tree => make(`[${util_.formatPropertyKey(key.key)}]`, [tree]))), forest => make(getParseIssueTitle(e), forest))); | ||
case "Transformation": | ||
return getTree(e, () => Effect.map(go(e.error), tree => make(getParseIssueTitle(e), [make(formatTransformationKind(e.kind), [tree])]))); | ||
case "Refinement": | ||
return Option.match(getRefinementMessage(e, e.actual), { | ||
onNone: () => make(AST.format(e.ast), [make(formatRefinementKind(e.kind), [go(e.error)])]), | ||
onSome: make | ||
}); | ||
return getTree(e, () => Effect.map(go(e.error), tree => make(getParseIssueTitle(e), [make(formatRefinementKind(e.kind), [tree])]))); | ||
case "Declaration": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => { | ||
const error = e.error; | ||
const shouldSkipDefaultMessage = error._tag === "Type" && error.ast === e.ast; | ||
return shouldSkipDefaultMessage ? go(error) : make(AST.format(e.ast), [go(e.error)]); | ||
}, | ||
onSome: make | ||
return getTree(e, () => { | ||
const error = e.error; | ||
const shouldSkipDefaultMessage = error._tag === "Type" && error.ast === e.ast; | ||
return shouldSkipDefaultMessage ? go(error) : Effect.map(go(error), tree => make(getParseIssueTitle(e), [tree])); | ||
}); | ||
@@ -190,0 +195,0 @@ } |
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import type * as FastCheck from "fast-check"; | ||
import * as FastCheck from "./FastCheck.js"; | ||
import type * as Schema from "./Schema.js"; | ||
@@ -11,3 +10,3 @@ /** | ||
*/ | ||
export interface Arbitrary<A> { | ||
export interface LazyArbitrary<A> { | ||
(fc: typeof FastCheck): FastCheck.Arbitrary<A>; | ||
@@ -29,4 +28,11 @@ } | ||
*/ | ||
export declare const arbitrary: <A>(handler: (...args: ReadonlyArray<Arbitrary<any>>) => Arbitrary<A>) => <I, R>(self: Schema.Schema<A, I, R>) => Schema.Schema<A, I, R>; | ||
export declare const arbitrary: <A>(handler: (...args: ReadonlyArray<LazyArbitrary<any>>) => LazyArbitrary<A>) => <I, R>(self: Schema.Schema<A, I, R>) => Schema.Schema<A, I, R>; | ||
/** | ||
* Returns a LazyArbitrary for the `A` type of the provided schema. | ||
* | ||
* @category arbitrary | ||
* @since 1.0.0 | ||
*/ | ||
export declare const makeLazy: <A, I, R>(schema: Schema.Schema<A, I, R>) => LazyArbitrary<A>; | ||
/** | ||
* Returns a fast-check Arbitrary for the `A` type of the provided schema. | ||
@@ -37,3 +43,3 @@ * | ||
*/ | ||
export declare const make: <A, I, R>(schema: Schema.Schema<A, I, R>) => Arbitrary<A>; | ||
export declare const make: <A, I, R>(schema: Schema.Schema<A, I, R>) => FastCheck.Arbitrary<A>; | ||
//# sourceMappingURL=Arbitrary.d.ts.map |
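A hedged usage sketch for the two entry points declared above. The `Schema.struct`/`Schema.string` constructors and the module paths are assumptions based on neighbouring releases of this package; `FastCheck` is the re-export declared in the index module further down.

import * as Arbitrary from "@effect/schema/Arbitrary"
import * as FastCheck from "@effect/schema/FastCheck"
import * as Schema from "@effect/schema/Schema"

const Person = Schema.struct({ name: Schema.string, age: Schema.number })

// `make` now returns a ready-to-run fast-check Arbitrary<A> directly
const personArb = Arbitrary.make(Person)
console.log(FastCheck.sample(personArb, 2))

// `makeLazy` returns the LazyArbitrary form, (fc) => Arbitrary<A>,
// useful when the fast-check instance should be supplied by the caller
const lazyPerson = Arbitrary.makeLazy(Person)
const personArb2 = lazyPerson(FastCheck)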
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Effect from "effect/Effect"; | ||
import type * as ParseResult from "./ParseResult.js"; | ||
@@ -10,3 +11,3 @@ /** | ||
export interface Issue { | ||
readonly _tag: ParseResult.ParseIssue["_tag"] | ParseResult.Missing["_tag"] | ParseResult.Unexpected["_tag"]; | ||
readonly _tag: "Transformation" | "Type" | "Declaration" | "Refinement" | "TupleType" | "TypeLiteral" | "Union" | "Forbidden" | "Missing" | "Unexpected"; | ||
readonly path: ReadonlyArray<PropertyKey>; | ||
@@ -19,3 +20,3 @@ readonly message: string; | ||
*/ | ||
export declare const formatIssues: (issues: readonly [ParseResult.ParseIssue, ...ParseResult.ParseIssue[]]) => Array<Issue>; | ||
export declare const formatIssue: (issue: ParseResult.ParseIssue) => Effect.Effect<Array<Issue>>; | ||
/** | ||
@@ -25,3 +26,3 @@ * @category formatting | ||
*/ | ||
export declare const formatIssue: (error: ParseResult.ParseIssue) => Array<Issue>; | ||
export declare const formatIssueSync: (issue: ParseResult.ParseIssue) => Array<Issue>; | ||
/** | ||
@@ -31,3 +32,8 @@ * @category formatting | ||
*/ | ||
export declare const formatError: (error: ParseResult.ParseError) => Array<Issue>; | ||
export declare const formatError: (error: ParseResult.ParseError) => Effect.Effect<Array<Issue>>; | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export declare const formatErrorSync: (error: ParseResult.ParseError) => Array<Issue>; | ||
//# sourceMappingURL=ArrayFormatter.d.ts.map |
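A hedged sketch of the sync/effectful split declared above; `Schema.struct` and `Schema.decodeUnknownEither` are assumed from other modules of this package.

import * as ArrayFormatter from "@effect/schema/ArrayFormatter"
import * as Schema from "@effect/schema/Schema"
import * as Either from "effect/Either"

const schema = Schema.struct({ a: Schema.number })
const result = Schema.decodeUnknownEither(schema)({ a: "x" }, { errors: "all" })

if (Either.isLeft(result)) {
  // formatErrorSync returns Array<Issue> directly (and is only safe when no message
  // annotation needs an async Effect); formatError returns Effect<Array<Issue>> instead
  console.log(ArrayFormatter.formatErrorSync(result.left))
  // e.g. [{ _tag: "Type", path: ["a"], message: "Expected ..., actual \"x\"" }]
}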
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Arr from "effect/Array"; | ||
import type { Effect } from "effect/Effect"; | ||
import * as Option from "effect/Option"; | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import type { Concurrency } from "effect/Types"; | ||
import type { ParseIssue } from "./ParseResult.js"; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export type AST = Declaration | Literal | UniqueSymbol | UndefinedKeyword | VoidKeyword | NeverKeyword | UnknownKeyword | AnyKeyword | StringKeyword | NumberKeyword | BooleanKeyword | BigIntKeyword | SymbolKeyword | ObjectKeyword | Enums | TemplateLiteral | Refinement | TupleType | TypeLiteral | Union | Suspend | Transformation; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export type BrandAnnotation = ReadonlyArray<string>; | ||
export type BrandAnnotation = Arr.NonEmptyReadonlyArray<string | symbol>; | ||
/** | ||
@@ -32,3 +38,6 @@ * @category annotations | ||
*/ | ||
export type MessageAnnotation<A> = (a: A) => string; | ||
export type MessageAnnotation = (issue: ParseIssue) => string | Effect<string> | { | ||
readonly message: string | Effect<string>; | ||
readonly override: boolean; | ||
}; | ||
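A hedged sketch of the three shapes a `MessageAnnotation` handler may now return; it assumes the Schema-level `annotations` method accepts a `message` key, as in neighbouring releases of this package.

import * as Effect from "effect/Effect"
import * as Schema from "@effect/schema/Schema"

// plain string: applies when this node's own check fails; inner issues keep their messages
const A = Schema.number.annotations({
  message: () => "expected a number"
})

// Effect<string>: useful for deferred/async lookups such as i18n catalogs
const B = Schema.number.annotations({
  message: () => Effect.succeed("expected a number")
})

// object form: with `override: true` the message also replaces messages from inner issues
const C = Schema.number.annotations({
  message: () => ({ message: "expected a number", override: true })
})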
/** | ||
@@ -73,3 +82,3 @@ * @category annotations | ||
*/ | ||
export type ExamplesAnnotation = ReadonlyArray<unknown>; | ||
export type ExamplesAnnotation<A> = Arr.NonEmptyReadonlyArray<A>; | ||
/** | ||
@@ -84,3 +93,3 @@ * @category annotations | ||
*/ | ||
export type DefaultAnnotation = unknown; | ||
export type DefaultAnnotation<A> = A; | ||
/** | ||
@@ -115,2 +124,32 @@ * @category annotations | ||
*/ | ||
export type ConcurrencyAnnotation = Concurrency | undefined; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export declare const ConcurrencyAnnotationId: unique symbol; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export type BatchingAnnotation = boolean | "inherit" | undefined; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export declare const BatchingAnnotationId: unique symbol; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export type ParseIssueTitleAnnotation = (issue: ParseIssue) => string | undefined; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export declare const ParseIssueTitleAnnotationId: unique symbol; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export interface Annotations { | ||
@@ -138,3 +177,3 @@ readonly [_: symbol]: unknown; | ||
*/ | ||
export declare const getMessageAnnotation: (annotated: Annotated) => Option.Option<MessageAnnotation<unknown>>; | ||
export declare const getBrandAnnotation: (annotated: Annotated) => Option.Option<readonly [string | symbol, ...(string | symbol)[]]>; | ||
/** | ||
@@ -144,2 +183,7 @@ * @category annotations | ||
*/ | ||
export declare const getMessageAnnotation: (annotated: Annotated) => Option.Option<MessageAnnotation>; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export declare const getTitleAnnotation: (annotated: Annotated) => Option.Option<string>; | ||
@@ -160,3 +204,3 @@ /** | ||
*/ | ||
export declare const getExamplesAnnotation: (annotated: Annotated) => Option.Option<ExamplesAnnotation>; | ||
export declare const getExamplesAnnotation: (annotated: Annotated) => Option.Option<readonly [unknown, ...unknown[]]>; | ||
/** | ||
@@ -173,22 +217,45 @@ * @category annotations | ||
/** | ||
* @category model | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export type AST = Declaration | Literal | UniqueSymbol | UndefinedKeyword | VoidKeyword | NeverKeyword | UnknownKeyword | AnyKeyword | StringKeyword | NumberKeyword | BooleanKeyword | BigIntKeyword | SymbolKeyword | ObjectKeyword | Enums | TemplateLiteral | Refinement | Tuple | TypeLiteral | Union | Suspend | Transform; | ||
export declare const getDocumentationAnnotation: (annotated: Annotated) => Option.Option<string>; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export declare const getConcurrencyAnnotation: (annotated: Annotated) => Option.Option<ConcurrencyAnnotation>; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export declare const getBatchingAnnotation: (annotated: Annotated) => Option.Option<BatchingAnnotation>; | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export declare const getParseIssueTitleAnnotation: (annotated: Annotated) => Option.Option<ParseIssueTitleAnnotation>; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface Declaration extends Annotated { | ||
readonly _tag: "Declaration"; | ||
export declare class Declaration implements Annotated { | ||
readonly typeParameters: ReadonlyArray<AST>; | ||
readonly decodeUnknown: (...typeParameters: ReadonlyArray<AST>) => (input: unknown, options: ParseOptions, self: Declaration) => Effect<any, ParseIssue, any>; | ||
readonly encodeUnknown: (...typeParameters: ReadonlyArray<AST>) => (input: unknown, options: ParseOptions, self: Declaration) => Effect<any, ParseIssue, any>; | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Declaration"; | ||
constructor(typeParameters: ReadonlyArray<AST>, decodeUnknown: (...typeParameters: ReadonlyArray<AST>) => (input: unknown, options: ParseOptions, self: Declaration) => Effect<any, ParseIssue, any>, encodeUnknown: (...typeParameters: ReadonlyArray<AST>) => (input: unknown, options: ParseOptions, self: Declaration) => Effect<any, ParseIssue, any>, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createDeclaration: (typeParameters: ReadonlyArray<AST>, decodeUnknown: Declaration["decodeUnknown"], encodeUnknown: Declaration["encodeUnknown"], annotations?: Annotations) => Declaration; | ||
/** | ||
* @category guards | ||
@@ -207,16 +274,31 @@ * @since 1.0.0 | ||
*/ | ||
export interface Literal extends Annotated { | ||
readonly _tag: "Literal"; | ||
export declare class Literal implements Annotated { | ||
readonly literal: LiteralValue; | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Literal"; | ||
constructor(literal: LiteralValue, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createLiteral: (literal: LiteralValue, annotations?: Annotations) => Literal; | ||
export declare const isLiteral: (ast: AST) => ast is Literal; | ||
declare const $null: Literal; | ||
export { | ||
/** | ||
* @category guards | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const isLiteral: (ast: AST) => ast is Literal; | ||
$null as null }; | ||
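A hedged sketch of the move from `interface + createLiteral` to a class: nodes are now built with `new`, and a prebuilt literal for `null` is exported directly (module path assumed).

import * as AST from "@effect/schema/AST"

const a = new AST.Literal("a")   // was: AST.createLiteral("a")
console.log(a._tag)              // "Literal"
console.log(String(a))           // toString() renders the literal, e.g. "a"
const nullLiteral = AST.null     // prebuilt Literal node for `null`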
/** | ||
@@ -226,12 +308,20 @@ * @category model | ||
*/ | ||
export interface UniqueSymbol extends Annotated { | ||
readonly _tag: "UniqueSymbol"; | ||
export declare class UniqueSymbol implements Annotated { | ||
readonly symbol: symbol; | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "UniqueSymbol"; | ||
constructor(symbol: symbol, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createUniqueSymbol: (symbol: symbol, annotations?: Annotations) => UniqueSymbol; | ||
/** | ||
* @category guards | ||
@@ -245,4 +335,17 @@ * @since 1.0.0 | ||
*/ | ||
export interface UndefinedKeyword extends Annotated { | ||
readonly _tag: "UndefinedKeyword"; | ||
export declare class UndefinedKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "UndefinedKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -263,4 +366,17 @@ /** | ||
*/ | ||
export interface VoidKeyword extends Annotated { | ||
readonly _tag: "VoidKeyword"; | ||
export declare class VoidKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "VoidKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -281,4 +397,17 @@ /** | ||
*/ | ||
export interface NeverKeyword extends Annotated { | ||
readonly _tag: "NeverKeyword"; | ||
export declare class NeverKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "NeverKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -299,4 +428,17 @@ /** | ||
*/ | ||
export interface UnknownKeyword extends Annotated { | ||
readonly _tag: "UnknownKeyword"; | ||
export declare class UnknownKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "UnknownKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -317,4 +459,17 @@ /** | ||
*/ | ||
export interface AnyKeyword extends Annotated { | ||
readonly _tag: "AnyKeyword"; | ||
export declare class AnyKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "AnyKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -335,4 +490,17 @@ /** | ||
*/ | ||
export interface StringKeyword extends Annotated { | ||
readonly _tag: "StringKeyword"; | ||
export declare class StringKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "StringKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -353,4 +521,17 @@ /** | ||
*/ | ||
export interface NumberKeyword extends Annotated { | ||
readonly _tag: "NumberKeyword"; | ||
export declare class NumberKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "NumberKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -371,4 +552,17 @@ /** | ||
*/ | ||
export interface BooleanKeyword extends Annotated { | ||
readonly _tag: "BooleanKeyword"; | ||
export declare class BooleanKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "BooleanKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -389,4 +583,17 @@ /** | ||
*/ | ||
export interface BigIntKeyword extends Annotated { | ||
readonly _tag: "BigIntKeyword"; | ||
export declare class BigIntKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "BigIntKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -407,4 +614,17 @@ /** | ||
*/ | ||
export interface SymbolKeyword extends Annotated { | ||
readonly _tag: "SymbolKeyword"; | ||
export declare class SymbolKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "SymbolKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -425,4 +645,17 @@ /** | ||
*/ | ||
export interface ObjectKeyword extends Annotated { | ||
readonly _tag: "ObjectKeyword"; | ||
export declare class ObjectKeyword implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "ObjectKeyword"; | ||
constructor(annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -443,12 +676,20 @@ /** | ||
*/ | ||
export interface Enums extends Annotated { | ||
readonly _tag: "Enums"; | ||
export declare class Enums implements Annotated { | ||
readonly enums: ReadonlyArray<readonly [string, string | number]>; | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Enums"; | ||
constructor(enums: ReadonlyArray<readonly [string, string | number]>, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createEnums: (enums: ReadonlyArray<readonly [string, string | number]>, annotations?: Annotations) => Enums; | ||
/** | ||
* @category guards | ||
@@ -459,7 +700,17 @@ * @since 1.0.0 | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface TemplateLiteralSpan { | ||
export declare class TemplateLiteralSpan { | ||
readonly type: StringKeyword | NumberKeyword; | ||
readonly literal: string; | ||
constructor(type: StringKeyword | NumberKeyword, literal: string); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(): "${string}" | "${number}"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -470,13 +721,22 @@ /** | ||
*/ | ||
export interface TemplateLiteral extends Annotated { | ||
readonly _tag: "TemplateLiteral"; | ||
export declare class TemplateLiteral implements Annotated { | ||
readonly head: string; | ||
readonly spans: ReadonlyArray.NonEmptyReadonlyArray<TemplateLiteralSpan>; | ||
readonly spans: Arr.NonEmptyReadonlyArray<TemplateLiteralSpan>; | ||
readonly annotations: Annotations; | ||
static make: (head: string, spans: ReadonlyArray<TemplateLiteralSpan>, annotations?: Annotations) => TemplateLiteral | Literal; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "TemplateLiteral"; | ||
private constructor(); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createTemplateLiteral: (head: string, spans: ReadonlyArray<TemplateLiteralSpan>, annotations?: Annotations) => TemplateLiteral | Literal; | ||
/** | ||
* @category guards | ||
@@ -487,36 +747,51 @@ * @since 1.0.0 | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface Element { | ||
export declare class Element { | ||
readonly type: AST; | ||
readonly isOptional: boolean; | ||
constructor(type: AST, isOptional: boolean); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(): string; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createElement: (type: AST, isOptional: boolean) => Element; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface Tuple extends Annotated { | ||
readonly _tag: "Tuple"; | ||
export declare class TupleType implements Annotated { | ||
readonly elements: ReadonlyArray<Element>; | ||
readonly rest: Option.Option<ReadonlyArray.NonEmptyReadonlyArray<AST>>; | ||
readonly rest: ReadonlyArray<AST>; | ||
readonly isReadonly: boolean; | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "TupleType"; | ||
constructor(elements: ReadonlyArray<Element>, rest: ReadonlyArray<AST>, isReadonly: boolean, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createTuple: (elements: ReadonlyArray<Element>, rest: Option.Option<ReadonlyArray.NonEmptyReadonlyArray<AST>>, isReadonly: boolean, annotations?: Annotations) => Tuple; | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export declare const isTuple: (ast: AST) => ast is Tuple; | ||
export declare const isTupleType: (ast: AST) => ast is TupleType; | ||
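A hedged sketch of the new `TupleType` node, built from the constructors declared above: the rest part is now a plain `ReadonlyArray<AST>` (empty array means "no rest element") instead of an `Option`, and construction goes through `new` rather than `createTuple`.

import * as AST from "@effect/schema/AST"

// models: readonly [string, ...number[]]
const tuple = new AST.TupleType(
  [new AST.Element(new AST.StringKeyword(), false)], // fixed elements
  [new AST.NumberKeyword()],                          // rest element(s); [] for none
  true                                                // isReadonly
)
console.log(AST.isTupleType(tuple)) // true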
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface PropertySignature extends Annotated { | ||
export declare class PropertySignature implements Annotated { | ||
readonly name: PropertyKey; | ||
@@ -526,2 +801,8 @@ readonly type: AST; | ||
readonly isReadonly: boolean; | ||
readonly annotations: Annotations; | ||
constructor(name: PropertyKey, type: AST, isOptional: boolean, isReadonly: boolean, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
@@ -531,6 +812,2 @@ /** | ||
*/ | ||
export declare const createPropertySignature: (name: PropertyKey, type: AST, isOptional: boolean, isReadonly: boolean, annotations?: Annotations) => PropertySignature; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export type Parameter = StringKeyword | SymbolKeyword | TemplateLiteral | Refinement<Parameter>; | ||
@@ -542,28 +819,47 @@ /** | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface IndexSignature { | ||
readonly parameter: Parameter; | ||
export declare class IndexSignature { | ||
readonly type: AST; | ||
readonly isReadonly: boolean; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly parameter: Parameter; | ||
constructor(parameter: AST, type: AST, isReadonly: boolean); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createIndexSignature: (parameter: AST, type: AST, isReadonly: boolean) => IndexSignature; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface TypeLiteral extends Annotated { | ||
readonly _tag: "TypeLiteral"; | ||
export declare class TypeLiteral implements Annotated { | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "TypeLiteral"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly propertySignatures: ReadonlyArray<PropertySignature>; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly indexSignatures: ReadonlyArray<IndexSignature>; | ||
constructor(propertySignatures: ReadonlyArray<PropertySignature>, indexSignatures: ReadonlyArray<IndexSignature>, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createTypeLiteral: (propertySignatures: ReadonlyArray<PropertySignature>, indexSignatures: ReadonlyArray<IndexSignature>, annotations?: Annotations) => TypeLiteral; | ||
/** | ||
* @category guards | ||
@@ -581,12 +877,21 @@ * @since 1.0.0 | ||
*/ | ||
export interface Union extends Annotated { | ||
readonly _tag: "Union"; | ||
export declare class Union implements Annotated { | ||
readonly types: Members<AST>; | ||
readonly annotations: Annotations; | ||
static make: (candidates: ReadonlyArray<AST>, annotations?: Annotations) => AST; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Union"; | ||
private constructor(); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createUnion: (candidates: ReadonlyArray<AST>, annotations?: Annotations) => AST; | ||
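A hedged sketch of the `Union` change: the constructor is private, so candidates go through the static `make` smart constructor, which (like the old `createUnion`) returns `AST` rather than `Union` and may return a non-Union node when fewer than two candidates are supplied.

import * as AST from "@effect/schema/AST"

const u = AST.Union.make([new AST.StringKeyword(), new AST.NumberKeyword()])
console.log(u._tag) // "Union"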
/** | ||
* @category guards | ||
@@ -600,12 +905,20 @@ * @since 1.0.0 | ||
*/ | ||
export interface Suspend extends Annotated { | ||
readonly _tag: "Suspend"; | ||
export declare class Suspend implements Annotated { | ||
readonly f: () => AST; | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Suspend"; | ||
constructor(f: () => AST, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createSuspend: (f: () => AST, annotations?: Annotations) => Suspend; | ||
/** | ||
* @category guards | ||
@@ -619,13 +932,21 @@ * @since 1.0.0 | ||
*/ | ||
export interface Refinement<From = AST> extends Annotated { | ||
readonly _tag: "Refinement"; | ||
export declare class Refinement<From extends AST = AST> implements Annotated { | ||
readonly from: From; | ||
readonly filter: (input: any, options: ParseOptions, self: Refinement) => Option.Option<ParseIssue>; | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Refinement"; | ||
constructor(from: From, filter: (input: any, options: ParseOptions, self: Refinement) => Option.Option<ParseIssue>, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createRefinement: <From extends AST>(from: From, filter: Refinement["filter"], annotations?: Annotations) => Refinement<From>; | ||
/** | ||
* @category guards | ||
@@ -646,21 +967,33 @@ * @since 1.0.0 | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export declare const defaultParseOption: ParseOptions; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface Transform extends Annotated { | ||
readonly _tag: "Transform"; | ||
export declare class Transformation implements Annotated { | ||
readonly from: AST; | ||
readonly to: AST; | ||
readonly transformation: Transformation; | ||
readonly transformation: TransformationKind; | ||
readonly annotations: Annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Transformation"; | ||
constructor(from: AST, to: AST, transformation: TransformationKind, annotations?: Annotations); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose?: boolean): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): object; | ||
} | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createTransform: (from: AST, to: AST, transformation: Transformation, annotations?: Annotations) => Transform; | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export declare const isTransform: (ast: AST) => ast is Transform; | ||
export declare const isTransformation: (ast: AST) => ast is Transformation; | ||
/** | ||
@@ -670,3 +1003,3 @@ * @category model | ||
*/ | ||
export type Transformation = FinalTransformation | ComposeTransformation | TypeLiteralTransformation; | ||
export type TransformationKind = FinalTransformation | ComposeTransformation | TypeLiteralTransformation; | ||
/** | ||
@@ -676,23 +1009,25 @@ * @category model | ||
*/ | ||
export interface FinalTransformation { | ||
readonly _tag: "FinalTransformation"; | ||
readonly decode: (input: any, options: ParseOptions, self: Transform) => Effect<any, ParseIssue, any>; | ||
readonly encode: (input: any, options: ParseOptions, self: Transform) => Effect<any, ParseIssue, any>; | ||
export declare class FinalTransformation { | ||
readonly decode: (input: any, options: ParseOptions, self: Transformation) => Effect<any, ParseIssue, any>; | ||
readonly encode: (input: any, options: ParseOptions, self: Transformation) => Effect<any, ParseIssue, any>; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "FinalTransformation"; | ||
constructor(decode: (input: any, options: ParseOptions, self: Transformation) => Effect<any, ParseIssue, any>, encode: (input: any, options: ParseOptions, self: Transformation) => Effect<any, ParseIssue, any>); | ||
} | ||
/** | ||
* @category constructors | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createFinalTransformation: (decode: FinalTransformation["decode"], encode: FinalTransformation["encode"]) => FinalTransformation; | ||
export declare const isFinalTransformation: (ast: TransformationKind) => ast is FinalTransformation; | ||
/** | ||
* @category guard | ||
* @since 1.0.0 | ||
*/ | ||
export declare const isFinalTransformation: (ast: Transformation) => ast is FinalTransformation; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface ComposeTransformation { | ||
readonly _tag: "ComposeTransformation"; | ||
export declare class ComposeTransformation { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "ComposeTransformation"; | ||
} | ||
@@ -705,6 +1040,6 @@ /** | ||
/** | ||
* @category guard | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export declare const isComposeTransformation: (ast: Transformation) => ast is ComposeTransformation; | ||
export declare const isComposeTransformation: (ast: TransformationKind) => ast is ComposeTransformation; | ||
/** | ||
@@ -724,55 +1059,27 @@ * Represents a `PropertySignature -> PropertySignature` transformation | ||
*/ | ||
export interface FinalPropertySignatureTransformation { | ||
readonly _tag: "FinalPropertySignatureTransformation"; | ||
export declare class PropertySignatureTransformation { | ||
readonly from: PropertyKey; | ||
readonly to: PropertyKey; | ||
readonly decode: (o: Option.Option<any>) => Option.Option<any>; | ||
readonly encode: (o: Option.Option<any>) => Option.Option<any>; | ||
constructor(from: PropertyKey, to: PropertyKey, decode: (o: Option.Option<any>) => Option.Option<any>, encode: (o: Option.Option<any>) => Option.Option<any>); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createFinalPropertySignatureTransformation: (decode: FinalPropertySignatureTransformation["decode"], encode: FinalPropertySignatureTransformation["encode"]) => FinalPropertySignatureTransformation; | ||
/** | ||
* @category guard | ||
* @since 1.0.0 | ||
*/ | ||
export declare const isFinalPropertySignatureTransformation: (ast: PropertySignatureTransformation) => ast is FinalPropertySignatureTransformation; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export type PropertySignatureTransformation = FinalPropertySignatureTransformation; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface PropertySignatureTransform { | ||
readonly from: PropertyKey; | ||
readonly to: PropertyKey; | ||
readonly propertySignatureTransformation: PropertySignatureTransformation; | ||
export declare class TypeLiteralTransformation { | ||
readonly propertySignatureTransformations: ReadonlyArray<PropertySignatureTransformation>; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "TypeLiteralTransformation"; | ||
constructor(propertySignatureTransformations: ReadonlyArray<PropertySignatureTransformation>); | ||
} | ||
/** | ||
* @category constructors | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createPropertySignatureTransform: (from: PropertyKey, to: PropertyKey, propertySignatureTransformation: PropertySignatureTransformation) => PropertySignatureTransform; | ||
export declare const isTypeLiteralTransformation: (ast: TransformationKind) => ast is TypeLiteralTransformation; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface TypeLiteralTransformation { | ||
readonly _tag: "TypeLiteralTransformation"; | ||
readonly propertySignatureTransformations: ReadonlyArray<PropertySignatureTransform>; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createTypeLiteralTransformation: (propertySignatureTransformations: TypeLiteralTransformation["propertySignatureTransformations"]) => TypeLiteralTransformation; | ||
/** | ||
* @category guard | ||
* @since 1.0.0 | ||
*/ | ||
export declare const isTypeLiteralTransformation: (ast: Transformation) => ast is TypeLiteralTransformation; | ||
/** | ||
* Adds a group of annotations, potentially overwriting existing annotations. | ||
@@ -782,24 +1089,4 @@ * | ||
*/ | ||
export declare const mergeAnnotations: (ast: AST, annotations: Annotations) => AST; | ||
export declare const annotations: (ast: AST, annotations: Annotations) => AST; | ||
/** | ||
* Adds an annotation, potentially overwriting the existing annotation with the specified id. | ||
* | ||
* @since 1.0.0 | ||
*/ | ||
export declare const setAnnotation: (ast: AST, sym: symbol, value: unknown) => AST; | ||
/** | ||
* Adds a rest element to the end of a tuple, or throws an exception if the rest element is already present. | ||
* | ||
* @since 1.0.0 | ||
*/ | ||
export declare const appendRestElement: (ast: Tuple, restElement: AST) => Tuple; | ||
/** | ||
* Appends an element to a tuple or throws an exception in the following cases: | ||
* - A required element cannot follow an optional element. ts(1257) | ||
* - An optional element cannot follow a rest element. ts(1266) | ||
* | ||
* @since 1.0.0 | ||
*/ | ||
export declare const appendElement: (ast: Tuple, newElement: Element) => Tuple; | ||
/** | ||
* Equivalent at runtime to the TypeScript type-level `keyof` operator. | ||
@@ -813,9 +1100,7 @@ * | ||
*/ | ||
export declare const getPropertySignatures: (ast: AST) => Array<PropertySignature>; | ||
export declare const getTemplateLiteralRegExp: (ast: TemplateLiteral) => RegExp; | ||
/** | ||
* Create a record with the specified key type and value type. | ||
* | ||
* @since 1.0.0 | ||
*/ | ||
export declare const createRecord: (key: AST, value: AST, isReadonly: boolean) => TypeLiteral; | ||
export declare const getPropertySignatures: (ast: AST) => Array<PropertySignature>; | ||
/** | ||
@@ -826,3 +1111,3 @@ * Equivalent at runtime to the built-in TypeScript utility type `Pick`. | ||
*/ | ||
export declare const pick: (ast: AST, keys: ReadonlyArray<PropertyKey>) => TypeLiteral; | ||
export declare const pick: (ast: AST, keys: ReadonlyArray<PropertyKey>) => TypeLiteral | Transformation; | ||
/** | ||
@@ -833,3 +1118,3 @@ * Equivalent at runtime to the built-in TypeScript utility type `Omit`. | ||
*/ | ||
export declare const omit: (ast: AST, keys: ReadonlyArray<PropertyKey>) => TypeLiteral; | ||
export declare const omit: (ast: AST, keys: ReadonlyArray<PropertyKey>) => TypeLiteral | Transformation; | ||
/** | ||
@@ -840,3 +1125,5 @@ * Equivalent at runtime to the built-in TypeScript utility type `Partial`. | ||
*/ | ||
export declare const partial: (ast: AST) => AST; | ||
export declare const partial: (ast: AST, options?: { | ||
readonly exact: true; | ||
}) => AST; | ||
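A hedged sketch of the AST-level utilities above; `Schema.struct`/`string`/`number` are assumptions, and the `exact` semantics are assumed to mirror the Schema-level `partial` (optional without widening to `undefined`).

import * as AST from "@effect/schema/AST"
import * as Schema from "@effect/schema/Schema"

const ast = Schema.struct({ a: Schema.string, b: Schema.number }).ast

const onlyA = AST.pick(ast, ["a"])                       // TypeLiteral (or Transformation) with just `a`
const loosePartial = AST.partial(ast)                    // every property optional, values widened with undefined
const exactPartial = AST.partial(ast, { exact: true })   // optional properties without adding `undefined`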
/** | ||
@@ -859,3 +1146,3 @@ * Equivalent at runtime to the built-in TypeScript utility type `Required`. | ||
*/ | ||
export type Compiler<A> = (ast: AST) => A; | ||
export type Compiler<A> = (ast: AST, path: ReadonlyArray<PropertyKey>) => A; | ||
/** | ||
@@ -867,3 +1154,3 @@ * @since 1.0.0 | ||
_tag: K; | ||
}>, compile: Compiler<A>) => A; | ||
}>, compile: Compiler<A>, path: ReadonlyArray<PropertyKey>) => A; | ||
}; | ||
@@ -877,16 +1164,7 @@ /** | ||
*/ | ||
export declare const to: (ast: AST) => AST; | ||
export declare const typeAST: (ast: AST) => AST; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export declare const from: (ast: AST) => AST; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export declare const hash: (ast: AST) => number; | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export declare const format: (ast: AST, verbose?: boolean) => string; | ||
export declare const encodedAST: (ast: AST) => AST; | ||
//# sourceMappingURL=AST.d.ts.map |
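A hedged sketch of the rename from `to`/`from` to `typeAST`/`encodedAST`; `Schema.NumberFromString` is assumed to exist in this snapshot as in neighbouring releases.

import * as AST from "@effect/schema/AST"
import * as Schema from "@effect/schema/Schema"

const ast = Schema.NumberFromString.ast     // a Transformation node
const type = AST.typeAST(ast)               // the decoded (Type) side
const encoded = AST.encodedAST(ast)         // the encoded side
console.log(type._tag, encoded._tag)        // "NumberKeyword" "StringKeyword"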
@@ -5,3 +5,2 @@ /** | ||
import * as Equivalence from "effect/Equivalence"; | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import type * as Schema from "./Schema.js"; | ||
@@ -8,0 +7,0 @@ /** |
@@ -20,3 +20,3 @@ /** | ||
*/ | ||
export * as Format from "./Format.js"; | ||
export * as FastCheck from "./FastCheck.js"; | ||
/** | ||
@@ -33,6 +33,2 @@ * @since 1.0.0 | ||
*/ | ||
export * as Parser from "./Parser.js"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export * as Pretty from "./Pretty.js"; | ||
@@ -39,0 +35,0 @@ /** |
@@ -10,3 +10,13 @@ /** | ||
*/ | ||
export interface JsonSchema7Any { | ||
export interface JsonSchemaAnnotations { | ||
title?: string; | ||
description?: string; | ||
default?: unknown; | ||
examples?: Array<unknown>; | ||
} | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface JsonSchema7Any extends JsonSchemaAnnotations { | ||
$id: "/schemas/any"; | ||
@@ -18,3 +28,3 @@ } | ||
*/ | ||
export interface JsonSchema7Unknown { | ||
export interface JsonSchema7Unknown extends JsonSchemaAnnotations { | ||
$id: "/schemas/unknown"; | ||
@@ -26,3 +36,3 @@ } | ||
*/ | ||
export interface JsonSchema7object { | ||
export interface JsonSchema7object extends JsonSchemaAnnotations { | ||
$id: "/schemas/object"; | ||
@@ -42,3 +52,3 @@ oneOf: [ | ||
*/ | ||
export interface JsonSchema7empty { | ||
export interface JsonSchema7empty extends JsonSchemaAnnotations { | ||
$id: "/schemas/{}"; | ||
@@ -58,3 +68,3 @@ oneOf: [ | ||
*/ | ||
export interface JsonSchema7Ref { | ||
export interface JsonSchema7Ref extends JsonSchemaAnnotations { | ||
$ref: string; | ||
@@ -66,3 +76,3 @@ } | ||
*/ | ||
export interface JsonSchema7Const { | ||
export interface JsonSchema7Const extends JsonSchemaAnnotations { | ||
const: AST.LiteralValue; | ||
@@ -74,3 +84,3 @@ } | ||
*/ | ||
export interface JsonSchema7String { | ||
export interface JsonSchema7String extends JsonSchemaAnnotations { | ||
type: "string"; | ||
@@ -80,3 +90,2 @@ minLength?: number; | ||
pattern?: string; | ||
description?: string; | ||
} | ||
@@ -87,3 +96,3 @@ /** | ||
*/ | ||
export interface JsonSchema7Numeric { | ||
export interface JsonSchema7Numeric extends JsonSchemaAnnotations { | ||
minimum?: number; | ||
@@ -112,3 +121,3 @@ exclusiveMinimum?: number; | ||
*/ | ||
export interface JsonSchema7Boolean { | ||
export interface JsonSchema7Boolean extends JsonSchemaAnnotations { | ||
type: "boolean"; | ||
@@ -120,3 +129,3 @@ } | ||
*/ | ||
export interface JsonSchema7Array { | ||
export interface JsonSchema7Array extends JsonSchemaAnnotations { | ||
type: "array"; | ||
@@ -132,3 +141,3 @@ items?: JsonSchema7 | Array<JsonSchema7>; | ||
*/ | ||
export interface JsonSchema7OneOf { | ||
export interface JsonSchema7OneOf extends JsonSchemaAnnotations { | ||
oneOf: Array<JsonSchema7>; | ||
@@ -140,3 +149,3 @@ } | ||
*/ | ||
export interface JsonSchema7Enum { | ||
export interface JsonSchema7Enum extends JsonSchemaAnnotations { | ||
enum: Array<AST.LiteralValue>; | ||
@@ -148,3 +157,3 @@ } | ||
*/ | ||
export interface JsonSchema7Enums { | ||
export interface JsonSchema7Enums extends JsonSchemaAnnotations { | ||
$comment: "/schemas/enums"; | ||
@@ -160,3 +169,3 @@ oneOf: Array<{ | ||
*/ | ||
export interface JsonSchema7AnyOf { | ||
export interface JsonSchema7AnyOf extends JsonSchemaAnnotations { | ||
anyOf: Array<JsonSchema7>; | ||
@@ -168,3 +177,3 @@ } | ||
*/ | ||
export interface JsonSchema7Object { | ||
export interface JsonSchema7Object extends JsonSchemaAnnotations { | ||
type: "object"; | ||
@@ -171,0 +180,0 @@ required: Array<string>; |
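A hedged sketch of what the new `JsonSchemaAnnotations` base interface enables: since every node now extends it, annotations such as title/description/examples survive into the generated output. `JSONSchema.make` and the lowercase Schema constructors are assumptions based on neighbouring releases.

import * as JSONSchema from "@effect/schema/JSONSchema"
import * as Schema from "@effect/schema/Schema"

const Age = Schema.number.annotations({
  title: "Age",
  description: "age in years",
  examples: [18, 65]
})

console.log(JSONSchema.make(Schema.struct({ age: Age })))
// -> { ..., properties: { age: { type: "number", title: "Age",
//      description: "age in years", examples: [18, 65] } }, ... }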
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Arr from "effect/Array"; | ||
import * as Effect from "effect/Effect"; | ||
@@ -8,60 +9,6 @@ import * as Either from "effect/Either"; | ||
import * as Inspectable from "effect/Inspectable"; | ||
import type * as Option from "effect/Option"; | ||
import type * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import type * as AST from "./AST.js"; | ||
declare const ParseError_base: new <A extends Record<string, any> = {}>(args: import("effect/Types").Equals<A, {}> extends true ? void : { readonly [P in keyof A as P extends "_tag" ? never : P]: A[P]; }) => import("effect/Cause").YieldableError & { | ||
readonly _tag: "ParseError"; | ||
} & Readonly<A>; | ||
import * as Option from "effect/Option"; | ||
import * as AST from "./AST.js"; | ||
import type * as Schema from "./Schema.js"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export declare class ParseError extends ParseError_base<{ | ||
readonly error: ParseIssue; | ||
}> { | ||
get message(): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): { | ||
_id: string; | ||
message: string; | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
[Inspectable.NodeInspectSymbol](): { | ||
_id: string; | ||
message: string; | ||
}; | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const parseError: (issue: ParseIssue) => ParseError; | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const succeed: <A>(a: A) => Either.Either<ParseIssue, A>; | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const fail: (issue: ParseIssue) => Either.Either<ParseIssue, never>; | ||
declare const _try: <A>(options: { | ||
try: LazyArg<A>; | ||
catch: (e: unknown) => ParseIssue; | ||
}) => Either.Either<ParseIssue, A>; | ||
export { | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
_try as try }; | ||
/** | ||
* `ParseIssue` is a type that represents the different types of errors that can occur when decoding/encoding a value. | ||
@@ -72,5 +19,5 @@ * | ||
*/ | ||
export type ParseIssue = Declaration | Refinement | Tuple | TypeLiteral | Union | Transform | Type | Forbidden; | ||
export type ParseIssue = Declaration | Refinement | TupleType | TypeLiteral | Union | Transformation | Type | Forbidden; | ||
/** | ||
* Error that occurs when a transformation has an error. | ||
* Error that occurs when a declaration has an error. | ||
* | ||
@@ -80,17 +27,14 @@ * @category model | ||
*/ | ||
export interface Transform { | ||
readonly _tag: "Transform"; | ||
readonly ast: AST.Transform; | ||
export declare class Declaration { | ||
readonly ast: AST.Declaration; | ||
readonly actual: unknown; | ||
readonly kind: "From" | "Transformation" | "To"; | ||
readonly error: ParseIssue; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Declaration"; | ||
constructor(ast: AST.Declaration, actual: unknown, error: ParseIssue); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const transform: (ast: AST.Transform, actual: unknown, kind: "From" | "Transformation" | "To", error: ParseIssue) => Transform; | ||
/** | ||
* The `Type` variant of the `ParseIssue` type represents an error that occurs when the `actual` value is not of the expected type. | ||
* The `ast` field specifies the expected type, and the `actual` field contains the value that caused the error. | ||
* Error that occurs when a refinement has an error. | ||
* | ||
@@ -100,15 +44,15 @@ * @category model | ||
*/ | ||
export interface Type { | ||
readonly _tag: "Type"; | ||
readonly ast: AST.AST; | ||
export declare class Refinement { | ||
readonly ast: AST.Refinement<AST.AST>; | ||
readonly actual: unknown; | ||
readonly message: Option.Option<string>; | ||
readonly kind: "From" | "Predicate"; | ||
readonly error: ParseIssue; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Refinement"; | ||
constructor(ast: AST.Refinement<AST.AST>, actual: unknown, kind: "From" | "Predicate", error: ParseIssue); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const type: (ast: AST.AST, actual: unknown, message?: string | undefined) => Type; | ||
/** | ||
 * The `Forbidden` variant of the `ParseIssue` type represents an error that occurs when an Effect is encountered but disallowed from execution. | ||
* Error that occurs when an array or tuple has an error. | ||
* | ||
@@ -118,15 +62,30 @@ * @category model | ||
*/ | ||
export interface Forbidden { | ||
readonly _tag: "Forbidden"; | ||
readonly ast: AST.AST; | ||
export declare class TupleType { | ||
readonly ast: AST.TupleType; | ||
readonly actual: unknown; | ||
readonly message: Option.Option<string>; | ||
readonly errors: Arr.NonEmptyReadonlyArray<Index>; | ||
readonly output: ReadonlyArray<unknown>; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "TupleType"; | ||
constructor(ast: AST.TupleType, actual: unknown, errors: Arr.NonEmptyReadonlyArray<Index>, output?: ReadonlyArray<unknown>); | ||
} | ||
/** | ||
* @category constructors | ||
* The `Index` error indicates that there was an error at a specific index in an array or tuple. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export declare const forbidden: (ast: AST.AST, actual: unknown, message?: string | undefined) => Forbidden; | ||
export declare class Index { | ||
readonly index: number; | ||
readonly error: ParseIssue | Missing | Unexpected; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Index"; | ||
constructor(index: number, error: ParseIssue | Missing | Unexpected); | ||
} | ||
/** | ||
* Error that occurs when a declaration has an error. | ||
* Error that occurs when a type literal or record has an error. | ||
* | ||
@@ -136,15 +95,19 @@ * @category model | ||
*/ | ||
export interface Declaration { | ||
readonly _tag: "Declaration"; | ||
readonly ast: AST.Declaration; | ||
export declare class TypeLiteral { | ||
readonly ast: AST.TypeLiteral; | ||
readonly actual: unknown; | ||
readonly error: ParseIssue; | ||
readonly errors: Arr.NonEmptyReadonlyArray<Key>; | ||
readonly output: { | ||
readonly [x: string]: unknown; | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "TypeLiteral"; | ||
constructor(ast: AST.TypeLiteral, actual: unknown, errors: Arr.NonEmptyReadonlyArray<Key>, output?: { | ||
readonly [x: string]: unknown; | ||
}); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const declaration: (ast: AST.Declaration, actual: unknown, error: ParseIssue) => Declaration; | ||
/** | ||
* Error that occurs when a refinement has an error. | ||
* The `Key` variant of the `ParseIssue` type represents an error that occurs when a key in a type literal or record is invalid. | ||
* | ||
@@ -154,16 +117,13 @@ * @category model | ||
*/ | ||
export interface Refinement { | ||
readonly _tag: "Refinement"; | ||
readonly ast: AST.Refinement; | ||
readonly actual: unknown; | ||
readonly kind: "From" | "Predicate"; | ||
readonly error: ParseIssue; | ||
export declare class Key { | ||
readonly key: PropertyKey; | ||
readonly error: ParseIssue | Missing | Unexpected; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Key"; | ||
constructor(key: PropertyKey, error: ParseIssue | Missing | Unexpected); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const refinement: (ast: AST.Refinement<AST.AST>, actual: unknown, kind: "From" | "Predicate", error: ParseIssue) => Refinement; | ||
/** | ||
* Error that occurs when an array or tuple has an error. | ||
* Error that occurs when an unexpected key or index is present. | ||
* | ||
@@ -173,15 +133,12 @@ * @category model | ||
*/ | ||
export interface Tuple { | ||
readonly _tag: "Tuple"; | ||
readonly ast: AST.Tuple; | ||
readonly actual: unknown; | ||
readonly errors: ReadonlyArray.NonEmptyReadonlyArray<Index>; | ||
export declare class Unexpected { | ||
readonly ast: AST.AST; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Unexpected"; | ||
constructor(ast: AST.AST); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const tuple: (ast: AST.Tuple, actual: unknown, errors: readonly [Index, ...Index[]]) => Tuple; | ||
/** | ||
* Error that occurs when a type literal or record has an error. | ||
* Error that occurs when a transformation has an error. | ||
* | ||
@@ -191,15 +148,16 @@ * @category model | ||
*/ | ||
export interface TypeLiteral { | ||
readonly _tag: "TypeLiteral"; | ||
readonly ast: AST.TypeLiteral; | ||
export declare class Transformation { | ||
readonly ast: AST.Transformation; | ||
readonly actual: unknown; | ||
readonly errors: ReadonlyArray.NonEmptyReadonlyArray<Key>; | ||
readonly kind: "Encoded" | "Transformation" | "Type"; | ||
readonly error: ParseIssue; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Transformation"; | ||
constructor(ast: AST.Transformation, actual: unknown, kind: "Encoded" | "Transformation" | "Type", error: ParseIssue); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const typeLiteral: (ast: AST.TypeLiteral, actual: unknown, errors: readonly [Key, ...Key[]]) => TypeLiteral; | ||
/** | ||
* The `Index` error indicates that there was an error at a specific index in an array or tuple. | ||
* The `Type` variant of the `ParseIssue` type represents an error that occurs when the `actual` value is not of the expected type. | ||
* The `ast` field specifies the expected type, and the `actual` field contains the value that caused the error. | ||
* | ||
@@ -209,14 +167,17 @@ * @category model | ||
*/ | ||
export interface Index { | ||
readonly _tag: "Index"; | ||
readonly index: number; | ||
readonly error: ParseIssue | Missing | Unexpected; | ||
export declare class Type { | ||
readonly ast: AST.AST; | ||
readonly actual: unknown; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Type"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly message: Option.Option<string>; | ||
constructor(ast: AST.AST, actual: unknown, message?: string); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const index: (index: number, error: ParseIssue | Missing | Unexpected) => Index; | ||
/** | ||
* The `Key` variant of the `ParseIssue` type represents an error that occurs when a key in a type literal or record is invalid. | ||
* The `Forbidden` variant of the `ParseIssue` type represents a forbidden operation, such as when encountering an Effect that is not allowed to execute (e.g., using `runSync`). | ||
* | ||
@@ -226,13 +187,16 @@ * @category model | ||
*/ | ||
export interface Key { | ||
readonly _tag: "Key"; | ||
readonly key: PropertyKey; | ||
readonly error: ParseIssue | Missing | Unexpected; | ||
export declare class Forbidden { | ||
readonly ast: AST.AST; | ||
readonly actual: unknown; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Forbidden"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly message: Option.Option<string>; | ||
constructor(ast: AST.AST, actual: unknown, message?: string); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const key: (key: PropertyKey, error: ParseIssue | Missing | Unexpected) => Key; | ||
/** | ||
* Error that occurs when a required key or index is missing. | ||
@@ -243,4 +207,7 @@ * | ||
*/ | ||
export interface Missing { | ||
readonly _tag: "Missing"; | ||
export declare class Missing { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Missing"; | ||
} | ||
@@ -253,3 +220,3 @@ /** | ||
/** | ||
* Error that occurs when an unexpected key or index is present. | ||
* Error that occurs when a member in a union has an error. | ||
* | ||
@@ -259,12 +226,12 @@ * @category model | ||
*/ | ||
export interface Unexpected { | ||
readonly _tag: "Unexpected"; | ||
export declare class Member { | ||
readonly ast: AST.AST; | ||
readonly error: ParseIssue; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Member"; | ||
constructor(ast: AST.AST, error: ParseIssue); | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const unexpected: (ast: AST.AST) => Unexpected; | ||
/** | ||
* Error that occurs when a union has an error. | ||
@@ -275,24 +242,51 @@ * | ||
*/ | ||
export interface Union { | ||
readonly _tag: "Union"; | ||
export declare class Union { | ||
readonly ast: AST.Union; | ||
readonly actual: unknown; | ||
readonly errors: ReadonlyArray.NonEmptyReadonlyArray<Type | TypeLiteral | Member>; | ||
readonly errors: Arr.NonEmptyReadonlyArray<Type | TypeLiteral | Member>; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Union"; | ||
constructor(ast: AST.Union, actual: unknown, errors: Arr.NonEmptyReadonlyArray<Type | TypeLiteral | Member>); | ||
} | ||
declare const ParseError_base: new <A extends Record<string, any> = {}>(args: import("effect/Types").Equals<A, {}> extends true ? void : { readonly [P in keyof A as P extends "_tag" ? never : P]: A[P]; }) => import("effect/Cause").YieldableError & { | ||
readonly _tag: "ParseError"; | ||
} & Readonly<A>; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export declare class ParseError extends ParseError_base<{ | ||
readonly error: ParseIssue; | ||
}> { | ||
get message(): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(): string; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON(): { | ||
_id: string; | ||
message: string; | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
[Inspectable.NodeInspectSymbol](): { | ||
_id: string; | ||
message: string; | ||
}; | ||
} | ||
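A hedged sketch of working with the class-based issues: `ParseError` wraps the root `ParseIssue`, and its `message` renders a readable tree. `decodeUnknownSync` is declared further down in this file; `Schema.struct`/`Schema.number` are assumptions.

import * as ParseResult from "@effect/schema/ParseResult"
import * as Schema from "@effect/schema/Schema"

try {
  ParseResult.decodeUnknownSync(Schema.struct({ a: Schema.number }))({ a: "x" })
} catch (e) {
  if (e instanceof ParseResult.ParseError) {
    console.log(e.error._tag) // "TypeLiteral": the root issue for the struct
    console.log(e.message)    // multi-line tree pointing at key "a"
  }
}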
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const union: (ast: AST.Union, actual: unknown, errors: readonly [TypeLiteral | Type | Member, ...(TypeLiteral | Type | Member)[]]) => Union; | ||
export declare const parseError: (issue: ParseIssue) => ParseError; | ||
/** | ||
* Error that occurs when a member in a union has an error. | ||
* | ||
* @category model | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export interface Member { | ||
readonly _tag: "Member"; | ||
readonly ast: AST.AST; | ||
readonly error: ParseIssue; | ||
} | ||
export declare const succeed: <A>(a: A) => Either.Either<A, ParseIssue>; | ||
/** | ||
@@ -302,9 +296,22 @@ * @category constructors | ||
*/ | ||
export declare const member: (ast: AST.AST, error: ParseIssue) => Member; | ||
export declare const fail: (issue: ParseIssue) => Either.Either<never, ParseIssue>; | ||
declare const _try: <A>(options: { | ||
try: LazyArg<A>; | ||
catch: (e: unknown) => ParseIssue; | ||
}) => Either.Either<A, ParseIssue>; | ||
export { | ||
/** | ||
* @category optimisation | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const eitherOrUndefined: <A, E, R>(self: Effect.Effect<A, E, R>) => Either.Either<E, A> | undefined; | ||
_try as try }; | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export declare const fromOption: { | ||
(onNone: () => ParseIssue): <A>(self: Option.Option<A>) => Either.Either<A, ParseIssue>; | ||
<A>(self: Option.Option<A>, onNone: () => ParseIssue): Either.Either<A, ParseIssue>; | ||
}; | ||
/** | ||
* @category optimisation | ||
@@ -315,3 +322,3 @@ * @since 1.0.0 | ||
<A, B, E1, R1>(f: (a: A) => Effect.Effect<B, E1, R1>): <E, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<B, E1 | E, R1 | R>; | ||
<A_1, E_1, R_1, B_1, E1_1, R1_1>(self: Effect.Effect<A_1, E_1, R_1>, f: (a: A_1) => Effect.Effect<B_1, E1_1, R1_1>): Effect.Effect<B_1, E_1 | E1_1, R_1 | R1_1>; | ||
<A, E, R, B, E1, R1>(self: Effect.Effect<A, E, R>, f: (a: A) => Effect.Effect<B, E1, R1>): Effect.Effect<B, E | E1, R | R1>; | ||
}; | ||
@@ -324,3 +331,3 @@ /** | ||
<A, B>(f: (a: A) => B): <E, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<B, E, R>; | ||
<A_1, E_1, R_1, B_1>(self: Effect.Effect<A_1, E_1, R_1>, f: (a: A_1) => B_1): Effect.Effect<B_1, E_1, R_1>; | ||
<A, E, R, B>(self: Effect.Effect<A, E, R>, f: (a: A) => B): Effect.Effect<B, E, R>; | ||
}; | ||
@@ -333,3 +340,3 @@ /** | ||
<E, E2>(f: (e: E) => E2): <A, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<A, E2, R>; | ||
<A_1, E_1, R_1, E2_1>(self: Effect.Effect<A_1, E_1, R_1>, f: (e: E_1) => E2_1): Effect.Effect<A_1, E2_1, R_1>; | ||
<A, E, R, E2>(self: Effect.Effect<A, E, R>, f: (e: E) => E2): Effect.Effect<A, E2, R>; | ||
}; | ||
@@ -340,2 +347,7 @@ /** | ||
*/ | ||
export declare const eitherOrUndefined: <A, E, R>(self: Effect.Effect<A, E, R>) => Either.Either<A, E> | undefined; | ||
/** | ||
* @category optimisation | ||
* @since 1.0.0 | ||
*/ | ||
export declare const mapBoth: { | ||
@@ -359,8 +371,15 @@ <E, E2, A, A2>(options: { | ||
}; | ||
export { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export type DecodeUnknown<Out, R> = (u: unknown, options?: AST.ParseOptions) => Effect.Effect<Out, ParseIssue, R>; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export type DeclarationDecodeUnknown<Out, R> = (u: unknown, options: AST.ParseOptions, ast: AST.Declaration) => Effect.Effect<Out, ParseIssue, R>; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
decodeUnknown, | ||
export declare const decodeUnknownSync: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => A; | ||
/** | ||
@@ -370,3 +389,3 @@ * @category decoding | ||
*/ | ||
decodeUnknownEither, | ||
export declare const decodeUnknownOption: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Option.Option<A>; | ||
/** | ||
@@ -376,3 +395,3 @@ * @category decoding | ||
*/ | ||
decodeUnknownOption, | ||
export declare const decodeUnknownEither: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Either.Either<A, ParseIssue>; | ||
/** | ||
@@ -382,3 +401,3 @@ * @category decoding | ||
*/ | ||
decodeUnknownPromise, | ||
export declare const decodeUnknownPromise: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Promise<A>; | ||
/** | ||
@@ -388,3 +407,3 @@ * @category decoding | ||
*/ | ||
decodeUnknownSync, | ||
export declare const decodeUnknown: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Effect.Effect<A, ParseIssue, R>; | ||
/** | ||
@@ -394,3 +413,3 @@ * @category encoding | ||
*/ | ||
encodeUnknown, | ||
export declare const encodeUnknownSync: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => I; | ||
/** | ||
@@ -400,3 +419,3 @@ * @category encoding | ||
*/ | ||
encodeUnknownEither, | ||
export declare const encodeUnknownOption: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Option.Option<I>; | ||
/** | ||
@@ -406,3 +425,3 @@ * @category encoding | ||
*/ | ||
encodeUnknownOption, | ||
export declare const encodeUnknownEither: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Either.Either<I, ParseIssue>; | ||
/** | ||
@@ -412,3 +431,3 @@ * @category encoding | ||
*/ | ||
encodeUnknownPromise, | ||
export declare const encodeUnknownPromise: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Promise<I>; | ||
/** | ||
@@ -418,11 +437,88 @@ * @category encoding | ||
*/ | ||
encodeUnknownSync } from "./Parser.js"; | ||
export declare const encodeUnknown: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Effect.Effect<I, ParseIssue, R>; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export type DecodeUnknown<Out, R> = (u: unknown, options?: AST.ParseOptions) => Effect.Effect<Out, ParseIssue, R>; | ||
export declare const decodeSync: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (i: I, overrideOptions?: AST.ParseOptions) => A; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export type DeclarationDecodeUnknown<Out, R> = (u: unknown, options: AST.ParseOptions, ast: AST.Declaration) => Effect.Effect<Out, ParseIssue, R>; | ||
export declare const decodeOption: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (i: I, overrideOptions?: AST.ParseOptions) => Option.Option<A>; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export declare const decodeEither: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (i: I, overrideOptions?: AST.ParseOptions) => Either.Either<A, ParseIssue>; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export declare const decodePromise: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (i: I, overrideOptions?: AST.ParseOptions) => Promise<A>; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export declare const decode: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (i: I, overrideOptions?: AST.ParseOptions) => Effect.Effect<A, ParseIssue, R>; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export declare const validateSync: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => A; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export declare const validateOption: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Option.Option<A>; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export declare const validateEither: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Either.Either<A, ParseIssue>; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export declare const validatePromise: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => Promise<A>; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export declare const validate: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (a: unknown, overrideOptions?: AST.ParseOptions) => Effect.Effect<A, ParseIssue, R>; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export declare const is: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions | number) => u is A; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export declare const asserts: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (u: unknown, overrideOptions?: AST.ParseOptions) => asserts u is A; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export declare const encodeSync: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (a: A, overrideOptions?: AST.ParseOptions) => I; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export declare const encodeOption: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (input: A, overrideOptions?: AST.ParseOptions) => Option.Option<I>; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export declare const encodeEither: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (a: A, overrideOptions?: AST.ParseOptions) => Either.Either<I, ParseIssue>; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export declare const encodePromise: <A, I>(schema: Schema.Schema<A, I, never>, options?: AST.ParseOptions) => (a: A, overrideOptions?: AST.ParseOptions) => Promise<I>; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export declare const encode: <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => (a: A, overrideOptions?: AST.ParseOptions) => Effect.Effect<I, ParseIssue, R>; | ||
//# sourceMappingURL=ParseResult.d.ts.map |
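The declarations above replace the old plain-object issues (`Key`, `Unexpected`, `Member`, ...) with classes, add a class-based `ParseError`, and put the success type first in `Either`. A minimal usage sketch, assuming `Schema.struct`/`Schema.number` are still the constructor names in this snapshot:

import * as Either from "effect/Either"
import * as ParseResult from "@effect/schema/ParseResult"
import * as Schema from "@effect/schema/Schema"

// decodeUnknownEither now yields Either<A, ParseIssue> (success on the Right)
const decode = ParseResult.decodeUnknownEither(Schema.struct({ id: Schema.number }))

const ko = decode({ id: "1" })
if (Either.isLeft(ko)) {
  // wrap the issue into the class-based ParseError declared above
  const error = ParseResult.parseError(ko.left)
  console.log(error.message)
}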
@@ -1,2 +0,1 @@ | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import * as AST from "./AST.js"; | ||
@@ -3,0 +2,0 @@ import type * as Schema from "./Schema.js"; |
@@ -46,6 +46,6 @@ /** | ||
*/ | ||
export interface WithResult<R, IE, E, IA, A> { | ||
export interface WithResult<SuccessA, SuccessI, FailureA, FailureI, SuccessAndFailureR> { | ||
readonly [symbolResult]: { | ||
readonly Failure: Schema.Schema<E, IE, R>; | ||
readonly Success: Schema.Schema<A, IA, R>; | ||
readonly Success: Schema.Schema<SuccessA, SuccessI, SuccessAndFailureR>; | ||
readonly Failure: Schema.Schema<FailureA, FailureI, SuccessAndFailureR>; | ||
}; | ||
@@ -61,3 +61,3 @@ } | ||
*/ | ||
type Context<T> = T extends WithResult<infer R, infer _IE, infer _E, infer _IA, infer _A> ? R : never; | ||
type Context<T> = T extends WithResult<infer _A, infer _I, infer _E, infer _EI, infer R> ? R : never; | ||
} | ||
@@ -68,3 +68,3 @@ /** | ||
*/ | ||
export declare const failureSchema: <R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>) => Schema.Schema<E, IE, R>; | ||
export declare const failureSchema: <A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>) => Schema.Schema<E, EI, R>; | ||
/** | ||
@@ -74,3 +74,3 @@ * @since 1.0.0 | ||
*/ | ||
export declare const successSchema: <R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>) => Schema.Schema<A, IA, R>; | ||
export declare const successSchema: <A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>) => Schema.Schema<A, I, R>; | ||
/** | ||
@@ -80,3 +80,3 @@ * @since 1.0.0 | ||
*/ | ||
export declare const exitSchema: <R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>) => Schema.Schema<Exit.Exit<A, E>, Schema.ExitFrom<IA, IE>, R>; | ||
export declare const exitSchema: <A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>) => Schema.Schema<Exit.Exit<A, E>, Schema.ExitEncoded<I, EI>, R>; | ||
/** | ||
@@ -86,3 +86,3 @@ * @since 1.0.0 | ||
*/ | ||
export interface SerializableWithResult<R, IS, S, RR, IE, E, IA, A> extends Serializable<S, IS, R>, WithResult<RR, IE, E, IA, A> { | ||
export interface SerializableWithResult<Self, FieldsI, FieldsR, SuccessA, SuccessI, FailureA, FailureI, SuccessAndFailureR> extends Serializable<Self, FieldsI, FieldsR>, WithResult<SuccessA, SuccessI, FailureA, FailureI, SuccessAndFailureR> { | ||
} | ||
@@ -97,3 +97,3 @@ /** | ||
*/ | ||
type Context<T> = T extends SerializableWithResult<infer R, infer _IS, infer _S, infer RR, infer _IE, infer _E, infer _IA, infer _A> ? R | RR : never; | ||
type Context<T> = T extends SerializableWithResult<infer _S, infer _SI, infer SR, infer _A, infer _AI, infer _E, infer _EI, infer RR> ? SR | RR : never; | ||
} | ||
@@ -118,4 +118,4 @@ /** | ||
export declare const serializeFailure: { | ||
<E>(value: E): <R, IE, IA, A>(self: WithResult<R, IE, E, IA, A>) => Effect.Effect<IE, ParseResult.ParseError, R>; | ||
<R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>, value: E): Effect.Effect<IE, ParseResult.ParseError, R>; | ||
<E>(value: E): <A, I, EI, R>(self: WithResult<A, I, E, EI, R>) => Effect.Effect<EI, ParseResult.ParseError, R>; | ||
<A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>, value: E): Effect.Effect<EI, ParseResult.ParseError, R>; | ||
}; | ||
@@ -127,4 +127,4 @@ /** | ||
export declare const deserializeFailure: { | ||
(value: unknown): <R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>) => Effect.Effect<E, ParseResult.ParseError, R>; | ||
<R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>, value: unknown): Effect.Effect<E, ParseResult.ParseError, R>; | ||
(value: unknown): <A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>) => Effect.Effect<E, ParseResult.ParseError, R>; | ||
<A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>, value: unknown): Effect.Effect<E, ParseResult.ParseError, R>; | ||
}; | ||
@@ -136,4 +136,4 @@ /** | ||
export declare const serializeSuccess: { | ||
<A>(value: A): <R, IE, E, IA>(self: WithResult<R, IE, E, IA, A>) => Effect.Effect<IA, ParseResult.ParseError, R>; | ||
<R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>, value: A): Effect.Effect<IA, ParseResult.ParseError, R>; | ||
<A>(value: A): <I, E, EI, R>(self: WithResult<A, I, E, EI, R>) => Effect.Effect<I, ParseResult.ParseError, R>; | ||
<A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>, value: A): Effect.Effect<I, ParseResult.ParseError, R>; | ||
}; | ||
@@ -145,4 +145,4 @@ /** | ||
export declare const deserializeSuccess: { | ||
(value: unknown): <R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>) => Effect.Effect<A, ParseResult.ParseError, R>; | ||
<R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>, value: unknown): Effect.Effect<A, ParseResult.ParseError, R>; | ||
(value: unknown): <A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>) => Effect.Effect<A, ParseResult.ParseError, R>; | ||
<A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>, value: unknown): Effect.Effect<A, ParseResult.ParseError, R>; | ||
}; | ||
@@ -154,4 +154,4 @@ /** | ||
export declare const serializeExit: { | ||
<E, A>(value: Exit.Exit<A, E>): <R, IE, IA>(self: WithResult<R, IE, E, IA, A>) => Effect.Effect<Schema.ExitFrom<IA, IE>, ParseResult.ParseError, R>; | ||
<R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>, value: Exit.Exit<A, E>): Effect.Effect<Schema.ExitFrom<IA, IE>, ParseResult.ParseError, R>; | ||
<A, E>(value: Exit.Exit<A, E>): <I, EI, R>(self: WithResult<A, I, E, EI, R>) => Effect.Effect<Schema.ExitEncoded<I, EI>, ParseResult.ParseError, R>; | ||
<A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>, value: Exit.Exit<A, E>): Effect.Effect<Schema.ExitEncoded<I, EI>, ParseResult.ParseError, R>; | ||
}; | ||
@@ -163,5 +163,5 @@ /** | ||
export declare const deserializeExit: { | ||
(value: unknown): <R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>) => Effect.Effect<Exit.Exit<A, E>, ParseResult.ParseError, R>; | ||
<R, IE, E, IA, A>(self: WithResult<R, IE, E, IA, A>, value: unknown): Effect.Effect<Exit.Exit<A, E>, ParseResult.ParseError, R>; | ||
(value: unknown): <A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>) => Effect.Effect<Exit.Exit<A, E>, ParseResult.ParseError, R>; | ||
<A, I, E, EI, R>(self: WithResult<A, I, E, EI, R>, value: unknown): Effect.Effect<Exit.Exit<A, E>, ParseResult.ParseError, R>; | ||
}; | ||
//# sourceMappingURL=Serializable.d.ts.map |
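`WithResult` reorders its type parameters from `<R, IE, E, IA, A>` to success-first `<SuccessA, SuccessI, FailureA, FailureI, R>`, and `ExitFrom` becomes `ExitEncoded`. A type-level sketch of the new order (the concrete types are illustrative assumptions):

import * as Serializable from "@effect/schema/Serializable"

// success decoded/encoded as number, failure decoded/encoded as string, no context
declare const op: Serializable.WithResult<number, number, string, string, never>

const success = Serializable.successSchema(op) // Schema.Schema<number, number, never>
const failure = Serializable.failureSchema(op) // Schema.Schema<string, string, never>
const exit = Serializable.exitSchema(op)       // Schema.Schema<Exit<number, string>, ExitEncoded<number, string>, never>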
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Effect from "effect/Effect"; | ||
import type * as ParseResult from "./ParseResult.js"; | ||
@@ -9,3 +10,3 @@ /** | ||
*/ | ||
export declare const formatIssues: (issues: readonly [ParseResult.ParseIssue, ...ParseResult.ParseIssue[]]) => string; | ||
export declare const formatIssue: (issue: ParseResult.ParseIssue) => Effect.Effect<string>; | ||
/** | ||
@@ -15,3 +16,3 @@ * @category formatting | ||
*/ | ||
export declare const formatIssue: (issue: ParseResult.ParseIssue) => string; | ||
export declare const formatIssueSync: (issue: ParseResult.ParseIssue) => string; | ||
/** | ||
@@ -21,3 +22,8 @@ * @category formatting | ||
*/ | ||
export declare const formatError: (error: ParseResult.ParseError) => string; | ||
export declare const formatError: (error: ParseResult.ParseError) => Effect.Effect<string>; | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export declare const formatErrorSync: (error: ParseResult.ParseError) => string; | ||
//# sourceMappingURL=TreeFormatter.d.ts.map |
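`formatIssue` and `formatError` become effectful (returning `Effect<string>`), with the previous synchronous behaviour moved to the new `formatIssueSync`/`formatErrorSync`. A small migration sketch (schema constructors assumed):

import * as Either from "effect/Either"
import * as ParseResult from "@effect/schema/ParseResult"
import * as Schema from "@effect/schema/Schema"
import * as TreeFormatter from "@effect/schema/TreeFormatter"

const result = ParseResult.decodeUnknownEither(Schema.struct({ name: Schema.string }))({})
if (Either.isLeft(result)) {
  // synchronous replacement for what `formatIssue` used to return directly
  console.log(TreeFormatter.formatIssueSync(result.left))
}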
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Arr from "effect/Array"; | ||
import * as Option from "effect/Option"; | ||
import * as Predicate from "effect/Predicate"; | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import * as AST from "./AST.js"; | ||
import * as Internal from "./internal/ast.js"; | ||
import * as filters from "./internal/filters.js"; | ||
import * as hooks from "./internal/hooks.js"; | ||
import * as InternalSchema from "./internal/schema.js"; | ||
import * as Parser from "./Parser.js"; | ||
import * as FastCheck from "./FastCheck.js"; | ||
import * as errors_ from "./internal/errors.js"; | ||
import * as filters_ from "./internal/filters.js"; | ||
import * as util_ from "./internal/util.js"; | ||
/** | ||
@@ -17,3 +16,3 @@ * @category hooks | ||
*/ | ||
export const ArbitraryHookId = hooks.ArbitraryHookId; | ||
export const ArbitraryHookId = /*#__PURE__*/Symbol.for("@effect/schema/ArbitraryHookId"); | ||
/** | ||
@@ -23,4 +22,13 @@ * @category annotations | ||
*/ | ||
export const arbitrary = handler => self => InternalSchema.make(AST.setAnnotation(self.ast, ArbitraryHookId, handler)); | ||
export const arbitrary = handler => self => self.annotations({ | ||
[ArbitraryHookId]: handler | ||
}); | ||
/** | ||
* Returns a LazyArbitrary for the `A` type of the provided schema. | ||
* | ||
* @category arbitrary | ||
* @since 1.0.0 | ||
*/ | ||
export const makeLazy = schema => go(schema.ast, {}, []); | ||
/** | ||
* Returns a fast-check Arbitrary for the `A` type of the provided schema. | ||
@@ -31,3 +39,3 @@ * | ||
*/ | ||
export const make = schema => go(schema.ast, {}); | ||
export const make = schema => makeLazy(schema)(FastCheck); | ||
const depthSize = 1; | ||
@@ -49,3 +57,3 @@ const record = (fc, key, value, options) => { | ||
const getHook = /*#__PURE__*/AST.getAnnotation(ArbitraryHookId); | ||
const getRefinementFromArbitrary = (ast, options) => { | ||
const getRefinementFromArbitrary = (ast, options, path) => { | ||
const constraints = combineConstraints(options.constraints, getConstraints(ast)); | ||
@@ -55,5 +63,6 @@ return go(ast.from, constraints ? { | ||
constraints | ||
} : options); | ||
} : options, path); | ||
}; | ||
const go = (ast, options) => { | ||
const getArbitraryErrorMessage = (message, path) => errors_.getErrorMessageWithPath(`cannot build an Arbitrary for ${message}`, path); | ||
const go = (ast, options, path) => { | ||
const hook = getHook(ast); | ||
@@ -63,5 +72,5 @@ if (Option.isSome(hook)) { | ||
case "Declaration": | ||
return hook.value(...ast.typeParameters.map(p => go(p, options))); | ||
return hook.value(...ast.typeParameters.map(p => go(p, options, path))); | ||
case "Refinement": | ||
return hook.value(getRefinementFromArbitrary(ast, options)); | ||
return hook.value(getRefinementFromArbitrary(ast, options, path)); | ||
default: | ||
@@ -74,3 +83,3 @@ return hook.value(); | ||
{ | ||
throw new Error(`cannot build an Arbitrary for a declaration without annotations (${AST.format(ast)})`); | ||
throw new Error(getArbitraryErrorMessage(`a declaration without annotations (${ast})`, path)); | ||
} | ||
@@ -86,3 +95,3 @@ case "Literal": | ||
return () => { | ||
throw new Error("cannot build an Arbitrary for `never`"); | ||
throw new Error(getArbitraryErrorMessage("`never`", path)); | ||
}; | ||
@@ -151,8 +160,9 @@ case "UnknownKeyword": | ||
} | ||
case "Tuple": | ||
case "TupleType": | ||
{ | ||
const elements = []; | ||
let hasOptionals = false; | ||
let i = 0; | ||
for (const element of ast.elements) { | ||
elements.push(go(element.type, options)); | ||
elements.push(go(element.type, options, path.concat(i++))); | ||
if (element.isOptional) { | ||
@@ -162,3 +172,3 @@ hasOptionals = true; | ||
} | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(e => go(e, options))); | ||
const rest = ast.rest.map(e => go(e, options, path)); | ||
return fc => { | ||
@@ -183,4 +193,4 @@ // --------------------------------------------- | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const [head, ...tail] = rest.value; | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest; | ||
const arb = head(fc); | ||
@@ -214,4 +224,4 @@ const constraints = options.constraints; | ||
{ | ||
const propertySignaturesTypes = ast.propertySignatures.map(f => go(f.type, options)); | ||
const indexSignatures = ast.indexSignatures.map(is => [go(is.parameter, options), go(is.type, options)]); | ||
const propertySignaturesTypes = ast.propertySignatures.map(ps => go(ps.type, options, path.concat(ps.name))); | ||
const indexSignatures = ast.indexSignatures.map(is => [go(is.parameter, options, path), go(is.type, options, path)]); | ||
return fc => { | ||
@@ -252,3 +262,3 @@ const arbs = {}; | ||
{ | ||
const types = ast.types.map(t => go(t, options)); | ||
const types = ast.types.map(t => go(t, options, path)); | ||
return fc => fc.oneof({ | ||
@@ -261,3 +271,3 @@ depthSize | ||
if (ast.enums.length === 0) { | ||
throw new Error("cannot build an Arbitrary for an empty enum"); | ||
throw new Error(getArbitraryErrorMessage("an empty enum", path)); | ||
} | ||
@@ -268,59 +278,94 @@ return fc => fc.oneof(...ast.enums.map(([_, value]) => fc.constant(value))); | ||
{ | ||
const from = getRefinementFromArbitrary(ast, options); | ||
return fc => from(fc).filter(a => Option.isNone(ast.filter(a, Parser.defaultParseOption, ast))); | ||
const from = getRefinementFromArbitrary(ast, options, path); | ||
return fc => from(fc).filter(a => Option.isNone(ast.filter(a, AST.defaultParseOption, ast))); | ||
} | ||
case "Suspend": | ||
{ | ||
const get = Internal.memoizeThunk(() => go(ast.f(), { | ||
const get = util_.memoizeThunk(() => go(ast.f(), { | ||
...options, | ||
isSuspend: true | ||
})); | ||
}, path)); | ||
return fc => fc.constant(null).chain(() => get()(fc)); | ||
} | ||
case "Transform": | ||
return go(ast.to, options); | ||
case "Transformation": | ||
return go(ast.to, options, path); | ||
} | ||
}; | ||
/** @internal */ | ||
export const numberConstraints = constraints => { | ||
if (Predicate.isNumber(constraints.min)) { | ||
constraints.min = Math.fround(constraints.min); | ||
export class NumberConstraints { | ||
_tag = "NumberConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isNumber(options.min)) { | ||
this.constraints.min = Math.fround(options.min); | ||
} | ||
if (Predicate.isNumber(options.max)) { | ||
this.constraints.max = Math.fround(options.max); | ||
} | ||
if (Predicate.isBoolean(options.noNaN)) { | ||
this.constraints.noNaN = options.noNaN; | ||
} | ||
if (Predicate.isBoolean(options.noDefaultInfinity)) { | ||
this.constraints.noDefaultInfinity = options.noDefaultInfinity; | ||
} | ||
} | ||
if (Predicate.isNumber(constraints.max)) { | ||
constraints.max = Math.fround(constraints.max); | ||
} | ||
/** @internal */ | ||
export class StringConstraints { | ||
_tag = "StringConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isNumber(options.minLength)) { | ||
this.constraints.minLength = options.minLength; | ||
} | ||
if (Predicate.isNumber(options.maxLength)) { | ||
this.constraints.maxLength = options.maxLength; | ||
} | ||
} | ||
return { | ||
_tag: "NumberConstraints", | ||
constraints | ||
}; | ||
}; | ||
} | ||
/** @internal */ | ||
export const stringConstraints = constraints => { | ||
return { | ||
_tag: "StringConstraints", | ||
constraints | ||
}; | ||
}; | ||
export class IntegerConstraints { | ||
_tag = "IntegerConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isNumber(options.min)) { | ||
this.constraints.min = options.min; | ||
} | ||
if (Predicate.isNumber(options.max)) { | ||
this.constraints.max = options.max; | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
export const integerConstraints = constraints => { | ||
return { | ||
_tag: "IntegerConstraints", | ||
constraints | ||
}; | ||
}; | ||
export class ArrayConstraints { | ||
_tag = "ArrayConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isNumber(options.minLength)) { | ||
this.constraints.minLength = options.minLength; | ||
} | ||
if (Predicate.isNumber(options.maxLength)) { | ||
this.constraints.maxLength = options.maxLength; | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
export const arrayConstraints = constraints => { | ||
return { | ||
_tag: "ArrayConstraints", | ||
constraints | ||
}; | ||
}; | ||
export class BigIntConstraints { | ||
_tag = "BigIntConstraints"; | ||
constraints; | ||
constructor(options) { | ||
this.constraints = {}; | ||
if (Predicate.isBigInt(options.min)) { | ||
this.constraints.min = options.min; | ||
} | ||
if (Predicate.isBigInt(options.max)) { | ||
this.constraints.max = options.max; | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
export const bigintConstraints = constraints => { | ||
return { | ||
_tag: "BigIntConstraints", | ||
constraints | ||
}; | ||
}; | ||
/** @internal */ | ||
export const getConstraints = ast => { | ||
@@ -330,84 +375,35 @@ const TypeAnnotationId = ast.annotations[AST.TypeAnnotationId]; | ||
switch (TypeAnnotationId) { | ||
// int | ||
case filters_.IntTypeId: | ||
return new IntegerConstraints({}); | ||
// number | ||
case filters.GreaterThanTypeId: | ||
case filters.GreaterThanOrEqualToTypeId: | ||
return numberConstraints({ | ||
min: jsonSchema.exclusiveMinimum ?? jsonSchema.minimum | ||
}); | ||
case filters.LessThanTypeId: | ||
case filters.LessThanOrEqualToTypeId: | ||
return numberConstraints({ | ||
case filters_.GreaterThanTypeId: | ||
case filters_.GreaterThanOrEqualToTypeId: | ||
case filters_.LessThanTypeId: | ||
case filters_.LessThanOrEqualToTypeId: | ||
case filters_.BetweenTypeId: | ||
return new NumberConstraints({ | ||
min: jsonSchema.exclusiveMinimum ?? jsonSchema.minimum, | ||
max: jsonSchema.exclusiveMaximum ?? jsonSchema.maximum | ||
}); | ||
case filters.IntTypeId: | ||
return integerConstraints({}); | ||
case filters.BetweenTypeId: | ||
{ | ||
const min = jsonSchema.minimum; | ||
const max = jsonSchema.maximum; | ||
const constraints = {}; | ||
if (Predicate.isNumber(min)) { | ||
constraints.min = min; | ||
} | ||
if (Predicate.isNumber(max)) { | ||
constraints.max = max; | ||
} | ||
return numberConstraints(constraints); | ||
} | ||
// bigint | ||
case filters.GreaterThanBigintTypeId: | ||
case filters.GreaterThanOrEqualToBigintTypeId: | ||
case filters_.GreaterThanBigintTypeId: | ||
case filters_.GreaterThanOrEqualToBigIntTypeId: | ||
case filters_.LessThanBigIntTypeId: | ||
case filters_.LessThanOrEqualToBigIntTypeId: | ||
case filters_.BetweenBigintTypeId: | ||
{ | ||
const params = ast.annotations[TypeAnnotationId]; | ||
return bigintConstraints({ | ||
min: params.min | ||
}); | ||
const constraints = ast.annotations[TypeAnnotationId]; | ||
return new BigIntConstraints(constraints); | ||
} | ||
case filters.LessThanBigintTypeId: | ||
case filters.LessThanOrEqualToBigintTypeId: | ||
{ | ||
const params = ast.annotations[TypeAnnotationId]; | ||
return bigintConstraints({ | ||
max: params.max | ||
}); | ||
} | ||
case filters.BetweenBigintTypeId: | ||
{ | ||
const params = ast.annotations[TypeAnnotationId]; | ||
const min = params.min; | ||
const max = params.max; | ||
const constraints = {}; | ||
if (Predicate.isBigInt(min)) { | ||
constraints.min = min; | ||
} | ||
if (Predicate.isBigInt(max)) { | ||
constraints.max = max; | ||
} | ||
return bigintConstraints(constraints); | ||
} | ||
// string | ||
case filters.MinLengthTypeId: | ||
return stringConstraints({ | ||
minLength: jsonSchema.minLength | ||
}); | ||
case filters.MaxLengthTypeId: | ||
return stringConstraints({ | ||
maxLength: jsonSchema.maxLength | ||
}); | ||
case filters.LengthTypeId: | ||
return stringConstraints({ | ||
minLength: jsonSchema.minLength, | ||
maxLength: jsonSchema.maxLength | ||
}); | ||
case filters_.MinLengthTypeId: | ||
case filters_.MaxLengthTypeId: | ||
case filters_.LengthTypeId: | ||
return new StringConstraints(jsonSchema); | ||
// array | ||
case filters.MinItemsTypeId: | ||
return arrayConstraints({ | ||
minLength: jsonSchema.minItems | ||
}); | ||
case filters.MaxItemsTypeId: | ||
return arrayConstraints({ | ||
maxLength: jsonSchema.maxItems | ||
}); | ||
case filters.ItemsCountTypeId: | ||
return arrayConstraints({ | ||
case filters_.MinItemsTypeId: | ||
case filters_.MaxItemsTypeId: | ||
case filters_.ItemsCountTypeId: | ||
return new ArrayConstraints({ | ||
minLength: jsonSchema.minItems, | ||
@@ -431,17 +427,6 @@ maxLength: jsonSchema.maxItems | ||
case "ArrayConstraints": | ||
{ | ||
const c = { | ||
...c1.constraints, | ||
...c2.constraints | ||
}; | ||
const minLength = getMax(c1.constraints.minLength, c2.constraints.minLength); | ||
if (Predicate.isNumber(minLength)) { | ||
c.minLength = minLength; | ||
} | ||
const maxLength = getMin(c1.constraints.maxLength, c2.constraints.maxLength); | ||
if (Predicate.isNumber(maxLength)) { | ||
c.maxLength = maxLength; | ||
} | ||
return arrayConstraints(c); | ||
} | ||
return new ArrayConstraints({ | ||
minLength: getMax(c1.constraints.minLength, c2.constraints.minLength), | ||
maxLength: getMin(c1.constraints.maxLength, c2.constraints.maxLength) | ||
}); | ||
} | ||
@@ -454,32 +439,13 @@ break; | ||
case "NumberConstraints": | ||
{ | ||
const c = { | ||
...c1.constraints, | ||
...c2.constraints | ||
}; | ||
const min = getMax(c1.constraints.min, c2.constraints.min); | ||
if (Predicate.isNumber(min)) { | ||
c.min = min; | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max); | ||
if (Predicate.isNumber(max)) { | ||
c.max = max; | ||
} | ||
return numberConstraints(c); | ||
} | ||
return new NumberConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max), | ||
noNaN: getOr(c1.constraints.noNaN, c2.constraints.noNaN), | ||
noDefaultInfinity: getOr(c1.constraints.noDefaultInfinity, c2.constraints.noDefaultInfinity) | ||
}); | ||
case "IntegerConstraints": | ||
{ | ||
const c = { | ||
...c2.constraints | ||
}; | ||
const min = getMax(c1.constraints.min, c2.constraints.min); | ||
if (Predicate.isNumber(min)) { | ||
c.min = min; | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max); | ||
if (Predicate.isNumber(max)) { | ||
c.max = max; | ||
} | ||
return integerConstraints(c); | ||
} | ||
return new IntegerConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max) | ||
}); | ||
} | ||
@@ -492,17 +458,6 @@ break; | ||
case "BigIntConstraints": | ||
{ | ||
const c = { | ||
...c1.constraints, | ||
...c2.constraints | ||
}; | ||
const min = getMax(c1.constraints.min, c2.constraints.min); | ||
if (Predicate.isBigInt(min)) { | ||
c.min = min; | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max); | ||
if (Predicate.isBigInt(max)) { | ||
c.max = max; | ||
} | ||
return bigintConstraints(c); | ||
} | ||
return new BigIntConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max) | ||
}); | ||
} | ||
@@ -515,17 +470,6 @@ break; | ||
case "StringConstraints": | ||
{ | ||
const c = { | ||
...c1.constraints, | ||
...c2.constraints | ||
}; | ||
const minLength = getMax(c1.constraints.minLength, c2.constraints.minLength); | ||
if (Predicate.isNumber(minLength)) { | ||
c.minLength = minLength; | ||
} | ||
const maxLength = getMin(c1.constraints.maxLength, c2.constraints.maxLength); | ||
if (Predicate.isNumber(maxLength)) { | ||
c.maxLength = maxLength; | ||
} | ||
return stringConstraints(c); | ||
} | ||
return new StringConstraints({ | ||
minLength: getMax(c1.constraints.minLength, c2.constraints.minLength), | ||
maxLength: getMin(c1.constraints.maxLength, c2.constraints.maxLength) | ||
}); | ||
} | ||
@@ -540,14 +484,6 @@ break; | ||
{ | ||
const c = { | ||
...c1.constraints | ||
}; | ||
const min = getMax(c1.constraints.min, c2.constraints.min); | ||
if (Predicate.isNumber(min)) { | ||
c.min = min; | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max); | ||
if (Predicate.isNumber(max)) { | ||
c.max = max; | ||
} | ||
return integerConstraints(c); | ||
return new IntegerConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max) | ||
}); | ||
} | ||
@@ -559,2 +495,5 @@ } | ||
}; | ||
const getOr = (a, b) => { | ||
return a === undefined ? b : b === undefined ? a : a || b; | ||
}; | ||
function getMax(n1, n2) { | ||
@@ -561,0 +500,0 @@ return n1 === undefined ? n2 : n2 === undefined ? n1 : n1 <= n2 ? n2 : n1; |
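`make` is now defined as `makeLazy(schema)(FastCheck)`, so a `LazyArbitrary` can be built once and applied to a fast-check instance explicitly. A sketch (schema constructors assumed):

import * as Arbitrary from "@effect/schema/Arbitrary"
import * as FastCheck from "@effect/schema/FastCheck"
import * as Schema from "@effect/schema/Schema"

const Person = Schema.struct({ name: Schema.string, age: Schema.number })

const lazy = Arbitrary.makeLazy(Person) // (fc) => Arbitrary<{ name: string; age: number }>
const arb = lazy(FastCheck)             // equivalent to Arbitrary.make(Person)
console.log(FastCheck.sample(arb, 2))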
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Option from "effect/Option"; | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import * as AST from "./AST.js"; | ||
import * as Arr from "effect/Array"; | ||
import * as Effect from "effect/Effect"; | ||
import * as TreeFormatter from "./TreeFormatter.js"; | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatIssue = issue => go(issue); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatIssueSync = issue => Effect.runSync(formatIssue(issue)); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatError = error => formatIssue(error.error); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatErrorSync = error => formatIssueSync(error.error); | ||
const succeed = issue => Effect.succeed([issue]); | ||
const getArray = (issue, path, onFailure) => Effect.matchEffect(TreeFormatter.getMessage(issue), { | ||
onFailure, | ||
onSuccess: message => succeed({ | ||
_tag: issue._tag, | ||
path, | ||
message | ||
}) | ||
}); | ||
const flatten = eff => Effect.map(eff, Arr.flatten); | ||
const go = (e, path = []) => { | ||
@@ -12,103 +41,44 @@ const _tag = e._tag; | ||
case "Type": | ||
return [{ | ||
return Effect.map(TreeFormatter.formatTypeMessage(e), message => [{ | ||
_tag, | ||
path, | ||
message: TreeFormatter.formatTypeMessage(e) | ||
}]; | ||
message | ||
}]); | ||
case "Forbidden": | ||
return [{ | ||
return succeed({ | ||
_tag, | ||
path, | ||
message: TreeFormatter.formatForbiddenMessage(e) | ||
}]; | ||
}); | ||
case "Unexpected": | ||
return [{ | ||
return succeed({ | ||
_tag, | ||
path, | ||
message: `is unexpected, expected ${AST.format(e.ast, true)}` | ||
}]; | ||
message: `is unexpected, expected ${e.ast.toString(true)}` | ||
}); | ||
case "Missing": | ||
return [{ | ||
return succeed({ | ||
_tag, | ||
path, | ||
message: "is missing" | ||
}]; | ||
}); | ||
case "Union": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => ReadonlyArray.flatMap(e.errors, e => { | ||
switch (e._tag) { | ||
case "Member": | ||
return go(e.error, path); | ||
default: | ||
return go(e, path); | ||
} | ||
}), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
case "Tuple": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => ReadonlyArray.flatMap(e.errors, index => go(index.error, [...path, index.index])), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
return getArray(e, path, () => flatten(Effect.forEach(e.errors, e => { | ||
switch (e._tag) { | ||
case "Member": | ||
return go(e.error, path); | ||
default: | ||
return go(e, path); | ||
} | ||
}))); | ||
case "TupleType": | ||
return getArray(e, path, () => flatten(Effect.forEach(e.errors, index => go(index.error, path.concat(index.index))))); | ||
case "TypeLiteral": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => ReadonlyArray.flatMap(e.errors, key => go(key.error, [...path, key.key])), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
case "Transform": | ||
return Option.match(TreeFormatter.getTransformMessage(e, e.actual), { | ||
onNone: () => go(e.error, path), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
return getArray(e, path, () => flatten(Effect.forEach(e.errors, key => go(key.error, path.concat(key.key))))); | ||
case "Declaration": | ||
case "Refinement": | ||
return Option.match(TreeFormatter.getRefinementMessage(e, e.actual), { | ||
onNone: () => go(e.error, path), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
case "Declaration": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => go(e.error, path), | ||
onSome: message => [{ | ||
_tag, | ||
path, | ||
message | ||
}] | ||
}); | ||
case "Transformation": | ||
return getArray(e, path, () => go(e.error, path)); | ||
} | ||
}; | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatIssues = issues => ReadonlyArray.flatMap(issues, e => go(e)); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatIssue = error => formatIssues([error]); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatError = error => formatIssue(error.error); | ||
//# sourceMappingURL=ArrayFormatter.js.map |
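The array formatter follows the same pattern as the tree formatter: `formatIssue`/`formatError` return an `Effect`, and `formatIssueSync`/`formatErrorSync` are the synchronous counterparts. A sketch of the structured output (constructors and exact messages assumed):

import * as ArrayFormatter from "@effect/schema/ArrayFormatter"
import * as Either from "effect/Either"
import * as ParseResult from "@effect/schema/ParseResult"
import * as Schema from "@effect/schema/Schema"

const result = ParseResult.decodeUnknownEither(Schema.struct({ age: Schema.number }))({ age: "x" })
if (Either.isLeft(result)) {
  // e.g. [{ _tag: "Type", path: ["age"], message: "Expected a number, ..." }]
  console.log(ArrayFormatter.formatIssueSync(result.left))
}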
dist/esm/AST.js
/** | ||
* @since 1.0.0 | ||
*/ | ||
import { dual, identity, pipe } from "effect/Function"; | ||
import * as Arr from "effect/Array"; | ||
import { dual, identity } from "effect/Function"; | ||
import { globalValue } from "effect/GlobalValue"; | ||
import * as Hash from "effect/Hash"; | ||
import * as Number from "effect/Number"; | ||
@@ -11,4 +11,5 @@ import * as Option from "effect/Option"; | ||
import * as Predicate from "effect/Predicate"; | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import * as Internal from "./internal/ast.js"; | ||
import * as regexp from "effect/RegExp"; | ||
import * as errors_ from "./internal/errors.js"; | ||
import * as util_ from "./internal/util.js"; | ||
/** | ||
@@ -68,2 +69,19 @@ * @category annotations | ||
*/ | ||
export const ConcurrencyAnnotationId = /*#__PURE__*/Symbol.for("@effect/schema/annotation/Concurrency"); | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export const BatchingAnnotationId = /*#__PURE__*/Symbol.for("@effect/schema/annotation/Batching"); | ||
/** @internal */ | ||
export const SurrogateAnnotationId = /*#__PURE__*/Symbol.for("@effect/schema/annotation/Surrogate"); | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export const ParseIssueTitleAnnotationId = /*#__PURE__*/Symbol.for("@effect/schema/annotation/ParseIssueTitle"); | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export const getAnnotation = /*#__PURE__*/dual(2, (annotated, key) => Object.prototype.hasOwnProperty.call(annotated.annotations, key) ? Option.some(annotated.annotations[key]) : Option.none()); | ||
@@ -74,2 +92,7 @@ /** | ||
*/ | ||
export const getBrandAnnotation = /*#__PURE__*/getAnnotation(BrandAnnotationId); | ||
/** | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export const getMessageAnnotation = /*#__PURE__*/getAnnotation(MessageAnnotationId); | ||
@@ -107,44 +130,63 @@ /** | ||
/** | ||
* @category constructors | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export const createDeclaration = (typeParameters, decodeUnknown, encodeUnknown, annotations = {}) => ({ | ||
_tag: "Declaration", | ||
typeParameters, | ||
decodeUnknown, | ||
encodeUnknown, | ||
annotations | ||
}); | ||
export const getDocumentationAnnotation = /*#__PURE__*/getAnnotation(DocumentationAnnotationId); | ||
/** | ||
* @category guards | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export const isDeclaration = ast => ast._tag === "Declaration"; | ||
export const getConcurrencyAnnotation = /*#__PURE__*/getAnnotation(ConcurrencyAnnotationId); | ||
/** | ||
* @category constructors | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export const createLiteral = (literal, annotations = {}) => ({ | ||
_tag: "Literal", | ||
literal, | ||
annotations | ||
}); | ||
export const getBatchingAnnotation = /*#__PURE__*/getAnnotation(BatchingAnnotationId); | ||
/** | ||
* @category guards | ||
* @category annotations | ||
* @since 1.0.0 | ||
*/ | ||
export const isLiteral = ast => ast._tag === "Literal"; | ||
export const getParseIssueTitleAnnotation = /*#__PURE__*/getAnnotation(ParseIssueTitleAnnotationId); | ||
/** @internal */ | ||
export const _null = /*#__PURE__*/createLiteral(null, { | ||
[IdentifierAnnotationId]: "null" | ||
}); | ||
export const getSurrogateAnnotation = /*#__PURE__*/getAnnotation(SurrogateAnnotationId); | ||
const JSONIdentifierAnnotationId = /*#__PURE__*/Symbol.for("@effect/schema/annotation/JSONIdentifier"); | ||
/** @internal */ | ||
export const getJSONIdentifierAnnotation = /*#__PURE__*/getAnnotation(JSONIdentifierAnnotationId); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createUniqueSymbol = (symbol, annotations = {}) => ({ | ||
_tag: "UniqueSymbol", | ||
symbol, | ||
annotations | ||
}); | ||
export class Declaration { | ||
typeParameters; | ||
decodeUnknown; | ||
encodeUnknown; | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Declaration"; | ||
constructor(typeParameters, decodeUnknown, encodeUnknown, annotations = {}) { | ||
this.typeParameters = typeParameters; | ||
this.decodeUnknown = decodeUnknown; | ||
this.encodeUnknown = encodeUnknown; | ||
this.annotations = annotations; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => "<declaration schema>"); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
typeParameters: this.typeParameters.map(ast => ast.toJSON()), | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
const createASTGuard = tag => ast => ast._tag === tag; | ||
/** | ||
@@ -154,13 +196,35 @@ * @category guards | ||
*/ | ||
export const isUniqueSymbol = ast => ast._tag === "UniqueSymbol"; | ||
export const isDeclaration = /*#__PURE__*/createASTGuard("Declaration"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const undefinedKeyword = { | ||
_tag: "UndefinedKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "undefined" | ||
export class Literal { | ||
literal; | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Literal"; | ||
constructor(literal, annotations = {}) { | ||
this.literal = literal; | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => util_.formatUnknown(this.literal)); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
literal: Predicate.isBigInt(this.literal) ? String(this.literal) : this.literal, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
@@ -170,3 +234,7 @@ * @category guards | ||
*/ | ||
export const isUndefinedKeyword = ast => ast._tag === "UndefinedKeyword"; | ||
export const isLiteral = /*#__PURE__*/createASTGuard("Literal"); | ||
const $null = /*#__PURE__*/new Literal(null, { | ||
[IdentifierAnnotationId]: "null" | ||
}); | ||
export { | ||
/** | ||
@@ -176,8 +244,35 @@ * @category constructors | ||
*/ | ||
export const voidKeyword = { | ||
_tag: "VoidKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "void" | ||
$null as null }; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class UniqueSymbol { | ||
symbol; | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "UniqueSymbol"; | ||
constructor(symbol, annotations = {}) { | ||
this.symbol = symbol; | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => util_.formatUnknown(this.symbol)); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
symbol: String(this.symbol), | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
@@ -187,153 +282,412 @@ * @category guards | ||
*/ | ||
export const isVoidKeyword = ast => ast._tag === "VoidKeyword"; | ||
export const isUniqueSymbol = /*#__PURE__*/createASTGuard("UniqueSymbol"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const neverKeyword = { | ||
_tag: "NeverKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "never" | ||
export class UndefinedKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "UndefinedKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const undefinedKeyword = /*#__PURE__*/new UndefinedKeyword({ | ||
[TitleAnnotationId]: "undefined" | ||
}); | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isNeverKeyword = ast => ast._tag === "NeverKeyword"; | ||
export const isUndefinedKeyword = /*#__PURE__*/createASTGuard("UndefinedKeyword"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const unknownKeyword = { | ||
_tag: "UnknownKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "unknown" | ||
export class VoidKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "VoidKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const voidKeyword = /*#__PURE__*/new VoidKeyword({ | ||
[TitleAnnotationId]: "void" | ||
}); | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isUnknownKeyword = ast => ast._tag === "UnknownKeyword"; | ||
export const isVoidKeyword = /*#__PURE__*/createASTGuard("VoidKeyword"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const anyKeyword = { | ||
_tag: "AnyKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "any" | ||
export class NeverKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "NeverKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const neverKeyword = /*#__PURE__*/new NeverKeyword({ | ||
[TitleAnnotationId]: "never" | ||
}); | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isAnyKeyword = ast => ast._tag === "AnyKeyword"; | ||
export const isNeverKeyword = /*#__PURE__*/createASTGuard("NeverKeyword"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const stringKeyword = { | ||
_tag: "StringKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "string", | ||
[DescriptionAnnotationId]: "a string" | ||
export class UnknownKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "UnknownKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const unknownKeyword = /*#__PURE__*/new UnknownKeyword({ | ||
[TitleAnnotationId]: "unknown" | ||
}); | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isStringKeyword = ast => ast._tag === "StringKeyword"; | ||
export const isUnknownKeyword = /*#__PURE__*/createASTGuard("UnknownKeyword"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const numberKeyword = { | ||
_tag: "NumberKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "number", | ||
[DescriptionAnnotationId]: "a number" | ||
export class AnyKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "AnyKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const anyKeyword = /*#__PURE__*/new AnyKeyword({ | ||
[TitleAnnotationId]: "any" | ||
}); | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isNumberKeyword = ast => ast._tag === "NumberKeyword"; | ||
export const isAnyKeyword = /*#__PURE__*/createASTGuard("AnyKeyword"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const booleanKeyword = { | ||
_tag: "BooleanKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "boolean", | ||
[DescriptionAnnotationId]: "a boolean" | ||
export class StringKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "StringKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const stringKeyword = /*#__PURE__*/new StringKeyword({ | ||
[TitleAnnotationId]: "string", | ||
[DescriptionAnnotationId]: "a string" | ||
}); | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isBooleanKeyword = ast => ast._tag === "BooleanKeyword"; | ||
export const isStringKeyword = /*#__PURE__*/createASTGuard("StringKeyword"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const bigIntKeyword = { | ||
_tag: "BigIntKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "bigint", | ||
[DescriptionAnnotationId]: "a bigint" | ||
export class NumberKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "NumberKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const numberKeyword = /*#__PURE__*/new NumberKeyword({ | ||
[TitleAnnotationId]: "number", | ||
[DescriptionAnnotationId]: "a number" | ||
}); | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isBigIntKeyword = ast => ast._tag === "BigIntKeyword"; | ||
export const isNumberKeyword = /*#__PURE__*/createASTGuard("NumberKeyword"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const symbolKeyword = { | ||
_tag: "SymbolKeyword", | ||
annotations: { | ||
[TitleAnnotationId]: "symbol", | ||
[DescriptionAnnotationId]: "a symbol" | ||
export class BooleanKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "BooleanKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const booleanKeyword = /*#__PURE__*/new BooleanKeyword({ | ||
[TitleAnnotationId]: "boolean", | ||
[DescriptionAnnotationId]: "a boolean" | ||
}); | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isSymbolKeyword = ast => ast._tag === "SymbolKeyword"; | ||
export const isBooleanKeyword = /*#__PURE__*/createASTGuard("BooleanKeyword"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const objectKeyword = { | ||
_tag: "ObjectKeyword", | ||
annotations: { | ||
[IdentifierAnnotationId]: "object", | ||
[TitleAnnotationId]: "object", | ||
[DescriptionAnnotationId]: "an object in the TypeScript meaning, i.e. the `object` type" | ||
export class BigIntKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "BigIntKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const bigIntKeyword = /*#__PURE__*/new BigIntKeyword({ | ||
[TitleAnnotationId]: "bigint", | ||
[DescriptionAnnotationId]: "a bigint" | ||
}); | ||
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isObjectKeyword = ast => ast._tag === "ObjectKeyword"; | ||
export const isBigIntKeyword = /*#__PURE__*/createASTGuard("BigIntKeyword"); | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class SymbolKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "SymbolKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const createEnums = (enums, annotations = {}) => ({ | ||
_tag: "Enums", | ||
enums, | ||
annotations | ||
export const symbolKeyword = /*#__PURE__*/new SymbolKeyword({ | ||
[TitleAnnotationId]: "symbol", | ||
[DescriptionAnnotationId]: "a symbol" | ||
}); | ||
@@ -344,13 +698,41 @@ /** | ||
*/ | ||
export const isEnums = ast => ast._tag === "Enums"; | ||
export const isSymbolKeyword = /*#__PURE__*/createASTGuard("SymbolKeyword"); | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class ObjectKeyword { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "ObjectKeyword"; | ||
constructor(annotations = {}) { | ||
this.annotations = annotations; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return formatKeyword(this, verbose); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const createTemplateLiteral = (head, spans, annotations = {}) => ReadonlyArray.isNonEmptyReadonlyArray(spans) ? { | ||
_tag: "TemplateLiteral", | ||
head, | ||
spans, | ||
annotations | ||
} : createLiteral(head); | ||
export const objectKeyword = /*#__PURE__*/new ObjectKeyword({ | ||
[IdentifierAnnotationId]: "object", | ||
[TitleAnnotationId]: "object", | ||
[DescriptionAnnotationId]: "an object in the TypeScript meaning, i.e. the `object` type" | ||
}); | ||
/** | ||
@@ -360,39 +742,254 @@ * @category guards | ||
*/ | ||
export const isTemplateLiteral = ast => ast._tag === "TemplateLiteral"; | ||
export const isObjectKeyword = /*#__PURE__*/createASTGuard("ObjectKeyword"); | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createElement = (type, isOptional) => ({ | ||
type, | ||
isOptional | ||
}); | ||
export class Enums { | ||
enums; | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Enums"; | ||
constructor(enums, annotations = {}) { | ||
this.enums = enums; | ||
this.annotations = annotations; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => `<enum ${this.enums.length} value(s): ${this.enums.map((_, value) => JSON.stringify(value)).join(" | ")}>`); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
enums: this.enums, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
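A small sketch of constructing an Enums node by hand (hypothetical members): the enums field holds [memberName, memberValue] pairs rather than raw values.

import * as AST from "@effect/schema/AST"

const fruits = new AST.Enums([["Apple", 0], ["Banana", 1]])
console.log(fruits.toJSON())
// expected: { _tag: "Enums", enums: [["Apple", 0], ["Banana", 1]], annotations: {} }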
/** | ||
* @category constructors | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const createTuple = (elements, rest, isReadonly, annotations = {}) => ({ | ||
_tag: "Tuple", | ||
elements, | ||
rest, | ||
isReadonly, | ||
annotations | ||
}); | ||
export const isEnums = /*#__PURE__*/createASTGuard("Enums"); | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class TemplateLiteralSpan { | ||
type; | ||
literal; | ||
constructor(type, literal) { | ||
this.type = type; | ||
this.literal = literal; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString() { | ||
switch (this.type._tag) { | ||
case "StringKeyword": | ||
return "${string}"; | ||
case "NumberKeyword": | ||
return "${number}"; | ||
} | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
type: this.type.toJSON(), | ||
literal: this.literal | ||
}; | ||
} | ||
} | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class TemplateLiteral { | ||
head; | ||
spans; | ||
annotations; | ||
static make = (head, spans, annotations = {}) => Arr.isNonEmptyReadonlyArray(spans) ? new TemplateLiteral(head, spans, annotations) : new Literal(head); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "TemplateLiteral"; | ||
constructor(head, spans, annotations = {}) { | ||
this.head = head; | ||
this.spans = spans; | ||
this.annotations = annotations; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => formatTemplateLiteral(this)); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
head: this.head, | ||
spans: this.spans.map(span => span.toJSON()), | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
const formatTemplateLiteral = ast => "`" + ast.head + ast.spans.map(span => String(span) + span.literal).join("") + "`"; | ||
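A sketch of TemplateLiteral.make as defined above: with at least one span it renders in backtick form via formatTemplateLiteral, and with no spans it degrades to a plain Literal node.

import * as AST from "@effect/schema/AST"

const tl = AST.TemplateLiteral.make("a", [new AST.TemplateLiteralSpan(AST.stringKeyword, "b")])
console.log(String(tl)) // "`a${string}b`"

// No spans: make returns a Literal instead of a TemplateLiteral.
console.log(AST.TemplateLiteral.make("prefix", [])._tag) // "Literal"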
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isTuple = ast => ast._tag === "Tuple"; | ||
export const isTemplateLiteral = /*#__PURE__*/createASTGuard("TemplateLiteral"); | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createPropertySignature = (name, type, isOptional, isReadonly, annotations = {}) => ({ | ||
name, | ||
type, | ||
isOptional, | ||
isReadonly, | ||
annotations | ||
}); | ||
export class Element { | ||
type; | ||
isOptional; | ||
constructor(type, isOptional) { | ||
this.type = type; | ||
this.isOptional = isOptional; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
type: this.type.toJSON(), | ||
isOptional: this.isOptional | ||
}; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString() { | ||
return String(this.type) + (this.isOptional ? "?" : ""); | ||
} | ||
} | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class TupleType { | ||
elements; | ||
rest; | ||
isReadonly; | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "TupleType"; | ||
constructor(elements, rest, isReadonly, annotations = {}) { | ||
this.elements = elements; | ||
this.rest = rest; | ||
this.isReadonly = isReadonly; | ||
this.annotations = annotations; | ||
let hasOptionalElement = false; | ||
let hasIllegalRequiredElement = false; | ||
for (const e of elements) { | ||
if (e.isOptional) { | ||
hasOptionalElement = true; | ||
} else if (hasOptionalElement) { | ||
hasIllegalRequiredElement = true; | ||
break; | ||
} | ||
} | ||
if (hasIllegalRequiredElement || hasOptionalElement && rest.length > 1) { | ||
throw new Error(getRequiredElementFollowinAnOptionalElementErrorMessage); | ||
} | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => formatTuple(this)); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
elements: this.elements.map(e => e.toJSON()), | ||
rest: this.rest.map(ast => ast.toJSON()), | ||
isReadonly: this.isReadonly, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
const formatTuple = ast => { | ||
const formattedElements = ast.elements.map(String).join(", "); | ||
return Arr.matchLeft(ast.rest, { | ||
onEmpty: () => `readonly [${formattedElements}]`, | ||
onNonEmpty: (head, tail) => { | ||
const formattedHead = String(head); | ||
const wrappedHead = formattedHead.includes(" | ") ? `(${formattedHead})` : formattedHead; | ||
if (tail.length > 0) { | ||
const formattedTail = tail.map(String).join(", "); | ||
if (ast.elements.length > 0) { | ||
return `readonly [${formattedElements}, ...${wrappedHead}[], ${formattedTail}]`; | ||
} else { | ||
return `readonly [...${wrappedHead}[], ${formattedTail}]`; | ||
} | ||
} else { | ||
if (ast.elements.length > 0) { | ||
return `readonly [${formattedElements}, ...${wrappedHead}[]]`; | ||
} else { | ||
return `ReadonlyArray<${formattedHead}>`; | ||
} | ||
} | ||
} | ||
}); | ||
}; | ||
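A sketch of TupleType construction and formatting: the constructor rejects a required element after an optional one (or an optional element combined with more than one rest member), and formatTuple collapses a lone rest element into ReadonlyArray<...>.

import * as AST from "@effect/schema/AST"

const pair = new AST.TupleType(
  [new AST.Element(AST.stringKeyword, false)], // one required element
  [AST.numberKeyword],                         // rest
  true                                         // readonly
)
console.log(String(pair)) // "readonly [string, ...number[]]"

const numbers = new AST.TupleType([], [AST.numberKeyword], true)
console.log(String(numbers)) // "ReadonlyArray<number>"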
/** | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isTupleType = /*#__PURE__*/createASTGuard("TupleType"); | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class PropertySignature { | ||
name; | ||
type; | ||
isOptional; | ||
isReadonly; | ||
annotations; | ||
constructor(name, type, isOptional, isReadonly, annotations = {}) { | ||
this.name = name; | ||
this.type = type; | ||
this.isOptional = isOptional; | ||
this.isReadonly = isReadonly; | ||
this.annotations = annotations; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
name: String(this.name), | ||
type: this.type.toJSON(), | ||
isOptional: this.isOptional, | ||
isReadonly: this.isReadonly, | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export const isParameter = ast => { | ||
@@ -406,58 +1003,121 @@ switch (ast._tag) { | ||
return isParameter(ast.from); | ||
default: | ||
return false; | ||
} | ||
return false; | ||
}; | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createIndexSignature = (parameter, type, isReadonly) => { | ||
if (isParameter(parameter)) { | ||
export class IndexSignature { | ||
type; | ||
isReadonly; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
parameter; | ||
constructor(parameter, type, isReadonly) { | ||
this.type = type; | ||
this.isReadonly = isReadonly; | ||
if (isParameter(parameter)) { | ||
this.parameter = parameter; | ||
} else { | ||
throw new Error(getIndexSignatureParameterErrorMessage); | ||
} | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
parameter, | ||
type, | ||
isReadonly | ||
parameter: this.parameter.toJSON(), | ||
type: this.type.toJSON(), | ||
isReadonly: this.isReadonly | ||
}; | ||
} | ||
throw new Error("An index signature parameter type must be 'string', 'symbol', a template literal type or a refinement of the previous types"); | ||
}; | ||
} | ||
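A sketch of the parameter check the class performs at construction time (the old createIndexSignature ran the same check after building the object):

import * as AST from "@effect/schema/AST"

// string, symbol and template literal parameters (or refinements of them) are accepted:
const byString = new AST.IndexSignature(AST.stringKeyword, AST.numberKeyword, true)
console.log(byString.toJSON().isReadonly) // true

// Any other parameter throws getIndexSignatureParameterErrorMessage, e.g.:
// new AST.IndexSignature(new AST.Literal("a"), AST.numberKeyword, true) // would throw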
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createTypeLiteral = (propertySignatures, indexSignatures, annotations = {}) => { | ||
// check for duplicate property signatures | ||
const keys = {}; | ||
for (let i = 0; i < propertySignatures.length; i++) { | ||
const name = propertySignatures[i].name; | ||
if (Object.prototype.hasOwnProperty.call(keys, name)) { | ||
throw new Error(`Duplicate property signature ${String(name)}`); | ||
export class TypeLiteral { | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "TypeLiteral"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
propertySignatures; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
indexSignatures; | ||
constructor(propertySignatures, indexSignatures, annotations = {}) { | ||
this.annotations = annotations; | ||
// check for duplicate property signatures | ||
const keys = {}; | ||
for (let i = 0; i < propertySignatures.length; i++) { | ||
const name = propertySignatures[i].name; | ||
if (Object.prototype.hasOwnProperty.call(keys, name)) { | ||
throw new Error(errors_.getDuplicatePropertySignatureErrorMessage(name)); | ||
} | ||
keys[name] = null; | ||
} | ||
keys[name] = null; | ||
} | ||
// check for duplicate index signatures | ||
const parameters = { | ||
string: false, | ||
symbol: false | ||
}; | ||
for (let i = 0; i < indexSignatures.length; i++) { | ||
const parameter = getParameterBase(indexSignatures[i].parameter); | ||
if (isStringKeyword(parameter)) { | ||
if (parameters.string) { | ||
throw new Error("Duplicate index signature for type `string`"); | ||
// check for duplicate index signatures | ||
const parameters = { | ||
string: false, | ||
symbol: false | ||
}; | ||
for (let i = 0; i < indexSignatures.length; i++) { | ||
const parameter = getParameterBase(indexSignatures[i].parameter); | ||
if (isStringKeyword(parameter)) { | ||
if (parameters.string) { | ||
throw new Error(getDuplicateIndexSignatureErrorMessage("string")); | ||
} | ||
parameters.string = true; | ||
} else if (isSymbolKeyword(parameter)) { | ||
if (parameters.symbol) { | ||
throw new Error(getDuplicateIndexSignatureErrorMessage("symbol")); | ||
} | ||
parameters.symbol = true; | ||
} | ||
parameters.string = true; | ||
} else if (isSymbolKeyword(parameter)) { | ||
if (parameters.symbol) { | ||
throw new Error("Duplicate index signature for type `symbol`"); | ||
} | ||
parameters.symbol = true; | ||
} | ||
this.propertySignatures = sortPropertySignatures(propertySignatures); | ||
this.indexSignatures = sortIndexSignatures(indexSignatures); | ||
} | ||
return { | ||
_tag: "TypeLiteral", | ||
propertySignatures: sortPropertySignatures(propertySignatures), | ||
indexSignatures: sortIndexSignatures(indexSignatures), | ||
annotations | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => formatTypeLiteral(this)); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
propertySignatures: this.propertySignatures.map(ps => ps.toJSON()), | ||
indexSignatures: this.indexSignatures.map(ps => ps.toJSON()), | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
const formatTypeLiteral = ast => { | ||
const formattedPropertySignatures = ast.propertySignatures.map(ps => (ps.isReadonly ? "readonly " : "") + String(ps.name) + (ps.isOptional ? "?" : "") + ": " + ps.type).join("; "); | ||
if (ast.indexSignatures.length > 0) { | ||
const formattedIndexSignatures = ast.indexSignatures.map(is => (is.isReadonly ? "readonly " : "") + `[x: ${getParameterBase(is.parameter)}]: ${is.type}`).join("; "); | ||
if (ast.propertySignatures.length > 0) { | ||
return `{ ${formattedPropertySignatures}; ${formattedIndexSignatures} }`; | ||
} else { | ||
return `{ ${formattedIndexSignatures} }`; | ||
} | ||
} else { | ||
if (ast.propertySignatures.length > 0) { | ||
return `{ ${formattedPropertySignatures} }`; | ||
} else { | ||
return "{}"; | ||
} | ||
} | ||
}; | ||
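A sketch combining property and index signatures: the constructor rejects duplicate property names and duplicate string/symbol index signatures, and sorts both collections by cardinality before storing them.

import * as AST from "@effect/schema/AST"

const struct = new AST.TypeLiteral(
  [new AST.PropertySignature("a", AST.stringKeyword, false, true)],
  [new AST.IndexSignature(AST.stringKeyword, AST.numberKeyword, true)]
)
console.log(String(struct)) // "{ readonly a: string; readonly [x: string]: number }"

// Passing two property signatures both named "a" would instead throw
// errors_.getDuplicatePropertySignatureErrorMessage("a").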
@@ -468,22 +1128,181 @@ /** | ||
*/ | ||
export const isTypeLiteral = ast => ast._tag === "TypeLiteral"; | ||
const isMembers = as => as.length > 1; | ||
export const isTypeLiteral = /*#__PURE__*/createASTGuard("TypeLiteral"); | ||
const removeNevers = candidates => candidates.filter(ast => !(ast === neverKeyword)); | ||
const sortCandidates = /*#__PURE__*/Arr.sort( /*#__PURE__*/Order.mapInput(Number.Order, ast => { | ||
switch (ast._tag) { | ||
case "AnyKeyword": | ||
return 0; | ||
case "UnknownKeyword": | ||
return 1; | ||
case "ObjectKeyword": | ||
return 2; | ||
case "StringKeyword": | ||
case "NumberKeyword": | ||
case "BooleanKeyword": | ||
case "BigIntKeyword": | ||
case "SymbolKeyword": | ||
return 3; | ||
} | ||
return 4; | ||
})); | ||
const literalMap = { | ||
string: "StringKeyword", | ||
number: "NumberKeyword", | ||
boolean: "BooleanKeyword", | ||
bigint: "BigIntKeyword" | ||
}; | ||
/** @internal */ | ||
export const flatten = candidates => Arr.flatMap(candidates, ast => isUnion(ast) ? flatten(ast.types) : [ast]); | ||
/** @internal */ | ||
export const unify = candidates => { | ||
const cs = sortCandidates(candidates); | ||
const out = []; | ||
const uniques = {}; | ||
const literals = []; | ||
for (const ast of cs) { | ||
switch (ast._tag) { | ||
case "NeverKeyword": | ||
break; | ||
case "AnyKeyword": | ||
return [anyKeyword]; | ||
case "UnknownKeyword": | ||
return [unknownKeyword]; | ||
// uniques | ||
case "ObjectKeyword": | ||
case "UndefinedKeyword": | ||
case "VoidKeyword": | ||
case "StringKeyword": | ||
case "NumberKeyword": | ||
case "BooleanKeyword": | ||
case "BigIntKeyword": | ||
case "SymbolKeyword": | ||
{ | ||
if (!uniques[ast._tag]) { | ||
uniques[ast._tag] = ast; | ||
out.push(ast); | ||
} | ||
break; | ||
} | ||
case "Literal": | ||
{ | ||
const type = typeof ast.literal; | ||
switch (type) { | ||
case "string": | ||
case "number": | ||
case "bigint": | ||
case "boolean": | ||
{ | ||
const _tag = literalMap[type]; | ||
if (!uniques[_tag] && !literals.includes(ast.literal)) { | ||
literals.push(ast.literal); | ||
out.push(ast); | ||
} | ||
break; | ||
} | ||
// null | ||
case "object": | ||
{ | ||
if (!literals.includes(ast.literal)) { | ||
literals.push(ast.literal); | ||
out.push(ast); | ||
} | ||
break; | ||
} | ||
} | ||
break; | ||
} | ||
case "UniqueSymbol": | ||
{ | ||
if (!uniques["SymbolKeyword"] && !literals.includes(ast.symbol)) { | ||
literals.push(ast.symbol); | ||
out.push(ast); | ||
} | ||
break; | ||
} | ||
case "TupleType": | ||
{ | ||
if (!uniques["ObjectKeyword"]) { | ||
out.push(ast); | ||
} | ||
break; | ||
} | ||
case "TypeLiteral": | ||
{ | ||
if (ast.propertySignatures.length === 0 && ast.indexSignatures.length === 0) { | ||
if (!uniques["{}"]) { | ||
uniques["{}"] = ast; | ||
out.push(ast); | ||
} | ||
} else if (!uniques["ObjectKeyword"]) { | ||
out.push(ast); | ||
} | ||
break; | ||
} | ||
default: | ||
out.push(ast); | ||
} | ||
} | ||
return out; | ||
}; | ||
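A sketch of the internal unify helper (assuming Literal is the node class exported earlier in this module): keywords sort ahead of literals, so a literal whose keyword is already present is absorbed.

import * as AST from "@effect/schema/AST"

console.log(AST.unify([new AST.Literal("a"), AST.stringKeyword]))
// expected: [ stringKeyword ] — the "a" literal is dropped because string already covers it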
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createUnion = (candidates, annotations = {}) => { | ||
const types = unify(candidates); | ||
if (isMembers(types)) { | ||
export class Union { | ||
types; | ||
annotations; | ||
static make = (candidates, annotations) => { | ||
const types = []; | ||
const memo = new Set(); | ||
for (let i = 0; i < candidates.length; i++) { | ||
const ast = candidates[i]; | ||
if (ast === neverKeyword || memo.has(ast)) { | ||
continue; | ||
} | ||
memo.add(ast); | ||
types.push(ast); | ||
} | ||
return Union.union(types, annotations); | ||
}; | ||
/** @internal */ | ||
static members = (candidates, annotations) => { | ||
return Union.union(removeNevers(candidates), annotations); | ||
}; | ||
/** @internal */ | ||
static unify = (candidates, annotations) => { | ||
return Union.union(unify(flatten(candidates)), annotations); | ||
}; | ||
/** @internal */ | ||
static union = (types, annotations) => { | ||
return isMembers(types) ? new Union(types, annotations) : types.length === 1 ? types[0] : neverKeyword; | ||
}; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Union"; | ||
constructor(types, annotations = {}) { | ||
this.types = types; | ||
this.annotations = annotations; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => this.types.map(String).join(" | ")); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: "Union", | ||
types: sortUnionMembers(types), | ||
annotations | ||
_tag: this._tag, | ||
types: this.types.map(ast => ast.toJSON()), | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
if (ReadonlyArray.isNonEmptyReadonlyArray(types)) { | ||
return types[0]; | ||
} | ||
return neverKeyword; | ||
}; | ||
} | ||
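A sketch of the new Union.make constructor: never members and reference-equal duplicates are dropped, and fewer than two survivors collapse to the single member or to neverKeyword.

import * as AST from "@effect/schema/AST"

console.log(String(AST.Union.make([AST.stringKeyword, AST.numberKeyword])))              // "string | number"
console.log(AST.Union.make([AST.stringKeyword, AST.neverKeyword]) === AST.stringKeyword) // true
console.log(AST.Union.make([]) === AST.neverKeyword)                                     // true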
/** @internal */ | ||
export const mapMembers = (members, f) => members.map(f); | ||
/** @internal */ | ||
export const isMembers = as => as.length > 1; | ||
/** | ||
@@ -493,12 +1312,47 @@ * @category guards | ||
*/ | ||
export const isUnion = ast => ast._tag === "Union"; | ||
export const isUnion = /*#__PURE__*/createASTGuard("Union"); | ||
const toJSONMemoMap = /*#__PURE__*/globalValue( /*#__PURE__*/Symbol.for("@effect/schema/AST/toJSONMemoMap"), () => new WeakMap()); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createSuspend = (f, annotations = {}) => ({ | ||
_tag: "Suspend", | ||
f: Internal.memoizeThunk(f), | ||
annotations | ||
}); | ||
export class Suspend { | ||
f; | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Suspend"; | ||
constructor(f, annotations = {}) { | ||
this.f = f; | ||
this.annotations = annotations; | ||
this.f = util_.memoizeThunk(f); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return getExpected(this, verbose).pipe(Option.orElse(() => Option.flatMap(Option.liftThrowable(this.f)(), ast => getExpected(ast, verbose))), Option.getOrElse(() => "<suspended schema>")); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
const ast = this.f(); | ||
let out = toJSONMemoMap.get(ast); | ||
if (out) { | ||
return out; | ||
} | ||
toJSONMemoMap.set(ast, { | ||
_tag: this._tag | ||
}); | ||
out = { | ||
_tag: this._tag, | ||
ast: ast.toJSON(), | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
toJSONMemoMap.set(ast, out); | ||
return out; | ||
} | ||
} | ||
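A sketch of Suspend for a self-referential AST (hypothetical shape): the thunk is memoized, and toJSON seeds toJSONMemoMap with a placeholder so re-entering the same node yields { _tag: "Suspend" } instead of recursing forever.

import * as AST from "@effect/schema/AST"

// "number, or a union containing this very node":
const rec = new AST.Suspend(() => AST.Union.make([AST.numberKeyword, rec]))
console.log(rec.toJSON()._tag) // "Suspend" — the nested occurrence serializes as the placeholder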
/** | ||
@@ -508,15 +1362,37 @@ * @category guards | ||
*/ | ||
export const isSuspend = ast => ast._tag === "Suspend"; | ||
export const isSuspend = /*#__PURE__*/createASTGuard("Suspend"); | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createRefinement = (from, filter, annotations = {}) => { | ||
return { | ||
_tag: "Refinement", | ||
from, | ||
filter, | ||
annotations | ||
}; | ||
}; | ||
export class Refinement { | ||
from; | ||
filter; | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Refinement"; | ||
constructor(from, filter, annotations = {}) { | ||
this.from = from; | ||
this.filter = filter; | ||
this.annotations = annotations; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => `{ ${this.from} | filter }`); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
from: this.from.toJSON(), | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
@@ -526,45 +1402,81 @@ * @category guards | ||
*/ | ||
export const isRefinement = ast => ast._tag === "Refinement"; | ||
export const isRefinement = /*#__PURE__*/createASTGuard("Refinement"); | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createTransform = (from, to, transformation, annotations = {}) => ({ | ||
_tag: "Transform", | ||
from, | ||
to, | ||
transformation, | ||
annotations | ||
}); | ||
export const defaultParseOption = {}; | ||
/** | ||
* @category guards | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const isTransform = ast => ast._tag === "Transform"; | ||
export class Transformation { | ||
from; | ||
to; | ||
transformation; | ||
annotations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Transformation"; | ||
constructor(from, to, transformation, annotations = {}) { | ||
this.from = from; | ||
this.to = to; | ||
this.transformation = transformation; | ||
this.annotations = annotations; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString(verbose = false) { | ||
return Option.getOrElse(getExpected(this, verbose), () => `(${String(this.from)} <-> ${String(this.to)})`); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_tag: this._tag, | ||
from: this.from.toJSON(), | ||
to: this.to.toJSON(), | ||
annotations: toJSONAnnotations(this.annotations) | ||
}; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const createFinalTransformation = (decode, encode) => ({ | ||
_tag: "FinalTransformation", | ||
decode, | ||
encode | ||
}); | ||
export const isTransformation = /*#__PURE__*/createASTGuard("Transformation"); | ||
/** | ||
* @category guard | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const isFinalTransformation = ast => ast._tag === "FinalTransformation"; | ||
export class FinalTransformation { | ||
decode; | ||
encode; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "FinalTransformation"; | ||
constructor(decode, encode) { | ||
this.decode = decode; | ||
this.encode = encode; | ||
} | ||
} | ||
const createTransformationGuard = tag => ast => ast._tag === tag; | ||
/** | ||
* @category constructors | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const composeTransformation = { | ||
_tag: "ComposeTransformation" | ||
}; | ||
export const isFinalTransformation = /*#__PURE__*/createTransformationGuard("FinalTransformation"); | ||
/** | ||
* @category guard | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const isComposeTransformation = ast => ast._tag === "ComposeTransformation"; | ||
export class ComposeTransformation { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "ComposeTransformation"; | ||
} | ||
/** | ||
@@ -574,45 +1486,69 @@ * @category constructors | ||
*/ | ||
export const createFinalPropertySignatureTransformation = (decode, encode) => ({ | ||
_tag: "FinalPropertySignatureTransformation", | ||
decode, | ||
encode | ||
}); | ||
export const composeTransformation = /*#__PURE__*/new ComposeTransformation(); | ||
/** | ||
* @category guard | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isFinalPropertySignatureTransformation = ast => ast._tag === "FinalPropertySignatureTransformation"; | ||
export const isComposeTransformation = /*#__PURE__*/createTransformationGuard("ComposeTransformation"); | ||
/** | ||
* @category constructors | ||
* Represents a `PropertySignature -> PropertySignature` transformation | ||
* | ||
 * The semantics of `decode` are: | ||
 * - `none()` represents the absence of the key/value pair | ||
 * - `some(value)` represents the presence of the key/value pair | ||
 * | ||
 * The semantics of `encode` are: | ||
 * - `none()` means you don't want to output the key/value pair | ||
 * - `some(value)` means you want to output the key/value pair | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createPropertySignatureTransform = (from, to, propertySignatureTransformation) => ({ | ||
from, | ||
to, | ||
propertySignatureTransformation | ||
}); | ||
export class PropertySignatureTransformation { | ||
from; | ||
to; | ||
decode; | ||
encode; | ||
constructor(from, to, decode, encode) { | ||
this.from = from; | ||
this.to = to; | ||
this.decode = decode; | ||
this.encode = encode; | ||
} | ||
} | ||
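A sketch tying the Option-based semantics documented above to two common cases — a plain rename (identity in both directions, which isRenamingPropertySignatureTransformation below recognizes) and a key that is dropped when encoding:

import { identity } from "effect/Function"
import * as Option from "effect/Option"
import * as AST from "@effect/schema/AST"

// Pure rename: "a" on the encoded side becomes "b" on the type side.
const renameAtoB = new AST.PropertySignatureTransformation("a", "b", identity, identity)

// Keep the key when decoding, drop it when encoding.
const dropOnEncode = new AST.PropertySignatureTransformation("c", "c", identity, () => Option.none())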
const isRenamingPropertySignatureTransformation = t => t.decode === identity && t.encode === identity; | ||
/** | ||
* @category constructors | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const createTypeLiteralTransformation = propertySignatureTransformations => { | ||
// check for duplicate property signature transformations | ||
const keys = {}; | ||
for (const pst of propertySignatureTransformations) { | ||
const key = pst.from; | ||
if (keys[key]) { | ||
throw new Error(`Duplicate property signature transformation ${String(key)}`); | ||
export class TypeLiteralTransformation { | ||
propertySignatureTransformations; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "TypeLiteralTransformation"; | ||
constructor(propertySignatureTransformations) { | ||
this.propertySignatureTransformations = propertySignatureTransformations; | ||
// check for duplicate property signature transformations | ||
const fromKeys = {}; | ||
const toKeys = {}; | ||
for (const pst of propertySignatureTransformations) { | ||
const from = pst.from; | ||
if (fromKeys[from]) { | ||
throw new Error(getDuplicatePropertySignatureTransformationErrorMessage(from)); | ||
} | ||
fromKeys[from] = true; | ||
const to = pst.to; | ||
if (toKeys[to]) { | ||
throw new Error(getDuplicatePropertySignatureTransformationErrorMessage(to)); | ||
} | ||
toKeys[to] = true; | ||
} | ||
keys[key] = true; | ||
} | ||
return { | ||
_tag: "TypeLiteralTransformation", | ||
propertySignatureTransformations | ||
}; | ||
}; | ||
} | ||
/** | ||
* @category guard | ||
* @category guards | ||
* @since 1.0.0 | ||
*/ | ||
export const isTypeLiteralTransformation = ast => ast._tag === "TypeLiteralTransformation"; | ||
export const isTypeLiteralTransformation = /*#__PURE__*/createTransformationGuard("TypeLiteralTransformation"); | ||
// ------------------------------------------------------------------------------------- | ||
@@ -626,74 +1562,30 @@ // API | ||
*/ | ||
export const mergeAnnotations = (ast, annotations) => { | ||
return { | ||
...ast, | ||
annotations: { | ||
...ast.annotations, | ||
...annotations | ||
} | ||
export const annotations = (ast, annotations) => { | ||
const d = Object.getOwnPropertyDescriptors(ast); | ||
d.annotations.value = { | ||
...ast.annotations, | ||
...annotations | ||
}; | ||
return Object.create(Object.getPrototypeOf(ast), d); | ||
}; | ||
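A sketch of the behavioural difference from the old spread-based mergeAnnotations (assuming StringKeyword and TitleAnnotationId are exported from this module): cloning through the prototype keeps the node a class instance, so methods such as toString survive the annotation merge.

import * as AST from "@effect/schema/AST"

const titled = AST.annotations(AST.stringKeyword, { [AST.TitleAnnotationId]: "Name" })
console.log(titled instanceof AST.StringKeyword) // true — prototype preserved
console.log(titled === AST.stringKeyword)        // false — a fresh node with merged annotations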
/** | ||
* Adds an annotation, potentially overwriting the existing annotation with the specified id. | ||
* Equivalent at runtime to the TypeScript type-level `keyof` operator. | ||
* | ||
* @since 1.0.0 | ||
*/ | ||
export const setAnnotation = (ast, sym, value) => { | ||
return { | ||
...ast, | ||
annotations: { | ||
...ast.annotations, | ||
[sym]: value | ||
} | ||
}; | ||
}; | ||
export const keyof = ast => Union.unify(_keyof(ast)); | ||
const STRING_KEYWORD_PATTERN = ".*"; | ||
const NUMBER_KEYWORD_PATTERN = "[+-]?\\d*\\.?\\d+(?:[Ee][+-]?\\d+)?"; | ||
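A sketch of the runtime keyof defined just above: for a TypeLiteral it returns a union of the literal keys (plus the parameter bases of any index signatures).

import * as AST from "@effect/schema/AST"

const props = new AST.TypeLiteral([
  new AST.PropertySignature("a", AST.stringKeyword, false, true),
  new AST.PropertySignature("b", AST.numberKeyword, false, true)
], [])
console.log(String(AST.keyof(props))) // expected: '"a" | "b"'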
/** | ||
* Adds a rest element to the end of a tuple, or throws an exception if the rest element is already present. | ||
* | ||
* @since 1.0.0 | ||
*/ | ||
export const appendRestElement = (ast, restElement) => { | ||
if (Option.isSome(ast.rest)) { | ||
// example: `type A = [...string[], ...number[]]` is illegal | ||
throw new Error("A rest element cannot follow another rest element. ts(1265)"); | ||
} | ||
return createTuple(ast.elements, Option.some([restElement]), ast.isReadonly); | ||
}; | ||
/** | ||
* Appends an element to a tuple or throws an exception in the following cases: | ||
* - A required element cannot follow an optional element. ts(1257) | ||
* - An optional element cannot follow a rest element. ts(1266) | ||
* | ||
* @since 1.0.0 | ||
*/ | ||
export const appendElement = (ast, newElement) => { | ||
if (ast.elements.some(e => e.isOptional) && !newElement.isOptional) { | ||
throw new Error("A required element cannot follow an optional element. ts(1257)"); | ||
} | ||
return pipe(ast.rest, Option.match({ | ||
onNone: () => createTuple([...ast.elements, newElement], Option.none(), ast.isReadonly), | ||
onSome: rest => { | ||
if (newElement.isOptional) { | ||
throw new Error("An optional element cannot follow a rest element. ts(1266)"); | ||
} | ||
return createTuple(ast.elements, Option.some([...rest, newElement.type]), ast.isReadonly); | ||
} | ||
})); | ||
}; | ||
/** | ||
* Equivalent at runtime to the TypeScript type-level `keyof` operator. | ||
* | ||
* @since 1.0.0 | ||
*/ | ||
export const keyof = ast => createUnion(_keyof(ast)); | ||
/** @internal */ | ||
export const getTemplateLiteralRegex = ast => { | ||
let pattern = `^${ast.head}`; | ||
export const getTemplateLiteralRegExp = ast => { | ||
let pattern = `^${regexp.escape(ast.head)}`; | ||
for (const span of ast.spans) { | ||
if (isStringKeyword(span.type)) { | ||
pattern += ".*"; | ||
pattern += STRING_KEYWORD_PATTERN; | ||
} else if (isNumberKeyword(span.type)) { | ||
pattern += "[+-]?\\d*\\.?\\d+(?:[Ee][+-]?\\d+)?"; | ||
pattern += NUMBER_KEYWORD_PATTERN; | ||
} | ||
pattern += span.literal; | ||
pattern += regexp.escape(span.literal); | ||
} | ||
@@ -708,2 +1600,10 @@ pattern += "$"; | ||
switch (ast._tag) { | ||
case "Declaration": | ||
{ | ||
const annotation = getSurrogateAnnotation(ast); | ||
if (Option.isSome(annotation)) { | ||
return getPropertySignatures(annotation.value); | ||
} | ||
break; | ||
} | ||
case "TypeLiteral": | ||
@@ -719,6 +1619,6 @@ return ast.propertySignatures.slice(); | ||
switch (ast._tag) { | ||
case "Tuple": | ||
case "TupleType": | ||
{ | ||
let hasOptional = false; | ||
const out = []; | ||
let out = []; | ||
for (const e of ast.elements) { | ||
@@ -733,6 +1633,4 @@ if (e.isOptional) { | ||
} | ||
if (Option.isSome(ast.rest)) { | ||
out.push(...ast.rest.value); | ||
} | ||
return createUnion(out); | ||
out = out.concat(ast.rest); | ||
return Union.make(out); | ||
} | ||
@@ -742,7 +1640,7 @@ case "Refinement": | ||
case "Union": | ||
return createUnion(ast.types.map(getNumberIndexedAccess)); | ||
return Union.make(ast.types.map(getNumberIndexedAccess)); | ||
case "Suspend": | ||
return getNumberIndexedAccess(ast.f()); | ||
} | ||
throw new Error(`getNumberIndexedAccess: unsupported schema (${format(ast)})`); | ||
throw new Error(errors_.getErrorMessage("getNumberIndexedAccess", `unsupported schema (${ast})`)); | ||
}; | ||
@@ -752,5 +1650,13 @@ /** @internal */ | ||
switch (ast._tag) { | ||
case "Declaration": | ||
{ | ||
const annotation = getSurrogateAnnotation(ast); | ||
if (Option.isSome(annotation)) { | ||
return getPropertyKeyIndexedAccess(annotation.value, name); | ||
} | ||
break; | ||
} | ||
case "TypeLiteral": | ||
{ | ||
const ops = ReadonlyArray.findFirst(ast.propertySignatures, ps => ps.name === name); | ||
const ops = Arr.findFirst(ast.propertySignatures, ps => ps.name === name); | ||
if (Option.isSome(ops)) { | ||
@@ -765,5 +1671,5 @@ return ops.value; | ||
{ | ||
const regex = getTemplateLiteralRegex(parameterBase); | ||
const regex = getTemplateLiteralRegExp(parameterBase); | ||
if (regex.test(name)) { | ||
return createPropertySignature(name, is.type, false, false); | ||
return new PropertySignature(name, is.type, false, true); | ||
} | ||
@@ -773,3 +1679,3 @@ break; | ||
case "StringKeyword": | ||
return createPropertySignature(name, is.type, false, false); | ||
return new PropertySignature(name, is.type, false, true); | ||
} | ||
@@ -781,3 +1687,3 @@ } | ||
if (isSymbolKeyword(parameterBase)) { | ||
return createPropertySignature(name, is.type, false, false); | ||
return new PropertySignature(name, is.type, false, true); | ||
} | ||
@@ -790,10 +1696,18 @@ } | ||
case "Union": | ||
return createPropertySignature(name, createUnion(ast.types.map(ast => getPropertyKeyIndexedAccess(ast, name).type)), false, true); | ||
return new PropertySignature(name, Union.make(ast.types.map(ast => getPropertyKeyIndexedAccess(ast, name).type)), false, true); | ||
case "Suspend": | ||
return getPropertyKeyIndexedAccess(ast.f(), name); | ||
} | ||
return createPropertySignature(name, neverKeyword, false, true); | ||
return new PropertySignature(name, neverKeyword, false, true); | ||
}; | ||
const getPropertyKeys = ast => { | ||
switch (ast._tag) { | ||
case "Declaration": | ||
{ | ||
const annotation = getSurrogateAnnotation(ast); | ||
if (Option.isSome(annotation)) { | ||
return getPropertyKeys(annotation.value); | ||
} | ||
break; | ||
} | ||
case "TypeLiteral": | ||
@@ -804,12 +1718,10 @@ return ast.propertySignatures.map(ps => ps.name); | ||
case "Union": | ||
return ast.types.slice(1).reduce((out, ast) => ReadonlyArray.intersection(out, getPropertyKeys(ast)), getPropertyKeys(ast.types[0])); | ||
return ast.types.slice(1).reduce((out, ast) => Arr.intersection(out, getPropertyKeys(ast)), getPropertyKeys(ast.types[0])); | ||
case "Transformation": | ||
return getPropertyKeys(ast.to); | ||
} | ||
return []; | ||
}; | ||
/** | ||
* Create a record with the specified key type and value type. | ||
* | ||
* @since 1.0.0 | ||
*/ | ||
export const createRecord = (key, value, isReadonly) => { | ||
/** @internal */ | ||
export const record = (key, value) => { | ||
const propertySignatures = []; | ||
@@ -825,13 +1737,20 @@ const indexSignatures = []; | ||
case "Refinement": | ||
indexSignatures.push(createIndexSignature(key, value, isReadonly)); | ||
indexSignatures.push(new IndexSignature(key, value, true)); | ||
break; | ||
case "Literal": | ||
if (Predicate.isString(key.literal) || Predicate.isNumber(key.literal)) { | ||
propertySignatures.push(createPropertySignature(key.literal, value, false, isReadonly)); | ||
propertySignatures.push(new PropertySignature(key.literal, value, false, true)); | ||
} else { | ||
throw new Error(`createRecord: unsupported literal (${formatUnknown(key.literal)})`); | ||
throw new Error(errors_.getErrorMessage("record", `unsupported literal (${util_.formatUnknown(key.literal)})`)); | ||
} | ||
break; | ||
case "Enums": | ||
{ | ||
for (const [_, name] of key.enums) { | ||
propertySignatures.push(new PropertySignature(name, value, false, true)); | ||
} | ||
break; | ||
} | ||
case "UniqueSymbol": | ||
propertySignatures.push(createPropertySignature(key.symbol, value, false, isReadonly)); | ||
propertySignatures.push(new PropertySignature(key.symbol, value, false, true)); | ||
break; | ||
@@ -842,7 +1761,10 @@ case "Union": | ||
default: | ||
throw new Error(`createRecord: unsupported key schema (${format(key)})`); | ||
throw new Error(errors_.getErrorMessage("record", `unsupported key schema (${key})`)); | ||
} | ||
}; | ||
go(key); | ||
return createTypeLiteral(propertySignatures, indexSignatures); | ||
return { | ||
propertySignatures, | ||
indexSignatures | ||
}; | ||
}; | ||
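A sketch of the internal record helper (output shapes assumed from the cases above): a string key produces a readonly index signature, while literal, enum and unique-symbol keys produce property signatures.

import * as AST from "@effect/schema/AST"

const byString = AST.record(AST.stringKeyword, AST.numberKeyword)
console.log(byString.propertySignatures.length, byString.indexSignatures.length) // 0 1

const byLiteral = AST.record(new AST.Literal("a"), AST.numberKeyword)
console.log(byLiteral.propertySignatures.length, byLiteral.indexSignatures.length) // 1 0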
@@ -854,3 +1776,34 @@ /** | ||
*/ | ||
export const pick = (ast, keys) => createTypeLiteral(keys.map(key => getPropertyKeyIndexedAccess(ast, key)), []); | ||
export const pick = (ast, keys) => { | ||
if (isTransformation(ast)) { | ||
switch (ast.transformation._tag) { | ||
case "ComposeTransformation": | ||
return new Transformation(pick(ast.from, keys), pick(ast.to, keys), composeTransformation); | ||
case "TypeLiteralTransformation": | ||
{ | ||
const ts = []; | ||
const fromKeys = []; | ||
for (const k of keys) { | ||
const t = ast.transformation.propertySignatureTransformations.find(t => t.to === k); | ||
if (t) { | ||
ts.push(t); | ||
fromKeys.push(t.from); | ||
} else { | ||
fromKeys.push(k); | ||
} | ||
} | ||
return Arr.isNonEmptyReadonlyArray(ts) ? new Transformation(pick(ast.from, fromKeys), pick(ast.to, keys), new TypeLiteralTransformation(ts)) : pick(ast.from, fromKeys); | ||
} | ||
case "FinalTransformation": | ||
{ | ||
const annotation = getSurrogateAnnotation(ast); | ||
if (Option.isSome(annotation)) { | ||
return pick(annotation.value, keys); | ||
} | ||
throw new Error(errors_.getErrorMessage("pick", "cannot handle this kind of transformation")); | ||
} | ||
} | ||
} | ||
return new TypeLiteral(keys.map(key => getPropertyKeyIndexedAccess(ast, key)), []); | ||
}; | ||
/** | ||
@@ -862,2 +1815,4 @@ * Equivalent at runtime to the built-in TypeScript utility type `Omit`. | ||
export const omit = (ast, keys) => pick(ast, getPropertyKeys(ast).filter(name => !keys.includes(name))); | ||
/** @internal */ | ||
export const orUndefined = ast => Union.make([ast, undefinedKeyword]); | ||
/** | ||
@@ -868,21 +1823,29 @@ * Equivalent at runtime to the built-in TypeScript utility type `Partial`. | ||
*/ | ||
export const partial = ast => { | ||
export const partial = (ast, options) => { | ||
const exact = options?.exact === true; | ||
switch (ast._tag) { | ||
case "Tuple": | ||
return createTuple(ast.elements.map(e => createElement(e.type, true)), pipe(ast.rest, Option.map(rest => [createUnion([...rest, undefinedKeyword])])), ast.isReadonly); | ||
case "TupleType": | ||
return new TupleType(ast.elements.map(e => new Element(exact ? e.type : orUndefined(e.type), true)), Arr.match(ast.rest, { | ||
onEmpty: () => ast.rest, | ||
onNonEmpty: rest => [Union.make([...rest, undefinedKeyword])] | ||
}), ast.isReadonly); | ||
case "TypeLiteral": | ||
return createTypeLiteral(ast.propertySignatures.map(f => createPropertySignature(f.name, f.type, true, f.isReadonly, f.annotations)), ast.indexSignatures); | ||
return new TypeLiteral(ast.propertySignatures.map(ps => new PropertySignature(ps.name, exact ? ps.type : orUndefined(ps.type), true, ps.isReadonly, ps.annotations)), ast.indexSignatures.map(is => new IndexSignature(is.parameter, orUndefined(is.type), is.isReadonly))); | ||
case "Union": | ||
return createUnion(ast.types.map(member => partial(member))); | ||
return Union.make(ast.types.map(member => partial(member, options))); | ||
case "Suspend": | ||
return createSuspend(() => partial(ast.f())); | ||
return new Suspend(() => partial(ast.f(), options)); | ||
case "Declaration": | ||
throw new Error("`partial` cannot handle declarations"); | ||
throw new Error(errors_.getErrorMessage("partial", "cannot handle declarations")); | ||
case "Refinement": | ||
throw new Error("`partial` cannot handle refinements"); | ||
case "Transform": | ||
throw new Error("`partial` cannot handle transformations"); | ||
default: | ||
return ast; | ||
throw new Error(errors_.getErrorMessage("partial", "cannot handle refinements")); | ||
case "Transformation": | ||
{ | ||
if (isTypeLiteralTransformation(ast.transformation) && ast.transformation.propertySignatureTransformations.every(isRenamingPropertySignatureTransformation)) { | ||
return new Transformation(partial(ast.from, options), partial(ast.to, options), ast.transformation); | ||
} | ||
throw new Error(errors_.getErrorMessage("partial", "cannot handle transformations")); | ||
} | ||
} | ||
return ast; | ||
}; | ||
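A sketch of the new exact option on partial: by default undefined is unioned into every element and property type, while { exact: true } keeps the types as written.

import * as AST from "@effect/schema/AST"

const person = new AST.TypeLiteral([new AST.PropertySignature("a", AST.stringKeyword, false, true)], [])
console.log(String(AST.partial(person)))                  // "{ readonly a?: string | undefined }"
console.log(String(AST.partial(person, { exact: true }))) // "{ readonly a?: string }"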
@@ -896,22 +1859,23 @@ /** | ||
switch (ast._tag) { | ||
case "Tuple": | ||
return createTuple(ast.elements.map(e => createElement(e.type, false)), pipe(ast.rest, Option.map(rest => { | ||
const u = createUnion([...rest]); | ||
return ReadonlyArray.map(rest, () => u); | ||
})), ast.isReadonly); | ||
case "TupleType": | ||
return new TupleType(ast.elements.map(e => new Element(e.type, false)), ast.rest, ast.isReadonly); | ||
case "TypeLiteral": | ||
return createTypeLiteral(ast.propertySignatures.map(f => createPropertySignature(f.name, f.type, false, f.isReadonly, f.annotations)), ast.indexSignatures); | ||
return new TypeLiteral(ast.propertySignatures.map(f => new PropertySignature(f.name, f.type, false, f.isReadonly, f.annotations)), ast.indexSignatures); | ||
case "Union": | ||
return createUnion(ast.types.map(member => required(member))); | ||
return Union.make(ast.types.map(member => required(member))); | ||
case "Suspend": | ||
return createSuspend(() => required(ast.f())); | ||
return new Suspend(() => required(ast.f())); | ||
case "Declaration": | ||
throw new Error("`required` cannot handle declarations"); | ||
throw new Error(errors_.getErrorMessage("required", "cannot handle declarations")); | ||
case "Refinement": | ||
throw new Error("`required` cannot handle refinements"); | ||
case "Transform": | ||
throw new Error("`required` cannot handle transformations"); | ||
default: | ||
return ast; | ||
throw new Error(errors_.getErrorMessage("required", "cannot handle refinements")); | ||
case "Transformation": | ||
{ | ||
if (isTypeLiteralTransformation(ast.transformation) && ast.transformation.propertySignatureTransformations.every(isRenamingPropertySignatureTransformation)) { | ||
return new Transformation(required(ast.from), required(ast.to), ast.transformation); | ||
} | ||
throw new Error(errors_.getErrorMessage("required", "cannot handle transformations")); | ||
} | ||
} | ||
return ast; | ||
}; | ||
@@ -927,14 +1891,28 @@ /** | ||
switch (ast._tag) { | ||
case "Tuple": | ||
return createTuple(ast.elements, ast.rest, false, ast.annotations); | ||
case "TupleType": | ||
return ast.isReadonly === false ? ast : new TupleType(ast.elements, ast.rest, false, ast.annotations); | ||
case "TypeLiteral": | ||
return createTypeLiteral(ast.propertySignatures.map(ps => createPropertySignature(ps.name, ps.type, ps.isOptional, false, ps.annotations)), ast.indexSignatures.map(is => createIndexSignature(is.parameter, is.type, false)), ast.annotations); | ||
{ | ||
const propertySignatures = changeMap(ast.propertySignatures, ps => ps.isReadonly === false ? ps : new PropertySignature(ps.name, ps.type, ps.isOptional, false, ps.annotations)); | ||
const indexSignatures = changeMap(ast.indexSignatures, is => is.isReadonly === false ? is : new IndexSignature(is.parameter, is.type, false)); | ||
return propertySignatures === ast.propertySignatures && indexSignatures === ast.indexSignatures ? ast : new TypeLiteral(propertySignatures, indexSignatures, ast.annotations); | ||
} | ||
case "Union": | ||
return createUnion(ast.types.map(mutable), ast.annotations); | ||
{ | ||
const types = changeMap(ast.types, mutable); | ||
return types === ast.types ? ast : Union.make(types, ast.annotations); | ||
} | ||
case "Suspend": | ||
return createSuspend(() => mutable(ast.f()), ast.annotations); | ||
return new Suspend(() => mutable(ast.f()), ast.annotations); | ||
case "Refinement": | ||
return createRefinement(mutable(ast.from), ast.filter, ast.annotations); | ||
case "Transform": | ||
return createTransform(mutable(ast.from), mutable(ast.to), ast.transformation, ast.annotations); | ||
{ | ||
const from = mutable(ast.from); | ||
return from === ast.from ? ast : new Refinement(from, ast.filter, ast.annotations); | ||
} | ||
case "Transformation": | ||
{ | ||
const from = mutable(ast.from); | ||
const to = mutable(ast.to); | ||
return from === ast.from && to === ast.to ? ast : new Transformation(from, to, ast.transformation, ast.annotations); | ||
} | ||
} | ||
@@ -947,105 +1925,126 @@ return ast; | ||
export const getCompiler = match => { | ||
const compile = ast => match[ast._tag](ast, compile); | ||
const compile = (ast, path) => match[ast._tag](ast, compile, path); | ||
return compile; | ||
}; | ||
/** @internal */ | ||
export const getToPropertySignatures = ps => ps.map(p => createPropertySignature(p.name, to(p.type), p.isOptional, p.isReadonly, p.annotations)); | ||
/** @internal */ | ||
export const getToIndexSignatures = ps => ps.map(is => createIndexSignature(is.parameter, to(is.type), is.isReadonly)); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export const to = ast => { | ||
export const typeAST = ast => { | ||
switch (ast._tag) { | ||
case "Declaration": | ||
return createDeclaration(ast.typeParameters.map(to), ast.decodeUnknown, ast.encodeUnknown, ast.annotations); | ||
case "Tuple": | ||
return createTuple(ast.elements.map(e => createElement(to(e.type), e.isOptional)), Option.map(ast.rest, ReadonlyArray.map(to)), ast.isReadonly, ast.annotations); | ||
{ | ||
const typeParameters = changeMap(ast.typeParameters, typeAST); | ||
return typeParameters === ast.typeParameters ? ast : new Declaration(typeParameters, ast.decodeUnknown, ast.encodeUnknown, ast.annotations); | ||
} | ||
case "TupleType": | ||
{ | ||
const elements = changeMap(ast.elements, e => { | ||
const type = typeAST(e.type); | ||
return type === e.type ? e : new Element(type, e.isOptional); | ||
}); | ||
const rest = changeMap(ast.rest, typeAST); | ||
return elements === ast.elements && rest === ast.rest ? ast : new TupleType(elements, rest, ast.isReadonly, ast.annotations); | ||
} | ||
case "TypeLiteral": | ||
return createTypeLiteral(getToPropertySignatures(ast.propertySignatures), getToIndexSignatures(ast.indexSignatures), ast.annotations); | ||
{ | ||
const propertySignatures = changeMap(ast.propertySignatures, p => { | ||
const type = typeAST(p.type); | ||
return type === p.type ? p : new PropertySignature(p.name, type, p.isOptional, p.isReadonly); | ||
}); | ||
const indexSignatures = changeMap(ast.indexSignatures, is => { | ||
const type = typeAST(is.type); | ||
return type === is.type ? is : new IndexSignature(is.parameter, type, is.isReadonly); | ||
}); | ||
return propertySignatures === ast.propertySignatures && indexSignatures === ast.indexSignatures ? ast : new TypeLiteral(propertySignatures, indexSignatures, ast.annotations); | ||
} | ||
case "Union": | ||
return createUnion(ast.types.map(to), ast.annotations); | ||
{ | ||
const types = changeMap(ast.types, typeAST); | ||
return types === ast.types ? ast : Union.make(types, ast.annotations); | ||
} | ||
case "Suspend": | ||
return createSuspend(() => to(ast.f()), ast.annotations); | ||
return new Suspend(() => typeAST(ast.f()), ast.annotations); | ||
case "Refinement": | ||
return createRefinement(to(ast.from), ast.filter, ast.annotations); | ||
case "Transform": | ||
return to(ast.to); | ||
{ | ||
const from = typeAST(ast.from); | ||
return from === ast.from ? ast : new Refinement(from, ast.filter, ast.annotations); | ||
} | ||
case "Transformation": | ||
return typeAST(ast.to); | ||
} | ||
return ast; | ||
}; | ||
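A sketch contrasting typeAST with the structure sharing done via changeMap below: a Transformation is replaced by its to side, while a node that needs no rewriting is returned by reference.

import * as AST from "@effect/schema/AST"

const trimmed = new AST.Transformation(AST.stringKeyword, AST.stringKeyword, AST.composeTransformation)
console.log(AST.typeAST(trimmed) === AST.stringKeyword) // true — the transformation is stripped

const plain = new AST.TypeLiteral([new AST.PropertySignature("a", AST.stringKeyword, false, true)], [])
console.log(AST.typeAST(plain) === plain) // true — nothing changed, so the same node comes back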
const preserveIdentifierAnnotation = annotated => { | ||
return Option.match(getIdentifierAnnotation(annotated), { | ||
onNone: () => undefined, | ||
onSome: identifier => ({ | ||
[IdentifierAnnotationId]: identifier | ||
}) | ||
}); | ||
}; | ||
/** @internal */ | ||
export const getJSONIdentifier = annotated => Option.orElse(getJSONIdentifierAnnotation(annotated), () => getIdentifierAnnotation(annotated)); | ||
const createJSONIdentifierAnnotation = annotated => Option.match(getJSONIdentifier(annotated), { | ||
onNone: () => undefined, | ||
onSome: identifier => ({ | ||
[JSONIdentifierAnnotationId]: identifier | ||
}) | ||
}); | ||
function changeMap(as, f) { | ||
let changed = false; | ||
const out = Arr.allocate(as.length); | ||
for (let i = 0; i < as.length; i++) { | ||
const a = as[i]; | ||
const fa = f(a); | ||
if (fa !== a) { | ||
changed = true; | ||
} | ||
out[i] = fa; | ||
} | ||
return changed ? out : as; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export const from = ast => { | ||
export const encodedAST = ast => { | ||
switch (ast._tag) { | ||
case "Declaration": | ||
return createDeclaration(ast.typeParameters.map(from), ast.decodeUnknown, ast.encodeUnknown, ast.annotations); | ||
case "Tuple": | ||
return createTuple(ast.elements.map(e => createElement(from(e.type), e.isOptional)), Option.map(ast.rest, ReadonlyArray.map(from)), ast.isReadonly, preserveIdentifierAnnotation(ast)); | ||
{ | ||
const typeParameters = changeMap(ast.typeParameters, encodedAST); | ||
return typeParameters === ast.typeParameters ? ast : new Declaration(typeParameters, ast.decodeUnknown, ast.encodeUnknown, ast.annotations); | ||
} | ||
case "TupleType": | ||
{ | ||
const elements = changeMap(ast.elements, e => { | ||
const type = encodedAST(e.type); | ||
return type === e.type ? e : new Element(type, e.isOptional); | ||
}); | ||
const rest = changeMap(ast.rest, encodedAST); | ||
return elements === ast.elements && rest === ast.rest ? ast : new TupleType(elements, rest, ast.isReadonly, createJSONIdentifierAnnotation(ast)); | ||
} | ||
case "TypeLiteral": | ||
return createTypeLiteral(ast.propertySignatures.map(p => createPropertySignature(p.name, from(p.type), p.isOptional, p.isReadonly)), ast.indexSignatures.map(is => createIndexSignature(is.parameter, from(is.type), is.isReadonly)), preserveIdentifierAnnotation(ast)); | ||
{ | ||
const propertySignatures = changeMap(ast.propertySignatures, ps => { | ||
const type = encodedAST(ps.type); | ||
return type === ps.type ? ps : new PropertySignature(ps.name, type, ps.isOptional, ps.isReadonly); | ||
}); | ||
const indexSignatures = changeMap(ast.indexSignatures, is => { | ||
const type = encodedAST(is.type); | ||
return type === is.type ? is : new IndexSignature(is.parameter, type, is.isReadonly); | ||
}); | ||
return propertySignatures === ast.propertySignatures && indexSignatures === ast.indexSignatures ? ast : new TypeLiteral(propertySignatures, indexSignatures, createJSONIdentifierAnnotation(ast)); | ||
} | ||
case "Union": | ||
return createUnion(ast.types.map(from), preserveIdentifierAnnotation(ast)); | ||
{ | ||
const types = changeMap(ast.types, encodedAST); | ||
return types === ast.types ? ast : Union.make(types, createJSONIdentifierAnnotation(ast)); | ||
} | ||
case "Suspend": | ||
return createSuspend(() => from(ast.f()), preserveIdentifierAnnotation(ast)); | ||
return new Suspend(() => encodedAST(ast.f()), createJSONIdentifierAnnotation(ast)); | ||
case "Refinement": | ||
case "Transform": | ||
return from(ast.from); | ||
case "Transformation": | ||
return encodedAST(ast.from); | ||
} | ||
return ast; | ||
}; | ||
const toStringMemoSet = /*#__PURE__*/globalValue( /*#__PURE__*/Symbol.for("@effect/schema/AST/toStringMemoSet"), () => new WeakSet()); | ||
const containerASTTags = { | ||
Declaration: true, | ||
Refinement: true, | ||
Tuple: true, | ||
TypeLiteral: true, | ||
Union: true, | ||
Suspend: true, | ||
Transform: true | ||
const toJSONAnnotations = annotations => { | ||
const out = {}; | ||
for (const k of Object.getOwnPropertySymbols(annotations)) { | ||
out[String(k)] = annotations[k]; | ||
} | ||
return out; | ||
}; | ||
const isContainerAST = ast => "_tag" in ast && Predicate.isString(ast["_tag"]) && ast["_tag"] in containerASTTags; | ||
/** @internal */ | ||
export const toString = ast => JSON.stringify(ast, (key, value) => { | ||
if (Predicate.isSymbol(value)) { | ||
return String(value); | ||
} | ||
if (typeof value === "object" && value !== null) { | ||
if (isContainerAST(value)) { | ||
if (toStringMemoSet.has(value)) { | ||
return "<suspended schema>"; | ||
} | ||
toStringMemoSet.add(value); | ||
if (isSuspend(value)) { | ||
const out = value.f(); | ||
if (toStringMemoSet.has(out)) { | ||
return "<suspended schema>"; | ||
} | ||
toStringMemoSet.add(out); | ||
return out; | ||
} | ||
} else if (key === "annotations") { | ||
const out = {}; | ||
for (const k of Internal.ownKeys(value)) { | ||
out[String(k)] = value[k]; | ||
} | ||
return out; | ||
} | ||
} | ||
return value; | ||
}, 2); | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export const hash = ast => Hash.string(toString(ast)); | ||
/** @internal */ | ||
export const getCardinality = ast => { | ||
@@ -1076,4 +2075,4 @@ switch (ast._tag) { | ||
}; | ||
const sortPropertySignatures = /*#__PURE__*/ReadonlyArray.sort( /*#__PURE__*/pipe(Number.Order, /*#__PURE__*/Order.mapInput(ps => getCardinality(ps.type)))); | ||
const sortIndexSignatures = /*#__PURE__*/ReadonlyArray.sort( /*#__PURE__*/pipe(Number.Order, /*#__PURE__*/Order.mapInput(is => { | ||
const sortPropertySignatures = /*#__PURE__*/Arr.sort( /*#__PURE__*/Order.mapInput(Number.Order, ps => getCardinality(ps.type))); | ||
const sortIndexSignatures = /*#__PURE__*/Arr.sort( /*#__PURE__*/Order.mapInput(Number.Order, is => { | ||
switch (getParameterBase(is.parameter)._tag) { | ||
@@ -1087,3 +2086,3 @@ case "StringKeyword": | ||
} | ||
}))); | ||
})); | ||
const WeightOrder = /*#__PURE__*/Order.tuple(Number.Order, Number.Order, Number.Order); | ||
@@ -1096,7 +2095,5 @@ const maxWeight = /*#__PURE__*/Order.max(WeightOrder); | ||
switch (ast._tag) { | ||
case "Tuple": | ||
case "TupleType": | ||
{ | ||
const y = ast.elements.length; | ||
const z = Option.isSome(ast.rest) ? ast.rest.value.length : 0; | ||
return [2, y, z]; | ||
return [2, ast.elements.length, ast.rest.length]; | ||
} | ||
@@ -1111,2 +2108,7 @@ case "TypeLiteral": | ||
{ | ||
const annotation = getSurrogateAnnotation(ast); | ||
if (Option.isSome(annotation)) { | ||
const [_, y, z] = getWeight(annotation.value); | ||
return [6, y, z]; | ||
} | ||
return [6, 0, 0]; | ||
@@ -1123,3 +2125,3 @@ } | ||
} | ||
case "Transform": | ||
case "Transformation": | ||
return getWeight(ast.from); | ||
@@ -1135,38 +2137,2 @@ case "ObjectKeyword": | ||
}; | ||
const sortUnionMembers = /*#__PURE__*/ReadonlyArray.sort( /*#__PURE__*/Order.reverse( /*#__PURE__*/Order.mapInput(WeightOrder, getWeight))); | ||
const unify = candidates => { | ||
let out = pipe(candidates, ReadonlyArray.flatMap(ast => { | ||
switch (ast._tag) { | ||
case "NeverKeyword": | ||
return []; | ||
case "Union": | ||
return ast.types; | ||
default: | ||
return [ast]; | ||
} | ||
})); | ||
if (out.some(isAnyKeyword)) { | ||
return [anyKeyword]; | ||
} | ||
if (out.some(isUnknownKeyword)) { | ||
return [unknownKeyword]; | ||
} | ||
let i; | ||
if ((i = out.findIndex(isStringKeyword)) !== -1) { | ||
out = out.filter((m, j) => j === i || !isStringKeyword(m) && !(isLiteral(m) && typeof m.literal === "string")); | ||
} | ||
if ((i = out.findIndex(isNumberKeyword)) !== -1) { | ||
out = out.filter((m, j) => j === i || !isNumberKeyword(m) && !(isLiteral(m) && typeof m.literal === "number")); | ||
} | ||
if ((i = out.findIndex(isBooleanKeyword)) !== -1) { | ||
out = out.filter((m, j) => j === i || !isBooleanKeyword(m) && !(isLiteral(m) && typeof m.literal === "boolean")); | ||
} | ||
if ((i = out.findIndex(isBigIntKeyword)) !== -1) { | ||
out = out.filter((m, j) => j === i || !isBigIntKeyword(m) && !(isLiteral(m) && typeof m.literal === "bigint")); | ||
} | ||
if ((i = out.findIndex(isSymbolKeyword)) !== -1) { | ||
out = out.filter((m, j) => j === i || !isSymbolKeyword(m) && !isUniqueSymbol(m)); | ||
} | ||
return out; | ||
}; | ||
/** @internal */ | ||
@@ -1183,14 +2149,60 @@ export const getParameterBase = ast => { | ||
}; | ||
const equalsTemplateLiteralSpan = /*#__PURE__*/Arr.getEquivalence((self, that) => self.type._tag === that.type._tag && self.literal === that.literal); | ||
const equalsEnums = /*#__PURE__*/Arr.getEquivalence((self, that) => that[0] === self[0] && that[1] === self[1]); | ||
const equals = (self, that) => { | ||
switch (self._tag) { | ||
case "Literal": | ||
return isLiteral(that) && that.literal === self.literal; | ||
case "UniqueSymbol": | ||
return isUniqueSymbol(that) && that.symbol === self.symbol; | ||
case "UndefinedKeyword": | ||
case "VoidKeyword": | ||
case "NeverKeyword": | ||
case "UnknownKeyword": | ||
case "AnyKeyword": | ||
case "StringKeyword": | ||
case "NumberKeyword": | ||
case "BooleanKeyword": | ||
case "BigIntKeyword": | ||
case "SymbolKeyword": | ||
case "ObjectKeyword": | ||
return that._tag === self._tag; | ||
case "TemplateLiteral": | ||
return isTemplateLiteral(that) && that.head === self.head && equalsTemplateLiteralSpan(that.spans, self.spans); | ||
case "Enums": | ||
return isEnums(that) && equalsEnums(that.enums, self.enums); | ||
case "Refinement": | ||
case "TupleType": | ||
case "TypeLiteral": | ||
case "Union": | ||
case "Suspend": | ||
case "Transformation": | ||
case "Declaration": | ||
return self === that; | ||
} | ||
}; | ||
const intersection = /*#__PURE__*/Arr.intersectionWith(equals); | ||
const _keyof = ast => { | ||
switch (ast._tag) { | ||
case "Declaration": | ||
{ | ||
const annotation = getSurrogateAnnotation(ast); | ||
if (Option.isSome(annotation)) { | ||
return _keyof(annotation.value); | ||
} | ||
break; | ||
} | ||
case "TypeLiteral": | ||
return ast.propertySignatures.map(p => Predicate.isSymbol(p.name) ? createUniqueSymbol(p.name) : createLiteral(p.name)).concat(ast.indexSignatures.map(is => getParameterBase(is.parameter))); | ||
return ast.propertySignatures.map(p => Predicate.isSymbol(p.name) ? new UniqueSymbol(p.name) : new Literal(p.name)).concat(ast.indexSignatures.map(is => getParameterBase(is.parameter))); | ||
case "Suspend": | ||
return _keyof(ast.f()); | ||
default: | ||
throw new Error(`keyof: unsupported schema (${format(ast)})`); | ||
case "Union": | ||
return ast.types.slice(1).reduce((out, ast) => intersection(out, _keyof(ast)), _keyof(ast.types[0])); | ||
case "Transformation": | ||
return _keyof(ast.to); | ||
} | ||
throw new Error(errors_.getErrorMessage("keyof", `unsupported schema (${ast})`)); | ||
}; | ||
/** @internal */ | ||
export const compose = (ab, cd) => createTransform(ab, cd, composeTransformation); | ||
export const compose = (ab, cd) => new Transformation(ab, cd, composeTransformation); | ||
/** @internal */ | ||
@@ -1201,85 +2213,25 @@ export const rename = (ast, mapping) => { | ||
{ | ||
const propertySignatureTransforms = []; | ||
for (const key of Internal.ownKeys(mapping)) { | ||
const propertySignatureTransformations = []; | ||
for (const key of util_.ownKeys(mapping)) { | ||
const name = mapping[key]; | ||
if (name !== undefined) { | ||
propertySignatureTransforms.push(createPropertySignatureTransform(key, name, createFinalPropertySignatureTransformation(identity, identity))); | ||
propertySignatureTransformations.push(new PropertySignatureTransformation(key, name, identity, identity)); | ||
} | ||
} | ||
if (propertySignatureTransforms.length === 0) { | ||
if (propertySignatureTransformations.length === 0) { | ||
return ast; | ||
} | ||
return createTransform(ast, createTypeLiteral(ast.propertySignatures.map(ps => { | ||
return new Transformation(ast, new TypeLiteral(ast.propertySignatures.map(ps => { | ||
const name = mapping[ps.name]; | ||
return createPropertySignature(name === undefined ? ps.name : name, to(ps.type), ps.isOptional, ps.isReadonly, ps.annotations); | ||
}), ast.indexSignatures), createTypeLiteralTransformation(propertySignatureTransforms)); | ||
return new PropertySignature(name === undefined ? ps.name : name, typeAST(ps.type), ps.isOptional, ps.isReadonly, ps.annotations); | ||
}), ast.indexSignatures), new TypeLiteralTransformation(propertySignatureTransformations)); | ||
} | ||
case "Suspend": | ||
return createSuspend(() => rename(ast.f(), mapping)); | ||
case "Transform": | ||
return compose(ast, rename(to(ast), mapping)); | ||
return new Suspend(() => rename(ast.f(), mapping)); | ||
case "Transformation": | ||
return compose(ast, rename(typeAST(ast), mapping)); | ||
} | ||
throw new Error(`rename: cannot rename (${format(ast)})`); | ||
throw new Error(`rename: cannot rename (${ast})`); | ||
}; | ||
const formatTransformation = (from, to) => `(${from} <-> ${to})`; | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const format = (ast, verbose = false) => { | ||
switch (ast._tag) { | ||
case "StringKeyword": | ||
case "NumberKeyword": | ||
case "BooleanKeyword": | ||
case "BigIntKeyword": | ||
case "UndefinedKeyword": | ||
case "SymbolKeyword": | ||
case "ObjectKeyword": | ||
case "AnyKeyword": | ||
case "UnknownKeyword": | ||
case "VoidKeyword": | ||
case "NeverKeyword": | ||
return Option.getOrElse(getExpected(ast, verbose), () => ast._tag); | ||
case "Literal": | ||
return Option.getOrElse(getExpected(ast, verbose), () => formatUnknown(ast.literal)); | ||
case "UniqueSymbol": | ||
return Option.getOrElse(getExpected(ast, verbose), () => formatUnknown(ast.symbol)); | ||
case "Union": | ||
return Option.getOrElse(getExpected(ast, verbose), () => ast.types.map(member => format(member)).join(" | ")); | ||
case "TemplateLiteral": | ||
return Option.getOrElse(getExpected(ast, verbose), () => formatTemplateLiteral(ast)); | ||
case "Tuple": | ||
return Option.getOrElse(getExpected(ast, verbose), () => formatTuple(ast)); | ||
case "TypeLiteral": | ||
return Option.getOrElse(getExpected(ast, verbose), () => formatTypeLiteral(ast)); | ||
case "Enums": | ||
return Option.getOrElse(getExpected(ast, verbose), () => `<enum ${ast.enums.length} value(s): ${ast.enums.map((_, value) => JSON.stringify(value)).join(" | ")}>`); | ||
case "Suspend": | ||
return getExpected(ast, verbose).pipe(Option.orElse(() => Option.flatMap(Option.liftThrowable(ast.f)(), ast => getExpected(ast, verbose))), Option.getOrElse(() => "<suspended schema>")); | ||
case "Declaration": | ||
return Option.getOrElse(getExpected(ast, verbose), () => "<declaration schema>"); | ||
case "Refinement": | ||
return Option.getOrElse(getExpected(ast, verbose), () => "<refinement schema>"); | ||
case "Transform": | ||
return Option.getOrElse(getExpected(ast, verbose), () => formatTransformation(format(ast.from), format(ast.to))); | ||
} | ||
}; | ||
/** @internal */ | ||
export const formatUnknown = u => { | ||
if (Predicate.isString(u)) { | ||
return JSON.stringify(u); | ||
} else if (Predicate.isNumber(u) || u == null || Predicate.isBoolean(u) || Predicate.isSymbol(u) || Predicate.isDate(u)) { | ||
return String(u); | ||
} else if (Predicate.isBigInt(u)) { | ||
return String(u) + "n"; | ||
} else if (!Array.isArray(u) && Predicate.hasProperty(u, "toString") && Predicate.isFunction(u["toString"]) && u["toString"] !== Object.prototype.toString) { | ||
return u["toString"](); | ||
} | ||
try { | ||
return JSON.stringify(u); | ||
} catch (e) { | ||
return String(u); | ||
} | ||
}; | ||
const formatTemplateLiteral = ast => "`" + ast.head + ast.spans.map(span => formatTemplateLiteralSpan(span) + span.literal).join("") + "`"; | ||
const formatKeyword = (ast, verbose = false) => Option.getOrElse(getExpected(ast, verbose), () => ast._tag); | ||
const getExpected = (ast, verbose) => { | ||
@@ -1295,54 +2247,10 @@ if (verbose) { | ||
}); | ||
} | ||
return getIdentifierAnnotation(ast).pipe(Option.orElse(() => getTitleAnnotation(ast)), Option.orElse(() => getDescriptionAnnotation(ast))); | ||
}; | ||
const formatTuple = ast => { | ||
const formattedElements = ast.elements.map(element => format(element.type) + (element.isOptional ? "?" : "")).join(", "); | ||
return Option.match(ast.rest, { | ||
onNone: () => "readonly [" + formattedElements + "]", | ||
onSome: ([head, ...tail]) => { | ||
const formattedHead = format(head); | ||
const wrappedHead = formattedHead.includes(" | ") ? "(" + formattedHead + ")" : formattedHead; | ||
if (tail.length > 0) { | ||
const formattedTail = tail.map(ast => format(ast)).join(", "); | ||
if (ast.elements.length > 0) { | ||
return `readonly [${formattedElements}, ...${wrappedHead}[], ${formattedTail}]`; | ||
} else { | ||
return `readonly [...${wrappedHead}[], ${formattedTail}]`; | ||
} | ||
} else { | ||
if (ast.elements.length > 0) { | ||
return `readonly [${formattedElements}, ...${wrappedHead}[]]`; | ||
} else { | ||
return `ReadonlyArray<${formattedHead}>`; | ||
} | ||
} | ||
} | ||
}); | ||
}; | ||
const formatTypeLiteral = ast => { | ||
const formattedPropertySignatures = ast.propertySignatures.map(ps => String(ps.name) + (ps.isOptional ? "?" : "") + ": " + format(ps.type)).join("; "); | ||
if (ast.indexSignatures.length > 0) { | ||
const formattedIndexSignatures = ast.indexSignatures.map(is => `[x: ${format(getParameterBase(is.parameter))}]: ${format(is.type)}`).join("; "); | ||
if (ast.propertySignatures.length > 0) { | ||
return `{ ${formattedPropertySignatures}; ${formattedIndexSignatures} }`; | ||
} else { | ||
return `{ ${formattedIndexSignatures} }`; | ||
} | ||
} else { | ||
if (ast.propertySignatures.length > 0) { | ||
return `{ ${formattedPropertySignatures} }`; | ||
} else { | ||
return "{}"; | ||
} | ||
return getIdentifierAnnotation(ast).pipe(Option.orElse(() => getTitleAnnotation(ast)), Option.orElse(() => getDescriptionAnnotation(ast))); | ||
} | ||
}; | ||
const formatTemplateLiteralSpan = span => { | ||
switch (span.type._tag) { | ||
case "StringKeyword": | ||
return "${string}"; | ||
case "NumberKeyword": | ||
return "${number}"; | ||
} | ||
}; | ||
const getDuplicateIndexSignatureErrorMessage = name => `Duplicate index signature for type \`${name}\``; | ||
const getIndexSignatureParameterErrorMessage = "An index signature parameter type must be `string`, `symbol`, a template literal type or a refinement of the previous types"; | ||
const getRequiredElementFollowingAnOptionalElementErrorMessage = "A required element cannot follow an optional element. ts(1257)"; | ||
const getDuplicatePropertySignatureTransformationErrorMessage = name => `Duplicate property signature transformation ${util_.formatUnknown(name)}`; | ||
//# sourceMappingURL=AST.js.map |
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Arr from "effect/Array"; | ||
import * as Equal from "effect/Equal"; | ||
import * as Equivalence from "effect/Equivalence"; | ||
import * as Option from "effect/Option"; | ||
import * as Predicate from "effect/Predicate"; | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import * as AST from "./AST.js"; | ||
import * as Internal from "./internal/ast.js"; | ||
import * as hooks from "./internal/hooks.js"; | ||
import * as InternalSchema from "./internal/schema.js"; | ||
import * as Parser from "./Parser.js"; | ||
import * as errors_ from "./internal/errors.js"; | ||
import * as util_ from "./internal/util.js"; | ||
import * as ParseResult from "./ParseResult.js"; | ||
/** | ||
@@ -17,3 +17,3 @@ * @category hooks | ||
*/ | ||
export const EquivalenceHookId = hooks.EquivalenceHookId; | ||
export const EquivalenceHookId = /*#__PURE__*/Symbol.for("@effect/schema/EquivalenceHookId"); | ||
/** | ||
@@ -23,3 +23,5 @@ * @category annotations | ||
*/ | ||
export const equivalence = handler => self => InternalSchema.make(AST.setAnnotation(self.ast, EquivalenceHookId, handler)); | ||
export const equivalence = handler => self => self.annotations({ | ||
[EquivalenceHookId]: handler | ||
}); | ||
/** | ||
@@ -29,5 +31,6 @@ * @category Equivalence | ||
*/ | ||
export const make = schema => go(schema.ast); | ||
export const make = schema => go(schema.ast, []); | ||
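/**
 * Editor's sketch (not part of the diff): deriving an Equivalence from a
 * schema with `make`. The constructor names (`struct`, `string`, `number`)
 * are assumed from "@effect/schema/Schema" and may differ in this snapshot.
 *
 *   import * as Equivalence from "@effect/schema/Equivalence"
 *   import * as S from "@effect/schema/Schema"
 *
 *   const Person = S.struct({ name: S.string, age: S.number })
 *   const eqPerson = Equivalence.make(Person)
 *   eqPerson({ name: "Ada", age: 36 }, { name: "Ada", age: 36 }) // true
 *   eqPerson({ name: "Ada", age: 36 }, { name: "Bob", age: 36 }) // false
 */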
const getHook = /*#__PURE__*/AST.getAnnotation(EquivalenceHookId); | ||
const go = ast => { | ||
const getEquivalenceErrorMessage = (message, path) => errors_.getErrorMessageWithPath(`cannot build an Equivalence for ${message}`, path); | ||
const go = (ast, path) => { | ||
const hook = getHook(ast); | ||
@@ -37,5 +40,5 @@ if (Option.isSome(hook)) { | ||
case "Declaration": | ||
return hook.value(...ast.typeParameters.map(go)); | ||
return hook.value(...ast.typeParameters.map(tp => go(tp, path))); | ||
case "Refinement": | ||
return hook.value(go(ast.from)); | ||
return hook.value(go(ast.from, path)); | ||
default: | ||
@@ -47,5 +50,5 @@ return hook.value(); | ||
case "NeverKeyword": | ||
throw new Error("cannot build an Equivalence for `never`"); | ||
case "Transform": | ||
return go(ast.to); | ||
throw new Error(getEquivalenceErrorMessage("`never`", path)); | ||
case "Transformation": | ||
return go(ast.to, path); | ||
case "Declaration": | ||
@@ -66,14 +69,14 @@ case "Literal": | ||
case "ObjectKeyword": | ||
return Equivalence.strict(); | ||
return Equal.equals; | ||
case "Refinement": | ||
return go(ast.from); | ||
return go(ast.from, path); | ||
case "Suspend": | ||
{ | ||
const get = Internal.memoizeThunk(() => go(ast.f())); | ||
const get = util_.memoizeThunk(() => go(ast.f(), path)); | ||
return (a, b) => get()(a, b); | ||
} | ||
case "Tuple": | ||
case "TupleType": | ||
{ | ||
const elements = ast.elements.map(element => go(element.type)); | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(go)); | ||
const elements = ast.elements.map((element, i) => go(element.type, path.concat(i))); | ||
const rest = ast.rest.map(ast => go(ast, path)); | ||
return Equivalence.make((a, b) => { | ||
@@ -96,4 +99,4 @@ const len = a.length; | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const [head, ...tail] = rest.value; | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest; | ||
for (; i < len - tail.length; i++) { | ||
@@ -120,6 +123,6 @@ if (!head(a[i], b[i])) { | ||
if (ast.propertySignatures.length === 0 && ast.indexSignatures.length === 0) { | ||
return Equivalence.strict(); | ||
return Equal.equals; | ||
} | ||
const propertySignatures = ast.propertySignatures.map(ps => go(ps.type)); | ||
const indexSignatures = ast.indexSignatures.map(is => go(is.type)); | ||
const propertySignatures = ast.propertySignatures.map(ps => go(ps.type, path.concat(ps.name))); | ||
const indexSignatures = ast.indexSignatures.map(is => go(is.type, path)); | ||
return Equivalence.make((a, b) => { | ||
@@ -178,4 +181,4 @@ const aStringKeys = Object.keys(a); | ||
{ | ||
const searchTree = Parser.getSearchTree(ast.types, true); | ||
const ownKeys = Internal.ownKeys(searchTree.keys); | ||
const searchTree = ParseResult.getSearchTree(ast.types, true); | ||
const ownKeys = util_.ownKeys(searchTree.keys); | ||
const len = ownKeys.length; | ||
@@ -199,3 +202,5 @@ return Equivalence.make((a, b) => { | ||
} | ||
const tuples = candidates.map(ast => [go(ast), Parser.is(InternalSchema.make(ast))]); | ||
const tuples = candidates.map(ast => [go(ast, path), ParseResult.is({ | ||
ast | ||
})]); | ||
for (let i = 0; i < tuples.length; i++) { | ||
@@ -202,0 +207,0 @@ const [equivalence, is] = tuples[i]; |
@@ -20,3 +20,3 @@ /** | ||
*/ | ||
export * as Format from "./Format.js"; | ||
export * as FastCheck from "./FastCheck.js"; | ||
/** | ||
@@ -33,6 +33,2 @@ * @since 1.0.0 | ||
*/ | ||
export * as Parser from "./Parser.js"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export * as Pretty from "./Pretty.js"; | ||
@@ -39,0 +35,0 @@ /** |
@@ -16,7 +16,7 @@ /** @internal */ | ||
/** @internal */ | ||
export const GreaterThanOrEqualToBigintTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/GreaterThanOrEqualToBigint"); | ||
export const GreaterThanOrEqualToBigIntTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/GreaterThanOrEqualToBigint"); | ||
/** @internal */ | ||
export const LessThanBigintTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/LessThanBigint"); | ||
export const LessThanBigIntTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/LessThanBigint"); | ||
/** @internal */ | ||
export const LessThanOrEqualToBigintTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/LessThanOrEqualToBigint"); | ||
export const LessThanOrEqualToBigIntTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/LessThanOrEqualToBigint"); | ||
/** @internal */ | ||
@@ -23,0 +23,0 @@ export const BetweenBigintTypeId = /*#__PURE__*/Symbol.for("@effect/schema/TypeId/BetweenBigint"); |
@@ -6,5 +6,5 @@ /** | ||
import * as Predicate from "effect/Predicate"; | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import * as ReadonlyRecord from "effect/ReadonlyRecord"; | ||
import * as Record from "effect/Record"; | ||
import * as AST from "./AST.js"; | ||
import * as errors_ from "./internal/errors.js"; | ||
/** | ||
@@ -14,3 +14,20 @@ * @category encoding | ||
*/ | ||
export const make = schema => goRoot(schema.ast); | ||
export const make = schema => { | ||
const $defs = {}; | ||
const jsonSchema = go(schema.ast, $defs, true, []); | ||
const out = { | ||
$schema, | ||
...jsonSchema | ||
}; | ||
// clean up self-referencing entries | ||
for (const id in $defs) { | ||
if ($defs[id]["$ref"] === get$ref(id)) { | ||
delete $defs[id]; | ||
} | ||
} | ||
if (!Record.isEmptyRecord($defs)) { | ||
out.$defs = $defs; | ||
} | ||
return out; | ||
}; | ||
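/**
 * Editor's sketch (not part of the diff): what the reworked `make` returns.
 * Constructor names are assumed from "@effect/schema/Schema"; the output
 * shape is indicative of the draft-07 generator defined below.
 *
 *   import * as JSONSchema from "@effect/schema/JSONSchema"
 *   import * as S from "@effect/schema/Schema"
 *
 *   JSONSchema.make(S.struct({ name: S.string }))
 *   // => {
 *   //   $schema: "http://json-schema.org/draft-07/schema#",
 *   //   type: "object",
 *   //   required: ["name"],
 *   //   properties: { name: { type: "string" } },
 *   //   additionalProperties: false
 *   // }
 */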
const anyJsonSchema = { | ||
@@ -30,3 +47,3 @@ $id: "/schemas/any" | ||
}; | ||
const emptyJsonSchema = { | ||
const empty = () => ({ | ||
"$id": "/schemas/{}", | ||
@@ -38,34 +55,5 @@ "oneOf": [{ | ||
}] | ||
}; | ||
}); | ||
const $schema = "http://json-schema.org/draft-07/schema#"; | ||
/** @internal */ | ||
export const goRoot = ast => { | ||
const $defs = {}; | ||
const jsonSchema = goWithIdentifier(ast, $defs); | ||
const out = { | ||
$schema, | ||
...jsonSchema | ||
}; | ||
if (!ReadonlyRecord.isEmptyRecord($defs)) { | ||
out.$defs = $defs; | ||
} | ||
return out; | ||
}; | ||
const goWithIdentifier = (ast, $defs) => { | ||
const identifier = AST.getIdentifierAnnotation(ast); | ||
return Option.match(identifier, { | ||
onNone: () => goWithMetaData(ast, $defs), | ||
onSome: id => { | ||
const out = { | ||
$ref: `${DEFINITION_PREFIX}${id}` | ||
}; | ||
if (!ReadonlyRecord.has($defs, id)) { | ||
$defs[id] = out; | ||
$defs[id] = goWithMetaData(ast, $defs); | ||
} | ||
return out; | ||
} | ||
}); | ||
}; | ||
const getMetaData = annotated => ReadonlyRecord.getSomes({ | ||
const getJsonSchemaAnnotations = annotated => Record.getSomes({ | ||
description: AST.getDescriptionAnnotation(annotated), | ||
@@ -76,27 +64,72 @@ title: AST.getTitleAnnotation(annotated), | ||
}); | ||
const goWithMetaData = (ast, $defs) => { | ||
return { | ||
...go(ast, $defs), | ||
...getMetaData(ast) | ||
}; | ||
const pruneUndefinedKeyword = ps => { | ||
const type = ps.type; | ||
if (ps.isOptional && AST.isUnion(type) && Option.isNone(AST.getJSONSchemaAnnotation(type))) { | ||
return AST.Union.make(type.types.filter(type => !AST.isUndefinedKeyword(type)), type.annotations); | ||
} | ||
return type; | ||
}; | ||
const getMissingAnnotationErrorMessage = (name, path) => errors_.getErrorMessageWithPath(`cannot build a JSON Schema for ${name} without a JSON Schema annotation`, path); | ||
const getUnsupportedIndexSignatureParameterErrorMessage = (parameter, path) => errors_.getErrorMessageWithPath(`unsupported index signature parameter (${parameter})`, path); | ||
/** @internal */ | ||
export const DEFINITION_PREFIX = "#/$defs/"; | ||
const go = (ast, $defs) => { | ||
const get$ref = id => `${DEFINITION_PREFIX}${id}`; | ||
const hasTransformation = ast => { | ||
switch (ast.from._tag) { | ||
case "Transformation": | ||
return true; | ||
case "Refinement": | ||
return hasTransformation(ast.from); | ||
case "Suspend": | ||
{ | ||
const from = ast.from.f(); | ||
if (AST.isRefinement(from)) { | ||
return hasTransformation(from); | ||
} | ||
} | ||
break; | ||
} | ||
return false; | ||
}; | ||
const go = (ast, $defs, handleIdentifier, path) => { | ||
const hook = AST.getJSONSchemaAnnotation(ast); | ||
if (Option.isSome(hook)) { | ||
switch (ast._tag) { | ||
case "Refinement": | ||
const handler = hook.value; | ||
if (AST.isRefinement(ast) && !hasTransformation(ast)) { | ||
try { | ||
return { | ||
...goWithIdentifier(ast.from, $defs), | ||
...hook.value | ||
...go(ast.from, $defs, true, path), | ||
...getJsonSchemaAnnotations(ast), | ||
...handler | ||
}; | ||
} catch (e) { | ||
return { | ||
...getJsonSchemaAnnotations(ast), | ||
...handler | ||
}; | ||
} | ||
} | ||
return hook.value; | ||
return handler; | ||
} | ||
const surrogate = AST.getSurrogateAnnotation(ast); | ||
if (Option.isSome(surrogate)) { | ||
return go(surrogate.value, $defs, handleIdentifier, path); | ||
} | ||
if (handleIdentifier && !AST.isTransformation(ast)) { | ||
const identifier = AST.getJSONIdentifier(ast); | ||
if (Option.isSome(identifier)) { | ||
const id = identifier.value; | ||
const out = { | ||
$ref: get$ref(id) | ||
}; | ||
if (!Record.has($defs, id)) { | ||
$defs[id] = out; | ||
$defs[id] = go(ast, $defs, false, path); | ||
} | ||
return out; | ||
} | ||
} | ||
switch (ast._tag) { | ||
case "Declaration": | ||
{ | ||
throw new Error("cannot build a JSON Schema for a declaration without a JSON Schema annotation"); | ||
} | ||
throw new Error(getMissingAnnotationErrorMessage("a declaration", path)); | ||
case "Literal": | ||
@@ -107,59 +140,70 @@ { | ||
return { | ||
const: null | ||
const: null, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} else if (Predicate.isString(literal)) { | ||
return { | ||
const: literal | ||
const: literal, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} else if (Predicate.isNumber(literal)) { | ||
return { | ||
const: literal | ||
const: literal, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} else if (Predicate.isBoolean(literal)) { | ||
return { | ||
const: literal | ||
const: literal, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} | ||
throw new Error("cannot build a JSON Schema for a bigint literal without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("a bigint literal", path)); | ||
} | ||
case "UniqueSymbol": | ||
throw new Error("cannot build a JSON Schema for a unique symbol without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("a unique symbol", path)); | ||
case "UndefinedKeyword": | ||
throw new Error("cannot build a JSON Schema for `undefined` without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("`undefined`", path)); | ||
case "VoidKeyword": | ||
throw new Error("cannot build a JSON Schema for `void` without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("`void`", path)); | ||
case "NeverKeyword": | ||
throw new Error("cannot build a JSON Schema for `never` without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("`never`", path)); | ||
case "UnknownKeyword": | ||
return { | ||
...unknownJsonSchema | ||
...unknownJsonSchema, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "AnyKeyword": | ||
return { | ||
...anyJsonSchema | ||
...anyJsonSchema, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "ObjectKeyword": | ||
return { | ||
...objectJsonSchema | ||
...objectJsonSchema, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "StringKeyword": | ||
return { | ||
type: "string" | ||
type: "string", | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "NumberKeyword": | ||
return { | ||
type: "number" | ||
type: "number", | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "BooleanKeyword": | ||
return { | ||
type: "boolean" | ||
type: "boolean", | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
case "BigIntKeyword": | ||
throw new Error("cannot build a JSON Schema for `bigint` without a JSON Schema annotation"); | ||
throw new Error(getMissingAnnotationErrorMessage("`bigint`", path)); | ||
case "SymbolKeyword": | ||
throw new Error("cannot build a JSON Schema for `symbol` without a JSON Schema annotation"); | ||
case "Tuple": | ||
throw new Error(getMissingAnnotationErrorMessage("`symbol`", path)); | ||
case "TupleType": | ||
{ | ||
const elements = ast.elements.map(e => goWithIdentifier(e.type, $defs)); | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(ast => goWithIdentifier(ast, $defs))); | ||
const len = ast.elements.length; | ||
const elements = ast.elements.map((e, i) => go(e.type, $defs, true, path.concat(i))); | ||
const rest = ast.rest.map(ast => go(ast, $defs, true, path)); | ||
const output = { | ||
@@ -171,3 +215,2 @@ type: "array" | ||
// --------------------------------------------- | ||
const len = elements.length; | ||
if (len > 0) { | ||
@@ -180,4 +223,4 @@ output.minItems = len - ast.elements.filter(element => element.isOptional).length; | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const head = rest.value[0]; | ||
if (rest.length > 0) { | ||
const head = rest[0]; | ||
if (len > 0) { | ||
@@ -191,4 +234,4 @@ output.additionalItems = head; | ||
// --------------------------------------------- | ||
if (rest.value.length > 1) { | ||
throw new Error("Generating a JSON Schema for post-rest elements is not currently supported. You're welcome to contribute by submitting a Pull Request."); | ||
if (rest.length > 1) { | ||
throw new Error(errors_.getErrorMessageWithPath("Generating a JSON Schema for post-rest elements is not currently supported. You're welcome to contribute by submitting a Pull Request.", path)); | ||
} | ||
@@ -202,3 +245,6 @@ } else { | ||
} | ||
return output; | ||
return { | ||
...output, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} | ||
@@ -209,3 +255,4 @@ case "TypeLiteral": | ||
return { | ||
...emptyJsonSchema | ||
...empty(), | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -220,3 +267,3 @@ } | ||
{ | ||
additionalProperties = goWithIdentifier(is.type, $defs); | ||
additionalProperties = go(is.type, $defs, true, path); | ||
break; | ||
@@ -227,3 +274,3 @@ } | ||
patternProperties = { | ||
[AST.getTemplateLiteralRegex(parameter).source]: goWithIdentifier(is.type, $defs) | ||
[AST.getTemplateLiteralRegExp(parameter).source]: go(is.type, $defs, true, path) | ||
}; | ||
@@ -237,10 +284,10 @@ break; | ||
patternProperties = { | ||
[hook.value.pattern]: goWithIdentifier(is.type, $defs) | ||
[hook.value.pattern]: go(is.type, $defs, true, path) | ||
}; | ||
break; | ||
} | ||
throw new Error(`Unsupported index signature parameter (${AST.format(parameter)})`); | ||
throw new Error(getUnsupportedIndexSignatureParameterErrorMessage(parameter, path)); | ||
} | ||
case "SymbolKeyword": | ||
throw new Error(`Unsupported index signature parameter (${AST.format(parameter)})`); | ||
throw new Error(getUnsupportedIndexSignatureParameterErrorMessage(parameter, path)); | ||
} | ||
@@ -250,4 +297,4 @@ } | ||
return { | ||
...goWithIdentifier(ps.type, $defs), | ||
...getMetaData(ps) | ||
...go(pruneUndefinedKeyword(ps), $defs, true, path.concat(ps.name)), | ||
...getJsonSchemaAnnotations(ps) | ||
}; | ||
@@ -266,3 +313,3 @@ }); | ||
const name = ast.propertySignatures[i].name; | ||
if (typeof name === "string") { | ||
if (Predicate.isString(name)) { | ||
output.properties[name] = propertySignatures[i]; | ||
@@ -276,3 +323,3 @@ // --------------------------------------------- | ||
} else { | ||
throw new Error(`Cannot encode ${String(name)} key to JSON Schema`); | ||
throw new Error(errors_.getErrorMessageWithPath(`cannot encode ${String(name)} key to JSON Schema`, path)); | ||
} | ||
@@ -289,3 +336,6 @@ } | ||
} | ||
return output; | ||
return { | ||
...output, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} | ||
@@ -297,3 +347,3 @@ case "Union": | ||
for (const type of ast.types) { | ||
const schema = goWithIdentifier(type, $defs); | ||
const schema = go(type, $defs, true, path); | ||
if ("const" in schema) { | ||
@@ -312,7 +362,9 @@ if (Object.keys(schema).length > 1) { | ||
return { | ||
const: enums[0] | ||
const: enums[0], | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
} else { | ||
return { | ||
enum: enums | ||
enum: enums, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -331,3 +383,4 @@ } | ||
return { | ||
anyOf | ||
anyOf, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -343,3 +396,4 @@ } | ||
const: e[1] | ||
})) | ||
})), | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -349,11 +403,12 @@ } | ||
{ | ||
throw new Error("cannot build a JSON Schema for a refinement without a JSON Schema annotation"); | ||
throw new Error(errors_.getErrorMessageWithPath("cannot build a JSON Schema for a refinement without a JSON Schema annotation", path)); | ||
} | ||
case "TemplateLiteral": | ||
{ | ||
const regex = AST.getTemplateLiteralRegex(ast); | ||
const regex = AST.getTemplateLiteralRegExp(ast); | ||
return { | ||
type: "string", | ||
description: "a template literal", | ||
pattern: regex.source | ||
pattern: regex.source, | ||
...getJsonSchemaAnnotations(ast) | ||
}; | ||
@@ -363,12 +418,12 @@ } | ||
{ | ||
const identifier = Option.orElse(AST.getIdentifierAnnotation(ast), () => AST.getIdentifierAnnotation(ast.f())); | ||
const identifier = Option.orElse(AST.getJSONIdentifier(ast), () => AST.getJSONIdentifier(ast.f())); | ||
if (Option.isNone(identifier)) { | ||
throw new Error("Generating a JSON Schema for suspended schemas requires an identifier annotation"); | ||
throw new Error(errors_.getErrorMessageWithPath("Generating a JSON Schema for suspended schemas requires an identifier annotation", path)); | ||
} | ||
return goWithIdentifier(ast.f(), $defs); | ||
return go(ast.f(), $defs, true, path); | ||
} | ||
case "Transform": | ||
return goWithIdentifier(ast.to, $defs); | ||
case "Transformation": | ||
return go(ast.from, $defs, true, path); | ||
} | ||
}; | ||
//# sourceMappingURL=JSONSchema.js.map |
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Arr from "effect/Array"; | ||
import { TaggedError } from "effect/Data"; | ||
@@ -8,78 +9,228 @@ import * as Effect from "effect/Effect"; | ||
import { dual } from "effect/Function"; | ||
import { globalValue } from "effect/GlobalValue"; | ||
import * as Inspectable from "effect/Inspectable"; | ||
import * as InternalParser from "./internal/parser.js"; | ||
import * as Option from "effect/Option"; | ||
import * as Predicate from "effect/Predicate"; | ||
import * as AST from "./AST.js"; | ||
import * as util_ from "./internal/util.js"; | ||
import * as TreeFormatter from "./TreeFormatter.js"; | ||
/** | ||
 * Error that occurs when decoding or encoding a declaration fails. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class ParseError extends TaggedError("ParseError") { | ||
get message() { | ||
return this.toString(); | ||
} | ||
export class Declaration { | ||
ast; | ||
actual; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString() { | ||
return TreeFormatter.formatIssue(this.error); | ||
_tag = "Declaration"; | ||
constructor(ast, actual, error) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
 * Error that occurs when a refinement fails, either in its source schema or in its predicate. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class Refinement { | ||
ast; | ||
actual; | ||
kind; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_id: "ParseError", | ||
message: this.toString() | ||
}; | ||
_tag = "Refinement"; | ||
constructor(ast, actual, kind, error) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.kind = kind; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
 * Error that occurs when decoding or encoding an array or tuple fails. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export class TupleType { | ||
ast; | ||
actual; | ||
errors; | ||
output; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
[Inspectable.NodeInspectSymbol]() { | ||
return this.toJSON(); | ||
_tag = "TupleType"; | ||
constructor(ast, actual, errors, output = []) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.errors = errors; | ||
this.output = output; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Index` error indicates that there was an error at a specific index in an array or tuple. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const parseError = issue => new ParseError({ | ||
error: issue | ||
}); | ||
export class Index { | ||
index; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Index"; | ||
constructor(index, error) { | ||
this.index = index; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
 * Error that occurs when decoding or encoding a type literal or record fails. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const succeed = Either.right; | ||
export class TypeLiteral { | ||
ast; | ||
actual; | ||
errors; | ||
output; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "TypeLiteral"; | ||
constructor(ast, actual, errors, output = {}) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.errors = errors; | ||
this.output = output; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Key` variant of the `ParseIssue` type represents an error that occurs when a key in a type literal or record is invalid. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const fail = Either.left; | ||
const _try = Either.try; | ||
export { | ||
export class Key { | ||
key; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Key"; | ||
constructor(key, error) { | ||
this.key = key; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* Error that occurs when an unexpected key or index is present. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
_try as try }; | ||
export class Unexpected { | ||
ast; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Unexpected"; | ||
constructor(ast) { | ||
this.ast = ast; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
 * Error that occurs when a transformation step fails. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const transform = InternalParser.transform; | ||
export class Transformation { | ||
ast; | ||
actual; | ||
kind; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Transformation"; | ||
constructor(ast, actual, kind, error) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.kind = kind; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Type` variant of the `ParseIssue` type represents an error that occurs when the `actual` value is not of the expected type. | ||
* The `ast` field specifies the expected type, and the `actual` field contains the value that caused the error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const type = InternalParser.type; | ||
export class Type { | ||
ast; | ||
actual; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Type"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
message; | ||
constructor(ast, actual, message) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.message = Option.fromNullable(message); | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Forbidden` variant of the `ParseIssue` type represents a forbidden operation, such as when encountering an Effect that is not allowed to execute (e.g., using `runSync`). | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const forbidden = InternalParser.forbidden; | ||
export class Forbidden { | ||
ast; | ||
actual; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Forbidden"; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
message; | ||
constructor(ast, actual, message) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.message = Option.fromNullable(message); | ||
} | ||
} | ||
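/**
 * Editor's sketch (not part of the diff): when a `Forbidden` issue shows up.
 * `AsyncString` below is a hypothetical schema whose decode step returns a
 * deferred Effect; the point is only that the synchronous APIs refuse to run it.
 *
 *   // `decodeUnknownSync` (defined later in this module) cannot execute the
 *   // pending Effect, so the underlying ParseIssue is a `Forbidden` and the
 *   // call throws
 *   decodeUnknownSync(AsyncString)("hello")
 *
 *   // the Effect-returning `decodeUnknown` runs it without complaint
 *   Effect.runPromise(decodeUnknown(AsyncString)("hello")) // "hello"
 */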
/** | ||
* @category constructors | ||
* Error that occurs when a required key or index is missing. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const declaration = InternalParser.declaration; | ||
export class Missing { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Missing"; | ||
} | ||
/** | ||
@@ -89,23 +240,70 @@ * @category constructors | ||
*/ | ||
export const refinement = InternalParser.refinement; | ||
export const missing = /*#__PURE__*/new Missing(); | ||
/** | ||
* @category constructors | ||
 * Error that occurs when a union member fails to decode or encode. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const tuple = (ast, actual, errors) => ({ | ||
_tag: "Tuple", | ||
ast, | ||
actual, | ||
errors | ||
}); | ||
export class Member { | ||
ast; | ||
error; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Member"; | ||
constructor(ast, error) { | ||
this.ast = ast; | ||
this.error = error; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
 * Error that occurs when decoding or encoding a union fails. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const typeLiteral = InternalParser.typeLiteral; | ||
export class Union { | ||
ast; | ||
actual; | ||
errors; | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
_tag = "Union"; | ||
constructor(ast, actual, errors) { | ||
this.ast = ast; | ||
this.actual = actual; | ||
this.errors = errors; | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const index = InternalParser.index; | ||
export class ParseError extends TaggedError("ParseError") { | ||
get message() { | ||
return this.toString(); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString() { | ||
return TreeFormatter.formatIssueSync(this.error); | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_id: "ParseError", | ||
message: this.toString() | ||
}; | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
[Inspectable.NodeInspectSymbol]() { | ||
return this.toJSON(); | ||
} | ||
} | ||
/** | ||
@@ -115,3 +313,5 @@ * @category constructors | ||
*/ | ||
export const key = InternalParser.key; | ||
export const parseError = issue => new ParseError({ | ||
error: issue | ||
}); | ||
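/**
 * Editor's sketch (not part of the diff): `parseError` wraps a `ParseIssue`
 * in the `ParseError` class above, which formats lazily via `TreeFormatter`.
 * `S.string.ast` and the exact message text are assumptions for illustration.
 *
 *   const issue = new Type(S.string.ast, 42)
 *   const error = parseError(issue)
 *   error._tag     // "ParseError"
 *   error.message  // something like `Expected a string, actual 42`
 */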
/** | ||
@@ -121,3 +321,3 @@ * @category constructors | ||
*/ | ||
export const missing = InternalParser.missing; | ||
export const succeed = Either.right; | ||
/** | ||
@@ -127,3 +327,5 @@ * @category constructors | ||
*/ | ||
export const unexpected = InternalParser.unexpected; | ||
export const fail = Either.left; | ||
const _try = Either.try; | ||
export { | ||
/** | ||
@@ -133,3 +335,3 @@ * @category constructors | ||
*/ | ||
export const union = InternalParser.union; | ||
_try as try }; | ||
/** | ||
@@ -139,3 +341,3 @@ * @category constructors | ||
*/ | ||
export const member = InternalParser.member; | ||
export const fromOption = Either.fromOption; | ||
/** | ||
@@ -145,3 +347,12 @@ * @category optimisation | ||
*/ | ||
export const eitherOrUndefined = InternalParser.eitherOrUndefined; | ||
export const flatMap = /*#__PURE__*/dual(2, (self, f) => { | ||
const s = self; | ||
if (s["_tag"] === "Left") { | ||
return s; | ||
} | ||
if (s["_tag"] === "Right") { | ||
return f(s.right); | ||
} | ||
return Effect.flatMap(self, f); | ||
}); | ||
/** | ||
@@ -151,3 +362,12 @@ * @category optimisation | ||
*/ | ||
export const flatMap = InternalParser.flatMap; | ||
export const map = /*#__PURE__*/dual(2, (self, f) => { | ||
const s = self; | ||
if (s["_tag"] === "Left") { | ||
return s; | ||
} | ||
if (s["_tag"] === "Right") { | ||
return Either.right(f(s.right)); | ||
} | ||
return Effect.map(self, f); | ||
}); | ||
/** | ||
@@ -157,3 +377,12 @@ * @category optimisation | ||
*/ | ||
export const map = InternalParser.map; | ||
export const mapError = /*#__PURE__*/dual(2, (self, f) => { | ||
const s = self; | ||
if (s["_tag"] === "Left") { | ||
return Either.left(f(s.left)); | ||
} | ||
if (s["_tag"] === "Right") { | ||
return s; | ||
} | ||
return Effect.mapError(self, f); | ||
}); | ||
/** | ||
@@ -163,3 +392,8 @@ * @category optimisation | ||
*/ | ||
export const mapError = InternalParser.mapError; | ||
export const eitherOrUndefined = self => { | ||
const s = self; | ||
if (s["_tag"] === "Left" || s["_tag"] === "Right") { | ||
return s; | ||
} | ||
}; | ||
/** | ||
@@ -193,4 +427,36 @@ * @category optimisation | ||
}); | ||
/* c8 ignore start */ | ||
export { | ||
/** @internal */ | ||
export const mergeParseOptions = (options, overrideOptions) => { | ||
if (overrideOptions === undefined || Predicate.isNumber(overrideOptions)) { | ||
return options; | ||
} | ||
if (options === undefined) { | ||
return overrideOptions; | ||
} | ||
const out = {}; | ||
out.errors = overrideOptions.errors ?? options.errors; | ||
out.onExcessProperty = overrideOptions.onExcessProperty ?? options.onExcessProperty; | ||
return out; | ||
}; | ||
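/**
 * Editor's sketch (not part of the diff): how `mergeParseOptions` combines the
 * options fixed at construction time with per-call overrides (the override
 * wins key by key, `undefined` falls back to the base options).
 *
 *   mergeParseOptions({ errors: "all" }, { onExcessProperty: "error" })
 *   // => { errors: "all", onExcessProperty: "error" }
 *   mergeParseOptions({ errors: "all" }, undefined)
 *   // => { errors: "all" }
 */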
const getEither = (ast, isDecoding, options) => { | ||
const parser = goMemo(ast, isDecoding); | ||
return (u, overrideOptions) => parser(u, mergeParseOptions(options, overrideOptions)); | ||
}; | ||
const getSync = (ast, isDecoding, options) => { | ||
const parser = getEither(ast, isDecoding, options); | ||
return (input, overrideOptions) => Either.getOrThrowWith(parser(input, overrideOptions), issue => new Error(TreeFormatter.formatIssueSync(issue), { | ||
cause: issue | ||
})); | ||
}; | ||
const getOption = (ast, isDecoding, options) => { | ||
const parser = getEither(ast, isDecoding, options); | ||
return (input, overrideOptions) => Option.getRight(parser(input, overrideOptions)); | ||
}; | ||
const getEffect = (ast, isDecoding, options) => { | ||
const parser = goMemo(ast, isDecoding); | ||
return (input, overrideOptions) => parser(input, { | ||
...mergeParseOptions(options, overrideOptions), | ||
isEffectAllowed: true | ||
}); | ||
}; | ||
/** | ||
@@ -200,3 +466,3 @@ * @category decoding | ||
*/ | ||
decodeUnknown, | ||
export const decodeUnknownSync = (schema, options) => getSync(schema.ast, true, options); | ||
/** | ||
@@ -206,3 +472,3 @@ * @category decoding | ||
*/ | ||
decodeUnknownEither, | ||
export const decodeUnknownOption = (schema, options) => getOption(schema.ast, true, options); | ||
/** | ||
@@ -212,3 +478,3 @@ * @category decoding | ||
*/ | ||
decodeUnknownOption, | ||
export const decodeUnknownEither = (schema, options) => getEither(schema.ast, true, options); | ||
/** | ||
@@ -218,3 +484,6 @@ * @category decoding | ||
*/ | ||
decodeUnknownPromise, | ||
export const decodeUnknownPromise = (schema, options) => { | ||
const parser = decodeUnknown(schema, options); | ||
return (u, overrideOptions) => Effect.runPromise(parser(u, overrideOptions)); | ||
}; | ||
/** | ||
@@ -224,3 +493,3 @@ * @category decoding | ||
*/ | ||
decodeUnknownSync, | ||
export const decodeUnknown = (schema, options) => getEffect(schema.ast, true, options); | ||
/** | ||
@@ -230,3 +499,3 @@ * @category encoding | ||
*/ | ||
encodeUnknown, | ||
export const encodeUnknownSync = (schema, options) => getSync(schema.ast, false, options); | ||
/** | ||
@@ -236,3 +505,3 @@ * @category encoding | ||
*/ | ||
encodeUnknownEither, | ||
export const encodeUnknownOption = (schema, options) => getOption(schema.ast, false, options); | ||
/** | ||
@@ -242,3 +511,3 @@ * @category encoding | ||
*/ | ||
encodeUnknownOption, | ||
export const encodeUnknownEither = (schema, options) => getEither(schema.ast, false, options); | ||
/** | ||
@@ -248,3 +517,6 @@ * @category encoding | ||
*/ | ||
encodeUnknownPromise, | ||
export const encodeUnknownPromise = (schema, options) => { | ||
const parser = encodeUnknown(schema, options); | ||
return (u, overrideOptions) => Effect.runPromise(parser(u, overrideOptions)); | ||
}; | ||
/** | ||
@@ -254,3 +526,846 @@ * @category encoding | ||
*/ | ||
encodeUnknownSync } from "./Parser.js"; | ||
export const encodeUnknown = (schema, options) => getEffect(schema.ast, false, options); | ||
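/**
 * Editor's sketch (not part of the diff): the four flavours of the decoding
 * APIs defined above. `S.NumberFromString` is assumed from
 * "@effect/schema/Schema" purely for illustration.
 *
 *   const schema = S.NumberFromString
 *   decodeUnknownSync(schema)("1")       // 1, throws on bad input
 *   decodeUnknownEither(schema)("1")     // Either.right(1)
 *   decodeUnknownOption(schema)("oops")  // Option.none()
 *   Effect.runPromise(decodeUnknown(schema)("1")) // Promise resolving to 1
 */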
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeSync = decodeUnknownSync; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeOption = decodeUnknownOption; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeEither = decodeUnknownEither; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodePromise = decodeUnknownPromise; | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decode = decodeUnknown; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validateSync = (schema, options) => getSync(AST.typeAST(schema.ast), true, options); | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validateOption = (schema, options) => getOption(AST.typeAST(schema.ast), true, options); | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validateEither = (schema, options) => getEither(AST.typeAST(schema.ast), true, options); | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validatePromise = (schema, options) => { | ||
const parser = validate(schema, options); | ||
return (u, overrideOptions) => Effect.runPromise(parser(u, overrideOptions)); | ||
}; | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validate = (schema, options) => getEffect(AST.typeAST(schema.ast), true, options); | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const is = (schema, options) => { | ||
const parser = goMemo(AST.typeAST(schema.ast), true); | ||
return (u, overrideOptions) => Either.isRight(parser(u, { | ||
...mergeParseOptions(options, overrideOptions), | ||
isExact: true | ||
})); | ||
}; | ||
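/**
 * Editor's sketch (not part of the diff): `is` checks against the type side of
 * the schema (note `AST.typeAST` above), so encoded input is rejected.
 * `S.NumberFromString` is assumed purely for illustration.
 *
 *   const isNumber = is(S.NumberFromString)
 *   isNumber(1)   // true  (already the Type side)
 *   isNumber("1") // false (the Encoded side is not accepted)
 */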
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const asserts = (schema, options) => { | ||
const parser = goMemo(AST.typeAST(schema.ast), true); | ||
return (u, overrideOptions) => { | ||
const result = parser(u, { | ||
...mergeParseOptions(options, overrideOptions), | ||
isExact: true | ||
}); | ||
if (Either.isLeft(result)) { | ||
throw new Error(TreeFormatter.formatIssueSync(result.left), { | ||
cause: result.left | ||
}); | ||
} | ||
}; | ||
}; | ||
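/**
 * Editor's sketch (not part of the diff): `asserts` is the throwing variant of
 * `is`; it returns nothing on success and throws a tree-formatted error
 * otherwise. `S.string` is assumed purely for illustration.
 *
 *   const assertString = asserts(S.string)
 *   assertString("hello") // passes, returns undefined
 *   assertString(42)      // throws an Error whose message is the formatted ParseIssue
 */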
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeSync = encodeUnknownSync; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeOption = encodeUnknownOption; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeEither = encodeUnknownEither; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodePromise = encodeUnknownPromise; | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encode = encodeUnknown; | ||
const decodeMemoMap = /*#__PURE__*/globalValue( /*#__PURE__*/Symbol.for("@effect/schema/Parser/decodeMemoMap"), () => new WeakMap()); | ||
const encodeMemoMap = /*#__PURE__*/globalValue( /*#__PURE__*/Symbol.for("@effect/schema/Parser/encodeMemoMap"), () => new WeakMap()); | ||
const goMemo = (ast, isDecoding) => { | ||
const memoMap = isDecoding ? decodeMemoMap : encodeMemoMap; | ||
const memo = memoMap.get(ast); | ||
if (memo) { | ||
return memo; | ||
} | ||
const parser = go(ast, isDecoding); | ||
memoMap.set(ast, parser); | ||
return parser; | ||
}; | ||
const getConcurrency = ast => Option.getOrUndefined(AST.getConcurrencyAnnotation(ast)); | ||
const getBatching = ast => Option.getOrUndefined(AST.getBatchingAnnotation(ast)); | ||
const go = (ast, isDecoding) => { | ||
switch (ast._tag) { | ||
case "Refinement": | ||
{ | ||
if (isDecoding) { | ||
const from = goMemo(ast.from, true); | ||
return (i, options) => handleForbidden(flatMap(mapError(from(i, options), e => new Refinement(ast, i, "From", e)), a => Option.match(ast.filter(a, options ?? AST.defaultParseOption, ast), { | ||
onNone: () => Either.right(a), | ||
onSome: e => Either.left(new Refinement(ast, i, "Predicate", e)) | ||
})), ast, i, options); | ||
} else { | ||
const from = goMemo(AST.typeAST(ast), true); | ||
const to = goMemo(dropRightRefinement(ast.from), false); | ||
return (i, options) => handleForbidden(flatMap(from(i, options), a => to(a, options)), ast, i, options); | ||
} | ||
} | ||
case "Transformation": | ||
{ | ||
const transform = getFinalTransformation(ast.transformation, isDecoding); | ||
const from = isDecoding ? goMemo(ast.from, true) : goMemo(ast.to, false); | ||
const to = isDecoding ? goMemo(ast.to, true) : goMemo(ast.from, false); | ||
return (i1, options) => handleForbidden(flatMap(mapError(from(i1, options), e => new Transformation(ast, i1, isDecoding ? "Encoded" : "Type", e)), a => flatMap(mapError(transform(a, options ?? AST.defaultParseOption, ast), e => new Transformation(ast, i1, "Transformation", e)), i2 => mapError(to(i2, options), e => new Transformation(ast, i1, isDecoding ? "Type" : "Encoded", e)))), ast, i1, options); | ||
} | ||
case "Declaration": | ||
{ | ||
const parse = isDecoding ? ast.decodeUnknown(...ast.typeParameters) : ast.encodeUnknown(...ast.typeParameters); | ||
return (i, options) => handleForbidden(mapError(parse(i, options ?? AST.defaultParseOption, ast), e => new Declaration(ast, i, e)), ast, i, options); | ||
} | ||
case "Literal": | ||
return fromRefinement(ast, u => u === ast.literal); | ||
case "UniqueSymbol": | ||
return fromRefinement(ast, u => u === ast.symbol); | ||
case "UndefinedKeyword": | ||
return fromRefinement(ast, Predicate.isUndefined); | ||
case "VoidKeyword": | ||
return fromRefinement(ast, Predicate.isUndefined); | ||
case "NeverKeyword": | ||
return fromRefinement(ast, Predicate.isNever); | ||
case "UnknownKeyword": | ||
case "AnyKeyword": | ||
return Either.right; | ||
case "StringKeyword": | ||
return fromRefinement(ast, Predicate.isString); | ||
case "NumberKeyword": | ||
return fromRefinement(ast, Predicate.isNumber); | ||
case "BooleanKeyword": | ||
return fromRefinement(ast, Predicate.isBoolean); | ||
case "BigIntKeyword": | ||
return fromRefinement(ast, Predicate.isBigInt); | ||
case "SymbolKeyword": | ||
return fromRefinement(ast, Predicate.isSymbol); | ||
case "ObjectKeyword": | ||
return fromRefinement(ast, Predicate.isObject); | ||
case "Enums": | ||
return fromRefinement(ast, u => ast.enums.some(([_, value]) => value === u)); | ||
case "TemplateLiteral": | ||
{ | ||
const regex = AST.getTemplateLiteralRegExp(ast); | ||
return fromRefinement(ast, u => Predicate.isString(u) && regex.test(u)); | ||
} | ||
case "TupleType": | ||
{ | ||
const elements = ast.elements.map(e => goMemo(e.type, isDecoding)); | ||
const rest = ast.rest.map(ast => goMemo(ast, isDecoding)); | ||
let requiredLen = ast.elements.filter(e => !e.isOptional).length; | ||
if (ast.rest.length > 0) { | ||
requiredLen += ast.rest.length - 1; | ||
} | ||
const expectedAST = AST.Union.make(ast.elements.map((_, i) => new AST.Literal(i))); | ||
const concurrency = getConcurrency(ast); | ||
const batching = getBatching(ast); | ||
return (input, options) => { | ||
if (!Arr.isArray(input)) { | ||
return Either.left(new Type(ast, input)); | ||
} | ||
const allErrors = options?.errors === "all"; | ||
const es = []; | ||
let stepKey = 0; | ||
// --------------------------------------------- | ||
// handle missing indexes | ||
// --------------------------------------------- | ||
const len = input.length; | ||
for (let i = len; i <= requiredLen - 1; i++) { | ||
const e = new Index(i, missing); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e])); | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle excess indexes | ||
// --------------------------------------------- | ||
if (ast.rest.length === 0) { | ||
for (let i = ast.elements.length; i <= len - 1; i++) { | ||
const e = new Index(i, new Unexpected(expectedAST)); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e])); | ||
} | ||
} | ||
} | ||
const output = []; | ||
let i = 0; | ||
let queue = undefined; | ||
// --------------------------------------------- | ||
// handle elements | ||
// --------------------------------------------- | ||
for (; i < elements.length; i++) { | ||
if (len < i + 1) { | ||
if (ast.elements[i].isOptional) { | ||
// the input element is missing | ||
continue; | ||
} | ||
} else { | ||
const parser = elements[i]; | ||
const te = parser(input[i], options); | ||
const eu = eitherOrUndefined(te); | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
// the input element is present but is not valid | ||
const e = new Index(i, eu.left); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} | ||
output.push([stepKey++, eu.right]); | ||
} else { | ||
const nk = stepKey++; | ||
const index = i; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(te), t => { | ||
if (Either.isLeft(t)) { | ||
// the input element is present but is not valid | ||
const e = new Index(index, t.left); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} | ||
output.push([nk, t.right]); | ||
return Effect.void; | ||
})); | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle rest element | ||
// --------------------------------------------- | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest; | ||
for (; i < len - tail.length; i++) { | ||
const te = head(input[i], options); | ||
const eu = eitherOrUndefined(te); | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
const e = new Index(i, eu.left); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} else { | ||
output.push([stepKey++, eu.right]); | ||
} | ||
} else { | ||
const nk = stepKey++; | ||
const index = i; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(te), t => { | ||
if (Either.isLeft(t)) { | ||
const e = new Index(index, t.left); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} else { | ||
output.push([nk, t.right]); | ||
return Effect.void; | ||
} | ||
})); | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle post rest elements | ||
// --------------------------------------------- | ||
for (let j = 0; j < tail.length; j++) { | ||
i += j; | ||
if (len < i + 1) { | ||
continue; | ||
} else { | ||
const te = tail[j](input[i], options); | ||
const eu = eitherOrUndefined(te); | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
// the input element is present but is not valid | ||
const e = new Index(i, eu.left); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} | ||
output.push([stepKey++, eu.right]); | ||
} else { | ||
const nk = stepKey++; | ||
const index = i; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(te), t => { | ||
if (Either.isLeft(t)) { | ||
// the input element is present but is not valid | ||
const e = new Index(index, t.left); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))); | ||
} | ||
} | ||
output.push([nk, t.right]); | ||
return Effect.void; | ||
})); | ||
} | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// compute result | ||
// --------------------------------------------- | ||
const computeResult = ({ | ||
es, | ||
output | ||
}) => Arr.isNonEmptyArray(es) ? Either.left(new TupleType(ast, input, sortByIndex(es), sortByIndex(output))) : Either.right(sortByIndex(output)); | ||
if (queue && queue.length > 0) { | ||
const cqueue = queue; | ||
return Effect.suspend(() => { | ||
const state = { | ||
es: Arr.copy(es), | ||
output: Arr.copy(output) | ||
}; | ||
return Effect.flatMap(Effect.forEach(cqueue, f => f(state), { | ||
concurrency, | ||
batching, | ||
discard: true | ||
}), () => computeResult(state)); | ||
}); | ||
} | ||
return computeResult({ | ||
output, | ||
es | ||
}); | ||
}; | ||
} | ||
case "TypeLiteral": | ||
{ | ||
if (ast.propertySignatures.length === 0 && ast.indexSignatures.length === 0) { | ||
return fromRefinement(ast, Predicate.isNotNullable); | ||
} | ||
const propertySignatures = []; | ||
const expectedKeys = {}; | ||
for (const ps of ast.propertySignatures) { | ||
propertySignatures.push([goMemo(ps.type, isDecoding), ps]); | ||
expectedKeys[ps.name] = null; | ||
} | ||
const indexSignatures = ast.indexSignatures.map(is => [goMemo(is.parameter, isDecoding), goMemo(is.type, isDecoding), is.parameter]); | ||
const expectedAST = AST.Union.make(ast.indexSignatures.map(is => is.parameter).concat(util_.ownKeys(expectedKeys).map(key => Predicate.isSymbol(key) ? new AST.UniqueSymbol(key) : new AST.Literal(key)))); | ||
const expected = goMemo(expectedAST, isDecoding); | ||
const concurrency = getConcurrency(ast); | ||
const batching = getBatching(ast); | ||
return (input, options) => { | ||
if (!Predicate.isRecord(input)) { | ||
return Either.left(new Type(ast, input)); | ||
} | ||
const allErrors = options?.errors === "all"; | ||
const es = []; | ||
let stepKey = 0; | ||
// --------------------------------------------- | ||
// handle excess properties | ||
// --------------------------------------------- | ||
const onExcessPropertyError = options?.onExcessProperty === "error"; | ||
const onExcessPropertyPreserve = options?.onExcessProperty === "preserve"; | ||
const output = {}; | ||
if (onExcessPropertyError || onExcessPropertyPreserve) { | ||
for (const key of util_.ownKeys(input)) { | ||
const eu = eitherOrUndefined(expected(key, options)); | ||
if (Either.isLeft(eu)) { | ||
// key is unexpected | ||
if (onExcessPropertyError) { | ||
const e = new Key(key, new Unexpected(expectedAST)); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} else { | ||
// preserve key | ||
output[key] = input[key]; | ||
} | ||
} | ||
} | ||
} | ||
let queue = undefined; | ||
const isExact = options?.isExact === true; | ||
for (let i = 0; i < propertySignatures.length; i++) { | ||
const ps = propertySignatures[i][1]; | ||
const name = ps.name; | ||
const hasKey = Object.prototype.hasOwnProperty.call(input, name); | ||
if (!hasKey) { | ||
if (ps.isOptional) { | ||
continue; | ||
} else if (isExact) { | ||
const e = new Key(name, missing); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} | ||
} | ||
const parser = propertySignatures[i][0]; | ||
const te = parser(input[name], options); | ||
const eu = eitherOrUndefined(te); | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
const e = new Key(name, hasKey ? eu.left : missing); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} | ||
output[name] = eu.right; | ||
} else { | ||
const nk = stepKey++; | ||
const index = name; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(te), t => { | ||
if (Either.isLeft(t)) { | ||
const e = new Key(index, hasKey ? t.left : missing); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} | ||
output[index] = t.right; | ||
return Effect.void; | ||
})); | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle index signatures | ||
// --------------------------------------------- | ||
for (let i = 0; i < indexSignatures.length; i++) { | ||
const indexSignature = indexSignatures[i]; | ||
const parameter = indexSignature[0]; | ||
const type = indexSignature[1]; | ||
const keys = util_.getKeysForIndexSignature(input, indexSignature[2]); | ||
for (const key of keys) { | ||
// --------------------------------------------- | ||
// handle keys | ||
// --------------------------------------------- | ||
const keu = eitherOrUndefined(parameter(key, options)); | ||
if (keu && Either.isRight(keu)) { | ||
// --------------------------------------------- | ||
// handle values | ||
// --------------------------------------------- | ||
const vpr = type(input[key], options); | ||
const veu = eitherOrUndefined(vpr); | ||
if (veu) { | ||
if (Either.isLeft(veu)) { | ||
const e = new Key(key, veu.left); | ||
if (allErrors) { | ||
es.push([stepKey++, e]); | ||
continue; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} else { | ||
if (!Object.prototype.hasOwnProperty.call(expectedKeys, key)) { | ||
output[key] = veu.right; | ||
} | ||
} | ||
} else { | ||
const nk = stepKey++; | ||
const index = key; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(({ | ||
es, | ||
output | ||
}) => Effect.flatMap(Effect.either(vpr), tv => { | ||
if (Either.isLeft(tv)) { | ||
const e = new Key(index, tv.left); | ||
if (allErrors) { | ||
es.push([nk, e]); | ||
return Effect.void; | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)); | ||
} | ||
} else { | ||
if (!Object.prototype.hasOwnProperty.call(expectedKeys, key)) { | ||
output[key] = tv.right; | ||
} | ||
return Effect.void; | ||
} | ||
})); | ||
} | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// compute result | ||
// --------------------------------------------- | ||
const computeResult = ({ | ||
es, | ||
output | ||
}) => Arr.isNonEmptyArray(es) ? Either.left(new TypeLiteral(ast, input, sortByIndex(es), output)) : Either.right(output); | ||
if (queue && queue.length > 0) { | ||
const cqueue = queue; | ||
return Effect.suspend(() => { | ||
const state = { | ||
es: Arr.copy(es), | ||
output: Object.assign({}, output) | ||
}; | ||
return Effect.flatMap(Effect.forEach(cqueue, f => f(state), { | ||
concurrency, | ||
batching, | ||
discard: true | ||
}), () => computeResult(state)); | ||
}); | ||
} | ||
return computeResult({ | ||
es, | ||
output | ||
}); | ||
}; | ||
} | ||
case "Union": | ||
{ | ||
const searchTree = getSearchTree(ast.types, isDecoding); | ||
const ownKeys = util_.ownKeys(searchTree.keys); | ||
const len = ownKeys.length; | ||
const map = new Map(); | ||
for (let i = 0; i < ast.types.length; i++) { | ||
map.set(ast.types[i], goMemo(ast.types[i], isDecoding)); | ||
} | ||
const concurrency = getConcurrency(ast) ?? 1; | ||
const batching = getBatching(ast); | ||
return (input, options) => { | ||
const es = []; | ||
let stepKey = 0; | ||
let candidates = []; | ||
if (len > 0) { | ||
// if there is at least one key then input must be an object | ||
if (Predicate.isRecord(input)) { | ||
for (let i = 0; i < len; i++) { | ||
const name = ownKeys[i]; | ||
const buckets = searchTree.keys[name].buckets; | ||
// for each property that should contain a literal, check if the input contains that property | ||
if (Object.prototype.hasOwnProperty.call(input, name)) { | ||
const literal = String(input[name]); | ||
// check that the value obtained from the input for the property corresponds to an existing bucket | ||
if (Object.prototype.hasOwnProperty.call(buckets, literal)) { | ||
// retrieve the minimal set of candidates for decoding | ||
candidates = candidates.concat(buckets[literal]); | ||
} else { | ||
const literals = AST.Union.make(searchTree.keys[name].literals); | ||
es.push([stepKey++, new TypeLiteral(new AST.TypeLiteral([new AST.PropertySignature(name, literals, false, true)], []), input, [new Key(name, new Type(literals, input[name]))])]); | ||
} | ||
} else { | ||
const literals = AST.Union.make(searchTree.keys[name].literals); | ||
es.push([stepKey++, new TypeLiteral(new AST.TypeLiteral([new AST.PropertySignature(name, literals, false, true)], []), input, [new Key(name, missing)])]); | ||
} | ||
} | ||
} else { | ||
es.push([stepKey++, new Type(ast, input)]); | ||
} | ||
} | ||
if (searchTree.otherwise.length > 0) { | ||
candidates = candidates.concat(searchTree.otherwise); | ||
} | ||
let queue = undefined; | ||
for (let i = 0; i < candidates.length; i++) { | ||
const candidate = candidates[i]; | ||
const pr = map.get(candidate)(input, options); | ||
// the members of a union are ordered based on which one should be decoded first, | ||
// therefore if one member has added a task, all subsequent members must | ||
// also add a task to the queue even if they are synchronous | ||
const eu = !queue || queue.length === 0 ? eitherOrUndefined(pr) : undefined; | ||
if (eu) { | ||
if (Either.isRight(eu)) { | ||
return Either.right(eu.right); | ||
} else { | ||
es.push([stepKey++, new Member(candidate, eu.left)]); | ||
} | ||
} else { | ||
const nk = stepKey++; | ||
if (!queue) { | ||
queue = []; | ||
} | ||
queue.push(state => Effect.suspend(() => { | ||
if ("finalResult" in state) { | ||
return Effect.void; | ||
} else { | ||
return Effect.flatMap(Effect.either(pr), t => { | ||
if (Either.isRight(t)) { | ||
state.finalResult = Either.right(t.right); | ||
} else { | ||
state.es.push([nk, new Member(candidate, t.left)]); | ||
} | ||
return Effect.void; | ||
}); | ||
} | ||
})); | ||
} | ||
} | ||
// --------------------------------------------- | ||
// compute result | ||
// --------------------------------------------- | ||
const computeResult = es => Arr.isNonEmptyArray(es) ? es.length === 1 && es[0][1]._tag === "Type" ? Either.left(es[0][1]) : Either.left(new Union(ast, input, sortByIndex(es))) : | ||
// this should never happen | ||
Either.left(new Type(AST.neverKeyword, input)); | ||
if (queue && queue.length > 0) { | ||
const cqueue = queue; | ||
return Effect.suspend(() => { | ||
const state = { | ||
es: Arr.copy(es) | ||
}; | ||
return Effect.flatMap(Effect.forEach(cqueue, f => f(state), { | ||
concurrency, | ||
batching, | ||
discard: true | ||
}), () => { | ||
if ("finalResult" in state) { | ||
return state.finalResult; | ||
} | ||
return computeResult(state.es); | ||
}); | ||
}); | ||
} | ||
return computeResult(es); | ||
}; | ||
} | ||
case "Suspend": | ||
{ | ||
const get = util_.memoizeThunk(() => goMemo(AST.annotations(ast.f(), ast.annotations), isDecoding)); | ||
return (a, options) => get()(a, options); | ||
} | ||
} | ||
}; | ||
const fromRefinement = (ast, refinement) => u => refinement(u) ? Either.right(u) : Either.left(new Type(ast, u)); | ||
/** @internal */ | ||
export const getLiterals = (ast, isDecoding) => { | ||
switch (ast._tag) { | ||
case "Declaration": | ||
{ | ||
const annotation = AST.getSurrogateAnnotation(ast); | ||
if (Option.isSome(annotation)) { | ||
return getLiterals(annotation.value, isDecoding); | ||
} | ||
break; | ||
} | ||
case "TypeLiteral": | ||
{ | ||
const out = []; | ||
for (let i = 0; i < ast.propertySignatures.length; i++) { | ||
const propertySignature = ast.propertySignatures[i]; | ||
const type = isDecoding ? AST.encodedAST(propertySignature.type) : AST.typeAST(propertySignature.type); | ||
if (AST.isLiteral(type) && !propertySignature.isOptional) { | ||
out.push([propertySignature.name, type]); | ||
} | ||
} | ||
return out; | ||
} | ||
case "Refinement": | ||
return getLiterals(ast.from, isDecoding); | ||
case "Suspend": | ||
return getLiterals(ast.f(), isDecoding); | ||
case "Transformation": | ||
return getLiterals(isDecoding ? ast.from : ast.to, isDecoding); | ||
} | ||
return []; | ||
}; | ||
/** | ||
* The purpose of the algorithm is to narrow down the pool of possible candidates for decoding as much as possible. | ||
* | ||
* This function separates the schemas into two groups, `keys` and `otherwise`: | ||
* | ||
* - `keys`: the schema has at least one property with a literal value | ||
* - `otherwise`: the schema has no properties with a literal value | ||
* | ||
 * If a schema has at least one property with a literal value (and therefore ends up in `keys`), a namespace is first | ||
 * created for the name of the property containing the literal, and then within this namespace a "bucket" is created for | ||
 * the literal value, in which all the schemas that share that property and literal value are stored. | ||
* | ||
* @internal | ||
*/ | ||
export const getSearchTree = (members, isDecoding) => { | ||
const keys = {}; | ||
const otherwise = []; | ||
for (let i = 0; i < members.length; i++) { | ||
const member = members[i]; | ||
const tags = getLiterals(member, isDecoding); | ||
if (tags.length > 0) { | ||
for (let j = 0; j < tags.length; j++) { | ||
const [key, literal] = tags[j]; | ||
const hash = String(literal.literal); | ||
keys[key] = keys[key] || { | ||
buckets: {}, | ||
literals: [] | ||
}; | ||
const buckets = keys[key].buckets; | ||
if (Object.prototype.hasOwnProperty.call(buckets, hash)) { | ||
if (j < tags.length - 1) { | ||
continue; | ||
} | ||
buckets[hash].push(member); | ||
keys[key].literals.push(literal); | ||
} else { | ||
buckets[hash] = [member]; | ||
keys[key].literals.push(literal); | ||
break; | ||
} | ||
} | ||
} else { | ||
otherwise.push(member); | ||
} | ||
} | ||
return { | ||
keys, | ||
otherwise | ||
}; | ||
}; | ||
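// A minimal sketch of the search tree built for a discriminated union (hypothetical members, not
// part of this file): given
//
//   Union(Struct({ _tag: Literal("A"), a: String }), Struct({ _tag: Literal("B"), b: Number }))
//
// getSearchTree(members, true) yields roughly
//
//   {
//     keys: {
//       _tag: {
//         buckets: { "A": [memberA], "B": [memberB] },
//         literals: [Literal("A"), Literal("B")]
//       }
//     },
//     otherwise: []
//   }
//
// so an input with `_tag: "A"` is decoded against memberA alone instead of against every union member,
// while members without any literal property land in `otherwise` and are always tried.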
const dropRightRefinement = ast => AST.isRefinement(ast) ? dropRightRefinement(ast.from) : ast; | ||
const handleForbidden = (effect, ast, actual, options) => { | ||
const eu = eitherOrUndefined(effect); | ||
if (eu) { | ||
return eu; | ||
} | ||
if (options?.isEffectAllowed === true) { | ||
return effect; | ||
} | ||
try { | ||
return Effect.runSync(Effect.either(effect)); | ||
} catch (e) { | ||
      return Either.left(new Forbidden(ast, actual, "cannot be resolved synchronously, this is caused by using runSync on an effect that performs async work")); | ||
} | ||
}; | ||
function sortByIndex(es) { | ||
return es.sort(([a], [b]) => a > b ? 1 : a < b ? -1 : 0).map(([_, a]) => a); | ||
} | ||
// ------------------------------------------------------------------------------------- | ||
// transformations interpreter | ||
// ------------------------------------------------------------------------------------- | ||
/** @internal */ | ||
export const getFinalTransformation = (transformation, isDecoding) => { | ||
switch (transformation._tag) { | ||
case "FinalTransformation": | ||
return isDecoding ? transformation.decode : transformation.encode; | ||
case "ComposeTransformation": | ||
return Either.right; | ||
case "TypeLiteralTransformation": | ||
return input => { | ||
let out = Either.right(input); | ||
// --------------------------------------------- | ||
// handle property signature transformations | ||
// --------------------------------------------- | ||
for (const pst of transformation.propertySignatureTransformations) { | ||
const [from, to] = isDecoding ? [pst.from, pst.to] : [pst.to, pst.from]; | ||
const transformation = isDecoding ? pst.decode : pst.encode; | ||
const f = input => { | ||
const o = transformation(Object.prototype.hasOwnProperty.call(input, from) ? Option.some(input[from]) : Option.none()); | ||
delete input[from]; | ||
if (Option.isSome(o)) { | ||
input[to] = o.value; | ||
} | ||
return input; | ||
}; | ||
out = map(out, f); | ||
} | ||
return out; | ||
}; | ||
} | ||
}; | ||
//# sourceMappingURL=ParseResult.js.map |
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Arr from "effect/Array"; | ||
import * as Option from "effect/Option"; | ||
import * as ReadonlyArray from "effect/ReadonlyArray"; | ||
import * as AST from "./AST.js"; | ||
import * as Internal from "./internal/ast.js"; | ||
import * as hooks from "./internal/hooks.js"; | ||
import * as InternalSchema from "./internal/schema.js"; | ||
import * as Parser from "./Parser.js"; | ||
import * as errors_ from "./internal/errors.js"; | ||
import * as util_ from "./internal/util.js"; | ||
import * as ParseResult from "./ParseResult.js"; | ||
/** | ||
@@ -15,3 +14,3 @@ * @category hooks | ||
*/ | ||
export const PrettyHookId = hooks.PrettyHookId; | ||
export const PrettyHookId = /*#__PURE__*/Symbol.for("@effect/schema/PrettyHookId"); | ||
/** | ||
@@ -21,3 +20,5 @@ * @category annotations | ||
*/ | ||
export const pretty = handler => self => InternalSchema.make(AST.setAnnotation(self.ast, PrettyHookId, handler)); | ||
export const pretty = handler => self => self.annotations({ | ||
[PrettyHookId]: handler | ||
}); | ||
/** | ||
@@ -27,3 +28,3 @@ * @category prettify | ||
*/ | ||
export const make = schema => compile(schema.ast); | ||
export const make = schema => compile(schema.ast, []); | ||
const getHook = /*#__PURE__*/AST.getAnnotation(PrettyHookId); | ||
@@ -36,3 +37,4 @@ const getMatcher = defaultPretty => ast => Option.match(getHook(ast), { | ||
const stringify = /*#__PURE__*/getMatcher(a => JSON.stringify(a)); | ||
const formatUnknown = /*#__PURE__*/getMatcher(AST.formatUnknown); | ||
const formatUnknown = /*#__PURE__*/getMatcher(util_.formatUnknown); | ||
const getPrettyErrorMessage = (message, path) => errors_.getErrorMessageWithPath(`cannot build a Pretty for ${message}`, path); | ||
/** | ||
@@ -42,8 +44,8 @@ * @since 1.0.0 | ||
export const match = { | ||
"Declaration": (ast, go) => { | ||
"Declaration": (ast, go, path) => { | ||
const hook = getHook(ast); | ||
if (Option.isSome(hook)) { | ||
return hook.value(...ast.typeParameters.map(go)); | ||
return hook.value(...ast.typeParameters.map(tp => go(tp, path))); | ||
} | ||
throw new Error(`cannot build a Pretty for a declaration without annotations (${AST.format(ast)})`); | ||
throw new Error(getPrettyErrorMessage(`a declaration without annotations (${ast})`, path)); | ||
}, | ||
@@ -67,3 +69,3 @@ "VoidKeyword": /*#__PURE__*/getMatcher(() => "void(0)"), | ||
"Enums": stringify, | ||
"Tuple": (ast, go) => { | ||
"TupleType": (ast, go, path) => { | ||
const hook = getHook(ast); | ||
@@ -73,4 +75,4 @@ if (Option.isSome(hook)) { | ||
} | ||
const elements = ast.elements.map(e => go(e.type)); | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(go)); | ||
const elements = ast.elements.map((e, i) => go(e.type, path.concat(i))); | ||
const rest = ast.rest.map(ast => go(ast, path)); | ||
return input => { | ||
@@ -94,4 +96,4 @@ const output = []; | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const [head, ...tail] = rest.value; | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest; | ||
for (; i < input.length - tail.length; i++) { | ||
@@ -111,3 +113,3 @@ output.push(head(input[i])); | ||
}, | ||
"TypeLiteral": (ast, go) => { | ||
"TypeLiteral": (ast, go, path) => { | ||
const hook = getHook(ast); | ||
@@ -117,4 +119,4 @@ if (Option.isSome(hook)) { | ||
} | ||
const propertySignaturesTypes = ast.propertySignatures.map(f => go(f.type)); | ||
const indexSignatureTypes = ast.indexSignatures.map(is => go(is.type)); | ||
const propertySignaturesTypes = ast.propertySignatures.map(ps => go(ps.type, path.concat(ps.name))); | ||
const indexSignatureTypes = ast.indexSignatures.map(is => go(is.type, path)); | ||
const expectedKeys = {}; | ||
@@ -135,3 +137,3 @@ for (let i = 0; i < propertySignaturesTypes.length; i++) { | ||
} | ||
output.push(`${getPrettyPropertyKey(name)}: ${propertySignaturesTypes[i](input[name])}`); | ||
output.push(`${util_.formatPropertyKey(name)}: ${propertySignaturesTypes[i](input[name])}`); | ||
} | ||
@@ -144,3 +146,3 @@ // --------------------------------------------- | ||
const type = indexSignatureTypes[i]; | ||
const keys = Internal.getKeysForIndexSignature(input, ast.indexSignatures[i].parameter); | ||
const keys = util_.getKeysForIndexSignature(input, ast.indexSignatures[i].parameter); | ||
for (const key of keys) { | ||
@@ -150,10 +152,10 @@ if (Object.prototype.hasOwnProperty.call(expectedKeys, key)) { | ||
} | ||
output.push(`${getPrettyPropertyKey(key)}: ${type(input[key])}`); | ||
output.push(`${util_.formatPropertyKey(key)}: ${type(input[key])}`); | ||
} | ||
} | ||
} | ||
return ReadonlyArray.isNonEmptyReadonlyArray(output) ? "{ " + output.join(", ") + " }" : "{}"; | ||
return Arr.isNonEmptyReadonlyArray(output) ? "{ " + output.join(", ") + " }" : "{}"; | ||
}; | ||
}, | ||
"Union": (ast, go) => { | ||
"Union": (ast, go, path) => { | ||
const hook = getHook(ast); | ||
@@ -163,3 +165,5 @@ if (Option.isSome(hook)) { | ||
} | ||
const types = ast.types.map(ast => [Parser.is(InternalSchema.make(ast)), go(ast)]); | ||
const types = ast.types.map(ast => [ParseResult.is({ | ||
ast | ||
}), go(ast, path)]); | ||
return a => { | ||
@@ -170,6 +174,6 @@ const index = types.findIndex(([is]) => is(a)); | ||
}, | ||
"Suspend": (ast, go) => { | ||
"Suspend": (ast, go, path) => { | ||
return Option.match(getHook(ast), { | ||
onNone: () => { | ||
const get = Internal.memoizeThunk(() => go(ast.f())); | ||
const get = util_.memoizeThunk(() => go(ast.f(), path)); | ||
return a => get()(a); | ||
@@ -180,11 +184,11 @@ }, | ||
}, | ||
"Refinement": (ast, go) => { | ||
"Refinement": (ast, go, path) => { | ||
return Option.match(getHook(ast), { | ||
onNone: () => go(ast.from), | ||
onNone: () => go(ast.from, path), | ||
onSome: handler => handler() | ||
}); | ||
}, | ||
"Transform": (ast, go) => { | ||
"Transformation": (ast, go, path) => { | ||
return Option.match(getHook(ast), { | ||
onNone: () => go(ast.to), | ||
onNone: () => go(ast.to, path), | ||
onSome: handler => handler() | ||
@@ -195,3 +199,2 @@ }); | ||
const compile = /*#__PURE__*/AST.getCompiler(match); | ||
const getPrettyPropertyKey = name => typeof name === "string" ? JSON.stringify(name) : String(name); | ||
//# sourceMappingURL=Pretty.js.map |
import { dual } from "effect/Function"; | ||
import { globalValue } from "effect/GlobalValue"; | ||
import * as Internal from "./internal/serializable.js"; | ||
import * as serializable_ from "./internal/serializable.js"; | ||
import * as Schema from "./Schema.js"; | ||
@@ -9,3 +9,3 @@ /** | ||
*/ | ||
export const symbol = Internal.symbol; | ||
export const symbol = serializable_.symbol; | ||
/** | ||
@@ -20,3 +20,3 @@ * @since 1.0.0 | ||
*/ | ||
export const symbolResult = Internal.symbolResult; | ||
export const symbolResult = serializable_.symbolResult; | ||
/** | ||
@@ -40,7 +40,13 @@ * @since 1.0.0 | ||
if (!(symbolResult in proto)) { | ||
return Schema.exit(failureSchema(self), successSchema(self)); | ||
return Schema.Exit({ | ||
failure: failureSchema(self), | ||
success: successSchema(self) | ||
}); | ||
} | ||
let schema = exitSchemaCache.get(proto); | ||
if (schema === undefined) { | ||
schema = Schema.exit(failureSchema(self), successSchema(self)); | ||
schema = Schema.Exit({ | ||
failure: failureSchema(self), | ||
success: successSchema(self) | ||
}); | ||
exitSchemaCache.set(proto, schema); | ||
@@ -47,0 +53,0 @@ } |
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Effect from "effect/Effect"; | ||
import * as Option from "effect/Option"; | ||
import * as Predicate from "effect/Predicate"; | ||
import * as AST from "./AST.js"; | ||
import * as util_ from "./internal/util.js"; | ||
const make = (value, forest = []) => ({ | ||
@@ -14,6 +17,3 @@ value, | ||
*/ | ||
export const formatIssues = issues => { | ||
const forest = issues.map(go); | ||
return drawTree(forest.length === 1 ? forest[0] : make(`error(s) found`, issues.map(go))); | ||
}; | ||
export const formatIssue = issue => Effect.map(go(issue), tree => drawTree(tree)); | ||
/** | ||
@@ -23,3 +23,3 @@ * @category formatting | ||
*/ | ||
export const formatIssue = issue => formatIssues([issue]); | ||
export const formatIssueSync = issue => Effect.runSync(formatIssue(issue)); | ||
/** | ||
@@ -30,2 +30,7 @@ * @category formatting | ||
export const formatError = error => formatIssue(error.error); | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatErrorSync = error => formatIssueSync(error.error); | ||
const drawTree = tree => tree.value + draw("\n", tree.forest); | ||
@@ -46,8 +51,8 @@ const draw = (indentation, forest) => { | ||
switch (kind) { | ||
case "From": | ||
return "From side transformation failure"; | ||
case "Encoded": | ||
return "Encoded side transformation failure"; | ||
case "Transformation": | ||
return "Transformation process failure"; | ||
case "To": | ||
return "To side transformation failure"; | ||
case "Type": | ||
return "Type side transformation failure"; | ||
} | ||
@@ -63,85 +68,86 @@ }; | ||
}; | ||
/** @internal */ | ||
export const getMessage = (ast, actual) => { | ||
return AST.getMessageAnnotation(ast).pipe(Option.map(annotation => annotation(actual))); | ||
}; | ||
/** @internal */ | ||
export const formatTypeMessage = e => getMessage(e.ast, e.actual).pipe(Option.orElse(() => e.message), Option.getOrElse(() => `Expected ${AST.format(e.ast, true)}, actual ${AST.formatUnknown(e.actual)}`)); | ||
/** @internal */ | ||
export const formatForbiddenMessage = e => Option.getOrElse(e.message, () => "is forbidden"); | ||
const getParseIsssueMessage = (issue, orElse) => { | ||
const getInnerMessage = issue => { | ||
switch (issue._tag) { | ||
case "Refinement": | ||
return Option.orElse(getRefinementMessage(issue, issue.actual), orElse); | ||
case "Transform": | ||
return Option.orElse(getTransformMessage(issue, issue.actual), orElse); | ||
case "Tuple": | ||
case "TypeLiteral": | ||
case "Union": | ||
case "Type": | ||
return Option.orElse(getMessage(issue.ast, issue.actual), orElse); | ||
{ | ||
if (issue.kind === "From") { | ||
return getMessage(issue.error); | ||
} | ||
break; | ||
} | ||
case "Transformation": | ||
{ | ||
return getMessage(issue.error); | ||
} | ||
} | ||
return orElse(); | ||
return Option.none(); | ||
}; | ||
const getCurrentMessage = issue => AST.getMessageAnnotation(issue.ast).pipe(Effect.flatMap(annotation => { | ||
const out = annotation(issue); | ||
return Predicate.isString(out) ? Effect.succeed({ | ||
message: out, | ||
override: false | ||
}) : Effect.isEffect(out) ? Effect.map(out, message => ({ | ||
message, | ||
override: false | ||
})) : Predicate.isString(out.message) ? Effect.succeed({ | ||
message: out.message, | ||
override: out.override | ||
}) : Effect.map(out.message, message => ({ | ||
message, | ||
override: out.override | ||
})); | ||
})); | ||
/** @internal */ | ||
export const getRefinementMessage = (e, actual) => { | ||
if (e.kind === "From") { | ||
return getParseIsssueMessage(e.error, () => getMessage(e.ast, actual)); | ||
} | ||
return getMessage(e.ast, actual); | ||
export const getMessage = issue => { | ||
const current = getCurrentMessage(issue); | ||
return getInnerMessage(issue).pipe(Effect.flatMap(inner => Effect.map(current, current => current.override ? current.message : inner)), Effect.catchAll(() => Effect.flatMap(current, current => { | ||
if (!current.override && (issue._tag === "Refinement" && issue.kind !== "Predicate" || issue._tag === "Transformation" && issue.kind !== "Transformation")) { | ||
return Option.none(); | ||
} | ||
return Effect.succeed(current.message); | ||
}))); | ||
}; | ||
const getParseIssueTitleAnnotation = issue => Option.filterMap(AST.getParseIssueTitleAnnotation(issue.ast), annotation => Option.fromNullable(annotation(issue))); | ||
/** @internal */ | ||
export const getTransformMessage = (e, actual) => { | ||
return getParseIsssueMessage(e.error, () => getMessage(e.ast, actual)); | ||
}; | ||
export const formatTypeMessage = e => getMessage(e).pipe(Effect.orElse(() => getParseIssueTitleAnnotation(e)), Effect.orElse(() => e.message), Effect.catchAll(() => Effect.succeed(`Expected ${e.ast.toString(true)}, actual ${util_.formatUnknown(e.actual)}`))); | ||
const getParseIssueTitle = issue => Option.getOrElse(getParseIssueTitleAnnotation(issue), () => String(issue.ast)); | ||
/** @internal */ | ||
export const formatForbiddenMessage = e => Option.getOrElse(e.message, () => "is forbidden"); | ||
const getTree = (issue, onFailure) => Effect.matchEffect(getMessage(issue), { | ||
onFailure, | ||
onSuccess: message => Effect.succeed(make(message)) | ||
}); | ||
const go = e => { | ||
switch (e._tag) { | ||
case "Type": | ||
return make(formatTypeMessage(e)); | ||
return Effect.map(formatTypeMessage(e), make); | ||
case "Forbidden": | ||
return make(AST.format(e.ast), [make(formatForbiddenMessage(e))]); | ||
return Effect.succeed(make(getParseIssueTitle(e), [make(formatForbiddenMessage(e))])); | ||
case "Unexpected": | ||
return make(`is unexpected, expected ${AST.format(e.ast, true)}`); | ||
return Effect.succeed(make(`is unexpected, expected ${e.ast.toString(true)}`)); | ||
case "Missing": | ||
return make("is missing"); | ||
return Effect.succeed(make("is missing")); | ||
case "Union": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => make(AST.format(e.ast), e.errors.map(e => { | ||
switch (e._tag) { | ||
case "Member": | ||
return make(`Union member`, [go(e.error)]); | ||
default: | ||
return go(e); | ||
} | ||
})), | ||
onSome: make | ||
}); | ||
case "Tuple": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => make(AST.format(e.ast), e.errors.map(index => make(`[${index.index}]`, [go(index.error)]))), | ||
onSome: make | ||
}); | ||
return getTree(e, () => Effect.map(Effect.forEach(e.errors, e => { | ||
switch (e._tag) { | ||
case "Member": | ||
return Effect.map(go(e.error), tree => make(`Union member`, [tree])); | ||
default: | ||
return go(e); | ||
} | ||
}), forest => make(getParseIssueTitle(e), forest))); | ||
case "TupleType": | ||
return getTree(e, () => Effect.map(Effect.forEach(e.errors, index => Effect.map(go(index.error), tree => make(`[${util_.formatPropertyKey(index.index)}]`, [tree]))), forest => make(getParseIssueTitle(e), forest))); | ||
case "TypeLiteral": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => make(AST.format(e.ast), e.errors.map(key => make(`[${AST.formatUnknown(key.key)}]`, [go(key.error)]))), | ||
onSome: make | ||
}); | ||
case "Transform": | ||
return Option.match(getTransformMessage(e, e.actual), { | ||
onNone: () => make(AST.format(e.ast), [make(formatTransformationKind(e.kind), [go(e.error)])]), | ||
onSome: make | ||
}); | ||
return getTree(e, () => Effect.map(Effect.forEach(e.errors, key => Effect.map(go(key.error), tree => make(`[${util_.formatPropertyKey(key.key)}]`, [tree]))), forest => make(getParseIssueTitle(e), forest))); | ||
case "Transformation": | ||
return getTree(e, () => Effect.map(go(e.error), tree => make(getParseIssueTitle(e), [make(formatTransformationKind(e.kind), [tree])]))); | ||
case "Refinement": | ||
return Option.match(getRefinementMessage(e, e.actual), { | ||
onNone: () => make(AST.format(e.ast), [make(formatRefinementKind(e.kind), [go(e.error)])]), | ||
onSome: make | ||
}); | ||
return getTree(e, () => Effect.map(go(e.error), tree => make(getParseIssueTitle(e), [make(formatRefinementKind(e.kind), [tree])]))); | ||
case "Declaration": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => { | ||
const error = e.error; | ||
const shouldSkipDefaultMessage = error._tag === "Type" && error.ast === e.ast; | ||
return shouldSkipDefaultMessage ? go(error) : make(AST.format(e.ast), [go(e.error)]); | ||
}, | ||
onSome: make | ||
return getTree(e, () => { | ||
const error = e.error; | ||
const shouldSkipDefaultMessage = error._tag === "Type" && error.ast === e.ast; | ||
return shouldSkipDefaultMessage ? go(error) : Effect.map(go(error), tree => make(getParseIssueTitle(e), [tree])); | ||
}); | ||
@@ -148,0 +154,0 @@ } |
{ | ||
"name": "@effect/schema", | ||
"version": "0.0.0-snapshot-c0ae728e57df2c572ea803e1bb7121088cd67b49", | ||
"version": "0.0.0-snapshot-e1ebaaa379fd8da009fc70115350394e6295b9d4", | ||
"description": "Modeling the schema of data structures as first-class values", | ||
@@ -8,9 +8,15 @@ "license": "MIT", | ||
"type": "git", | ||
"url": "https://github.com/effect-ts/effect.git" | ||
"url": "https://github.com/Effect-TS/effect.git", | ||
"directory": "packages/schema" | ||
}, | ||
"sideEffects": [], | ||
"dependencies": { | ||
"fast-check": "^3.17.2" | ||
}, | ||
"peerDependencies": { | ||
"fast-check": "^3.13.2", | ||
"effect": "^0.0.0-snapshot-c0ae728e57df2c572ea803e1bb7121088cd67b49" | ||
"effect": "^0.0.0-snapshot-e1ebaaa379fd8da009fc70115350394e6295b9d4" | ||
}, | ||
"publishConfig": { | ||
"provenance": true | ||
}, | ||
"main": "./dist/cjs/index.js", | ||
@@ -46,6 +52,6 @@ "module": "./dist/esm/index.js", | ||
}, | ||
"./Format": { | ||
"types": "./dist/dts/Format.d.ts", | ||
"import": "./dist/esm/Format.js", | ||
"default": "./dist/cjs/Format.js" | ||
"./FastCheck": { | ||
"types": "./dist/dts/FastCheck.d.ts", | ||
"import": "./dist/esm/FastCheck.js", | ||
"default": "./dist/cjs/FastCheck.js" | ||
}, | ||
@@ -62,7 +68,2 @@ "./JSONSchema": { | ||
}, | ||
"./Parser": { | ||
"types": "./dist/dts/Parser.d.ts", | ||
"import": "./dist/esm/Parser.js", | ||
"default": "./dist/cjs/Parser.js" | ||
}, | ||
"./Pretty": { | ||
@@ -103,4 +104,4 @@ "types": "./dist/dts/Pretty.d.ts", | ||
], | ||
"Format": [ | ||
"./dist/dts/Format.d.ts" | ||
"FastCheck": [ | ||
"./dist/dts/FastCheck.d.ts" | ||
], | ||
@@ -113,5 +114,2 @@ "JSONSchema": [ | ||
], | ||
"Parser": [ | ||
"./dist/dts/Parser.d.ts" | ||
], | ||
"Pretty": [ | ||
@@ -118,0 +116,0 @@ "./dist/dts/Pretty.d.ts" |
@@ -5,12 +5,10 @@ /** | ||
import * as Arr from "effect/Array" | ||
import * as Option from "effect/Option" | ||
import * as Predicate from "effect/Predicate" | ||
import * as ReadonlyArray from "effect/ReadonlyArray" | ||
import type * as FastCheck from "fast-check" | ||
import * as AST from "./AST.js" | ||
import * as Internal from "./internal/ast.js" | ||
import * as filters from "./internal/filters.js" | ||
import * as hooks from "./internal/hooks.js" | ||
import * as InternalSchema from "./internal/schema.js" | ||
import * as Parser from "./Parser.js" | ||
import * as FastCheck from "./FastCheck.js" | ||
import * as errors_ from "./internal/errors.js" | ||
import * as filters_ from "./internal/filters.js" | ||
import * as util_ from "./internal/util.js" | ||
import type * as Schema from "./Schema.js" | ||
@@ -22,3 +20,3 @@ | ||
*/ | ||
export interface Arbitrary<A> { | ||
export interface LazyArbitrary<A> { | ||
(fc: typeof FastCheck): FastCheck.Arbitrary<A> | ||
@@ -31,3 +29,3 @@ } | ||
*/ | ||
export const ArbitraryHookId: unique symbol = hooks.ArbitraryHookId | ||
export const ArbitraryHookId: unique symbol = Symbol.for("@effect/schema/ArbitraryHookId") | ||
@@ -45,7 +43,14 @@ /** | ||
export const arbitrary = | ||
<A>(handler: (...args: ReadonlyArray<Arbitrary<any>>) => Arbitrary<A>) => | ||
<I, R>(self: Schema.Schema<A, I, R>): Schema.Schema<A, I, R> => | ||
InternalSchema.make(AST.setAnnotation(self.ast, ArbitraryHookId, handler)) | ||
<A>(handler: (...args: ReadonlyArray<LazyArbitrary<any>>) => LazyArbitrary<A>) => | ||
<I, R>(self: Schema.Schema<A, I, R>): Schema.Schema<A, I, R> => self.annotations({ [ArbitraryHookId]: handler }) | ||
/** | ||
* Returns a LazyArbitrary for the `A` type of the provided schema. | ||
* | ||
* @category arbitrary | ||
* @since 1.0.0 | ||
*/ | ||
export const makeLazy = <A, I, R>(schema: Schema.Schema<A, I, R>): LazyArbitrary<A> => go(schema.ast, {}, []) | ||
/** | ||
* Returns a fast-check Arbitrary for the `A` type of the provided schema. | ||
@@ -56,3 +61,3 @@ * | ||
*/ | ||
export const make = <A, I, R>(schema: Schema.Schema<A, I, R>): Arbitrary<A> => go(schema.ast, {}) | ||
export const make = <A, I, R>(schema: Schema.Schema<A, I, R>): FastCheck.Arbitrary<A> => makeLazy(schema)(FastCheck) | ||
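// A minimal usage sketch (hypothetical application code, not part of this module; `Schema.Struct`,
// `Schema.String` and `Schema.Number` are assumed to be the struct/primitive constructors exported
// by this snapshot of the package):
//
//   import * as Arbitrary from "@effect/schema/Arbitrary"
//   import * as FastCheck from "@effect/schema/FastCheck"
//   import * as Schema from "@effect/schema/Schema"
//
//   const Person = Schema.Struct({ name: Schema.String, age: Schema.Number })
//
//   // make: schema -> fast-check Arbitrary, already bound to the re-exported FastCheck module
//   const personArbitrary = Arbitrary.make(Person)
//   const samples = FastCheck.sample(personArbitrary, 5) // five random { name, age } values
//
//   // makeLazy: schema -> LazyArbitrary, letting the caller supply the fast-check module itself
//   const lazyPerson = Arbitrary.makeLazy(Person)
//   const samePersonArbitrary = lazyPerson(FastCheck)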
@@ -83,3 +88,3 @@ const depthSize = 1 | ||
const getHook = AST.getAnnotation< | ||
(...args: ReadonlyArray<Arbitrary<any>>) => Arbitrary<any> | ||
(...args: ReadonlyArray<LazyArbitrary<any>>) => LazyArbitrary<any> | ||
>(ArbitraryHookId) | ||
@@ -92,8 +97,11 @@ | ||
const getRefinementFromArbitrary = (ast: AST.Refinement, options: Options) => { | ||
const getRefinementFromArbitrary = (ast: AST.Refinement, options: Options, path: ReadonlyArray<PropertyKey>) => { | ||
const constraints = combineConstraints(options.constraints, getConstraints(ast)) | ||
return go(ast.from, constraints ? { ...options, constraints } : options) | ||
return go(ast.from, constraints ? { ...options, constraints } : options, path) | ||
} | ||
const go = (ast: AST.AST, options: Options): Arbitrary<any> => { | ||
const getArbitraryErrorMessage = (message: string, path: ReadonlyArray<PropertyKey>) => | ||
errors_.getErrorMessageWithPath(`cannot build an Arbitrary for ${message}`, path) | ||
const go = (ast: AST.AST, options: Options, path: ReadonlyArray<PropertyKey>): LazyArbitrary<any> => { | ||
const hook = getHook(ast) | ||
@@ -103,5 +111,5 @@ if (Option.isSome(hook)) { | ||
case "Declaration": | ||
return hook.value(...ast.typeParameters.map((p) => go(p, options))) | ||
return hook.value(...ast.typeParameters.map((p) => go(p, options, path))) | ||
case "Refinement": | ||
return hook.value(getRefinementFromArbitrary(ast, options)) | ||
return hook.value(getRefinementFromArbitrary(ast, options, path)) | ||
default: | ||
@@ -113,3 +121,3 @@ return hook.value() | ||
case "Declaration": { | ||
throw new Error(`cannot build an Arbitrary for a declaration without annotations (${AST.format(ast)})`) | ||
throw new Error(getArbitraryErrorMessage(`a declaration without annotations (${ast})`, path)) | ||
} | ||
@@ -125,3 +133,3 @@ case "Literal": | ||
return () => { | ||
throw new Error("cannot build an Arbitrary for `never`") | ||
throw new Error(getArbitraryErrorMessage("`never`", path)) | ||
} | ||
@@ -185,7 +193,8 @@ case "UnknownKeyword": | ||
} | ||
case "Tuple": { | ||
const elements: Array<Arbitrary<any>> = [] | ||
case "TupleType": { | ||
const elements: Array<LazyArbitrary<any>> = [] | ||
let hasOptionals = false | ||
let i = 0 | ||
for (const element of ast.elements) { | ||
elements.push(go(element.type, options)) | ||
elements.push(go(element.type, options, path.concat(i++))) | ||
if (element.isOptional) { | ||
@@ -195,3 +204,3 @@ hasOptionals = true | ||
} | ||
const rest = Option.map(ast.rest, ReadonlyArray.map((e) => go(e, options))) | ||
const rest = ast.rest.map((e) => go(e, options, path)) | ||
return (fc) => { | ||
@@ -221,4 +230,4 @@ // --------------------------------------------- | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const [head, ...tail] = rest.value | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest | ||
const arb = head(fc) | ||
@@ -251,5 +260,5 @@ const constraints = options.constraints | ||
case "TypeLiteral": { | ||
const propertySignaturesTypes = ast.propertySignatures.map((f) => go(f.type, options)) | ||
const propertySignaturesTypes = ast.propertySignatures.map((ps) => go(ps.type, options, path.concat(ps.name))) | ||
const indexSignatures = ast.indexSignatures.map((is) => | ||
[go(is.parameter, options), go(is.type, options)] as const | ||
[go(is.parameter, options, path), go(is.type, options, path)] as const | ||
) | ||
@@ -286,3 +295,3 @@ return (fc) => { | ||
case "Union": { | ||
const types = ast.types.map((t) => go(t, options)) | ||
const types = ast.types.map((t) => go(t, options, path)) | ||
return (fc) => fc.oneof({ depthSize }, ...types.map((arb) => arb(fc))) | ||
@@ -292,3 +301,3 @@ } | ||
if (ast.enums.length === 0) { | ||
throw new Error("cannot build an Arbitrary for an empty enum") | ||
throw new Error(getArbitraryErrorMessage("an empty enum", path)) | ||
} | ||
@@ -298,81 +307,113 @@ return (fc) => fc.oneof(...ast.enums.map(([_, value]) => fc.constant(value))) | ||
case "Refinement": { | ||
const from = getRefinementFromArbitrary(ast, options) | ||
return (fc) => from(fc).filter((a) => Option.isNone(ast.filter(a, Parser.defaultParseOption, ast))) | ||
const from = getRefinementFromArbitrary(ast, options, path) | ||
return (fc) => from(fc).filter((a) => Option.isNone(ast.filter(a, AST.defaultParseOption, ast))) | ||
} | ||
case "Suspend": { | ||
const get = Internal.memoizeThunk(() => go(ast.f(), { ...options, isSuspend: true })) | ||
const get = util_.memoizeThunk(() => go(ast.f(), { ...options, isSuspend: true }, path)) | ||
return (fc) => fc.constant(null).chain(() => get()(fc)) | ||
} | ||
case "Transform": | ||
return go(ast.to, options) | ||
case "Transformation": | ||
return go(ast.to, options, path) | ||
} | ||
} | ||
interface NumberConstraints { | ||
readonly _tag: "NumberConstraints" | ||
/** @internal */ | ||
export class NumberConstraints { | ||
readonly _tag = "NumberConstraints" | ||
readonly constraints: FastCheck.FloatConstraints | ||
constructor(options: { | ||
readonly min?: number | undefined | ||
readonly max?: number | undefined | ||
readonly noNaN?: boolean | undefined | ||
readonly noDefaultInfinity?: boolean | undefined | ||
}) { | ||
this.constraints = {} | ||
if (Predicate.isNumber(options.min)) { | ||
this.constraints.min = Math.fround(options.min) | ||
} | ||
if (Predicate.isNumber(options.max)) { | ||
this.constraints.max = Math.fround(options.max) | ||
} | ||
if (Predicate.isBoolean(options.noNaN)) { | ||
this.constraints.noNaN = options.noNaN | ||
} | ||
if (Predicate.isBoolean(options.noDefaultInfinity)) { | ||
this.constraints.noDefaultInfinity = options.noDefaultInfinity | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
export const numberConstraints = ( | ||
constraints: NumberConstraints["constraints"] | ||
): NumberConstraints => { | ||
if (Predicate.isNumber(constraints.min)) { | ||
constraints.min = Math.fround(constraints.min) | ||
export class StringConstraints { | ||
readonly _tag = "StringConstraints" | ||
readonly constraints: FastCheck.StringSharedConstraints | ||
constructor(options: { | ||
readonly minLength?: number | undefined | ||
readonly maxLength?: number | undefined | ||
}) { | ||
this.constraints = {} | ||
if (Predicate.isNumber(options.minLength)) { | ||
this.constraints.minLength = options.minLength | ||
} | ||
if (Predicate.isNumber(options.maxLength)) { | ||
this.constraints.maxLength = options.maxLength | ||
} | ||
} | ||
if (Predicate.isNumber(constraints.max)) { | ||
constraints.max = Math.fround(constraints.max) | ||
} | ||
return { _tag: "NumberConstraints", constraints } | ||
} | ||
interface StringConstraints { | ||
readonly _tag: "StringConstraints" | ||
readonly constraints: FastCheck.StringSharedConstraints | ||
} | ||
/** @internal */ | ||
export const stringConstraints = ( | ||
constraints: StringConstraints["constraints"] | ||
): StringConstraints => { | ||
return { _tag: "StringConstraints", constraints } | ||
} | ||
interface IntegerConstraints { | ||
readonly _tag: "IntegerConstraints" | ||
export class IntegerConstraints { | ||
readonly _tag = "IntegerConstraints" | ||
readonly constraints: FastCheck.IntegerConstraints | ||
constructor(options: { | ||
readonly min?: number | undefined | ||
readonly max?: number | undefined | ||
}) { | ||
this.constraints = {} | ||
if (Predicate.isNumber(options.min)) { | ||
this.constraints.min = options.min | ||
} | ||
if (Predicate.isNumber(options.max)) { | ||
this.constraints.max = options.max | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
export const integerConstraints = ( | ||
constraints: IntegerConstraints["constraints"] | ||
): IntegerConstraints => { | ||
return { _tag: "IntegerConstraints", constraints } | ||
} | ||
interface ArrayConstraints { | ||
readonly _tag: "ArrayConstraints" | ||
export class ArrayConstraints { | ||
readonly _tag = "ArrayConstraints" | ||
readonly constraints: FastCheck.ArrayConstraints | ||
constructor(options: { | ||
readonly minLength?: number | undefined | ||
readonly maxLength?: number | undefined | ||
}) { | ||
this.constraints = {} | ||
if (Predicate.isNumber(options.minLength)) { | ||
this.constraints.minLength = options.minLength | ||
} | ||
if (Predicate.isNumber(options.maxLength)) { | ||
this.constraints.maxLength = options.maxLength | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
export const arrayConstraints = ( | ||
constraints: ArrayConstraints["constraints"] | ||
): ArrayConstraints => { | ||
return { _tag: "ArrayConstraints", constraints } | ||
} | ||
interface BigIntConstraints { | ||
readonly _tag: "BigIntConstraints" | ||
export class BigIntConstraints { | ||
readonly _tag = "BigIntConstraints" | ||
readonly constraints: FastCheck.BigIntConstraints | ||
constructor(options: { | ||
readonly min?: bigint | undefined | ||
readonly max?: bigint | undefined | ||
}) { | ||
this.constraints = {} | ||
if (Predicate.isBigInt(options.min)) { | ||
this.constraints.min = options.min | ||
} | ||
if (Predicate.isBigInt(options.max)) { | ||
this.constraints.max = options.max | ||
} | ||
} | ||
} | ||
/** @internal */ | ||
export const bigintConstraints = ( | ||
constraints: BigIntConstraints["constraints"] | ||
): BigIntConstraints => { | ||
return { _tag: "BigIntConstraints", constraints } | ||
} | ||
/** @internal */ | ||
export type Constraints = | ||
@@ -390,61 +431,37 @@ | NumberConstraints | ||
switch (TypeAnnotationId) { | ||
// int | ||
case filters_.IntTypeId: | ||
return new IntegerConstraints({}) | ||
// number | ||
case filters.GreaterThanTypeId: | ||
case filters.GreaterThanOrEqualToTypeId: | ||
return numberConstraints({ min: jsonSchema.exclusiveMinimum ?? jsonSchema.minimum }) | ||
case filters.LessThanTypeId: | ||
case filters.LessThanOrEqualToTypeId: | ||
return numberConstraints({ max: jsonSchema.exclusiveMaximum ?? jsonSchema.maximum }) | ||
case filters.IntTypeId: | ||
return integerConstraints({}) | ||
case filters.BetweenTypeId: { | ||
const min = jsonSchema.minimum | ||
const max = jsonSchema.maximum | ||
const constraints: NumberConstraints["constraints"] = {} | ||
if (Predicate.isNumber(min)) { | ||
constraints.min = min | ||
} | ||
if (Predicate.isNumber(max)) { | ||
constraints.max = max | ||
} | ||
return numberConstraints(constraints) | ||
} | ||
case filters_.GreaterThanTypeId: | ||
case filters_.GreaterThanOrEqualToTypeId: | ||
case filters_.LessThanTypeId: | ||
case filters_.LessThanOrEqualToTypeId: | ||
case filters_.BetweenTypeId: | ||
return new NumberConstraints({ | ||
min: jsonSchema.exclusiveMinimum ?? jsonSchema.minimum, | ||
max: jsonSchema.exclusiveMaximum ?? jsonSchema.maximum | ||
}) | ||
// bigint | ||
case filters.GreaterThanBigintTypeId: | ||
case filters.GreaterThanOrEqualToBigintTypeId: { | ||
const params: any = ast.annotations[TypeAnnotationId] | ||
return bigintConstraints({ min: params.min }) | ||
case filters_.GreaterThanBigintTypeId: | ||
case filters_.GreaterThanOrEqualToBigIntTypeId: | ||
case filters_.LessThanBigIntTypeId: | ||
case filters_.LessThanOrEqualToBigIntTypeId: | ||
case filters_.BetweenBigintTypeId: { | ||
const constraints: any = ast.annotations[TypeAnnotationId] | ||
return new BigIntConstraints(constraints) | ||
} | ||
case filters.LessThanBigintTypeId: | ||
case filters.LessThanOrEqualToBigintTypeId: { | ||
const params: any = ast.annotations[TypeAnnotationId] | ||
return bigintConstraints({ max: params.max }) | ||
} | ||
case filters.BetweenBigintTypeId: { | ||
const params: any = ast.annotations[TypeAnnotationId] | ||
const min = params.min | ||
const max = params.max | ||
const constraints: BigIntConstraints["constraints"] = {} | ||
if (Predicate.isBigInt(min)) { | ||
constraints.min = min | ||
} | ||
if (Predicate.isBigInt(max)) { | ||
constraints.max = max | ||
} | ||
return bigintConstraints(constraints) | ||
} | ||
// string | ||
case filters.MinLengthTypeId: | ||
return stringConstraints({ minLength: jsonSchema.minLength }) | ||
case filters.MaxLengthTypeId: | ||
return stringConstraints({ maxLength: jsonSchema.maxLength }) | ||
case filters.LengthTypeId: | ||
return stringConstraints({ minLength: jsonSchema.minLength, maxLength: jsonSchema.maxLength }) | ||
case filters_.MinLengthTypeId: | ||
case filters_.MaxLengthTypeId: | ||
case filters_.LengthTypeId: | ||
return new StringConstraints(jsonSchema) | ||
// array | ||
case filters.MinItemsTypeId: | ||
return arrayConstraints({ minLength: jsonSchema.minItems }) | ||
case filters.MaxItemsTypeId: | ||
return arrayConstraints({ maxLength: jsonSchema.maxItems }) | ||
case filters.ItemsCountTypeId: | ||
return arrayConstraints({ minLength: jsonSchema.minItems, maxLength: jsonSchema.maxItems }) | ||
case filters_.MinItemsTypeId: | ||
case filters_.MaxItemsTypeId: | ||
case filters_.ItemsCountTypeId: | ||
return new ArrayConstraints({ | ||
minLength: jsonSchema.minItems, | ||
maxLength: jsonSchema.maxItems | ||
}) | ||
} | ||
@@ -467,17 +484,7 @@ } | ||
switch (c2._tag) { | ||
case "ArrayConstraints": { | ||
const c: ArrayConstraints["constraints"] = { | ||
...c1.constraints, | ||
...c2.constraints | ||
} | ||
const minLength = getMax(c1.constraints.minLength, c2.constraints.minLength) | ||
if (Predicate.isNumber(minLength)) { | ||
c.minLength = minLength | ||
} | ||
const maxLength = getMin(c1.constraints.maxLength, c2.constraints.maxLength) | ||
if (Predicate.isNumber(maxLength)) { | ||
c.maxLength = maxLength | ||
} | ||
return arrayConstraints(c) | ||
} | ||
case "ArrayConstraints": | ||
return new ArrayConstraints({ | ||
minLength: getMax(c1.constraints.minLength, c2.constraints.minLength), | ||
maxLength: getMin(c1.constraints.maxLength, c2.constraints.maxLength) | ||
}) | ||
} | ||
@@ -488,29 +495,14 @@ break | ||
switch (c2._tag) { | ||
case "NumberConstraints": { | ||
const c: NumberConstraints["constraints"] = { | ||
...c1.constraints, | ||
...c2.constraints | ||
} | ||
const min = getMax(c1.constraints.min, c2.constraints.min) | ||
if (Predicate.isNumber(min)) { | ||
c.min = min | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max) | ||
if (Predicate.isNumber(max)) { | ||
c.max = max | ||
} | ||
return numberConstraints(c) | ||
} | ||
case "IntegerConstraints": { | ||
const c: IntegerConstraints["constraints"] = { ...c2.constraints } | ||
const min = getMax(c1.constraints.min, c2.constraints.min) | ||
if (Predicate.isNumber(min)) { | ||
c.min = min | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max) | ||
if (Predicate.isNumber(max)) { | ||
c.max = max | ||
} | ||
return integerConstraints(c) | ||
} | ||
case "NumberConstraints": | ||
return new NumberConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max), | ||
noNaN: getOr(c1.constraints.noNaN, c2.constraints.noNaN), | ||
noDefaultInfinity: getOr(c1.constraints.noDefaultInfinity, c2.constraints.noDefaultInfinity) | ||
}) | ||
case "IntegerConstraints": | ||
return new IntegerConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max) | ||
}) | ||
} | ||
@@ -521,17 +513,7 @@ break | ||
switch (c2._tag) { | ||
case "BigIntConstraints": { | ||
const c: BigIntConstraints["constraints"] = { | ||
...c1.constraints, | ||
...c2.constraints | ||
} | ||
const min = getMax(c1.constraints.min, c2.constraints.min) | ||
if (Predicate.isBigInt(min)) { | ||
c.min = min | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max) | ||
if (Predicate.isBigInt(max)) { | ||
c.max = max | ||
} | ||
return bigintConstraints(c) | ||
} | ||
case "BigIntConstraints": | ||
return new BigIntConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max) | ||
}) | ||
} | ||
@@ -542,17 +524,7 @@ break | ||
switch (c2._tag) { | ||
case "StringConstraints": { | ||
const c: StringConstraints["constraints"] = { | ||
...c1.constraints, | ||
...c2.constraints | ||
} | ||
const minLength = getMax(c1.constraints.minLength, c2.constraints.minLength) | ||
if (Predicate.isNumber(minLength)) { | ||
c.minLength = minLength | ||
} | ||
const maxLength = getMin(c1.constraints.maxLength, c2.constraints.maxLength) | ||
if (Predicate.isNumber(maxLength)) { | ||
c.maxLength = maxLength | ||
} | ||
return stringConstraints(c) | ||
} | ||
case "StringConstraints": | ||
return new StringConstraints({ | ||
minLength: getMax(c1.constraints.minLength, c2.constraints.minLength), | ||
maxLength: getMin(c1.constraints.maxLength, c2.constraints.maxLength) | ||
}) | ||
} | ||
@@ -565,12 +537,6 @@ break | ||
case "IntegerConstraints": { | ||
const c: IntegerConstraints["constraints"] = { ...c1.constraints } | ||
const min = getMax(c1.constraints.min, c2.constraints.min) | ||
if (Predicate.isNumber(min)) { | ||
c.min = min | ||
} | ||
const max = getMin(c1.constraints.max, c2.constraints.max) | ||
if (Predicate.isNumber(max)) { | ||
c.max = max | ||
} | ||
return integerConstraints(c) | ||
return new IntegerConstraints({ | ||
min: getMax(c1.constraints.min, c2.constraints.min), | ||
max: getMin(c1.constraints.max, c2.constraints.max) | ||
}) | ||
} | ||
@@ -583,2 +549,6 @@ } | ||
const getOr = (a: boolean | undefined, b: boolean | undefined): boolean | undefined => { | ||
return a === undefined ? b : b === undefined ? a : a || b | ||
} | ||
function getMax(n1: bigint | undefined, n2: bigint | undefined): bigint | undefined | ||
@@ -585,0 +555,0 @@ function getMax(n1: number | undefined, n2: number | undefined): number | undefined |
@@ -5,5 +5,4 @@ /** | ||
import * as Option from "effect/Option" | ||
import * as ReadonlyArray from "effect/ReadonlyArray" | ||
import * as AST from "./AST.js" | ||
import * as Arr from "effect/Array" | ||
import * as Effect from "effect/Effect" | ||
import type * as ParseResult from "./ParseResult.js" | ||
@@ -17,3 +16,13 @@ import * as TreeFormatter from "./TreeFormatter.js" | ||
export interface Issue { | ||
readonly _tag: ParseResult.ParseIssue["_tag"] | ParseResult.Missing["_tag"] | ParseResult.Unexpected["_tag"] | ||
readonly _tag: | ||
| "Transformation" | ||
| "Type" | ||
| "Declaration" | ||
| "Refinement" | ||
| "TupleType" | ||
| "TypeLiteral" | ||
| "Union" | ||
| "Forbidden" | ||
| "Missing" | ||
| "Unexpected" | ||
readonly path: ReadonlyArray<PropertyKey> | ||
@@ -23,20 +32,58 @@ readonly message: string | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatIssue = (issue: ParseResult.ParseIssue): Effect.Effect<Array<Issue>> => go(issue) | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatIssueSync = (issue: ParseResult.ParseIssue): Array<Issue> => Effect.runSync(formatIssue(issue)) | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatError = (error: ParseResult.ParseError): Effect.Effect<Array<Issue>> => formatIssue(error.error) | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatErrorSync = (error: ParseResult.ParseError): Array<Issue> => formatIssueSync(error.error) | ||
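// A minimal sketch of the output shape produced by this formatter (hypothetical decode failure on a
// struct with a missing `name` and a mistyped `age`; the exact messages depend on the schema's
// message annotations):
//
//   formatIssueSync(issue) ~>
//   [
//     { _tag: "Missing", path: ["name"], message: "is missing" },
//     { _tag: "Type", path: ["age"], message: "Expected a number, actual \"abc\"" }
//   ]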
const succeed = (issue: Issue) => Effect.succeed([issue]) | ||
const getArray = ( | ||
issue: ParseResult.ParseIssue, | ||
path: ReadonlyArray<PropertyKey>, | ||
onFailure: () => Effect.Effect<Array<Issue>> | ||
) => | ||
Effect.matchEffect(TreeFormatter.getMessage(issue), { | ||
onFailure, | ||
onSuccess: (message) => succeed({ _tag: issue._tag, path, message }) | ||
}) | ||
const flatten = (eff: Effect.Effect<Array<Array<Issue>>>): Effect.Effect<Array<Issue>> => Effect.map(eff, Arr.flatten) | ||
const go = ( | ||
e: ParseResult.ParseIssue | ParseResult.Missing | ParseResult.Unexpected, | ||
path: ReadonlyArray<PropertyKey> = [] | ||
): Array<Issue> => { | ||
): Effect.Effect<Array<Issue>> => { | ||
const _tag = e._tag | ||
switch (_tag) { | ||
case "Type": | ||
return [{ _tag, path, message: TreeFormatter.formatTypeMessage(e) }] | ||
return Effect.map(TreeFormatter.formatTypeMessage(e), (message) => [{ _tag, path, message }]) | ||
case "Forbidden": | ||
return [{ _tag, path, message: TreeFormatter.formatForbiddenMessage(e) }] | ||
return succeed({ _tag, path, message: TreeFormatter.formatForbiddenMessage(e) }) | ||
case "Unexpected": | ||
return [{ _tag, path, message: `is unexpected, expected ${AST.format(e.ast, true)}` }] | ||
return succeed({ _tag, path, message: `is unexpected, expected ${e.ast.toString(true)}` }) | ||
case "Missing": | ||
return [{ _tag, path, message: "is missing" }] | ||
return succeed({ _tag, path, message: "is missing" }) | ||
case "Union": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => | ||
ReadonlyArray.flatMap(e.errors, (e) => { | ||
return getArray(e, path, () => | ||
flatten( | ||
Effect.forEach(e.errors, (e) => { | ||
switch (e._tag) { | ||
@@ -48,58 +95,21 @@ case "Member": | ||
} | ||
}), | ||
onSome: (message) => [{ _tag, path, message }] | ||
}) | ||
case "Tuple": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => | ||
ReadonlyArray.flatMap( | ||
e.errors, | ||
(index) => go(index.error, [...path, index.index]) | ||
), | ||
onSome: (message) => [{ _tag, path, message }] | ||
}) | ||
}) | ||
)) | ||
case "TupleType": | ||
return getArray( | ||
e, | ||
path, | ||
() => flatten(Effect.forEach(e.errors, (index) => go(index.error, path.concat(index.index)))) | ||
) | ||
case "TypeLiteral": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => | ||
ReadonlyArray.flatMap( | ||
e.errors, | ||
(key) => go(key.error, [...path, key.key]) | ||
), | ||
onSome: (message) => [{ _tag, path, message }] | ||
}) | ||
case "Transform": | ||
return Option.match(TreeFormatter.getTransformMessage(e, e.actual), { | ||
onNone: () => go(e.error, path), | ||
onSome: (message) => [{ _tag, path, message }] | ||
}) | ||
return getArray( | ||
e, | ||
path, | ||
() => flatten(Effect.forEach(e.errors, (key) => go(key.error, path.concat(key.key)))) | ||
) | ||
case "Declaration": | ||
case "Refinement": | ||
return Option.match(TreeFormatter.getRefinementMessage(e, e.actual), { | ||
onNone: () => go(e.error, path), | ||
onSome: (message) => [{ _tag, path, message }] | ||
}) | ||
case "Declaration": | ||
return Option.match(TreeFormatter.getMessage(e.ast, e.actual), { | ||
onNone: () => go(e.error, path), | ||
onSome: (message) => [{ _tag, path, message }] | ||
}) | ||
case "Transformation": | ||
return getArray(e, path, () => go(e.error, path)) | ||
} | ||
} | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatIssues = (issues: ReadonlyArray.NonEmptyReadonlyArray<ParseResult.ParseIssue>): Array<Issue> => | ||
ReadonlyArray.flatMap(issues, (e) => go(e)) | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatIssue = (error: ParseResult.ParseIssue): Array<Issue> => formatIssues([error]) | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatError = (error: ParseResult.ParseError): Array<Issue> => formatIssue(error.error) |
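For orientation, a minimal usage sketch of the reworked ArrayFormatter (the `Person` schema and the `Schema.struct`/`Schema.string`/`Schema.number` constructors are assumptions for illustration; `decodeUnknownEither` and `formatIssueSync` are taken from the diffs in this comparison):

import * as ArrayFormatter from "@effect/schema/ArrayFormatter"
import * as ParseResult from "@effect/schema/ParseResult"
import * as Schema from "@effect/schema/Schema"
import * as Either from "effect/Either"

// hypothetical schema, used only to produce a ParseIssue
const Person = Schema.struct({ name: Schema.string, age: Schema.number })

const result = ParseResult.decodeUnknownEither(Person)({ name: 1 })
if (Either.isLeft(result)) {
  // each issue carries a _tag, a path and a human-readable message
  console.log(ArrayFormatter.formatIssueSync(result.left))
  // e.g. [{ _tag: "Type", path: ["name"], message: "..." }]
}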
@@ -5,11 +5,11 @@ /** | ||
import * as Arr from "effect/Array" | ||
import * as Equal from "effect/Equal" | ||
import * as Equivalence from "effect/Equivalence" | ||
import * as Option from "effect/Option" | ||
import * as Predicate from "effect/Predicate" | ||
import * as ReadonlyArray from "effect/ReadonlyArray" | ||
import * as AST from "./AST.js" | ||
import * as Internal from "./internal/ast.js" | ||
import * as hooks from "./internal/hooks.js" | ||
import * as InternalSchema from "./internal/schema.js" | ||
import * as Parser from "./Parser.js" | ||
import * as errors_ from "./internal/errors.js" | ||
import * as util_ from "./internal/util.js" | ||
import * as ParseResult from "./ParseResult.js" | ||
import type * as Schema from "./Schema.js" | ||
@@ -21,3 +21,3 @@ | ||
*/ | ||
export const EquivalenceHookId: unique symbol = hooks.EquivalenceHookId | ||
export const EquivalenceHookId: unique symbol = Symbol.for("@effect/schema/EquivalenceHookId") | ||
@@ -36,4 +36,3 @@ /** | ||
<A>(handler: (...args: ReadonlyArray<Equivalence.Equivalence<any>>) => Equivalence.Equivalence<A>) => | ||
<I, R>(self: Schema.Schema<A, I, R>): Schema.Schema<A, I, R> => | ||
InternalSchema.make(AST.setAnnotation(self.ast, EquivalenceHookId, handler)) | ||
<I, R>(self: Schema.Schema<A, I, R>): Schema.Schema<A, I, R> => self.annotations({ [EquivalenceHookId]: handler }) | ||
@@ -44,3 +43,3 @@ /** | ||
*/ | ||
export const make = <A, I, R>(schema: Schema.Schema<A, I, R>): Equivalence.Equivalence<A> => go(schema.ast) | ||
export const make = <A, I, R>(schema: Schema.Schema<A, I, R>): Equivalence.Equivalence<A> => go(schema.ast, []) | ||
@@ -53,3 +52,6 @@ const getHook = AST.getAnnotation< | ||
const go = (ast: AST.AST): Equivalence.Equivalence<any> => { | ||
const getEquivalenceErrorMessage = (message: string, path: ReadonlyArray<PropertyKey>) => | ||
errors_.getErrorMessageWithPath(`cannot build an Equivalence for ${message}`, path) | ||
const go = (ast: AST.AST, path: ReadonlyArray<PropertyKey>): Equivalence.Equivalence<any> => { | ||
const hook = getHook(ast) | ||
@@ -59,5 +61,5 @@ if (Option.isSome(hook)) { | ||
case "Declaration": | ||
return hook.value(...ast.typeParameters.map(go)) | ||
return hook.value(...ast.typeParameters.map((tp) => go(tp, path))) | ||
case "Refinement": | ||
return hook.value(go(ast.from)) | ||
return hook.value(go(ast.from, path)) | ||
default: | ||
@@ -69,5 +71,5 @@ return hook.value() | ||
case "NeverKeyword": | ||
throw new Error("cannot build an Equivalence for `never`") | ||
case "Transform": | ||
return go(ast.to) | ||
throw new Error(getEquivalenceErrorMessage("`never`", path)) | ||
case "Transformation": | ||
return go(ast.to, path) | ||
case "Declaration": | ||
@@ -88,12 +90,12 @@ case "Literal": | ||
case "ObjectKeyword": | ||
return Equivalence.strict() | ||
return Equal.equals | ||
case "Refinement": | ||
return go(ast.from) | ||
return go(ast.from, path) | ||
case "Suspend": { | ||
const get = Internal.memoizeThunk(() => go(ast.f())) | ||
const get = util_.memoizeThunk(() => go(ast.f(), path)) | ||
return (a, b) => get()(a, b) | ||
} | ||
case "Tuple": { | ||
const elements = ast.elements.map((element) => go(element.type)) | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(go)) | ||
case "TupleType": { | ||
const elements = ast.elements.map((element, i) => go(element.type, path.concat(i))) | ||
const rest = ast.rest.map((ast) => go(ast, path)) | ||
return Equivalence.make((a, b) => { | ||
@@ -116,4 +118,4 @@ const len = a.length | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const [head, ...tail] = rest.value | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest | ||
for (; i < len - tail.length; i++) { | ||
@@ -139,6 +141,6 @@ if (!head(a[i], b[i])) { | ||
if (ast.propertySignatures.length === 0 && ast.indexSignatures.length === 0) { | ||
return Equivalence.strict() | ||
return Equal.equals | ||
} | ||
const propertySignatures = ast.propertySignatures.map((ps) => go(ps.type)) | ||
const indexSignatures = ast.indexSignatures.map((is) => go(is.type)) | ||
const propertySignatures = ast.propertySignatures.map((ps) => go(ps.type, path.concat(ps.name))) | ||
const indexSignatures = ast.indexSignatures.map((is) => go(is.type, path)) | ||
return Equivalence.make((a, b) => { | ||
@@ -198,4 +200,4 @@ const aStringKeys = Object.keys(a) | ||
case "Union": { | ||
const searchTree = Parser.getSearchTree(ast.types, true) | ||
const ownKeys = Internal.ownKeys(searchTree.keys) | ||
const searchTree = ParseResult.getSearchTree(ast.types, true) | ||
const ownKeys = util_.ownKeys(searchTree.keys) | ||
const len = ownKeys.length | ||
@@ -219,3 +221,3 @@ return Equivalence.make((a, b) => { | ||
} | ||
const tuples = candidates.map((ast) => [go(ast), Parser.is(InternalSchema.make(ast))] as const) | ||
const tuples = candidates.map((ast) => [go(ast, path), ParseResult.is({ ast } as any)] as const) | ||
for (let i = 0; i < tuples.length; i++) { | ||
@@ -222,0 +224,0 @@ const [equivalence, is] = tuples[i] |
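As a quick illustration of the `make` entry point above, a minimal sketch (the `Point` schema and the `Schema.struct`/`Schema.number` constructors are assumptions for illustration):

import * as Equivalence from "@effect/schema/Equivalence"
import * as Schema from "@effect/schema/Schema"

// hypothetical schema; make derives an Equivalence<A> by walking the schema's AST
const Point = Schema.struct({ x: Schema.number, y: Schema.number })
const pointEquivalence = Equivalence.make(Point)

pointEquivalence({ x: 1, y: 2 }, { x: 1, y: 2 }) // => true
pointEquivalence({ x: 1, y: 2 }, { x: 1, y: 3 }) // => false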
@@ -24,3 +24,3 @@ /** | ||
*/ | ||
export * as Format from "./Format.js" | ||
export * as FastCheck from "./FastCheck.js" | ||
@@ -40,7 +40,2 @@ /** | ||
*/ | ||
export * as Parser from "./Parser.js" | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export * as Pretty from "./Pretty.js" | ||
@@ -47,0 +42,0 @@ |
@@ -1,4 +0,1 @@ | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
import type * as Schema from "../Schema.js" | ||
@@ -37,25 +34,25 @@ | ||
/** @internal */ | ||
export const GreaterThanBigintTypeId: Schema.GreaterThanBigintTypeId = Symbol.for( | ||
export const GreaterThanBigintTypeId: Schema.GreaterThanBigIntTypeId = Symbol.for( | ||
"@effect/schema/TypeId/GreaterThanBigint" | ||
) as Schema.GreaterThanBigintTypeId | ||
) as Schema.GreaterThanBigIntTypeId | ||
/** @internal */ | ||
export const GreaterThanOrEqualToBigintTypeId: Schema.GreaterThanOrEqualToBigintTypeId = Symbol.for( | ||
export const GreaterThanOrEqualToBigIntTypeId: Schema.GreaterThanOrEqualToBigIntTypeId = Symbol.for( | ||
"@effect/schema/TypeId/GreaterThanOrEqualToBigint" | ||
) as Schema.GreaterThanOrEqualToBigintTypeId | ||
) as Schema.GreaterThanOrEqualToBigIntTypeId | ||
/** @internal */ | ||
export const LessThanBigintTypeId: Schema.LessThanBigintTypeId = Symbol.for( | ||
export const LessThanBigIntTypeId: Schema.LessThanBigIntTypeId = Symbol.for( | ||
"@effect/schema/TypeId/LessThanBigint" | ||
) as Schema.LessThanBigintTypeId | ||
) as Schema.LessThanBigIntTypeId | ||
/** @internal */ | ||
export const LessThanOrEqualToBigintTypeId: Schema.LessThanOrEqualToBigintTypeId = Symbol.for( | ||
export const LessThanOrEqualToBigIntTypeId: Schema.LessThanOrEqualToBigIntTypeId = Symbol.for( | ||
"@effect/schema/TypeId/LessThanOrEqualToBigint" | ||
) as Schema.LessThanOrEqualToBigintTypeId | ||
) as Schema.LessThanOrEqualToBigIntTypeId | ||
/** @internal */ | ||
export const BetweenBigintTypeId: Schema.BetweenBigintTypeId = Symbol.for( | ||
export const BetweenBigintTypeId: Schema.BetweenBigIntTypeId = Symbol.for( | ||
"@effect/schema/TypeId/BetweenBigint" | ||
) as Schema.BetweenBigintTypeId | ||
) as Schema.BetweenBigIntTypeId | ||
@@ -62,0 +59,0 @@ /** @internal */ |
@@ -7,5 +7,5 @@ /** | ||
import * as Predicate from "effect/Predicate" | ||
import * as ReadonlyArray from "effect/ReadonlyArray" | ||
import * as ReadonlyRecord from "effect/ReadonlyRecord" | ||
import * as Record from "effect/Record" | ||
import * as AST from "./AST.js" | ||
import * as errors_ from "./internal/errors.js" | ||
import type * as Schema from "./Schema.js" | ||
@@ -17,3 +17,14 @@ | ||
*/ | ||
export interface JsonSchema7Any { | ||
export interface JsonSchemaAnnotations { | ||
title?: string | ||
description?: string | ||
default?: unknown | ||
examples?: Array<unknown> | ||
} | ||
/** | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface JsonSchema7Any extends JsonSchemaAnnotations { | ||
$id: "/schemas/any" | ||
@@ -26,3 +37,3 @@ } | ||
*/ | ||
export interface JsonSchema7Unknown { | ||
export interface JsonSchema7Unknown extends JsonSchemaAnnotations { | ||
$id: "/schemas/unknown" | ||
@@ -35,3 +46,3 @@ } | ||
*/ | ||
export interface JsonSchema7object { | ||
export interface JsonSchema7object extends JsonSchemaAnnotations { | ||
$id: "/schemas/object" | ||
@@ -48,3 +59,3 @@ oneOf: [ | ||
*/ | ||
export interface JsonSchema7empty { | ||
export interface JsonSchema7empty extends JsonSchemaAnnotations { | ||
$id: "/schemas/{}" | ||
@@ -61,3 +72,3 @@ oneOf: [ | ||
*/ | ||
export interface JsonSchema7Ref { | ||
export interface JsonSchema7Ref extends JsonSchemaAnnotations { | ||
$ref: string | ||
@@ -70,3 +81,3 @@ } | ||
*/ | ||
export interface JsonSchema7Const { | ||
export interface JsonSchema7Const extends JsonSchemaAnnotations { | ||
const: AST.LiteralValue | ||
@@ -79,3 +90,3 @@ } | ||
*/ | ||
export interface JsonSchema7String { | ||
export interface JsonSchema7String extends JsonSchemaAnnotations { | ||
type: "string" | ||
@@ -85,3 +96,2 @@ minLength?: number | ||
pattern?: string | ||
description?: string | ||
} | ||
@@ -93,3 +103,3 @@ | ||
*/ | ||
export interface JsonSchema7Numeric { | ||
export interface JsonSchema7Numeric extends JsonSchemaAnnotations { | ||
minimum?: number | ||
@@ -121,3 +131,3 @@ exclusiveMinimum?: number | ||
*/ | ||
export interface JsonSchema7Boolean { | ||
export interface JsonSchema7Boolean extends JsonSchemaAnnotations { | ||
type: "boolean" | ||
@@ -130,3 +140,3 @@ } | ||
*/ | ||
export interface JsonSchema7Array { | ||
export interface JsonSchema7Array extends JsonSchemaAnnotations { | ||
type: "array" | ||
@@ -143,3 +153,3 @@ items?: JsonSchema7 | Array<JsonSchema7> | ||
*/ | ||
export interface JsonSchema7OneOf { | ||
export interface JsonSchema7OneOf extends JsonSchemaAnnotations { | ||
oneOf: Array<JsonSchema7> | ||
@@ -152,3 +162,3 @@ } | ||
*/ | ||
export interface JsonSchema7Enum { | ||
export interface JsonSchema7Enum extends JsonSchemaAnnotations { | ||
enum: Array<AST.LiteralValue> | ||
@@ -161,3 +171,3 @@ } | ||
*/ | ||
export interface JsonSchema7Enums { | ||
export interface JsonSchema7Enums extends JsonSchemaAnnotations { | ||
$comment: "/schemas/enums" | ||
@@ -174,3 +184,3 @@ oneOf: Array<{ | ||
*/ | ||
export interface JsonSchema7AnyOf { | ||
export interface JsonSchema7AnyOf extends JsonSchemaAnnotations { | ||
anyOf: Array<JsonSchema7> | ||
@@ -183,3 +193,3 @@ } | ||
*/ | ||
export interface JsonSchema7Object { | ||
export interface JsonSchema7Object extends JsonSchemaAnnotations { | ||
type: "object" | ||
@@ -227,3 +237,20 @@ required: Array<string> | ||
*/ | ||
export const make = <A, I, R>(schema: Schema.Schema<A, I, R>): JsonSchema7Root => goRoot(schema.ast) | ||
export const make = <A, I, R>(schema: Schema.Schema<A, I, R>): JsonSchema7Root => { | ||
const $defs: Record<string, any> = {} | ||
const jsonSchema = go(schema.ast, $defs, true, []) | ||
const out: JsonSchema7Root = { | ||
$schema, | ||
...jsonSchema | ||
} | ||
// clean up self-referencing entries | ||
for (const id in $defs) { | ||
if ($defs[id]["$ref"] === get$ref(id)) { | ||
delete $defs[id] | ||
} | ||
} | ||
if (!Record.isEmptyRecord($defs)) { | ||
out.$defs = $defs | ||
} | ||
return out | ||
} | ||
@@ -242,3 +269,3 @@ const anyJsonSchema: JsonSchema7 = { $id: "/schemas/any" } | ||
const emptyJsonSchema: JsonSchema7 = { | ||
const empty = (): JsonSchema7 => ({ | ||
"$id": "/schemas/{}", | ||
@@ -249,37 +276,8 @@ "oneOf": [ | ||
] | ||
} | ||
}) | ||
const $schema = "http://json-schema.org/draft-07/schema#" | ||
/** @internal */ | ||
export const goRoot = (ast: AST.AST): JsonSchema7Root => { | ||
const $defs = {} | ||
const jsonSchema = goWithIdentifier(ast, $defs) | ||
const out: JsonSchema7Root = { | ||
$schema, | ||
...jsonSchema | ||
} | ||
if (!ReadonlyRecord.isEmptyRecord($defs)) { | ||
out.$defs = $defs | ||
} | ||
return out | ||
} | ||
const goWithIdentifier = (ast: AST.AST, $defs: Record<string, JsonSchema7>): JsonSchema7 => { | ||
const identifier = AST.getIdentifierAnnotation(ast) | ||
return Option.match(identifier, { | ||
onNone: () => goWithMetaData(ast, $defs), | ||
onSome: (id) => { | ||
const out = { $ref: `${DEFINITION_PREFIX}${id}` } | ||
if (!ReadonlyRecord.has($defs, id)) { | ||
$defs[id] = out | ||
$defs[id] = goWithMetaData(ast, $defs) | ||
} | ||
return out | ||
} | ||
}) | ||
} | ||
const getMetaData = (annotated: AST.Annotated) => | ||
ReadonlyRecord.getSomes({ | ||
const getJsonSchemaAnnotations = (annotated: AST.Annotated): JsonSchemaAnnotations => | ||
Record.getSomes({ | ||
description: AST.getDescriptionAnnotation(annotated), | ||
@@ -291,68 +289,119 @@ title: AST.getTitleAnnotation(annotated), | ||
const goWithMetaData = (ast: AST.AST, $defs: Record<string, JsonSchema7>): JsonSchema7 => { | ||
return { | ||
...go(ast, $defs), | ||
...getMetaData(ast) | ||
const pruneUndefinedKeyword = (ps: AST.PropertySignature): AST.AST => { | ||
const type = ps.type | ||
if (ps.isOptional && AST.isUnion(type) && Option.isNone(AST.getJSONSchemaAnnotation(type))) { | ||
return AST.Union.make(type.types.filter((type) => !AST.isUndefinedKeyword(type)), type.annotations) | ||
} | ||
return type | ||
} | ||
const getMissingAnnotationErrorMessage = (name: string, path: ReadonlyArray<PropertyKey>): string => | ||
errors_.getErrorMessageWithPath(`cannot build a JSON Schema for ${name} without a JSON Schema annotation`, path) | ||
const getUnsupportedIndexSignatureParameterErrorMessage = ( | ||
parameter: AST.AST, | ||
path: ReadonlyArray<PropertyKey> | ||
): string => errors_.getErrorMessageWithPath(`unsupported index signature parameter (${parameter})`, path) | ||
/** @internal */ | ||
export const DEFINITION_PREFIX = "#/$defs/" | ||
const go = (ast: AST.AST, $defs: Record<string, JsonSchema7>): JsonSchema7 => { | ||
const get$ref = (id: string): string => `${DEFINITION_PREFIX}${id}` | ||
const hasTransformation = (ast: AST.Refinement): boolean => { | ||
switch (ast.from._tag) { | ||
case "Transformation": | ||
return true | ||
case "Refinement": | ||
return hasTransformation(ast.from) | ||
case "Suspend": | ||
{ | ||
const from = ast.from.f() | ||
if (AST.isRefinement(from)) { | ||
return hasTransformation(from) | ||
} | ||
} | ||
break | ||
} | ||
return false | ||
} | ||
const go = ( | ||
ast: AST.AST, | ||
$defs: Record<string, JsonSchema7>, | ||
handleIdentifier: boolean, | ||
path: ReadonlyArray<PropertyKey> | ||
): JsonSchema7 => { | ||
const hook = AST.getJSONSchemaAnnotation(ast) | ||
if (Option.isSome(hook)) { | ||
switch (ast._tag) { | ||
case "Refinement": | ||
return { ...goWithIdentifier(ast.from, $defs), ...hook.value } | ||
const handler = hook.value as JsonSchema7 | ||
if (AST.isRefinement(ast) && !hasTransformation(ast)) { | ||
try { | ||
return { ...go(ast.from, $defs, true, path), ...getJsonSchemaAnnotations(ast), ...handler } | ||
} catch (e) { | ||
return { ...getJsonSchemaAnnotations(ast), ...handler } | ||
} | ||
} | ||
return hook.value as any | ||
return handler | ||
} | ||
const surrogate = AST.getSurrogateAnnotation(ast) | ||
if (Option.isSome(surrogate)) { | ||
return go(surrogate.value, $defs, handleIdentifier, path) | ||
} | ||
if (handleIdentifier && !AST.isTransformation(ast)) { | ||
const identifier = AST.getJSONIdentifier(ast) | ||
if (Option.isSome(identifier)) { | ||
const id = identifier.value | ||
const out = { $ref: get$ref(id) } | ||
if (!Record.has($defs, id)) { | ||
$defs[id] = out | ||
$defs[id] = go(ast, $defs, false, path) | ||
} | ||
return out | ||
} | ||
} | ||
switch (ast._tag) { | ||
case "Declaration": { | ||
throw new Error("cannot build a JSON Schema for a declaration without a JSON Schema annotation") | ||
} | ||
case "Declaration": | ||
throw new Error(getMissingAnnotationErrorMessage("a declaration", path)) | ||
case "Literal": { | ||
const literal = ast.literal | ||
if (literal === null) { | ||
return { const: null } | ||
return { const: null, ...getJsonSchemaAnnotations(ast) } | ||
} else if (Predicate.isString(literal)) { | ||
return { const: literal } | ||
return { const: literal, ...getJsonSchemaAnnotations(ast) } | ||
} else if (Predicate.isNumber(literal)) { | ||
return { const: literal } | ||
return { const: literal, ...getJsonSchemaAnnotations(ast) } | ||
} else if (Predicate.isBoolean(literal)) { | ||
return { const: literal } | ||
return { const: literal, ...getJsonSchemaAnnotations(ast) } | ||
} | ||
throw new Error("cannot build a JSON Schema for a bigint literal without a JSON Schema annotation") | ||
throw new Error(getMissingAnnotationErrorMessage("a bigint literal", path)) | ||
} | ||
case "UniqueSymbol": | ||
throw new Error("cannot build a JSON Schema for a unique symbol without a JSON Schema annotation") | ||
throw new Error(getMissingAnnotationErrorMessage("a unique symbol", path)) | ||
case "UndefinedKeyword": | ||
throw new Error("cannot build a JSON Schema for `undefined` without a JSON Schema annotation") | ||
throw new Error(getMissingAnnotationErrorMessage("`undefined`", path)) | ||
case "VoidKeyword": | ||
throw new Error("cannot build a JSON Schema for `void` without a JSON Schema annotation") | ||
throw new Error(getMissingAnnotationErrorMessage("`void`", path)) | ||
case "NeverKeyword": | ||
throw new Error("cannot build a JSON Schema for `never` without a JSON Schema annotation") | ||
throw new Error(getMissingAnnotationErrorMessage("`never`", path)) | ||
case "UnknownKeyword": | ||
return { ...unknownJsonSchema } | ||
return { ...unknownJsonSchema, ...getJsonSchemaAnnotations(ast) } | ||
case "AnyKeyword": | ||
return { ...anyJsonSchema } | ||
return { ...anyJsonSchema, ...getJsonSchemaAnnotations(ast) } | ||
case "ObjectKeyword": | ||
return { ...objectJsonSchema } | ||
return { ...objectJsonSchema, ...getJsonSchemaAnnotations(ast) } | ||
case "StringKeyword": | ||
return { type: "string" } | ||
return { type: "string", ...getJsonSchemaAnnotations(ast) } | ||
case "NumberKeyword": | ||
return { type: "number" } | ||
return { type: "number", ...getJsonSchemaAnnotations(ast) } | ||
case "BooleanKeyword": | ||
return { type: "boolean" } | ||
return { type: "boolean", ...getJsonSchemaAnnotations(ast) } | ||
case "BigIntKeyword": | ||
throw new Error("cannot build a JSON Schema for `bigint` without a JSON Schema annotation") | ||
throw new Error(getMissingAnnotationErrorMessage("`bigint`", path)) | ||
case "SymbolKeyword": | ||
throw new Error("cannot build a JSON Schema for `symbol` without a JSON Schema annotation") | ||
case "Tuple": { | ||
const elements = ast.elements.map((e) => goWithIdentifier(e.type, $defs)) | ||
const rest = Option.map( | ||
ast.rest, | ||
ReadonlyArray.map((ast) => goWithIdentifier(ast, $defs)) | ||
) | ||
throw new Error(getMissingAnnotationErrorMessage("`symbol`", path)) | ||
case "TupleType": { | ||
const len = ast.elements.length | ||
const elements = ast.elements.map((e, i) => go(e.type, $defs, true, path.concat(i))) | ||
const rest = ast.rest.map((ast) => go(ast, $defs, true, path)) | ||
const output: JsonSchema7Array = { type: "array" } | ||
@@ -362,3 +411,2 @@ // --------------------------------------------- | ||
// --------------------------------------------- | ||
const len = elements.length | ||
if (len > 0) { | ||
@@ -371,4 +419,4 @@ output.minItems = len - ast.elements.filter((element) => element.isOptional).length | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const head = rest.value[0] | ||
if (rest.length > 0) { | ||
const head = rest[0] | ||
if (len > 0) { | ||
@@ -383,5 +431,8 @@ output.additionalItems = head | ||
// --------------------------------------------- | ||
if (rest.value.length > 1) { | ||
if (rest.length > 1) { | ||
throw new Error( | ||
"Generating a JSON Schema for post-rest elements is not currently supported. You're welcome to contribute by submitting a Pull Request." | ||
errors_.getErrorMessageWithPath( | ||
"Generating a JSON Schema for post-rest elements is not currently supported. You're welcome to contribute by submitting a Pull Request.", | ||
path | ||
) | ||
) | ||
@@ -397,7 +448,7 @@ } | ||
return output | ||
return { ...output, ...getJsonSchemaAnnotations(ast) } | ||
} | ||
case "TypeLiteral": { | ||
if (ast.propertySignatures.length === 0 && ast.indexSignatures.length === 0) { | ||
return { ...emptyJsonSchema } | ||
return { ...empty(), ...getJsonSchemaAnnotations(ast) } | ||
} | ||
@@ -410,3 +461,3 @@ let additionalProperties: JsonSchema7 | undefined = undefined | ||
case "StringKeyword": { | ||
additionalProperties = goWithIdentifier(is.type, $defs) | ||
additionalProperties = go(is.type, $defs, true, path) | ||
break | ||
@@ -416,6 +467,3 @@ } | ||
patternProperties = { | ||
[AST.getTemplateLiteralRegex(parameter).source]: goWithIdentifier( | ||
is.type, | ||
$defs | ||
) | ||
[AST.getTemplateLiteralRegExp(parameter).source]: go(is.type, $defs, true, path) | ||
} | ||
@@ -431,17 +479,17 @@ break | ||
patternProperties = { | ||
[hook.value.pattern]: goWithIdentifier( | ||
is.type, | ||
$defs | ||
) | ||
[hook.value.pattern]: go(is.type, $defs, true, path) | ||
} | ||
break | ||
} | ||
throw new Error(`Unsupported index signature parameter (${AST.format(parameter)})`) | ||
throw new Error(getUnsupportedIndexSignatureParameterErrorMessage(parameter, path)) | ||
} | ||
case "SymbolKeyword": | ||
throw new Error(`Unsupported index signature parameter (${AST.format(parameter)})`) | ||
throw new Error(getUnsupportedIndexSignatureParameterErrorMessage(parameter, path)) | ||
} | ||
} | ||
const propertySignatures = ast.propertySignatures.map((ps) => { | ||
return { ...goWithIdentifier(ps.type, $defs), ...getMetaData(ps) } | ||
return { | ||
...go(pruneUndefinedKeyword(ps), $defs, true, path.concat(ps.name)), | ||
...getJsonSchemaAnnotations(ps) | ||
} | ||
}) | ||
@@ -459,3 +507,3 @@ const output: JsonSchema7Object = { | ||
const name = ast.propertySignatures[i].name | ||
if (typeof name === "string") { | ||
if (Predicate.isString(name)) { | ||
output.properties[name] = propertySignatures[i] | ||
@@ -469,3 +517,3 @@ // --------------------------------------------- | ||
} else { | ||
throw new Error(`Cannot encode ${String(name)} key to JSON Schema`) | ||
throw new Error(errors_.getErrorMessageWithPath(`cannot encode ${String(name)} key to JSON Schema`, path)) | ||
} | ||
@@ -483,3 +531,3 @@ } | ||
return output | ||
return { ...output, ...getJsonSchemaAnnotations(ast) } | ||
} | ||
@@ -490,3 +538,3 @@ case "Union": { | ||
for (const type of ast.types) { | ||
const schema = goWithIdentifier(type, $defs) | ||
const schema = go(type, $defs, true, path) | ||
if ("const" in schema) { | ||
@@ -504,5 +552,5 @@ if (Object.keys(schema).length > 1) { | ||
if (enums.length === 1) { | ||
return { const: enums[0] } | ||
return { const: enums[0], ...getJsonSchemaAnnotations(ast) } | ||
} else { | ||
return { enum: enums } | ||
return { enum: enums, ...getJsonSchemaAnnotations(ast) } | ||
} | ||
@@ -515,3 +563,3 @@ } else { | ||
} | ||
return { anyOf } | ||
return { anyOf, ...getJsonSchemaAnnotations(ast) } | ||
} | ||
@@ -522,28 +570,38 @@ } | ||
$comment: "/schemas/enums", | ||
oneOf: ast.enums.map((e) => ({ title: e[0], const: e[1] })) | ||
oneOf: ast.enums.map((e) => ({ title: e[0], const: e[1] })), | ||
...getJsonSchemaAnnotations(ast) | ||
} | ||
} | ||
case "Refinement": { | ||
throw new Error("cannot build a JSON Schema for a refinement without a JSON Schema annotation") | ||
throw new Error( | ||
errors_.getErrorMessageWithPath( | ||
"cannot build a JSON Schema for a refinement without a JSON Schema annotation", | ||
path | ||
) | ||
) | ||
} | ||
case "TemplateLiteral": { | ||
const regex = AST.getTemplateLiteralRegex(ast) | ||
const regex = AST.getTemplateLiteralRegExp(ast) | ||
return { | ||
type: "string", | ||
description: "a template literal", | ||
pattern: regex.source | ||
pattern: regex.source, | ||
...getJsonSchemaAnnotations(ast) | ||
} | ||
} | ||
case "Suspend": { | ||
const identifier = Option.orElse(AST.getIdentifierAnnotation(ast), () => AST.getIdentifierAnnotation(ast.f())) | ||
const identifier = Option.orElse(AST.getJSONIdentifier(ast), () => AST.getJSONIdentifier(ast.f())) | ||
if (Option.isNone(identifier)) { | ||
throw new Error( | ||
"Generating a JSON Schema for suspended schemas requires an identifier annotation" | ||
errors_.getErrorMessageWithPath( | ||
"Generating a JSON Schema for suspended schemas requires an identifier annotation", | ||
path | ||
) | ||
) | ||
} | ||
return goWithIdentifier(ast.f(), $defs) | ||
return go(ast.f(), $defs, true, path) | ||
} | ||
case "Transform": | ||
return goWithIdentifier(ast.to, $defs) | ||
case "Transformation": | ||
return go(ast.from, $defs, true, path) | ||
} | ||
} |
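A rough sketch of what the reworked `make` produces (the `Person` schema and its constructors are assumptions for illustration; the `$schema`/`$defs` handling follows the code above, and the exact output shape may differ):

import * as JSONSchema from "@effect/schema/JSONSchema"
import * as Schema from "@effect/schema/Schema"

// hypothetical schema for illustration
const Person = Schema.struct({ name: Schema.string, age: Schema.number })

const jsonSchema = JSONSchema.make(Person)
// roughly:
// {
//   "$schema": "http://json-schema.org/draft-07/schema#",
//   "type": "object",
//   "required": ["name", "age"],
//   "properties": {
//     "name": { "type": "string" },
//     "age": { "type": "number" }
//   },
//   "additionalProperties": false
// }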
@@ -5,76 +5,19 @@ /** | ||
import * as Arr from "effect/Array" | ||
import { TaggedError } from "effect/Data" | ||
import * as Effect from "effect/Effect" | ||
import * as Either from "effect/Either" | ||
import type { LazyArg } from "effect/Function" | ||
import { dual } from "effect/Function" | ||
import type { LazyArg } from "effect/Function" | ||
import { globalValue } from "effect/GlobalValue" | ||
import * as Inspectable from "effect/Inspectable" | ||
import type * as Option from "effect/Option" | ||
import type * as ReadonlyArray from "effect/ReadonlyArray" | ||
import type * as AST from "./AST.js" | ||
import * as InternalParser from "./internal/parser.js" | ||
import * as Option from "effect/Option" | ||
import * as Predicate from "effect/Predicate" | ||
import type { Concurrency, Mutable } from "effect/Types" | ||
import * as AST from "./AST.js" | ||
import * as util_ from "./internal/util.js" | ||
import type * as Schema from "./Schema.js" | ||
import * as TreeFormatter from "./TreeFormatter.js" | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
export class ParseError extends TaggedError("ParseError")<{ readonly error: ParseIssue }> { | ||
get message() { | ||
return this.toString() | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString() { | ||
return TreeFormatter.formatIssue(this.error) | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_id: "ParseError", | ||
message: this.toString() | ||
} | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
[Inspectable.NodeInspectSymbol]() { | ||
return this.toJSON() | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const parseError = (issue: ParseIssue): ParseError => new ParseError({ error: issue }) | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const succeed: <A>(a: A) => Either.Either<ParseIssue, A> = Either.right | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const fail: (issue: ParseIssue) => Either.Either<ParseIssue, never> = Either.left | ||
const _try: <A>(options: { | ||
try: LazyArg<A> | ||
catch: (e: unknown) => ParseIssue | ||
}) => Either.Either<ParseIssue, A> = Either.try | ||
export { | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
_try as try | ||
} | ||
/** | ||
* `ParseIssue` is a type that represents the different types of errors that can occur when decoding/encoding a value. | ||
@@ -88,6 +31,6 @@ * | ||
| Refinement | ||
| Tuple | ||
| TupleType | ||
| TypeLiteral | ||
| Union | ||
| Transform | ||
| Transformation | ||
| Type | ||
@@ -97,3 +40,3 @@ | Forbidden | ||
/** | ||
* Error that occurs when a transformation has an error. | ||
* Error that occurs when a declaration has an error. | ||
* | ||
@@ -103,19 +46,31 @@ * @category model | ||
*/ | ||
export interface Transform { | ||
readonly _tag: "Transform" | ||
readonly ast: AST.Transform | ||
readonly actual: unknown | ||
readonly kind: "From" | "Transformation" | "To" | ||
readonly error: ParseIssue | ||
export class Declaration { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Declaration" | ||
constructor(readonly ast: AST.Declaration, readonly actual: unknown, readonly error: ParseIssue) {} | ||
} | ||
/** | ||
* @category constructors | ||
* Error that occurs when a refinement has an error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const transform = InternalParser.transform | ||
export class Refinement { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Refinement" | ||
constructor( | ||
readonly ast: AST.Refinement<AST.AST>, | ||
readonly actual: unknown, | ||
readonly kind: "From" | "Predicate", | ||
readonly error: ParseIssue | ||
) {} | ||
} | ||
/** | ||
* The `Type` variant of the `ParseIssue` type represents an error that occurs when the `actual` value is not of the expected type. | ||
* The `ast` field specifies the expected type, and the `actual` field contains the value that caused the error. | ||
* Error that occurs when an array or tuple has an error. | ||
* | ||
@@ -125,17 +80,31 @@ * @category model | ||
*/ | ||
export interface Type { | ||
readonly _tag: "Type" | ||
readonly ast: AST.AST | ||
readonly actual: unknown | ||
readonly message: Option.Option<string> | ||
export class TupleType { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "TupleType" | ||
constructor( | ||
readonly ast: AST.TupleType, | ||
readonly actual: unknown, | ||
readonly errors: Arr.NonEmptyReadonlyArray<Index>, | ||
readonly output: ReadonlyArray<unknown> = [] | ||
) {} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Index` error indicates that there was an error at a specific index in an array or tuple. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const type = InternalParser.type | ||
export class Index { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Index" | ||
constructor(readonly index: number, readonly error: ParseIssue | Missing | Unexpected) {} | ||
} | ||
/** | ||
 * The `Forbidden` variant of the `ParseIssue` type represents an error that occurs when an Effect is encountered but disallowed from execution. | ||
* Error that occurs when a type literal or record has an error. | ||
* | ||
@@ -145,17 +114,31 @@ * @category model | ||
*/ | ||
export interface Forbidden { | ||
readonly _tag: "Forbidden" | ||
readonly ast: AST.AST | ||
readonly actual: unknown | ||
readonly message: Option.Option<string> | ||
export class TypeLiteral { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "TypeLiteral" | ||
constructor( | ||
readonly ast: AST.TypeLiteral, | ||
readonly actual: unknown, | ||
readonly errors: Arr.NonEmptyReadonlyArray<Key>, | ||
readonly output: { readonly [x: string]: unknown } = {} | ||
) {} | ||
} | ||
/** | ||
* @category constructors | ||
* The `Key` variant of the `ParseIssue` type represents an error that occurs when a key in a type literal or record is invalid. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export const forbidden = InternalParser.forbidden | ||
export class Key { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Key" | ||
constructor(readonly key: PropertyKey, readonly error: ParseIssue | Missing | Unexpected) {} | ||
} | ||
/** | ||
* Error that occurs when a declaration has an error. | ||
* Error that occurs when an unexpected key or index is present. | ||
* | ||
@@ -165,17 +148,12 @@ * @category model | ||
*/ | ||
export interface Declaration { | ||
readonly _tag: "Declaration" | ||
readonly ast: AST.Declaration | ||
readonly actual: unknown | ||
readonly error: ParseIssue | ||
export class Unexpected { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Unexpected" | ||
constructor(readonly ast: AST.AST) {} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const declaration = InternalParser.declaration | ||
/** | ||
* Error that occurs when a refinement has an error. | ||
* Error that occurs when a transformation has an error. | ||
* | ||
@@ -185,18 +163,18 @@ * @category model | ||
*/ | ||
export interface Refinement { | ||
readonly _tag: "Refinement" | ||
readonly ast: AST.Refinement | ||
readonly actual: unknown | ||
readonly kind: "From" | "Predicate" | ||
readonly error: ParseIssue | ||
export class Transformation { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Transformation" | ||
constructor( | ||
readonly ast: AST.Transformation, | ||
readonly actual: unknown, | ||
readonly kind: "Encoded" | "Transformation" | "Type", | ||
readonly error: ParseIssue | ||
) {} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const refinement = InternalParser.refinement | ||
/** | ||
* Error that occurs when an array or tuple has an error. | ||
* The `Type` variant of the `ParseIssue` type represents an error that occurs when the `actual` value is not of the expected type. | ||
* The `ast` field specifies the expected type, and the `actual` field contains the value that caused the error. | ||
* | ||
@@ -206,21 +184,18 @@ * @category model | ||
*/ | ||
export interface Tuple { | ||
readonly _tag: "Tuple" | ||
readonly ast: AST.Tuple | ||
readonly actual: unknown | ||
readonly errors: ReadonlyArray.NonEmptyReadonlyArray<Index> | ||
export class Type { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Type" | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly message: Option.Option<string> | ||
constructor(readonly ast: AST.AST, readonly actual: unknown, message?: string) { | ||
this.message = Option.fromNullable(message) | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const tuple = ( | ||
ast: AST.Tuple, | ||
actual: unknown, | ||
errors: ReadonlyArray.NonEmptyReadonlyArray<Index> | ||
): Tuple => ({ _tag: "Tuple", ast, actual, errors }) | ||
/** | ||
* Error that occurs when a type literal or record has an error. | ||
* The `Forbidden` variant of the `ParseIssue` type represents a forbidden operation, such as when encountering an Effect that is not allowed to execute (e.g., using `runSync`). | ||
* | ||
@@ -230,17 +205,18 @@ * @category model | ||
*/ | ||
export interface TypeLiteral { | ||
readonly _tag: "TypeLiteral" | ||
readonly ast: AST.TypeLiteral | ||
readonly actual: unknown | ||
readonly errors: ReadonlyArray.NonEmptyReadonlyArray<Key> | ||
export class Forbidden { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Forbidden" | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly message: Option.Option<string> | ||
constructor(readonly ast: AST.AST, readonly actual: unknown, message?: string) { | ||
this.message = Option.fromNullable(message) | ||
} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const typeLiteral = InternalParser.typeLiteral | ||
/** | ||
* The `Index` error indicates that there was an error at a specific index in an array or tuple. | ||
* Error that occurs when a required key or index is missing. | ||
* | ||
@@ -250,6 +226,7 @@ * @category model | ||
*/ | ||
export interface Index { | ||
readonly _tag: "Index" | ||
readonly index: number | ||
readonly error: ParseIssue | Missing | Unexpected | ||
export class Missing { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Missing" | ||
} | ||
@@ -261,6 +238,6 @@ | ||
*/ | ||
export const index = InternalParser.index | ||
export const missing: Missing = new Missing() | ||
/** | ||
* The `Key` variant of the `ParseIssue` type represents an error that occurs when a key in a type literal or record is invalid. | ||
* Error that occurs when a member in a union has an error. | ||
* | ||
@@ -270,16 +247,12 @@ * @category model | ||
*/ | ||
export interface Key { | ||
readonly _tag: "Key" | ||
readonly key: PropertyKey | ||
readonly error: ParseIssue | Missing | Unexpected | ||
export class Member { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Member" | ||
constructor(readonly ast: AST.AST, readonly error: ParseIssue) {} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const key = InternalParser.key | ||
/** | ||
* Error that occurs when a required key or index is missing. | ||
* Error that occurs when a union has an error. | ||
* | ||
@@ -289,21 +262,42 @@ * @category model | ||
*/ | ||
export interface Missing { | ||
readonly _tag: "Missing" | ||
export class Union { | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
readonly _tag = "Union" | ||
constructor( | ||
readonly ast: AST.Union, | ||
readonly actual: unknown, | ||
readonly errors: Arr.NonEmptyReadonlyArray<Type | TypeLiteral | Member> | ||
) {} | ||
} | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export const missing: Missing = InternalParser.missing | ||
/** | ||
* Error that occurs when an unexpected key or index is present. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface Unexpected { | ||
readonly _tag: "Unexpected" | ||
readonly ast: AST.AST | ||
export class ParseError extends TaggedError("ParseError")<{ readonly error: ParseIssue }> { | ||
get message() { | ||
return this.toString() | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toString() { | ||
return TreeFormatter.formatIssueSync(this.error) | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
toJSON() { | ||
return { | ||
_id: "ParseError", | ||
message: this.toString() | ||
} | ||
} | ||
/** | ||
* @since 1.0.0 | ||
*/ | ||
[Inspectable.NodeInspectSymbol]() { | ||
return this.toJSON() | ||
} | ||
} | ||
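For orientation, the issue variants are now classes rather than plain interfaces, and `ParseError` wraps a single `ParseIssue`. A hedged sketch of constructing one by hand (the `Schema.string` constructor is an assumption for illustration; `Type` and `parseError` come from the diffs above):

import * as ParseResult from "@effect/schema/ParseResult"
import * as Schema from "@effect/schema/Schema"

// hypothetical Type issue: the actual value 42 did not match the string AST node
const issue = new ParseResult.Type(Schema.string.ast, 42, "Expected a string")
const error = ParseResult.parseError(issue)

console.log(error.message) // tree-formatted via TreeFormatter.formatIssueSync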
@@ -315,16 +309,9 @@ | ||
*/ | ||
export const unexpected = InternalParser.unexpected | ||
export const parseError = (issue: ParseIssue): ParseError => new ParseError({ error: issue }) | ||
/** | ||
* Error that occurs when a union has an error. | ||
* | ||
* @category model | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
export interface Union { | ||
readonly _tag: "Union" | ||
readonly ast: AST.Union | ||
readonly actual: unknown | ||
readonly errors: ReadonlyArray.NonEmptyReadonlyArray<Type | TypeLiteral | Member> | ||
} | ||
export const succeed: <A>(a: A) => Either.Either<A, ParseIssue> = Either.right | ||
@@ -335,14 +322,15 @@ /** | ||
*/ | ||
export const union = InternalParser.union | ||
export const fail: (issue: ParseIssue) => Either.Either<never, ParseIssue> = Either.left | ||
/** | ||
* Error that occurs when a member in a union has an error. | ||
* | ||
* @category model | ||
* @since 1.0.0 | ||
*/ | ||
export interface Member { | ||
readonly _tag: "Member" | ||
readonly ast: AST.AST | ||
readonly error: ParseIssue | ||
const _try: <A>(options: { | ||
try: LazyArg<A> | ||
catch: (e: unknown) => ParseIssue | ||
}) => Either.Either<A, ParseIssue> = Either.try | ||
export { | ||
/** | ||
* @category constructors | ||
* @since 1.0.0 | ||
*/ | ||
_try as try | ||
} | ||
@@ -354,3 +342,6 @@ | ||
*/ | ||
export const member = InternalParser.member | ||
export const fromOption: { | ||
(onNone: () => ParseIssue): <A>(self: Option.Option<A>) => Either.Either<A, ParseIssue> | ||
<A>(self: Option.Option<A>, onNone: () => ParseIssue): Either.Either<A, ParseIssue> | ||
} = Either.fromOption | ||
@@ -361,3 +352,23 @@ /** | ||
*/ | ||
export const eitherOrUndefined = InternalParser.eitherOrUndefined | ||
export const flatMap: { | ||
<A, B, E1, R1>( | ||
f: (a: A) => Effect.Effect<B, E1, R1> | ||
): <E, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<B, E1 | E, R1 | R> | ||
<A, E, R, B, E1, R1>( | ||
self: Effect.Effect<A, E, R>, | ||
f: (a: A) => Effect.Effect<B, E1, R1> | ||
): Effect.Effect<B, E | E1, R | R1> | ||
} = dual(2, <A, E, R, B, E1, R1>( | ||
self: Effect.Effect<A, E, R>, | ||
f: (a: A) => Effect.Effect<B, E1, R1> | ||
): Effect.Effect<B, E | E1, R | R1> => { | ||
const s: any = self | ||
if (s["_tag"] === "Left") { | ||
return s | ||
} | ||
if (s["_tag"] === "Right") { | ||
return f(s.right) | ||
} | ||
return Effect.flatMap(self, f) | ||
}) | ||
@@ -368,3 +379,15 @@ /** | ||
*/ | ||
export const flatMap = InternalParser.flatMap | ||
export const map: { | ||
<A, B>(f: (a: A) => B): <E, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<B, E, R> | ||
<A, E, R, B>(self: Effect.Effect<A, E, R>, f: (a: A) => B): Effect.Effect<B, E, R> | ||
} = dual(2, <A, E, R, B>(self: Effect.Effect<A, E, R>, f: (a: A) => B): Effect.Effect<B, E, R> => { | ||
const s: any = self | ||
if (s["_tag"] === "Left") { | ||
return s | ||
} | ||
if (s["_tag"] === "Right") { | ||
return Either.right(f(s.right)) | ||
} | ||
return Effect.map(self, f) | ||
}) | ||
@@ -375,3 +398,15 @@ /** | ||
*/ | ||
export const map = InternalParser.map | ||
export const mapError: { | ||
<E, E2>(f: (e: E) => E2): <A, R>(self: Effect.Effect<A, E, R>) => Effect.Effect<A, E2, R> | ||
<A, E, R, E2>(self: Effect.Effect<A, E, R>, f: (e: E) => E2): Effect.Effect<A, E2, R> | ||
} = dual(2, <A, E, R, E2>(self: Effect.Effect<A, E, R>, f: (e: E) => E2): Effect.Effect<A, E2, R> => { | ||
const s: any = self | ||
if (s["_tag"] === "Left") { | ||
return Either.left(f(s.left)) | ||
} | ||
if (s["_tag"] === "Right") { | ||
return s | ||
} | ||
return Effect.mapError(self, f) | ||
}) | ||
@@ -382,3 +417,10 @@ /** | ||
*/ | ||
export const mapError = InternalParser.mapError | ||
export const eitherOrUndefined = <A, E, R>( | ||
self: Effect.Effect<A, E, R> | ||
): Either.Either<A, E> | undefined => { | ||
const s: any = self | ||
if (s["_tag"] === "Left" || s["_tag"] === "Right") { | ||
return s | ||
} | ||
} | ||
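The `_tag` checks in `flatMap`/`map`/`mapError` above act as a fast path: assuming (as in this version of effect) that `Either` is a subtype of `Effect`, the parser can stay fully synchronous whenever no effectful step is involved, and `eitherOrUndefined` lets callers detect that case. A minimal sketch under those assumptions:

import * as Either from "effect/Either"
import * as ParseResult from "@effect/schema/ParseResult"

// mapping over a plain Either never touches the Effect runtime
const doubled = ParseResult.map(Either.right(21), (n) => n * 2) // Either.right(42)

// returns the Either itself for synchronous results, undefined otherwise
ParseResult.eitherOrUndefined(doubled)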
@@ -437,57 +479,2 @@ /** | ||
/* c8 ignore start */ | ||
export { | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
decodeUnknown, | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
decodeUnknownEither, | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
decodeUnknownOption, | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
decodeUnknownPromise, | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
decodeUnknownSync, | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
encodeUnknown, | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
encodeUnknownEither, | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
encodeUnknownOption, | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
encodeUnknownPromise, | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
encodeUnknownSync | ||
} from "./Parser.js" | ||
/* c8 ignore end */ | ||
/** | ||
@@ -506,1 +493,1221 @@ * @since 1.0.0 | ||
) => Effect.Effect<Out, ParseIssue, R> | ||
/** @internal */ | ||
export const mergeParseOptions = ( | ||
options: AST.ParseOptions | undefined, | ||
overrideOptions: AST.ParseOptions | number | undefined | ||
): AST.ParseOptions | undefined => { | ||
if (overrideOptions === undefined || Predicate.isNumber(overrideOptions)) { | ||
return options | ||
} | ||
if (options === undefined) { | ||
return overrideOptions | ||
} | ||
const out: Mutable<AST.ParseOptions> = {} | ||
out.errors = overrideOptions.errors ?? options.errors | ||
out.onExcessProperty = overrideOptions.onExcessProperty ?? options.onExcessProperty | ||
return out | ||
} | ||
const getEither = (ast: AST.AST, isDecoding: boolean, options?: AST.ParseOptions) => { | ||
const parser = goMemo(ast, isDecoding) | ||
return (u: unknown, overrideOptions?: AST.ParseOptions): Either.Either<any, ParseIssue> => | ||
parser(u, mergeParseOptions(options, overrideOptions)) as any | ||
} | ||
const getSync = (ast: AST.AST, isDecoding: boolean, options?: AST.ParseOptions) => { | ||
const parser = getEither(ast, isDecoding, options) | ||
return (input: unknown, overrideOptions?: AST.ParseOptions) => | ||
Either.getOrThrowWith( | ||
parser(input, overrideOptions), | ||
(issue) => new Error(TreeFormatter.formatIssueSync(issue), { cause: issue }) | ||
) | ||
} | ||
const getOption = (ast: AST.AST, isDecoding: boolean, options?: AST.ParseOptions) => { | ||
const parser = getEither(ast, isDecoding, options) | ||
return (input: unknown, overrideOptions?: AST.ParseOptions): Option.Option<any> => | ||
Option.getRight(parser(input, overrideOptions)) | ||
} | ||
const getEffect = <R>(ast: AST.AST, isDecoding: boolean, options?: AST.ParseOptions) => { | ||
const parser = goMemo(ast, isDecoding) | ||
return (input: unknown, overrideOptions?: AST.ParseOptions): Effect.Effect<any, ParseIssue, R> => | ||
parser(input, { ...mergeParseOptions(options, overrideOptions), isEffectAllowed: true }) | ||
} | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeUnknownSync = <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => A => getSync(schema.ast, true, options) | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeUnknownOption = <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => Option.Option<A> => getOption(schema.ast, true, options) | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeUnknownEither = <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => Either.Either<A, ParseIssue> => | ||
getEither(schema.ast, true, options) | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeUnknownPromise = <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => { | ||
const parser = decodeUnknown(schema, options) | ||
return (u: unknown, overrideOptions?: AST.ParseOptions): Promise<A> => Effect.runPromise(parser(u, overrideOptions)) | ||
} | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeUnknown = <A, I, R>( | ||
schema: Schema.Schema<A, I, R>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => Effect.Effect<A, ParseIssue, R> => | ||
getEffect(schema.ast, true, options) | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeUnknownSync = <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => I => getSync(schema.ast, false, options) | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeUnknownOption = <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => Option.Option<I> => getOption(schema.ast, false, options) | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeUnknownEither = <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => Either.Either<I, ParseIssue> => | ||
getEither(schema.ast, false, options) | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeUnknownPromise = <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => { | ||
const parser = encodeUnknown(schema, options) | ||
return (u: unknown, overrideOptions?: AST.ParseOptions): Promise<I> => Effect.runPromise(parser(u, overrideOptions)) | ||
} | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeUnknown = <A, I, R>( | ||
schema: Schema.Schema<A, I, R>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => Effect.Effect<I, ParseIssue, R> => | ||
getEffect(schema.ast, false, options) | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeSync: <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => (i: I, overrideOptions?: AST.ParseOptions) => A = decodeUnknownSync | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeOption: <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => (i: I, overrideOptions?: AST.ParseOptions) => Option.Option<A> = decodeUnknownOption | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodeEither: <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => (i: I, overrideOptions?: AST.ParseOptions) => Either.Either<A, ParseIssue> = decodeUnknownEither | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decodePromise: <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => (i: I, overrideOptions?: AST.ParseOptions) => Promise<A> = decodeUnknownPromise | ||
/** | ||
* @category decoding | ||
* @since 1.0.0 | ||
*/ | ||
export const decode: <A, I, R>( | ||
schema: Schema.Schema<A, I, R>, | ||
options?: AST.ParseOptions | ||
) => (i: I, overrideOptions?: AST.ParseOptions) => Effect.Effect<A, ParseIssue, R> = decodeUnknown | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validateSync = <A, I, R>( | ||
schema: Schema.Schema<A, I, R>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => A => getSync(AST.typeAST(schema.ast), true, options) | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validateOption = <A, I, R>( | ||
schema: Schema.Schema<A, I, R>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => Option.Option<A> => | ||
getOption(AST.typeAST(schema.ast), true, options) | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validateEither = <A, I, R>( | ||
schema: Schema.Schema<A, I, R>, | ||
options?: AST.ParseOptions | ||
): (u: unknown, overrideOptions?: AST.ParseOptions) => Either.Either<A, ParseIssue> => | ||
getEither(AST.typeAST(schema.ast), true, options) | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validatePromise = <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => { | ||
const parser = validate(schema, options) | ||
return (u: unknown, overrideOptions?: AST.ParseOptions): Promise<A> => Effect.runPromise(parser(u, overrideOptions)) | ||
} | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const validate = <A, I, R>( | ||
schema: Schema.Schema<A, I, R>, | ||
options?: AST.ParseOptions | ||
): (a: unknown, overrideOptions?: AST.ParseOptions) => Effect.Effect<A, ParseIssue, R> => | ||
getEffect(AST.typeAST(schema.ast), true, options) | ||
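To make the decode/validate distinction concrete: the `validate*` functions run against `AST.typeAST(schema.ast)`, i.e. only the Type side of a schema, while the `decode*` functions also apply the transformation from the Encoded side. A hedged sketch, assuming `Schema.NumberFromString` exists in this version as a string-to-number transformation:

import * as ParseResult from "@effect/schema/ParseResult"
import * as Schema from "@effect/schema/Schema"

// decode: Encoded (string) -> Type (number)
ParseResult.decodeUnknownSync(Schema.NumberFromString)("42") // => 42

// validate: only checks the Type side, so it expects a number
ParseResult.validateSync(Schema.NumberFromString)(42)        // => 42
// ParseResult.validateSync(Schema.NumberFromString)("42")   // would throw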
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const is = <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => { | ||
const parser = goMemo(AST.typeAST(schema.ast), true) | ||
return (u: unknown, overrideOptions?: AST.ParseOptions | number): u is A => | ||
Either.isRight(parser(u, { ...mergeParseOptions(options, overrideOptions), isExact: true }) as any) | ||
} | ||
/** | ||
* @category validation | ||
* @since 1.0.0 | ||
*/ | ||
export const asserts = <A, I, R>(schema: Schema.Schema<A, I, R>, options?: AST.ParseOptions) => { | ||
const parser = goMemo(AST.typeAST(schema.ast), true) | ||
return (u: unknown, overrideOptions?: AST.ParseOptions): asserts u is A => { | ||
const result: Either.Either<any, ParseIssue> = parser(u, { | ||
...mergeParseOptions(options, overrideOptions), | ||
isExact: true | ||
}) as any | ||
if (Either.isLeft(result)) { | ||
throw new Error(TreeFormatter.formatIssueSync(result.left), { cause: result.left }) | ||
} | ||
} | ||
} | ||
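A brief sketch of the two type-guard entry points above (the `Schema.string` constructor is an assumption for illustration):

import * as ParseResult from "@effect/schema/ParseResult"
import * as Schema from "@effect/schema/Schema"

const isString = ParseResult.is(Schema.string)
isString("hello") // => true, and narrows unknown to string
isString(42)      // => false

// asserts requires an explicit type annotation at the call site
const assertString: (u: unknown) => asserts u is string = ParseResult.asserts(Schema.string)
assertString("hello")  // ok
// assertString(42)    // throws with a tree-formatted message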
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeSync: <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => (a: A, overrideOptions?: AST.ParseOptions) => I = encodeUnknownSync | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeOption: <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => (input: A, overrideOptions?: AST.ParseOptions) => Option.Option<I> = encodeUnknownOption | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodeEither: <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => (a: A, overrideOptions?: AST.ParseOptions) => Either.Either<I, ParseIssue> = encodeUnknownEither | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encodePromise: <A, I>( | ||
schema: Schema.Schema<A, I, never>, | ||
options?: AST.ParseOptions | ||
) => (a: A, overrideOptions?: AST.ParseOptions) => Promise<I> = encodeUnknownPromise | ||
/** | ||
* @category encoding | ||
* @since 1.0.0 | ||
*/ | ||
export const encode: <A, I, R>( | ||
schema: Schema.Schema<A, I, R>, | ||
options?: AST.ParseOptions | ||
) => (a: A, overrideOptions?: AST.ParseOptions) => Effect.Effect<I, ParseIssue, R> = encodeUnknown | ||
interface InternalOptions extends AST.ParseOptions { | ||
readonly isEffectAllowed?: boolean | ||
// `isExact = false` means that missing keys are treated as undefined values (`{ key: undefined }`) | ||
readonly isExact?: boolean | ||
} | ||
interface Parser { | ||
(i: any, options?: InternalOptions): Effect.Effect<any, ParseIssue, any> | ||
} | ||
const decodeMemoMap = globalValue( | ||
Symbol.for("@effect/schema/Parser/decodeMemoMap"), | ||
() => new WeakMap<AST.AST, Parser>() | ||
) | ||
const encodeMemoMap = globalValue( | ||
Symbol.for("@effect/schema/Parser/encodeMemoMap"), | ||
() => new WeakMap<AST.AST, Parser>() | ||
) | ||
const goMemo = (ast: AST.AST, isDecoding: boolean): Parser => { | ||
const memoMap = isDecoding ? decodeMemoMap : encodeMemoMap | ||
const memo = memoMap.get(ast) | ||
if (memo) { | ||
return memo | ||
} | ||
const parser = go(ast, isDecoding) | ||
memoMap.set(ast, parser) | ||
return parser | ||
} | ||
const getConcurrency = (ast: AST.AST): Concurrency | undefined => | ||
Option.getOrUndefined(AST.getConcurrencyAnnotation(ast)) | ||
const getBatching = (ast: AST.AST): boolean | "inherit" | undefined => | ||
Option.getOrUndefined(AST.getBatchingAnnotation(ast)) | ||
const go = (ast: AST.AST, isDecoding: boolean): Parser => { | ||
switch (ast._tag) { | ||
case "Refinement": { | ||
if (isDecoding) { | ||
const from = goMemo(ast.from, true) | ||
return (i, options) => | ||
handleForbidden( | ||
flatMap( | ||
mapError(from(i, options), (e) => new Refinement(ast, i, "From", e)), | ||
(a) => | ||
Option.match( | ||
ast.filter(a, options ?? AST.defaultParseOption, ast), | ||
{ | ||
onNone: () => Either.right(a), | ||
onSome: (e) => Either.left(new Refinement(ast, i, "Predicate", e)) | ||
} | ||
) | ||
), | ||
ast, | ||
i, | ||
options | ||
) | ||
} else { | ||
const from = goMemo(AST.typeAST(ast), true) | ||
const to = goMemo(dropRightRefinement(ast.from), false) | ||
return (i, options) => handleForbidden(flatMap(from(i, options), (a) => to(a, options)), ast, i, options) | ||
} | ||
} | ||
case "Transformation": { | ||
const transform = getFinalTransformation(ast.transformation, isDecoding) | ||
const from = isDecoding ? goMemo(ast.from, true) : goMemo(ast.to, false) | ||
const to = isDecoding ? goMemo(ast.to, true) : goMemo(ast.from, false) | ||
return (i1, options) => | ||
handleForbidden( | ||
flatMap( | ||
mapError( | ||
from(i1, options), | ||
(e) => new Transformation(ast, i1, isDecoding ? "Encoded" : "Type", e) | ||
), | ||
(a) => | ||
flatMap( | ||
mapError( | ||
transform(a, options ?? AST.defaultParseOption, ast), | ||
(e) => new Transformation(ast, i1, "Transformation", e) | ||
), | ||
(i2) => | ||
mapError( | ||
to(i2, options), | ||
(e) => new Transformation(ast, i1, isDecoding ? "Type" : "Encoded", e) | ||
) | ||
) | ||
), | ||
ast, | ||
i1, | ||
options | ||
) | ||
} | ||
case "Declaration": { | ||
const parse = isDecoding | ||
? ast.decodeUnknown(...ast.typeParameters) | ||
: ast.encodeUnknown(...ast.typeParameters) | ||
return (i, options) => | ||
handleForbidden( | ||
mapError(parse(i, options ?? AST.defaultParseOption, ast), (e) => new Declaration(ast, i, e)), | ||
ast, | ||
i, | ||
options | ||
) | ||
} | ||
case "Literal": | ||
return fromRefinement(ast, (u): u is typeof ast.literal => u === ast.literal) | ||
case "UniqueSymbol": | ||
return fromRefinement(ast, (u): u is typeof ast.symbol => u === ast.symbol) | ||
case "UndefinedKeyword": | ||
return fromRefinement(ast, Predicate.isUndefined) | ||
case "VoidKeyword": | ||
return fromRefinement(ast, Predicate.isUndefined) | ||
case "NeverKeyword": | ||
return fromRefinement(ast, Predicate.isNever) | ||
case "UnknownKeyword": | ||
case "AnyKeyword": | ||
return Either.right | ||
case "StringKeyword": | ||
return fromRefinement(ast, Predicate.isString) | ||
case "NumberKeyword": | ||
return fromRefinement(ast, Predicate.isNumber) | ||
case "BooleanKeyword": | ||
return fromRefinement(ast, Predicate.isBoolean) | ||
case "BigIntKeyword": | ||
return fromRefinement(ast, Predicate.isBigInt) | ||
case "SymbolKeyword": | ||
return fromRefinement(ast, Predicate.isSymbol) | ||
case "ObjectKeyword": | ||
return fromRefinement(ast, Predicate.isObject) | ||
case "Enums": | ||
return fromRefinement(ast, (u): u is any => ast.enums.some(([_, value]) => value === u)) | ||
case "TemplateLiteral": { | ||
const regex = AST.getTemplateLiteralRegExp(ast) | ||
return fromRefinement(ast, (u): u is any => Predicate.isString(u) && regex.test(u)) | ||
} | ||
case "TupleType": { | ||
const elements = ast.elements.map((e) => goMemo(e.type, isDecoding)) | ||
const rest = ast.rest.map((ast) => goMemo(ast, isDecoding)) | ||
let requiredLen = ast.elements.filter((e) => !e.isOptional).length | ||
if (ast.rest.length > 0) { | ||
requiredLen += ast.rest.length - 1 | ||
} | ||
const expectedAST = AST.Union.make(ast.elements.map((_, i) => new AST.Literal(i))) | ||
const concurrency = getConcurrency(ast) | ||
const batching = getBatching(ast) | ||
return (input: unknown, options) => { | ||
if (!Arr.isArray(input)) { | ||
return Either.left(new Type(ast, input)) | ||
} | ||
const allErrors = options?.errors === "all" | ||
const es: Array<[number, Index]> = [] | ||
let stepKey = 0 | ||
// --------------------------------------------- | ||
// handle missing indexes | ||
// --------------------------------------------- | ||
const len = input.length | ||
for (let i = len; i <= requiredLen - 1; i++) { | ||
const e = new Index(i, missing) | ||
if (allErrors) { | ||
es.push([stepKey++, e]) | ||
continue | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e])) | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle excess indexes | ||
// --------------------------------------------- | ||
if (ast.rest.length === 0) { | ||
for (let i = ast.elements.length; i <= len - 1; i++) { | ||
const e = new Index(i, new Unexpected(expectedAST)) | ||
if (allErrors) { | ||
es.push([stepKey++, e]) | ||
continue | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e])) | ||
} | ||
} | ||
} | ||
const output: Array<[number, any]> = [] | ||
let i = 0 | ||
type State = { | ||
es: typeof es | ||
output: typeof output | ||
} | ||
let queue: | ||
| Array<(_: State) => Effect.Effect<void, ParseIssue, any>> | ||
| undefined = undefined | ||
// --------------------------------------------- | ||
// handle elements | ||
// --------------------------------------------- | ||
for (; i < elements.length; i++) { | ||
if (len < i + 1) { | ||
if (ast.elements[i].isOptional) { | ||
// the input element is missing | ||
continue | ||
} | ||
} else { | ||
const parser = elements[i] | ||
const te = parser(input[i], options) | ||
const eu = eitherOrUndefined(te) | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
// the input element is present but is not valid | ||
const e = new Index(i, eu.left) | ||
if (allErrors) { | ||
es.push([stepKey++, e]) | ||
continue | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))) | ||
} | ||
} | ||
output.push([stepKey++, eu.right]) | ||
} else { | ||
const nk = stepKey++ | ||
const index = i | ||
if (!queue) { | ||
queue = [] | ||
} | ||
queue.push(({ es, output }: State) => | ||
Effect.flatMap(Effect.either(te), (t) => { | ||
if (Either.isLeft(t)) { | ||
// the input element is present but is not valid | ||
const e = new Index(index, t.left) | ||
if (allErrors) { | ||
es.push([nk, e]) | ||
return Effect.void | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))) | ||
} | ||
} | ||
output.push([nk, t.right]) | ||
return Effect.void | ||
}) | ||
) | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle rest element | ||
// --------------------------------------------- | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest | ||
for (; i < len - tail.length; i++) { | ||
const te = head(input[i], options) | ||
const eu = eitherOrUndefined(te) | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
const e = new Index(i, eu.left) | ||
if (allErrors) { | ||
es.push([stepKey++, e]) | ||
continue | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))) | ||
} | ||
} else { | ||
output.push([stepKey++, eu.right]) | ||
} | ||
} else { | ||
const nk = stepKey++ | ||
const index = i | ||
if (!queue) { | ||
queue = [] | ||
} | ||
queue.push( | ||
({ es, output }: State) => | ||
Effect.flatMap(Effect.either(te), (t) => { | ||
if (Either.isLeft(t)) { | ||
const e = new Index(index, t.left) | ||
if (allErrors) { | ||
es.push([nk, e]) | ||
return Effect.void | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))) | ||
} | ||
} else { | ||
output.push([nk, t.right]) | ||
return Effect.void | ||
} | ||
}) | ||
) | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle post rest elements | ||
// --------------------------------------------- | ||
for (let j = 0; j < tail.length; j++) { | ||
i += j | ||
if (len < i + 1) { | ||
continue | ||
} else { | ||
const te = tail[j](input[i], options) | ||
const eu = eitherOrUndefined(te) | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
// the input element is present but is not valid | ||
const e = new Index(i, eu.left) | ||
if (allErrors) { | ||
es.push([stepKey++, e]) | ||
continue | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))) | ||
} | ||
} | ||
output.push([stepKey++, eu.right]) | ||
} else { | ||
const nk = stepKey++ | ||
const index = i | ||
if (!queue) { | ||
queue = [] | ||
} | ||
queue.push( | ||
({ es, output }: State) => | ||
Effect.flatMap(Effect.either(te), (t) => { | ||
if (Either.isLeft(t)) { | ||
// the input element is present but is not valid | ||
const e = new Index(index, t.left) | ||
if (allErrors) { | ||
es.push([nk, e]) | ||
return Effect.void | ||
} else { | ||
return Either.left(new TupleType(ast, input, [e], sortByIndex(output))) | ||
} | ||
} | ||
output.push([nk, t.right]) | ||
return Effect.void | ||
}) | ||
) | ||
} | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// compute result | ||
// --------------------------------------------- | ||
const computeResult = ({ es, output }: State) => | ||
Arr.isNonEmptyArray(es) ? | ||
Either.left(new TupleType(ast, input, sortByIndex(es), sortByIndex(output))) : | ||
Either.right(sortByIndex(output)) | ||
if (queue && queue.length > 0) { | ||
const cqueue = queue | ||
return Effect.suspend(() => { | ||
const state: State = { | ||
es: Arr.copy(es), | ||
output: Arr.copy(output) | ||
} | ||
return Effect.flatMap( | ||
Effect.forEach(cqueue, (f) => f(state), { concurrency, batching, discard: true }), | ||
() => computeResult(state) | ||
) | ||
}) | ||
} | ||
return computeResult({ output, es }) | ||
} | ||
} | ||
case "TypeLiteral": { | ||
if (ast.propertySignatures.length === 0 && ast.indexSignatures.length === 0) { | ||
return fromRefinement(ast, Predicate.isNotNullable) | ||
} | ||
const propertySignatures: Array<readonly [Parser, AST.PropertySignature]> = [] | ||
const expectedKeys: Record<PropertyKey, null> = {} | ||
for (const ps of ast.propertySignatures) { | ||
propertySignatures.push([goMemo(ps.type, isDecoding), ps]) | ||
expectedKeys[ps.name] = null | ||
} | ||
const indexSignatures = ast.indexSignatures.map((is) => | ||
[ | ||
goMemo(is.parameter, isDecoding), | ||
goMemo(is.type, isDecoding), | ||
is.parameter | ||
] as const | ||
) | ||
const expectedAST = AST.Union.make( | ||
ast.indexSignatures.map((is): AST.AST => is.parameter).concat( | ||
util_.ownKeys(expectedKeys).map((key) => | ||
Predicate.isSymbol(key) ? new AST.UniqueSymbol(key) : new AST.Literal(key) | ||
) | ||
) | ||
) | ||
const expected = goMemo(expectedAST, isDecoding) | ||
const concurrency = getConcurrency(ast) | ||
const batching = getBatching(ast) | ||
return (input: unknown, options) => { | ||
if (!Predicate.isRecord(input)) { | ||
return Either.left(new Type(ast, input)) | ||
} | ||
const allErrors = options?.errors === "all" | ||
const es: Array<[number, Key]> = [] | ||
let stepKey = 0 | ||
// --------------------------------------------- | ||
// handle excess properties | ||
// --------------------------------------------- | ||
const onExcessPropertyError = options?.onExcessProperty === "error" | ||
const onExcessPropertyPreserve = options?.onExcessProperty === "preserve" | ||
const output: any = {} | ||
if (onExcessPropertyError || onExcessPropertyPreserve) { | ||
for (const key of util_.ownKeys(input)) { | ||
const eu = eitherOrUndefined(expected(key, options))! | ||
if (Either.isLeft(eu)) { | ||
// key is unexpected | ||
if (onExcessPropertyError) { | ||
const e = new Key(key, new Unexpected(expectedAST)) | ||
if (allErrors) { | ||
es.push([stepKey++, e]) | ||
continue | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)) | ||
} | ||
} else { | ||
// preserve key | ||
output[key] = input[key] | ||
} | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle property signatures | ||
// --------------------------------------------- | ||
type State = { | ||
es: typeof es | ||
output: typeof output | ||
} | ||
let queue: | ||
| Array<(state: State) => Effect.Effect<void, ParseIssue, any>> | ||
| undefined = undefined | ||
const isExact = options?.isExact === true | ||
for (let i = 0; i < propertySignatures.length; i++) { | ||
const ps = propertySignatures[i][1] | ||
const name = ps.name | ||
const hasKey = Object.prototype.hasOwnProperty.call(input, name) | ||
if (!hasKey) { | ||
if (ps.isOptional) { | ||
continue | ||
} else if (isExact) { | ||
const e = new Key(name, missing) | ||
if (allErrors) { | ||
es.push([stepKey++, e]) | ||
continue | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)) | ||
} | ||
} | ||
} | ||
const parser = propertySignatures[i][0] | ||
const te = parser(input[name], options) | ||
const eu = eitherOrUndefined(te) | ||
if (eu) { | ||
if (Either.isLeft(eu)) { | ||
const e = new Key(name, hasKey ? eu.left : missing) | ||
if (allErrors) { | ||
es.push([stepKey++, e]) | ||
continue | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)) | ||
} | ||
} | ||
output[name] = eu.right | ||
} else { | ||
const nk = stepKey++ | ||
const index = name | ||
if (!queue) { | ||
queue = [] | ||
} | ||
queue.push( | ||
({ es, output }: State) => | ||
Effect.flatMap(Effect.either(te), (t) => { | ||
if (Either.isLeft(t)) { | ||
const e = new Key(index, hasKey ? t.left : missing) | ||
if (allErrors) { | ||
es.push([nk, e]) | ||
return Effect.void | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)) | ||
} | ||
} | ||
output[index] = t.right | ||
return Effect.void | ||
}) | ||
) | ||
} | ||
} | ||
// --------------------------------------------- | ||
// handle index signatures | ||
// --------------------------------------------- | ||
for (let i = 0; i < indexSignatures.length; i++) { | ||
const indexSignature = indexSignatures[i] | ||
const parameter = indexSignature[0] | ||
const type = indexSignature[1] | ||
const keys = util_.getKeysForIndexSignature(input, indexSignature[2]) | ||
for (const key of keys) { | ||
// --------------------------------------------- | ||
// handle keys | ||
// --------------------------------------------- | ||
const keu = eitherOrUndefined(parameter(key, options)) | ||
if (keu && Either.isRight(keu)) { | ||
// --------------------------------------------- | ||
// handle values | ||
// --------------------------------------------- | ||
const vpr = type(input[key], options) | ||
const veu = eitherOrUndefined(vpr) | ||
if (veu) { | ||
if (Either.isLeft(veu)) { | ||
const e = new Key(key, veu.left) | ||
if (allErrors) { | ||
es.push([stepKey++, e]) | ||
continue | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)) | ||
} | ||
} else { | ||
if (!Object.prototype.hasOwnProperty.call(expectedKeys, key)) { | ||
output[key] = veu.right | ||
} | ||
} | ||
} else { | ||
const nk = stepKey++ | ||
const index = key | ||
if (!queue) { | ||
queue = [] | ||
} | ||
queue.push( | ||
({ es, output }: State) => | ||
Effect.flatMap( | ||
Effect.either(vpr), | ||
(tv) => { | ||
if (Either.isLeft(tv)) { | ||
const e = new Key(index, tv.left) | ||
if (allErrors) { | ||
es.push([nk, e]) | ||
return Effect.void | ||
} else { | ||
return Either.left(new TypeLiteral(ast, input, [e], output)) | ||
} | ||
} else { | ||
if (!Object.prototype.hasOwnProperty.call(expectedKeys, key)) { | ||
output[key] = tv.right | ||
} | ||
return Effect.void | ||
} | ||
} | ||
) | ||
) | ||
} | ||
} | ||
} | ||
} | ||
// --------------------------------------------- | ||
// compute result | ||
// --------------------------------------------- | ||
const computeResult = ({ es, output }: State) => | ||
Arr.isNonEmptyArray(es) ? | ||
Either.left(new TypeLiteral(ast, input, sortByIndex(es), output)) : | ||
Either.right(output) | ||
if (queue && queue.length > 0) { | ||
const cqueue = queue | ||
return Effect.suspend(() => { | ||
const state: State = { | ||
es: Arr.copy(es), | ||
output: Object.assign({}, output) | ||
} | ||
return Effect.flatMap( | ||
Effect.forEach(cqueue, (f) => f(state), { concurrency, batching, discard: true }), | ||
() => computeResult(state) | ||
) | ||
}) | ||
} | ||
return computeResult({ es, output }) | ||
} | ||
} | ||
case "Union": { | ||
const searchTree = getSearchTree(ast.types, isDecoding) | ||
const ownKeys = util_.ownKeys(searchTree.keys) | ||
const len = ownKeys.length | ||
const map = new Map<any, Parser>() | ||
for (let i = 0; i < ast.types.length; i++) { | ||
map.set(ast.types[i], goMemo(ast.types[i], isDecoding)) | ||
} | ||
const concurrency = getConcurrency(ast) ?? 1 | ||
const batching = getBatching(ast) | ||
return (input, options) => { | ||
const es: Array<[number, Type | TypeLiteral | Member]> = [] | ||
let stepKey = 0 | ||
let candidates: Array<AST.AST> = [] | ||
if (len > 0) { | ||
// if there is at least one key then input must be an object | ||
if (Predicate.isRecord(input)) { | ||
for (let i = 0; i < len; i++) { | ||
const name = ownKeys[i] | ||
const buckets = searchTree.keys[name].buckets | ||
// for each property that should contain a literal, check if the input contains that property | ||
if (Object.prototype.hasOwnProperty.call(input, name)) { | ||
const literal = String(input[name]) | ||
// check that the value obtained from the input for the property corresponds to an existing bucket | ||
if (Object.prototype.hasOwnProperty.call(buckets, literal)) { | ||
// retrieve the minimal set of candidates for decoding | ||
candidates = candidates.concat(buckets[literal]) | ||
} else { | ||
const literals = AST.Union.make(searchTree.keys[name].literals) | ||
es.push([ | ||
stepKey++, | ||
new TypeLiteral( | ||
new AST.TypeLiteral([ | ||
new AST.PropertySignature(name, literals, false, true) | ||
], []), | ||
input, | ||
[new Key(name, new Type(literals, input[name]))] | ||
) | ||
]) | ||
} | ||
} else { | ||
const literals = AST.Union.make(searchTree.keys[name].literals) | ||
es.push([ | ||
stepKey++, | ||
new TypeLiteral( | ||
new AST.TypeLiteral([ | ||
new AST.PropertySignature(name, literals, false, true) | ||
], []), | ||
input, | ||
[new Key(name, missing)] | ||
) | ||
]) | ||
} | ||
} | ||
} else { | ||
es.push([stepKey++, new Type(ast, input)]) | ||
} | ||
} | ||
if (searchTree.otherwise.length > 0) { | ||
candidates = candidates.concat(searchTree.otherwise) | ||
} | ||
let queue: | ||
| Array<(state: State) => Effect.Effect<unknown, ParseIssue, any>> | ||
| undefined = undefined | ||
type State = { | ||
finalResult?: any | ||
es: typeof es | ||
} | ||
for (let i = 0; i < candidates.length; i++) { | ||
const candidate = candidates[i] | ||
const pr = map.get(candidate)!(input, options) | ||
// the members of a union are ordered based on which one should be decoded first, | ||
// therefore if one member has added a task, all subsequent members must | ||
// also add a task to the queue even if they are synchronous | ||
const eu = !queue || queue.length === 0 ? eitherOrUndefined(pr) : undefined | ||
if (eu) { | ||
if (Either.isRight(eu)) { | ||
return Either.right(eu.right) | ||
} else { | ||
es.push([stepKey++, new Member(candidate, eu.left)]) | ||
} | ||
} else { | ||
const nk = stepKey++ | ||
if (!queue) { | ||
queue = [] | ||
} | ||
queue.push( | ||
(state) => | ||
Effect.suspend(() => { | ||
if ("finalResult" in state) { | ||
return Effect.void | ||
} else { | ||
return Effect.flatMap(Effect.either(pr), (t) => { | ||
if (Either.isRight(t)) { | ||
state.finalResult = Either.right(t.right) | ||
} else { | ||
state.es.push([nk, new Member(candidate, t.left)]) | ||
} | ||
return Effect.void | ||
}) | ||
} | ||
}) | ||
) | ||
} | ||
} | ||
// --------------------------------------------- | ||
// compute result | ||
// --------------------------------------------- | ||
const computeResult = (es: State["es"]) => | ||
Arr.isNonEmptyArray(es) ? | ||
es.length === 1 && es[0][1]._tag === "Type" ? | ||
Either.left(es[0][1]) : | ||
Either.left(new Union(ast, input, sortByIndex(es))) : | ||
// this should never happen | ||
Either.left(new Type(AST.neverKeyword, input)) | ||
if (queue && queue.length > 0) { | ||
const cqueue = queue | ||
return Effect.suspend(() => { | ||
const state: State = { es: Arr.copy(es) } | ||
return Effect.flatMap( | ||
Effect.forEach(cqueue, (f) => f(state), { concurrency, batching, discard: true }), | ||
() => { | ||
if ("finalResult" in state) { | ||
return state.finalResult | ||
} | ||
return computeResult(state.es) | ||
} | ||
) | ||
}) | ||
} | ||
return computeResult(es) | ||
} | ||
} | ||
case "Suspend": { | ||
const get = util_.memoizeThunk(() => goMemo(AST.annotations(ast.f(), ast.annotations), isDecoding)) | ||
return (a, options) => get()(a, options) | ||
} | ||
} | ||
} | ||
const fromRefinement = <A>(ast: AST.AST, refinement: (u: unknown) => u is A): Parser => (u) => | ||
refinement(u) ? Either.right(u) : Either.left(new Type(ast, u)) | ||
/** @internal */ | ||
export const getLiterals = ( | ||
ast: AST.AST, | ||
isDecoding: boolean | ||
): ReadonlyArray<[PropertyKey, AST.Literal]> => { | ||
switch (ast._tag) { | ||
case "Declaration": { | ||
const annotation = AST.getSurrogateAnnotation(ast) | ||
if (Option.isSome(annotation)) { | ||
return getLiterals(annotation.value, isDecoding) | ||
} | ||
break | ||
} | ||
case "TypeLiteral": { | ||
const out: Array<[PropertyKey, AST.Literal]> = [] | ||
for (let i = 0; i < ast.propertySignatures.length; i++) { | ||
const propertySignature = ast.propertySignatures[i] | ||
const type = isDecoding ? AST.encodedAST(propertySignature.type) : AST.typeAST(propertySignature.type) | ||
if (AST.isLiteral(type) && !propertySignature.isOptional) { | ||
out.push([propertySignature.name, type]) | ||
} | ||
} | ||
return out | ||
} | ||
case "Refinement": | ||
return getLiterals(ast.from, isDecoding) | ||
case "Suspend": | ||
return getLiterals(ast.f(), isDecoding) | ||
case "Transformation": | ||
return getLiterals(isDecoding ? ast.from : ast.to, isDecoding) | ||
} | ||
return [] | ||
} | ||
/** | ||
* The purpose of the algorithm is to narrow down the pool of possible candidates for decoding as much as possible. | ||
* | ||
* This function separates the schemas into two groups, `keys` and `otherwise`: | ||
* | ||
* - `keys`: the schema has at least one property with a literal value | ||
* - `otherwise`: the schema has no properties with a literal value | ||
* | ||
* If a schema has at least one property with a literal value (and therefore ends up in `keys`), a namespace is first | ||
* created for the name of the property containing the literal; within that namespace, a "bucket" is created for the | ||
* literal value, and every schema sharing that property and literal value is stored in the bucket. (A standalone | ||
* sketch of this bucketing idea follows the function below.) | ||
* | ||
* @internal | ||
*/ | ||
export const getSearchTree = ( | ||
members: ReadonlyArray<AST.AST>, | ||
isDecoding: boolean | ||
): { | ||
keys: { | ||
readonly [key: PropertyKey]: { | ||
buckets: { [literal: string]: ReadonlyArray<AST.AST> } | ||
literals: ReadonlyArray<AST.Literal> // this is for error messages | ||
} | ||
} | ||
otherwise: ReadonlyArray<AST.AST> | ||
} => { | ||
const keys: { | ||
[key: PropertyKey]: { | ||
buckets: { [literal: string]: Array<AST.AST> } | ||
literals: Array<AST.Literal> | ||
} | ||
} = {} | ||
const otherwise: Array<AST.AST> = [] | ||
for (let i = 0; i < members.length; i++) { | ||
const member = members[i] | ||
const tags = getLiterals(member, isDecoding) | ||
if (tags.length > 0) { | ||
for (let j = 0; j < tags.length; j++) { | ||
const [key, literal] = tags[j] | ||
const hash = String(literal.literal) | ||
keys[key] = keys[key] || { buckets: {}, literals: [] } | ||
const buckets = keys[key].buckets | ||
if (Object.prototype.hasOwnProperty.call(buckets, hash)) { | ||
if (j < tags.length - 1) { | ||
continue | ||
} | ||
buckets[hash].push(member) | ||
keys[key].literals.push(literal) | ||
} else { | ||
buckets[hash] = [member] | ||
keys[key].literals.push(literal) | ||
break | ||
} | ||
} | ||
} else { | ||
otherwise.push(member) | ||
} | ||
} | ||
return { keys, otherwise } | ||
} | ||
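// A standalone sketch of the bucketing idea described above, assuming union members are plain | ||
// objects tagged with a `_tag` literal (the names and types here are hypothetical, not the | ||
// library's own): | ||
type TaggedExample = { readonly _tag: string } | ||
const buildBucketsExample = <A extends TaggedExample>( | ||
  members: ReadonlyArray<A> | ||
): { [literal: string]: Array<A> } => { | ||
  const buckets: { [literal: string]: Array<A> } = {} | ||
  for (const member of members) { | ||
    const bucket = buckets[member._tag] ?? (buckets[member._tag] = []) | ||
    bucket.push(member) | ||
  } | ||
  return buckets | ||
} | ||
// buildBucketsExample([{ _tag: "circle" }, { _tag: "square" }]) | ||
// // => { circle: [{ _tag: "circle" }], square: [{ _tag: "square" }] } | ||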
const dropRightRefinement = (ast: AST.AST): AST.AST => AST.isRefinement(ast) ? dropRightRefinement(ast.from) : ast | ||
const handleForbidden = <R, A>( | ||
effect: Effect.Effect<A, ParseIssue, R>, | ||
ast: AST.AST, | ||
actual: unknown, | ||
options: InternalOptions | undefined | ||
): Effect.Effect<A, ParseIssue, R> => { | ||
const eu = eitherOrUndefined(effect) | ||
if (eu) { | ||
return eu | ||
} | ||
if (options?.isEffectAllowed === true) { | ||
return effect | ||
} | ||
try { | ||
return Effect.runSync(Effect.either(effect as Effect.Effect<A, ParseIssue>)) | ||
} catch (e) { | ||
return Either.left( | ||
new Forbidden( | ||
ast, | ||
actual, | ||
"cannot be be resolved synchronously, this is caused by using runSync on an effect that performs async work" | ||
) | ||
) | ||
} | ||
} | ||
function sortByIndex<T>( | ||
es: Arr.NonEmptyArray<[number, T]> | ||
): Arr.NonEmptyArray<T> | ||
function sortByIndex<T>(es: Array<[number, T]>): Array<T> | ||
function sortByIndex(es: Array<[number, any]>): any { | ||
return es.sort(([a], [b]) => a > b ? 1 : a < b ? -1 : 0).map(([_, a]) => a) | ||
} | ||
// ------------------------------------------------------------------------------------- | ||
// transformations interpreter | ||
// ------------------------------------------------------------------------------------- | ||
/** @internal */ | ||
export const getFinalTransformation = ( | ||
transformation: AST.TransformationKind, | ||
isDecoding: boolean | ||
): ( | ||
input: any, | ||
options: AST.ParseOptions, | ||
self: AST.Transformation | ||
) => Effect.Effect<any, ParseIssue, any> => { | ||
switch (transformation._tag) { | ||
case "FinalTransformation": | ||
return isDecoding ? transformation.decode : transformation.encode | ||
case "ComposeTransformation": | ||
return Either.right | ||
case "TypeLiteralTransformation": | ||
return (input) => { | ||
let out: Effect.Effect<any, ParseIssue, any> = Either.right(input) | ||
// --------------------------------------------- | ||
// handle property signature transformations | ||
// --------------------------------------------- | ||
for (const pst of transformation.propertySignatureTransformations) { | ||
const [from, to] = isDecoding ? | ||
[pst.from, pst.to] : | ||
[pst.to, pst.from] | ||
const transformation = isDecoding ? pst.decode : pst.encode | ||
const f = (input: any) => { | ||
const o = transformation( | ||
Object.prototype.hasOwnProperty.call(input, from) ? | ||
Option.some(input[from]) : | ||
Option.none() | ||
) | ||
delete input[from] | ||
if (Option.isSome(o)) { | ||
input[to] = o.value | ||
} | ||
return input | ||
} | ||
out = map(out, f) | ||
} | ||
return out | ||
} | ||
} | ||
} |
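// A standalone sketch of the key-renaming step performed for each property signature | ||
// transformation above (hypothetical helper, not part of this package). The real code decides | ||
// presence via the `Option` returned by the per-property transformation; here it is simplified | ||
// to a plain `hasOwnProperty` check. | ||
const renameKeyExample = ( | ||
  input: Record<PropertyKey, unknown>, | ||
  from: PropertyKey, | ||
  to: PropertyKey | ||
): Record<PropertyKey, unknown> => { | ||
  const hadValue = Object.prototype.hasOwnProperty.call(input, from) | ||
  const value = input[from] | ||
  delete input[from] | ||
  if (hadValue) { | ||
    input[to] = value | ||
  } | ||
  return input | ||
} | ||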
/** | ||
* @since 1.0.0 | ||
*/ | ||
import * as Arr from "effect/Array" | ||
import * as Option from "effect/Option" | ||
import * as ReadonlyArray from "effect/ReadonlyArray" | ||
import * as AST from "./AST.js" | ||
import * as Internal from "./internal/ast.js" | ||
import * as hooks from "./internal/hooks.js" | ||
import * as InternalSchema from "./internal/schema.js" | ||
import * as Parser from "./Parser.js" | ||
import * as errors_ from "./internal/errors.js" | ||
import * as util_ from "./internal/util.js" | ||
import * as ParseResult from "./ParseResult.js" | ||
import type * as Schema from "./Schema.js" | ||
@@ -25,3 +24,3 @@ | ||
*/ | ||
export const PrettyHookId: unique symbol = hooks.PrettyHookId | ||
export const PrettyHookId: unique symbol = Symbol.for("@effect/schema/PrettyHookId") | ||
@@ -40,4 +39,3 @@ /** | ||
<A>(handler: (...args: ReadonlyArray<Pretty<any>>) => Pretty<A>) => | ||
<I, R>(self: Schema.Schema<A, I, R>): Schema.Schema<A, I, R> => | ||
InternalSchema.make(AST.setAnnotation(self.ast, PrettyHookId, handler)) | ||
<I, R>(self: Schema.Schema<A, I, R>): Schema.Schema<A, I, R> => self.annotations({ [PrettyHookId]: handler }) | ||
@@ -48,3 +46,3 @@ /** | ||
*/ | ||
export const make = <A, I, R>(schema: Schema.Schema<A, I, R>): (a: A) => string => compile(schema.ast) | ||
export const make = <A, I, R>(schema: Schema.Schema<A, I, R>): (a: A) => string => compile(schema.ast, []) | ||
@@ -65,4 +63,7 @@ const getHook = AST.getAnnotation<(...args: ReadonlyArray<Pretty<any>>) => Pretty<any>>( | ||
const formatUnknown = getMatcher(AST.formatUnknown) | ||
const formatUnknown = getMatcher(util_.formatUnknown) | ||
const getPrettyErrorMessage = (message: string, path: ReadonlyArray<PropertyKey>) => | ||
errors_.getErrorMessageWithPath(`cannot build a Pretty for ${message}`, path) | ||
/** | ||
@@ -72,8 +73,8 @@ * @since 1.0.0 | ||
export const match: AST.Match<Pretty<any>> = { | ||
"Declaration": (ast, go) => { | ||
"Declaration": (ast, go, path) => { | ||
const hook = getHook(ast) | ||
if (Option.isSome(hook)) { | ||
return hook.value(...ast.typeParameters.map(go)) | ||
return hook.value(...ast.typeParameters.map((tp) => go(tp, path))) | ||
} | ||
throw new Error(`cannot build a Pretty for a declaration without annotations (${AST.format(ast)})`) | ||
throw new Error(getPrettyErrorMessage(`a declaration without annotations (${ast})`, path)) | ||
}, | ||
@@ -101,3 +102,3 @@ "VoidKeyword": getMatcher(() => "void(0)"), | ||
"Enums": stringify, | ||
"Tuple": (ast, go) => { | ||
"TupleType": (ast, go, path) => { | ||
const hook = getHook(ast) | ||
@@ -107,4 +108,4 @@ if (Option.isSome(hook)) { | ||
} | ||
const elements = ast.elements.map((e) => go(e.type)) | ||
const rest = Option.map(ast.rest, ReadonlyArray.map(go)) | ||
const elements = ast.elements.map((e, i) => go(e.type, path.concat(i))) | ||
const rest = ast.rest.map((ast) => go(ast, path)) | ||
return (input: ReadonlyArray<unknown>) => { | ||
@@ -128,4 +129,4 @@ const output: Array<string> = [] | ||
// --------------------------------------------- | ||
if (Option.isSome(rest)) { | ||
const [head, ...tail] = rest.value | ||
if (Arr.isNonEmptyReadonlyArray(rest)) { | ||
const [head, ...tail] = rest | ||
for (; i < input.length - tail.length; i++) { | ||
@@ -146,3 +147,3 @@ output.push(head(input[i])) | ||
}, | ||
"TypeLiteral": (ast, go) => { | ||
"TypeLiteral": (ast, go, path) => { | ||
const hook = getHook(ast) | ||
@@ -152,4 +153,4 @@ if (Option.isSome(hook)) { | ||
} | ||
const propertySignaturesTypes = ast.propertySignatures.map((f) => go(f.type)) | ||
const indexSignatureTypes = ast.indexSignatures.map((is) => go(is.type)) | ||
const propertySignaturesTypes = ast.propertySignatures.map((ps) => go(ps.type, path.concat(ps.name))) | ||
const indexSignatureTypes = ast.indexSignatures.map((is) => go(is.type, path)) | ||
const expectedKeys: any = {} | ||
@@ -171,3 +172,3 @@ for (let i = 0; i < propertySignaturesTypes.length; i++) { | ||
output.push( | ||
`${getPrettyPropertyKey(name)}: ${propertySignaturesTypes[i](input[name])}` | ||
`${util_.formatPropertyKey(name)}: ${propertySignaturesTypes[i](input[name])}` | ||
) | ||
@@ -181,3 +182,3 @@ } | ||
const type = indexSignatureTypes[i] | ||
const keys = Internal.getKeysForIndexSignature(input, ast.indexSignatures[i].parameter) | ||
const keys = util_.getKeysForIndexSignature(input, ast.indexSignatures[i].parameter) | ||
for (const key of keys) { | ||
@@ -187,3 +188,3 @@ if (Object.prototype.hasOwnProperty.call(expectedKeys, key)) { | ||
} | ||
output.push(`${getPrettyPropertyKey(key)}: ${type(input[key])}`) | ||
output.push(`${util_.formatPropertyKey(key)}: ${type(input[key])}`) | ||
} | ||
@@ -193,6 +194,6 @@ } | ||
return ReadonlyArray.isNonEmptyReadonlyArray(output) ? "{ " + output.join(", ") + " }" : "{}" | ||
return Arr.isNonEmptyReadonlyArray(output) ? "{ " + output.join(", ") + " }" : "{}" | ||
} | ||
}, | ||
"Union": (ast, go) => { | ||
"Union": (ast, go, path) => { | ||
const hook = getHook(ast) | ||
@@ -202,3 +203,3 @@ if (Option.isSome(hook)) { | ||
} | ||
const types = ast.types.map((ast) => [Parser.is(InternalSchema.make(ast)), go(ast)] as const) | ||
const types = ast.types.map((ast) => [ParseResult.is({ ast } as any), go(ast, path)] as const) | ||
return (a) => { | ||
@@ -209,6 +210,6 @@ const index = types.findIndex(([is]) => is(a)) | ||
}, | ||
"Suspend": (ast, go) => { | ||
"Suspend": (ast, go, path) => { | ||
return Option.match(getHook(ast), { | ||
onNone: () => { | ||
const get = Internal.memoizeThunk(() => go(ast.f())) | ||
const get = util_.memoizeThunk(() => go(ast.f(), path)) | ||
return (a) => get()(a) | ||
@@ -219,11 +220,11 @@ }, | ||
}, | ||
"Refinement": (ast, go) => { | ||
"Refinement": (ast, go, path) => { | ||
return Option.match(getHook(ast), { | ||
onNone: () => go(ast.from), | ||
onNone: () => go(ast.from, path), | ||
onSome: (handler) => handler() | ||
}) | ||
}, | ||
"Transform": (ast, go) => { | ||
"Transformation": (ast, go, path) => { | ||
return Option.match(getHook(ast), { | ||
onNone: () => go(ast.to), | ||
onNone: () => go(ast.to, path), | ||
onSome: (handler) => handler() | ||
@@ -235,4 +236,1 @@ }) | ||
const compile = AST.getCompiler(match) | ||
const getPrettyPropertyKey = (name: PropertyKey): string => | ||
typeof name === "string" ? JSON.stringify(name) : String(name) |
@@ -10,3 +10,3 @@ /** | ||
import { globalValue } from "effect/GlobalValue" | ||
import * as Internal from "./internal/serializable.js" | ||
import * as serializable_ from "./internal/serializable.js" | ||
import type * as ParseResult from "./ParseResult.js" | ||
@@ -19,3 +19,3 @@ import * as Schema from "./Schema.js" | ||
*/ | ||
export const symbol: unique symbol = Internal.symbol as any | ||
export const symbol: unique symbol = serializable_.symbol as any | ||
@@ -51,3 +51,3 @@ /** | ||
*/ | ||
export const symbolResult: unique symbol = Internal.symbolResult as any | ||
export const symbolResult: unique symbol = serializable_.symbolResult as any | ||
@@ -58,6 +58,6 @@ /** | ||
*/ | ||
export interface WithResult<R, IE, E, IA, A> { | ||
export interface WithResult<SuccessA, SuccessI, FailureA, FailureI, SuccessAndFailureR> { | ||
readonly [symbolResult]: { | ||
readonly Failure: Schema.Schema<E, IE, R> | ||
readonly Success: Schema.Schema<A, IA, R> | ||
readonly Success: Schema.Schema<SuccessA, SuccessI, SuccessAndFailureR> | ||
readonly Failure: Schema.Schema<FailureA, FailureI, SuccessAndFailureR> | ||
} | ||
@@ -74,3 +74,3 @@ } | ||
*/ | ||
export type Context<T> = T extends WithResult<infer R, infer _IE, infer _E, infer _IA, infer _A> ? R : never | ||
export type Context<T> = T extends WithResult<infer _A, infer _I, infer _E, infer _EI, infer R> ? R : never | ||
} | ||
@@ -82,5 +82,5 @@ | ||
*/ | ||
export const failureSchema = <R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
): Schema.Schema<E, IE, R> => self[symbolResult].Failure | ||
export const failureSchema = <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
): Schema.Schema<E, EI, R> => self[symbolResult].Failure | ||
@@ -91,5 +91,5 @@ /** | ||
*/ | ||
export const successSchema = <R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
): Schema.Schema<A, IA, R> => self[symbolResult].Success | ||
export const successSchema = <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
): Schema.Schema<A, I, R> => self[symbolResult].Success | ||
@@ -105,12 +105,12 @@ const exitSchemaCache = globalValue( | ||
*/ | ||
export const exitSchema = <R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
): Schema.Schema<Exit.Exit<A, E>, Schema.ExitFrom<IA, IE>, R> => { | ||
export const exitSchema = <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
): Schema.Schema<Exit.Exit<A, E>, Schema.ExitEncoded<I, EI>, R> => { | ||
const proto = Object.getPrototypeOf(self) | ||
if (!(symbolResult in proto)) { | ||
return Schema.exit(failureSchema(self), successSchema(self)) | ||
return Schema.Exit({ failure: failureSchema(self), success: successSchema(self) }) | ||
} | ||
let schema = exitSchemaCache.get(proto) | ||
if (schema === undefined) { | ||
schema = Schema.exit(failureSchema(self), successSchema(self)) | ||
schema = Schema.Exit({ failure: failureSchema(self), success: successSchema(self) }) | ||
exitSchemaCache.set(proto, schema) | ||
@@ -125,4 +125,14 @@ } | ||
*/ | ||
export interface SerializableWithResult<R, IS, S, RR, IE, E, IA, A> | ||
extends Serializable<S, IS, R>, WithResult<RR, IE, E, IA, A> | ||
export interface SerializableWithResult< | ||
Self, | ||
FieldsI, | ||
FieldsR, | ||
SuccessA, | ||
SuccessI, | ||
FailureA, | ||
FailureI, | ||
SuccessAndFailureR | ||
> extends | ||
Serializable<Self, FieldsI, FieldsR>, | ||
WithResult<SuccessA, SuccessI, FailureA, FailureI, SuccessAndFailureR> | ||
{} | ||
@@ -139,3 +149,3 @@ | ||
export type Context<T> = T extends | ||
SerializableWithResult<infer R, infer _IS, infer _S, infer RR, infer _IE, infer _E, infer _IA, infer _A> ? R | RR | ||
SerializableWithResult<infer _S, infer _SI, infer SR, infer _A, infer _AI, infer _E, infer _EI, infer RR> ? SR | RR | ||
: never | ||
@@ -178,15 +188,15 @@ } | ||
value: E | ||
): <R, IE, IA, A>(self: WithResult<R, IE, E, IA, A>) => Effect.Effect<IE, ParseResult.ParseError, R> | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
): <A, I, EI, R>(self: WithResult<A, I, E, EI, R>) => Effect.Effect<EI, ParseResult.ParseError, R> | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: E | ||
): Effect.Effect<IE, ParseResult.ParseError, R> | ||
): Effect.Effect<EI, ParseResult.ParseError, R> | ||
} = dual< | ||
<E>(value: E) => <R, IE, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
) => Effect.Effect<IE, ParseResult.ParseError, R>, | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
<E>(value: E) => <A, I, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
) => Effect.Effect<EI, ParseResult.ParseError, R>, | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: E | ||
) => Effect.Effect<IE, ParseResult.ParseError, R> | ||
) => Effect.Effect<EI, ParseResult.ParseError, R> | ||
>(2, (self, value) => Schema.encode(self[symbolResult].Failure)(value)) | ||
@@ -199,15 +209,15 @@ | ||
export const deserializeFailure: { | ||
(value: unknown): <R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
(value: unknown): <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
) => Effect.Effect<E, ParseResult.ParseError, R> | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: unknown | ||
): Effect.Effect<E, ParseResult.ParseError, R> | ||
} = dual< | ||
(value: unknown) => <R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
(value: unknown) => <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
) => Effect.Effect<E, ParseResult.ParseError, R>, | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: unknown | ||
@@ -224,15 +234,15 @@ ) => Effect.Effect<E, ParseResult.ParseError, R> | ||
value: A | ||
): <R, IE, E, IA>(self: WithResult<R, IE, E, IA, A>) => Effect.Effect<IA, ParseResult.ParseError, R> | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
): <I, E, EI, R>(self: WithResult<A, I, E, EI, R>) => Effect.Effect<I, ParseResult.ParseError, R> | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: A | ||
): Effect.Effect<IA, ParseResult.ParseError, R> | ||
): Effect.Effect<I, ParseResult.ParseError, R> | ||
} = dual< | ||
<A>(value: A) => <R, IE, E, IA>( | ||
self: WithResult<R, IE, E, IA, A> | ||
) => Effect.Effect<IA, ParseResult.ParseError, R>, | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
<A>(value: A) => <I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
) => Effect.Effect<I, ParseResult.ParseError, R>, | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: A | ||
) => Effect.Effect<IA, ParseResult.ParseError, R> | ||
) => Effect.Effect<I, ParseResult.ParseError, R> | ||
>(2, (self, value) => Schema.encode(self[symbolResult].Success)(value)) | ||
@@ -247,15 +257,15 @@ | ||
value: unknown | ||
): <R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
): <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
) => Effect.Effect<A, ParseResult.ParseError, R> | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: unknown | ||
): Effect.Effect<A, ParseResult.ParseError, R> | ||
} = dual< | ||
(value: unknown) => <R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
(value: unknown) => <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
) => Effect.Effect<A, ParseResult.ParseError, R>, | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: unknown | ||
@@ -270,20 +280,15 @@ ) => Effect.Effect<A, ParseResult.ParseError, R> | ||
export const serializeExit: { | ||
<E, A>( | ||
<A, E>( | ||
value: Exit.Exit<A, E> | ||
): <R, IE, IA>( | ||
self: WithResult<R, IE, E, IA, A> | ||
) => Effect.Effect<Schema.ExitFrom<IA, IE>, ParseResult.ParseError, R> | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
): <I, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
) => Effect.Effect<Schema.ExitEncoded<I, EI>, ParseResult.ParseError, R> | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: Exit.Exit<A, E> | ||
): Effect.Effect<Schema.ExitFrom<IA, IE>, ParseResult.ParseError, R> | ||
} = dual< | ||
<E, A>(value: Exit.Exit<A, E>) => <R, IE, IA>( | ||
self: WithResult<R, IE, E, IA, A> | ||
) => Effect.Effect<Schema.ExitFrom<IA, IE>, ParseResult.ParseError, R>, | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
value: Exit.Exit<A, E> | ||
) => Effect.Effect<Schema.ExitFrom<IA, IE>, ParseResult.ParseError, R> | ||
>(2, (self, value) => Schema.encode(exitSchema(self))(value)) | ||
): Effect.Effect<Schema.ExitEncoded<I, EI>, ParseResult.ParseError, R> | ||
} = dual(2, <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: Exit.Exit<A, E> | ||
): Effect.Effect<Schema.ExitEncoded<I, EI>, ParseResult.ParseError, R> => Schema.encode(exitSchema(self))(value)) | ||
@@ -295,17 +300,12 @@ /** | ||
export const deserializeExit: { | ||
(value: unknown): <R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
(value: unknown): <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R> | ||
) => Effect.Effect<Exit.Exit<A, E>, ParseResult.ParseError, R> | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
<A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: unknown | ||
): Effect.Effect<Exit.Exit<A, E>, ParseResult.ParseError, R> | ||
} = dual< | ||
(value: unknown) => <R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A> | ||
) => Effect.Effect<Exit.Exit<A, E>, ParseResult.ParseError, R>, | ||
<R, IE, E, IA, A>( | ||
self: WithResult<R, IE, E, IA, A>, | ||
value: unknown | ||
) => Effect.Effect<Exit.Exit<A, E>, ParseResult.ParseError, R> | ||
>(2, (self, value) => Schema.decodeUnknown(exitSchema(self))(value)) | ||
} = dual(2, <A, I, E, EI, R>( | ||
self: WithResult<A, I, E, EI, R>, | ||
value: unknown | ||
): Effect.Effect<Exit.Exit<A, E>, ParseResult.ParseError, R> => Schema.decodeUnknown(exitSchema(self))(value)) |
@@ -5,5 +5,8 @@ /** | ||
import type * as Cause from "effect/Cause" | ||
import * as Effect from "effect/Effect" | ||
import * as Option from "effect/Option" | ||
import type { NonEmptyReadonlyArray } from "effect/ReadonlyArray" | ||
import * as Predicate from "effect/Predicate" | ||
import * as AST from "./AST.js" | ||
import * as util_ from "./internal/util.js" | ||
import type * as ParseResult from "./ParseResult.js" | ||
@@ -27,6 +30,4 @@ | ||
*/ | ||
export const formatIssues = (issues: NonEmptyReadonlyArray<ParseResult.ParseIssue>): string => { | ||
const forest = issues.map(go) | ||
return drawTree(forest.length === 1 ? forest[0] : make(`error(s) found`, issues.map(go))) | ||
} | ||
export const formatIssue = (issue: ParseResult.ParseIssue): Effect.Effect<string> => | ||
Effect.map(go(issue), (tree) => drawTree(tree)) | ||
@@ -37,3 +38,3 @@ /** | ||
*/ | ||
export const formatIssue = (issue: ParseResult.ParseIssue): string => formatIssues([issue]) | ||
export const formatIssueSync = (issue: ParseResult.ParseIssue): string => Effect.runSync(formatIssue(issue)) | ||
@@ -44,4 +45,10 @@ /** | ||
*/ | ||
export const formatError = (error: ParseResult.ParseError): string => formatIssue(error.error) | ||
export const formatError = (error: ParseResult.ParseError): Effect.Effect<string> => formatIssue(error.error) | ||
/** | ||
* @category formatting | ||
* @since 1.0.0 | ||
*/ | ||
export const formatErrorSync = (error: ParseResult.ParseError): string => formatIssueSync(error.error) | ||
const drawTree = (tree: Tree<string>): string => tree.value + draw("\n", tree.forest) | ||
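// A hedged usage sketch of the Effect-returning formatters introduced above. The module path | ||
// "@effect/schema/TreeFormatter" and the `Schema.Struct` / `Schema.Number` / | ||
// `Schema.decodeUnknownEither` names are assumed from this snapshot's public API and are not | ||
// confirmed by this diff. | ||
import * as Effect from "effect/Effect" | ||
import * as Either from "effect/Either" | ||
import * as Schema from "@effect/schema/Schema" | ||
import * as TreeFormatter from "@effect/schema/TreeFormatter" | ||
const decoded = Schema.decodeUnknownEither(Schema.Struct({ a: Schema.Number }))({ a: "x" }) | ||
if (Either.isLeft(decoded)) { | ||
  // synchronous variant: runs the formatting effect with Effect.runSync, as defined above | ||
  console.log(TreeFormatter.formatErrorSync(decoded.left)) | ||
  // Effect variant: safe when message annotations are themselves effectful | ||
  Effect.runPromise(TreeFormatter.formatError(decoded.left)).then(console.log) | ||
} | ||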
@@ -62,10 +69,10 @@ | ||
const formatTransformationKind = (kind: ParseResult.Transform["kind"]): string => { | ||
const formatTransformationKind = (kind: ParseResult.Transformation["kind"]): string => { | ||
switch (kind) { | ||
case "From": | ||
return "From side transformation failure" | ||
case "Encoded": | ||
return "Encoded side transformation failure" | ||
case "Transformation": | ||
return "Transformation process failure" | ||
case "To": | ||
return "To side transformation failure" | ||
case "Type": | ||
return "Type side transformation failure" | ||
} | ||
@@ -83,16 +90,75 @@ } | ||
const getInnerMessage = ( | ||
issue: ParseResult.ParseIssue | ||
): Effect.Effect<string, Cause.NoSuchElementException> => { | ||
switch (issue._tag) { | ||
case "Refinement": { | ||
if (issue.kind === "From") { | ||
return getMessage(issue.error) | ||
} | ||
break | ||
} | ||
case "Transformation": { | ||
return getMessage(issue.error) | ||
} | ||
} | ||
return Option.none() | ||
} | ||
const getCurrentMessage: ( | ||
issue: ParseResult.ParseIssue | ||
) => Effect.Effect<{ message: string; override: boolean }, Cause.NoSuchElementException> = ( | ||
issue: ParseResult.ParseIssue | ||
) => | ||
AST.getMessageAnnotation(issue.ast).pipe(Effect.flatMap((annotation) => { | ||
const out = annotation(issue) | ||
return Predicate.isString(out) | ||
? Effect.succeed({ message: out, override: false }) | ||
: Effect.isEffect(out) | ||
? Effect.map(out, (message) => ({ message, override: false })) | ||
: Predicate.isString(out.message) | ||
? Effect.succeed({ message: out.message, override: out.override }) | ||
: Effect.map(out.message, (message) => ({ message, override: out.override })) | ||
})) | ||
/** @internal */ | ||
export const getMessage = (ast: AST.AST, actual: unknown): Option.Option<string> => { | ||
return AST.getMessageAnnotation(ast).pipe( | ||
Option.map((annotation) => annotation(actual)) | ||
export const getMessage: ( | ||
issue: ParseResult.ParseIssue | ||
) => Effect.Effect<string, Cause.NoSuchElementException> = (issue: ParseResult.ParseIssue) => { | ||
const current = getCurrentMessage(issue) | ||
return getInnerMessage(issue).pipe( | ||
Effect.flatMap((inner) => Effect.map(current, (current) => current.override ? current.message : inner)), | ||
Effect.catchAll(() => | ||
Effect.flatMap(current, (current) => { | ||
if ( | ||
!current.override && ( | ||
(issue._tag === "Refinement" && issue.kind !== "Predicate") || | ||
(issue._tag === "Transformation" && issue.kind !== "Transformation") | ||
) | ||
) { | ||
return Option.none() | ||
} | ||
return Effect.succeed(current.message) | ||
}) | ||
) | ||
) | ||
} | ||
const getParseIssueTitleAnnotation = (issue: ParseResult.ParseIssue): Option.Option<string> => | ||
Option.filterMap( | ||
AST.getParseIssueTitleAnnotation(issue.ast), | ||
(annotation) => Option.fromNullable(annotation(issue)) | ||
) | ||
/** @internal */ | ||
export const formatTypeMessage = (e: ParseResult.Type): string => | ||
getMessage(e.ast, e.actual).pipe( | ||
Option.orElse(() => e.message), | ||
Option.getOrElse(() => `Expected ${AST.format(e.ast, true)}, actual ${AST.formatUnknown(e.actual)}`) | ||
export const formatTypeMessage = (e: ParseResult.Type): Effect.Effect<string> => | ||
getMessage(e).pipe( | ||
Effect.orElse(() => getParseIssueTitleAnnotation(e)), | ||
Effect.orElse(() => e.message), | ||
Effect.catchAll(() => Effect.succeed(`Expected ${e.ast.toString(true)}, actual ${util_.formatUnknown(e.actual)}`)) | ||
) | ||
const getParseIssueTitle = (issue: ParseResult.ParseIssue): string => | ||
Option.getOrElse(getParseIssueTitleAnnotation(issue), () => String(issue.ast)) | ||
/** @internal */ | ||
@@ -102,100 +168,69 @@ export const formatForbiddenMessage = (e: ParseResult.Forbidden): string => | ||
const getParseIsssueMessage = ( | ||
issue: ParseResult.ParseIssue, | ||
orElse: () => Option.Option<string> | ||
): Option.Option<string> => { | ||
switch (issue._tag) { | ||
case "Refinement": | ||
return Option.orElse(getRefinementMessage(issue, issue.actual), orElse) | ||
case "Transform": | ||
return Option.orElse(getTransformMessage(issue, issue.actual), orElse) | ||
case "Tuple": | ||
case "TypeLiteral": | ||
case "Union": | ||
case "Type": | ||
return Option.orElse(getMessage(issue.ast, issue.actual), orElse) | ||
} | ||
return orElse() | ||
} | ||
const getTree = (issue: ParseResult.ParseIssue, onFailure: () => Effect.Effect<Tree<string>>) => | ||
Effect.matchEffect(getMessage(issue), { | ||
onFailure, | ||
onSuccess: (message) => Effect.succeed(make(message)) | ||
}) | ||
/** @internal */ | ||
export const getRefinementMessage = (e: ParseResult.Refinement, actual: unknown): Option.Option<string> => { | ||
if (e.kind === "From") { | ||
return getParseIsssueMessage(e.error, () => getMessage(e.ast, actual)) | ||
} | ||
return getMessage(e.ast, actual) | ||
} | ||
/** @internal */ | ||
export const getTransformMessage = (e: ParseResult.Transform, actual: unknown): Option.Option<string> => { | ||
return getParseIsssueMessage(e.error, () => getMessage(e.ast, actual)) | ||
} | ||
const go = (e: ParseResult.ParseIssue | ParseResult.Missing | ParseResult.Unexpected): Tree<string> => { | ||
const go = (e: ParseResult.ParseIssue | ParseResult.Missing | ParseResult.Unexpected): Effect.Effect<Tree<string>> => { | ||
switch (e._tag) { | ||
case "Type": | ||
return make(formatTypeMessage(e)) | ||
return Effect.map(formatTypeMessage(e), make) | ||
case "Forbidden": | ||
return make(AST.format(e.ast), [make(formatForbiddenMessage(e))]) | ||
return Effect.succeed(make(getParseIssueTitle(e), [make(formatForbiddenMessage(e))])) | ||
case "Unexpected": | ||
return make(`is unexpected, expected ${AST.format(e.ast, true)}`) | ||
return Effect.succeed(make(`is unexpected, expected ${e.ast.toString(true)}`)) | ||
case "Missing": | ||
return make("is missing") | ||
return Effect.succeed(make("is missing")) | ||
case "Union": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => | ||
make( | ||
AST.format(e.ast), | ||
e.errors.map((e) => { | ||
switch (e._tag) { | ||
case "Member": | ||
return make(`Union member`, [go(e.error)]) | ||
default: | ||
return go(e) | ||
} | ||
}) | ||
return getTree(e, () => | ||
Effect.map( | ||
Effect.forEach(e.errors, (e) => { | ||
switch (e._tag) { | ||
case "Member": | ||
return Effect.map(go(e.error), (tree) => make(`Union member`, [tree])) | ||
default: | ||
return go(e) | ||
} | ||
}), | ||
(forest) => make(getParseIssueTitle(e), forest) | ||
)) | ||
case "TupleType": | ||
return getTree(e, () => | ||
Effect.map( | ||
Effect.forEach( | ||
e.errors, | ||
(index) => Effect.map(go(index.error), (tree) => make(`[${util_.formatPropertyKey(index.index)}]`, [tree])) | ||
), | ||
onSome: make | ||
}) | ||
case "Tuple": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => | ||
make( | ||
AST.format(e.ast), | ||
e.errors.map((index) => make(`[${index.index}]`, [go(index.error)])) | ||
), | ||
onSome: make | ||
}) | ||
(forest) => make(getParseIssueTitle(e), forest) | ||
)) | ||
case "TypeLiteral": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => | ||
make( | ||
AST.format(e.ast), | ||
e.errors.map((key) => make(`[${AST.formatUnknown(key.key)}]`, [go(key.error)])) | ||
), | ||
onSome: make | ||
}) | ||
case "Transform": | ||
return Option.match(getTransformMessage(e, e.actual), { | ||
onNone: () => make(AST.format(e.ast), [make(formatTransformationKind(e.kind), [go(e.error)])]), | ||
onSome: make | ||
}) | ||
return getTree(e, () => | ||
Effect.map( | ||
Effect.forEach(e.errors, (key) => | ||
Effect.map(go(key.error), (tree) => make(`[${util_.formatPropertyKey(key.key)}]`, [tree]))), | ||
(forest) => | ||
make(getParseIssueTitle(e), forest) | ||
)) | ||
case "Transformation": | ||
return getTree(e, () => | ||
Effect.map( | ||
go(e.error), | ||
(tree) => make(getParseIssueTitle(e), [make(formatTransformationKind(e.kind), [tree])]) | ||
)) | ||
case "Refinement": | ||
return Option.match(getRefinementMessage(e, e.actual), { | ||
onNone: () => | ||
make(AST.format(e.ast), [ | ||
make(formatRefinementKind(e.kind), [go(e.error)]) | ||
]), | ||
onSome: make | ||
}) | ||
return getTree( | ||
e, | ||
() => | ||
Effect.map(go(e.error), (tree) => make(getParseIssueTitle(e), [make(formatRefinementKind(e.kind), [tree])])) | ||
) | ||
case "Declaration": | ||
return Option.match(getMessage(e.ast, e.actual), { | ||
onNone: () => { | ||
const error = e.error | ||
const shouldSkipDefaultMessage = error._tag === "Type" && error.ast === e.ast | ||
return shouldSkipDefaultMessage ? go(error) : make(AST.format(e.ast), [go(e.error)]) | ||
}, | ||
onSome: make | ||
return getTree(e, () => { | ||
const error = e.error | ||
const shouldSkipDefaultMessage = error._tag === "Type" && error.ast === e.ast | ||
return shouldSkipDefaultMessage | ||
? go(error) | ||
: Effect.map(go(error), (tree) => make(getParseIssueTitle(e), [tree])) | ||
}) | ||
} | ||
} |
+ Added fast-check@^3.17.2