@ronin/syntax
Advanced tools
Comparing version 0.1.9 to 0.1.10-leo-ron-1097-experimental-46
@@ -1,102 +0,2 @@ | ||
import { AsyncLocalStorage } from 'node:async_hooks'; | ||
import { Q as Query } from './index.d-CMkIzj0q.js'; | ||
declare const setProperty: <T extends object, K>(obj: T, path: string, value: K) => T; | ||
/** | ||
* Gets the property value of an object based on the given path segments | ||
* of the property. | ||
* | ||
* @param obj - The object to get the property value from. | ||
* @param pathSegments - An array of property keys leading up to the final | ||
* property at the end. | ||
* | ||
* @returns The property value at the specified path or `undefined` if the path | ||
* does not exist. | ||
* | ||
* @example | ||
* const exampleObject = \{ | ||
* user: \{ | ||
* name: \{ | ||
* first: 'John', | ||
* last: 'Doe' | ||
* \}, | ||
* age: 30 | ||
* \} | ||
* \}; | ||
* console.log(getProperty(exampleObject, ['user', 'name', 'first'])); // Output: 'John' | ||
* console.log(getProperty(exampleObject, ['user', 'age'])); // Output: 30 | ||
* console.log(getProperty(exampleObject, ['user', 'non', 'existing'])); // Output: undefined | ||
*/ | ||
declare const getProperty: (obj: object, path: string) => unknown; | ||
/** | ||
* Utility type to convert a tuple of promises into a tuple of their resolved types. | ||
*/ | ||
type PromiseTuple<T extends [Promise<any>, ...Array<Promise<any>>] | Array<Promise<any>>> = { | ||
[P in keyof T]: Awaited<T[P]>; | ||
}; | ||
/** | ||
* Utility type that represents a particular query and any options that should | ||
* be used when executing it. | ||
*/ | ||
interface QueryItem { | ||
query: Query; | ||
options?: Record<string, unknown>; | ||
} | ||
/** | ||
* A list of options that may be passed for every individual query. The type is meant to | ||
* represent a generic list of options without specific properties. The specific | ||
* properties are defined by the client that re-exports the syntax package. | ||
*/ | ||
type QueryOptions = Record<string, unknown> & { | ||
asyncContext?: AsyncLocalStorage<any>; | ||
}; | ||
/** | ||
* A utility function that creates a Proxy object to handle dynamic property | ||
* access and function calls. It is used to create a syntax that allows for | ||
* dynamic query generation. | ||
* | ||
* @param queryType - The type of the query. This will be used as the key in | ||
* the generated query object. | ||
* @param queryHandler - A function that handles the execution of the query. | ||
* | ||
* @returns A Proxy object that intercepts property access and function | ||
* calls to generate and execute queries. | ||
* | ||
* ### Usage | ||
* ```typescript | ||
* const proxy = getSyntaxProxy('get', async (query) => { | ||
* // Execute the query and return the result | ||
* }); | ||
* | ||
* const result = await get.account(); | ||
* | ||
* const result = await get.account.with.email('mike@gmail.com'); | ||
* ``` | ||
*/ | ||
declare const getSyntaxProxy: (queryType: string, queryHandler: (query: Query, options?: Record<string, unknown>) => Promise<any> | any) => any; | ||
/** | ||
* Executes a batch of operations and handles their results. It is used to | ||
* execute multiple queries at once and return their results at once. | ||
* | ||
* @param operations - A function that returns an array of Promises. Each | ||
* Promise should resolve with a Query object. | ||
* @param queriesHandler - A function that handles the execution of the queries. | ||
* This function is expected to receive an array of Query objects and return a | ||
* Promise that resolves with the results of the queries. | ||
* | ||
* @returns A Promise that resolves with a tuple of the results of the queries. | ||
* | ||
* ### Usage | ||
* ```typescript | ||
* const results = await batch(() => [ | ||
* get.accounts(), | ||
* get.account.with.email('mike@gmail.com') | ||
* ], async (queries) => { | ||
* // Execute the queries and return their results | ||
* }); | ||
* ``` | ||
*/ | ||
declare const getBatchProxy: <T extends [Promise<any> | any, ...Array<Promise<any> | any>] | (Promise<any> | any)[]>(operations: () => T, options: QueryOptions | undefined, queriesHandler: (queries: Array<QueryItem>, options?: QueryOptions) => Promise<any> | any) => Promise<PromiseTuple<T>> | T; | ||
export { type PromiseTuple, type QueryItem, getBatchProxy, getProperty, getSyntaxProxy, setProperty }; | ||
import 'node:async_hooks'; | ||
export { c as PromiseTuple, S as SyntaxItem, d as getBatchProxy, e as getProperty, g as getSyntaxProxy, s as setProperty } from './queries-CWtJe_zj.js'; |
@@ -1,285 +0,7 @@ | ||
import "./chunk-QGM4M3NI.js"; | ||
// src/utils/index.ts | ||
/**
 * Splits an object path like `user.name.first` into its individual property
 * segments. A dot escaped as `\.` is treated as part of a segment name rather
 * than as a separator, and segments are also split at `[` characters.
 *
 * @param path - The path string to split.
 *
 * @returns An array of property-key segments (blank segments are dropped).
 */
var getPathSegments = (path) => {
  // Temporarily mask escaped dots with a zero-width space so the split below
  // only breaks on real separators, then restore them as plain dots.
  const masked = path.replace(/\\\./g, "\u200B");
  return masked
    .split(/[[.]/g)
    .map((segment) => segment.replace(/\u200B/g, "."))
    .filter((segment) => !!segment.trim())
    .map((segment) => segment.split("\\.").join("."));
};
/**
 * Assigns `value` at the location identified by `pathSegments` inside `obj`,
 * creating intermediate plain objects along the way.
 *
 * @param obj - The object to mutate in place.
 * @param pathSegments - Property keys leading up to the final property.
 * @param value - The value to assign. When a function is given, it is called
 * with the current value and its return value is assigned instead.
 */
var setPropertyViaPathSegments = (obj, pathSegments, value) => {
  let current = obj;
  for (let i = 0; i < pathSegments.length; i++) {
    const key = pathSegments[i];
    const isLastKey = i === pathSegments.length - 1;
    if (isLastKey) {
      current[key] = typeof value === "function" ? value(current[key]) : value;
    } else {
      // Replace missing, inherited, `null`, or non-object intermediates with a
      // fresh object. The explicit `null` check matters because
      // `typeof null === "object"`: without it, a `null` intermediate would
      // survive this guard and the next iteration would throw when indexing it.
      if (
        !Object.prototype.hasOwnProperty.call(current, key) ||
        current[key] === null ||
        typeof current[key] !== "object"
      ) {
        current[key] = {};
      }
      current = current[key];
    }
  }
};
/**
 * Sets the property value of an object at the given path and returns the
 * (mutated) object.
 *
 * @param obj - The object to set the property value on.
 * @param path - The dot-separated path of the property (dots escaped as `\.`
 * are treated literally).
 * @param value - The value to set, or an updater function receiving the
 * current value.
 *
 * @returns The same `obj`, mutated in place.
 */
var setProperty = (obj, path, value) => {
  setPropertyViaPathSegments(obj, getPathSegments(path), value);
  return obj;
};
/**
 * Gets the property value of an object based on the given path.
 *
 * @param obj - The object to get the property value from.
 * @param path - The dot-separated path of the property.
 *
 * @returns The property value at the specified path, or `undefined` when any
 * segment along the way resolves to `null` or `undefined`.
 */
var getProperty = (obj, path) => {
  let current = obj;
  for (const key of getPathSegments(path)) {
    const next = current[key];
    if (next === null || next === void 0) return void 0;
    current = next;
  }
  return current;
};
// node_modules/@ronin/compiler/dist/index.js | ||
// Marker strings that the RONIN compiler recognizes inside query objects.
// Each symbol tags a value so it can be treated specially during compilation.
var QUERY_SYMBOLS = {
  // Represents a sub query.
  QUERY: "__RONIN_QUERY",
  // Represents an expression that should be evaluated.
  EXPRESSION: "__RONIN_EXPRESSION",
  // Represents the value of a field in the model.
  FIELD: "__RONIN_FIELD_",
  // Represents the value of a field in the model of a parent query.
  FIELD_PARENT: "__RONIN_FIELD_PARENT_",
  // Represents the old value of a field in the parent model. Used for triggers.
  FIELD_PARENT_OLD: "__RONIN_FIELD_PARENT_OLD_",
  // Represents the new value of a field in the parent model. Used for triggers.
  FIELD_PARENT_NEW: "__RONIN_FIELD_PARENT_NEW_",
  // Represents a value provided to a query preset.
  VALUE: "__RONIN_VALUE"
};
// Matches a field symbol followed by a (possibly dot-separated) field path.
var RONIN_MODEL_FIELD_REGEX = new RegExp(
  `${QUERY_SYMBOLS.FIELD}[_a-zA-Z0-9.]+`,
  "g"
);
// SQLite expression producing the current UTC timestamp in ISO-8601 format
// with millisecond precision and a trailing "Z".
var CURRENT_TIME_EXPRESSION = {
  [QUERY_SYMBOLS.EXPRESSION]: `strftime('%Y-%m-%dT%H:%M:%f', 'now') || 'Z'`
};
/**
 * Returns a shallow copy of `obj` that excludes the given property keys.
 *
 * @param obj - The source object.
 * @param properties - The keys to leave out of the copy.
 *
 * @returns A new object containing every entry of `obj` whose key is not
 * listed in `properties`.
 */
var omit = (obj, properties) => {
  const excluded = new Set(properties);
  const kept = Object.entries(obj).filter(([key]) => !excluded.has(key));
  return Object.fromEntries(kept);
};
// English conjunctions, articles, and prepositions — word classes that
// conventionally stay lower-case inside title-cased strings.
var conjunctions = [
  "for",
  "and",
  "nor",
  "but",
  "or",
  "yet",
  "so"
];
var articles = [
  "a",
  "an",
  "the"
];
var prepositions = [
  "aboard",
  "about",
  "above",
  "across",
  "after",
  "against",
  "along",
  "amid",
  "among",
  "anti",
  "around",
  "as",
  "at",
  "before",
  "behind",
  "below",
  "beneath",
  "beside",
  "besides",
  "between",
  "beyond",
  "but",
  "by",
  "concerning",
  "considering",
  "despite",
  "down",
  "during",
  "except",
  "excepting",
  "excluding",
  "following",
  "for",
  "from",
  "in",
  "inside",
  "into",
  "like",
  "minus",
  "near",
  "of",
  "off",
  "on",
  "onto",
  "opposite",
  "over",
  "past",
  "per",
  "plus",
  "regarding",
  "round",
  "save",
  "since",
  "than",
  "through",
  "to",
  "toward",
  "towards",
  "under",
  "underneath",
  "unlike",
  "until",
  "up",
  "upon",
  "versus",
  "via",
  "with",
  "within",
  "without"
];
// Union of all words that should remain lower-case when title-casing.
var lowerCase = /* @__PURE__ */ new Set([
  ...conjunctions,
  ...articles,
  ...prepositions
]);
// Matches a single "word" character: anything that is not whitespace, an
// apostrophe, or sentence punctuation.
var word = `[^\\s'\u2019\\(\\)!?;:"-]`;
// Tokenizes a string into words together with any leading separator/boundary
// characters. NOTE(review): presumably consumed by a title-casing helper that
// is not visible in this chunk — confirm against the full bundle.
var regex = new RegExp(`(?:(?:(\\s?(?:^|[.\\(\\)!?;:"-])\\s*)(${word}))|(${word}))(${word}*[\u2019']*${word}*)`, "g");
// Definition of the built-in root model — the model that describes models
// themselves and backs the `ronin_schema` table.
var ROOT_MODEL = {
  slug: "model",
  identifiers: {
    name: "name",
    slug: "slug"
  },
  // This name mimics the `sqlite_schema` table in SQLite.
  table: "ronin_schema",
  // Indicates that the model was automatically generated by RONIN.
  system: { model: "root" },
  fields: [
    { slug: "name", type: "string" },
    { slug: "pluralName", type: "string" },
    { slug: "slug", type: "string" },
    { slug: "pluralSlug", type: "string" },
    { slug: "idPrefix", type: "string" },
    { slug: "table", type: "string" },
    { slug: "identifiers.name", type: "string" },
    { slug: "identifiers.slug", type: "string" },
    // Providing an empty object as a default value allows us to use `json_insert`
    // without needing to fall back to an empty object in the insertion statement,
    // which makes the statement shorter.
    { slug: "fields", type: "json", defaultValue: "{}" },
    { slug: "indexes", type: "json", defaultValue: "{}" },
    { slug: "triggers", type: "json", defaultValue: "{}" },
    { slug: "presets", type: "json", defaultValue: "{}" }
  ]
};
// Maps a singular model-entity kind to the plural key under which entities of
// that kind are stored on a model.
var PLURAL_MODEL_ENTITIES = {
  field: "fields",
  index: "indexes",
  trigger: "triggers",
  preset: "presets"
};
var PLURAL_MODEL_ENTITIES_VALUES = Object.values(PLURAL_MODEL_ENTITIES);
// The root model without its internal `system` marker.
var CLEAN_ROOT_MODEL = omit(ROOT_MODEL, ["system"]);
// src/queries.ts | ||
// Token used to delimit serialized expressions embedded inside plain strings.
var RONIN_EXPRESSION_SEPARATOR = "//.//";
// When set, an AsyncLocalStorage whose store (when truthy) marks that a batch
// is currently being collected on the async execution path.
var IN_BATCH_ASYNC;
// Synchronous batching flag, used when no async context is provided.
var IN_BATCH_SYNC = false;
/**
 * A utility function that creates a Proxy object to handle dynamic property
 * access and function calls, enabling syntax such as
 * `get.account.with.email('mike@gmail.com')`.
 *
 * @param queryType - The type of the query. Used as the top-level key in the
 * generated query object.
 * @param queryHandler - A function that handles the execution of the query.
 *
 * @returns A Proxy object that intercepts property access and function calls
 * to generate and (outside of batches) execute queries.
 */
var getSyntaxProxy = (queryType, queryHandler) => {
  function createProxy(path, targetProps) {
    const proxyTargetFunction = () => void 0;
    // NOTE(review): this immediate invocation has no visible effect (the
    // function body is `() => void 0`) — possibly a leftover; confirm before
    // removing.
    proxyTargetFunction();
    if (targetProps) Object.assign(proxyTargetFunction, targetProps);
    // Remove the function's own `name` property so that accessing `.name` on
    // the proxy falls through to the `get` trap below (which checks
    // `Object.hasOwn`) and is treated as a query path segment.
    delete proxyTargetFunction.name;
    return new Proxy(proxyTargetFunction, {
      apply(target, _thisArg, args) {
        let value = args[0];
        const options = args[1];
        // A function argument is a sub-query / expression builder: run it
        // against a field proxy that serializes every accessed field name.
        if (typeof value === "function") {
          IN_BATCH_SYNC = true;
          const fieldProxy = new Proxy(
            {},
            {
              get(_target, property) {
                const name = property.toString();
                const split = RONIN_EXPRESSION_SEPARATOR;
                // Serialize the field access as `//.//__RONIN_FIELD_<name>//.//`.
                return `${split}${QUERY_SYMBOLS.FIELD}${name}${split}`;
              }
            }
          );
          const instructions = value(fieldProxy);
          if (instructions.query) {
            value = { [QUERY_SYMBOLS.QUERY]: instructions.query };
          } else {
            value = instructions;
          }
          // Convert any serialized expression strings into expression objects.
          if (isExpression(value)) {
            value = wrapExpression(value);
          } else if (typeof value === "object") {
            value = wrapExpressions(value);
          }
          IN_BATCH_SYNC = false;
        }
        const query = target.query || {};
        const targetValue = typeof value === "undefined" ? {} : value;
        // Record the accessed property chain as a nested path in the query.
        setProperty(query, `${queryType}.${path.join(".")}`, targetValue);
        // Inside a batch, defer execution: return another proxy carrying the
        // accumulated query (and options) instead of running the handler.
        if (IN_BATCH_ASYNC?.getStore() || IN_BATCH_SYNC) {
          const newPath = path.slice(0, -1);
          const details = { query };
          if (options) details.options = options;
          return createProxy(newPath, details);
        }
        return queryHandler(query, options);
      },
      get(target, nextProp, receiver) {
        // Own properties (e.g. the accumulated `query`) are served directly;
        // everything else extends the query path with a new proxy level.
        if (Object.hasOwn(target, nextProp)) {
          return Reflect.get(target, nextProp, receiver);
        }
        return createProxy(path.concat([nextProp]), target);
      }
    });
  }
  return createProxy([]);
};
/**
 * Executes a batch of operations and hands the collected queries to
 * `queriesHandler`, so multiple queries can be executed at once.
 *
 * @param operations - A function returning an array of deferred query proxies.
 * @param options - Per-batch options; `asyncContext` switches batching from
 * the synchronous module flag to an AsyncLocalStorage-based context.
 * @param queriesHandler - Receives the collected query items and executes them.
 *
 * @returns Whatever `queriesHandler` returns for the collected queries.
 */
var getBatchProxy = (operations, options = {}, queriesHandler) => {
  let queries = [];
  if (options.asyncContext) {
    IN_BATCH_ASYNC = options.asyncContext;
    queries = IN_BATCH_ASYNC.run(true, () => operations());
  } else {
    IN_BATCH_SYNC = true;
    try {
      queries = operations();
    } finally {
      // Always clear the flag, even if `operations` throws — otherwise every
      // subsequent non-batch query would be treated as part of a batch.
      IN_BATCH_SYNC = false;
    }
  }
  // Shallow-copy each item so the handler receives plain objects rather than
  // the proxies produced during collection.
  const cleanQueries = queries.map((details) => ({ ...details }));
  // NOTE(review): `options` is not forwarded to `queriesHandler` here even
  // though the declared signature accepts it — confirm whether intentional.
  return queriesHandler(cleanQueries);
};
// Determines whether a value is a string containing at least one serialized
// expression, as marked by the module-level separator token.
var isExpression = (value) =>
  typeof value === "string" && value.includes(RONIN_EXPRESSION_SEPARATOR);
// Converts a serialized expression string into an expression object. Field
// references (parts starting with the field symbol) are kept verbatim, plain
// text is single-quoted, and all pieces are joined with the SQL string
// concatenation operator.
var wrapExpression = (value) => {
  const pieces = [];
  for (const part of value.split(RONIN_EXPRESSION_SEPARATOR)) {
    if (part.length === 0) continue;
    pieces.push(part.startsWith(QUERY_SYMBOLS.FIELD) ? part : `'${part}'`);
  }
  return { [QUERY_SYMBOLS.EXPRESSION]: pieces.join(" || ") };
};
// Recursively walks a plain object and wraps every serialized expression
// string found among its (nested) values.
var wrapExpressions = (obj) => {
  const entries = Object.entries(obj).map(([key, value]) => {
    if (isExpression(value)) return [key, wrapExpression(value)];
    const wrapped = value && typeof value === "object" ? wrapExpressions(value) : value;
    return [key, wrapped];
  });
  return Object.fromEntries(entries);
};
import { | ||
getBatchProxy, | ||
getProperty, | ||
getSyntaxProxy, | ||
setProperty | ||
} from "./chunk-HYDBZZQA.js"; | ||
export { | ||
@@ -286,0 +8,0 @@ getBatchProxy, |
@@ -1,2 +0,3 @@ | ||
import { P as PublicModel, G as GetInstructions, W as WithInstruction, M as ModelIndex, a as ModelField, b as ModelTrigger } from './index.d-CMkIzj0q.js'; | ||
import { P as PublicModel, G as GetInstructions, W as WithInstruction, M as ModelIndex, a as ModelField, b as ModelTrigger, S as SyntaxItem } from './queries-CWtJe_zj.js'; | ||
import 'node:async_hooks'; | ||
@@ -13,4 +14,4 @@ interface RoninFields { | ||
} | ||
type Primitives = ReturnType<typeof link> | ReturnType<typeof string> | ReturnType<typeof boolean> | ReturnType<typeof number> | ReturnType<typeof json> | ReturnType<typeof date> | ReturnType<typeof blob> | NestedFields; | ||
interface NestedFields { | ||
type Primitives = ReturnType<typeof link> | ReturnType<typeof string> | ReturnType<typeof boolean> | ReturnType<typeof number> | ReturnType<typeof json> | ReturnType<typeof date> | ReturnType<typeof blob> | NestedFieldsPrimitives; | ||
interface NestedFieldsPrimitives { | ||
[key: string]: Primitives; | ||
@@ -74,3 +75,23 @@ } | ||
/** A utility type that maps an attribute's type to a function signature. */ | ||
type AttributeSignature<T> = T extends boolean ? () => any : T extends boolean ? never : (value: string) => any; | ||
/**
 * Represents a chain of field attributes in the form of a function chain.
 *
 * - `Attrs`: The interface describing your attributes (e.g., { required: boolean; }).
 * - `Used`: A union of the keys already used in the chain.
 *
 * For each attribute key `K` not in `Used`, create a method using the signature derived
 * from that attribute's type. Calling it returns a new `Chain` marking `K` as used.
 * The `type` attribute, when present, is surfaced as a readonly property
 * rather than a chainable method.
 */
type Chain<Attrs, Used extends keyof Attrs = never> = {
    [K in Exclude<keyof Attrs, Used | 'type'>]: (...args: Parameters<AttributeSignature<Attrs[K]>>) => Chain<Attrs, Used | K>;
} & ('type' extends keyof Attrs ? {
    readonly type: Attrs['type'];
} : {});
/** The attribute object of the `ModelField` variant with the given `type`, minus `slug`. */
type FieldOutput<Type extends ModelField['type']> = Omit<Extract<ModelField, {
    type: Type;
}>, 'slug'>;
/**
 * The public shape of a field-definition chain for a given field type.
 * NOTE(review): `SyntaxItem` is imported from './queries-CWtJe_zj.js'; its
 * exact wrapping semantics are not visible in this chunk — confirm there.
 */
type SyntaxField<Type extends ModelField['type']> = SyntaxItem<FieldOutput<Type>>;
/** | ||
* Creates a string field definition returning an object that includes the field type | ||
@@ -83,3 +104,3 @@ * ("string") and attributes. | ||
*/ | ||
declare const string: (attributes?: Partial<Omit<{ | ||
declare const string: (initialAttributes?: Partial<Omit<{ | ||
name?: string; | ||
@@ -103,22 +124,3 @@ slug: string; | ||
collation?: "BINARY" | "NOCASE" | "RTRIM"; | ||
}, "slug" | "type">>) => Omit<{ | ||
name?: string; | ||
slug: string; | ||
displayAs?: string; | ||
unique?: boolean; | ||
required?: boolean; | ||
defaultValue?: unknown; | ||
computedAs?: { | ||
kind: "VIRTUAL" | "STORED"; | ||
value: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
}; | ||
check?: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
} & { | ||
type: "string"; | ||
collation?: "BINARY" | "NOCASE" | "RTRIM"; | ||
}, "slug">; | ||
}, "slug" | "type">>) => Chain<FieldOutput<"string">, never>; | ||
/** | ||
@@ -132,3 +134,3 @@ * Creates a number field definition returning an object that includes the field type | ||
*/ | ||
declare const number: (attributes?: Partial<Omit<{ | ||
declare const number: (initialAttributes?: Partial<Omit<{ | ||
name?: string; | ||
@@ -152,22 +154,3 @@ slug: string; | ||
increment?: boolean; | ||
}, "slug" | "type">>) => Omit<{ | ||
name?: string; | ||
slug: string; | ||
displayAs?: string; | ||
unique?: boolean; | ||
required?: boolean; | ||
defaultValue?: unknown; | ||
computedAs?: { | ||
kind: "VIRTUAL" | "STORED"; | ||
value: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
}; | ||
check?: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
} & { | ||
type: "number"; | ||
increment?: boolean; | ||
}, "slug">; | ||
}, "slug" | "type">>) => Chain<FieldOutput<"number">, never>; | ||
/** | ||
@@ -181,3 +164,3 @@ * Creates a link field definition returning an object that includes the field type | ||
*/ | ||
declare const link: (attributes?: Partial<Omit<{ | ||
declare const link: (initialAttributes?: Partial<Omit<{ | ||
name?: string; | ||
@@ -206,27 +189,3 @@ slug: string; | ||
}; | ||
}, "slug" | "type">>) => Omit<{ | ||
name?: string; | ||
slug: string; | ||
displayAs?: string; | ||
unique?: boolean; | ||
required?: boolean; | ||
defaultValue?: unknown; | ||
computedAs?: { | ||
kind: "VIRTUAL" | "STORED"; | ||
value: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
}; | ||
check?: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
} & { | ||
type: "link"; | ||
target: string; | ||
kind?: "one" | "many"; | ||
actions?: { | ||
onDelete?: "CASCADE" | "RESTRICT" | "SET NULL" | "SET DEFAULT" | "NO ACTION"; | ||
onUpdate?: "CASCADE" | "RESTRICT" | "SET NULL" | "SET DEFAULT" | "NO ACTION"; | ||
}; | ||
}, "slug">; | ||
}, "slug" | "type">>) => Chain<FieldOutput<"link">, never>; | ||
/** | ||
@@ -240,3 +199,3 @@ * Creates a JSON field definition returning an object that includes the field type | ||
*/ | ||
declare const json: (attributes?: Partial<Omit<{ | ||
declare const json: (initialAttributes?: Partial<Omit<{ | ||
name?: string; | ||
@@ -259,21 +218,3 @@ slug: string; | ||
type: "json"; | ||
}, "slug" | "type">>) => Omit<{ | ||
name?: string; | ||
slug: string; | ||
displayAs?: string; | ||
unique?: boolean; | ||
required?: boolean; | ||
defaultValue?: unknown; | ||
computedAs?: { | ||
kind: "VIRTUAL" | "STORED"; | ||
value: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
}; | ||
check?: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
} & { | ||
type: "json"; | ||
}, "slug">; | ||
}, "slug" | "type">>) => Chain<FieldOutput<"json">, never>; | ||
/** | ||
@@ -287,3 +228,3 @@ * Creates a date field definition returning an object that includes the field type | ||
*/ | ||
declare const date: (attributes?: Partial<Omit<{ | ||
declare const date: (initialAttributes?: Partial<Omit<{ | ||
name?: string; | ||
@@ -306,21 +247,3 @@ slug: string; | ||
type: "date"; | ||
}, "slug" | "type">>) => Omit<{ | ||
name?: string; | ||
slug: string; | ||
displayAs?: string; | ||
unique?: boolean; | ||
required?: boolean; | ||
defaultValue?: unknown; | ||
computedAs?: { | ||
kind: "VIRTUAL" | "STORED"; | ||
value: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
}; | ||
check?: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
} & { | ||
type: "date"; | ||
}, "slug">; | ||
}, "slug" | "type">>) => Chain<FieldOutput<"date">, never>; | ||
/** | ||
@@ -334,3 +257,3 @@ * Creates a boolean field definition returning an object that includes the field type | ||
*/ | ||
declare const boolean: (attributes?: Partial<Omit<{ | ||
declare const boolean: (initialAttributes?: Partial<Omit<{ | ||
name?: string; | ||
@@ -353,21 +276,3 @@ slug: string; | ||
type: "boolean"; | ||
}, "slug" | "type">>) => Omit<{ | ||
name?: string; | ||
slug: string; | ||
displayAs?: string; | ||
unique?: boolean; | ||
required?: boolean; | ||
defaultValue?: unknown; | ||
computedAs?: { | ||
kind: "VIRTUAL" | "STORED"; | ||
value: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
}; | ||
check?: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
} & { | ||
type: "boolean"; | ||
}, "slug">; | ||
}, "slug" | "type">>) => Chain<FieldOutput<"boolean">, never>; | ||
/** | ||
@@ -381,3 +286,3 @@ * Creates a blob field definition returning an object that includes the field type | ||
*/ | ||
declare const blob: (attributes?: Partial<Omit<{ | ||
declare const blob: (initialAttributes?: Partial<Omit<{ | ||
name?: string; | ||
@@ -400,22 +305,4 @@ slug: string; | ||
type: "blob"; | ||
}, "slug" | "type">>) => Omit<{ | ||
name?: string; | ||
slug: string; | ||
displayAs?: string; | ||
unique?: boolean; | ||
required?: boolean; | ||
defaultValue?: unknown; | ||
computedAs?: { | ||
kind: "VIRTUAL" | "STORED"; | ||
value: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
}; | ||
check?: { | ||
__RONIN_EXPRESSION: string; | ||
}; | ||
} & { | ||
type: "blob"; | ||
}, "slug">; | ||
}, "slug" | "type">>) => Chain<FieldOutput<"blob">, never>; | ||
export { blob, boolean, date, json, link, model, number, string }; | ||
export { type SyntaxField, blob, boolean, date, json, link, model, number, string }; |
import { | ||
__commonJS, | ||
__require, | ||
__toESM | ||
} from "./chunk-QGM4M3NI.js"; | ||
getBatchProxy, | ||
getSyntaxProxy | ||
} from "./chunk-HYDBZZQA.js"; | ||
// node_modules/@noble/hashes/_assert.js | ||
// Vendored from @noble/hashes: runtime argument assertions used by the hash
// implementations bundled below.
var require_assert = __commonJS({
  "node_modules/@noble/hashes/_assert.js"(exports) {
    "use strict";
    Object.defineProperty(exports, "__esModule", { value: true });
    exports.anumber = anumber;
    exports.abytes = abytes;
    exports.ahash = ahash;
    exports.aexists = aexists;
    exports.aoutput = aoutput;
    // Asserts that `n` is a non-negative safe integer.
    function anumber(n) {
      if (!Number.isSafeInteger(n) || n < 0)
        throw new Error("positive integer expected, got " + n);
    }
    // Uint8Array check that also works across realms (constructor-name match).
    function isBytes(a) {
      return a instanceof Uint8Array || ArrayBuffer.isView(a) && a.constructor.name === "Uint8Array";
    }
    // Asserts that `b` is a Uint8Array, optionally of one of the given lengths.
    function abytes(b, ...lengths) {
      if (!isBytes(b))
        throw new Error("Uint8Array expected");
      if (lengths.length > 0 && !lengths.includes(b.length))
        throw new Error("Uint8Array expected of length " + lengths + ", got length=" + b.length);
    }
    // Asserts that `h` looks like a wrapped hash constructor (see
    // `wrapConstructor` in the utils module below).
    function ahash(h) {
      if (typeof h !== "function" || typeof h.create !== "function")
        throw new Error("Hash should be wrapped by utils.wrapConstructor");
      anumber(h.outputLen);
      anumber(h.blockLen);
    }
    // Asserts that a hash instance is still usable (not destroyed/finished).
    function aexists(instance, checkFinished = true) {
      if (instance.destroyed)
        throw new Error("Hash instance has been destroyed");
      if (checkFinished && instance.finished)
        throw new Error("Hash#digest() has already been called");
    }
    // Asserts that the output buffer can hold the instance's digest.
    function aoutput(out, instance) {
      abytes(out);
      const min = instance.outputLen;
      if (out.length < min) {
        throw new Error("digestInto() expects output buffer of length at least " + min);
      }
    }
  }
});
// node_modules/@noble/hashes/_u64.js | ||
// Vendored from @noble/hashes: 64-bit integer arithmetic implemented on pairs
// of 32-bit numbers (`h` = high word, `l` = low word), since JavaScript
// bitwise operators only operate on 32 bits.
var require_u64 = __commonJS({
  "node_modules/@noble/hashes/_u64.js"(exports) {
    "use strict";
    Object.defineProperty(exports, "__esModule", { value: true });
    exports.add5L = exports.add5H = exports.add4H = exports.add4L = exports.add3H = exports.add3L = exports.rotlBL = exports.rotlBH = exports.rotlSL = exports.rotlSH = exports.rotr32L = exports.rotr32H = exports.rotrBL = exports.rotrBH = exports.rotrSL = exports.rotrSH = exports.shrSL = exports.shrSH = exports.toBig = void 0;
    exports.fromBig = fromBig;
    exports.split = split;
    exports.add = add;
    var U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1);
    var _32n = /* @__PURE__ */ BigInt(32);
    // Splits a BigInt into { h, l } 32-bit halves (`le` swaps which half is
    // treated as high vs. low).
    function fromBig(n, le = false) {
      if (le)
        return { h: Number(n & U32_MASK64), l: Number(n >> _32n & U32_MASK64) };
      return { h: Number(n >> _32n & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 };
    }
    // Splits an array of BigInts into parallel Uint32Arrays of high/low words.
    function split(lst, le = false) {
      let Ah = new Uint32Array(lst.length);
      let Al = new Uint32Array(lst.length);
      for (let i = 0; i < lst.length; i++) {
        const { h, l } = fromBig(lst[i], le);
        [Ah[i], Al[i]] = [h, l];
      }
      return [Ah, Al];
    }
    // Recombines high/low words into a single BigInt.
    var toBig = (h, l) => BigInt(h >>> 0) << _32n | BigInt(l >>> 0);
    exports.toBig = toBig;
    // Naming scheme below (evidenced by the `s - 32` / `64 - s` arithmetic):
    // S = "small" shift (s < 32), B = "big" shift (s > 32); the trailing H/L
    // indicates which output word the helper produces.
    var shrSH = (h, _l, s) => h >>> s;
    exports.shrSH = shrSH;
    var shrSL = (h, l, s) => h << 32 - s | l >>> s;
    exports.shrSL = shrSL;
    var rotrSH = (h, l, s) => h >>> s | l << 32 - s;
    exports.rotrSH = rotrSH;
    var rotrSL = (h, l, s) => h << 32 - s | l >>> s;
    exports.rotrSL = rotrSL;
    var rotrBH = (h, l, s) => h << 64 - s | l >>> s - 32;
    exports.rotrBH = rotrBH;
    var rotrBL = (h, l, s) => h >>> s - 32 | l << 64 - s;
    exports.rotrBL = rotrBL;
    // Rotation by exactly 32 bits simply swaps the two words.
    var rotr32H = (_h, l) => l;
    exports.rotr32H = rotr32H;
    var rotr32L = (h, _l) => h;
    exports.rotr32L = rotr32L;
    var rotlSH = (h, l, s) => h << s | l >>> 32 - s;
    exports.rotlSH = rotlSH;
    var rotlSL = (h, l, s) => l << s | h >>> 32 - s;
    exports.rotlSL = rotlSL;
    var rotlBH = (h, l, s) => l << s - 32 | h >>> 64 - s;
    exports.rotlBH = rotlBH;
    var rotlBL = (h, l, s) => h << s - 32 | l >>> 64 - s;
    exports.rotlBL = rotlBL;
    // 64-bit addition with carry propagation from the low words into the high.
    function add(Ah, Al, Bh, Bl) {
      const l = (Al >>> 0) + (Bl >>> 0);
      return { h: Ah + Bh + (l / 2 ** 32 | 0) | 0, l: l | 0 };
    }
    // Multi-operand addition: the *L helpers sum low words (keeping the carry
    // in the fractional 2**32 range), the *H helpers fold that carry into the
    // summed high words.
    var add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0);
    exports.add3L = add3L;
    var add3H = (low, Ah, Bh, Ch) => Ah + Bh + Ch + (low / 2 ** 32 | 0) | 0;
    exports.add3H = add3H;
    var add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0);
    exports.add4L = add4L;
    var add4H = (low, Ah, Bh, Ch, Dh) => Ah + Bh + Ch + Dh + (low / 2 ** 32 | 0) | 0;
    exports.add4H = add4H;
    var add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0);
    exports.add5L = add5L;
    var add5H = (low, Ah, Bh, Ch, Dh, Eh) => Ah + Bh + Ch + Dh + Eh + (low / 2 ** 32 | 0) | 0;
    exports.add5H = add5H;
    var u64 = {
      fromBig,
      split,
      toBig,
      shrSH,
      shrSL,
      rotrSH,
      rotrSL,
      rotrBH,
      rotrBL,
      rotr32H,
      rotr32L,
      rotlSH,
      rotlSL,
      rotlBH,
      rotlBL,
      add,
      add3L,
      add3H,
      add4L,
      add4H,
      add5H,
      add5L
    };
    exports.default = u64;
  }
});
// node_modules/@noble/hashes/cryptoNode.js | ||
// Vendored from @noble/hashes: selects a crypto provider. Prefers the
// WebCrypto object exposed by node:crypto; falls back to the Node crypto
// module itself when it exposes `randomBytes`; otherwise undefined.
var require_cryptoNode = __commonJS({
  "node_modules/@noble/hashes/cryptoNode.js"(exports) {
    "use strict";
    Object.defineProperty(exports, "__esModule", { value: true });
    exports.crypto = void 0;
    var nc = __require("node:crypto");
    exports.crypto = nc && typeof nc === "object" && "webcrypto" in nc ? nc.webcrypto : nc && typeof nc === "object" && "randomBytes" in nc ? nc : void 0;
  }
});
// node_modules/@noble/hashes/utils.js | ||
var require_utils = __commonJS({ | ||
"node_modules/@noble/hashes/utils.js"(exports) { | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.Hash = exports.nextTick = exports.byteSwapIfBE = exports.byteSwap = exports.isLE = exports.rotl = exports.rotr = exports.createView = exports.u32 = exports.u8 = void 0; | ||
exports.isBytes = isBytes; | ||
exports.byteSwap32 = byteSwap32; | ||
exports.bytesToHex = bytesToHex; | ||
exports.hexToBytes = hexToBytes; | ||
exports.asyncLoop = asyncLoop; | ||
exports.utf8ToBytes = utf8ToBytes; | ||
exports.toBytes = toBytes; | ||
exports.concatBytes = concatBytes; | ||
exports.checkOpts = checkOpts; | ||
exports.wrapConstructor = wrapConstructor; | ||
exports.wrapConstructorWithOpts = wrapConstructorWithOpts; | ||
exports.wrapXOFConstructorWithOpts = wrapXOFConstructorWithOpts; | ||
exports.randomBytes = randomBytes; | ||
var crypto_1 = require_cryptoNode(); | ||
var _assert_js_1 = require_assert(); | ||
function isBytes(a) { | ||
return a instanceof Uint8Array || ArrayBuffer.isView(a) && a.constructor.name === "Uint8Array"; | ||
} | ||
var u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); | ||
exports.u8 = u8; | ||
var u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); | ||
exports.u32 = u32; | ||
var createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); | ||
exports.createView = createView; | ||
var rotr = (word, shift) => word << 32 - shift | word >>> shift; | ||
exports.rotr = rotr; | ||
var rotl = (word, shift) => word << shift | word >>> 32 - shift >>> 0; | ||
exports.rotl = rotl; | ||
exports.isLE = (() => new Uint8Array(new Uint32Array([287454020]).buffer)[0] === 68)(); | ||
var byteSwap = (word) => word << 24 & 4278190080 | word << 8 & 16711680 | word >>> 8 & 65280 | word >>> 24 & 255; | ||
exports.byteSwap = byteSwap; | ||
exports.byteSwapIfBE = exports.isLE ? (n) => n : (n) => (0, exports.byteSwap)(n); | ||
function byteSwap32(arr) { | ||
for (let i = 0; i < arr.length; i++) { | ||
arr[i] = (0, exports.byteSwap)(arr[i]); | ||
} | ||
} | ||
var hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, "0")); | ||
function bytesToHex(bytes) { | ||
(0, _assert_js_1.abytes)(bytes); | ||
let hex = ""; | ||
for (let i = 0; i < bytes.length; i++) { | ||
hex += hexes[bytes[i]]; | ||
} | ||
return hex; | ||
} | ||
var asciis = { _0: 48, _9: 57, A: 65, F: 70, a: 97, f: 102 }; | ||
function asciiToBase16(ch) { | ||
if (ch >= asciis._0 && ch <= asciis._9) | ||
return ch - asciis._0; | ||
if (ch >= asciis.A && ch <= asciis.F) | ||
return ch - (asciis.A - 10); | ||
if (ch >= asciis.a && ch <= asciis.f) | ||
return ch - (asciis.a - 10); | ||
return; | ||
} | ||
function hexToBytes(hex) { | ||
if (typeof hex !== "string") | ||
throw new Error("hex string expected, got " + typeof hex); | ||
const hl = hex.length; | ||
const al = hl / 2; | ||
if (hl % 2) | ||
throw new Error("hex string expected, got unpadded hex of length " + hl); | ||
const array = new Uint8Array(al); | ||
for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { | ||
const n1 = asciiToBase16(hex.charCodeAt(hi)); | ||
const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); | ||
if (n1 === void 0 || n2 === void 0) { | ||
const char = hex[hi] + hex[hi + 1]; | ||
throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); | ||
} | ||
array[ai] = n1 * 16 + n2; | ||
} | ||
return array; | ||
} | ||
var nextTick = async () => { | ||
}; | ||
exports.nextTick = nextTick; | ||
async function asyncLoop(iters, tick, cb) { | ||
let ts = Date.now(); | ||
for (let i = 0; i < iters; i++) { | ||
cb(i); | ||
const diff = Date.now() - ts; | ||
if (diff >= 0 && diff < tick) | ||
continue; | ||
await (0, exports.nextTick)(); | ||
ts += diff; | ||
} | ||
} | ||
function utf8ToBytes(str) { | ||
if (typeof str !== "string") | ||
throw new Error("utf8ToBytes expected string, got " + typeof str); | ||
return new Uint8Array(new TextEncoder().encode(str)); | ||
} | ||
function toBytes(data) { | ||
if (typeof data === "string") | ||
data = utf8ToBytes(data); | ||
(0, _assert_js_1.abytes)(data); | ||
return data; | ||
} | ||
function concatBytes(...arrays) { | ||
let sum = 0; | ||
for (let i = 0; i < arrays.length; i++) { | ||
const a = arrays[i]; | ||
(0, _assert_js_1.abytes)(a); | ||
sum += a.length; | ||
} | ||
const res = new Uint8Array(sum); | ||
for (let i = 0, pad = 0; i < arrays.length; i++) { | ||
const a = arrays[i]; | ||
res.set(a, pad); | ||
pad += a.length; | ||
} | ||
return res; | ||
} | ||
    // Minimal base class for hash implementations. Subclasses provide
    // _cloneInto(target?), which deep-copies hashing state.
    var Hash = class {
      // Safe version that clones internal state
      clone() {
        return this._cloneInto();
      }
    };
    exports.Hash = Hash;
    // Merges user-supplied options into `defaults`, rejecting any `opts`
    // value that is defined but not a plain object.
    // NOTE(review): Object.assign mutates the `defaults` argument in place —
    // callers appear to pass fresh literals, but confirm before sharing one.
    function checkOpts(defaults, opts) {
      if (opts !== void 0 && {}.toString.call(opts) !== "[object Object]")
        throw new Error("Options should be object or undefined");
      const merged = Object.assign(defaults, opts);
      return merged;
    }
function wrapConstructor(hashCons) { | ||
const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); | ||
const tmp = hashCons(); | ||
hashC.outputLen = tmp.outputLen; | ||
hashC.blockLen = tmp.blockLen; | ||
hashC.create = () => hashCons(); | ||
return hashC; | ||
} | ||
function wrapConstructorWithOpts(hashCons) { | ||
const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); | ||
const tmp = hashCons({}); | ||
hashC.outputLen = tmp.outputLen; | ||
hashC.blockLen = tmp.blockLen; | ||
hashC.create = (opts) => hashCons(opts); | ||
return hashC; | ||
} | ||
function wrapXOFConstructorWithOpts(hashCons) { | ||
const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); | ||
const tmp = hashCons({}); | ||
hashC.outputLen = tmp.outputLen; | ||
hashC.blockLen = tmp.blockLen; | ||
hashC.create = (opts) => hashCons(opts); | ||
return hashC; | ||
} | ||
    // Returns `bytesLength` cryptographically random bytes, preferring the
    // Web Crypto getRandomValues API and falling back to a Node-style
    // crypto.randomBytes; throws when neither is available.
    function randomBytes(bytesLength = 32) {
      if (crypto_1.crypto && typeof crypto_1.crypto.getRandomValues === "function") {
        return crypto_1.crypto.getRandomValues(new Uint8Array(bytesLength));
      }
      if (crypto_1.crypto && typeof crypto_1.crypto.randomBytes === "function") {
        return crypto_1.crypto.randomBytes(bytesLength);
      }
      throw new Error("crypto.getRandomValues must be defined");
    }
} | ||
}); | ||
// node_modules/@noble/hashes/sha3.js
var require_sha3 = __commonJS({
  "node_modules/@noble/hashes/sha3.js"(exports) {
    "use strict";
    Object.defineProperty(exports, "__esModule", { value: true });
    exports.shake256 = exports.shake128 = exports.keccak_512 = exports.keccak_384 = exports.keccak_256 = exports.keccak_224 = exports.sha3_512 = exports.sha3_384 = exports.sha3_256 = exports.sha3_224 = exports.Keccak = void 0;
    exports.keccakP = keccakP;
    var _assert_js_1 = require_assert();
    var _u64_js_1 = require_u64();
    var utils_js_1 = require_utils();
    // Precomputed tables for the keccak permutation, filled once at load:
    //   SHA3_PI    - lane order for the pi step,
    //   SHA3_ROTL  - per-lane rotation amounts for the rho step,
    //   _SHA3_IOTA - 64-bit round constants (BigInt) for the iota step.
    var SHA3_PI = [];
    var SHA3_ROTL = [];
    var _SHA3_IOTA = [];
    var _0n = /* @__PURE__ */ BigInt(0);
    var _1n = /* @__PURE__ */ BigInt(1);
    var _2n = /* @__PURE__ */ BigInt(2);
    var _7n = /* @__PURE__ */ BigInt(7);
    var _256n = /* @__PURE__ */ BigInt(256);
    var _0x71n = /* @__PURE__ */ BigInt(113);
    for (let round = 0, R = _1n, x = 1, y = 0; round < 24; round++) {
      // Walk the (x, y) lane coordinates to build the pi permutation order.
      [x, y] = [y, (2 * x + 3 * y) % 5];
      SHA3_PI.push(2 * (5 * y + x));
      // Triangular-number rotation offsets, reduced mod 64.
      SHA3_ROTL.push((round + 1) * (round + 2) / 2 % 64);
      // Derive this round's constant bit-by-bit from the recurrence on R.
      let t = _0n;
      for (let j = 0; j < 7; j++) {
        R = (R << _1n ^ (R >> _7n) * _0x71n) % _256n;
        if (R & _2n)
          t ^= _1n << (_1n << /* @__PURE__ */ BigInt(j)) - _1n;
      }
      _SHA3_IOTA.push(t);
    }
    // Split the 64-bit iota constants into parallel high/low 32-bit arrays.
    var [SHA3_IOTA_H, SHA3_IOTA_L] = /* @__PURE__ */ (0, _u64_js_1.split)(_SHA3_IOTA, true);
    // 64-bit rotate-left on (high, low) 32-bit halves; which helper applies
    // depends on whether the shift crosses the 32-bit boundary.
    var rotlH = (h, l, s) => s > 32 ? (0, _u64_js_1.rotlBH)(h, l, s) : (0, _u64_js_1.rotlSH)(h, l, s);
    var rotlL = (h, l, s) => s > 32 ? (0, _u64_js_1.rotlBL)(h, l, s) : (0, _u64_js_1.rotlSL)(h, l, s);
    // The keccak-f[1600] permutation over 25 64-bit lanes, stored in `s` as
    // 50 uint32 values (high/low pairs). Mutates `s` in place; `rounds < 24`
    // runs only the last `rounds` rounds (round constants stay aligned).
    function keccakP(s, rounds = 24) {
      const B = new Uint32Array(5 * 2);
      for (let round = 24 - rounds; round < 24; round++) {
        // Theta: compute column parities into B ...
        for (let x = 0; x < 10; x++)
          B[x] = s[x] ^ s[x + 10] ^ s[x + 20] ^ s[x + 30] ^ s[x + 40];
        // ... then xor each column with the rotated parity of its neighbors.
        for (let x = 0; x < 10; x += 2) {
          const idx1 = (x + 8) % 10;
          const idx0 = (x + 2) % 10;
          const B0 = B[idx0];
          const B1 = B[idx0 + 1];
          const Th = rotlH(B0, B1, 1) ^ B[idx1];
          const Tl = rotlL(B0, B1, 1) ^ B[idx1 + 1];
          for (let y = 0; y < 50; y += 10) {
            s[x + y] ^= Th;
            s[x + y + 1] ^= Tl;
          }
        }
        // Rho + pi combined: rotate each lane and move it to its pi position,
        // chaining through the displaced lane.
        let curH = s[2];
        let curL = s[3];
        for (let t = 0; t < 24; t++) {
          const shift = SHA3_ROTL[t];
          const Th = rotlH(curH, curL, shift);
          const Tl = rotlL(curH, curL, shift);
          const PI = SHA3_PI[t];
          curH = s[PI];
          curL = s[PI + 1];
          s[PI] = Th;
          s[PI + 1] = Tl;
        }
        // Chi: non-linear mix within each 5-lane row.
        for (let y = 0; y < 50; y += 10) {
          for (let x = 0; x < 10; x++)
            B[x] = s[y + x];
          for (let x = 0; x < 10; x++)
            s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10];
        }
        // Iota: xor the round constant into lane (0, 0).
        s[0] ^= SHA3_IOTA_H[round];
        s[1] ^= SHA3_IOTA_L[round];
      }
      // Wipe the scratch buffer so intermediate state does not linger.
      B.fill(0);
    }
    // Sponge-construction driver for SHA3 / keccak / SHAKE. The 200-byte
    // (1600-bit) state is absorbed into and squeezed out of in blockLen-sized
    // chunks (the "rate"); keccakP permutes the state between blocks.
    var Keccak = class _Keccak extends utils_js_1.Hash {
      // NOTE: we accept arguments in bytes instead of bits here.
      constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) {
        super();
        this.blockLen = blockLen;
        this.suffix = suffix;
        this.outputLen = outputLen;
        this.enableXOF = enableXOF;
        this.rounds = rounds;
        this.pos = 0;
        this.posOut = 0;
        this.finished = false;
        this.destroyed = false;
        (0, _assert_js_1.anumber)(outputLen);
        if (0 >= this.blockLen || this.blockLen >= 200)
          throw new Error("Sha3 supports only keccak-f1600 function");
        this.state = new Uint8Array(200);
        this.state32 = (0, utils_js_1.u32)(this.state);
      }
      // Runs the permutation (byte-swapping around it on big-endian hosts)
      // and resets both absorb/squeeze cursors.
      keccak() {
        if (!utils_js_1.isLE)
          (0, utils_js_1.byteSwap32)(this.state32);
        keccakP(this.state32, this.rounds);
        if (!utils_js_1.isLE)
          (0, utils_js_1.byteSwap32)(this.state32);
        this.posOut = 0;
        this.pos = 0;
      }
      // Absorb: xor input into the state, permuting each time a block fills.
      update(data) {
        (0, _assert_js_1.aexists)(this);
        const { blockLen, state } = this;
        data = (0, utils_js_1.toBytes)(data);
        const len = data.length;
        for (let pos = 0; pos < len; ) {
          const take = Math.min(blockLen - this.pos, len - pos);
          for (let i = 0; i < take; i++)
            state[this.pos++] ^= data[pos++];
          if (this.pos === blockLen)
            this.keccak();
        }
        return this;
      }
      // Applies the domain-separation suffix and the final padding bit, then
      // permutes. Idempotent via the `finished` flag.
      finish() {
        if (this.finished)
          return;
        this.finished = true;
        const { state, suffix, pos, blockLen } = this;
        state[pos] ^= suffix;
        // If the suffix's top bit landed on the last rate byte, permute
        // before setting the closing 0x80 pad bit.
        if ((suffix & 128) !== 0 && pos === blockLen - 1)
          this.keccak();
        state[blockLen - 1] ^= 128;
        this.keccak();
      }
      // Squeeze: copy state bytes into `out`, permuting whenever the current
      // block of output is drained.
      writeInto(out) {
        (0, _assert_js_1.aexists)(this, false);
        (0, _assert_js_1.abytes)(out);
        this.finish();
        const bufferOut = this.state;
        const { blockLen } = this;
        for (let pos = 0, len = out.length; pos < len; ) {
          if (this.posOut >= blockLen)
            this.keccak();
          const take = Math.min(blockLen - this.posOut, len - pos);
          out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos);
          this.posOut += take;
          pos += take;
        }
        return out;
      }
      // Variable-length output; only valid when constructed with enableXOF.
      xofInto(out) {
        if (!this.enableXOF)
          throw new Error("XOF is not possible for this instance");
        return this.writeInto(out);
      }
      xof(bytes) {
        (0, _assert_js_1.anumber)(bytes);
        return this.xofInto(new Uint8Array(bytes));
      }
      // Fixed-length digest into a caller buffer; single-use — the instance
      // is wiped afterwards via destroy().
      digestInto(out) {
        (0, _assert_js_1.aoutput)(out, this);
        if (this.finished)
          throw new Error("digest() was already called");
        this.writeInto(out);
        this.destroy();
        return out;
      }
      digest() {
        return this.digestInto(new Uint8Array(this.outputLen));
      }
      // Zeroes the state so secret material does not linger in memory.
      destroy() {
        this.destroyed = true;
        this.state.fill(0);
      }
      // Copies the full hashing state into `to` (or a fresh instance).
      _cloneInto(to) {
        const { blockLen, suffix, outputLen, rounds, enableXOF } = this;
        to || (to = new _Keccak(blockLen, suffix, outputLen, enableXOF, rounds));
        to.state32.set(this.state32);
        to.pos = this.pos;
        to.posOut = this.posOut;
        to.finished = this.finished;
        to.rounds = rounds;
        to.suffix = suffix;
        to.outputLen = outputLen;
        to.enableXOF = enableXOF;
        to.destroyed = this.destroyed;
        return to;
      }
    };
    exports.Keccak = Keccak;
    // Factory for fixed-output variants. `suffix` is the padding byte xored
    // in by finish(): 6 for SHA3, 1 for legacy keccak, 31 for SHAKE below.
    // `blockLen` is the rate in bytes; `outputLen` the digest size in bytes.
    var gen = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapConstructor)(() => new Keccak(blockLen, suffix, outputLen));
    exports.sha3_224 = gen(6, 144, 224 / 8);
    exports.sha3_256 = gen(6, 136, 256 / 8);
    exports.sha3_384 = gen(6, 104, 384 / 8);
    exports.sha3_512 = gen(6, 72, 512 / 8);
    exports.keccak_224 = gen(1, 144, 224 / 8);
    exports.keccak_256 = gen(1, 136, 256 / 8);
    exports.keccak_384 = gen(1, 104, 384 / 8);
    exports.keccak_512 = gen(1, 72, 512 / 8);
    // SHAKE variants are XOF-enabled; opts.dkLen overrides the output length.
    var genShake = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapXOFConstructorWithOpts)((opts = {}) => new Keccak(blockLen, suffix, opts.dkLen === void 0 ? outputLen : opts.dkLen, true));
    exports.shake128 = genShake(31, 168, 128 / 8);
    exports.shake256 = genShake(31, 136, 256 / 8);
  }
});
// node_modules/@paralleldrive/cuid2/src/index.js
var require_src = __commonJS({
  "node_modules/@paralleldrive/cuid2/src/index.js"(exports, module) {
    "use strict";
    var { sha3_512: sha3 } = require_sha3();
    // Length of generated ids (24) and of host fingerprints (32), in chars.
    var defaultLength = 24;
    var bigLength = 32;
var createEntropy = (length = 4, random = Math.random) => { | ||
let entropy = ""; | ||
while (entropy.length < length) { | ||
entropy = entropy + Math.floor(random() * 36).toString(36); | ||
} | ||
return entropy; | ||
}; | ||
function bufToBigInt(buf) { | ||
let bits = 8n; | ||
let value = 0n; | ||
for (const i of buf.values()) { | ||
const bi = BigInt(i); | ||
value = (value << bits) + bi; | ||
} | ||
return value; | ||
} | ||
    // Base36-encodes the SHA3-512 digest of `input`. slice(1) drops the
    // first character (NOTE(review): presumably to discard the biased
    // leading digit — confirm against upstream cuid2).
    var hash = (input = "") => {
      return bufToBigInt(sha3(input)).toString(36).slice(1);
    };
var alphabet = Array.from( | ||
{ length: 26 }, | ||
(x, i) => String.fromCharCode(i + 97) | ||
); | ||
var randomLetter = (random) => alphabet[Math.floor(random() * alphabet.length)]; | ||
    // Derives a per-host fingerprint: the global object's key names (when
    // any exist) plus fresh entropy, hashed and trimmed to bigLength chars.
    var createFingerprint = ({
      globalObj = typeof global !== "undefined" ? global : typeof window !== "undefined" ? window : {},
      random = Math.random
    } = {}) => {
      const globals = Object.keys(globalObj).toString();
      const sourceString = globals.length ? globals + createEntropy(bigLength, random) : createEntropy(bigLength, random);
      return hash(sourceString).substring(0, bigLength);
    };
var createCounter = (count) => () => { | ||
return count++; | ||
}; | ||
    // Upper bound for the randomly-seeded initial session counter.
    var initialCountMax = 476782367;
    // Builds a cuid2 generator. All entropy sources are injectable; defaults
    // combine Math.random, a seeded counter, and a host fingerprint.
    var init = ({
      // Fallback if the user does not pass in a CSPRNG. This should be OK
      // because we don't rely solely on the random number generator for entropy.
      // We also use the host fingerprint, current time, and a session counter.
      random = Math.random,
      counter = createCounter(Math.floor(random() * initialCountMax)),
      length = defaultLength,
      fingerprint = createFingerprint({ random })
    } = {}) => {
      return function cuid2() {
        // id = random first letter + base36 hash of (time, salt, count,
        // fingerprint), truncated to `length` characters total.
        const firstLetter = randomLetter(random);
        const time = Date.now().toString(36);
        const count = counter().toString(36);
        const salt = createEntropy(length, random);
        const hashInput = `${time + salt + count + fingerprint}`;
        return `${firstLetter + hash(hashInput).substring(1, length)}`;
      };
    };
    // Shared default generator built with all-default settings.
    var createId = init();
var isCuid = (id, { minLength = 2, maxLength = bigLength } = {}) => { | ||
const length = id.length; | ||
const regex = /^[0-9a-z]+$/; | ||
try { | ||
if (typeof id === "string" && length >= minLength && length <= maxLength && regex.test(id)) | ||
return true; | ||
} finally { | ||
} | ||
return false; | ||
}; | ||
    // Public API of the cuid2 source module.
    module.exports.getConstants = () => ({ defaultLength, bigLength });
    module.exports.init = init;
    module.exports.createId = createId;
    module.exports.bufToBigInt = bufToBigInt;
    module.exports.createCounter = createCounter;
    module.exports.createFingerprint = createFingerprint;
    module.exports.isCuid = isCuid;
  }
});
// node_modules/@paralleldrive/cuid2/index.js
var require_cuid2 = __commonJS({
  "node_modules/@paralleldrive/cuid2/index.js"(exports, module) {
    "use strict";
    // Thin façade re-exporting the public surface of the src module.
    var { createId, init, getConstants, isCuid } = require_src();
    module.exports.createId = createId;
    module.exports.init = init;
    module.exports.getConstants = getConstants;
    module.exports.isCuid = isCuid;
  }
});
// node_modules/title/lib/lower-case.js
var require_lower_case = __commonJS({
  "node_modules/title/lib/lower-case.js"(exports, module) {
    "use strict";
    // Words kept lower-case in title casing (unless forced by punctuation or
    // in last position): coordinating conjunctions, articles, prepositions.
    var conjunctions = [
      "for",
      "and",
      "nor",
      "but",
      "or",
      "yet",
      "so"
    ];
    var articles = [
      "a",
      "an",
      "the"
    ];
    var prepositions = [
      "aboard",
      "about",
      "above",
      "across",
      "after",
      "against",
      "along",
      "amid",
      "among",
      "anti",
      "around",
      "as",
      "at",
      "before",
      "behind",
      "below",
      "beneath",
      "beside",
      "besides",
      "between",
      "beyond",
      "but",
      "by",
      "concerning",
      "considering",
      "despite",
      "down",
      "during",
      "except",
      "excepting",
      "excluding",
      "following",
      "for",
      "from",
      "in",
      "inside",
      "into",
      "like",
      "minus",
      "near",
      "of",
      "off",
      "on",
      "onto",
      "opposite",
      "over",
      "past",
      "per",
      "plus",
      "regarding",
      "round",
      "save",
      "since",
      "than",
      "through",
      "to",
      "toward",
      "towards",
      "under",
      "underneath",
      "unlike",
      "until",
      "up",
      "upon",
      "versus",
      "via",
      "with",
      "within",
      "without"
    ];
    // Exported as a Set for O(1) membership checks; duplicates across the
    // three lists (e.g. "for", "but") collapse harmlessly.
    module.exports = /* @__PURE__ */ new Set([
      ...conjunctions,
      ...articles,
      ...prepositions
    ]);
  }
});
// node_modules/title/lib/specials.js
var require_specials = __commonJS({
  "node_modules/title/lib/specials.js"(exports, module) {
    "use strict";
    // Canonical spellings restored verbatim after title casing (brand names,
    // acronyms, and file names whose casing must not be normalized).
    var intended = [
      "ZEIT",
      "ZEIT Inc.",
      "Vercel",
      "Vercel Inc.",
      "CLI",
      "API",
      "HTTP",
      "HTTPS",
      "JSX",
      "DNS",
      "URL",
      "now.sh",
      "now.json",
      "vercel.app",
      "vercel.json",
      "CI",
      "CD",
      "CDN",
      "package.json",
      "package.lock",
      "yarn.lock",
      "GitHub",
      "GitLab",
      "CSS",
      "Sass",
      "JS",
      "JavaScript",
      "TypeScript",
      "HTML",
      "WordPress",
      "Next.js",
      "Node.js",
      "Webpack",
      "Docker",
      "Bash",
      "Kubernetes",
      "SWR",
      "TinaCMS",
      "UI",
      "UX",
      "TS",
      "TSX",
      "iPhone",
      "iPad",
      "watchOS",
      "iOS",
      "iPadOS",
      "macOS",
      "PHP",
      "composer.json",
      "composer.lock",
      "CMS",
      "SQL",
      "C",
      "C#",
      "GraphQL",
      "GraphiQL",
      "JWT",
      "JWTs"
    ];
    module.exports = intended;
  }
});
// node_modules/title/lib/index.js
var require_lib = __commonJS({
  "node_modules/title/lib/index.js"(exports, module) {
    "use strict";
    var lowerCase = require_lower_case();
    var specials = require_specials();
    // A "word" character is anything except whitespace, quotes, parentheses,
    // and sentence punctuation. The regex captures, per match: an optional
    // leading separator, a first letter forced after punctuation, a plain
    // first letter, and the remainder of the word.
    var word = `[^\\s'\u2019\\(\\)!?;:"-]`;
    var regex = new RegExp(`(?:(?:(\\s?(?:^|[.\\(\\)!?;:"-])\\s*)(${word}))|(${word}))(${word}*[\u2019']*${word}*)`, "g");
    // Pairs each special spelling with a case-insensitive whole-word matcher.
    var convertToRegExp = (specials2) => specials2.map((s) => [new RegExp(`\\b${s}\\b`, "gi"), s]);
function parseMatch(match) { | ||
const firstCharacter = match[0]; | ||
if (/\s/.test(firstCharacter)) { | ||
return match.slice(1); | ||
} | ||
if (/[\(\)]/.test(firstCharacter)) { | ||
return null; | ||
} | ||
return match; | ||
} | ||
    // Title-cases `str`: every word is capitalized except known lower-case
    // words (articles/conjunctions/prepositions) that are neither forced by
    // preceding punctuation nor in last position; then canonical special
    // spellings (plus options.special) are restored verbatim.
    module.exports = (str, options = {}) => {
      str = str.toLowerCase().replace(regex, (m, lead = "", forced, lower, rest, offset, string2) => {
        const isLastWord = m.length + offset >= string2.length;
        const parsedMatch = parseMatch(m);
        if (!parsedMatch) {
          return m;
        }
        if (!forced) {
          const fullLower = lower + rest;
          // Keep listed words lower-case unless they end the title.
          if (lowerCase.has(fullLower) && !isLastWord) {
            return parsedMatch;
          }
        }
        return lead + (lower || forced).toUpperCase() + rest;
      });
      const customSpecials = options.special || [];
      const replace = [...specials, ...customSpecials];
      const replaceRegExp = convertToRegExp(replace);
      replaceRegExp.forEach(([pattern, s]) => {
        str = str.replace(pattern, s);
      });
      return str;
    };
  }
});
// src/utils/errors.ts | ||
@@ -883,88 +58,9 @@ var RoninError = class extends Error { | ||
// node_modules/ronin/node_modules/@ronin/compiler/dist/index.js | ||
var import_cuid2 = __toESM(require_cuid2(), 1); | ||
var import_title = __toESM(require_lib(), 1); | ||
// Sentinel string keys/prefixes used to embed RONIN constructs inside plain
// JSON query payloads.
var QUERY_SYMBOLS = {
  // Represents a sub query.
  QUERY: "__RONIN_QUERY",
  // Represents an expression that should be evaluated.
  EXPRESSION: "__RONIN_EXPRESSION",
  // Represents the value of a field in the model.
  FIELD: "__RONIN_FIELD_",
  // Represents the value of a field in the model of a parent query.
  FIELD_PARENT: "__RONIN_FIELD_PARENT_",
  // Represents the old value of a field in the parent model. Used for triggers.
  FIELD_PARENT_OLD: "__RONIN_FIELD_PARENT_OLD_",
  // Represents the new value of a field in the parent model. Used for triggers.
  FIELD_PARENT_NEW: "__RONIN_FIELD_PARENT_NEW_",
  // Represents a value provided to a query preset.
  VALUE: "__RONIN_VALUE"
};
// Matches serialized field references: the FIELD prefix followed by a
// dot-separated identifier path.
var RONIN_MODEL_FIELD_REGEX = new RegExp(
  `${QUERY_SYMBOLS.FIELD}[_a-zA-Z0-9.]+`,
  "g"
);
// Shallow copy of `obj` without the listed property keys.
var omit = (obj, properties) => {
  const kept = Object.entries(obj).filter(([key]) => !properties.includes(key));
  return Object.fromEntries(kept);
};
// Meta model describing models themselves; every user-defined model is
// stored as a record of this model.
var ROOT_MODEL = {
  slug: "model",
  identifiers: {
    name: "name",
    slug: "slug"
  },
  // This name mimics the `sqlite_schema` table in SQLite.
  table: "ronin_schema",
  // Indicates that the model was automatically generated by RONIN.
  system: { model: "root" },
  fields: [
    { slug: "name", type: "string" },
    { slug: "pluralName", type: "string" },
    { slug: "slug", type: "string" },
    { slug: "pluralSlug", type: "string" },
    { slug: "idPrefix", type: "string" },
    { slug: "table", type: "string" },
    { slug: "identifiers.name", type: "string" },
    { slug: "identifiers.slug", type: "string" },
    // Providing an empty object as a default value allows us to use `json_insert`
    // without needing to fall back to an empty object in the insertion statement,
    // which makes the statement shorter.
    { slug: "fields", type: "json", defaultValue: "{}" },
    { slug: "indexes", type: "json", defaultValue: "{}" },
    { slug: "triggers", type: "json", defaultValue: "{}" },
    { slug: "presets", type: "json", defaultValue: "{}" }
  ]
};
// Maps singular model entity kinds to the plural property names on a model.
var PLURAL_MODEL_ENTITIES = {
  field: "fields",
  index: "indexes",
  trigger: "triggers",
  preset: "presets"
};
var PLURAL_MODEL_ENTITIES_VALUES = Object.values(PLURAL_MODEL_ENTITIES);
// ROOT_MODEL without internal bookkeeping (the `system` marker).
var CLEAN_ROOT_MODEL = omit(ROOT_MODEL, ["system"]);
// node_modules/ronin/dist/chunk-SZFCDSI6.js
var EMPTY = Symbol("empty");
// Batch-collection state: async batches carry the flag through the provided
// async context, sync batches flip a module-level flag around operations().
var IN_BATCH_ASYNC;
var IN_BATCH_SYNC = false;
/**
 * Collects the queries produced by `operations()` while the batch flag or
 * async context is active, strips proxy wrappers via shallow copies, and
 * hands the plain list to `queriesHandler`.
 *
 * Fix: the sync path never reset IN_BATCH_SYNC when `operations()` threw,
 * leaving the module-level flag stuck at `true` for all later (non-batch)
 * calls. The reset now happens in a `finally` block.
 */
var getBatchProxy = (operations, options = {}, queriesHandler) => {
  let queries = [];
  if (options.asyncContext) {
    IN_BATCH_ASYNC = options.asyncContext;
    queries = IN_BATCH_ASYNC.run(true, () => operations());
  } else {
    IN_BATCH_SYNC = true;
    try {
      queries = operations();
    } finally {
      // Always clear the flag — even on throw — so later queries are not
      // mistakenly treated as part of a batch.
      IN_BATCH_SYNC = false;
    }
  }
  const cleanQueries = queries.map((details) => ({ ...details }));
  return queriesHandler(cleanQueries);
};
// src/utils/serializers.ts | ||
var serializeFields = (fields) => { | ||
return Object.entries(fields ?? {}).flatMap( | ||
([key, value]) => { | ||
if (!("type" in value)) { | ||
([key, initialValue]) => { | ||
let value = initialValue?.structure; | ||
if (typeof value === "undefined") { | ||
value = initialValue; | ||
const result = {}; | ||
@@ -976,13 +72,5 @@ for (const k of Object.keys(value)) { | ||
} | ||
const { type, unique, defaultValue, required, name } = value; | ||
const { actions, target } = value; | ||
return { | ||
slug: key, | ||
name, | ||
unique: unique ?? false, | ||
required: required ?? false, | ||
defaultValue, | ||
type, | ||
target, | ||
actions | ||
...value | ||
}; | ||
@@ -1017,3 +105,3 @@ } | ||
(queries) => { | ||
return queries.map((query2) => query2.query); | ||
return queries.map((query2) => query2.structure); | ||
} | ||
@@ -1055,4 +143,4 @@ ); | ||
var primitive = (type) => { | ||
return (attributes = {}) => { | ||
return { type, ...attributes }; | ||
return (initialAttributes = {}) => { | ||
return getSyntaxProxy()({ ...initialAttributes, type }); | ||
}; | ||
@@ -1077,6 +165,1 @@ }; | ||
}; | ||
/*! Bundled license information: | ||
@noble/hashes/utils.js: | ||
(*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) *) | ||
*/ |
{ | ||
"name": "@ronin/syntax", | ||
"version": "0.1.9", | ||
"version": "0.1.10-leo-ron-1097-experimental-46", | ||
"type": "module", | ||
@@ -46,5 +46,5 @@ "description": "Allows for defining RONIN queries and schemas in code.", | ||
"@types/bun": "1.1.15", | ||
"ronin": "5.3.5", | ||
"tsup": "8.3.5" | ||
"tsup": "8.3.5", | ||
"typescript": "5.7.2" | ||
} | ||
} |
Major refactor
Supply chain risk: The package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate that the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
1
51503
1141
1