@ronin/schema
Advanced tools
Comparing version 0.1.3-leo-ron-1071-experimental-13 to 0.1.3-leo-ron-1071-experimental-14
@@ -1,3 +0,271 @@ | ||
import { Model as Model$1, GetInstructions, WithInstruction, ModelIndex, ModelField, ModelTrigger } from '@ronin/compiler'; | ||
/** | ||
* A list of placeholders that can be located inside queries after those queries were | ||
* serialized into JSON objects. | ||
* | ||
* These placeholders are used to represent special keys and values. For example, if a | ||
* query is nested into a query, the nested query will be marked with `__RONIN_QUERY`, | ||
* which allows for distinguishing that nested query from an object of instructions. | ||
*/ | ||
declare const QUERY_SYMBOLS: { | ||
readonly QUERY: "__RONIN_QUERY"; | ||
readonly EXPRESSION: "__RONIN_EXPRESSION"; | ||
readonly FIELD: "__RONIN_FIELD_"; | ||
readonly FIELD_PARENT: "__RONIN_FIELD_PARENT_"; | ||
readonly FIELD_PARENT_OLD: "__RONIN_FIELD_PARENT_OLD_"; | ||
readonly FIELD_PARENT_NEW: "__RONIN_FIELD_PARENT_NEW_"; | ||
readonly VALUE: "__RONIN_VALUE"; | ||
}; | ||
type ModelEntityEnum = 'field' | 'index' | 'trigger' | 'preset'; | ||
type FieldValue = string | number | boolean | null | unknown; | ||
type FieldSelector = Record<string, FieldValue>; | ||
type Expression = { | ||
[QUERY_SYMBOLS.EXPRESSION]: string; | ||
}; | ||
type WithInstructionRefinement = FieldValue | { | ||
being?: FieldValue | Array<FieldValue>; | ||
notBeing?: FieldValue | Array<FieldValue>; | ||
startingWith?: FieldValue | Array<FieldValue>; | ||
notStartingWith?: FieldValue | Array<FieldValue>; | ||
endingWith?: FieldValue | Array<FieldValue>; | ||
notEndingWith?: FieldValue | Array<FieldValue>; | ||
containing?: FieldValue | Array<FieldValue>; | ||
notContaining?: FieldValue | Array<FieldValue>; | ||
greaterThan?: FieldValue | Array<FieldValue>; | ||
greaterOrEqual?: FieldValue | Array<FieldValue>; | ||
lessThan?: FieldValue | Array<FieldValue>; | ||
lessOrEqual?: FieldValue | Array<FieldValue>; | ||
}; | ||
type WithInstruction = Record<string, WithInstructionRefinement> | Record<string, Record<string, WithInstructionRefinement>> | Record<string, Array<WithInstructionRefinement>> | Record<string, Record<string, Array<WithInstructionRefinement>>>; | ||
type IncludingInstruction = Record<string, unknown | GetQuery>; | ||
type OrderedByInstruction = { | ||
ascending?: Array<string | Expression>; | ||
descending?: Array<string | Expression>; | ||
}; | ||
type ForInstruction = Array<string> | Record<string, string>; | ||
type CombinedInstructions = { | ||
with?: WithInstruction | Array<WithInstruction>; | ||
to?: FieldSelector; | ||
including?: IncludingInstruction; | ||
selecting?: Array<string>; | ||
orderedBy?: OrderedByInstruction; | ||
before?: string | null; | ||
after?: string | null; | ||
limitedTo?: number; | ||
for?: ForInstruction; | ||
}; | ||
type GetQuery = Record<string, Omit<CombinedInstructions, 'to'> | null>; | ||
type SetQuery = Record<string, Omit<CombinedInstructions, 'to'> & { | ||
to: FieldSelector; | ||
}>; | ||
type AddQuery = Record<string, Omit<CombinedInstructions, 'with' | 'for'> & { | ||
to: FieldSelector; | ||
}>; | ||
type RemoveQuery = Record<string, Omit<CombinedInstructions, 'to'>>; | ||
type CountQuery = Record<string, Omit<CombinedInstructions, 'to'> | null>; | ||
type GetInstructions = Omit<CombinedInstructions, 'to'>; | ||
type CreateQuery = { | ||
model: string | PublicModel; | ||
to?: PublicModel; | ||
}; | ||
type AlterQuery = { | ||
model: string; | ||
to?: Partial<PublicModel>; | ||
create?: { | ||
field?: ModelField; | ||
index?: ModelIndex; | ||
trigger?: ModelTrigger; | ||
preset?: ModelPreset; | ||
}; | ||
alter?: { | ||
field?: string; | ||
to?: Partial<ModelField>; | ||
} | { | ||
index?: string; | ||
to?: Partial<ModelIndex>; | ||
} | { | ||
trigger?: string; | ||
to?: Partial<ModelTrigger>; | ||
} | { | ||
preset?: string; | ||
to?: Partial<ModelPreset>; | ||
}; | ||
drop?: Partial<Record<ModelEntityEnum, string>>; | ||
}; | ||
type DropQuery = { | ||
model: string; | ||
}; | ||
type Query = { | ||
get?: GetQuery; | ||
set?: SetQuery; | ||
add?: AddQuery; | ||
remove?: RemoveQuery; | ||
count?: CountQuery; | ||
create?: CreateQuery; | ||
alter?: AlterQuery; | ||
drop?: DropQuery; | ||
}; | ||
type ModelFieldCollation = 'BINARY' | 'NOCASE' | 'RTRIM'; | ||
type ModelFieldBasics = { | ||
/** The label that should be used when displaying the field on the RONIN dashboard. */ | ||
name?: string; | ||
/** Allows for addressing the field programmatically. */ | ||
slug: string; | ||
/** How the field should be displayed visually on the RONIN dashboard. */ | ||
displayAs?: string; | ||
/** | ||
* If set, only one record of the same model will be allowed to exist with a given | ||
* value for the field. | ||
*/ | ||
unique?: boolean; | ||
/** | ||
* Whether a value must be provided for the field. If this attribute is set and no | ||
* value is provided, an error will be thrown. | ||
*/ | ||
required?: boolean; | ||
/** | ||
* The value that should be inserted into the field in the case that no value was | ||
* explicitly provided for it when a record is created. | ||
*/ | ||
defaultValue?: unknown; | ||
/** | ||
* An expression that should be evaluated to form the value of the field. The | ||
* expression can either be VIRTUAL (evaluated whenever a record is read) or STORED | ||
* (evaluated whenever a record is created or updated). | ||
*/ | ||
computedAs?: { | ||
kind: 'VIRTUAL' | 'STORED'; | ||
value: Expression; | ||
}; | ||
/** An expression that gets evaluated every time a value is provided for the field. */ | ||
check?: Expression; | ||
/** | ||
* If the field is of type `string`, setting this attribute defines the collation | ||
* sequence to use for the field value. | ||
*/ | ||
collation?: ModelFieldCollation; | ||
/** | ||
* If the field is of type `number`, setting this attribute will automatically increment | ||
* the value of the field with every new record that gets inserted. | ||
*/ | ||
increment?: boolean; | ||
}; | ||
type ModelFieldNormal = ModelFieldBasics & { | ||
type?: 'string' | 'number' | 'boolean' | 'date' | 'json'; | ||
}; | ||
type ModelFieldReferenceAction = 'CASCADE' | 'RESTRICT' | 'SET NULL' | 'SET DEFAULT' | 'NO ACTION'; | ||
type ModelFieldReference = ModelFieldBasics & { | ||
type: 'link'; | ||
target: string; | ||
kind?: 'one' | 'many'; | ||
actions?: { | ||
onDelete?: ModelFieldReferenceAction; | ||
onUpdate?: ModelFieldReferenceAction; | ||
}; | ||
}; | ||
type ModelField = ModelFieldNormal | ModelFieldReference; | ||
type ModelIndexField<T extends Array<ModelField> = Array<ModelField>> = { | ||
/** The collating sequence used for text placed inside the field. */ | ||
collation?: ModelFieldCollation; | ||
/** How the records in the index should be ordered. */ | ||
order?: 'ASC' | 'DESC'; | ||
} & ({ | ||
/** The field slug for which the index should be created. */ | ||
slug: T[number]['slug']; | ||
} | { | ||
/** The field expression for which the index should be created. */ | ||
expression: string; | ||
}); | ||
type ModelIndex<T extends Array<ModelField> = Array<ModelField>> = { | ||
/** | ||
* The list of fields in the model for which the index should be created. | ||
*/ | ||
fields: Array<ModelIndexField<T>>; | ||
/** | ||
* The identifier of the index. | ||
*/ | ||
slug?: string; | ||
/** | ||
* Whether only one record with a unique value for the provided fields will be allowed. | ||
*/ | ||
unique?: boolean; | ||
/** | ||
* An object containing query instructions that will be used to match the records that | ||
* should be included in the index. | ||
*/ | ||
filter?: WithInstruction; | ||
}; | ||
type ModelTriggerField<T extends Array<ModelField> = Array<ModelField>> = { | ||
/** | ||
* The slug of the field that should cause the trigger to fire if the value of the | ||
* field has changed. | ||
*/ | ||
slug: T[number]['slug']; | ||
}; | ||
type ModelTrigger<T extends Array<ModelField> = Array<ModelField>> = { | ||
/** | ||
* The identifier of the trigger. | ||
*/ | ||
slug?: string; | ||
/** The type of query for which the trigger should fire. */ | ||
action: 'INSERT' | 'UPDATE' | 'DELETE'; | ||
/** When the trigger should fire in the case that a matching query is executed. */ | ||
when: 'BEFORE' | 'DURING' | 'AFTER'; | ||
/** A list of queries that should be executed when the trigger fires. */ | ||
effects: Array<Query>; | ||
/** A list of field slugs for which the trigger should fire. */ | ||
fields?: Array<ModelTriggerField<T>>; | ||
/** | ||
* An object containing query instructions used to determine whether the trigger should | ||
* fire, or not. | ||
*/ | ||
filter?: WithInstruction; | ||
}; | ||
type ModelPreset = { | ||
/** The identifier that can be used for adding the preset to a query. */ | ||
slug: string; | ||
/** The query instructions that should be applied when the preset is used. */ | ||
instructions: GetInstructions; | ||
}; | ||
interface Model$1<T extends Array<ModelField> = Array<ModelField>> { | ||
name: string; | ||
pluralName: string; | ||
slug: string; | ||
pluralSlug: string; | ||
identifiers: { | ||
name: T[number]['slug']; | ||
slug: T[number]['slug']; | ||
}; | ||
idPrefix: string; | ||
/** The name of the table in SQLite. */ | ||
table: string; | ||
/** | ||
* The table name to which the model was aliased. This will be set in the case that | ||
* multiple tables are being joined into one SQL statement. | ||
*/ | ||
tableAlias?: string; | ||
/** | ||
* Details that identify the model as a model that was automatically created by RONIN, | ||
* instead of being manually created by a developer. | ||
*/ | ||
system?: { | ||
/** The model that caused the system model to get created. */ | ||
model: string | 'root'; | ||
/** | ||
* If the model is used to associate two models with each other (in the case of | ||
* many-cardinality link fields), this property should contain the field slug to | ||
* which the associative model should be mounted on the source model. | ||
*/ | ||
associationSlug?: string; | ||
}; | ||
fields: T; | ||
indexes?: Array<ModelIndex<T>>; | ||
triggers?: Array<ModelTrigger<T>>; | ||
presets?: Array<ModelPreset>; | ||
} | ||
type PublicModel<T extends Array<ModelField> = Array<ModelField>> = Omit<Partial<Model$1<T>>, 'slug' | 'identifiers' | 'system' | 'tableAlias'> & { | ||
slug: Required<Model$1['slug']>; | ||
identifiers?: Partial<Model$1['identifiers']>; | ||
}; | ||
interface RoninFields { | ||
@@ -11,3 +279,3 @@ id: string; | ||
} | ||
interface Model<Fields> extends Omit<Model$1, 'fields' | 'indexes' | 'triggers' | 'presets'> { | ||
interface Model<Fields> extends Omit<PublicModel, 'fields' | 'indexes' | 'triggers' | 'presets'> { | ||
fields?: Fields; | ||
@@ -14,0 +282,0 @@ presets?: Record<string, GetInstructions | WithInstruction>; |
@@ -0,1 +1,867 @@ | ||
var __create = Object.create; | ||
var __defProp = Object.defineProperty; | ||
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; | ||
var __getOwnPropNames = Object.getOwnPropertyNames; | ||
var __getProtoOf = Object.getPrototypeOf; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, { | ||
get: (a, b) => (typeof require !== "undefined" ? require : a)[b] | ||
}) : x)(function(x) { | ||
if (typeof require !== "undefined") return require.apply(this, arguments); | ||
throw Error('Dynamic require of "' + x + '" is not supported'); | ||
}); | ||
var __commonJS = (cb, mod) => function __require2() { | ||
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports; | ||
}; | ||
var __copyProps = (to, from, except, desc) => { | ||
if (from && typeof from === "object" || typeof from === "function") { | ||
for (let key of __getOwnPropNames(from)) | ||
if (!__hasOwnProp.call(to, key) && key !== except) | ||
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); | ||
} | ||
return to; | ||
}; | ||
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( | ||
// If the importer is in node compatibility mode or this is not an ESM | ||
// file that has been converted to a CommonJS file using a Babel- | ||
// compatible transform (i.e. "__esModule" has not been set), then set | ||
// "default" to the CommonJS "module.exports" for node compatibility. | ||
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, | ||
mod | ||
)); | ||
// node_modules/@noble/hashes/_assert.js | ||
var require_assert = __commonJS({ | ||
"node_modules/@noble/hashes/_assert.js"(exports) { | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.anumber = anumber; | ||
exports.number = anumber; | ||
exports.abytes = abytes; | ||
exports.bytes = abytes; | ||
exports.ahash = ahash; | ||
exports.aexists = aexists; | ||
exports.aoutput = aoutput; | ||
function anumber(n) { | ||
if (!Number.isSafeInteger(n) || n < 0) | ||
throw new Error("positive integer expected, got " + n); | ||
} | ||
function isBytes(a) { | ||
return a instanceof Uint8Array || ArrayBuffer.isView(a) && a.constructor.name === "Uint8Array"; | ||
} | ||
function abytes(b, ...lengths) { | ||
if (!isBytes(b)) | ||
throw new Error("Uint8Array expected"); | ||
if (lengths.length > 0 && !lengths.includes(b.length)) | ||
throw new Error("Uint8Array expected of length " + lengths + ", got length=" + b.length); | ||
} | ||
function ahash(h) { | ||
if (typeof h !== "function" || typeof h.create !== "function") | ||
throw new Error("Hash should be wrapped by utils.wrapConstructor"); | ||
anumber(h.outputLen); | ||
anumber(h.blockLen); | ||
} | ||
function aexists(instance, checkFinished = true) { | ||
if (instance.destroyed) | ||
throw new Error("Hash instance has been destroyed"); | ||
if (checkFinished && instance.finished) | ||
throw new Error("Hash#digest() has already been called"); | ||
} | ||
function aoutput(out, instance) { | ||
abytes(out); | ||
const min = instance.outputLen; | ||
if (out.length < min) { | ||
throw new Error("digestInto() expects output buffer of length at least " + min); | ||
} | ||
} | ||
var assert = { | ||
number: anumber, | ||
bytes: abytes, | ||
hash: ahash, | ||
exists: aexists, | ||
output: aoutput | ||
}; | ||
exports.default = assert; | ||
} | ||
}); | ||
// node_modules/@noble/hashes/_u64.js | ||
var require_u64 = __commonJS({ | ||
"node_modules/@noble/hashes/_u64.js"(exports) { | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.add5L = exports.add5H = exports.add4H = exports.add4L = exports.add3H = exports.add3L = exports.rotlBL = exports.rotlBH = exports.rotlSL = exports.rotlSH = exports.rotr32L = exports.rotr32H = exports.rotrBL = exports.rotrBH = exports.rotrSL = exports.rotrSH = exports.shrSL = exports.shrSH = exports.toBig = void 0; | ||
exports.fromBig = fromBig; | ||
exports.split = split; | ||
exports.add = add; | ||
var U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); | ||
var _32n = /* @__PURE__ */ BigInt(32); | ||
function fromBig(n, le = false) { | ||
if (le) | ||
return { h: Number(n & U32_MASK64), l: Number(n >> _32n & U32_MASK64) }; | ||
return { h: Number(n >> _32n & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; | ||
} | ||
function split(lst, le = false) { | ||
let Ah = new Uint32Array(lst.length); | ||
let Al = new Uint32Array(lst.length); | ||
for (let i = 0; i < lst.length; i++) { | ||
const { h, l } = fromBig(lst[i], le); | ||
[Ah[i], Al[i]] = [h, l]; | ||
} | ||
return [Ah, Al]; | ||
} | ||
var toBig = (h, l) => BigInt(h >>> 0) << _32n | BigInt(l >>> 0); | ||
exports.toBig = toBig; | ||
var shrSH = (h, _l, s) => h >>> s; | ||
exports.shrSH = shrSH; | ||
var shrSL = (h, l, s) => h << 32 - s | l >>> s; | ||
exports.shrSL = shrSL; | ||
var rotrSH = (h, l, s) => h >>> s | l << 32 - s; | ||
exports.rotrSH = rotrSH; | ||
var rotrSL = (h, l, s) => h << 32 - s | l >>> s; | ||
exports.rotrSL = rotrSL; | ||
var rotrBH = (h, l, s) => h << 64 - s | l >>> s - 32; | ||
exports.rotrBH = rotrBH; | ||
var rotrBL = (h, l, s) => h >>> s - 32 | l << 64 - s; | ||
exports.rotrBL = rotrBL; | ||
var rotr32H = (_h, l) => l; | ||
exports.rotr32H = rotr32H; | ||
var rotr32L = (h, _l) => h; | ||
exports.rotr32L = rotr32L; | ||
var rotlSH = (h, l, s) => h << s | l >>> 32 - s; | ||
exports.rotlSH = rotlSH; | ||
var rotlSL = (h, l, s) => l << s | h >>> 32 - s; | ||
exports.rotlSL = rotlSL; | ||
var rotlBH = (h, l, s) => l << s - 32 | h >>> 64 - s; | ||
exports.rotlBH = rotlBH; | ||
var rotlBL = (h, l, s) => h << s - 32 | l >>> 64 - s; | ||
exports.rotlBL = rotlBL; | ||
function add(Ah, Al, Bh, Bl) { | ||
const l = (Al >>> 0) + (Bl >>> 0); | ||
return { h: Ah + Bh + (l / 2 ** 32 | 0) | 0, l: l | 0 }; | ||
} | ||
var add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); | ||
exports.add3L = add3L; | ||
var add3H = (low, Ah, Bh, Ch) => Ah + Bh + Ch + (low / 2 ** 32 | 0) | 0; | ||
exports.add3H = add3H; | ||
var add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); | ||
exports.add4L = add4L; | ||
var add4H = (low, Ah, Bh, Ch, Dh) => Ah + Bh + Ch + Dh + (low / 2 ** 32 | 0) | 0; | ||
exports.add4H = add4H; | ||
var add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); | ||
exports.add5L = add5L; | ||
var add5H = (low, Ah, Bh, Ch, Dh, Eh) => Ah + Bh + Ch + Dh + Eh + (low / 2 ** 32 | 0) | 0; | ||
exports.add5H = add5H; | ||
var u64 = { | ||
fromBig, | ||
split, | ||
toBig, | ||
shrSH, | ||
shrSL, | ||
rotrSH, | ||
rotrSL, | ||
rotrBH, | ||
rotrBL, | ||
rotr32H, | ||
rotr32L, | ||
rotlSH, | ||
rotlSL, | ||
rotlBH, | ||
rotlBL, | ||
add, | ||
add3L, | ||
add3H, | ||
add4L, | ||
add4H, | ||
add5H, | ||
add5L | ||
}; | ||
exports.default = u64; | ||
} | ||
}); | ||
// node_modules/@noble/hashes/cryptoNode.js | ||
var require_cryptoNode = __commonJS({ | ||
"node_modules/@noble/hashes/cryptoNode.js"(exports) { | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.crypto = void 0; | ||
var nc = __require("node:crypto"); | ||
exports.crypto = nc && typeof nc === "object" && "webcrypto" in nc ? nc.webcrypto : nc && typeof nc === "object" && "randomBytes" in nc ? nc : void 0; | ||
} | ||
}); | ||
// node_modules/@noble/hashes/utils.js | ||
var require_utils = __commonJS({ | ||
"node_modules/@noble/hashes/utils.js"(exports) { | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.Hash = exports.nextTick = exports.byteSwapIfBE = exports.byteSwap = exports.isLE = exports.rotl = exports.rotr = exports.createView = exports.u32 = exports.u8 = void 0; | ||
exports.isBytes = isBytes; | ||
exports.byteSwap32 = byteSwap32; | ||
exports.bytesToHex = bytesToHex; | ||
exports.hexToBytes = hexToBytes; | ||
exports.asyncLoop = asyncLoop; | ||
exports.utf8ToBytes = utf8ToBytes; | ||
exports.toBytes = toBytes; | ||
exports.concatBytes = concatBytes; | ||
exports.checkOpts = checkOpts; | ||
exports.wrapConstructor = wrapConstructor; | ||
exports.wrapConstructorWithOpts = wrapConstructorWithOpts; | ||
exports.wrapXOFConstructorWithOpts = wrapXOFConstructorWithOpts; | ||
exports.randomBytes = randomBytes; | ||
var crypto_1 = require_cryptoNode(); | ||
var _assert_js_1 = require_assert(); | ||
function isBytes(a) { | ||
return a instanceof Uint8Array || ArrayBuffer.isView(a) && a.constructor.name === "Uint8Array"; | ||
} | ||
var u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); | ||
exports.u8 = u8; | ||
var u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); | ||
exports.u32 = u32; | ||
var createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); | ||
exports.createView = createView; | ||
var rotr = (word, shift) => word << 32 - shift | word >>> shift; | ||
exports.rotr = rotr; | ||
var rotl = (word, shift) => word << shift | word >>> 32 - shift >>> 0; | ||
exports.rotl = rotl; | ||
exports.isLE = (() => new Uint8Array(new Uint32Array([287454020]).buffer)[0] === 68)(); | ||
var byteSwap = (word) => word << 24 & 4278190080 | word << 8 & 16711680 | word >>> 8 & 65280 | word >>> 24 & 255; | ||
exports.byteSwap = byteSwap; | ||
exports.byteSwapIfBE = exports.isLE ? (n) => n : (n) => (0, exports.byteSwap)(n); | ||
function byteSwap32(arr) { | ||
for (let i = 0; i < arr.length; i++) { | ||
arr[i] = (0, exports.byteSwap)(arr[i]); | ||
} | ||
} | ||
var hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, "0")); | ||
function bytesToHex(bytes) { | ||
(0, _assert_js_1.abytes)(bytes); | ||
let hex = ""; | ||
for (let i = 0; i < bytes.length; i++) { | ||
hex += hexes[bytes[i]]; | ||
} | ||
return hex; | ||
} | ||
var asciis = { _0: 48, _9: 57, A: 65, F: 70, a: 97, f: 102 }; | ||
function asciiToBase16(ch) { | ||
if (ch >= asciis._0 && ch <= asciis._9) | ||
return ch - asciis._0; | ||
if (ch >= asciis.A && ch <= asciis.F) | ||
return ch - (asciis.A - 10); | ||
if (ch >= asciis.a && ch <= asciis.f) | ||
return ch - (asciis.a - 10); | ||
return; | ||
} | ||
function hexToBytes(hex) { | ||
if (typeof hex !== "string") | ||
throw new Error("hex string expected, got " + typeof hex); | ||
const hl = hex.length; | ||
const al = hl / 2; | ||
if (hl % 2) | ||
throw new Error("hex string expected, got unpadded hex of length " + hl); | ||
const array = new Uint8Array(al); | ||
for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { | ||
const n1 = asciiToBase16(hex.charCodeAt(hi)); | ||
const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); | ||
if (n1 === void 0 || n2 === void 0) { | ||
const char = hex[hi] + hex[hi + 1]; | ||
throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); | ||
} | ||
array[ai] = n1 * 16 + n2; | ||
} | ||
return array; | ||
} | ||
var nextTick = async () => { | ||
}; | ||
exports.nextTick = nextTick; | ||
async function asyncLoop(iters, tick, cb) { | ||
let ts = Date.now(); | ||
for (let i = 0; i < iters; i++) { | ||
cb(i); | ||
const diff = Date.now() - ts; | ||
if (diff >= 0 && diff < tick) | ||
continue; | ||
await (0, exports.nextTick)(); | ||
ts += diff; | ||
} | ||
} | ||
function utf8ToBytes(str) { | ||
if (typeof str !== "string") | ||
throw new Error("utf8ToBytes expected string, got " + typeof str); | ||
return new Uint8Array(new TextEncoder().encode(str)); | ||
} | ||
function toBytes(data) { | ||
if (typeof data === "string") | ||
data = utf8ToBytes(data); | ||
(0, _assert_js_1.abytes)(data); | ||
return data; | ||
} | ||
function concatBytes(...arrays) { | ||
let sum = 0; | ||
for (let i = 0; i < arrays.length; i++) { | ||
const a = arrays[i]; | ||
(0, _assert_js_1.abytes)(a); | ||
sum += a.length; | ||
} | ||
const res = new Uint8Array(sum); | ||
for (let i = 0, pad = 0; i < arrays.length; i++) { | ||
const a = arrays[i]; | ||
res.set(a, pad); | ||
pad += a.length; | ||
} | ||
return res; | ||
} | ||
var Hash = class { | ||
// Safe version that clones internal state | ||
clone() { | ||
return this._cloneInto(); | ||
} | ||
}; | ||
exports.Hash = Hash; | ||
function checkOpts(defaults, opts) { | ||
if (opts !== void 0 && {}.toString.call(opts) !== "[object Object]") | ||
throw new Error("Options should be object or undefined"); | ||
const merged = Object.assign(defaults, opts); | ||
return merged; | ||
} | ||
function wrapConstructor(hashCons) { | ||
const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); | ||
const tmp = hashCons(); | ||
hashC.outputLen = tmp.outputLen; | ||
hashC.blockLen = tmp.blockLen; | ||
hashC.create = () => hashCons(); | ||
return hashC; | ||
} | ||
function wrapConstructorWithOpts(hashCons) { | ||
const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); | ||
const tmp = hashCons({}); | ||
hashC.outputLen = tmp.outputLen; | ||
hashC.blockLen = tmp.blockLen; | ||
hashC.create = (opts) => hashCons(opts); | ||
return hashC; | ||
} | ||
function wrapXOFConstructorWithOpts(hashCons) { | ||
const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); | ||
const tmp = hashCons({}); | ||
hashC.outputLen = tmp.outputLen; | ||
hashC.blockLen = tmp.blockLen; | ||
hashC.create = (opts) => hashCons(opts); | ||
return hashC; | ||
} | ||
function randomBytes(bytesLength = 32) { | ||
if (crypto_1.crypto && typeof crypto_1.crypto.getRandomValues === "function") { | ||
return crypto_1.crypto.getRandomValues(new Uint8Array(bytesLength)); | ||
} | ||
if (crypto_1.crypto && typeof crypto_1.crypto.randomBytes === "function") { | ||
return crypto_1.crypto.randomBytes(bytesLength); | ||
} | ||
throw new Error("crypto.getRandomValues must be defined"); | ||
} | ||
} | ||
}); | ||
// node_modules/@noble/hashes/sha3.js | ||
var require_sha3 = __commonJS({ | ||
"node_modules/@noble/hashes/sha3.js"(exports) { | ||
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.shake256 = exports.shake128 = exports.keccak_512 = exports.keccak_384 = exports.keccak_256 = exports.keccak_224 = exports.sha3_512 = exports.sha3_384 = exports.sha3_256 = exports.sha3_224 = exports.Keccak = void 0; | ||
exports.keccakP = keccakP; | ||
var _assert_js_1 = require_assert(); | ||
var _u64_js_1 = require_u64(); | ||
var utils_js_1 = require_utils(); | ||
var SHA3_PI = []; | ||
var SHA3_ROTL = []; | ||
var _SHA3_IOTA = []; | ||
var _0n = /* @__PURE__ */ BigInt(0); | ||
var _1n = /* @__PURE__ */ BigInt(1); | ||
var _2n = /* @__PURE__ */ BigInt(2); | ||
var _7n = /* @__PURE__ */ BigInt(7); | ||
var _256n = /* @__PURE__ */ BigInt(256); | ||
var _0x71n = /* @__PURE__ */ BigInt(113); | ||
for (let round = 0, R = _1n, x = 1, y = 0; round < 24; round++) { | ||
[x, y] = [y, (2 * x + 3 * y) % 5]; | ||
SHA3_PI.push(2 * (5 * y + x)); | ||
SHA3_ROTL.push((round + 1) * (round + 2) / 2 % 64); | ||
let t = _0n; | ||
for (let j = 0; j < 7; j++) { | ||
R = (R << _1n ^ (R >> _7n) * _0x71n) % _256n; | ||
if (R & _2n) | ||
t ^= _1n << (_1n << /* @__PURE__ */ BigInt(j)) - _1n; | ||
} | ||
_SHA3_IOTA.push(t); | ||
} | ||
var [SHA3_IOTA_H, SHA3_IOTA_L] = /* @__PURE__ */ (0, _u64_js_1.split)(_SHA3_IOTA, true); | ||
var rotlH = (h, l, s) => s > 32 ? (0, _u64_js_1.rotlBH)(h, l, s) : (0, _u64_js_1.rotlSH)(h, l, s); | ||
var rotlL = (h, l, s) => s > 32 ? (0, _u64_js_1.rotlBL)(h, l, s) : (0, _u64_js_1.rotlSL)(h, l, s); | ||
function keccakP(s, rounds = 24) { | ||
const B = new Uint32Array(5 * 2); | ||
for (let round = 24 - rounds; round < 24; round++) { | ||
for (let x = 0; x < 10; x++) | ||
B[x] = s[x] ^ s[x + 10] ^ s[x + 20] ^ s[x + 30] ^ s[x + 40]; | ||
for (let x = 0; x < 10; x += 2) { | ||
const idx1 = (x + 8) % 10; | ||
const idx0 = (x + 2) % 10; | ||
const B0 = B[idx0]; | ||
const B1 = B[idx0 + 1]; | ||
const Th = rotlH(B0, B1, 1) ^ B[idx1]; | ||
const Tl = rotlL(B0, B1, 1) ^ B[idx1 + 1]; | ||
for (let y = 0; y < 50; y += 10) { | ||
s[x + y] ^= Th; | ||
s[x + y + 1] ^= Tl; | ||
} | ||
} | ||
let curH = s[2]; | ||
let curL = s[3]; | ||
for (let t = 0; t < 24; t++) { | ||
const shift = SHA3_ROTL[t]; | ||
const Th = rotlH(curH, curL, shift); | ||
const Tl = rotlL(curH, curL, shift); | ||
const PI = SHA3_PI[t]; | ||
curH = s[PI]; | ||
curL = s[PI + 1]; | ||
s[PI] = Th; | ||
s[PI + 1] = Tl; | ||
} | ||
for (let y = 0; y < 50; y += 10) { | ||
for (let x = 0; x < 10; x++) | ||
B[x] = s[y + x]; | ||
for (let x = 0; x < 10; x++) | ||
s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10]; | ||
} | ||
s[0] ^= SHA3_IOTA_H[round]; | ||
s[1] ^= SHA3_IOTA_L[round]; | ||
} | ||
B.fill(0); | ||
} | ||
var Keccak = class _Keccak extends utils_js_1.Hash { | ||
// NOTE: we accept arguments in bytes instead of bits here. | ||
constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) { | ||
super(); | ||
this.blockLen = blockLen; | ||
this.suffix = suffix; | ||
this.outputLen = outputLen; | ||
this.enableXOF = enableXOF; | ||
this.rounds = rounds; | ||
this.pos = 0; | ||
this.posOut = 0; | ||
this.finished = false; | ||
this.destroyed = false; | ||
(0, _assert_js_1.anumber)(outputLen); | ||
if (0 >= this.blockLen || this.blockLen >= 200) | ||
throw new Error("Sha3 supports only keccak-f1600 function"); | ||
this.state = new Uint8Array(200); | ||
this.state32 = (0, utils_js_1.u32)(this.state); | ||
} | ||
keccak() { | ||
if (!utils_js_1.isLE) | ||
(0, utils_js_1.byteSwap32)(this.state32); | ||
keccakP(this.state32, this.rounds); | ||
if (!utils_js_1.isLE) | ||
(0, utils_js_1.byteSwap32)(this.state32); | ||
this.posOut = 0; | ||
this.pos = 0; | ||
} | ||
update(data) { | ||
(0, _assert_js_1.aexists)(this); | ||
const { blockLen, state } = this; | ||
data = (0, utils_js_1.toBytes)(data); | ||
const len = data.length; | ||
for (let pos = 0; pos < len; ) { | ||
const take = Math.min(blockLen - this.pos, len - pos); | ||
for (let i = 0; i < take; i++) | ||
state[this.pos++] ^= data[pos++]; | ||
if (this.pos === blockLen) | ||
this.keccak(); | ||
} | ||
return this; | ||
} | ||
finish() { | ||
if (this.finished) | ||
return; | ||
this.finished = true; | ||
const { state, suffix, pos, blockLen } = this; | ||
state[pos] ^= suffix; | ||
if ((suffix & 128) !== 0 && pos === blockLen - 1) | ||
this.keccak(); | ||
state[blockLen - 1] ^= 128; | ||
this.keccak(); | ||
} | ||
writeInto(out) { | ||
(0, _assert_js_1.aexists)(this, false); | ||
(0, _assert_js_1.abytes)(out); | ||
this.finish(); | ||
const bufferOut = this.state; | ||
const { blockLen } = this; | ||
for (let pos = 0, len = out.length; pos < len; ) { | ||
if (this.posOut >= blockLen) | ||
this.keccak(); | ||
const take = Math.min(blockLen - this.posOut, len - pos); | ||
out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos); | ||
this.posOut += take; | ||
pos += take; | ||
} | ||
return out; | ||
} | ||
xofInto(out) { | ||
if (!this.enableXOF) | ||
throw new Error("XOF is not possible for this instance"); | ||
return this.writeInto(out); | ||
} | ||
xof(bytes) { | ||
(0, _assert_js_1.anumber)(bytes); | ||
return this.xofInto(new Uint8Array(bytes)); | ||
} | ||
digestInto(out) { | ||
(0, _assert_js_1.aoutput)(out, this); | ||
if (this.finished) | ||
throw new Error("digest() was already called"); | ||
this.writeInto(out); | ||
this.destroy(); | ||
return out; | ||
} | ||
digest() { | ||
return this.digestInto(new Uint8Array(this.outputLen)); | ||
} | ||
destroy() { | ||
this.destroyed = true; | ||
this.state.fill(0); | ||
} | ||
_cloneInto(to) { | ||
const { blockLen, suffix, outputLen, rounds, enableXOF } = this; | ||
to || (to = new _Keccak(blockLen, suffix, outputLen, enableXOF, rounds)); | ||
to.state32.set(this.state32); | ||
to.pos = this.pos; | ||
to.posOut = this.posOut; | ||
to.finished = this.finished; | ||
to.rounds = rounds; | ||
to.suffix = suffix; | ||
to.outputLen = outputLen; | ||
to.enableXOF = enableXOF; | ||
to.destroyed = this.destroyed; | ||
return to; | ||
} | ||
}; | ||
exports.Keccak = Keccak; | ||
var gen = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapConstructor)(() => new Keccak(blockLen, suffix, outputLen)); | ||
exports.sha3_224 = gen(6, 144, 224 / 8); | ||
exports.sha3_256 = gen(6, 136, 256 / 8); | ||
exports.sha3_384 = gen(6, 104, 384 / 8); | ||
exports.sha3_512 = gen(6, 72, 512 / 8); | ||
exports.keccak_224 = gen(1, 144, 224 / 8); | ||
exports.keccak_256 = gen(1, 136, 256 / 8); | ||
exports.keccak_384 = gen(1, 104, 384 / 8); | ||
exports.keccak_512 = gen(1, 72, 512 / 8); | ||
var genShake = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapXOFConstructorWithOpts)((opts = {}) => new Keccak(blockLen, suffix, opts.dkLen === void 0 ? outputLen : opts.dkLen, true)); | ||
exports.shake128 = genShake(31, 168, 128 / 8); | ||
exports.shake256 = genShake(31, 136, 256 / 8); | ||
} | ||
}); | ||
// node_modules/@paralleldrive/cuid2/src/index.js
// Bundled cuid2 source: collision-resistant id generation that mixes a
// timestamp, random entropy, a session counter, and a host fingerprint.
var require_src = __commonJS({
"node_modules/@paralleldrive/cuid2/src/index.js"(exports, module) {
"use strict";
// Only SHA3-512 is needed from the bundled hash module above.
var { sha3_512: sha3 } = require_sha3();
// Ids are 24 chars by default; 32 is the "big" size, also used for the
// fingerprint length and as the default isCuid maximum.
var defaultLength = 24;
var bigLength = 32;
// Build a random base36 string of exactly `length` characters, drawing
// one digit (0-9a-z) at a time from the supplied RNG.
var createEntropy = (length = 4, random = Math.random) => {
const chars = [];
while (chars.length < length) {
chars.push(Math.floor(random() * 36).toString(36));
}
return chars.join("");
};
// Interpret a byte buffer as a single big-endian unsigned integer.
function bufToBigInt(buf) {
let value = 0n;
for (const byte of buf.values()) {
// Multiplying by 256 is equivalent to shifting left by 8 bits.
value = value * 256n + BigInt(byte);
}
return value;
}
// Hash `input` with SHA3-512 and re-encode the digest in base36.
// The leading base36 character is dropped — presumably to remove the
// first character's histogram bias; confirm against upstream cuid2.
var hash = (input = "") => {
return bufToBigInt(sha3(input)).toString(36).slice(1);
};
// The 26 lowercase ASCII letters ("a" through "z").
var alphabet = Array.from({ length: 26 }, (_unused, index) =>
String.fromCharCode(97 + index)
);
// Pick a uniformly random letter using the supplied RNG ([0, 1) float).
var randomLetter = (random) => {
const position = Math.floor(random() * alphabet.length);
return alphabet[position];
};
// Derive a host/session fingerprint: the names of all global properties
// (which differ between runtimes and environments) mixed with fresh
// entropy, hashed and truncated to `bigLength` characters.
var createFingerprint = ({
globalObj = typeof global !== "undefined" ? global : typeof window !== "undefined" ? window : {},
random = Math.random
} = {}) => {
const globals = Object.keys(globalObj).toString();
// Fall back to pure entropy when the global object exposes no keys.
const sourceString = globals.length ? globals + createEntropy(bigLength, random) : createEntropy(bigLength, random);
return hash(sourceString).substring(0, bigLength);
};
// Monotonic session counter starting at `count`; each call returns the
// current value and then increments it.
var createCounter = (count) => {
return () => count++;
};
// Upper bound for the counter's random starting value.
var initialCountMax = 476782367;
// Build a cuid2 generator. All options are optional with safe defaults;
// each id combines a random first letter, the current time, the session
// counter, random salt, and the host fingerprint.
var init = ({
// Fallback if the user does not pass in a CSPRNG. This should be OK
// because we don't rely solely on the random number generator for entropy.
// We also use the host fingerprint, current time, and a session counter.
random = Math.random,
counter = createCounter(Math.floor(random() * initialCountMax)),
length = defaultLength,
fingerprint = createFingerprint({ random })
} = {}) => {
return function cuid2() {
// Ids start with a letter — presumably so they never begin with a
// digit; confirm against upstream cuid2 docs.
const firstLetter = randomLetter(random);
// Millisecond timestamp, base36-encoded.
const time = Date.now().toString(36);
const count = counter().toString(36);
const salt = createEntropy(length, random);
const hashInput = `${time + salt + count + fingerprint}`;
// First letter plus (length - 1) characters of the hash.
return `${firstLetter + hash(hashInput).substring(1, length)}`;
};
};
// Default generator built from all-default configuration.
var createId = init();
// Check whether `id` looks like a cuid2: a lowercase base36 string whose
// length lies within [minLength, maxLength]. Returns false for any
// non-string input instead of throwing.
var isCuid = (id, { minLength = 2, maxLength = bigLength } = {}) => {
// Bug fix: the original read `id.length` before the type check, so
// isCuid(null) / isCuid(123) threw a TypeError instead of returning
// false. The empty try/finally (which swallowed nothing) is removed.
if (typeof id !== "string") return false;
const length = id.length;
const regex = /^[0-9a-z]+$/;
return length >= minLength && length <= maxLength && regex.test(id);
};
// Public API of the cuid2 source module.
module.exports.getConstants = () => ({ defaultLength, bigLength });
module.exports.init = init;
module.exports.createId = createId;
module.exports.bufToBigInt = bufToBigInt;
module.exports.createCounter = createCounter;
module.exports.createFingerprint = createFingerprint;
module.exports.isCuid = isCuid;
}
});
// node_modules/@paralleldrive/cuid2/index.js
// Package entry point: a thin shim re-exporting the public cuid2 API.
var require_cuid2 = __commonJS({
"node_modules/@paralleldrive/cuid2/index.js"(exports, module) {
"use strict";
var { createId, init, getConstants, isCuid } = require_src();
module.exports.createId = createId;
module.exports.init = init;
module.exports.getConstants = getConstants;
module.exports.isCuid = isCuid;
}
});
// node_modules/title/lib/lower-case.js
// Word lists exported as one Set: short conjunctions, articles, and
// prepositions that English title case conventionally leaves lowercase.
var require_lower_case = __commonJS({
"node_modules/title/lib/lower-case.js"(exports, module) {
"use strict";
// Coordinating conjunctions (FANBOYS).
var conjunctions = [
"for",
"and",
"nor",
"but",
"or",
"yet",
"so"
];
// Articles.
var articles = [
"a",
"an",
"the"
];
// Common prepositions.
var prepositions = [
"aboard",
"about",
"above",
"across",
"after",
"against",
"along",
"amid",
"among",
"anti",
"around",
"as",
"at",
"before",
"behind",
"below",
"beneath",
"beside",
"besides",
"between",
"beyond",
"but",
"by",
"concerning",
"considering",
"despite",
"down",
"during",
"except",
"excepting",
"excluding",
"following",
"for",
"from",
"in",
"inside",
"into",
"like",
"minus",
"near",
"of",
"off",
"on",
"onto",
"opposite",
"over",
"past",
"per",
"plus",
"regarding",
"round",
"save",
"since",
"than",
"through",
"to",
"toward",
"towards",
"under",
"underneath",
"unlike",
"until",
"up",
"upon",
"versus",
"via",
"with",
"within",
"without"
];
// A Set gives O(1) membership checks; duplicates across the three lists
// (e.g. "but", "for") collapse harmlessly.
module.exports = /* @__PURE__ */ new Set([
...conjunctions,
...articles,
...prepositions
]);
}
});
// node_modules/title/lib/specials.js
// Terms whose canonical casing must be preserved verbatim when
// title-casing: brand names, acronyms, and well-known file names.
var require_specials = __commonJS({
"node_modules/title/lib/specials.js"(exports, module) {
"use strict";
var intended = [
"ZEIT",
"ZEIT Inc.",
"Vercel",
"Vercel Inc.",
"CLI",
"API",
"HTTP",
"HTTPS",
"JSX",
"DNS",
"URL",
"now.sh",
"now.json",
"vercel.app",
"vercel.json",
"CI",
"CD",
"CDN",
"package.json",
"package.lock",
"yarn.lock",
"GitHub",
"GitLab",
"CSS",
"Sass",
"JS",
"JavaScript",
"TypeScript",
"HTML",
"WordPress",
"Next.js",
"Node.js",
"Webpack",
"Docker",
"Bash",
"Kubernetes",
"SWR",
"TinaCMS",
"UI",
"UX",
"TS",
"TSX",
"iPhone",
"iPad",
"watchOS",
"iOS",
"iPadOS",
"macOS",
"PHP",
"composer.json",
"composer.lock",
"CMS",
"SQL",
"C",
"C#",
"GraphQL",
"GraphiQL",
"JWT",
"JWTs"
];
module.exports = intended;
}
});
// node_modules/title/lib/index.js
// Bundled copy of the `title` package: converts a string to English title
// case, keeping listed small words lowercase and special terms verbatim.
var require_lib = __commonJS({
"node_modules/title/lib/index.js"(exports, module) {
"use strict";
var lowerCase = require_lower_case();
var specials = require_specials();
// A "word" character: anything except whitespace, quotes, parentheses,
// and sentence punctuation.
var word = `[^\\s'\u2019\\(\\)!?;:"-]`;
// Per-word matcher; capture groups: (1) leading separator text, (2) a
// first character forced uppercase because it follows sentence
// punctuation, (3) an ordinary first character, (4) rest of the word.
var regex = new RegExp(`(?:(?:(\\s?(?:^|[.\\(\\)!?;:"-])\\s*)(${word}))|(${word}))(${word}*[\u2019']*${word}*)`, "g");
// Compile each special term into a case-insensitive whole-word pattern
// paired with its canonical spelling.
var convertToRegExp = (specials2) => specials2.map((s) => [new RegExp(`\\b${s}\\b`, "gi"), s]);
function parseMatch(match) {
const firstCharacter = match[0];
// A match led by whitespace keeps only the word itself.
if (/\s/.test(firstCharacter)) {
return match.slice(1);
}
// A match led by a parenthesis is signalled as "leave untouched".
if (/[\(\)]/.test(firstCharacter)) {
return null;
}
return match;
}
// Title-case `str`; `options.special` may supply extra verbatim terms.
module.exports = (str, options = {}) => {
str = str.toLowerCase().replace(regex, (m, lead = "", forced, lower, rest, offset, string2) => {
// The final word is always capitalized, even if it is a small word.
const isLastWord = m.length + offset >= string2.length;
const parsedMatch = parseMatch(m);
if (!parsedMatch) {
return m;
}
if (!forced) {
const fullLower = lower + rest;
// Keep listed conjunctions/articles/prepositions lowercase mid-title.
if (lowerCase.has(fullLower) && !isLastWord) {
return parsedMatch;
}
}
return lead + (lower || forced).toUpperCase() + rest;
});
// Restore canonical casing for special terms (built-in + user-supplied).
const customSpecials = options.special || [];
const replace = [...specials, ...customSpecials];
const replaceRegExp = convertToRegExp(replace);
replaceRegExp.forEach(([pattern, s]) => {
str = str.replace(pattern, s);
});
return str;
};
}
});
// src/model/primitives/blob.ts | ||
@@ -76,5 +942,66 @@ var blob = (attributes = {}) => { | ||
// node_modules/ronin/node_modules/@ronin/compiler/dist/index.js | ||
var import_cuid2 = __toESM(require_cuid2(), 1); | ||
var import_title = __toESM(require_lib(), 1); | ||
// Placeholder keys/prefixes found inside queries after they have been
// serialized to JSON; they mark special values (e.g. a nested query is
// wrapped under `QUERY` so it can be told apart from plain instructions).
var QUERY_SYMBOLS = {
// Represents a sub query.
QUERY: "__RONIN_QUERY",
// Represents an expression that should be evaluated.
EXPRESSION: "__RONIN_EXPRESSION",
// Represents the value of a field in the model.
FIELD: "__RONIN_FIELD_",
// Represents the value of a field in the model of a parent query.
FIELD_PARENT: "__RONIN_FIELD_PARENT_",
// Represents the old value of a field in the parent model. Used for triggers.
FIELD_PARENT_OLD: "__RONIN_FIELD_PARENT_OLD_",
// Represents the new value of a field in the parent model. Used for triggers.
FIELD_PARENT_NEW: "__RONIN_FIELD_PARENT_NEW_",
// Represents a value provided to a query preset.
VALUE: "__RONIN_VALUE"
};
// Matches serialized field placeholders, e.g. "__RONIN_FIELD_author.name".
var RONIN_MODEL_FIELD_REGEX = new RegExp(
`${QUERY_SYMBOLS.FIELD}[_a-zA-Z0-9.]+`,
"g"
);
// Return a shallow copy of `obj` without the listed property keys.
var omit = (obj, properties) => {
const result = {};
for (const [key, value] of Object.entries(obj)) {
if (!properties.includes(key)) result[key] = value;
}
return result;
};
// Meta model that describes models themselves; its records live in the
// `ronin_schema` table and store each model's definition.
var ROOT_MODEL = {
slug: "model",
identifiers: {
name: "name",
slug: "slug"
},
// This name mimics the `sqlite_schema` table in SQLite.
table: "ronin_schema",
// Indicates that the model was automatically generated by RONIN.
system: { model: "root" },
fields: [
{ slug: "name", type: "string" },
{ slug: "pluralName", type: "string" },
{ slug: "slug", type: "string" },
{ slug: "pluralSlug", type: "string" },
{ slug: "idPrefix", type: "string" },
{ slug: "table", type: "string" },
{ slug: "identifiers.name", type: "string" },
{ slug: "identifiers.slug", type: "string" },
// Providing an empty object as a default value allows us to use `json_insert`
// without needing to fall back to an empty object in the insertion statement,
// which makes the statement shorter.
{ slug: "fields", type: "json", defaultValue: "{}" },
{ slug: "indexes", type: "json", defaultValue: "{}" },
{ slug: "triggers", type: "json", defaultValue: "{}" },
{ slug: "presets", type: "json", defaultValue: "{}" }
]
};
// Maps a singular model-entity kind to its plural field name on a model.
var PLURAL_MODEL_ENTITIES = {
field: "fields",
index: "indexes",
trigger: "triggers",
preset: "presets"
};
// Just the plural names: ["fields", "indexes", "triggers", "presets"].
var PLURAL_MODEL_ENTITIES_VALUES = Object.values(PLURAL_MODEL_ENTITIES);
// ROOT_MODEL without its internal `system` marker.
var CLEAN_ROOT_MODEL = omit(ROOT_MODEL, ["system"]);
// node_modules/ronin/dist/chunk-SZFCDSI6.js | ||
import { Transaction } from "@ronin/compiler"; | ||
import { QUERY_SYMBOLS } from "@ronin/compiler"; | ||
// Unique sentinel symbol — presumably used to distinguish "no value" from
// an explicit `undefined`; its usage lies outside this chunk.
var EMPTY = Symbol("empty");
@@ -206,1 +1133,6 @@ var IN_BATCH_ASYNC; | ||
}; | ||
/*! Bundled license information: | ||
@noble/hashes/utils.js: | ||
(*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) *) | ||
*/ |
{ | ||
"name": "@ronin/schema", | ||
"version": "0.1.3-leo-ron-1071-experimental-13", | ||
"version": "0.1.3-leo-ron-1071-experimental-14", | ||
"type": "module", | ||
@@ -20,3 +20,3 @@ "description": "Allows for defining the schema of a RONIN database in code.", | ||
"test": "bun test", | ||
"build": "tsup ./src/index.ts --dts --format esm", | ||
"build": "tsup ./src/index.ts --dts --dts-resolve --format esm", | ||
"prepare": "bun run build" | ||
@@ -31,7 +31,5 @@ }, | ||
"license": "Apache-2.0", | ||
"dependencies": { | ||
"@ronin/compiler": "0.13.1" | ||
}, | ||
"devDependencies": { | ||
"@biomejs/biome": "1.9.4", | ||
"@ronin/compiler": "0.13.1", | ||
"@types/bun": "1.1.14", | ||
@@ -38,0 +36,0 @@ "ronin": "5.3.5", |
License Policy Violation
License — This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk — This package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Dynamic require
Supply chain risk — Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
License Policy Violation
License — This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
68714
0
1593
5
4
1
- Removed: @ronin/compiler@0.13.1
- Removed: @ronin/compiler@0.13.1 (transitive)