@ronin/cli
Comparing version 0.2.36 to 0.2.37-corny-latest-get-batch-proxy-experimental-133
dist/index.js
@@ -14,3 +14,3 @@ // src/index.ts | ||
var initializeDatabase = async (packages, fsPath = ".ronin/db.sqlite") => { | ||
const { Transaction, ROOT_MODEL } = packages.compiler; | ||
const { Transaction, ROOT_MODEL: ROOT_MODEL2 } = packages.compiler; | ||
const engine = new Engine({ | ||
@@ -21,3 +21,3 @@ resolvers: [(engine2) => new MemoryResolver(engine2)] | ||
{ | ||
create: { model: ROOT_MODEL } | ||
create: { model: ROOT_MODEL2 } | ||
} | ||
@@ -69,10 +69,5 @@ ]); | ||
}; | ||
var createModelQuery = (modelSlug, properties) => { | ||
if (properties) { | ||
const propertiesString = Object.entries(properties).filter(([_, value]) => value !== void 0).map(([key, value]) => { | ||
return `${key}:${serialize(value)}`; | ||
}).join(", "); | ||
return `create.model({slug:'${modelSlug}',${propertiesString}})`; | ||
} | ||
return `create.model({slug:'${modelSlug}'})`; | ||
var createModelQuery = (model) => { | ||
const { indexes, triggers, ...rest } = model; | ||
return `create.model(${JSON.stringify(rest)})`; | ||
}; | ||
@@ -85,8 +80,9 @@ var createFieldQuery = (modelSlug, field) => { | ||
}; | ||
var createTempModelQuery = (modelSlug, fields, _indexes, triggers, customQueries, includeFields) => { | ||
var createTempModelQuery = (model, customQueries, includeFields) => { | ||
const { slug, fields, indexes: _indexes, triggers, ...rest } = model; | ||
const queries = []; | ||
const tempModelSlug = `${RONIN_SCHEMA_TEMP_SUFFIX}${modelSlug}`; | ||
queries.push(createModelQuery(tempModelSlug, { fields })); | ||
const tempModelSlug = `${RONIN_SCHEMA_TEMP_SUFFIX}${slug}`; | ||
queries.push(createModelQuery({ slug: tempModelSlug, fields, ...rest })); | ||
queries.push( | ||
`add.${tempModelSlug}.with(() => get.${modelSlug}(${includeFields ? JSON.stringify({ selecting: includeFields.map((field) => field.slug) }) : ""}))` | ||
`add.${tempModelSlug}.with(() => get.${slug}(${includeFields ? JSON.stringify({ selecting: includeFields.map((field) => field.slug) }) : ""}))` | ||
); | ||
@@ -96,6 +92,6 @@ if (customQueries) { | ||
} | ||
queries.push(dropModelQuery(modelSlug)); | ||
queries.push(`alter.model("${tempModelSlug}").to({slug: "${modelSlug}"})`); | ||
for (const trigger of triggers) { | ||
queries.push(createTriggerQuery(modelSlug, trigger)); | ||
queries.push(dropModelQuery(slug)); | ||
queries.push(`alter.model("${tempModelSlug}").to({slug: "${slug}"})`); | ||
for (const [key, value] of Object.entries(triggers || {})) { | ||
queries.push(createTriggerQuery(slug, { ...value, slug: key })); | ||
} | ||
@@ -121,14 +117,2 @@ return queries; | ||
}; | ||
var serialize = (value) => { | ||
if (typeof value === "string") { | ||
return `'${value}'`; | ||
} | ||
if (Array.isArray(value)) { | ||
return `[${value.map(serialize).join(", ")}]`; | ||
} | ||
if (typeof value === "object" && value !== null) { | ||
return `{${Object.entries(value).filter(([_, v]) => v !== void 0).map(([k, v]) => `${k}:${serialize(v)}`).join(", ")}}`; | ||
} | ||
return String(value); | ||
}; | ||
var renameModelQuery = (modelSlug, newModelSlug) => { | ||
@@ -153,6 +137,311 @@ return `alter.model("${modelSlug}").to({slug: "${newModelSlug}"})`; | ||
// src/utils/field.ts | ||
// src/utils/migration.ts | ||
import { confirm } from "@inquirer/prompts"; | ||
var diffFields = async (definedFields, existingFields, modelSlug, indexes, triggers, rename) => { | ||
var IGNORED_FIELDS = [ | ||
"id", | ||
"ronin", | ||
"ronin.updatedAt", | ||
"ronin.createdBy", | ||
"ronin.updatedBy", | ||
"ronin.createdAt", | ||
"ronin.locked" | ||
]; | ||
var diffModels = async (definedModelsWithFieldObject, existingModelsWithFieldObject, options) => { | ||
const definedModels = definedModelsWithFieldObject.map( | ||
(model) => convertModelToArrayFields(model) | ||
); | ||
const existingModels = existingModelsWithFieldObject.map( | ||
(model) => convertModelToArrayFields(model) | ||
); | ||
const diff = []; | ||
const adjustModelMetaQueries = adjustModelMeta(definedModels, existingModels); | ||
const recreateIndexes = indexesToRecreate(definedModels, existingModels); | ||
const recreateTriggers = triggersToRecreate(definedModels, existingModels); | ||
const modelsToBeRenamed = modelsToRename(definedModels, existingModels); | ||
let modelsToBeAdded = modelsToAdd(definedModels, existingModels); | ||
let modelsToBeDropped = modelsToDrop(definedModels, existingModels); | ||
if (modelsToBeRenamed.length > 0) { | ||
for (const model of modelsToBeRenamed) { | ||
const confirmRename = options?.rename || process.env.NODE_ENV !== "test" && await confirm({ | ||
message: `Did you mean to rename model: ${model.from.slug} -> ${model.to.slug}`, | ||
default: true | ||
}); | ||
if (confirmRename) { | ||
modelsToBeDropped = modelsToBeDropped.filter((s) => s.slug !== model.from.slug); | ||
modelsToBeAdded = modelsToBeAdded.filter((s) => s.slug !== model.to.slug); | ||
diff.push(renameModelQuery(model.from.slug, model.to.slug)); | ||
} | ||
} | ||
} | ||
diff.push(...adjustModelMetaQueries); | ||
diff.push(...dropModels(modelsToBeDropped)); | ||
diff.push( | ||
...createModels( | ||
modelsToBeAdded.map((m) => ({ | ||
...m, | ||
// @ts-expect-error This will work once the types are fixed. | ||
fields: convertArrayToObject(m.fields) | ||
})) | ||
) | ||
); | ||
diff.push(...await adjustModels(definedModels, existingModels, options)); | ||
diff.push(...recreateIndexes); | ||
diff.push(...recreateTriggers); | ||
return diff; | ||
}; | ||
var adjustModels = async (definedModels, existingModels, options) => { | ||
const diff = []; | ||
for (const localModel of definedModels) { | ||
const remoteModel = existingModels.find((r) => r.slug === localModel.slug); | ||
if (remoteModel) { | ||
diff.push( | ||
...await diffFields( | ||
// @ts-expect-error This will work once the types are fixed. | ||
localModel.fields || [], | ||
remoteModel.fields || [], | ||
localModel.slug, | ||
localModel.indexes || [], | ||
localModel.triggers || [], | ||
options | ||
) | ||
); | ||
} | ||
} | ||
return diff; | ||
}; | ||
var dropModels = (models) => { | ||
const diff = []; | ||
for (const model of models) { | ||
diff.push(dropModelQuery(model.slug)); | ||
} | ||
return diff; | ||
}; | ||
var createModels = (models) => { | ||
const diff = []; | ||
for (const model of models) { | ||
diff.push(createModelQuery(model)); | ||
} | ||
return diff; | ||
}; | ||
var modelsToDrop = (definedModels, existingModels) => { | ||
return existingModels.filter((s) => !definedModels.some((c) => c.slug === s.slug)); | ||
}; | ||
var modelsToAdd = (definedModels, existingModels) => { | ||
const currentModelsMap = new Map(existingModels.map((s) => [s.slug, s])); | ||
const newModels = []; | ||
for (const model of definedModels) { | ||
if (!currentModelsMap.has(model.slug)) { | ||
newModels.push(model); | ||
} | ||
} | ||
return newModels; | ||
}; | ||
var modelsToRename = (definedModels, existingModels) => { | ||
const modelsToBeAdded = modelsToAdd(definedModels, existingModels); | ||
const modelsToBeDropped = modelsToDrop(definedModels, existingModels); | ||
const modelsToRename2 = []; | ||
for (const model of modelsToBeAdded) { | ||
const currentModel = modelsToBeDropped.find((s) => { | ||
return areArraysEqual( | ||
// @ts-expect-error This will work once the types are fixed. | ||
model.fields?.map((f) => f.slug) || [], | ||
// @ts-expect-error This will work once the types are fixed. | ||
s.fields?.map((f) => f.slug) || [] | ||
); | ||
}); | ||
if (currentModel) { | ||
modelsToRename2.push({ to: model, from: currentModel }); | ||
} | ||
} | ||
return modelsToRename2; | ||
}; | ||
var adjustModelMeta = (definedModels, existingModels) => { | ||
const databaseModelsMap = new Map(existingModels.map((s) => [s.slug, s])); | ||
const newModels = []; | ||
for (const model of definedModels) { | ||
const currentModel = databaseModelsMap.get(model.slug); | ||
if (!(model.name && model.idPrefix)) continue; | ||
if (currentModel && (model.name !== currentModel.name || model.idPrefix !== currentModel.idPrefix)) { | ||
newModels.push( | ||
`alter.model("${model.slug}").to({name: "${model.name}", idPrefix: "${model.idPrefix}"})` | ||
); | ||
} | ||
} | ||
return newModels; | ||
}; | ||
var triggersToRecreate = (definedModels, existingModels) => { | ||
const diff = []; | ||
for (const definedModel of definedModels) { | ||
const existingModel = existingModels.find((m) => m.slug === definedModel.slug); | ||
const modelRecreated = modelWillBeRecreated( | ||
definedModel, | ||
existingModel || {} | ||
); | ||
const needRecreation = Object.entries(definedModel.triggers || {}).reduce((acc, [slug, trigger]) => { | ||
const existingTrigger = existingModel?.triggers?.[slug]; | ||
if (existingTrigger && !(JSON.stringify(trigger) === JSON.stringify(existingTrigger))) { | ||
const createTrigger = createTriggerQuery(definedModel.slug, { | ||
slug, | ||
...trigger | ||
}); | ||
const dropTrigger = dropTriggerQuery(definedModel.slug, slug); | ||
acc.push(dropTrigger); | ||
acc.push(createTrigger); | ||
return acc; | ||
} | ||
if (definedModel.triggers?.[slug] && !existingModel?.triggers?.[slug]) { | ||
acc.push( | ||
createTriggerQuery(definedModel.slug, { | ||
slug, | ||
...trigger | ||
}) | ||
); | ||
} | ||
return acc; | ||
}, []); | ||
diff.push(...modelRecreated ? [] : needRecreation); | ||
} | ||
return diff; | ||
}; | ||
var modelWillBeRecreated = (definedModel, existingModel) => { | ||
if (!existingModel) return false; | ||
return ( | ||
// @ts-expect-error This will work once the types are fixed. | ||
(fieldsToAdjust(definedModel.fields || [], existingModel.fields || []) ?? []).length > 0 | ||
); | ||
}; | ||
var indexesToRecreate = (definedModels, existingModels) => { | ||
const diff = []; | ||
for (const definedModel of definedModels) { | ||
const existingModel = existingModels.find((m) => m.slug === definedModel.slug); | ||
const modelRecreated = modelWillBeRecreated( | ||
definedModel, | ||
existingModel || {} | ||
); | ||
const needRecreation = Object.entries(definedModel.indexes || {}).reduce((acc, [slug, index]) => { | ||
const existingIndex = existingModel?.indexes?.[slug]; | ||
if (existingIndex && !(JSON.stringify(index) === JSON.stringify(existingIndex))) { | ||
const createIndex = createIndexQuery(definedModel.slug, { | ||
slug, | ||
...index | ||
}); | ||
const dropIndex = dropIndexQuery(definedModel.slug, slug); | ||
acc.push(dropIndex); | ||
acc.push(createIndex); | ||
return acc; | ||
} | ||
if (definedModel.indexes?.[slug] && !existingModel?.indexes?.[slug]) { | ||
acc.push( | ||
createIndexQuery(definedModel.slug, { | ||
slug, | ||
...index | ||
}) | ||
); | ||
} | ||
return acc; | ||
}, []); | ||
diff.push(...modelRecreated ? [] : needRecreation); | ||
} | ||
return diff; | ||
}; | ||
var MIGRATION_FLAGS = { | ||
sql: { type: "boolean", short: "s", default: false }, | ||
local: { type: "boolean", short: "l", default: false }, | ||
apply: { type: "boolean", short: "a", default: false } | ||
}; | ||
// src/utils/model.ts | ||
var getModels = async (packages, db, token, space, isLocal = true) => { | ||
const { Transaction } = packages.compiler; | ||
const transaction = new Transaction([{ get: { models: null } }]); | ||
let rawResults; | ||
if (isLocal) { | ||
rawResults = (await db.query(transaction.statements)).map((r) => r.rows); | ||
} else { | ||
try { | ||
const nativeQueries = transaction.statements.map((statement) => ({ | ||
query: statement.statement, | ||
values: statement.params | ||
})); | ||
const response = await fetch(`https://data.ronin.co/?data-selector=${space}`, { | ||
method: "POST", | ||
headers: { | ||
"Content-Type": "application/json", | ||
Authorization: `Bearer ${token}` | ||
}, | ||
body: JSON.stringify({ nativeQueries }) | ||
}); | ||
const responseResults = await getResponseBody(response); | ||
rawResults = responseResults.results.map((result) => { | ||
return "records" in result ? result.records : []; | ||
}); | ||
} catch (error) { | ||
throw new Error(`Failed to fetch remote models: ${error.message}`); | ||
} | ||
} | ||
const results = transaction.formatResults(rawResults, false); | ||
const models = "records" in results[0] ? results[0].records : []; | ||
return models.map((model) => ({ | ||
...model, | ||
// @ts-expect-error This will work once the types are fixed. | ||
fields: convertObjectToArray(model.fields)?.filter( | ||
(field) => !IGNORED_FIELDS.includes(field.slug) | ||
) | ||
})); | ||
}; | ||
var convertObjectToArray = (input3) => { | ||
return Object.entries(input3).map(([key, value]) => ({ | ||
slug: key, | ||
// @ts-expect-error This will work once the types are fixed. | ||
...value | ||
})); | ||
}; | ||
var convertArrayToObject = (fields) => { | ||
if (!fields) return {}; | ||
return fields.reduce((obj, field) => { | ||
const { slug, ...rest } = field; | ||
obj[slug] = rest; | ||
return obj; | ||
}, {}); | ||
}; | ||
var convertModelToArrayFields = (model) => { | ||
if (JSON.stringify(model) === "{}") return {}; | ||
return { ...model, fields: convertObjectToArray(model.fields) }; | ||
}; | ||
// src/utils/field.ts | ||
import { confirm as confirm2, input, select } from "@inquirer/prompts"; | ||
var handleRequiredField = async (modelSlug, field, definedFields, options) => { | ||
let defaultValue; | ||
if (field.type === "boolean") { | ||
defaultValue = options?.requiredDefault || await select({ | ||
message: `Field ${modelSlug}.${field.slug} is required. Select a default value (or manually drop all records):`, | ||
choices: [ | ||
{ name: "True", value: true }, | ||
{ name: "False", value: false } | ||
] | ||
}); | ||
} else { | ||
defaultValue = options?.requiredDefault || await input({ | ||
message: `Field ${modelSlug}.${field.slug} is required. Enter a default value (or manually drop all records):` | ||
}); | ||
} | ||
const updatedFields = definedFields?.map((f) => ({ | ||
...f, | ||
required: false | ||
})); | ||
const queries = [ | ||
// Set the default value for all existing records. | ||
`set.RONIN_TEMP_${modelSlug}.to({${field.slug}: ${typeof defaultValue === "string" ? `"${defaultValue}"` : defaultValue}})`, | ||
// Re-add the NOT NULL constraint after defaults are set. | ||
`alter.model("RONIN_TEMP_${modelSlug}").alter.field("${field.slug}").to({required: true})` | ||
]; | ||
return { | ||
defaultValue, | ||
definedFields: updatedFields, | ||
queries | ||
}; | ||
}; | ||
var diffFields = async (definedFields, existingFields, modelSlug, indexes, triggers, options) => { | ||
const diff = []; | ||
const fieldsToBeRenamed = fieldsToRename(definedFields, existingFields); | ||
@@ -164,3 +453,3 @@ let fieldsToAdd = fieldsToCreate(definedFields, existingFields); | ||
for (const field of fieldsToBeRenamed) { | ||
const confirmRename = rename || await confirm({ | ||
const confirmRename = options?.rename || await confirm2({ | ||
message: `Did you mean to rename field: ${modelSlug}.${field.from.slug} -> ${modelSlug}.${field.to.slug}`, | ||
@@ -175,10 +464,13 @@ default: true | ||
...createTempModelQuery( | ||
modelSlug, | ||
{ | ||
slug: modelSlug, | ||
// @ts-expect-error This will work once the types are fixed. | ||
fields: convertArrayToObject([ | ||
{ ...field.to, slug: field.from.slug }, | ||
...definedFields.filter((local) => local.slug !== field.to.slug) | ||
]), | ||
indexes: convertArrayToObject(indexes), | ||
triggers: convertArrayToObject(triggers) | ||
}, | ||
[ | ||
{ ...field.to, slug: field.from.slug }, | ||
...definedFields.filter((local) => local.slug !== field.to.slug) | ||
], | ||
indexes, | ||
triggers, | ||
[ | ||
renameFieldQuery( | ||
@@ -198,3 +490,8 @@ `${RONIN_SCHEMA_TEMP_SUFFIX}${modelSlug}`, | ||
} | ||
const createFieldsQueries = createFields(fieldsToAdd, modelSlug, definedFields); | ||
const createFieldsQueries = await createFields( | ||
fieldsToAdd, | ||
modelSlug, | ||
definedFields, | ||
options | ||
); | ||
diff.push(...createFieldsQueries); | ||
@@ -206,4 +503,24 @@ if (!(createFieldsQueries.length > 0 && createFieldsQueries.find((q) => q.includes(RONIN_SCHEMA_TEMP_SUFFIX)))) { | ||
const existingField = existingFields.find((f) => f.slug === field.slug); | ||
if (field.unique || field.required || existingField?.unique) { | ||
if (field.unique || existingField?.unique) { | ||
diff.push(...adjustFields(modelSlug, definedFields, indexes, triggers)); | ||
} else if (field.required && !field.defaultValue) { | ||
const { definedFields: updatedFields, queries } = await handleRequiredField( | ||
modelSlug, | ||
field, | ||
definedFields, | ||
options | ||
); | ||
diff.push( | ||
...createTempModelQuery( | ||
{ | ||
slug: modelSlug, | ||
// @ts-expect-error This will work once the types are fixed. | ||
fields: convertArrayToObject(updatedFields || []), | ||
indexes: convertArrayToObject(indexes), | ||
triggers: convertArrayToObject(triggers) | ||
}, | ||
queries, | ||
existingFields | ||
) | ||
); | ||
} else if (field.type === "link" && field.kind === "many") { | ||
@@ -253,3 +570,10 @@ diff.push(...adjustFields(modelSlug, definedFields, indexes, triggers)); | ||
var adjustFields = (modelSlug, fields, indexes, triggers) => { | ||
return createTempModelQuery(modelSlug, fields, indexes, triggers); | ||
return createTempModelQuery({ | ||
slug: modelSlug, | ||
// @ts-expect-error This will work once the types are fixed. | ||
fields: convertArrayToObject(fields), | ||
// @ts-expect-error This will work once the types are fixed. | ||
indexes, | ||
triggers: convertArrayToObject(triggers) | ||
}); | ||
}; | ||
@@ -261,3 +585,3 @@ var fieldsToCreate = (definedFields, existingFields) => { | ||
}; | ||
var createFields = (fields, modelSlug, definedFields) => { | ||
var createFields = async (fields, modelSlug, definedFields, options) => { | ||
const diff = []; | ||
@@ -269,11 +593,45 @@ for (const fieldToAdd of fields) { | ||
); | ||
if (fieldToAdd.required && !fieldToAdd.defaultValue) { | ||
const { definedFields: updatedFields, queries } = await handleRequiredField( | ||
modelSlug, | ||
fieldToAdd, | ||
definedFields, | ||
options | ||
); | ||
return createTempModelQuery( | ||
{ | ||
slug: modelSlug, | ||
// @ts-expect-error This will work once the types are fixed. | ||
fields: convertArrayToObject(updatedFields || []) | ||
}, | ||
queries, | ||
existingFields | ||
); | ||
} | ||
return createTempModelQuery( | ||
modelSlug, | ||
definedFields || [], | ||
{ | ||
slug: modelSlug, | ||
// @ts-expect-error This will work once the types are fixed. | ||
fields: convertArrayToObject(definedFields || []) | ||
}, | ||
[], | ||
[], | ||
[], | ||
existingFields | ||
); | ||
} | ||
if (fieldToAdd.required && !fieldToAdd.defaultValue) { | ||
const { defaultValue } = await handleRequiredField( | ||
modelSlug, | ||
fieldToAdd, | ||
definedFields, | ||
options | ||
); | ||
diff.push(createFieldQuery(modelSlug, { ...fieldToAdd, required: false })); | ||
diff.push( | ||
`set.${modelSlug}.to({${fieldToAdd.slug}: ${typeof defaultValue === "boolean" ? defaultValue : `"${defaultValue}"`}})` | ||
); | ||
diff.push( | ||
`alter.model("${modelSlug}").alter.field("${fieldToAdd.slug}").to({required: true})` | ||
); | ||
return diff; | ||
} | ||
diff.push(createFieldQuery(modelSlug, fieldToAdd)); | ||
@@ -292,3 +650,11 @@ } | ||
if (fieldToDrop.unique) { | ||
return createTempModelQuery(modelSlug, fields, [], [], [], fields); | ||
return createTempModelQuery( | ||
{ | ||
slug: modelSlug, | ||
// @ts-expect-error This will work once the types are fixed. | ||
fields: convertArrayToObject(fields) | ||
}, | ||
[], | ||
fields | ||
); | ||
} | ||
@@ -310,3 +676,3 @@ diff.push(dropFieldQuery(modelSlug, fieldToDrop.slug)); | ||
// src/utils/misc.ts | ||
import { input } from "@inquirer/prompts"; | ||
import { input as input2 } from "@inquirer/prompts"; | ||
import resolveFrom from "resolve-from"; | ||
@@ -343,9 +709,12 @@ var BASE_FLAGS = { | ||
var logTableDiff = (tableB, tableA, tableName) => { | ||
const fieldsToAdd = fieldsToCreate(tableB.fields ?? [], tableA.fields ?? []); | ||
const fieldsToDelete = fieldsToDrop(tableB.fields ?? [], tableA.fields ?? []); | ||
const a = convertModelToArrayFields(tableA); | ||
const b = convertModelToArrayFields(tableB); | ||
const fieldsToAdd = fieldsToCreate(b.fields ?? [], a.fields ?? []); | ||
const fieldsToDelete = fieldsToDrop(b.fields ?? [], a.fields ?? []); | ||
const fieldsA = Object.fromEntries( | ||
(tableA.fields ?? []).map((field) => [field.slug, field]) | ||
// @ts-expect-error This will work once the types are fixed. | ||
(a.fields ?? []).map((field) => [field.slug, field]) | ||
); | ||
const allKeys = /* @__PURE__ */ new Set(); | ||
for (const item of [...tableB.fields ?? [], ...tableA.fields ?? []]) { | ||
for (const item of [...b.fields ?? [], ...a.fields ?? []]) { | ||
for (const key of Object.keys(item)) { | ||
@@ -357,3 +726,4 @@ allKeys.add(key); | ||
// Headers for current fields (green for new fields) | ||
...(tableB.fields ?? []).map((field) => { | ||
// @ts-expect-error This will work once the types are fixed. | ||
...(b.fields ?? []).map((field) => { | ||
const isNew = fieldsToAdd.some((f) => f.slug === field.slug); | ||
@@ -368,3 +738,3 @@ return isNew ? `\x1B[32m${field.slug}\x1B[0m` : field.slug; | ||
let hasValue = false; | ||
(tableB.fields ?? []).forEach((field, index) => { | ||
(b.fields ?? []).forEach((field, index) => { | ||
const oldValue = fieldsA[field.slug]?.[key]; | ||
@@ -378,3 +748,3 @@ const newValue = field[key]; | ||
if (value !== void 0) hasValue = true; | ||
row[columnHeaders[tableB.fields?.length ?? 0 + i]] = `\x1B[31m\x1B[9m${value}\x1B[0m`; | ||
row[columnHeaders[b.fields?.length ?? 0 + i]] = `\x1B[31m\x1B[9m${value}\x1B[0m`; | ||
}); | ||
@@ -392,3 +762,3 @@ return hasValue ? row : null; | ||
spinner.fail(`Could not find a model definition file ${MODEL_IN_CODE_RELATIVE_PATH}`); | ||
definedPath = process.env.NODE_ENV !== "test" ? await input({ | ||
definedPath = process.env.NODE_ENV !== "test" ? await input2({ | ||
message: "Enter the path to the model definition file" | ||
@@ -426,3 +796,3 @@ }) : MODEL_IN_CODE_RELATIVE_PATH; | ||
for (const model of models) { | ||
for (const field of model.fields ?? []) { | ||
for (const field of Object.values(model.fields ?? [])) { | ||
if (field.type === "link" && field.target && field.target !== model.slug) { | ||
@@ -513,259 +883,2 @@ dependencyMap.get(model.slug)?.add(field.target); | ||
// src/utils/migration.ts | ||
import { confirm as confirm2 } from "@inquirer/prompts"; | ||
var IGNORED_FIELDS = [ | ||
"id", | ||
"ronin", | ||
"ronin.updatedAt", | ||
"ronin.createdBy", | ||
"ronin.updatedBy", | ||
"ronin.createdAt", | ||
"ronin.locked" | ||
]; | ||
var diffModels = async (definedModels, existingModels, rename) => { | ||
const diff = []; | ||
const adjustModelMetaQueries = adjustModelMeta(definedModels, existingModels); | ||
const recreateIndexes = indexesToRecreate(definedModels, existingModels); | ||
const recreateTriggers = triggersToRecreate(definedModels, existingModels); | ||
const modelsToBeRenamed = modelsToRename(definedModels, existingModels); | ||
let modelsToBeAdded = modelsToAdd(definedModels, existingModels); | ||
let modelsToBeDropped = modelsToDrop(definedModels, existingModels); | ||
if (modelsToBeRenamed.length > 0) { | ||
for (const model of modelsToBeRenamed) { | ||
const confirmRename = rename || process.env.NODE_ENV !== "test" && await confirm2({ | ||
message: `Did you mean to rename model: ${model.from.slug} -> ${model.to.slug}`, | ||
default: true | ||
}); | ||
if (confirmRename) { | ||
modelsToBeDropped = modelsToBeDropped.filter((s) => s.slug !== model.from.slug); | ||
modelsToBeAdded = modelsToBeAdded.filter((s) => s.slug !== model.to.slug); | ||
diff.push(renameModelQuery(model.from.slug, model.to.slug)); | ||
} | ||
} | ||
} | ||
diff.push(...adjustModelMetaQueries); | ||
diff.push(...dropModels(modelsToBeDropped)); | ||
diff.push(...createModels(modelsToBeAdded)); | ||
diff.push(...await adjustModels(definedModels, existingModels, rename)); | ||
diff.push(...recreateIndexes); | ||
diff.push(...recreateTriggers); | ||
return diff; | ||
}; | ||
var adjustModels = async (definedModels, existingModels, rename) => { | ||
const diff = []; | ||
for (const localModel of definedModels) { | ||
const remoteModel = existingModels.find((r) => r.slug === localModel.slug); | ||
if (remoteModel) { | ||
diff.push( | ||
...await diffFields( | ||
localModel.fields || [], | ||
remoteModel.fields || [], | ||
localModel.slug, | ||
localModel.indexes || [], | ||
localModel.triggers || [], | ||
rename | ||
) | ||
); | ||
} | ||
} | ||
return diff; | ||
}; | ||
var dropModels = (models) => { | ||
const diff = []; | ||
for (const model of models) { | ||
diff.push(dropModelQuery(model.slug)); | ||
} | ||
return diff; | ||
}; | ||
var createModels = (models) => { | ||
const diff = []; | ||
for (const model of models) { | ||
diff.push( | ||
createModelQuery(model.slug, model.fields ? { fields: model.fields } : void 0) | ||
); | ||
} | ||
return diff; | ||
}; | ||
var modelsToDrop = (definedModels, existingModels) => { | ||
return existingModels.filter((s) => !definedModels.some((c) => c.slug === s.slug)); | ||
}; | ||
var modelsToAdd = (definedModels, existingModels) => { | ||
const currentModelsMap = new Map(existingModels.map((s) => [s.slug, s])); | ||
const newModels = []; | ||
for (const model of definedModels) { | ||
if (!currentModelsMap.has(model.slug)) { | ||
newModels.push(model); | ||
} | ||
} | ||
return newModels; | ||
}; | ||
var modelsToRename = (definedModels, existingModels) => { | ||
const modelsToBeAdded = modelsToAdd(definedModels, existingModels); | ||
const modelsToBeDropped = modelsToDrop(definedModels, existingModels); | ||
const modelsToRename2 = []; | ||
for (const model of modelsToBeAdded) { | ||
const currentModel = modelsToBeDropped.find((s) => { | ||
return areArraysEqual( | ||
model.fields?.map((f) => f.slug) || [], | ||
s.fields?.map((f) => f.slug) || [] | ||
); | ||
}); | ||
if (currentModel) { | ||
modelsToRename2.push({ to: model, from: currentModel }); | ||
} | ||
} | ||
return modelsToRename2; | ||
}; | ||
var adjustModelMeta = (definedModels, existingModels) => { | ||
const databaseModelsMap = new Map(existingModels.map((s) => [s.slug, s])); | ||
const newModels = []; | ||
for (const model of definedModels) { | ||
const currentModel = databaseModelsMap.get(model.slug); | ||
if (!(model.name && model.idPrefix)) continue; | ||
if (currentModel && (model.name !== currentModel.name || model.idPrefix !== currentModel.idPrefix)) { | ||
newModels.push( | ||
`alter.model("${model.slug}").to({name: "${model.name}", idPrefix: "${model.idPrefix}"})` | ||
); | ||
} | ||
} | ||
return newModels; | ||
}; | ||
var triggersToRecreate = (definedModels, existingModels) => { | ||
const diff = []; | ||
for (const definedModel of definedModels) { | ||
const existingModel = existingModels.find((m) => m.slug === definedModel.slug); | ||
const modelRecreated = modelWillBeRecreated( | ||
definedModel, | ||
existingModel || {} | ||
); | ||
diff.push( | ||
...modelRecreated ? [] : dropTriggers(definedModel, existingModel || {}), | ||
...createTriggers(definedModel, existingModel || {}) | ||
); | ||
} | ||
return diff; | ||
}; | ||
var dropTriggers = (definedModel, existingModel) => { | ||
const diff = []; | ||
const definedTriggers = definedModel.triggers || []; | ||
const existingTriggers = existingModel.triggers || []; | ||
const triggersToDrop = existingTriggers.filter( | ||
(i) => !definedTriggers.some( | ||
(d) => d.fields && i.fields && d.fields.length === i.fields.length && d.fields.every( | ||
(f, idx) => JSON.stringify(f) === JSON.stringify(i.fields?.[idx]) | ||
) | ||
) | ||
) || []; | ||
for (const trigger of triggersToDrop) { | ||
diff.push(dropTriggerQuery(definedModel.slug, trigger.slug || "no slug")); | ||
} | ||
return diff; | ||
}; | ||
var createTriggers = (definedModel, existingModel) => { | ||
const diff = []; | ||
const definedTriggers = definedModel.triggers || []; | ||
const existingTriggers = existingModel.triggers || []; | ||
const triggersToAdd = definedTriggers.filter( | ||
(i) => !existingTriggers.some( | ||
(e) => e?.fields && i.fields && e.fields.length === i.fields.length && e.fields.every( | ||
(f, idx) => JSON.stringify(f) === JSON.stringify(i.fields?.[idx]) | ||
) | ||
) | ||
); | ||
for (const trigger of triggersToAdd) { | ||
diff.push(createTriggerQuery(definedModel.slug, trigger)); | ||
} | ||
return diff; | ||
}; | ||
var modelWillBeRecreated = (definedModel, existingModel) => { | ||
if (!existingModel) return false; | ||
return (fieldsToAdjust(definedModel.fields || [], existingModel.fields || []) ?? []).length > 0; | ||
}; | ||
var indexesToRecreate = (definedModels, existingModels) => { | ||
const diff = []; | ||
for (const definedModel of definedModels) { | ||
const existingModel = existingModels.find((m) => m.slug === definedModel.slug); | ||
const modelRecreated = modelWillBeRecreated( | ||
definedModel, | ||
existingModel || {} | ||
); | ||
diff.push( | ||
...modelRecreated ? [] : dropIndexes(definedModel, existingModel || {}), | ||
...createIndexes(definedModel, existingModel || {}) | ||
); | ||
} | ||
return diff; | ||
}; | ||
var dropIndexes = (definedModel, existingModel) => { | ||
const diff = []; | ||
const definedIndexes = definedModel.indexes || []; | ||
const indexesToDrop = existingModel?.indexes?.filter( | ||
(i) => !definedIndexes.some( | ||
(d) => d.fields && i.fields && d.fields.length === i.fields.length && d.unique === i.unique && d.fields.every( | ||
(f, idx) => JSON.stringify(f) === JSON.stringify(i.fields[idx]) | ||
) | ||
) | ||
) || []; | ||
for (const index of indexesToDrop) { | ||
diff.push(dropIndexQuery(definedModel.slug, index.slug || "no slug")); | ||
} | ||
return diff; | ||
}; | ||
var createIndexes = (definedModel, existingModel) => { | ||
const diff = []; | ||
const definedIndexes = definedModel.indexes || []; | ||
const existingIndexes = existingModel.indexes || []; | ||
const indexesToAdd = definedIndexes.filter( | ||
(i) => !existingIndexes.some( | ||
(e) => e.fields && i.fields && e.fields.length === i.fields.length && e.unique === i.unique && e.fields.every((f, idx) => JSON.stringify(f) === JSON.stringify(i.fields[idx])) | ||
) | ||
); | ||
for (const index of indexesToAdd) { | ||
diff.push(createIndexQuery(definedModel.slug, index)); | ||
} | ||
return diff; | ||
}; | ||
var MIGRATION_FLAGS = { | ||
sql: { type: "boolean", short: "s", default: false }, | ||
local: { type: "boolean", short: "l", default: false }, | ||
apply: { type: "boolean", short: "a", default: false } | ||
}; | ||
// src/utils/model.ts | ||
var getModels = async (packages, db, token, space, isLocal = true) => { | ||
const { Transaction } = packages.compiler; | ||
const transaction = new Transaction([{ get: { models: null } }]); | ||
let rawResults; | ||
if (isLocal) { | ||
rawResults = (await db.query(transaction.statements)).map((r) => r.rows); | ||
} else { | ||
try { | ||
const nativeQueries = transaction.statements.map((statement) => ({ | ||
query: statement.statement, | ||
values: statement.params | ||
})); | ||
const response = await fetch(`https://data.ronin.co/?data-selector=${space}`, { | ||
method: "POST", | ||
headers: { | ||
"Content-Type": "application/json", | ||
Authorization: `Bearer ${token}` | ||
}, | ||
body: JSON.stringify({ nativeQueries }) | ||
}); | ||
const responseResults = await getResponseBody(response); | ||
rawResults = responseResults.results.map((result) => { | ||
return "records" in result ? result.records : []; | ||
}); | ||
} catch (error) { | ||
throw new Error(`Failed to fetch remote models: ${error.message}`); | ||
} | ||
} | ||
const results = transaction.formatResults(rawResults, false); | ||
const models = "records" in results[0] ? results[0].records : []; | ||
return models.map((model) => ({ | ||
...model, | ||
fields: model.fields?.filter((field) => !IGNORED_FIELDS.includes(field.slug)) | ||
})); | ||
}; | ||
// src/utils/protocol.ts | ||
@@ -844,2 +957,319 @@ import fs5 from "node:fs"; | ||
// node_modules/@ronin/compiler/dist/index.js | ||
var QUERY_SYMBOLS = { | ||
// Represents a sub query. | ||
QUERY: "__RONIN_QUERY", | ||
// Represents an expression that should be evaluated. | ||
EXPRESSION: "__RONIN_EXPRESSION", | ||
// Represents the value of a field in the model. | ||
FIELD: "__RONIN_FIELD_", | ||
// Represents the value of a field in the model of a parent query. | ||
FIELD_PARENT: "__RONIN_FIELD_PARENT_", | ||
// Represents the old value of a field in the parent model. Used for triggers. | ||
FIELD_PARENT_OLD: "__RONIN_FIELD_PARENT_OLD_", | ||
// Represents the new value of a field in the parent model. Used for triggers. | ||
FIELD_PARENT_NEW: "__RONIN_FIELD_PARENT_NEW_", | ||
// Represents a value provided to a query preset. | ||
VALUE: "__RONIN_VALUE" | ||
}; | ||
var RONIN_MODEL_FIELD_REGEX = new RegExp( | ||
`${QUERY_SYMBOLS.FIELD}[_a-zA-Z0-9.]+`, | ||
"g" | ||
); | ||
var CURRENT_TIME_EXPRESSION = { | ||
[QUERY_SYMBOLS.EXPRESSION]: `strftime('%Y-%m-%dT%H:%M:%f', 'now') || 'Z'` | ||
}; | ||
var SINGLE_QUOTE_REGEX = /'/g; | ||
var DOUBLE_QUOTE_REGEX = /"/g; | ||
var AMPERSAND_REGEX = /\s*&+\s*/g; | ||
var SPECIAL_CHARACTERS_REGEX = /[^\w\s-]+/g; | ||
var SPLIT_REGEX = /(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|[\s.\-_]+/; | ||
var sanitize = (str) => { | ||
if (!str || str.length === 0) return ""; | ||
return str.replace(SINGLE_QUOTE_REGEX, "").replace(DOUBLE_QUOTE_REGEX, "").replace(AMPERSAND_REGEX, " and ").replace(SPECIAL_CHARACTERS_REGEX, " ").trim(); | ||
}; | ||
var convertToSnakeCase = (str) => { | ||
if (!str || str.length === 0) return ""; | ||
return sanitize(str).split(SPLIT_REGEX).map((part) => part.toLowerCase()).join("_"); | ||
}; | ||
var omit = (obj, properties) => Object.fromEntries( | ||
Object.entries(obj).filter(([key]) => !properties.includes(key)) | ||
); | ||
var conjunctions = [ | ||
"for", | ||
"and", | ||
"nor", | ||
"but", | ||
"or", | ||
"yet", | ||
"so" | ||
]; | ||
var articles = [ | ||
"a", | ||
"an", | ||
"the" | ||
]; | ||
var prepositions = [ | ||
"aboard", | ||
"about", | ||
"above", | ||
"across", | ||
"after", | ||
"against", | ||
"along", | ||
"amid", | ||
"among", | ||
"anti", | ||
"around", | ||
"as", | ||
"at", | ||
"before", | ||
"behind", | ||
"below", | ||
"beneath", | ||
"beside", | ||
"besides", | ||
"between", | ||
"beyond", | ||
"but", | ||
"by", | ||
"concerning", | ||
"considering", | ||
"despite", | ||
"down", | ||
"during", | ||
"except", | ||
"excepting", | ||
"excluding", | ||
"following", | ||
"for", | ||
"from", | ||
"in", | ||
"inside", | ||
"into", | ||
"like", | ||
"minus", | ||
"near", | ||
"of", | ||
"off", | ||
"on", | ||
"onto", | ||
"opposite", | ||
"over", | ||
"past", | ||
"per", | ||
"plus", | ||
"regarding", | ||
"round", | ||
"save", | ||
"since", | ||
"than", | ||
"through", | ||
"to", | ||
"toward", | ||
"towards", | ||
"under", | ||
"underneath", | ||
"unlike", | ||
"until", | ||
"up", | ||
"upon", | ||
"versus", | ||
"via", | ||
"with", | ||
"within", | ||
"without" | ||
]; | ||
var lowerCase = /* @__PURE__ */ new Set([ | ||
...conjunctions, | ||
...articles, | ||
...prepositions | ||
]); | ||
var specials = [ | ||
"ZEIT", | ||
"ZEIT Inc.", | ||
"Vercel", | ||
"Vercel Inc.", | ||
"CLI", | ||
"API", | ||
"HTTP", | ||
"HTTPS", | ||
"JSX", | ||
"DNS", | ||
"URL", | ||
"now.sh", | ||
"now.json", | ||
"vercel.app", | ||
"vercel.json", | ||
"CI", | ||
"CD", | ||
"CDN", | ||
"package.json", | ||
"package.lock", | ||
"yarn.lock", | ||
"GitHub", | ||
"GitLab", | ||
"CSS", | ||
"Sass", | ||
"JS", | ||
"JavaScript", | ||
"TypeScript", | ||
"HTML", | ||
"WordPress", | ||
"Next.js", | ||
"Node.js", | ||
"Webpack", | ||
"Docker", | ||
"Bash", | ||
"Kubernetes", | ||
"SWR", | ||
"TinaCMS", | ||
"UI", | ||
"UX", | ||
"TS", | ||
"TSX", | ||
"iPhone", | ||
"iPad", | ||
"watchOS", | ||
"iOS", | ||
"iPadOS", | ||
"macOS", | ||
"PHP", | ||
"composer.json", | ||
"composer.lock", | ||
"CMS", | ||
"SQL", | ||
"C", | ||
"C#", | ||
"GraphQL", | ||
"GraphiQL", | ||
"JWT", | ||
"JWTs" | ||
]; | ||
var word = `[^\\s'\u2019\\(\\)!?;:"-]`; | ||
var regex = new RegExp(`(?:(?:(\\s?(?:^|[.\\(\\)!?;:"-])\\s*)(${word}))|(${word}))(${word}*[\u2019']*${word}*)`, "g"); | ||
var convertToRegExp = (specials2) => specials2.map((s) => [new RegExp(`\\b${s}\\b`, "gi"), s]); | ||
function parseMatch(match) { | ||
const firstCharacter = match[0]; | ||
if (/\s/.test(firstCharacter)) { | ||
return match.slice(1); | ||
} | ||
if (/[\(\)]/.test(firstCharacter)) { | ||
return null; | ||
} | ||
return match; | ||
} | ||
var src_default = (str, options = {}) => { | ||
str = str.toLowerCase().replace(regex, (m, lead = "", forced, lower, rest, offset, string) => { | ||
const isLastWord = m.length + offset >= string.length; | ||
const parsedMatch = parseMatch(m); | ||
if (!parsedMatch) { | ||
return m; | ||
} | ||
if (!forced) { | ||
const fullLower = lower + rest; | ||
if (lowerCase.has(fullLower) && !isLastWord) { | ||
return parsedMatch; | ||
} | ||
} | ||
return lead + (lower || forced).toUpperCase() + rest; | ||
}); | ||
const customSpecials = options.special || []; | ||
const replace = [...specials, ...customSpecials]; | ||
const replaceRegExp = convertToRegExp(replace); | ||
replaceRegExp.forEach(([pattern, s]) => { | ||
str = str.replace(pattern, s); | ||
}); | ||
return str; | ||
}; | ||
var slugToName = (slug) => { | ||
const name = slug.replace(/([a-z])([A-Z])/g, "$1 $2"); | ||
return src_default(name); | ||
}; | ||
var VOWELS = ["a", "e", "i", "o", "u"]; | ||
var pluralize = (word2) => { | ||
const lastLetter = word2.slice(-1).toLowerCase(); | ||
const secondLastLetter = word2.slice(-2, -1).toLowerCase(); | ||
if (lastLetter === "y" && !VOWELS.includes(secondLastLetter)) { | ||
return `${word2.slice(0, -1)}ies`; | ||
} | ||
if (lastLetter === "s" || word2.slice(-2).toLowerCase() === "ch" || word2.slice(-2).toLowerCase() === "sh" || word2.slice(-2).toLowerCase() === "ex") { | ||
return `${word2}es`; | ||
} | ||
return `${word2}s`; | ||
}; | ||
var modelAttributes = [ | ||
["pluralSlug", "slug", pluralize, true], | ||
["name", "slug", slugToName, false], | ||
["pluralName", "pluralSlug", slugToName, false], | ||
["idPrefix", "slug", (slug) => slug.slice(0, 3).toLowerCase(), false], | ||
["table", "pluralSlug", convertToSnakeCase, true] | ||
]; | ||
var getModelIdentifier = () => { | ||
return `mod_${Array.from(crypto.getRandomValues(new Uint8Array(12))).map((b) => b.toString(16).padStart(2, "0")).join("").slice(0, 16).toLowerCase()}`; | ||
}; | ||
var addDefaultModelAttributes = (model, isNew) => { | ||
const copiedModel = { ...model }; | ||
if (isNew && !copiedModel.id) copiedModel.id = getModelIdentifier(); | ||
for (const [setting, base, generator, mustRegenerate] of modelAttributes) { | ||
if (!(isNew || mustRegenerate)) continue; | ||
if (copiedModel[setting] || !copiedModel[base]) continue; | ||
copiedModel[setting] = generator(copiedModel[base]); | ||
} | ||
const newFields = copiedModel.fields || []; | ||
if (isNew || Object.keys(newFields).length > 0) { | ||
if (!copiedModel.identifiers) copiedModel.identifiers = {}; | ||
if (!copiedModel.identifiers.name) { | ||
const suitableField = Object.entries(newFields).find( | ||
([fieldSlug, field]) => field.type === "string" && field.required === true && ["name"].includes(fieldSlug) | ||
); | ||
copiedModel.identifiers.name = suitableField?.[0] || "id"; | ||
} | ||
if (!copiedModel.identifiers.slug) { | ||
const suitableField = Object.entries(newFields).find( | ||
([fieldSlug, field]) => field.type === "string" && field.unique === true && field.required === true && ["slug", "handle"].includes(fieldSlug) | ||
); | ||
copiedModel.identifiers.slug = suitableField?.[0] || "id"; | ||
} | ||
} | ||
return copiedModel; | ||
}; | ||
var ROOT_MODEL = { | ||
slug: "model", | ||
identifiers: { | ||
name: "name", | ||
slug: "slug" | ||
}, | ||
// This name mimics the `sqlite_schema` table in SQLite. | ||
table: "ronin_schema", | ||
// Indicates that the model was automatically generated by RONIN. | ||
system: { model: "root" }, | ||
fields: { | ||
name: { type: "string" }, | ||
pluralName: { type: "string" }, | ||
slug: { type: "string" }, | ||
pluralSlug: { type: "string" }, | ||
idPrefix: { type: "string" }, | ||
table: { type: "string" }, | ||
"identifiers.name": { type: "string" }, | ||
"identifiers.slug": { type: "string" }, | ||
// Providing an empty object as a default value allows us to use `json_insert` | ||
// without needing to fall back to an empty object in the insertion statement, | ||
// which makes the statement shorter. | ||
fields: { type: "json", defaultValue: "{}" }, | ||
indexes: { type: "json", defaultValue: "{}" }, | ||
triggers: { type: "json", defaultValue: "{}" }, | ||
presets: { type: "json", defaultValue: "{}" } | ||
} | ||
}; | ||
var ROOT_MODEL_WITH_ATTRIBUTES = addDefaultModelAttributes(ROOT_MODEL, true); | ||
var PLURAL_MODEL_ENTITIES = { | ||
field: "fields", | ||
index: "indexes", | ||
trigger: "triggers", | ||
preset: "presets" | ||
}; | ||
var PLURAL_MODEL_ENTITIES_VALUES = Object.values(PLURAL_MODEL_ENTITIES); | ||
var CLEAN_ROOT_MODEL = omit(ROOT_MODEL, ["system"]); | ||
// src/utils/protocol.ts | ||
@@ -878,2 +1308,3 @@ var Protocol = class { | ||
* @returns Object containing the Query and options. | ||
* | ||
* @private | ||
@@ -893,7 +1324,7 @@ */ | ||
]; | ||
const queryProxies = queryTypes.map( | ||
(type) => getSyntaxProxy({ rootProperty: type }) | ||
); | ||
const queryProxies = queryTypes.map((type) => { | ||
return getSyntaxProxy({ root: `${QUERY_SYMBOLS.QUERY}.${type}` }); | ||
}); | ||
const func = new Function(...queryTypes, `"use strict"; return ${query}`); | ||
return func(...queryProxies).structure; | ||
return func(...queryProxies)[QUERY_SYMBOLS.QUERY]; | ||
}; | ||
@@ -992,3 +1423,3 @@ /** | ||
// src/utils/space.ts | ||
import { select } from "@inquirer/prompts"; | ||
import { select as select2 } from "@inquirer/prompts"; | ||
var getSpaces = async (sessionToken) => { | ||
@@ -1007,3 +1438,3 @@ try { | ||
members: { | ||
including: ["space", "account"], | ||
using: ["space", "account"], | ||
with: { | ||
@@ -1043,3 +1474,3 @@ team: null | ||
spinner2?.stop(); | ||
space = await select({ | ||
space = await select2({ | ||
message: "Which space do you want to apply models to?", | ||
@@ -1062,3 +1493,3 @@ choices: spaces.map((space2) => ({ | ||
// src/commands/apply.ts | ||
import { select as select2 } from "@inquirer/prompts"; | ||
import { select as select3 } from "@inquirer/prompts"; | ||
var apply_default = async (appToken, sessionToken, flags, migrationFilePath) => { | ||
@@ -1078,5 +1509,5 @@ const spinner2 = spinner.info("Applying migration"); | ||
const migrations = fs6.readdirSync(MIGRATIONS_PATH); | ||
const migrationPrompt = migrationFilePath ?? await select2({ | ||
const migrationPrompt = migrationFilePath ?? await select3({ | ||
message: "Which migration do you want to apply?", | ||
choices: migrations.map((migration) => ({ | ||
choices: migrations.sort((a, b) => b.localeCompare(a)).map((migration) => ({ | ||
name: migration, | ||
@@ -1083,0 +1514,0 @@ value: path5.join(MIGRATIONS_PATH, migration) |
package.json
{ | ||
"name": "@ronin/cli", | ||
"version": "0.2.36", | ||
"version": "0.2.37-corny-latest-get-batch-proxy-experimental-133", | ||
"type": "module", | ||
@@ -45,3 +45,3 @@ "description": "The command-line interface for RONIN.", | ||
"devDependencies": { | ||
"ronin": "6.0.27", | ||
"ronin": "6.2.16", | ||
"@biomejs/biome": "1.9.4", | ||
@@ -48,0 +48,0 @@ "@types/bun": "1.2.1", |