@astrojs/db - npm Package Compare versions

Comparing version 0.2.2 to 0.3.0
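At a glance, 0.3.0 nests per-field options under a new `schema` key, adds field `references` and collection-level `foreignKeys`, switches the CLI to managed app tokens, and introduces the `login`, `logout`, and `link` commands. As orientation only, here is a hypothetical user-facing config exercising the new surface; the collection and field names are invented, and the exact wiring into `astro.config.mjs` is an assumption inferred from this diff rather than from package documentation:

import { defineConfig } from 'astro/config';
import db, { defineCollection, field } from '@astrojs/db';

// Illustrative collections: Post.authorName references Author.name.
const Author = defineCollection({
  fields: {
    name: field.text({ unique: true }),
  },
});

const Post = defineCollection({
  fields: {
    title: field.text(),
    authorName: field.text(),
  },
  // New in 0.3.0 (see foreignKeysSchema below): `fields` names columns on this
  // collection, `references` lazily resolves to fields on another collection.
  foreignKeys: [{ fields: 'authorName', references: () => Author.fields.name }],
});

// Assumed wiring: the default export is the integration, and collections are
// passed under the top-level `db` key that the integration reads as config.db.
export default defineConfig({
  integrations: [db()],
  db: { collections: { Author, Post } },
});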


config-augment.d.ts
declare namespace Config {
type DBUserConfig = import('./dist/config.js').DBUserConfig;
type DBUserConfig = import('./dist/core/types.js').DBUserConfig;
export interface Database extends DBUserConfig {}
}
import { createClient } from "@libsql/client";
import deepDiff from "deep-diff";
import { drizzle } from "drizzle-orm/sqlite-proxy";
import { appTokenError } from "../../../errors.js";
import { red } from "kleur/colors";
import prompts from "prompts";
import { setupDbTables } from "../../../queries.js";
import { getManagedAppTokenOrExit } from "../../../tokens.js";
import { collectionsSchema } from "../../../types.js";
import { getRemoteDatabaseUrl } from "../../../utils.js";
import { getMigrationQueries } from "../../migration-queries.js";
import {
createCurrentSnapshot,
createEmptySnapshot,
getMigrations,
initializeFromMigrations,
getMigrationStatus,
loadInitialSnapshot,
loadMigration
loadMigration,
MIGRATION_NEEDED,
MIGRATIONS_NOT_INITIALIZED,
MIGRATIONS_UP_TO_DATE
} from "../../migrations.js";
import { getAstroStudioEnv, getRemoteDatabaseUrl } from "../../../utils.js";
import { getMigrationQueries } from "../../migration-queries.js";
import { setupDbTables } from "../../../queries.js";
const { diff } = deepDiff;
import { MISSING_SESSION_ID_ERROR } from "../../../errors.js";
async function cmd({ config, flags }) {
const isSeedData = flags.seed;
const isDryRun = flags.dryRun;
const appToken = flags.token ?? getAstroStudioEnv().ASTRO_STUDIO_APP_TOKEN;
const currentSnapshot = createCurrentSnapshot(config);
const allMigrationFiles = await getMigrations();
if (allMigrationFiles.length === 0) {
console.log("Project not yet initialized!");
const appToken = await getManagedAppTokenOrExit(flags.token);
const migration = await getMigrationStatus(config);
if (migration.state === "no-migrations-found") {
console.log(MIGRATIONS_NOT_INITIALIZED);
process.exit(1);
}
const prevSnapshot = await initializeFromMigrations(allMigrationFiles);
const calculatedDiff = diff(prevSnapshot, currentSnapshot);
if (calculatedDiff) {
console.log("Changes detected!");
console.log(calculatedDiff);
} else if (migration.state === "ahead") {
console.log(MIGRATION_NEEDED);
process.exit(1);
}
if (!appToken) {
console.error(appTokenError);
const allLocalMigrations = await getMigrations();
let missingMigrations = [];
try {
const { data } = await prepareMigrateQuery({
migrations: allLocalMigrations,
appToken: appToken.token
});
missingMigrations = data;
} catch (error) {
if (error instanceof Error) {
if (error.message.startsWith("{")) {
const { error: { code } = { code: "" } } = JSON.parse(error.message);
if (code === "TOKEN_UNAUTHORIZED") {
console.error(MISSING_SESSION_ID_ERROR);
}
}
}
console.error(error);
process.exit(1);
}
const allLocalMigrations = await getMigrations();
const { data: missingMigrations } = await prepareMigrateQuery({
migrations: allLocalMigrations,
appToken
});
if (missingMigrations.length === 0) {
console.info("No migrations to push! Your database is up to date!");
process.exit(0);
}
if (missingMigrations.length > 0) {
console.log(MIGRATIONS_UP_TO_DATE);
} else {
console.log(`Pushing ${missingMigrations.length} migrations...`);
await pushSchema({ migrations: missingMigrations, appToken, isDryRun, currentSnapshot });
await pushSchema({
migrations: missingMigrations,
appToken: appToken.token,
isDryRun,
currentSnapshot: migration.currentSnapshot
});
}
if (isSeedData) {
console.info("Pushing data...");
await pushData({ config, appToken, isDryRun });
}
console.info("Pushing data...");
await pushData({ config, appToken: appToken.token, isDryRun });
await appToken.destroy();
console.info("Push complete!");

@@ -66,6 +77,22 @@ }

const missingMigrationContents = await Promise.all(filteredMigrations.map(loadMigration));
const initialMigrationBatch = initialSnapshot ? await getMigrationQueries({
const initialMigrationBatch = initialSnapshot ? (await getMigrationQueries({
oldSnapshot: createEmptySnapshot(),
newSnapshot: await loadInitialSnapshot()
}) : [];
})).queries : [];
const confirmations = missingMigrationContents.reduce((acc, curr) => {
return [...acc, ...curr.confirm || []];
}, []);
if (confirmations.length > 0) {
const response = await prompts([
...confirmations.map((message, index) => ({
type: "confirm",
name: String(index),
message: red("Warning: ") + message + "\nContinue?",
initial: true
}))
]);
if (Object.values(response).length === 0 || Object.values(response).some((value) => value === false)) {
process.exit(1);
}
}
const queries = missingMigrationContents.reduce((acc, curr) => {

@@ -102,3 +129,3 @@ return [...acc, ...curr.db];

mode: "build",
collections: config.db.collections ?? {},
collections: collectionsSchema.parse(config.db.collections ?? {}),
data: config.db.data

@@ -121,3 +148,3 @@ });

async function runMigrateQuery({
queries,
queries: baseQueries,
migrations,

@@ -128,2 +155,3 @@ snapshot,

}) {
const queries = ["pragma defer_foreign_keys=true;", ...baseQueries];
const requestBody = {

@@ -139,3 +167,3 @@ snapshot,

}
const url = new URL("/db/migrate/run", getRemoteDatabaseUrl());
const url = new URL("/migrations/run", getRemoteDatabaseUrl());
return await fetch(url, {

@@ -153,3 +181,3 @@ method: "POST",

}) {
const url = new URL("/db/migrate/prepare", getRemoteDatabaseUrl());
const url = new URL("/migrations/prepare", getRemoteDatabaseUrl());
const requestBody = {

@@ -156,0 +184,0 @@ migrations,

import { sql } from "drizzle-orm";
import { appTokenError } from "../../../errors.js";
import { getAstroStudioEnv, getRemoteDatabaseUrl } from "../../../utils.js";
import { createRemoteDatabaseClient } from "../../../../runtime/db-client.js";
import { getManagedAppTokenOrExit } from "../../../tokens.js";
import { getRemoteDatabaseUrl } from "../../../utils.js";
async function cmd({ flags }) {
const query = flags.query;
const appToken = flags.token ?? getAstroStudioEnv().ASTRO_STUDIO_APP_TOKEN;
if (!appToken) {
console.error(appTokenError);
process.exit(1);
}
const db = createRemoteDatabaseClient(appToken, getRemoteDatabaseUrl());
const appToken = await getManagedAppTokenOrExit(flags.token);
const db = createRemoteDatabaseClient(appToken.token, getRemoteDatabaseUrl());
const result = await db.run(sql.raw(query));
await appToken.destroy();
console.log(result);

@@ -15,0 +12,0 @@ }

@@ -1,41 +0,34 @@

import deepDiff from "deep-diff";
import { writeFile } from "fs/promises";
import { writeFile } from "node:fs/promises";
import {
createCurrentSnapshot,
getMigrations,
initializeFromMigrations,
MIGRATIONS_CREATED,
MIGRATIONS_UP_TO_DATE,
getMigrationStatus,
initializeMigrationsDirectory
} from "../../migrations.js";
import { getMigrationQueries } from "../../migration-queries.js";
const { diff } = deepDiff;
import { bgRed, red, reset } from "kleur/colors";
async function cmd({ config }) {
const currentSnapshot = createCurrentSnapshot(config);
const allMigrationFiles = await getMigrations();
if (allMigrationFiles.length === 0) {
await initializeMigrationsDirectory(currentSnapshot);
console.log("Project initialized!");
const migration = await getMigrationStatus(config);
if (migration.state === "no-migrations-found") {
await initializeMigrationsDirectory(migration.currentSnapshot);
console.log(MIGRATIONS_CREATED);
return;
}
const prevSnapshot = await initializeFromMigrations(allMigrationFiles);
const calculatedDiff = diff(prevSnapshot, currentSnapshot);
if (!calculatedDiff) {
console.log("No changes detected!");
} else if (migration.state === "up-to-date") {
console.log(MIGRATIONS_UP_TO_DATE);
return;
}
const migrationQueries = await getMigrationQueries({
oldSnapshot: prevSnapshot,
newSnapshot: currentSnapshot
const { oldSnapshot, newSnapshot, newFilename, diff } = migration;
const { queries: migrationQueries, confirmations } = await getMigrationQueries({
oldSnapshot,
newSnapshot
});
const largestNumber = allMigrationFiles.reduce((acc, curr) => {
const num = parseInt(curr.split("_")[0]);
return num > acc ? num : acc;
}, 0);
confirmations.map((message) => console.log(bgRed(" !!! ") + " " + red(message)));
const migrationFileContent = {
diff: calculatedDiff,
db: migrationQueries
diff,
db: migrationQueries,
// TODO(fks): Encode the relevant data, instead of the raw message.
// This will give `db push` more control over the formatting of the message.
confirm: confirmations.map((c) => reset(c))
};
const migrationFileName = `./migrations/${String(largestNumber + 1).padStart(
4,
"0"
)}_migration.json`;
const migrationFileName = `./migrations/${newFilename}`;
await writeFile(migrationFileName, JSON.stringify(migrationFileContent, void 0, 2));

@@ -42,0 +35,0 @@ console.log(migrationFileName + " created!");

import type { AstroConfig } from 'astro';
import type { Arguments } from 'yargs-parser';
export declare function cmd({ config }: {
export declare function cmd({ config, flags }: {
config: AstroConfig;
flags: Arguments;
}): Promise<void>;

@@ -1,19 +0,23 @@

import deepDiff from "deep-diff";
import { getMigrations, initializeFromMigrations } from "../../migrations.js";
const { diff } = deepDiff;
async function cmd({ config }) {
const currentSnapshot = JSON.parse(JSON.stringify(config.db?.collections ?? {}));
const allMigrationFiles = await getMigrations();
if (allMigrationFiles.length === 0) {
console.log("Project not yet initialized!");
process.exit(1);
import { getMigrationStatus, MIGRATION_NEEDED, MIGRATIONS_NOT_INITIALIZED, MIGRATIONS_UP_TO_DATE } from "../../migrations.js";
async function cmd({ config, flags }) {
const status = await getMigrationStatus(config);
const { state } = status;
if (flags.json) {
console.log(JSON.stringify(status));
process.exit(state === "up-to-date" ? 0 : 1);
}
const prevSnapshot = await initializeFromMigrations(allMigrationFiles);
const calculatedDiff = diff(prevSnapshot, currentSnapshot);
if (calculatedDiff) {
console.log("Changes detected!");
process.exit(1);
switch (state) {
case "no-migrations-found": {
console.log(MIGRATIONS_NOT_INITIALIZED);
process.exit(1);
}
case "ahead": {
console.log(MIGRATION_NEEDED);
process.exit(1);
}
case "up-to-date": {
console.log(MIGRATIONS_UP_TO_DATE);
return;
}
}
console.log("No changes detected.");
return;
}

@@ -20,0 +24,0 @@ export {

@@ -0,20 +1,37 @@

import { STUDIO_CONFIG_MISSING_CLI_ERROR } from "../errors.js";
async function cli({ flags, config }) {
const command = flags._[3];
if (!config.db?.studio) {
console.log(STUDIO_CONFIG_MISSING_CLI_ERROR);
process.exit(1);
}
switch (command) {
case "shell": {
const { cmd: shellCommand } = await import("./commands/shell/index.js");
return await shellCommand({ config, flags });
const { cmd } = await import("./commands/shell/index.js");
return await cmd({ config, flags });
}
case "sync": {
const { cmd: syncCommand } = await import("./commands/sync/index.js");
return await syncCommand({ config, flags });
const { cmd } = await import("./commands/sync/index.js");
return await cmd({ config, flags });
}
case "push": {
const { cmd: pushCommand } = await import("./commands/push/index.js");
return await pushCommand({ config, flags });
const { cmd } = await import("./commands/push/index.js");
return await cmd({ config, flags });
}
case "verify": {
const { cmd: verifyCommand } = await import("./commands/verify/index.js");
return await verifyCommand({ config, flags });
const { cmd } = await import("./commands/verify/index.js");
return await cmd({ config, flags });
}
case "login": {
const { cmd } = await import("./commands/login/index.js");
return await cmd({ config, flags });
}
case "logout": {
const { cmd } = await import("./commands/logout/index.js");
return await cmd({ config, flags });
}
case "link": {
const { cmd } = await import("./commands/link/index.js");
return await cmd({ config, flags });
}
default: {

@@ -38,2 +55,6 @@ if (command == null) {

astro db login Authenticate your machine with Astro Studio
astro db logout End your authenticated session with Astro Studio
astro db link Link this directory to an Astro Studio project
astro db sync Creates snapshot based on your schema

@@ -40,0 +61,0 @@ astro db push Pushes migrations to Astro Studio

import type { DBCollection, DBSnapshot } from '../types.js';
interface PromptResponses {
allowDataLoss: boolean;
fieldRenames: Record<string, string | false>;
collectionRenames: Record<string, string | false>;
}
export declare function getMigrationQueries({ oldSnapshot, newSnapshot, promptResponses, }: {
/** Dependency injected for unit testing */
type AmbiguityResponses = {
collectionRenames: Record<string, string>;
fieldRenames: {
[collectionName: string]: Record<string, string>;
};
};
export declare function getMigrationQueries({ oldSnapshot, newSnapshot, ambiguityResponses, }: {
oldSnapshot: DBSnapshot;
newSnapshot: DBSnapshot;
promptResponses?: PromptResponses;
}): Promise<string[]>;
export declare function getCollectionChangeQueries({ collectionName, oldCollection, newCollection, promptResponses, }: {
ambiguityResponses?: AmbiguityResponses;
}): Promise<{
queries: string[];
confirmations: string[];
}>;
export declare function getCollectionChangeQueries({ collectionName, oldCollection, newCollection, ambiguityResponses, }: {
collectionName: string;
oldCollection: DBCollection;
newCollection: DBCollection;
promptResponses?: PromptResponses;
}): Promise<string[]>;
ambiguityResponses?: AmbiguityResponses;
}): Promise<{
queries: string[];
confirmations: string[];
}>;
export {};
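To make the new return shape and the `ambiguityResponses` option above concrete, a hypothetical call site might look like the sketch below; the snapshot variables and rename mappings are invented for illustration, and the keys map new names to the old names they were renamed from, mirroring how resolveCollectionRenames and resolveFieldRenames look them up:

// Sketch only: assumes oldSnapshot/newSnapshot are DBSnapshot values, e.g. taken
// from getMigrationStatus(config) when state === 'ahead'.
const { queries, confirmations } = await getMigrationQueries({
  oldSnapshot,
  newSnapshot,
  // Pre-answer rename ambiguities (useful in tests) instead of prompting:
  ambiguityResponses: {
    collectionRenames: { Comment: 'Reply' },      // added collection -> dropped collection
    fieldRenames: { Post: { body: 'content' } },  // per collection: added field -> dropped field
  },
});
// `queries` become the migration file's `db` statements; `confirmations` are the
// data-loss warnings that `astro db push` asks the user to approve.
for (const message of confirmations) console.warn(message);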

@@ -10,2 +10,3 @@ import * as color from "kleur/colors";

getModifiers,
getReferencesConfig,
hasDefault,

@@ -15,2 +16,3 @@ schemaTypeToSqlType

import { hasPrimaryKey } from "../../runtime/index.js";
import { isSerializedSQL } from "../../runtime/types.js";
const sqlite = new SQLiteAsyncDialect();

@@ -21,13 +23,10 @@ const genTempTableName = customAlphabet("abcdefghijklmnopqrstuvwxyz", 10);

newSnapshot,
promptResponses
ambiguityResponses
}) {
const queries = [];
const confirmations = [];
let added = getAddedCollections(oldSnapshot, newSnapshot);
let dropped = getDroppedCollections(oldSnapshot, newSnapshot);
if (!isEmpty(added) && !isEmpty(dropped)) {
const resolved = await resolveCollectionRenames(
added,
dropped,
promptResponses?.collectionRenames
);
const resolved = await resolveCollectionRenames(added, dropped, ambiguityResponses);
added = resolved.added;

@@ -54,11 +53,11 @@ dropped = resolved.dropped;

continue;
const collectionChangeQueries = await getCollectionChangeQueries({
const result = await getCollectionChangeQueries({
collectionName,
oldCollection,
newCollection,
promptResponses
newCollection
});
queries.push(...collectionChangeQueries);
queries.push(...result.queries);
confirmations.push(...result.confirmations);
}
return queries;
return { queries, confirmations };
}

@@ -69,22 +68,24 @@ async function getCollectionChangeQueries({

newCollection,
promptResponses
ambiguityResponses
}) {
const queries = [];
const confirmations = [];
const updated = getUpdatedFields(oldCollection.fields, newCollection.fields);
let added = getAdded(oldCollection.fields, newCollection.fields);
let dropped = getDropped(oldCollection.fields, newCollection.fields);
if (isEmpty(updated) && isEmpty(added) && isEmpty(dropped)) {
return getChangeIndexQueries({
collectionName,
oldIndexes: oldCollection.indexes,
newIndexes: newCollection.indexes
});
const hasForeignKeyChanges = Boolean(
deepDiff(oldCollection.foreignKeys, newCollection.foreignKeys)
);
if (!hasForeignKeyChanges && isEmpty(updated) && isEmpty(added) && isEmpty(dropped)) {
return {
queries: getChangeIndexQueries({
collectionName,
oldIndexes: oldCollection.indexes,
newIndexes: newCollection.indexes
}),
confirmations
};
}
if (!isEmpty(added) && !isEmpty(dropped)) {
const resolved = await resolveFieldRenames(
collectionName,
added,
dropped,
promptResponses?.fieldRenames
);
if (!hasForeignKeyChanges && !isEmpty(added) && !isEmpty(dropped)) {
const resolved = await resolveFieldRenames(collectionName, added, dropped, ambiguityResponses);
added = resolved.added;

@@ -94,3 +95,3 @@ dropped = resolved.dropped;

}
if (isEmpty(updated) && Object.values(dropped).every(canAlterTableDropColumn) && Object.values(added).every(canAlterTableAddColumn)) {
if (!hasForeignKeyChanges && isEmpty(updated) && Object.values(dropped).every(canAlterTableDropColumn) && Object.values(added).every(canAlterTableAddColumn)) {
queries.push(

@@ -104,47 +105,33 @@ ...getAlterTableQueries(collectionName, added, dropped),

);
return queries;
return { queries, confirmations };
}
const dataLossCheck = canRecreateTableWithoutDataLoss(added, updated);
if (dataLossCheck.dataLoss) {
let allowDataLoss = promptResponses?.allowDataLoss;
const nameMsg = `Type the collection name ${color.blue(
collectionName
)} to confirm you want to delete all data:`;
const { reason, fieldName } = dataLossCheck;
const reasonMsgs = {
"added-required": `Adding required ${color.blue(
color.bold(collectionName)
)} field ${color.blue(color.bold(fieldName))}. ${color.red(
"This will delete all existing data in the collection!"
)} We recommend setting a default value to avoid data loss.`,
"added-unique": `Adding unique ${color.blue(color.bold(collectionName))} field ${color.blue(
color.bold(fieldName)
)}. ${color.red("This will delete all existing data in the collection!")}`,
"updated-type": `Changing the type of ${color.blue(
color.bold(collectionName)
)} field ${color.blue(color.bold(fieldName))}. ${color.red(
"This will delete all existing data in the collection!"
)}`
"added-required": `New field ${color.bold(
collectionName + "." + fieldName
)} is required with no default value.
This requires deleting existing data in the ${color.bold(
collectionName
)} collection.`,
"added-unique": `New field ${color.bold(
collectionName + "." + fieldName
)} is marked as unique.
This requires deleting existing data in the ${color.bold(
collectionName
)} collection.`,
"updated-type": `Updated field ${color.bold(
collectionName + "." + fieldName
)} cannot convert data to new field data type.
This requires deleting existing data in the ${color.bold(
collectionName
)} collection.`
};
if (allowDataLoss === void 0) {
const res = await prompts({
type: "text",
name: "allowDataLoss",
message: `${reasonMsgs[reason]} ${nameMsg}`,
validate: (name) => name === collectionName || "Incorrect collection name"
});
if (typeof res.allowDataLoss !== "string")
process.exit(0);
allowDataLoss = !!res.allowDataLoss;
}
if (!allowDataLoss) {
console.info("Exiting without changes \u{1F44B}");
process.exit(0);
}
confirmations.push(reasonMsgs[reason]);
}
const addedPrimaryKey = Object.entries(added).find(([, field]) => hasPrimaryKey(field));
const primaryKeyExists = Object.entries(newCollection.fields).find(
([, field]) => hasPrimaryKey(field)
);
const droppedPrimaryKey = Object.entries(dropped).find(([, field]) => hasPrimaryKey(field));
const updatedPrimaryKey = Object.entries(updated).find(
([, field]) => hasPrimaryKey(field.old) || hasPrimaryKey(field.new)
);
const recreateTableQueries = getRecreateTableQueries({

@@ -155,6 +142,6 @@ collectionName,

hasDataLoss: dataLossCheck.dataLoss,
migrateHiddenPrimaryKey: !addedPrimaryKey && !droppedPrimaryKey && !updatedPrimaryKey
migrateHiddenPrimaryKey: !primaryKeyExists && !droppedPrimaryKey
});
queries.push(...recreateTableQueries, ...getCreateIndexQueries(collectionName, newCollection));
return queries;
return { queries, confirmations };
}

@@ -179,3 +166,3 @@ function getChangeIndexQueries({

}
async function resolveFieldRenames(collectionName, mightAdd, mightDrop, renamePromptResponses) {
async function resolveFieldRenames(collectionName, mightAdd, mightDrop, ambiguityResponses) {
const added = {};

@@ -185,41 +172,31 @@ const dropped = {};

for (const [fieldName, field] of Object.entries(mightAdd)) {
const promptResponse = renamePromptResponses?.[fieldName];
if (promptResponse === false) {
added[fieldName] = field;
continue;
} else if (promptResponse) {
renamed.push({ from: promptResponse, to: fieldName });
continue;
let oldFieldName = ambiguityResponses ? ambiguityResponses.fieldRenames[collectionName]?.[fieldName] ?? "__NEW__" : void 0;
if (!oldFieldName) {
const res = await prompts(
{
type: "select",
name: "fieldName",
message: "New field " + color.blue(color.bold(`${collectionName}.${fieldName}`)) + " detected. Was this renamed from an existing field?",
choices: [
{ title: "New field (not renamed from existing)", value: "__NEW__" },
...Object.keys(mightDrop).filter((key) => !(key in renamed)).map((key) => ({ title: key, value: key }))
]
},
{
onCancel: () => {
process.exit(1);
}
}
);
oldFieldName = res.fieldName;
}
const res = await prompts({
type: "toggle",
name: "isRename",
message: `Is the field ${color.blue(color.bold(fieldName))} in collection ${color.blue(
color.bold(collectionName)
)} a new field, or renaming an existing field?`,
initial: false,
active: "Rename",
inactive: "New field"
});
if (typeof res.isRename !== "boolean")
process.exit(0);
if (!res.isRename) {
if (oldFieldName === "__NEW__") {
added[fieldName] = field;
continue;
} else {
renamed.push({ from: oldFieldName, to: fieldName });
}
const choices = Object.keys(mightDrop).filter((key) => !(key in renamed)).map((key) => ({ title: key, value: key }));
const { oldFieldName } = await prompts({
type: "select",
name: "oldFieldName",
message: `Which field in ${color.blue(
color.bold(collectionName)
)} should be renamed to ${color.blue(color.bold(fieldName))}?`,
choices
});
if (typeof oldFieldName !== "string")
process.exit(0);
renamed.push({ from: oldFieldName, to: fieldName });
for (const [droppedFieldName, droppedField] of Object.entries(mightDrop)) {
if (!renamed.find((r) => r.from === droppedFieldName))
dropped[droppedFieldName] = droppedField;
}
for (const [droppedFieldName, droppedField] of Object.entries(mightDrop)) {
if (!renamed.find((r) => r.from === droppedFieldName)) {
dropped[droppedFieldName] = droppedField;
}

@@ -229,3 +206,3 @@ }

}
async function resolveCollectionRenames(mightAdd, mightDrop, renamePromptResponses) {
async function resolveCollectionRenames(mightAdd, mightDrop, ambiguityResponses) {
const added = {};

@@ -235,39 +212,31 @@ const dropped = {};

for (const [collectionName, collection] of Object.entries(mightAdd)) {
const promptResponse = renamePromptResponses?.[collectionName];
if (promptResponse === false) {
added[collectionName] = collection;
continue;
} else if (promptResponse) {
renamed.push({ from: promptResponse, to: collectionName });
continue;
let oldCollectionName = ambiguityResponses ? ambiguityResponses.collectionRenames[collectionName] ?? "__NEW__" : void 0;
if (!oldCollectionName) {
const res = await prompts(
{
type: "select",
name: "collectionName",
message: "New collection " + color.blue(color.bold(collectionName)) + " detected. Was this renamed from an existing collection?",
choices: [
{ title: "New collection (not renamed from existing)", value: "__NEW__" },
...Object.keys(mightDrop).filter((key) => !(key in renamed)).map((key) => ({ title: key, value: key }))
]
},
{
onCancel: () => {
process.exit(1);
}
}
);
oldCollectionName = res.collectionName;
}
const res = await prompts({
type: "toggle",
name: "isRename",
message: `Is the collection ${color.blue(
color.bold(collectionName)
)} a new collection, or renaming an existing collection?`,
initial: false,
active: "Rename",
inactive: "New collection"
});
if (typeof res.isRename !== "boolean")
process.exit(0);
if (!res.isRename) {
if (oldCollectionName === "__NEW__") {
added[collectionName] = collection;
continue;
} else {
renamed.push({ from: oldCollectionName, to: collectionName });
}
const choices = Object.keys(mightDrop).filter((key) => !(key in renamed)).map((key) => ({ title: key, value: key }));
const { oldCollectionName } = await prompts({
type: "select",
name: "oldCollectionName",
message: `Which collection should be renamed to ${color.blue(color.bold(collectionName))}?`,
choices
});
if (typeof oldCollectionName !== "string")
process.exit(0);
renamed.push({ from: oldCollectionName, to: collectionName });
for (const [droppedCollectionName, droppedCollection] of Object.entries(mightDrop)) {
if (!renamed.find((r) => r.from === droppedCollectionName))
dropped[droppedCollectionName] = droppedCollection;
}
for (const [droppedCollectionName, droppedCollection] of Object.entries(mightDrop)) {
if (!renamed.find((r) => r.from === droppedCollectionName)) {
dropped[droppedCollectionName] = droppedCollection;
}

@@ -355,14 +324,16 @@ }

function canAlterTableAddColumn(field) {
if (field.unique)
if (field.schema.unique)
return false;
if (hasRuntimeDefault(field))
return false;
if (!field.optional && !hasDefault(field))
if (!field.schema.optional && !hasDefault(field))
return false;
if (hasPrimaryKey(field))
return false;
if (getReferencesConfig(field))
return false;
return true;
}
function canAlterTableDropColumn(field) {
if (field.unique)
if (field.schema.unique)
return false;

@@ -378,6 +349,6 @@ if (hasPrimaryKey(field))

}
if (!a.optional && !hasDefault(a)) {
if (!a.schema.optional && !hasDefault(a)) {
return { dataLoss: true, fieldName, reason: "added-required" };
}
if (!a.optional && a.unique) {
if (!a.schema.optional && a.schema.unique) {
return { dataLoss: true, fieldName, reason: "added-unique" };

@@ -426,10 +397,12 @@ }

continue;
if (objShallowEqual(oldField, newField))
continue;
const oldFieldSqlType = { ...oldField, type: schemaTypeToSqlType(oldField.type) };
const newFieldSqlType = { ...newField, type: schemaTypeToSqlType(newField.type) };
const isSafeTypeUpdate = objShallowEqual(oldFieldSqlType, newFieldSqlType) && canChangeTypeWithoutQuery(oldField, newField);
if (isSafeTypeUpdate)
continue;
updated[key] = { old: oldField, new: newField };
const diff = deepDiff(oldField, newField, (path, objKey) => {
const isTypeKey = objKey === "type" && path.length === 0;
return (
// If we can safely update the type without a SQL query, ignore the diff
isTypeKey && oldField.type !== newField.type && canChangeTypeWithoutQuery(oldField, newField)
);
});
if (diff) {
updated[key] = { old: oldField, new: newField };
}
}

@@ -454,14 +427,4 @@ return updated;

function hasRuntimeDefault(field) {
return field.type === "date" && field.default === "now";
return !!(field.schema.default && isSerializedSQL(field.schema.default));
}
function objShallowEqual(a, b) {
if (Object.keys(a).length !== Object.keys(b).length)
return false;
for (const [key, value] of Object.entries(a)) {
if (JSON.stringify(b[key]) !== JSON.stringify(value)) {
return false;
}
}
return true;
}
export {

@@ -468,0 +431,0 @@ getCollectionChangeQueries,

@@ -1,3 +0,23 @@

import type { DBSnapshot } from '../types.js';
import deepDiff from 'deep-diff';
import { type DBSnapshot } from '../types.js';
import type { AstroConfig } from 'astro';
export type MigrationStatus = {
state: 'no-migrations-found';
currentSnapshot: DBSnapshot;
} | {
state: 'ahead';
oldSnapshot: DBSnapshot;
newSnapshot: DBSnapshot;
diff: deepDiff.Diff<DBSnapshot, DBSnapshot>[];
newFilename: string;
summary: string;
} | {
state: 'up-to-date';
currentSnapshot: DBSnapshot;
};
export declare function getMigrationStatus(config: AstroConfig): Promise<MigrationStatus>;
export declare const MIGRATIONS_CREATED: string;
export declare const MIGRATIONS_UP_TO_DATE: string;
export declare const MIGRATIONS_NOT_INITIALIZED: string;
export declare const MIGRATION_NEEDED: string;
export declare function getMigrations(): Promise<string[]>;

@@ -7,2 +27,3 @@ export declare function loadMigration(migration: string): Promise<{

db: string[];
confirm?: string[];
}>;

@@ -9,0 +30,0 @@ export declare function loadInitialSnapshot(): Promise<DBSnapshot>;

import deepDiff from "deep-diff";
import { mkdir, readFile, readdir, writeFile } from "fs/promises";
const { applyChange } = deepDiff;
import { collectionsSchema } from "../types.js";
import { cyan, green, yellow } from "kleur/colors";
const { applyChange, diff: generateDiff } = deepDiff;
async function getMigrationStatus(config) {
const currentSnapshot = createCurrentSnapshot(config);
const allMigrationFiles = await getMigrations();
if (allMigrationFiles.length === 0) {
return {
state: "no-migrations-found",
currentSnapshot
};
}
const previousSnapshot = await initializeFromMigrations(allMigrationFiles);
const diff = generateDiff(previousSnapshot, currentSnapshot);
if (diff) {
const n = getNewMigrationNumber(allMigrationFiles);
const newFilename = `${String(n + 1).padStart(4, "0")}_migration.json`;
return {
state: "ahead",
oldSnapshot: previousSnapshot,
newSnapshot: currentSnapshot,
diff,
newFilename,
summary: generateDiffSummary(diff)
};
}
return {
state: "up-to-date",
currentSnapshot
};
}
const MIGRATIONS_CREATED = `${green("\u25A0 Migrations initialized!")}
To execute your migrations, run
${cyan("astro db push")}`;
const MIGRATIONS_UP_TO_DATE = `${green("\u25A0 No migrations needed!")}
Your database is up to date.
`;
const MIGRATIONS_NOT_INITIALIZED = `${yellow("\u25B6 No migrations found!")}
To scaffold your migrations folder, run
${cyan("astro db sync")}
`;
const MIGRATION_NEEDED = `${yellow("\u25B6 Changes detected!")}
To create the necessary migration file, run
${cyan("astro db sync")}
`;
function generateDiffSummary(diff) {
return JSON.stringify(diff, null, 2);
}
function getNewMigrationNumber(allMigrationFiles) {
const len = allMigrationFiles.length - 1;
return allMigrationFiles.reduce((acc, curr) => {
const num = Number.parseInt(curr.split("_")[0] ?? len, 10);
return num > acc ? num : acc;
}, 0);
}
async function getMigrations() {

@@ -43,3 +101,4 @@ const migrationFiles = await readdir("./migrations").catch((err) => {

function createCurrentSnapshot(config) {
const schema = JSON.parse(JSON.stringify(config.db?.collections ?? {}));
const collectionsConfig = collectionsSchema.parse(config.db?.collections ?? {});
const schema = JSON.parse(JSON.stringify(collectionsConfig));
return { experimentalVersion: 1, schema };

@@ -51,4 +110,9 @@ }

export {
MIGRATIONS_CREATED,
MIGRATIONS_NOT_INITIALIZED,
MIGRATIONS_UP_TO_DATE,
MIGRATION_NEEDED,
createCurrentSnapshot,
createEmptySnapshot,
getMigrationStatus,
getMigrations,

@@ -55,0 +119,0 @@ initializeFromMigrations,

@@ -1,1 +0,5 @@

export declare const appTokenError: string;
export declare const MISSING_SESSION_ID_ERROR: string;
export declare const MISSING_PROJECT_ID_ERROR: string;
export declare const STUDIO_CONFIG_MISSING_WRITABLE_COLLECTIONS_ERROR: (collectionName: string) => string;
export declare const STUDIO_CONFIG_MISSING_CLI_ERROR: string;
export declare const MIGRATIONS_NOT_INITIALIZED: string;

@@ -1,7 +0,37 @@

import { red } from "kleur/colors";
const appTokenError = `${red(
"\u26A0\uFE0F App token invalid or expired."
)} Please generate a new one from your the Studio dashboard under project settings.`;
import { cyan, bold, red, green, yellow } from "kleur/colors";
const MISSING_SESSION_ID_ERROR = `${red(
"\u25B6 Login required!"
)}
To authenticate with Astro Studio, run
${cyan("astro db login")}
`;
const MISSING_PROJECT_ID_ERROR = `${red(
"\u25B6 Directory not linked."
)}
To link this directory to an Astro Studio project, run
${cyan("astro db link")}
`;
const STUDIO_CONFIG_MISSING_WRITABLE_COLLECTIONS_ERROR = (collectionName) => `${red(`\u25B6 Writable collection ${bold(collectionName)} requires Astro Studio.`)}
Visit ${cyan("https://astro.build/studio")} to create your account
and set ${green("studio: true")} in your astro.config.mjs file to enable Studio.
`;
const STUDIO_CONFIG_MISSING_CLI_ERROR = `${red("\u25B6 This command requires Astro Studio.")}
Visit ${cyan("https://astro.build/studio")} to create your account
and set ${green("studio: true")} in your astro.config.mjs file to enable Studio.
`;
const MIGRATIONS_NOT_INITIALIZED = `${yellow("\u25B6 No migrations found!")}
To scaffold your migrations folder, run
${cyan("astro db sync")}
`;
export {
appTokenError
MIGRATIONS_NOT_INITIALIZED,
MISSING_PROJECT_ID_ERROR,
MISSING_SESSION_ID_ERROR,
STUDIO_CONFIG_MISSING_CLI_ERROR,
STUDIO_CONFIG_MISSING_WRITABLE_COLLECTIONS_ERROR
};

@@ -9,15 +9,18 @@ import { vitePluginDb } from "./vite-plugin-db.js";

import { astroConfigWithDbSchema } from "../types.js";
import { getAstroStudioEnv } from "../utils.js";
import { appTokenError } from "../errors.js";
import {} from "../utils.js";
import { STUDIO_CONFIG_MISSING_WRITABLE_COLLECTIONS_ERROR } from "../errors.js";
import { errorMap } from "./error-map.js";
import { dirname } from "path";
import { fileURLToPath } from "url";
import { bold } from "kleur/colors";
import { blue, yellow } from "kleur/colors";
import { fileURLIntegration } from "./file-url.js";
import { setupDbTables } from "../queries.js";
import { getManagedAppTokenOrExit } from "../tokens.js";
function astroDBIntegration() {
let connectedToRemote = false;
let appToken;
return {
name: "astro:db",
hooks: {
async "astro:config:setup"({ logger, updateConfig, config, command }) {
"astro:config:setup": async ({ logger, updateConfig, config, command }) => {
if (command === "preview")

@@ -28,20 +31,17 @@ return;

const studio = configWithDb.db?.studio ?? false;
if (!studio && Object.values(collections).some((c) => c.writable)) {
logger.warn(
`Writable collections should only be used with Astro Studio. Did you set the ${bold(
"studio"
)} flag in your astro config?`
const foundWritableCollection = Object.entries(collections).find(([, c]) => c.writable);
if (!studio && foundWritableCollection) {
logger.error(
STUDIO_CONFIG_MISSING_WRITABLE_COLLECTIONS_ERROR(foundWritableCollection[0])
);
process.exit(1);
}
let dbPlugin;
if (studio && command === "build") {
const appToken = getAstroStudioEnv().ASTRO_STUDIO_APP_TOKEN;
if (!appToken) {
logger.error(appTokenError);
process.exit(0);
}
if (studio && command === "build" && process.env.ASTRO_DB_TEST_ENV !== "1") {
appToken = await getManagedAppTokenOrExit();
connectedToRemote = true;
dbPlugin = vitePluginDb({
connectToStudio: true,
collections,
appToken,
appToken: appToken.token,
root: config.root

@@ -68,3 +68,3 @@ });

});
logger.info("Collections set up \u{1F680}");
logger.debug("Database setup complete.");
dbPlugin = vitePluginDb({

@@ -97,2 +97,17 @@ connectToStudio: false,

await typegen({ collections, root: config.root });
},
"astro:server:start": async ({ logger }) => {
setTimeout(() => {
logger.info(
connectedToRemote ? "Connected to remote database." : "New local database created."
);
}, 100);
},
"astro:build:start": async ({ logger }) => {
logger.info(
"database: " + (connectedToRemote ? yellow("remote") : blue("local database."))
);
},
"astro:build:done": async ({}) => {
await appToken?.destroy();
}

@@ -99,0 +114,0 @@ }

@@ -30,4 +30,4 @@ import { existsSync } from "node:fs";

type: field.type,
optional: field.optional,
default: field.default
optional: field.schema.optional,
default: field.schema.default
}

@@ -34,0 +34,0 @@ ])

@@ -13,3 +13,3 @@ import type { DBCollections } from '../types.js';

}): VitePlugin;
export declare function getVirtualModContents({ collections, root }: {
export declare function getVirtualModContents({ collections, root, }: {
collections: DBCollections;

@@ -16,0 +16,0 @@ root: URL;

@@ -1,3 +0,2 @@

import { RUNTIME_IMPORT, VIRTUAL_MODULE_ID, DB_PATH, RUNTIME_DRIZZLE_IMPORT } from "../consts.js";
import { fileURLToPath } from "node:url";
import { DB_PATH, RUNTIME_DRIZZLE_IMPORT, RUNTIME_IMPORT, VIRTUAL_MODULE_ID } from "../consts.js";
const resolvedVirtualModuleId = "\0" + VIRTUAL_MODULE_ID;

@@ -23,7 +22,10 @@ function vitePluginDb(params) {

}
function getVirtualModContents({ collections, root }) {
function getVirtualModContents({
collections,
root
}) {
const dbUrl = new URL(DB_PATH, root);
return `
import { collectionToTable, createLocalDatabaseClient } from ${RUNTIME_IMPORT};
import dbUrl from '${fileURLToPath(dbUrl)}?fileurl';
import dbUrl from ${JSON.stringify(`${dbUrl}?fileurl`)};

@@ -30,0 +32,0 @@ const params = ${JSON.stringify({

@@ -14,7 +14,63 @@ import type { SqliteRemoteDatabase } from 'drizzle-orm/sqlite-proxy';

export declare function getCreateIndexQueries(collectionName: string, collection: Pick<DBCollection, 'indexes'>): string[];
export declare function getCreateForeignKeyQueries(collectionName: string, collection: DBCollection): string[];
export declare function schemaTypeToSqlType(type: FieldType): 'text' | 'integer';
export declare function getModifiers(fieldName: string, field: DBField): string;
type WithDefaultDefined<T extends DBField> = T & Required<Pick<T, 'default'>>;
export declare function getReferencesConfig(field: DBField): {
type: "number";
schema: ({
name?: string | undefined;
label?: string | undefined;
unique?: boolean | undefined;
collection?: string | undefined;
} & {
primaryKey?: false | undefined;
optional?: boolean | undefined;
default?: number | import("../runtime/types.js").SerializedSQL | undefined;
} & {
references?: any | undefined;
}) | ({
name?: string | undefined;
label?: string | undefined;
unique?: boolean | undefined;
collection?: string | undefined;
} & {
primaryKey: true;
optional?: false | undefined;
default?: undefined;
} & {
references?: any | undefined;
});
} | {
type: "text";
schema: ({
name?: string | undefined;
label?: string | undefined;
unique?: boolean | undefined;
collection?: string | undefined;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
multiline?: boolean | undefined;
} & {
primaryKey?: false | undefined;
optional?: boolean | undefined;
} & {
references?: any | undefined;
}) | ({
name?: string | undefined;
label?: string | undefined;
unique?: boolean | undefined;
collection?: string | undefined;
default?: string | import("../runtime/types.js").SerializedSQL | undefined;
multiline?: boolean | undefined;
} & {
primaryKey: true;
optional?: false | undefined;
} & {
references?: any | undefined;
});
} | undefined;
type WithDefaultDefined<T extends DBField> = T & {
schema: Required<Pick<T['schema'], 'default'>>;
};
type DBFieldWithDefault = WithDefaultDefined<TextField> | WithDefaultDefined<DateField> | WithDefaultDefined<NumberField> | WithDefaultDefined<BooleanField> | WithDefaultDefined<JsonField>;
export declare function hasDefault(field: DBField): field is DBFieldWithDefault;
export {};

@@ -6,3 +6,4 @@ import {

import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import { collectionToTable, hasPrimaryKey } from "../runtime/index.js";
import { hasPrimaryKey } from "../runtime/index.js";
import { isSerializedSQL } from "../runtime/types.js";
const sqlite = new SQLiteAsyncDialect();

@@ -15,2 +16,3 @@ async function setupDbTables({

mode
// TODO: Remove once Turso has foreign key PRAGMA support
}) {

@@ -28,13 +30,6 @@ const setupQueries = [];

if (data) {
for (const [name, collection] of Object.entries(collections)) {
const table = collectionToTable(name, collection);
collection._setMeta?.({ table });
}
try {
await data({
async seed({ table }, values) {
const result = Array.isArray(values) ? (
// TODO: fix values typing once we can infer fields type correctly
await db.insert(table).values(values).returning()
) : await db.insert(table).values(values).returning().get();
seed: async ({ table }, values) => {
const result = Array.isArray(values) ? db.insert(table).values(values).returning() : db.insert(table).values(values).returning().get();
return result;

@@ -45,8 +40,7 @@ },

});
} catch (e) {
} catch (error) {
(logger ?? console).error(
`Failed to seed data. Did you update to match recent schema changes? Full error:
${e}`
`Failed to seed data. Did you update to match recent schema changes?`
);
(logger ?? console).error(error);
}

@@ -70,2 +64,3 @@ }

}
colQueries.push(...getCreateForeignKeyQueries(collectionName, collection));
query += colQueries.join(", ") + ")";

@@ -77,3 +72,3 @@ return query;

for (const [indexName, indexProps] of Object.entries(collection.indexes ?? {})) {
const onColNames = Array.isArray(indexProps.on) ? indexProps.on : [indexProps.on];
const onColNames = asArray(indexProps.on);
const onCols = onColNames.map((colName) => sqlite.escapeName(colName));

@@ -88,2 +83,26 @@ const unique = indexProps.unique ? "UNIQUE " : "";

}
function getCreateForeignKeyQueries(collectionName, collection) {
let queries = [];
for (const foreignKey of collection.foreignKeys ?? []) {
const fields = asArray(foreignKey.fields);
const references = asArray(foreignKey.references);
if (fields.length !== references.length) {
throw new Error(
`Foreign key on ${collectionName} is misconfigured. \`fields\` and \`references\` must be the same length.`
);
}
const referencedCollection = references[0]?.schema.collection;
if (!referencedCollection) {
throw new Error(
`Foreign key on ${collectionName} is misconfigured. \`references\` cannot be empty.`
);
}
const query = `FOREIGN KEY (${fields.map((f) => sqlite.escapeName(f)).join(", ")}) REFERENCES ${sqlite.escapeName(referencedCollection)}(${references.map((r) => sqlite.escapeName(r.schema.name)).join(", ")})`;
queries.push(query);
}
return queries;
}
function asArray(value) {
return Array.isArray(value) ? value : [value];
}
function schemaTypeToSqlType(type) {

@@ -105,6 +124,6 @@ switch (type) {

}
if (!field.optional) {
if (!field.schema.optional) {
modifiers += " NOT NULL";
}
if (field.unique) {
if (field.schema.unique) {
modifiers += " UNIQUE";

@@ -115,6 +134,22 @@ }

}
const references = getReferencesConfig(field);
if (references) {
const { collection, name } = references.schema;
if (!collection || !name) {
throw new Error(
`Invalid reference for field ${fieldName}. This is an unexpected error that should be reported to the Astro team.`
);
}
modifiers += ` REFERENCES ${sqlite.escapeName(collection)} (${sqlite.escapeName(name)})`;
}
return modifiers;
}
function getReferencesConfig(field) {
const canHaveReferences = field.type === "number" || field.type === "text";
if (!canHaveReferences)
return void 0;
return field.schema.references;
}
function hasDefault(field) {
if (field.default !== void 0) {
if (field.schema.default !== void 0) {
return true;

@@ -127,16 +162,26 @@ }

}
function toDefault(def) {
const type = typeof def;
if (type === "string") {
return sqlite.escapeString(def);
} else if (type === "boolean") {
return def ? "TRUE" : "FALSE";
} else {
return def + "";
}
}
function getDefaultValueSql(columnName, column) {
if (isSerializedSQL(column.schema.default)) {
return column.schema.default.sql;
}
switch (column.type) {
case "boolean":
return column.default ? "TRUE" : "FALSE";
case "number":
return `${column.default || "AUTOINCREMENT"}`;
case "text":
return sqlite.escapeString(column.default);
case "date":
return column.default === "now" ? "CURRENT_TIMESTAMP" : sqlite.escapeString(column.default);
return toDefault(column.schema.default);
case "json": {
let stringified = "";
try {
stringified = JSON.stringify(column.default);
stringified = JSON.stringify(column.schema.default);
} catch (e) {

@@ -155,5 +200,7 @@ console.log(

export {
getCreateForeignKeyQueries,
getCreateIndexQueries,
getCreateTableQuery,
getModifiers,
getReferencesConfig,
hasDefault,

@@ -160,0 +207,0 @@ schemaTypeToSqlType,

@@ -0,34 +1,98 @@

import { SQLiteAsyncDialect } from "drizzle-orm/sqlite-core";
import { collectionToTable } from "../runtime/index.js";
import { z } from "zod";
import { SQL } from "drizzle-orm";
import { errorMap } from "./integration/error-map.js";
import { SERIALIZED_SQL_KEY } from "../runtime/types.js";
const sqlite = new SQLiteAsyncDialect();
const sqlSchema = z.instanceof(SQL).transform(
(sqlObj) => ({
[SERIALIZED_SQL_KEY]: true,
sql: sqlite.sqlToQuery(sqlObj).sql
})
);
const baseFieldSchema = z.object({
label: z.string().optional(),
optional: z.boolean().optional(),
unique: z.boolean().optional()
unique: z.boolean().optional(),
// Defined when `defineCollection()` is called
name: z.string().optional(),
collection: z.string().optional()
});
const booleanFieldSchema = baseFieldSchema.extend({
const booleanFieldSchema = z.object({
type: z.literal("boolean"),
default: z.boolean().optional()
schema: baseFieldSchema.extend({
default: z.union([z.boolean(), sqlSchema]).optional()
})
});
const numberFieldSchema = baseFieldSchema.extend({
const numberFieldBaseSchema = baseFieldSchema.omit({ optional: true }).and(
z.union([
z.object({
primaryKey: z.literal(false).optional(),
optional: z.boolean().optional(),
default: z.union([z.number(), sqlSchema]).optional()
}),
z.object({
// `integer primary key` uses ROWID as the default value.
// `optional` and `default` do not have an effect,
// so disable these config options for primary keys.
primaryKey: z.literal(true),
optional: z.literal(false).optional(),
default: z.literal(void 0).optional()
})
])
);
const numberFieldOptsSchema = numberFieldBaseSchema.and(
z.object({
references: z.function().returns(z.lazy(() => numberFieldSchema)).optional().transform((fn) => fn?.())
})
);
const numberFieldSchema = z.object({
type: z.literal("number"),
default: z.number().optional(),
primaryKey: z.boolean().optional()
schema: numberFieldOptsSchema
});
const textFieldSchema = baseFieldSchema.extend({
const textFieldBaseSchema = baseFieldSchema.omit({ optional: true }).extend({
default: z.union([z.string(), sqlSchema]).optional(),
multiline: z.boolean().optional()
}).and(
z.union([
z.object({
primaryKey: z.literal(false).optional(),
optional: z.boolean().optional()
}),
z.object({
// text primary key allows NULL values.
// NULL values bypass unique checks, which could
// lead to duplicate URLs per record in Astro Studio.
// disable `optional` for primary keys.
primaryKey: z.literal(true),
optional: z.literal(false).optional()
})
])
);
const textFieldOptsSchema = textFieldBaseSchema.and(
z.object({
references: z.function().returns(z.lazy(() => textFieldSchema)).optional().transform((fn) => fn?.())
})
);
const textFieldSchema = z.object({
type: z.literal("text"),
multiline: z.boolean().optional(),
default: z.string().optional(),
primaryKey: z.boolean().optional()
schema: textFieldOptsSchema
});
const dateFieldSchema = baseFieldSchema.extend({
const dateFieldSchema = z.object({
type: z.literal("date"),
default: z.union([
z.literal("now"),
// allow date-like defaults in user config,
// transform to ISO string for D1 storage
z.coerce.date().transform((d) => d.toISOString())
]).optional()
schema: baseFieldSchema.extend({
default: z.union([
sqlSchema,
// allow date-like defaults in user config,
// transform to ISO string for D1 storage
z.coerce.date().transform((d) => d.toISOString())
]).optional()
})
});
const jsonFieldSchema = baseFieldSchema.extend({
const jsonFieldSchema = z.object({
type: z.literal("json"),
default: z.unknown().optional()
schema: baseFieldSchema.extend({
default: z.unknown().optional()
})
});

@@ -42,2 +106,3 @@ const fieldSchema = z.union([

]);
const referenceableFieldSchema = z.union([textFieldSchema, numberFieldSchema]);
const fieldsSchema = z.record(fieldSchema);

@@ -48,8 +113,10 @@ const indexSchema = z.object({

});
const indexesSchema = z.record(indexSchema);
const foreignKeysSchema = z.object({
fields: z.string().or(z.array(z.string())),
references: z.function().returns(z.lazy(() => referenceableFieldSchema.or(z.array(referenceableFieldSchema)))).transform((fn) => fn())
});
const baseCollectionSchema = z.object({
fields: fieldsSchema,
indexes: indexesSchema.optional(),
table: z.any(),
_setMeta: z.function().optional()
indexes: z.record(indexSchema).optional(),
foreignKeys: z.array(foreignKeysSchema).optional()
});

@@ -63,3 +130,20 @@ const readableCollectionSchema = baseCollectionSchema.extend({

const collectionSchema = z.union([readableCollectionSchema, writableCollectionSchema]);
const collectionsSchema = z.record(collectionSchema);
const collectionsSchema = z.preprocess((rawCollections) => {
const collections = z.record(z.any()).parse(rawCollections, { errorMap });
for (const [collectionName, collection] of Object.entries(collections)) {
collection.table = collectionToTable(
collectionName,
collectionSchema.parse(collection, { errorMap })
);
const { fields } = z.object({ fields: z.record(z.any()) }).parse(collection, { errorMap });
for (const [fieldName, field2] of Object.entries(fields)) {
field2.schema.name = fieldName;
field2.schema.collection = collectionName;
}
}
return rawCollections;
}, z.record(collectionSchema));
function defineData(fn) {
return fn;
}
const dbConfigSchema = z.object({

@@ -73,53 +157,40 @@ studio: z.boolean().optional(),

});
function defineCollection(userConfig) {
const meta = { table: null };
function _setMeta(values) {
Object.assign(meta, values);
}
function baseDefineCollection(userConfig, writable) {
return {
...userConfig,
writable: false,
get table() {
return meta.table;
},
// @ts-expect-error private field
_setMeta
writable,
// set at runtime to get the table name
table: null
};
}
function defineCollection(userConfig) {
return baseDefineCollection(userConfig, false);
}
function defineWritableCollection(userConfig) {
const meta = { table: null };
function _setMeta(values) {
Object.assign(meta, values);
}
return baseDefineCollection(userConfig, true);
}
function createField(type, schema) {
return {
...userConfig,
writable: true,
get table() {
return meta.table;
},
// @ts-expect-error private field
_setMeta
type,
/**
* @internal
*/
schema
};
}
const baseDefaults = {
optional: false,
unique: false,
label: void 0,
default: void 0
};
const field = {
number(opts = {}) {
return { type: "number", ...baseDefaults, ...opts };
number: (opts = {}) => {
return createField("number", opts);
},
boolean(opts = {}) {
return { type: "boolean", ...baseDefaults, ...opts };
boolean: (opts = {}) => {
return createField("boolean", opts);
},
text(opts = {}) {
return { type: "text", multiline: false, ...baseDefaults, ...opts };
text: (opts = {}) => {
return createField("text", opts);
},
date(opts = {}) {
return { type: "date", ...baseDefaults, ...opts };
return createField("date", opts);
},
json(opts = {}) {
return { type: "json", ...baseDefaults, ...opts };
return createField("json", opts);
}

@@ -133,2 +204,3 @@ };

defineCollection,
defineData,
defineWritableCollection,

@@ -138,3 +210,4 @@ field,

readableCollectionSchema,
referenceableFieldSchema,
writableCollectionSchema
};

@@ -5,1 +5,2 @@ import type { AstroConfig } from 'astro';

export declare function getRemoteDatabaseUrl(): string;
export declare function getAstroStudioUrl(): string;

@@ -10,5 +10,10 @@ import { loadEnv } from "vite";

}
function getAstroStudioUrl() {
const env = getAstroStudioEnv();
return env.ASTRO_STUDIO_URL;
}
export {
getAstroStudioEnv,
getAstroStudioUrl,
getRemoteDatabaseUrl
};

@@ -1,4 +0,5 @@

export { defineCollection, defineWritableCollection, field } from './core/types.js';
export { defineCollection, defineWritableCollection, defineData, field } from './core/types.js';
export type { ResolvedCollectionConfig, DBDataContext } from './core/types.js';
export { cli } from './core/cli/index.js';
export { integration as default } from './core/integration/index.js';
export { sql, NOW, TRUE, FALSE } from './runtime/index.js';

@@ -1,10 +0,16 @@

import { defineCollection, defineWritableCollection, field } from "./core/types.js";
import { defineCollection, defineWritableCollection, defineData, field } from "./core/types.js";
import { cli } from "./core/cli/index.js";
import { integration } from "./core/integration/index.js";
import { sql, NOW, TRUE, FALSE } from "./runtime/index.js";
export {
FALSE,
NOW,
TRUE,
cli,
integration as default,
defineCollection,
defineData,
defineWritableCollection,
field
field,
sql
};

@@ -43,3 +43,3 @@ import { createClient } from "@libsql/client";

function createRemoteDatabaseClient(appToken, remoteDbURL) {
const url = new URL("./db/query/", remoteDbURL);
const url = new URL("/db/query", remoteDbURL);
const db = drizzleProxy(async (sql, parameters, method) => {

@@ -46,0 +46,0 @@ const requestBody = { sql, args: parameters };

import type { SqliteRemoteDatabase } from 'drizzle-orm/sqlite-proxy';
import { type DBCollection, type DBField } from '../core/types.js';
import { type ColumnDataType } from 'drizzle-orm';
import { type ColumnDataType, sql, SQL } from 'drizzle-orm';
export { sql };
export type SqliteDB = SqliteRemoteDatabase;

@@ -8,2 +9,5 @@ export type { Table } from './types.js';

export declare function hasPrimaryKey(field: DBField): boolean;
export declare const NOW: SQL<unknown>;
export declare const TRUE: SQL<unknown>;
export declare const FALSE: SQL<unknown>;
export declare function collectionToTable(name: string, collection: DBCollection, isJsonSerializable?: boolean): import("drizzle-orm/sqlite-core").SQLiteTableWithColumns<{

@@ -10,0 +14,0 @@ name: string;

import {} from "../core/types.js";
import { sql } from "drizzle-orm";
import { sql, SQL } from "drizzle-orm";
import {

@@ -11,6 +11,10 @@ customType,

import { z } from "zod";
import { isSerializedSQL } from "./types.js";
import { createRemoteDatabaseClient, createLocalDatabaseClient } from "./db-client.js";
function hasPrimaryKey(field) {
return "primaryKey" in field && !!field.primaryKey;
return "primaryKey" in field.schema && !!field.schema.primaryKey;
}
const NOW = sql`CURRENT_TIMESTAMP`;
const TRUE = sql`TRUE`;
const FALSE = sql`FALSE`;
const dateType = customType({

@@ -67,5 +71,5 @@ dataType() {

c = text(fieldName);
if (field.default !== void 0)
c = c.default(field.default);
if (field.primaryKey === true)
if (field.schema.default !== void 0)
c = c.default(handleSerializedSQL(field.schema.default));
if (field.schema.primaryKey === true)
c = c.primaryKey();

@@ -76,6 +80,6 @@ break;

c = integer(fieldName);
if (field.default !== void 0)
c = c.default(field.default);
if (field.primaryKey === true)
c = c.primaryKey({ autoIncrement: true });
if (field.schema.default !== void 0)
c = c.default(handleSerializedSQL(field.schema.default));
if (field.schema.primaryKey === true)
c = c.primaryKey();
break;

@@ -85,4 +89,4 @@ }

c = integer(fieldName, { mode: "boolean" });
if (field.default !== void 0)
c = c.default(field.default);
if (field.schema.default !== void 0)
c = c.default(handleSerializedSQL(field.schema.default));
break;

@@ -92,4 +96,4 @@ }

c = jsonType(fieldName);
if (field.default !== void 0)
c = c.default(field.default);
if (field.schema.default !== void 0)
c = c.default(field.schema.default);
break;

@@ -99,13 +103,14 @@ case "date": {

c = text(fieldName);
if (field.default !== void 0) {
c = c.default(field.default === "now" ? sql`CURRENT_TIMESTAMP` : field.default);
if (field.schema.default !== void 0) {
c = c.default(handleSerializedSQL(field.schema.default));
}
} else {
c = dateType(fieldName);
if (field.default !== void 0) {
if (field.schema.default !== void 0) {
const def = handleSerializedSQL(field.schema.default);
c = c.default(
field.default === "now" ? sql`CURRENT_TIMESTAMP` : (
def instanceof SQL ? def : (
// default comes pre-transformed to an ISO string for D1 storage.
// parse back to a Date for Drizzle.
z.coerce.date().parse(field.default)
z.coerce.date().parse(field.schema.default)
)

@@ -118,13 +123,23 @@ );

}
if (!field.optional)
if (!field.schema.optional)
c = c.notNull();
if (field.unique)
if (field.schema.unique)
c = c.unique();
return c;
}
function handleSerializedSQL(def) {
if (isSerializedSQL(def)) {
return sql.raw(def.sql);
}
return def;
}
export {
FALSE,
NOW,
TRUE,
collectionToTable,
createLocalDatabaseClient,
createRemoteDatabaseClient,
hasPrimaryKey
hasPrimaryKey,
sql
};

@@ -46,3 +46,3 @@ import type { ColumnDataType, ColumnBaseConfig } from 'drizzle-orm';

export type Column<T extends DBField['type'], S extends GeneratedConfig> = T extends 'boolean' ? AstroBoolean<S> : T extends 'number' ? AstroNumber<S> : T extends 'text' ? AstroText<S> : T extends 'date' ? AstroDate<S> : T extends 'json' ? AstroJson<S> : never;
export type Table<TTableName extends string, TFields extends Record<string, Pick<DBField, 'type' | 'default' | 'optional'>>> = SQLiteTableWithColumns<{
export type Table<TTableName extends string, TFields extends Record<string, Pick<DBField, 'type' | 'schema'>>> = SQLiteTableWithColumns<{
name: TTableName;

@@ -55,7 +55,17 @@ schema: undefined;

name: K;
hasDefault: TFields[K]['default'] extends undefined ? false : true;
notNull: TFields[K]['optional'] extends true ? false : true;
hasDefault: TFields[K] extends {
default: NonNullable<unknown>;
} ? true : TFields[K] extends {
primaryKey: true;
} ? true : false;
notNull: TFields[K]['schema']['optional'] extends true ? false : true;
}>;
};
}>;
export declare const SERIALIZED_SQL_KEY = "__serializedSQL";
export type SerializedSQL = {
[SERIALIZED_SQL_KEY]: true;
sql: string;
};
export declare function isSerializedSQL(value: any): value is SerializedSQL;
export {};
{
"name": "@astrojs/db",
"version": "0.2.2",
"version": "0.3.0",
"description": "",

@@ -55,2 +55,4 @@ "license": "MIT",

"nanoid": "^5.0.1",
"open": "^10.0.3",
"ora": "^7.0.1",
"prompts": "^2.4.2",

@@ -61,8 +63,8 @@ "yargs-parser": "^21.1.1",

"devDependencies": {
"@types/chai": "^4.3.6",
"@types/deep-diff": "^1.0.5",
"@types/diff": "^5.0.8",
"@types/yargs-parser": "^21.0.3",
"@types/chai": "^4.3.6",
"@types/mocha": "^10.0.2",
"@types/prompts": "^2.4.8",
"@types/yargs-parser": "^21.0.3",
"chai": "^4.3.10",

@@ -69,0 +71,0 @@ "cheerio": "1.0.0-rc.12",

(Diff of one remaining file omitted: too large to display.)
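Taken together, the most mechanical change running through this diff is the move from flat field configs to a nested `schema` object, with SQL defaults carried as serialized markers. A rough before/after sketch, inferred from the `baseDefaults`/`createField` and `sqlSchema` code above rather than from package documentation:

import { field, NOW } from '@astrojs/db';

// 0.2.2: helpers spread flat defaults, e.g.
//   field.text() -> { type: 'text', multiline: false, optional: false, unique: false, label: undefined, default: undefined }
// 0.3.0: createField() nests options under `schema`, e.g.
//   field.text({ optional: true }) -> { type: 'text', schema: { optional: true } }
// so code that read field.optional / field.default now reads field.schema.optional / field.schema.default.

// SQL defaults (NOW, TRUE, FALSE, or any drizzle `sql` value) stay SQL objects here
// and are serialized when collectionsSchema parses the config:
const updatedAt = field.date({ default: NOW });
//   -> parsed to { type: 'date', schema: { default: { __serializedSQL: true, sql: 'CURRENT_TIMESTAMP' } } }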
