New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@stately-cloud/schema

Package Overview
Dependencies
Maintainers
0
Versions
20
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@stately-cloud/schema - npm Package Compare versions

Comparing version 0.7.0 to 0.8.0

dist/errors.d.ts

11

dist/cli.js
#!/usr/bin/env node
import { build } from "./driver.js";
if (!(await build(process.argv[2], process.argv[3]))) {
try {
await build(process.argv[2], process.argv[3]);
}
catch (e) {
if ((e && typeof e === "string") || e instanceof Error) {
process.stderr.write(e.toString());
}
else {
process.stderr.write("An error occurred");
}
process.exit(1);
}
//# sourceMappingURL=cli.js.map

12

dist/driver.d.ts
/**
* The build function is used by the CLI to build a binary FileDescriptorSet
* file from input TypeScript files. The TypeScript files should include public
* exports of the types that should be included in the schema.
* The build function is used by the CLI to build a binary DSLResponse file from
* input TypeScript files. The TypeScript files should include public exports of
* the types that should be included in the schema.
*
* The result will be printed to stdout as a binary proto FileDescriptorSet.
* The result will be printed to stdout as a binary proto DSLResponse. Any
* errors should also be returned in a binary proto DSLResponse, unless there's
* an error writing to stdout.
*

@@ -11,2 +13,2 @@ * @example

*/
export declare function build(inputPath: string, fileName: string): Promise<boolean>;
export declare function build(inputPath: string, fileName: string): Promise<void>;
import { create, toBinary } from "@bufbuild/protobuf";
import { FileDescriptorSetSchema } from "@bufbuild/protobuf/wkt";
import path from "node:path";

@@ -7,9 +6,16 @@ import process from "node:process";

import ts from "typescript";
import packageJson from "../package.json" with { type: "json" };
import { SchemaError } from "./errors.js";
import { StatelyErrorDetailsSchema } from "./errors/error_details_pb.js";
import { file } from "./file.js";
import { DeferredMigration } from "./migrate.js";
import { DSLResponseSchema } from "./schemaservice/cli_pb.js";
/**
* The build function is used by the CLI to build a binary FileDescriptorSet
* file from input TypeScript files. The TypeScript files should include public
* exports of the types that should be included in the schema.
* The build function is used by the CLI to build a binary DSLResponse file from
* input TypeScript files. The TypeScript files should include public exports of
* the types that should be included in the schema.
*
* The result will be printed to stdout as a binary proto FileDescriptorSet.
* The result will be printed to stdout as a binary proto DSLResponse. Any
* errors should also be returned in a binary proto DSLResponse, unless there's
* an error writing to stdout.
*

@@ -38,46 +44,121 @@ * @example

});
// Validate and type check the program
const allDiagnostics = ts.getPreEmitDiagnostics(program);
// const emitResult = program.emit();
// Could hook the result files and use them to add in-memory modules: https://nodejs.org/api/module.html#customization-hooks
// TODO: I guess we could walk the AST of the source files to find a mapping of JSDoc??
// TODO: I guess we could walk the AST of the source files to find a mapping
// of JSDoc?? to types/fields, and use that to write a SourceCodeInfo section
// in the FileDescriptorProto (or do our own comment representation).
// https://joshuakgoldberg.github.io/ts-api-utils/stable/types/ForEachCommentCallback.html
const errors = [];
for (const diagnostic of allDiagnostics) {
let message = "";
if (diagnostic.file) {
const { line, character } = ts.getLineAndCharacterOfPosition(diagnostic.file, diagnostic.start);
const message = ts.flattenDiagnosticMessageText(diagnostic.messageText, "\n");
process.stderr.write(`${diagnostic.file.fileName} (${line + 1},${character + 1}): ${message}`);
message = `${diagnostic.file.fileName} (${line + 1},${character + 1}): ${message}`;
}
else {
process.stderr.write(ts.flattenDiagnosticMessageText(diagnostic.messageText, "\n"));
message = ts.flattenDiagnosticMessageText(diagnostic.messageText, "\n");
}
errors.push(createError("InvalidTypeScript", message));
}
if (allDiagnostics.length) {
return false;
await respond(create(DSLResponseSchema, { errors }));
return;
}
// Import from the generated code.
let exportedValues;
try {
const exportedValues = (await tsImport(fullInputPath, import.meta.url));
// Process into a FileDescriptorProto
const fd = file(exportedValues, fileName);
const fds = create(FileDescriptorSetSchema, {
file: [fd],
});
// Export the FileDescriptorProto to the output path
const fdBytes = toBinary(FileDescriptorSetSchema, fds);
// Write the file
await new Promise((resolve, reject) =>
// use the process stdout API here because we want to block on the callback
process.stdout.write(fdBytes, (err) => (err ? reject(err) : resolve(undefined))));
exportedValues = (await tsImport(fullInputPath, import.meta.url));
}
catch (e) {
if ((e && typeof e === "string") || e instanceof Error) {
process.stderr.write(e.toString());
await respondWithError(e, "SchemaModuleInit", "An error occurred while importing your schema code.");
return;
}
const deferredMigrations = [];
const schemaTypes = [];
// The export names don't matter, only the exported values
for (const value of Object.values(exportedValues)) {
if (value instanceof DeferredMigration) {
deferredMigrations.push(value);
}
else {
process.stderr.write("An error occurred while importing the generated code.");
schemaTypes.push(value);
}
return false;
}
return true;
// Process into a FileDescriptorProto
let fd;
try {
fd = file(schemaTypes, fileName);
}
catch (e) {
if (e instanceof SchemaError) {
const output = create(DSLResponseSchema, {
errors: [createError(e.statelyCode, e.message)],
});
await respond(output);
}
else {
await respondWithError(e, "SchemaCode", "An error occurred while running your schema code.");
}
return;
}
// Collect and expand all the migrations from the latest version. In the
// future I suppose we can pass a specific from-version argument.
const latestMigrations = getLatestMigrations(deferredMigrations);
const migrations = latestMigrations.map((migration) => migration.build());
const output = create(DSLResponseSchema, {
fileDescriptor: fd,
migrations: migrations,
dslVersion: packageJson.version,
});
respond(output);
}
/**
 * Wrap a Stately error code and a human-readable message in a
 * StatelyErrorDetails proto message, suitable for embedding in the
 * `errors` field of a DSLResponse.
 * @param statelyCode - machine-readable Stately error code (e.g. "SchemaCode")
 * @param message - human-readable description of the failure
 * @returns a StatelyErrorDetails message
 */
function createError(statelyCode, message) {
    // 3 = InvalidArgument: the closest gRPC code for "your code doesn't
    // work" or "we found validation errors".
    const code = 3;
    return create(StatelyErrorDetailsSchema, { statelyCode, message, code });
}
/**
 * Report a caught exception back to the CLI caller as a DSLResponse error.
 *
 * Non-empty strings and Error instances are formatted (with a stack trace
 * when one is available) under the given Stately code and written via
 * respond(); any other thrown value is re-thrown unchanged, since there is
 * no sensible way to render it.
 * @param e - the caught value (unknown)
 * @param statelyCode - Stately error code to attach to the error details
 * @param message - context prefix describing what was being attempted
 */
async function respondWithError(e, statelyCode, message) {
    const reportable = (e && typeof e === "string") || e instanceof Error;
    if (!reportable) {
        throw e;
    }
    const stack = e instanceof Error ? e.stack : "";
    const detail = `${message}: ${e.toString()}\n${stack}`;
    await respond(create(DSLResponseSchema, {
        errors: [createError(statelyCode, detail)],
    }));
}
/**
 * Serialize a DSLResponse to binary proto and write it to stdout.
 *
 * Stamps the response with this package's version first, so the caller
 * always knows which DSL version produced it. Resolves once the write has
 * completed (or rejects with the write error).
 * @param output - the DSLResponse message to emit
 */
async function respond(output) {
    output.dslVersion = packageJson.version;
    const bytes = toBinary(DSLResponseSchema, output);
    // Use the callback form of process.stdout.write so we can block until
    // the bytes are actually flushed.
    await new Promise((resolve, reject) => {
        process.stdout.write(bytes, (err) => {
            if (err) {
                reject(err);
            } else {
                resolve(undefined);
            }
        });
    });
}
/**
 * Select only those migrations that start at the highest fromSchemaVersion.
 *
 * A single pass tracks the maximum version seen so far; finding a higher
 * version discards everything collected for older versions.
 * @param deferredMigrations - all registered DeferredMigration objects
 * @returns the subset whose fromSchemaVersion equals the maximum seen
 */
function getLatestMigrations(deferredMigrations) {
    let best = 0n;
    const winners = [];
    for (const migration of deferredMigrations) {
        if (migration.fromSchemaVersion > best) {
            best = migration.fromSchemaVersion;
            winners.length = 0; // drop migrations from older versions
            winners.push(migration);
        } else if (migration.fromSchemaVersion === best) {
            winners.push(migration);
        }
    }
    return winners;
}
//# sourceMappingURL=driver.js.map
import { FileDescriptorProto } from "@bufbuild/protobuf/wkt";
import { Deferred, Plural } from "./type-util.js";
import { SchemaType } from "./types.js";
export declare function file(exportedValues: {
[name: string]: Deferred<Plural<SchemaType>>;
}, fileName: string): FileDescriptorProto;
export declare function file(schemaTypes: Deferred<Plural<SchemaType>>[], fileName: string): FileDescriptorProto;
import { create } from "@bufbuild/protobuf";
import { FileDescriptorProtoSchema, } from "@bufbuild/protobuf/wkt";
import { SchemaError } from "./errors.js";
import { resolveDeferred, resolvePlural } from "./type-util.js";
import { resolveType } from "./types.js";
const packageName = "stately.generated";
export function file(exportedValues, fileName) {
export function file(schemaTypes, fileName) {
const fd = create(FileDescriptorProtoSchema, {

@@ -19,13 +20,16 @@ name: `${fileName || "stately"}.proto`, // TODO: for now, we pretend everything was in a single "file"

const seenTypes = new Map();
for (const [_exportName, exportValue] of Object.entries(exportedValues)) {
for (const exportValue of schemaTypes) {
const fieldConfigs = resolvePlural(resolveDeferred(exportValue));
for (const fieldConfig of fieldConfigs) {
// First check to see if this is even a field config. TODO: We could
// switch this to a class, or add a symbol type to the objects, to make
// this more robust.
if (typeof fieldConfig !== "object" ||
!("name" in fieldConfig) ||
!("parentType" in fieldConfig)) {
// Skip this, it allows exporting string/number constants and such
continue;
}
const name = fieldConfig.name;
const fqName = `.${packageName}.${name}`;
if (!("parentType" in fieldConfig)) {
// console.warn(
// `Expected ${name} to be either a FieldTypeConfig, or a function that returns FieldTypeConfig. Ignoring it.`,
// );
continue;
}
const { underlyingType } = resolveType(fieldConfig);

@@ -35,3 +39,3 @@ // Don't add the same type twice (e.g. if it's exported multiple times)

if (seenTypes.get(fqName) !== underlyingType) {
throw new Error(`Found two types with the same name: ${fqName}`);
throw new SchemaError("SchemaDuplicateType", `Found two types with the same name: ${fqName}`);
}

@@ -60,3 +64,3 @@ continue;

const shortName = field.typeName.split(".").at(-1);
throw new Error(`Type ${shortName} was not exported. Please export it (e.g. 'export const ${shortName} = ...;').`);
throw new SchemaError("SchemaUnexportedType", `Type ${shortName} was not exported. Please export it (e.g. 'export const ${shortName} = ...;').`);
}

@@ -63,0 +67,0 @@ }

@@ -9,3 +9,4 @@ /**

export { itemType, objectType, type Fields, type GroupLocalIndexConfig, type ItemTypeConfig, type PathTemplate, type PropertyPath, type TTLConfig, } from "./item-types.js";
export { migrate, type Migrator, type TypeMigrator } from "./migrate.js";
export type { Deferred, Plural } from "./type-util.js";
export { arrayOf, bool, bytes, double, durationMilliseconds, durationSeconds, int, int32, keyPath, string, timestampMicroseconds, timestampMilliseconds, timestampSeconds, type, uint, uint32, url, uuid, type SchemaType, } from "./types.js";

@@ -8,2 +8,3 @@ /**

export { itemType, objectType, } from "./item-types.js";
export { migrate } from "./migrate.js";
export { arrayOf, bool, bytes, double, durationMilliseconds, durationSeconds, int, int32, keyPath,

@@ -10,0 +11,0 @@ // mapOf,

@@ -5,3 +5,5 @@ /**

export function resolveDeferred(type) {
return typeof type === "function" ? type() : type;
// The "type as T" is a bit of a lie if `type` is a function with multiple
// arguments, but it'll be caught elsewhere
return typeof type === "function" && type.length === 0 ? type() : type;
}

@@ -8,0 +10,0 @@ /**

import { getExtension, hasExtension, isMessage } from "@bufbuild/protobuf";
import { DescriptorProtoSchema, FieldDescriptorProto_Label, FieldDescriptorProto_Type, } from "@bufbuild/protobuf/wkt";
import { SchemaError } from "./errors.js";
import { message } from "./extensions_pb.js";

@@ -39,3 +40,3 @@ import { getRegisteredType, registerType } from "./type-registry.js";

if (resolvedType.array) {
throw new Error(`${resolvedType.name} is already an array, and nested arrays are not supported. Consider making a wrapper type with 'objectType'.`);
throw new SchemaError("SchemaNestedArrays", `${resolvedType.name} is already an array, and nested arrays are not supported. Consider making a wrapper type with 'objectType'.`);
}

@@ -239,3 +240,3 @@ // omit validation rules from the array type, since we want them to apply to

if (!underlyingType) {
throw new Error(`No field type or type name found for ${type.name}`);
throw new SchemaError("SchemaUnknownType", `No field type or type name found for ${type.name}`);
}

@@ -242,0 +243,0 @@ return {

{
"name": "@stately-cloud/schema",
"version": "0.7.0",
"version": "0.8.0",
"author": "Stately Cloud <support@stately.cloud> (https://stately.cloud/)",

@@ -5,0 +5,0 @@ "description": "Schema language for StatelyDB",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc