@embracesql/postgres - npm Package Compare versions

Comparing version 0.0.13 to 0.0.14


package.json
{
"name": "@embracesql/postgres",
"version": "0.0.13",
"version": "0.0.14",
"description": "EmbraceSQL shared library for talking to postgres. Used in Node.",

@@ -13,3 +13,3 @@ "type": "module",

"dependencies": {
"@embracesql/shared": "^0.0.13",
"@embracesql/shared": "^0.0.14",
"glob": "^10.3.10",

@@ -23,3 +23,3 @@ "object-hash": "^3.0.0",

},
"gitHead": "448c86eec2b97e67e555867f7c3ca08459993d24"
"gitHead": "4f59f194e44a54194d5f8d1344a1c0adadfd4958"
}

@@ -1,2 +0,1 @@

import { PGAttributes } from "./generator/pgtype/pgattribute";
import { PGIndexes } from "./generator/pgtype/pgindex";

@@ -7,3 +6,2 @@ import { PGNamespace } from "./generator/pgtype/pgnamespace";

import { PGTypes } from "./generator/pgtype/pgtype";
import { PGTypeEnumValues } from "./generator/pgtype/pgtypeenum";
import { loadScriptsAST } from "./generator/scripts";

@@ -21,2 +19,3 @@ import { oneBasedArgumentNamefromZeroBasedIndex } from "./util";

ResultsNode,
ParametersNode,
} from "@embracesql/shared";

@@ -81,13 +80,2 @@ import pgconnectionstring from "pg-connection-string";

/**
* Type factories need metadata from the 'leaf' level ahead of time
* to join up. This allows one query per catalog table instead of
* ORM style chitter chatter.
*/
export type TypeFactoryContext = {
attributes: PGAttributes;
indexes: PGIndexes;
enumValues: PGTypeEnumValues;
};
const DEFAULT_POSTGRES_URL =

@@ -150,38 +138,3 @@ "postgres://postgres:postgres@localhost:5432/postgres";

.current_database;
// Attributes on the composite type that represent each relation (table, view, index).
const attributes = await PGAttributes.factory(sql);
// indexes are a bit more obvious in the catalog, but they need attributes ahead of time
// to join up
const indexes = await PGIndexes.factory(sql, attributes);
// enum values -- these are the individual bits of the enum, like attributes,
// but not the postgres type that is the enum itself
const enumValues = await PGTypeEnumValues.factory(sql);
// and now we have enough data to really make types
const typeCatalog = await PGTypes.factory(
{ attributes, indexes, enumValues },
sql,
);
// procs rely on types so we can create them now
const procCatalog = await PGProcs.factory({ typeCatalog }, sql);
// Tables - meaning plain old tables. The definition of tables comes from types.
// Tables are interesting because they have columns (attributes) and indexes.
const tableCatalog = await PGTables.factory(
{ attributes, indexes, enumValues, typeCatalog },
sql,
);
// and group up all the database objects by namespaces
const namespaces = PGNamespace.factory(
typeCatalog,
tableCatalog,
procCatalog,
);
// abstract database representation
// we're generating a database -- so this is the root object
const database = new DatabaseNode(databaseName);

@@ -192,2 +145,8 @@ const generationContext = {

};
// start off with the types grouped into namespaces, types will be
// referenced by other objects -- tables, indexes, procs, views...
const typeCatalog = await PGTypes.factory(sql);
// and group up all the database objects by namespaces
const namespaces = PGNamespace.factory(typeCatalog);
// ok, this is a bit tricky since - tables and types can cross namespaces

@@ -200,13 +159,27 @@ // so the first pass will set up all the schemas from catalog namespaces

});
// we know all types! -- now we need to cross link the type graph
// as types can be defined by other types, reference other types
// and be composed of other types
namespaces.forEach((n) => {
// all types in the namespace
n.types.forEach((t) => t.finalizeAST(generationContext));
});
// now we have an initial generation context that can resolve types
// time to use those types for database objects
// we now know all types -- now we have enough information to load the procs
// procs rely on types so we can create them now
const procCatalog = await PGProcs.factory({ typeCatalog }, sql);
namespaces.forEach((n) => n.pickProcs(procCatalog));
// Tables - meaning plain old tables. The definition of tables comes from types.
const tableCatalog = await PGTables.factory({ typeCatalog }, sql);
// indexes are rather table-like and come from types
const indexes = await PGIndexes.factory({ typeCatalog }, sql);
tableCatalog.tables.forEach((t) => t.pickIndexes(indexes));
namespaces.forEach((n) => n.pickTables(tableCatalog));
// AST with database schema objects - tables, columns, indexes
namespaces.forEach((n) => n.loadAST(generationContext));
// second pass now that all types are registered
namespaces.forEach((n) => {
// all types in the namespace
n.types.forEach((t) => t.finalizeAST(generationContext));
});
// second pass now that all types are associated with all objects
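
The ordering above matters: every type gets registered before any cross references are resolved. A minimal sketch of that two-pass idea, using a hypothetical registry keyed by oid (the names below are illustrative, not the package's real API):

// pass 1: register every type so later lookups can never miss
type Registered = { name: string; memberOid?: number; member?: Registered };
const typesByOid = new Map<number, Registered>();
typesByOid.set(25, { name: "text" });
typesByOid.set(1009, { name: "text_array", memberOid: 25 });
// pass 2: cross-link references (array member types, domain base types, ...)
// now that every oid resolves
for (const registered of typesByOid.values()) {
  if (registered.memberOid !== undefined) {
    registered.member = typesByOid.get(registered.memberOid);
  }
}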

@@ -238,4 +211,8 @@ // stored scripts -- load up the AST

);
// outputs
new ResultsNode(node, resultsType);
// inputs, which are optional as you can have a query with no $ variables
if (metadata.types.length) {
const PARAMETERSNode = new CompositeTypeNode(
const parametersType = new CompositeTypeNode(
PARAMETERS,

@@ -249,3 +226,3 @@ node,

new AttributeNode(
PARAMETERSNode,
parametersType,
// these don't have natural names, just positions

@@ -260,5 +237,5 @@ // so manufacture names

);
// inputs
new ParametersNode(node, parametersType);
}
// attach a formal results node
new ResultsNode(node, resultsType);
return "";

@@ -282,5 +259,3 @@ },

// ok a little odd loading this up here -- we're going to modify it later before
// we return which will allow the context being created to be passed to
// type resolvers that can parse composite and RETURNS TABLE types at runtime
// we resolve 'natural' and pseudo types for marshalling
const resolveType = (oid: number) => {

@@ -287,0 +262,0 @@ return typeCatalog.typesByOid[oid] ?? procCatalog.pseudoTypesByOid[oid];

import { Context } from "../../context";
import { groupBy } from "../../util";
import { PGTypeComposite } from "./pgtypecomposite";
import { cleanIdentifierForTypescript } from "@embracesql/shared";

@@ -41,5 +40,5 @@ import { camelCase } from "change-case";

attributesForType(catalogType: PGTypeComposite) {
attributesForType(typrelid: number) {
return (
this.attributesByRelid[catalogType.catalog.typrelid]?.sort(
this.attributesByRelid[typrelid]?.sort(
(l, r) => l.attribute.attnum - r.attribute.attnum,

@@ -46,0 +45,0 @@ ) ?? []

import { Context, PostgresTypecast } from "../../context";
import { CatalogRow } from "./pgtype";
import {

@@ -17,3 +16,5 @@ GeneratesTypeScript,

constructor(
public catalog: CatalogRow,
public oid: number,
public nspname: string,
public typname: string,
public comment = "",

@@ -26,6 +27,6 @@ ) {}

loadAST(context: GenerationContext) {
const schema = context.database.resolveSchema(this.catalog.nspname);
const schema = context.database.resolveSchema(this.nspname);
const type = new TypeNode(
this.catalog.typname,
this.typname,
schema.types,

@@ -49,9 +50,2 @@ this.oid,

/**
* The all powerful oid.
*/
get oid() {
return this.catalog.oid;
}
/**
* The default parser doesn't do much -- just echoes a string.

@@ -107,4 +101,4 @@ */

return {
to: this.catalog.oid,
from: [this.catalog.oid],
to: this.oid,
from: [this.oid],
serialize: (x) => this.serializeToPostgres(context, x),

@@ -111,0 +105,0 @@ // eslint-disable-next-line @typescript-eslint/no-unsafe-return
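
The { to, from, serialize, parse } object assembled above matches the custom type shape used by the postgres.js driver. A hedged example of wiring one in by hand, with an invented type name and oid:

import postgres from "postgres";

// hypothetical: register a custom typecast with postgres.js -- the name
// "my_enum" and the oid 16384 are made up for illustration
const sql = postgres("postgres://postgres:postgres@localhost:5432/postgres", {
  types: {
    my_enum: {
      to: 16384,
      from: [16384],
      serialize: (x: string) => x, // value -> postgres wire text
      parse: (x: string) => x, // postgres wire text -> value
    },
  },
});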

@@ -1,6 +0,3 @@

import { TypeFactoryContext } from "../../context";
import { groupBy } from "../../util";
import { PGAttribute, PGAttributes } from "./pgattribute";
import { PGCatalogType } from "./pgcatalogtype";
import { PGTypeComposite } from "./pgtypecomposite";
import { PGTypes } from "./pgtype";
import { GenerationContext, IndexNode, TableNode } from "@embracesql/shared";

@@ -18,3 +15,7 @@ import path from "path";

indisprimary: boolean;
name: string;
};
type PGIndexesContext = { typeCatalog: PGTypes };
/**

@@ -24,25 +25,17 @@ * Collect up all indexes in the postgres catalog.

export class PGIndexes {
static async factory(sql: Sql, attributes: PGAttributes) {
static async factory(context: PGIndexesContext, sql: Sql) {
const indexRows = (await sql.file(
path.join(__dirname, "pgindexes.sql"),
)) as unknown as IndexRow[];
return new PGIndexes(indexRows, attributes);
return new PGIndexes(context, indexRows);
}
indexesByTableTypeOid: Record<number, PGIndex[]>;
private constructor(indexRows: IndexRow[], attributes: PGAttributes) {
private constructor(context: PGIndexesContext, indexRows: IndexRow[]) {
this.indexesByTableTypeOid = groupBy(
indexRows,
(r) => r.tabletypeoid,
(r) => new PGIndex(r, attributes),
(r) => new PGIndex(context, r),
);
}
indexesForType(catalogType: PGCatalogType) {
return (
this.indexesByTableTypeOid[catalogType.oid]?.sort((l, r) =>
l.name.localeCompare(r.name),
) ?? []
);
}
}

@@ -56,51 +49,16 @@

export class PGIndex {
attributes: PGAttribute[];
constructor(
context: PGIndexesContext,
public index: IndexRow,
attributes: PGAttributes | PGAttribute[],
) {
if ((attributes as PGAttributes).attributesByRelid) {
this.attributes = (attributes as PGAttributes).attributesByRelid[
index.indexrelid
];
} else {
this.attributes = attributes as PGAttribute[];
}
}
) {}
loadAST(context: GenerationContext, table: TableNode) {
new IndexNode(
this.index.name,
table,
this.name,
this.index.indisunique,
this.index.indisprimary,
this.attributes.map((a) => {
const typeNode = context.database.resolveType(a.attribute.atttypid)!;
return { name: a.name, type: typeNode };
}),
context.database.resolveType(this.index.indexrelid),
);
}
get name() {
return `by_${this.attributes.map((a) => a.typescriptName).join("_")}`;
}
get primaryKey() {
return this.index.indisprimary;
}
/**
* Rewrite the index in terms of the base type attributes in order to
* pick up type constraints and flags that are missing on the index
* attributes themselves.
*
* But there are also computed index components that are not actually in
* the base table. So while this adds a bit of extra complexity, there is a reason.
*/
translateAttributes(context: TypeFactoryContext, forType: PGTypeComposite) {
const translatedAttributes = context.attributes.attributesByRelid[
this.index.indexrelid
].map((r) => forType.attributes.find((a) => a.name === r.name) ?? r);
return new PGIndex(this.index, translatedAttributes);
}
}

@@ -17,44 +17,47 @@ import { groupBy } from "../../util";

*/
static factory(
typeCatalog: PGTypes,
tableCatalog: PGTables,
procCatalog: PGProcs,
) {
static factory(typeCatalog: PGTypes) {
const typesByNamespace = groupBy(
typeCatalog.types,
(r) => r.catalog.nspname,
(r) => r.nspname,
(r) => r,
);
const tablesByNamespace = groupBy(
tableCatalog.tables,
(r) => r.table.nspname,
(r) => r,
);
const procsByNamespace = groupBy(
procCatalog.procs,
(r) => r.proc.nspname,
(r) => r,
);
return Object.keys(typesByNamespace).map((namespace): PGNamespace => {
return new PGNamespace(
namespace,
[
...typesByNamespace[namespace],
...(procsByNamespace[namespace] ?? [])
.filter((p) => p.returnsPseudoType)
.map((p) => new PGProcPseudoType(p)),
] ?? [],
tablesByNamespace[namespace] ?? [],
procsByNamespace[namespace] ?? [],
);
return new PGNamespace(namespace, typesByNamespace[namespace]);
});
}
_procs: PGProc[] = [];
_tables: PGTable[] = [];
_types: PGCatalogType[];
_pseudoTypes: PGCatalogType[] = [];
constructor(
public namespace: string,
public types: PGCatalogType[],
public tables: PGTable[],
public procs: PGProc[],
) {}
types: PGCatalogType[],
) {
this._types = types;
}
get procs() {
return this._procs;
}
pickProcs(procs: PGProcs) {
this._procs = procs.procsByNamespace[this.namespace] ?? [];
this._pseudoTypes = this._procs
.filter((p) => p.returnsPseudoType)
.map((p) => new PGProcPseudoType(p));
}
get tables() {
return this._tables;
}
pickTables(tables: PGTables) {
this._tables = tables.tablesByNamespace[this.namespace] ?? [];
}
get types() {
return [...this._types, ...this._pseudoTypes];
}
loadAST(context: GenerationContext) {

@@ -61,0 +64,0 @@ const schema = context.database.resolveSchema(this.nspname);
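
A namespace now starts with types only; procs, tables, and proc pseudo types are attached later through the pick* methods once their catalogs exist, which removes the need to load everything before constructing namespaces. A minimal sketch of that deferred wiring pattern (class and member names are illustrative, not the package's real API):

class Namespace {
  private procs: string[] = [];
  constructor(
    public readonly name: string,
    public readonly types: string[],
  ) {}
  // called later, once the proc catalog has been loaded
  pickProcs(procsByNamespace: Record<string, string[]>) {
    this.procs = procsByNamespace[this.name] ?? [];
  }
  get procCount() {
    return this.procs.length;
  }
}

const ns = new Namespace("public", ["users_row"]);
ns.pickProcs({ public: ["create_user", "delete_user"] });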

import { Context, PostgresProcTypecast } from "../../../context";
import { groupBy } from "../../../util";
import { PGCatalogType } from "../pgcatalogtype";

@@ -14,2 +15,3 @@ import { PGTypes } from "../pgtype";

ASTKind,
ParametersNode,
} from "@embracesql/shared";

@@ -57,2 +59,3 @@ import { camelCase } from "change-case";

procs: PGProc[];
procsByNamespace: Record<string, PGProc[]>;
pseudoTypesByOid: Record<number, PGProcPseudoType>;

@@ -64,2 +67,3 @@ private constructor(context: PGProcsContext, procRows: ProcRow[]) {

);
this.procsByNamespace = groupBy(this.procs, (p) => p.nspname);
}

@@ -71,10 +75,12 @@

const procsNode = schemaNode.procedures;
const returnsAttributes = new PGProcPseudoType(proc).pseudoTypeAttributes(
context,
);
const procPseudoType = this.pseudoTypesByOid[proc.proc.oid];
const returnsAttributes = procPseudoType.pseudoTypeAttributes(context);
// by the time we are generating procedures, we have already made
// a first pass over types, so the result type should be available
// hence the !
const procReturnType = (() => {
if (returnsAttributes.length === 1) {
const resultsType = (() => {
if (returnsAttributes.length === 0) {
// this has a plain single type return type
return context.database.resolveType(proc.proc.prorettype);
} else if (returnsAttributes.length === 1) {
// just need the single type as is

@@ -85,10 +91,7 @@ // single attribute, table of one column which is

} else {
// resolve the pseudo type composite for the proc to
// contain multiple attributes -- table like results
return context.database.resolveType(
proc.returnsPseudoType ? proc.proc.oid : proc.proc.prorettype,
)!;
// really using this pseudo type, so register it with the AST
return procPseudoType.loadAST(context);
}
})();
const procNode = new ProcedureNode(
const node = new ProcedureNode(
proc.name,

@@ -99,12 +102,16 @@ procsNode,

proc.returnsPseudoType || proc.returnsSet,
procReturnType.kind === ASTKind.CompositeType,
resultsType.kind === ASTKind.CompositeType,
proc.returnsPseudoType,
);
// outputs
new ResultsNode(node, resultsType);
// inputs
const parametersNode = new CompositeTypeNode(
// inputs -- which may have no attributes
// this type won't exist in the database catalog - we're treating
// the parameters, which arrive as a flat argument list,
// as a single structured 'parameter' object
const parametersType = new CompositeTypeNode(
PARAMETERS,
procNode,
"",
proc.comment,
node,
"", // no identifier, this is not a type in the database
);

@@ -117,3 +124,3 @@

new AttributeNode(
parametersNode,
parametersType,
proc.proc.proargnames[i]

@@ -129,4 +136,4 @@ ? proc.proc.proargnames[i]

});
// outputs
new ResultsNode(procNode, procReturnType);
// inputs
new ParametersNode(node, parametersType);
}

@@ -214,35 +221,20 @@ }

constructor(public proc: PGProc) {
super({
oid: proc.proc.oid,
nspname: proc.proc.nspname,
typname: `${proc.proc.proname}_results`,
typbasetype: 0,
typelem: 0,
rngsubtype: 0,
typcategory: "",
typoutput: "",
typrelid: 0,
typtype: "",
});
super(proc.proc.oid, proc.proc.nspname, `${proc.proc.proname}_results`);
}
loadAST(context: GenerationContext) {
const schema = context.database.resolveSchema(this.catalog.nspname);
const schema = context.database.resolveSchema(this.nspname);
const returnsAttributes = this.pseudoTypeAttributes(context);
if (returnsAttributes.length === 1) {
// single attribute will already have a type available
// no need to register
} else {
// multiple attributes creates a composite
const type = new CompositeTypeNode(
this.proc.name,
schema.types,
this.oid,
this.comment,
);
returnsAttributes.forEach(
(a, i) => new AttributeNode(type, a.name, i, a.type, true, true),
);
context.database.registerType(type.id, type);
}
// multiple attributes creates a composite
const type = new CompositeTypeNode(
this.proc.name,
schema.types,
this.oid,
this.comment,
);
returnsAttributes.forEach(
(a, i) => new AttributeNode(type, a.name, i, a.type, true, true),
);
context.database.registerType(type.id, type);
return type;
}

@@ -249,0 +241,0 @@

@@ -1,3 +0,3 @@

import { TypeFactoryContext } from "../../context";
import { PGIndex } from "./pgindex";
import { groupBy } from "../../util";
import { PGIndex, PGIndexes } from "./pgindex";
import { PGTypes } from "./pgtype";

@@ -23,3 +23,3 @@ import { PGTypeComposite } from "./pgtypecomposite";

type PGTablesContext = TypeFactoryContext & { typeCatalog: PGTypes };
type PGTablesContext = { typeCatalog: PGTypes };

@@ -43,4 +43,6 @@ /**

tables: PGTable[];
tablesByNamespace: Record<string, PGTable[]>;
private constructor(context: PGTablesContext, tableRows: TableRow[]) {
this.tables = tableRows.map((r) => new PGTable(context, r));
this.tablesByNamespace = groupBy(this.tables, (t) => t.table.nspname);
}

@@ -53,3 +55,3 @@ }

export class PGTable {
indexes: PGIndex[];
_indexes: PGIndex[] = [];
tableType: PGTypeComposite;

@@ -60,4 +62,2 @@ constructor(

) {
this.indexes =
context.indexes.indexesByTableTypeOid[table.tabletypeoid] ?? [];
this.tableType = context.typeCatalog.typesByOid[

@@ -68,2 +68,10 @@ table.tabletypeoid

get indexes() {
return this._indexes;
}
pickIndexes(indexes: PGIndexes) {
this._indexes = indexes.indexesByTableTypeOid[this.tableType.oid] ?? [];
}
loadAST(context: GenerationContext, tables: TablesNode) {

@@ -70,0 +78,0 @@ const table = new TableNode(

@@ -1,4 +0,4 @@

import { TypeFactoryContext } from "../../context";
import { PGTypeText } from "./base/text";
import { overrides } from "./overrides";
import { PGAttributes } from "./pgattribute";
import { PGCatalogType } from "./pgcatalogtype";

@@ -9,3 +9,3 @@ import { PGTypeArray } from "./pgtypearray";

import { PGTypeDomain } from "./pgtypedomain";
import { PGTypeEnum } from "./pgtypeenum";
import { PGTypeEnum, PGTypeEnumValues } from "./pgtypeenum";
import { PGTypeRange } from "./pgtyperange";

@@ -19,2 +19,12 @@ import path from "path";

/**
* Type factories need metadata from the 'leaf' level ahead of time
* to join up. This allows one query per catalog table instead of
* ORM style chitter chatter.
*/
export type TypeFactoryContext = {
attributes: PGAttributes;
enumValues: PGTypeEnumValues;
};
/**
* Database row for types in the pg catalog. All types - tables, views, indexes

@@ -44,5 +54,10 @@ * enums, domains, composites, ranges all have a corresponding pg_type.

export class PGTypes {
static async factory(context: TypeFactoryContext, sql: Sql) {
static async factory(sql: Sql) {
// Attributes on the composite type that represent each relation (table, view, index).
const attributes = await PGAttributes.factory(sql);
// enum values -- these are the individual bits of the enum, like attributes,
// but not the postgres type that is the enum itself
const enumValues = await PGTypeEnumValues.factory(sql);
return new PGTypes(
context,
{ attributes, enumValues },
(await sql.file(
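
This keeps the "one query per catalog table" approach while moving it inside PGTypes.factory: attribute and enum rows are pulled in single catalog queries and grouped in memory, instead of being preloaded by the caller. A rough sketch of the idea; the SQL below is illustrative, not the pgattributes.sql the package actually ships:

import postgres from "postgres";

const sql = postgres("postgres://postgres:postgres@localhost:5432/postgres");
// one round trip for the whole pg_attribute catalog
const attributeRows = (await sql`
  SELECT attrelid, attname, atttypid, attnum
  FROM pg_catalog.pg_attribute
  WHERE attnum > 0 AND NOT attisdropped
`) as unknown as {
  attrelid: number;
  attname: string;
  atttypid: number;
  attnum: number;
}[];
// group in memory so later joins are dictionary lookups, not more queries
const attributesByRelid = new Map<number, typeof attributeRows>();
for (const row of attributeRows) {
  const group = attributesByRelid.get(row.attrelid) ?? [];
  group.push(row);
  attributesByRelid.set(row.attrelid, group);
}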

@@ -70,3 +85,4 @@ path.join(__dirname, "pgtypes.sql"),

const cons = overrides.get(catalog.typname);
if (cons) return new cons(catalog);
if (cons)
return new cons(catalog.oid, catalog.nspname, catalog.typname, "");
}

@@ -78,9 +94,49 @@ // there are 'odd' base types that are arrays of scalars but when you

return PGTypeBase.factory(context, catalog);
if (catalog.typname === "name") return new PGTypeText(catalog);
if (catalog.typname === "name")
return new PGTypeText(catalog.oid, catalog.nspname, catalog.typname, "");
if (catalog.typtype === "c") return new PGTypeComposite(context, catalog);
if (catalog.typtype === "e") return new PGTypeEnum(context, catalog);
if (catalog.typtype === "d") return new PGTypeDomain(context, catalog);
if (catalog.typelem > 0) return new PGTypeArray(context, catalog);
if (catalog.rngsubtype > 0) return new PGTypeRange(context, catalog);
if (catalog.typtype === "c")
return new PGTypeComposite(
context,
catalog.oid,
catalog.nspname,
catalog.typname,
"",
catalog.typrelid,
);
if (catalog.typtype === "e")
return new PGTypeEnum(
context,
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
if (catalog.typtype === "d")
return new PGTypeDomain(
context,
catalog.oid,
catalog.nspname,
catalog.typname,
"",
catalog.typbasetype,
);
if (catalog.typelem > 0)
return new PGTypeArray(
context,
catalog.oid,
catalog.nspname,
catalog.typname,
"",
catalog.typelem,
);
if (catalog.rngsubtype > 0)
return new PGTypeRange(
context,
catalog.oid,
catalog.nspname,
catalog.typname,
"",
catalog.rngsubtype,
);
// this is last on purpose -- typelem and rngsubtype are more discriminating

@@ -90,4 +146,4 @@ if (catalog.typtype === "b") return PGTypeBase.factory(context, catalog);

// default through to no type at all -- void type
return new PGType(catalog);
return new PGType(catalog.oid, catalog.nspname, catalog.typname, "");
}
}

@@ -1,4 +0,4 @@

import { Context, TypeFactoryContext } from "../../context";
import { Context } from "../../context";
import { PGCatalogType } from "./pgcatalogtype";
import { CatalogRow } from "./pgtype";
import { TypeFactoryContext } from "./pgtype";
import {

@@ -22,13 +22,18 @@ ArrayTypeNode,

context: TypeFactoryContext,
catalog: CatalogRow,
oid: number,
nspname: string,
typname: string,
comment: string,
private typelem: number,
private props: Props = { arraySuffix: true },
) {
super(catalog);
console.assert(context);
super(oid, nspname, typname, comment);
}
loadAST(context: GenerationContext) {
const schema = context.database.resolveSchema(this.catalog.nspname);
const schema = context.database.resolveSchema(this.nspname);
const type = new ArrayTypeNode(
`${this.catalog.typname}${this.props.arraySuffix ? "_array" : ""}`,
`${this.typname}${this.props.arraySuffix ? "_array" : ""}`,
schema.types,

@@ -45,4 +50,4 @@ this.oid,

// graph, not a strict tree
const memberType = context.database.resolveType(this.catalog.typelem);
context.database.resolveType<ArrayTypeNode>(this.catalog.oid).memberType =
const memberType = context.database.resolveType(this.typelem);
context.database.resolveType<ArrayTypeNode>(this.oid).memberType =
memberType;

@@ -57,3 +62,3 @@ }

const elements = x as any[];
const elementType = context.resolveType(this.catalog.typelem);
const elementType = context.resolveType(this.typelem);
const attributes = elements.map((e) => {

@@ -77,3 +82,3 @@ // hand off to the serializer

const elements = arrayAttribute.tryParse(x);
const elementType = context.resolveType(this.catalog.typelem);
const elementType = context.resolveType(this.typelem);
// eslint-disable-next-line @typescript-eslint/no-unsafe-return

@@ -80,0 +85,0 @@ return elements.map((e) =>

@@ -1,2 +0,1 @@

import { TypeFactoryContext } from "../../context";
import { PGTypeBool } from "./base/bool";

@@ -7,3 +6,3 @@ import { PGTypeBigInt, PGTypeBytea, PGTypeNumber } from "./base/number";

import { PGCatalogType } from "./pgcatalogtype";
import { CatalogRow } from "./pgtype";
import { CatalogRow, TypeFactoryContext } from "./pgtype";
import { PGTypeArray } from "./pgtypearray";

@@ -21,68 +20,214 @@ import { GenerationContext } from "@embracesql/shared";

case "tid":
return new PGTypeTid(catalog);
return new PGTypeTid(catalog.oid, catalog.nspname, catalog.typname, "");
case "xml":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "name":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "gtrgm":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "text":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "char":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "varchar":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "bool":
return new PGTypeBool(catalog);
return new PGTypeBool(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "bit":
return new PGTypeBool(catalog);
return new PGTypeBool(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "varbit":
return new PGTypeBool(catalog);
return new PGTypeBool(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "macaddr":
return new PGTypeBytea(catalog);
return new PGTypeBytea(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "macaddr8":
return new PGTypeBytea(catalog);
return new PGTypeBytea(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "cidr":
return new PGTypeBytea(catalog);
return new PGTypeBytea(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "bytea":
return new PGTypeBytea(catalog);
return new PGTypeBytea(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "inet":
return new PGTypeInet(catalog);
return new PGTypeInet(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "jsonpath":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "aclitem":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "bpchar":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "pg_lsn":
return new PGTypeBigInt(catalog);
return new PGTypeBigInt(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "tsvector":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "gtsvector":
return new PGTypeTextArray(catalog);
return new PGTypeTextArray(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "tsquery":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "uri":
return new PGTypeUri(catalog);
return new PGTypeUri(catalog.oid, catalog.nspname, catalog.typname, "");
case "cid":
return new PGTypeNumber(catalog);
return new PGTypeNumber(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "xid":
return new PGTypeNumber(catalog);
return new PGTypeNumber(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "xid8":
return new PGTypeNumber(catalog);
return new PGTypeNumber(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "oidvector":
return new PGTypeArray(context, catalog, { arraySuffix: false });
return new PGTypeArray(
context,
catalog.oid,
catalog.nspname,
catalog.typname,
"",
catalog.typelem,
{ arraySuffix: false },
);
default:
// if the db wants to code it as text, so do we
if (catalog.typoutput === "textout") return new PGTypeText(catalog);
if (catalog.typoutput === "textout")
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
// categorical type mappings form a backstop
switch (catalog.typcategory) {
case "N":
return new PGTypeNumber(catalog);
return new PGTypeNumber(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
case "S":
return new PGTypeText(catalog);
return new PGTypeText(
catalog.oid,
catalog.nspname,
catalog.typname,
"",
);
default:
// TODO: handle types
return new PGTypeText(
catalog,
catalog.oid,
catalog.nspname,
catalog.typname,
`FIXME: ${catalog.typname} did not resolve to a base type`,

@@ -89,0 +234,0 @@ );

@@ -1,6 +0,5 @@

import { Context, TypeFactoryContext } from "../../context";
import { Context } from "../../context";
import { PGAttribute } from "./pgattribute";
import { PGCatalogType } from "./pgcatalogtype";
import { PGIndex } from "./pgindex";
import { CatalogRow } from "./pgtype";
import { TypeFactoryContext } from "./pgtype";
import {

@@ -24,21 +23,21 @@ AttributeNode,

attributes: PGAttribute[];
indexes: PGIndex[];
constructor(context: TypeFactoryContext, catalog: CatalogRow) {
super(catalog);
constructor(
context: TypeFactoryContext,
oid: number,
nspname: string,
typname: string,
comment: string,
public typrelid: number,
) {
super(oid, nspname, typname, comment);
this.attributes = context.attributes.attributesForType(this);
// translate the attributes on the index into attributes on the type
// this is needed to properly pick up constraints which are on the type
// for the base table but are not on the type for the index
this.indexes = context.indexes
.indexesForType(this)
.map((i) => i.translateAttributes(context, this));
this.attributes = context.attributes.attributesForType(typrelid);
}
loadAST(context: GenerationContext) {
const schema = context.database.resolveSchema(this.catalog.nspname);
const schema = context.database.resolveSchema(this.nspname);
// there is no guarantee that the types of the attributes are loaded yet
const type = new CompositeTypeNode(
this.catalog.typname,
this.typname,
schema.types,

@@ -57,3 +56,3 @@ this.oid,

throw new Error(
`${this.catalog.typname} cannot find type for ${a.attribute.attname} ${a.attribute.atttypid}`,
`${this.typname} cannot find type for ${a.attribute.attname} ${a.attribute.atttypid}`,
);

@@ -72,34 +71,2 @@ }

get hasPrimaryKey() {
return this.primaryKey !== undefined;
}
get hasPrimaryKeyDefault() {
const primaryKey = this.indexes.find((i) => i.primaryKey);
if (primaryKey !== undefined) {
// check all the primary key attributes
return (
primaryKey.attributes.filter((a) => a.hasDefault).length ===
primaryKey.attributes.length
);
}
//fallthrough
return false;
}
get primaryKey() {
return this.indexes.find((i) => i.primaryKey);
}
get primaryKeyAttributes() {
const primaryKey = this.indexes.find((i) => i.primaryKey);
return primaryKey?.attributes ?? [];
}
get notPrimaryKeyAttributes() {
const primaryKey = this.indexes.find((i) => i.primaryKey);
const notIt = primaryKey?.attributes.map((a) => a.name) ?? [];
return this.attributes.filter((a) => !notIt.includes(a.name));
}
attributeByAttnum(attnum: number) {

@@ -106,0 +73,0 @@ // yep -- postgres is one based

@@ -1,4 +0,4 @@

import { Context, TypeFactoryContext } from "../../context";
import { Context } from "../../context";
import { PGCatalogType } from "./pgcatalogtype";
import { CatalogRow } from "./pgtype";
import { TypeFactoryContext } from "./pgtype";
import { DomainTypeNode, GenerationContext } from "@embracesql/shared";

@@ -12,12 +12,20 @@

export class PGTypeDomain extends PGCatalogType {
constructor(context: TypeFactoryContext, catalog: CatalogRow) {
super(catalog);
constructor(
context: TypeFactoryContext,
oid: number,
nspname: string,
typname: string,
comment: string,
public typbasetype: number,
) {
console.assert(context);
super(oid, nspname, typname, comment);
}
loadAST(context: GenerationContext) {
const schema = context.database.resolveSchema(this.catalog.nspname);
const schema = context.database.resolveSchema(this.nspname);
// there is no guarantee that the types of the attributes are loaded yet
const type = new DomainTypeNode(
this.catalog.typname,
this.typname,
schema.types,

@@ -32,3 +40,3 @@ this.oid,

const typeNode = context.database.resolveType<DomainTypeNode>(this.oid);
typeNode.baseType = context.database.resolveType(this.catalog.typbasetype);
typeNode.baseType = context.database.resolveType(this.typbasetype);
}

@@ -38,3 +46,3 @@

return `${
context.database.resolveType(this.catalog.typbasetype)
context.database.resolveType(this.typbasetype)
?.typescriptNamespacedName ?? "void"

@@ -48,5 +56,3 @@ }`;

// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return context
.resolveType(this.catalog.typbasetype)
.parseFromPostgres(context, x);
return context.resolveType(this.typbasetype).parseFromPostgres(context, x);
}

@@ -58,5 +64,5 @@

return context
.resolveType(this.catalog.typbasetype)
.resolveType(this.typbasetype)
.serializeToPostgres(context, x);
}
}

@@ -1,5 +0,4 @@

import { TypeFactoryContext } from "../../context";
import { groupBy } from "../../util";
import { PGCatalogType } from "./pgcatalogtype";
import { CatalogRow } from "./pgtype";
import { TypeFactoryContext } from "./pgtype";
import { EnumTypeNode, GenerationContext } from "@embracesql/shared";

@@ -42,5 +41,12 @@ import path from "path";

constructor(context: TypeFactoryContext, catalog: CatalogRow) {
super(catalog);
this.values = context.enumValues.enumValuesByTypeId[catalog.oid];
constructor(
context: TypeFactoryContext,
oid: number,
nspname: string,
typname: string,
comment: string,
) {
console.assert(context);
super(oid, nspname, typname, comment);
this.values = context.enumValues.enumValuesByTypeId[oid];
this.values.toSorted((l, r) => l.enumsortorder - r.enumsortorder);

@@ -50,6 +56,6 @@ }

loadAST(context: GenerationContext) {
const schema = context.database.resolveSchema(this.catalog.nspname);
const schema = context.database.resolveSchema(this.nspname);
const type = new EnumTypeNode(
this.catalog.typname,
this.typname,
this.values.map((v) => v.enumlabel),

@@ -56,0 +62,0 @@ schema.types,

@@ -1,4 +0,3 @@

import { TypeFactoryContext } from "../../context";
import { PGCatalogType } from "./pgcatalogtype";
import { CatalogRow } from "./pgtype";
import { TypeFactoryContext } from "./pgtype";
import { GenerationContext } from "@embracesql/shared";

@@ -11,4 +10,11 @@

export class PGTypeRange extends PGCatalogType {
constructor(context: TypeFactoryContext, catalog: CatalogRow) {
super(catalog);
constructor(
context: TypeFactoryContext,
oid: number,
nspname: string,
typname: string,
comment: string,
public rngsubtype: number,
) {
super(oid, nspname, typname, comment);
}

@@ -19,3 +25,3 @@

// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const type = context.database.resolveType(this.catalog.rngsubtype)!;
const type = context.database.resolveType(this.rngsubtype)!;
// an array with two elements is the nearest-to-a-tuple in JS

@@ -22,0 +28,0 @@ return `[${type.typescriptNamespacedName}, ${type.typescriptNamespacedName}]`;

@@ -18,3 +18,3 @@ import { postgresToTypescript, sqlPredicate } from "./shared";

async before(context: GenerationContext, node: DeleteOperationNode) {
const parameters = `${PARAMETERS}: ${node.index.typescriptNamespacedName}`;
const parameters = `${PARAMETERS}: ${node.index.type.typescriptNamespacedName}`;
const sqlColumnNames = node.index.table.type.attributes

@@ -21,0 +21,0 @@ .map((a) => a.name)

@@ -17,3 +17,3 @@ import { postgresToTypescript, sqlPredicate } from "./shared";

const generationBuffer = [""];
const parameters = `${PARAMETERS}: ${node.index.typescriptNamespacedName}`;
const parameters = `${PARAMETERS}: ${node.index.type.typescriptNamespacedName}`;
const returns = node.index.unique

@@ -20,0 +20,0 @@ ? `Promise<${node.index.table.type.typescriptNamespacedName}>`

@@ -99,3 +99,3 @@ /**

) {
return node.columns
return node.type.attributes
.map((a) => `${a.name} = ${postgresValueExpression(context, a, holder)}`)

@@ -102,0 +102,0 @@ .join(" AND ");

@@ -24,3 +24,3 @@ import {

const generationBuffer = [""];
const parameters = `${PARAMETERS}: ${node.index.typescriptNamespacedName}, ${VALUES}: Partial<${node.index.table.typescriptNamespacedName}.Values>`;
const parameters = `${PARAMETERS}: ${node.index.type.typescriptNamespacedName}, ${VALUES}: Partial<${node.index.table.typescriptNamespacedName}.Values>`;
const returns = node.index.unique

@@ -27,0 +27,0 @@ ? `Promise<${node.index.table.type.typescriptNamespacedName}>`

@@ -96,3 +96,3 @@ import {

callee.push(
`request.parameters as ${node.index.typescriptNamespacedName}`,
`request.parameters as ${node.index.type.typescriptNamespacedName}`,
);

@@ -110,3 +110,3 @@ return `"${

callee.push(
`request.parameters as ${node.index.typescriptNamespacedName}`,
`request.parameters as ${node.index.type.typescriptNamespacedName}`,
);

@@ -127,3 +127,3 @@ callee.push(

callee.push(
`request.parameters as ${node.index.typescriptNamespacedName}`,
`request.parameters as ${node.index.type.typescriptNamespacedName}`,
);

@@ -130,0 +130,0 @@ return `"${

@@ -35,3 +35,3 @@ import { GenerationContext } from "..";

generationBuffer.push(`return JSON.stringify({`);
primaryKey.columns.forEach((c) =>
primaryKey.type.attributes.forEach((c) =>
generationBuffer.push(

@@ -45,3 +45,3 @@ `${camelCase(c.name)}: value.${camelCase(c.name)},`,

const primaryKeyNames =
primaryKey.columns.map(
primaryKey.type.attributes.map(
(a) => `value.${camelCase(a.typescriptName)} !== undefined`,

@@ -48,0 +48,0 @@ ) || [];

@@ -64,7 +64,2 @@ import { GenerationContext } from "..";

handlers: {
[ASTKind.Database]: {
before: async () => {
return `// begin type definitions`;
},
},
[ASTKind.Schema]: NamespaceVisitor,

@@ -102,3 +97,3 @@ [ASTKind.Types]: NamespaceVisitor,

// exhaustive -- if there is no primary key, say so explicitly
node.primaryKey ? "" : `export type ByPrimaryKey = never;`,
node.primaryKey ? "" : `export type PrimaryKey = never;`,
// optional columns -- won't always need to pass these

@@ -116,3 +111,3 @@ // ex: database has a default

node.type.typescriptNamespacedName
}, Optional & ByPrimaryKey>`,
}, Optional & PrimaryKey>`,
await NamespaceVisitor.after(context, node),

@@ -123,19 +118,22 @@ ].join("\n");

[ASTKind.Index]: {
before: async (_, node) => `export type ${node.typescriptName} = {`,
after: async (_, node) =>
before: async (_, node) =>
[
`}`,
// alias primary key to the correct index
node.primaryKey
? `export type ByPrimaryKey = ${node.typescriptName};`
? `export type PrimaryKey = ${node.type.typescriptNamespacedName};`
: "",
].join("\n"),
},
[ASTKind.IndexColumn]: {
before: async (_, node) =>
`${node.typescriptPropertyName}: ${node.type.typescriptNamespacedName} ;`,
},
[ASTKind.Procedures]: NamespaceVisitor,
[ASTKind.Procedure]: NamespaceVisitor,
[ASTKind.CompositeType]: TypeDefiner,
[ASTKind.CompositeType]: {
// composite types are a name and AttributeNode(s) will fill the body
before: async (_, node) => {
return [
node.comment ? asDocComment(node.comment) : "",
`export type ${node.typescriptName} = {`,
].join("\n");
},
after: async () => `}`,
},
[ASTKind.DomainType]: TypeDefiner,

@@ -146,2 +144,16 @@ [ASTKind.ArrayType]: TypeDefiner,

[ASTKind.Script]: NamespaceVisitor,
[ASTKind.Attribute]: {
before: async (_, node) => {
// arrays are not nullable, they are empty arrays []
if (node.type.kind === ASTKind.ArrayType) {
return `${node.typescriptPropertyName}: ${node.type.typescriptNamespacedName};`;
}
// nullable is of course nullable
if (node.nullable) {
return `${node.typescriptPropertyName}: Nullable<${node.type.typescriptNamespacedName}>;`;
} else {
return `${node.typescriptPropertyName}: ${node.type.typescriptNamespacedName};`;
}
},
},
},

@@ -148,0 +160,0 @@ }),
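
For orientation, a hypothetical sketch of the flavor of TypeScript these handlers emit -- a composite type with a nullable column, plus the PrimaryKey alias for a table with no primary key index. Every name below is invented for illustration, and the Nullable alias is assumed to come from @embracesql/shared:

// assumed shape of the shared alias
type Nullable<T> = T | null;

// from the CompositeType handler, with one line per Attribute
export type Author = {
  id: number;
  middleName: Nullable<string>;
};

// emitted when the table has no primary key index
export type PrimaryKey = never;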

@@ -34,7 +34,2 @@ import {

handlers: {
[ASTKind.Database]: {
before: async () => {
return `// begin string parsers`;
},
},
[ASTKind.Schema]: NamespaceVisitor,

@@ -44,2 +39,3 @@ [ASTKind.Types]: NamespaceVisitor,

[ASTKind.Procedure]: NamespaceVisitor,
[ASTKind.Results]: NamespaceVisitor,
[ASTKind.Tables]: NamespaceVisitor,

@@ -46,0 +42,0 @@ [ASTKind.Table]: NamespaceVisitor,

@@ -9,3 +9,3 @@ /**

*/
export const groupBy = <T, TT, K extends string | number>(
export const groupBy = <K extends string | number, T, TT = T>(
list: T[],

@@ -12,0 +12,0 @@ key: (i: T) => K,
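
The signature change reorders the generics so the grouped value type can default to the element type. A rough sketch of how such a helper looks and is used, assuming the implementation in ../../util returns a record of arrays (which is how every call site in this diff consumes it):

export const groupBy = <K extends string | number, T, TT = T>(
  list: T[],
  key: (i: T) => K,
  value: (i: T) => TT,
): Record<K, TT[]> => {
  const groups = {} as Record<K, TT[]>;
  for (const item of list) {
    const k = key(item);
    (groups[k] ??= []).push(value(item));
  }
  return groups;
};

// usage mirroring the call sites above
const byParity = groupBy(
  [1, 2, 3, 4],
  (n) => (n % 2 === 0 ? "even" : "odd"),
  (n) => n * 10,
);
// { odd: [10, 30], even: [20, 40] }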

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet
