graphile-build-pg - npm Package Compare versions

Comparing version 5.0.0-beta.10 to 5.0.0-beta.11


dist/plugins/PgBasicsPlugin.d.ts

@@ -1,2 +0,1 @@

import "graphile-build";
import "./PgTablesPlugin.js";

@@ -7,2 +6,3 @@ import "../interfaces.js";

import type { GraphQLType } from "grafast/graphql";
import type { SQL } from "pg-sql2";
import sql from "pg-sql2";

@@ -75,5 +75,20 @@ import { getBehavior } from "../behavior.js";

}
interface GatherOptions {
/** Set to 'unqualified' to omit the schema name from table, function, and type identifiers */
pgIdentifiers?: "qualified" | "unqualified";
}
}
namespace GraphileConfig {
interface GatherHelpers {
pgBasics: {
/**
* Create an SQL identifier from the given parts; skipping the very
* first part (schema) if pgIdentifiers is set to 'unqualified'
*/
identifier(...parts: string[]): SQL;
};
}
}
}
export declare const PgBasicsPlugin: GraphileConfig.Plugin;
//# sourceMappingURL=PgBasicsPlugin.d.ts.map
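
The new 'pgIdentifiers' gather option controls whether generated SQL identifiers carry the schema prefix. A minimal sketch of setting it in a preset follows, assuming a typical graphile.config.ts; only 'gather.pgIdentifiers' and its two supported values come from this diff, the rest is illustrative.

// graphile.config.ts (sketch; preset shape assumed, not taken from this package)
import type {} from "graphile-build-pg"; // brings in the GatherOptions declaration shown above

const preset: GraphileConfig.Preset = {
  gather: {
    // Omit the schema name from table, function, and type identifiers;
    // leave unset (or use "qualified") for the default behaviour.
    pgIdentifiers: "unqualified",
  },
};

export default preset;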

@@ -5,3 +5,2 @@ "use strict";

const tslib_1 = require("tslib");
require("graphile-build");
require("./PgTablesPlugin.js");

@@ -11,2 +10,3 @@ require("../interfaces.js");

const dataplanPg = tslib_1.__importStar(require("@dataplan/pg"));
const graphile_build_1 = require("graphile-build");
const pg_sql2_1 = tslib_1.__importDefault(require("pg-sql2"));

@@ -20,2 +20,23 @@ const behavior_js_1 = require("../behavior.js");

version: version_js_1.version,
gather: (0, graphile_build_1.gatherConfig)({
namespace: "pgBasics",
helpers: {
identifier(info, ...parts) {
switch (info.options.pgIdentifiers) {
case "unqualified": {
// strip the schema
const [, ...partsWithoutSchema] = parts;
return (0, graphile_build_1.EXPORTABLE)((partsWithoutSchema, sql) => sql.identifier(...partsWithoutSchema), [partsWithoutSchema, pg_sql2_1.default]);
}
case "qualified":
case undefined: {
return (0, graphile_build_1.EXPORTABLE)((parts, sql) => sql.identifier(...parts), [parts, pg_sql2_1.default]);
}
default: {
throw new Error(`Setting preset.gather.pgIdentifiers had unsupported value '${info.options.pgIdentifiers}'; please use a supported value: 'qualified' or 'unqualified'.`);
}
}
},
},
}),
schema: {

@@ -22,0 +43,0 @@ globalBehavior: "connection -list",
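
The helper above strips the first (schema) part before calling pg-sql2's sql.identifier when 'unqualified' is selected, and passes all parts through otherwise. Below is a standalone sketch of that behaviour, assuming pg-sql2's sql.identifier/sql.compile API; only the qualified/unqualified semantics come from this diff.

// identifier-sketch.ts (illustrative re-implementation, not the plugin's export)
import sql from "pg-sql2";

function identifier(
  pgIdentifiers: "qualified" | "unqualified" | undefined,
  ...parts: string[]
) {
  if (pgIdentifiers === "unqualified") {
    const [, ...partsWithoutSchema] = parts; // drop the schema part
    return sql.identifier(...partsWithoutSchema);
  }
  return sql.identifier(...parts);
}

console.log(sql.compile(identifier(undefined, "app_public", "users")).text);
// e.g. "app_public"."users"
console.log(sql.compile(identifier("unqualified", "app_public", "users")).text);
// e.g. "users"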


dist/plugins/PgCodecsPlugin.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PgCodecsPlugin = void 0;
const tslib_1 = require("tslib");
const pg_1 = require("@dataplan/pg");
const graphile_build_1 = require("graphile-build");
const pg_sql2_1 = tslib_1.__importDefault(require("pg-sql2"));
const utils_js_1 = require("../utils.js");

@@ -218,5 +216,6 @@ const version_js_1 = require("../version.js");

const executor = info.helpers.pgIntrospection.getExecutorForService(serviceName);
const spec = (0, graphile_build_1.EXPORTABLE)((attributes, className, codecName, description, executor, extensions, nspName, sql) => ({
const sqlIdent = info.helpers.pgBasics.identifier(nspName, className);
const spec = (0, graphile_build_1.EXPORTABLE)((attributes, codecName, description, executor, extensions, sqlIdent) => ({
name: codecName,
identifier: sql.identifier(nspName, className),
identifier: sqlIdent,
attributes,

@@ -228,3 +227,2 @@ description,

attributes,
className,
codecName,

@@ -234,4 +232,3 @@ description,

extensions,
nspName,
pg_sql2_1.default,
sqlIdent,
]);

@@ -324,16 +321,9 @@ await info.process("pgCodecs_recordType_spec", {

});
return (0, graphile_build_1.EXPORTABLE)((codecName, enumCodec, enumLabels, extensions, namespaceName, sql, typeName) => enumCodec({
const sqlIdent = info.helpers.pgBasics.identifier(namespaceName, typeName);
return (0, graphile_build_1.EXPORTABLE)((codecName, enumCodec, enumLabels, extensions, sqlIdent) => enumCodec({
name: codecName,
identifier: sql.identifier(namespaceName, typeName),
identifier: sqlIdent,
values: enumLabels,
extensions,
}), [
codecName,
pg_1.enumCodec,
enumLabels,
extensions,
namespaceName,
pg_sql2_1.default,
typeName,
]);
}), [codecName, pg_1.enumCodec, enumLabels, extensions, sqlIdent]);
}

@@ -373,3 +363,4 @@ // Range type

});
return (0, graphile_build_1.EXPORTABLE)((codecName, description, extensions, innerCodec, namespaceName, rangeOfCodec, sql, typeName) => rangeOfCodec(innerCodec, codecName, sql.identifier(namespaceName, typeName), {
const sqlIdent = info.helpers.pgBasics.identifier(namespaceName, typeName);
return (0, graphile_build_1.EXPORTABLE)((codecName, description, extensions, innerCodec, rangeOfCodec, sqlIdent) => rangeOfCodec(innerCodec, codecName, sqlIdent, {
description,

@@ -382,6 +373,4 @@ extensions,

innerCodec,
namespaceName,
pg_1.rangeOfCodec,
pg_sql2_1.default,
typeName,
sqlIdent,
]);

@@ -418,3 +407,4 @@ }

});
return (0, graphile_build_1.EXPORTABLE)((codecName, description, domainOfCodec, extensions, innerCodec, namespaceName, notNull, sql, typeName) => domainOfCodec(innerCodec, codecName, sql.identifier(namespaceName, typeName), {
const sqlIdent = info.helpers.pgBasics.identifier(namespaceName, typeName);
return (0, graphile_build_1.EXPORTABLE)((codecName, description, domainOfCodec, extensions, innerCodec, notNull, sqlIdent) => domainOfCodec(innerCodec, codecName, sqlIdent, {
description,

@@ -429,6 +419,4 @@ extensions,

innerCodec,
namespaceName,
notNull,
pg_sql2_1.default,
typeName,
sqlIdent,
]);

@@ -435,0 +423,0 @@ }

@@ -0,1 +1,2 @@

import type { PgEnumCodec } from "@dataplan/pg";
import type { Introspection, PgAttribute, PgClass, PgConstraint } from "pg-introspection";

@@ -6,2 +7,3 @@ declare global {

pgEnumTables: {
isEnumConstraint(pgConstraint: PgConstraint): boolean;
getIntrospectionData(serviceName: string, pgClass: PgClass, attributes: PgAttribute[]): Promise<readonly Record<string, string>[]>;

@@ -12,2 +14,3 @@ processIntrospection(event: {

}): Promise<void>;
enumCodecForConstraint(pgConstraint: PgConstraint): PgEnumCodec<string, string> | undefined;
};

@@ -14,0 +17,0 @@ }

@@ -63,5 +63,32 @@ "use strict";

helpers: {
isEnumConstraint(info, pgConstraint) {
const pgClass = pgConstraint.getClass();
if (!pgClass)
return false;
const { tags, description: _description } = pgClass.getTagsAndDescription();
const isEnumTable = tags.enum === true || typeof tags.enum === "string";
if (pgConstraint.conrelid === pgClass._id) {
const isPrimaryKey = pgConstraint.contype === "p";
const isUniqueConstraint = pgConstraint.contype === "u";
if (isPrimaryKey || isUniqueConstraint) {
const conTags = pgConstraint.getTags();
const isExplicitEnumConstraint = conTags.enum === true || typeof conTags.enum === "string";
const isPrimaryKeyOfEnumTableConstraint = pgConstraint.contype === "p" && isEnumTable;
if (isExplicitEnumConstraint || isPrimaryKeyOfEnumTableConstraint) {
const hasExactlyOneAttribute = pgConstraint.conkey.length === 1;
if (!hasExactlyOneAttribute) {
throw new Error(`Enum table "${pgClass.getNamespace().nspname}"."${pgClass.relname}" enum constraint '${pgConstraint.conname}' is composite; it should have exactly one attribute (found: ${pgConstraint.conkey.length})`);
}
return true;
}
}
}
return false;
},
async getIntrospectionData(info, serviceName, pgClass, attributes) {
// Load data from the table/view.
const query = pg_sql2_1.sql.compile(pg_sql2_1.sql.fragment `select ${pg_sql2_1.sql.join(attributes.map((col) => pg_sql2_1.sql.identifier(col.attname)), ", ")} from ${pg_sql2_1.sql.identifier(pgClass.getNamespace().nspname, pgClass.relname)};`);
const query = pg_sql2_1.sql.compile(pg_sql2_1.sql.fragment `select ${pg_sql2_1.sql.join(attributes.map((col) => pg_sql2_1.sql.identifier(col.attname)), ", ")} from ${
// NOTE: Even in the case of unqualified pgIdentifiers, we still want
// to read _this_ enums values from _this_ schema.
pg_sql2_1.sql.identifier(pgClass.getNamespace().nspname, pgClass.relname)};`);
const pgService = info.resolvedPreset.pgServices.find((pgService) => pgService.name === serviceName);

@@ -110,3 +137,8 @@ try {

// (e.g. `@primaryKey`)
const enumConstraints = introspection.constraints.filter((pgConstraint) => isEnumConstraint(pgClass, pgConstraint, isEnumTable));
const enumConstraints = pgClass
.getConstraints()
.filter((pgConstraint) => info.helpers.pgEnumTables.isEnumConstraint(pgConstraint));
if (enumConstraints.length === 0) {
continue;
}
// Get all the attributes

@@ -152,2 +184,3 @@ const enumTableAttributes = pgClass.getAttributes();

// ENHANCE: more extensions/tags?
isEnumTableEnum: true,
tags: {

@@ -194,2 +227,5 @@ name: info.inflection.enumTableEnum({

},
enumCodecForConstraint(info, constraint) {
return info.state.codecByPgConstraint.get(constraint);
},
},

@@ -211,21 +247,2 @@ hooks: {

};
function isEnumConstraint(pgClass, pgConstraint, isEnumTable) {
if (pgConstraint.conrelid === pgClass._id) {
const isPrimaryKey = pgConstraint.contype === "p";
const isUniqueConstraint = pgConstraint.contype === "u";
if (isPrimaryKey || isUniqueConstraint) {
const conTags = pgConstraint.getTags();
const isExplicitEnumConstraint = conTags.enum === true || typeof conTags.enum === "string";
const isPrimaryKeyOfEnumTableConstraint = pgConstraint.contype === "p" && isEnumTable;
if (isExplicitEnumConstraint || isPrimaryKeyOfEnumTableConstraint) {
const hasExactlyOneAttribute = pgConstraint.conkey.length === 1;
if (!hasExactlyOneAttribute) {
throw new Error(`Enum table "${pgClass.getNamespace().nspname}"."${pgClass.relname}" enum constraint '${pgConstraint.conname}' is composite; it should have exactly one attribute (found: ${pgConstraint.conkey.length})`);
}
return true;
}
}
}
return false;
}
//# sourceMappingURL=PgEnumTablesPlugin.js.map

@@ -242,3 +242,3 @@ "use strict";

.map((k) => k.attname)
.join(",")}' smart tag to emulate this. (Original spec: ${JSON.stringify(rawSpec)}).\nTo temporarily fix this you can set 'preset.gather.pgFaceConstraintsAutofixForeignKeyUniqueness' to 'true', but we strongly recommend against using this long term.'`);
.join(",")}' smart tag to emulate this. (Original spec: ${JSON.stringify(rawSpec)}).\nTo temporarily fix this you can set 'preset.gather.pgFakeConstraintsAutofixForeignKeyUniqueness' to 'true', but we strongly recommend against using this long term.'`);
}

@@ -245,0 +245,0 @@ }
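
The corrected message points at 'preset.gather.pgFakeConstraintsAutofixForeignKeyUniqueness'. A sketch of enabling it temporarily follows; the flag name comes from the message itself, the preset shape is assumed, and the message explicitly discourages relying on it long term.

// graphile.config.ts (sketch)
const preset: GraphileConfig.Preset = {
  gather: {
    // Temporary escape hatch only; prefer a real unique constraint (or the
    // suggested smart tag) on the referenced attributes.
    pgFakeConstraintsAutofixForeignKeyUniqueness: true,
  },
};

export default preset;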

@@ -229,3 +229,4 @@ "use strict";

const procName = pgProc.proname;
const fromCallback = (0, graphile_build_1.EXPORTABLE)((namespaceName, procName, sql, sqlFromArgDigests) => (...args) => sql `${sql.identifier(namespaceName, procName)}(${sqlFromArgDigests(args)})`, [namespaceName, procName, pg_sql2_1.default, pg_1.sqlFromArgDigests]);
const sqlIdent = info.helpers.pgBasics.identifier(namespaceName, procName);
const fromCallback = (0, graphile_build_1.EXPORTABLE)((sql, sqlFromArgDigests, sqlIdent) => (...args) => sql `${sqlIdent}(${sqlFromArgDigests(args)})`, [pg_sql2_1.default, pg_1.sqlFromArgDigests, sqlIdent]);
(0, utils_js_1.addBehaviorToTags)(tags, "-filter -order", true);

@@ -232,0 +233,0 @@ const extensions = {

@@ -139,3 +139,6 @@ "use strict";

for (const rawPart of parts) {
const relationEntries = Object.entries(registryConfig.pgRelations[currentResourceOptions.codec.name]);
const relations = registryConfig.pgRelations[currentResourceOptions.codec.name];
const relationEntries = relations
? Object.entries(relations)
: [];
const part = rawPart.trim();

@@ -190,3 +193,5 @@ // ENHANCE: allow whitespace

else {
console.warn(`Could not find matching relation for '${via}' / ${currentResourceOptions.name} -> '${rawPart}'`);
console.warn(`When processing ref for resource '${resourceOptions.name}', could not find matching relation for via:'${via}' ${rawPart === via
? ""
: ` (from: '${currentResourceOptions.name}', path: '${rawPart}')`}`);
continue outerLoop;

@@ -193,0 +198,0 @@ }

@@ -1,2 +0,2 @@

export declare const version = "5.0.0-beta.10";
export declare const version = "5.0.0-beta.11";
//# sourceMappingURL=version.d.ts.map

@@ -5,3 +5,3 @@ "use strict";

// This file is autogenerated by /scripts/postversion.mjs
exports.version = "5.0.0-beta.10";
exports.version = "5.0.0-beta.11";
//# sourceMappingURL=version.js.map
{
"name": "graphile-build-pg",
"version": "5.0.0-beta.10",
"version": "5.0.0-beta.11",
"description": "PostgreSQL plugins for Graphile Build - build a supercharged GraphQL schema by reflection over a PostgreSQL database and executed by Grafast.",

@@ -55,5 +55,5 @@ "type": "commonjs",

"peerDependencies": {
"@dataplan/pg": "^0.0.1-beta.8",
"grafast": "^0.0.1-beta.8",
"graphile-build": "5.0.0-beta.9",
"@dataplan/pg": "^0.0.1-beta.9",
"grafast": "^0.1.1-beta.0",
"graphile-build": "5.0.0-beta.10",
"graphile-config": "^0.0.1-beta.3",

@@ -91,3 +91,3 @@ "graphql": "^16.1.0-experimental-stream-defer.6",

"fastify-static": "^4.7.0",
"graphile-export": "^0.0.2-beta.4",
"graphile-export": "^0.0.2-beta.5",
"graphql": "16.1.0-experimental-stream-defer.6",

@@ -94,0 +94,0 @@ "graphql-helix": "^1.13.0",
