node-pg-migrate
Comparing version 5.9.0 to 5.10.0
@@ -19,3 +19,3 @@ "use strict"; | ||
const SEPARATOR = '_'; | ||
exports.loadMigrationFiles = async (dir, ignorePattern) => { | ||
const loadMigrationFiles = async (dir, ignorePattern) => { | ||
const dirContent = await readdir(`${dir}/`, { withFileTypes: true }); | ||
@@ -29,2 +29,3 @@ const files = dirContent | ||
}; | ||
exports.loadMigrationFiles = loadMigrationFiles; | ||
const getSuffixFromFileName = (fileName) => path_1.default.extname(fileName).substr(1); | ||
@@ -40,3 +41,3 @@ const getLastSuffix = async (dir, ignorePattern) => { | ||
}; | ||
exports.getTimestamp = (logger, filename) => { | ||
const getTimestamp = (logger, filename) => { | ||
const prefix = filename.split(SEPARATOR)[0]; | ||
@@ -61,2 +62,3 @@ if (prefix && /^\d+$/.test(prefix)) { | ||
}; | ||
exports.getTimestamp = getTimestamp; | ||
const resolveSuffix = async (directory, { language, ignorePattern }) => language || (await getLastSuffix(directory, ignorePattern)) || 'js'; | ||
@@ -63,0 +65,0 @@ class Migration { |
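The hunk above shows the export pattern that recurs throughout this release: helpers that were assigned directly to `exports` are now declared as local `const`s and re-exported afterwards. A condensed sketch of the before/after shape (the elided bodies are placeholders, not the real implementations):

```js
// 5.9.0 style: the function only exists as a property of exports.
exports.loadMigrationFiles = async (dir, ignorePattern) => {
  /* read and filter migration files ... */
};

// 5.10.0 style: a local declaration that other code in the same module can
// call directly, re-exported under the same name afterwards.
const loadMigrationFiles = async (dir, ignorePattern) => {
  /* read and filter migration files ... */
};
exports.loadMigrationFiles = loadMigrationFiles;
```

The same `const foo = ...; exports.foo = foo;` shape appears below for `getTimestamp`, `removeFromOperatorFamily`, `parseSequenceOptions`, `getActions`, and the helpers in `utils.js`.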
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.createExtension = exports.dropExtension = void 0; | ||
const lodash_1 = __importDefault(require("lodash")); | ||
function dropExtension(mOptions) { | ||
const _drop = (_extensions, options = {}) => { | ||
const { ifExists, cascade } = options; | ||
const extensions = lodash_1.default.isArray(_extensions) ? _extensions : [_extensions]; | ||
const extensions = Array.isArray(_extensions) ? _extensions : [_extensions]; | ||
const ifExistsStr = ifExists ? ' IF EXISTS' : ''; | ||
const cascadeStr = cascade ? ' CASCADE' : ''; | ||
return lodash_1.default.map(extensions, (extension) => { | ||
return extensions.map((extension) => { | ||
const extensionStr = mOptions.literal(extension); | ||
@@ -25,6 +21,6 @@ return `DROP EXTENSION${ifExistsStr} ${extensionStr}${cascadeStr};`; | ||
const { ifNotExists, schema } = options; | ||
const extensions = lodash_1.default.isArray(_extensions) ? _extensions : [_extensions]; | ||
const extensions = Array.isArray(_extensions) ? _extensions : [_extensions]; | ||
const ifNotExistsStr = ifNotExists ? ' IF NOT EXISTS' : ''; | ||
const schemaStr = schema ? ` SCHEMA ${mOptions.literal(schema)}` : ''; | ||
return lodash_1.default.map(extensions, (extension) => { | ||
return extensions.map((extension) => { | ||
const extensionStr = mOptions.literal(extension); | ||
@@ -31,0 +27,0 @@ return `CREATE EXTENSION${ifNotExistsStr} ${extensionStr}${schemaStr};`; |
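The extensions hunk above is representative of the lodash removal in 5.10.0: `lodash_1.default.isArray` and `lodash_1.default.map` are replaced with the built-in `Array.isArray` and `Array.prototype.map`, which are drop-in equivalents at these call sites. A simplified sketch (the `toDropStatements` name is made up for illustration, and the real `dropExtension` also handles `IF EXISTS`, `CASCADE`, and identifier quoting):

```js
// Normalize a single name or an array of names, then map to SQL strings,
// using only built-ins instead of lodash.
const toDropStatements = (_extensions) => {
  const extensions = Array.isArray(_extensions) ? _extensions : [_extensions];
  return extensions.map((extension) => `DROP EXTENSION ${extension};`);
};

console.log(toDropStatements('uuid-ossp'));           // [ 'DROP EXTENSION uuid-ossp;' ]
console.log(toDropStatements(['hstore', 'postgis']));  // [ 'DROP EXTENSION hstore;', 'DROP EXTENSION postgis;' ]
```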
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.createIndex = exports.dropIndex = void 0; | ||
const lodash_1 = __importDefault(require("lodash")); | ||
function generateIndexName(table, columns, options, schemalize) { | ||
@@ -44,3 +40,3 @@ if (options.name) { | ||
const { concurrently, ifExists, cascade } = options; | ||
const columns = lodash_1.default.isArray(rawColumns) ? rawColumns.slice() : [rawColumns]; | ||
const columns = Array.isArray(rawColumns) ? rawColumns.slice() : [rawColumns]; | ||
const concurrentlyStr = concurrently ? ' CONCURRENTLY' : ''; | ||
@@ -58,3 +54,3 @@ const ifExistsStr = ifExists ? ' IF EXISTS' : ''; | ||
const _create = (tableName, rawColumns, options = {}) => { | ||
const columns = lodash_1.default.isArray(rawColumns) ? rawColumns.slice() : [rawColumns]; | ||
const columns = Array.isArray(rawColumns) ? rawColumns.slice() : [rawColumns]; | ||
if (options.opclass) { | ||
@@ -82,3 +78,3 @@ mOptions.logger.warn("Using opclass is deprecated. You should use it as part of column definition e.g. pgm.createIndex('table', [['column', 'opclass', 'ASC']])"); | ||
const include = options.include | ||
? ` INCLUDE (${(lodash_1.default.isArray(options.include) ? options.include : [options.include]) | ||
? ` INCLUDE (${(Array.isArray(options.include) ? options.include : [options.include]) | ||
.map(mOptions.literal) | ||
@@ -85,0 +81,0 @@ .join(', ')})` |
@@ -89,3 +89,3 @@ "use strict"; | ||
}; | ||
exports.removeFromOperatorFamily = (mOptions) => { | ||
const removeFromOperatorFamily = (mOptions) => { | ||
const method = (operatorFamilyName, indexMethod, operatorList) => { | ||
@@ -99,3 +99,4 @@ const operatorFamilyNameStr = mOptions.literal(operatorFamilyName); | ||
}; | ||
exports.addToOperatorFamily = (mOptions) => { | ||
exports.removeFromOperatorFamily = removeFromOperatorFamily; | ||
const addToOperatorFamily = (mOptions) => { | ||
const method = (operatorFamilyName, indexMethod, operatorList) => { | ||
@@ -110,2 +111,3 @@ const operatorFamilyNameStr = mOptions.literal(operatorFamilyName); | ||
}; | ||
exports.addToOperatorFamily = addToOperatorFamily; | ||
function renameOperatorFamily(mOptions) { | ||
@@ -112,0 +114,0 @@ const _rename = (oldOperatorFamilyName, indexMethod, newOperatorFamilyName) => { |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.renameRole = exports.alterRole = exports.createRole = exports.dropRole = void 0; | ||
const lodash_1 = require("lodash"); | ||
const utils_1 = require("../utils"); | ||
@@ -41,11 +40,11 @@ const formatRoleOptions = (roleOptions = {}) => { | ||
if (roleOptions.inRole) { | ||
const inRole = lodash_1.isArray(roleOptions.inRole) ? roleOptions.inRole.join(',') : roleOptions.inRole; | ||
const inRole = Array.isArray(roleOptions.inRole) ? roleOptions.inRole.join(',') : roleOptions.inRole; | ||
options.push(`IN ROLE ${inRole}`); | ||
} | ||
if (roleOptions.role) { | ||
const role = lodash_1.isArray(roleOptions.role) ? roleOptions.role.join(',') : roleOptions.role; | ||
const role = Array.isArray(roleOptions.role) ? roleOptions.role.join(',') : roleOptions.role; | ||
options.push(`ROLE ${role}`); | ||
} | ||
if (roleOptions.admin) { | ||
const admin = lodash_1.isArray(roleOptions.admin) ? roleOptions.admin.join(',') : roleOptions.admin; | ||
const admin = Array.isArray(roleOptions.admin) ? roleOptions.admin.join(',') : roleOptions.admin; | ||
options.push(`ADMIN ${admin}`); | ||
@@ -52,0 +51,0 @@ } |
@@ -5,3 +5,3 @@ "use strict"; | ||
const utils_1 = require("../utils"); | ||
exports.parseSequenceOptions = (typeShorthands, options) => { | ||
const parseSequenceOptions = (typeShorthands, options) => { | ||
const { type, increment, minvalue, maxvalue, start, cache, cycle, owner } = options; | ||
@@ -47,2 +47,3 @@ const clauses = []; | ||
}; | ||
exports.parseSequenceOptions = parseSequenceOptions; | ||
function dropSequence(mOptions) { | ||
@@ -49,0 +50,0 @@ const _drop = (sequenceName, options = {}) => { |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.addConstraint = exports.dropConstraint = exports.renameConstraint = exports.renameColumn = exports.renameTable = exports.alterColumn = exports.addColumns = exports.dropColumns = exports.alterTable = exports.createTable = exports.dropTable = void 0; | ||
const lodash_1 = __importDefault(require("lodash")); | ||
const utils_1 = require("../utils"); | ||
@@ -30,18 +26,18 @@ const sequences_1 = require("./sequences"); | ||
const extendingTypeShorthands = mOptions.typeShorthands; | ||
let columnsWithOptions = lodash_1.default.mapValues(columns, (column) => utils_1.applyType(column, extendingTypeShorthands)); | ||
const primaryColumns = lodash_1.default.chain(columnsWithOptions) | ||
.map((options, columnName) => (options.primaryKey ? columnName : null)) | ||
.filter((columnName) => Boolean(columnName)) | ||
.value(); | ||
let columnsWithOptions = Object.keys(columns).reduce((previous, column) => (Object.assign(Object.assign({}, previous), { [column]: utils_1.applyType(columns[column], extendingTypeShorthands) })), {}); | ||
const primaryColumns = Object.entries(columnsWithOptions) | ||
.filter(([, { primaryKey }]) => Boolean(primaryKey)) | ||
.map(([columnName]) => columnName); | ||
const multiplePrimaryColumns = primaryColumns.length > 1; | ||
if (multiplePrimaryColumns) { | ||
columnsWithOptions = lodash_1.default.mapValues(columnsWithOptions, (options) => (Object.assign(Object.assign({}, options), { primaryKey: false }))); | ||
columnsWithOptions = Object.entries(columnsWithOptions).reduce((previous, [columnName, options]) => (Object.assign(Object.assign({}, previous), { [columnName]: Object.assign(Object.assign({}, options), { primaryKey: false }) })), {}); | ||
} | ||
const comments = lodash_1.default.chain(columnsWithOptions) | ||
.map((options, columnName) => typeof options.comment !== 'undefined' && | ||
utils_1.makeComment('COLUMN', `${mOptions.literal(tableName)}.${mOptions.literal(columnName)}`, options.comment)) | ||
.filter((comment) => Boolean(comment)) | ||
.value(); | ||
const comments = Object.entries(columnsWithOptions) | ||
.map(([columnName, { comment }]) => { | ||
return (typeof comment !== 'undefined' && | ||
utils_1.makeComment('COLUMN', `${mOptions.literal(tableName)}.${mOptions.literal(columnName)}`, comment)); | ||
}) | ||
.filter((comment) => Boolean(comment)); | ||
return { | ||
columns: lodash_1.default.map(columnsWithOptions, (options, columnName) => { | ||
columns: Object.entries(columnsWithOptions).map(([columnName, options]) => { | ||
const { type, collation, default: defaultValue, unique, primaryKey, notNull, check, references, referencesConstraintName, referencesConstraintComment, deferrable, expressionGenerated, } = options; | ||
@@ -100,3 +96,3 @@ const sequenceGenerated = options.sequenceGenerated === undefined ? options.generated : options.sequenceGenerated; | ||
if (check) { | ||
if (lodash_1.default.isArray(check)) { | ||
if (Array.isArray(check)) { | ||
check.forEach((ch, i) => { | ||
@@ -113,6 +109,6 @@ const name = literal(optionName || `${tableName}_chck_${i + 1}`); | ||
if (unique) { | ||
const uniqueArray = lodash_1.default.isArray(unique) ? unique : [unique]; | ||
const isArrayOfArrays = uniqueArray.some((uniqueSet) => lodash_1.default.isArray(uniqueSet)); | ||
const uniqueArray = Array.isArray(unique) ? unique : [unique]; | ||
const isArrayOfArrays = uniqueArray.some((uniqueSet) => Array.isArray(uniqueSet)); | ||
(isArrayOfArrays ? uniqueArray : [uniqueArray]).forEach((uniqueSet) => { | ||
const cols = lodash_1.default.isArray(uniqueSet) ? uniqueSet : [uniqueSet]; | ||
const cols = Array.isArray(uniqueSet) ? uniqueSet : [uniqueSet]; | ||
const name = literal(optionName || `${tableName}_uniq_${cols.join('_')}`); | ||
@@ -124,3 +120,3 @@ constraints.push(`CONSTRAINT ${name} UNIQUE (${cols.map(literal).join(', ')})`); | ||
const name = literal(optionName || `${tableName}_pkey`); | ||
const key = (lodash_1.default.isArray(primaryKey) ? primaryKey : [primaryKey]).map(literal).join(', '); | ||
const key = (Array.isArray(primaryKey) ? primaryKey : [primaryKey]).map(literal).join(', '); | ||
constraints.push(`CONSTRAINT ${name} PRIMARY KEY (${key})`); | ||
@@ -130,5 +126,5 @@ } | ||
; | ||
(lodash_1.default.isArray(foreignKeys) ? foreignKeys : [foreignKeys]).forEach((fk) => { | ||
(Array.isArray(foreignKeys) ? foreignKeys : [foreignKeys]).forEach((fk) => { | ||
const { columns, referencesConstraintName, referencesConstraintComment } = fk; | ||
const cols = lodash_1.default.isArray(columns) ? columns : [columns]; | ||
const cols = Array.isArray(columns) ? columns : [columns]; | ||
const name = literal(referencesConstraintName || optionName || `${tableName}_fk_${cols.join('_')}`); | ||
@@ -161,3 +157,3 @@ const key = cols.map(literal).join(', '); | ||
const parseLike = (like, literal) => { | ||
const formatOptions = (name, options) => (lodash_1.default.isArray(options) ? options : [options]) | ||
const formatOptions = (name, options) => (Array.isArray(options) ? options : [options]) | ||
.filter((option) => option !== undefined) | ||
@@ -186,4 +182,4 @@ .map((option) => ` ${name} ${option}`) | ||
const { temporary, ifNotExists, inherits, like, constraints: optionsConstraints = {}, comment } = options; | ||
const { columns: columnLines, constraints: crossColumnConstraints, comments: columnComments = [] } = parseColumns(tableName, columns, mOptions); | ||
const dupes = lodash_1.default.intersection(Object.keys(optionsConstraints), Object.keys(crossColumnConstraints)); | ||
const { columns: columnLines, constraints: crossColumnConstraints, comments: columnComments = [], } = parseColumns(tableName, columns, mOptions); | ||
const dupes = utils_1.intersection(Object.keys(optionsConstraints), Object.keys(crossColumnConstraints)); | ||
if (dupes.length > 0) { | ||
@@ -231,4 +227,4 @@ const dupesStr = dupes.join(', '); | ||
} | ||
else if (!lodash_1.default.isArray(columns) && typeof columns === 'object') { | ||
columns = lodash_1.default.keys(columns); | ||
else if (!Array.isArray(columns) && typeof columns === 'object') { | ||
columns = Object.keys(columns); | ||
} | ||
@@ -235,0 +231,0 @@ const columnsStr = utils_1.formatLines(columns.map(mOptions.literal), ` DROP ${ifExists ? ' IF EXISTS' : ''}`, `${cascade ? ' CASCADE' : ''},`); |
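The `tables.js` hunks above contain the most involved part of the lodash removal: `_.mapValues` and `_.chain(...).map(...).filter(...).value()` are rewritten with `Object.keys`/`Object.entries` plus `reduce`, `filter`, and `map`, and `_.intersection` is replaced by the new `utils_1.intersection` helper (see the `utils.js` diff further down). A simplified sketch of the equivalent shapes, with a made-up `columns` object and a stand-in for `utils_1.applyType` (the compiled output above spells the spread as `Object.assign`):

```js
const columns = { id: { type: 'serial', primaryKey: true }, name: { type: 'text' } };
const applyType = (column) => column; // stand-in for utils_1.applyType

// _.mapValues(columns, applyType) expressed as a reduce over Object.keys.
const columnsWithOptions = Object.keys(columns).reduce(
  (previous, column) => ({ ...previous, [column]: applyType(columns[column]) }),
  {}
);

// _.chain(columnsWithOptions).map(...).filter(Boolean).value() expressed as
// Object.entries + filter + map.
const primaryColumns = Object.entries(columnsWithOptions)
  .filter(([, { primaryKey }]) => Boolean(primaryKey))
  .map(([columnName]) => columnName);

console.log(primaryColumns); // [ 'id' ]
```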
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.renameTrigger = exports.createTrigger = exports.dropTrigger = void 0; | ||
const lodash_1 = require("lodash"); | ||
const utils_1 = require("../utils"); | ||
@@ -23,3 +22,3 @@ const functions_1 = require("./functions"); | ||
let { when, level = 'STATEMENT', function: functionName } = triggerOptions; | ||
const operations = lodash_1.isArray(operation) ? operation.join(' OR ') : operation; | ||
const operations = Array.isArray(operation) ? operation.join(' OR ') : operation; | ||
if (constraint) { | ||
@@ -26,0 +25,0 @@ when = 'AFTER'; |
"use strict"; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.renameTypeValue = exports.renameTypeAttribute = exports.renameType = exports.addTypeValue = exports.setTypeAttribute = exports.addTypeAttribute = exports.dropTypeAttribute = exports.createType = exports.dropType = void 0; | ||
const lodash_1 = __importDefault(require("lodash")); | ||
const utils_1 = require("../utils"); | ||
@@ -22,3 +18,3 @@ function dropType(mOptions) { | ||
const _create = (typeName, options) => { | ||
if (lodash_1.default.isArray(options)) { | ||
if (Array.isArray(options)) { | ||
const optionsStr = options.map(utils_1.escapeValue).join(', '); | ||
@@ -28,6 +24,8 @@ const typeNameStr = mOptions.literal(typeName); | ||
} | ||
const attributes = lodash_1.default.map(options, (attribute, attributeName) => { | ||
const attributes = Object.entries(options) | ||
.map(([attributeName, attribute]) => { | ||
const typeStr = utils_1.applyType(attribute, mOptions.typeShorthands).type; | ||
return `${mOptions.literal(attributeName)} ${typeStr}`; | ||
}).join(',\n'); | ||
}) | ||
.join(',\n'); | ||
return `CREATE TYPE ${mOptions.literal(typeName)} AS (\n${attributes}\n);`; | ||
@@ -34,0 +32,0 @@ }; |
@@ -10,3 +10,3 @@ "use strict"; | ||
const createMigrationCommentRegex = (direction) => new RegExp(`^\\s*--[\\s-]*${direction}\\s+migration`, 'im'); | ||
exports.getActions = (content) => { | ||
const getActions = (content) => { | ||
const upMigrationCommentRegex = createMigrationCommentRegex('up'); | ||
@@ -27,2 +27,3 @@ const downMigrationCommentRegex = createMigrationCommentRegex('down'); | ||
}; | ||
exports.getActions = getActions; | ||
exports.default = async (sqlPath) => { | ||
@@ -29,0 +30,0 @@ const content = await readFile(sqlPath, 'utf-8'); |
@@ -1,4 +0,2 @@ | ||
/// <reference types="node" /> | ||
import { ClientBase, QueryArrayResult, QueryResult, QueryArrayConfig, QueryConfig } from 'pg'; | ||
import { TlsOptions } from 'tls'; | ||
import { ClientBase, ClientConfig, QueryArrayResult, QueryResult, QueryArrayConfig, QueryConfig } from 'pg'; | ||
import { Name } from './operations/generalTypes'; | ||
@@ -21,2 +19,3 @@ import * as domains from './operations/domainsTypes'; | ||
import PgLiteral from './operations/PgLiteral'; | ||
export { ClientConfig, ConnectionConfig } from 'pg'; | ||
export interface DB { | ||
@@ -214,13 +213,2 @@ query(queryConfig: QueryArrayConfig, values?: any[]): Promise<QueryArrayResult>; | ||
} | ||
export interface ConnectionConfig { | ||
user?: string; | ||
database?: string; | ||
password?: string; | ||
port?: number; | ||
host?: string; | ||
connectionString?: string; | ||
} | ||
export interface ClientConfig extends ConnectionConfig { | ||
ssl?: boolean | TlsOptions; | ||
} | ||
export interface RunnerOptionUrl { | ||
@@ -227,0 +215,0 @@ databaseUrl: string | ClientConfig; |
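The typings change above removes the hand-rolled `ConnectionConfig` and `ClientConfig` interfaces (and the `tls` import they needed) and instead re-exports both names from `pg`, so they now resolve to the definitions in `@types/pg`. A minimal consumer sketch, assuming only what the hunk shows, namely that a `databaseUrl` may be a connection string or a `ClientConfig` object in pg's shape:

```js
// JSDoc-typed Node sketch; the property names come from pg's ClientConfig.
/** @type {import('pg').ClientConfig} */
const dbConfig = {
  host: 'localhost',
  port: 5432,
  database: 'database',
  user: 'postgres',
};

module.exports = dbConfig;
```

An object like this (or a plain connection string) is what the `databaseUrl: string | ClientConfig` option shown above accepts.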
@@ -28,1 +28,2 @@ import { ColumnDefinitions, ColumnDefinition } from './operations/tablesTypes'; | ||
export declare const formatLines: (lines: string[], replace?: string, separator?: string) => string; | ||
export declare function intersection<T>(list1: T[], list2: T[]): T[]; |
@@ -17,7 +17,7 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.formatLines = exports.makeComment = exports.formatParams = exports.applyType = exports.applyTypeAdapters = exports.getMigrationTableSchema = exports.getSchemas = exports.createTransformer = exports.escapeValue = exports.StringIdGenerator = exports.createSchemalize = void 0; | ||
exports.intersection = exports.formatLines = exports.makeComment = exports.formatParams = exports.applyType = exports.applyTypeAdapters = exports.getMigrationTableSchema = exports.getSchemas = exports.createTransformer = exports.escapeValue = exports.StringIdGenerator = exports.createSchemalize = void 0; | ||
const decamelize_1 = __importDefault(require("decamelize")); | ||
const identity = (v) => v; | ||
const quote = (str) => `"${str}"`; | ||
exports.createSchemalize = (shouldDecamelize, shouldQuote) => { | ||
const createSchemalize = (shouldDecamelize, shouldQuote) => { | ||
const transform = [shouldDecamelize ? decamelize_1.default : identity, shouldQuote ? quote : identity].reduce((acc, fn) => fn === identity ? acc : (x) => acc(fn(x))); | ||
@@ -32,2 +32,3 @@ return (v) => { | ||
}; | ||
exports.createSchemalize = createSchemalize; | ||
class StringIdGenerator { | ||
@@ -56,3 +57,3 @@ constructor(chars = 'abcdefghijklmnopqrstuvwxyz') { | ||
const isPgLiteral = (val) => typeof val === 'object' && val !== null && 'literal' in val && val.literal === true; | ||
exports.escapeValue = (val) => { | ||
const escapeValue = (val) => { | ||
if (val === null) { | ||
@@ -86,3 +87,4 @@ return 'NULL'; | ||
}; | ||
exports.createTransformer = (literal) => (s, d) => Object.keys(d || {}).reduce((str, p) => { | ||
exports.escapeValue = escapeValue; | ||
const createTransformer = (literal) => (s, d) => Object.keys(d || {}).reduce((str, p) => { | ||
const v = d === null || d === void 0 ? void 0 : d[p]; | ||
@@ -95,7 +97,10 @@ return str.replace(new RegExp(`{${p}}`, 'g'), v === undefined | ||
}, s); | ||
exports.getSchemas = (schema) => { | ||
exports.createTransformer = createTransformer; | ||
const getSchemas = (schema) => { | ||
const schemas = (Array.isArray(schema) ? schema : [schema]).filter((s) => typeof s === 'string' && s.length > 0); | ||
return schemas.length > 0 ? schemas : ['public']; | ||
}; | ||
exports.getMigrationTableSchema = (options) => options.migrationsSchema !== undefined ? options.migrationsSchema : exports.getSchemas(options.schema)[0]; | ||
exports.getSchemas = getSchemas; | ||
const getMigrationTableSchema = (options) => options.migrationsSchema !== undefined ? options.migrationsSchema : exports.getSchemas(options.schema)[0]; | ||
exports.getMigrationTableSchema = getMigrationTableSchema; | ||
const typeAdapters = { | ||
@@ -112,3 +117,4 @@ int: 'integer', | ||
}; | ||
exports.applyTypeAdapters = (type) => type in typeAdapters ? typeAdapters[type] : type; | ||
const applyTypeAdapters = (type) => type in typeAdapters ? typeAdapters[type] : type; | ||
exports.applyTypeAdapters = applyTypeAdapters; | ||
const toType = (type) => (typeof type === 'string' ? { type } : type); | ||
@@ -119,3 +125,3 @@ const removeType = (_a) => { | ||
}; | ||
exports.applyType = (type, extendingTypeShorthands = {}) => { | ||
const applyType = (type, extendingTypeShorthands = {}) => { | ||
var _a; | ||
@@ -137,2 +143,3 @@ const typeShorthands = Object.assign(Object.assign({}, defaultTypeShorthands), extendingTypeShorthands); | ||
}; | ||
exports.applyType = applyType; | ||
const formatParam = (mOptions) => (param) => { | ||
@@ -155,10 +162,17 @@ const { mode, name, type, default: defaultValue } = exports.applyType(param, mOptions.typeShorthands); | ||
}; | ||
exports.formatParams = (params = [], mOptions) => `(${params.map(formatParam(mOptions)).join(', ')})`; | ||
exports.makeComment = (object, name, text) => { | ||
const formatParams = (params = [], mOptions) => `(${params.map(formatParam(mOptions)).join(', ')})`; | ||
exports.formatParams = formatParams; | ||
const makeComment = (object, name, text) => { | ||
const cmt = exports.escapeValue(text || null); | ||
return `COMMENT ON ${object} ${name} IS ${cmt};`; | ||
}; | ||
exports.formatLines = (lines, replace = ' ', separator = ',') => lines | ||
exports.makeComment = makeComment; | ||
const formatLines = (lines, replace = ' ', separator = ',') => lines | ||
.map((line) => line.replace(/(?:\r\n|\r|\n)+/g, ' ')) | ||
.join(`${separator}\n`) | ||
.replace(/^/gm, replace); | ||
exports.formatLines = formatLines; | ||
function intersection(list1, list2) { | ||
return list1.filter((element) => list2.includes(element)); | ||
} | ||
exports.intersection = intersection; |
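The last hunk above adds a small native `intersection` helper to `utils.js` (declared in `utils.d.ts` earlier in this diff), which is what `tables.js` now uses instead of `_.intersection` to detect constraint names defined twice. Its behavior, with hypothetical constraint names for illustration:

```js
// Same shape as the helper added in lib/utils.js in this release.
function intersection(list1, list2) {
  return list1.filter((element) => list2.includes(element));
}

// tables.js passes the keys of options.constraints and of the constraints
// derived from column definitions; any overlap is reported as a duplicate.
const dupes = intersection(['mytable_pkey', 'mytable_uniq_name'], ['mytable_pkey']);
console.log(dupes); // [ 'mytable_pkey' ]
```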
@@ -8,3 +8,5 @@ # CLI Usage | ||
{ | ||
"url": "postgres://postgres:password@localhost:5432/database" | ||
"db": { | ||
"url": "postgres://postgres:password@localhost:5432/database" | ||
} | ||
} | ||
@@ -18,7 +20,9 @@ ``` | ||
{ | ||
"user": "postgres", | ||
"password": "", | ||
"host": "localhost", | ||
"port": 5432, | ||
"database": "database" | ||
"db": { | ||
"user": "postgres", | ||
"password": "", | ||
"host": "localhost", | ||
"port": 5432, | ||
"database": "database" | ||
} | ||
} | ||
@@ -46,3 +50,3 @@ ``` | ||
If a .env file exists, it will be loaded using [dotenv](https://www.npmjs.com/package/dotenv) (if installed) when running the node-pg-migrate binary. | ||
If a .env file exists, it will be loaded using [dotenv](https://www.npmjs.com/package/dotenv) (if installed) when running the node-pg-migrate binary. If the .env file is not on the same level where the command has been called, you can use the `--envPath` option to point to the location of your .env file. | ||
@@ -66,2 +70,3 @@ Depending on your project's setup, it may make sense to write some custom grunt/gulp/whatever tasks that set this env var and run your migration commands. More on that below. | ||
- `config-file` (`f`) - The file with migration JSON config (defaults to undefined) | ||
- `config-value` - Name of config section with db options (default to `db`) | ||
- `schema` (`s`) - The schema(s) on which migration will be run (defaults to `public`, used to set `search_path`) | ||
@@ -79,2 +84,3 @@ - `create-schema` - Create the configured schema if it doesn't exist (defaults to `false`) | ||
- `tsconfig` - Path to tsconfig.json. Used to setup transpiling of TS migration files. (Also sets `migration-file-language` to typescript, if not overridden) | ||
- `envPath` - Path to a .env file. The default finds the file on the same level where the command has been called. It might be useful if you have nested projects, but a global .env file that you need to point to. | ||
- `timestamp` - Treats number argument to up/down migration as timestamp (running up migrations less or equal to timestamp or down migrations greater or equal to timestamp) | ||
@@ -81,0 +87,0 @@ - `check-order` - Check order of migrations before running them (defaults to `true`, to switch it off supply `--no-check-order` on the command line). |
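The CLI documentation diff above reflects a config-file format change: database options now live under a nested section whose name is given by the new `config-value` option (default `db`), rather than at the top level. A hedged example with an arbitrary file and section name:

```json
{
  "my-db-section": {
    "url": "postgres://postgres:password@localhost:5432/database"
  }
}
```

With a file like this saved as `migrate-config.json`, migrations would be run with something like `node-pg-migrate up -f migrate-config.json --config-value my-db-section`; omitting `config-value` reads the default `db` section, as in the updated examples above.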
@@ -126,3 +126,3 @@ # node-pg-migrate | ||
Copyright (c) 2016-2020 Salsita Software <jando@salsitasoft.com> | ||
Copyright (c) 2016-2021 Salsita Software <jando@salsitasoft.com> | ||
@@ -129,0 +129,0 @@ Copyright (c) 2014-2016 Theo Ephraim |
{ | ||
"name": "node-pg-migrate", | ||
"version": "5.9.0", | ||
"version": "5.10.0", | ||
"description": "Postgresql database migration management tool for node.js", | ||
@@ -48,45 +48,43 @@ "author": "Theo Ephraim", | ||
"dependencies": { | ||
"@types/pg": "^7.4.0", | ||
"decamelize": "^4.0.0", | ||
"lodash": "~4.17.0", | ||
"@types/pg": "^8.0.0", | ||
"decamelize": "^5.0.0", | ||
"mkdirp": "~1.0.0", | ||
"yargs": "~16.1.0" | ||
"yargs": "~16.2.0" | ||
}, | ||
"devDependencies": { | ||
"@types/chai": "4.2.14", | ||
"@types/chai-as-promised": "7.1.3", | ||
"@types/lodash": "4.14.164", | ||
"@types/chai": "4.2.18", | ||
"@types/chai-as-promised": "7.1.4", | ||
"@types/mkdirp": "1.0.1", | ||
"@types/mocha": "8.0.3", | ||
"@types/mocha": "8.2.2", | ||
"@types/proxyquire": "1.3.28", | ||
"@types/sinon": "9.0.8", | ||
"@types/sinon": "10.0.2", | ||
"@types/sinon-chai": "3.2.5", | ||
"@typescript-eslint/eslint-plugin": "4.6.0", | ||
"@typescript-eslint/parser": "4.6.0", | ||
"chai": "4.2.0", | ||
"@typescript-eslint/eslint-plugin": "4.26.1", | ||
"@typescript-eslint/parser": "4.26.1", | ||
"chai": "4.3.4", | ||
"chai-as-promised": "7.1.1", | ||
"config": "3.3.2", | ||
"cross-env": "7.0.2", | ||
"docsify-cli": "4.4.1", | ||
"dotenv": "8.2.0", | ||
"config": "3.3.6", | ||
"cross-env": "7.0.3", | ||
"docsify-cli": "4.4.3", | ||
"dotenv": "10.0.0", | ||
"dotenv-expand": "5.1.0", | ||
"eslint": "7.12.1", | ||
"eslint-config-airbnb-base": "14.2.0", | ||
"eslint-config-prettier": "6.15.0", | ||
"eslint-import-resolver-typescript": "2.3.0", | ||
"eslint-plugin-import": "2.22.1", | ||
"eslint-plugin-prettier": "3.1.4", | ||
"eslint": "7.28.0", | ||
"eslint-config-airbnb-base": "14.2.1", | ||
"eslint-config-prettier": "7.2.0", | ||
"eslint-import-resolver-typescript": "2.4.0", | ||
"eslint-plugin-import": "2.23.4", | ||
"eslint-plugin-prettier": "3.4.0", | ||
"eslint-plugin-security": "1.4.0", | ||
"husky": "4.3.0", | ||
"json5": "2.1.3", | ||
"lint-staged": "10.5.1", | ||
"mocha": "8.2.0", | ||
"pg": "8.4.2", | ||
"prettier": "2.1.2", | ||
"husky": "6.0.0", | ||
"json5": "2.2.0", | ||
"lint-staged": "10.5.4", | ||
"mocha": "8.4.0", | ||
"pg": "8.6.0", | ||
"prettier": "2.3.1", | ||
"proxyquire": "2.1.3", | ||
"rimraf": "3.0.2", | ||
"sinon": "9.2.1", | ||
"sinon-chai": "3.5.0", | ||
"ts-node": "9.0.0", | ||
"typescript": "4.0.5" | ||
"sinon": "11.1.1", | ||
"sinon-chai": "3.7.0", | ||
"ts-node": "9.1.1", | ||
"typescript": "4.3.2" | ||
}, | ||
@@ -101,2 +99,3 @@ "peerDependencies": { | ||
"migrate": "node bin/node-pg-migrate", | ||
"format": "prettier --write .", | ||
"lint": "eslint . bin/* --ext .js,.ts", | ||
@@ -107,7 +106,2 @@ "lintfix": "npm run lint -- --fix && prettier --write *.json *.md docs/*.md", | ||
}, | ||
"husky": { | ||
"hooks": { | ||
"pre-commit": "lint-staged" | ||
} | ||
}, | ||
"lint-staged": { | ||
@@ -114,0 +108,0 @@ "*.js": [ |
@@ -103,3 +103,3 @@ # node-pg-migrate | ||
Copyright (c) 2016-2020 Salsita Software <jando@salsitasoft.com> | ||
Copyright (c) 2016-2021 Salsita Software <jando@salsitasoft.com> | ||
@@ -106,0 +106,0 @@ Copyright (c) 2014-2016 Theo Ephraim |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
+ Added @types/pg@8.11.10 (transitive)
+ Added decamelize@5.0.1 (transitive)
+ Added obuf@1.1.2 (transitive)
+ Added pg-numeric@1.0.2 (transitive)
+ Added pg-types@4.0.2 (transitive)
+ Added postgres-array@3.0.2 (transitive)
+ Added postgres-bytea@3.0.0 (transitive)
+ Added postgres-date@2.1.0 (transitive)
+ Added postgres-interval@3.0.0 (transitive)
+ Added postgres-range@1.1.4 (transitive)
+ Added yargs@16.2.0 (transitive)
- Removed lodash@~4.17.0
- Removed @types/pg@7.14.11 (transitive)
- Removed decamelize@4.0.0 (transitive)
- Removed lodash@4.17.21 (transitive)
- Removed yargs@16.1.1 (transitive)
Updated @types/pg@^8.0.0
Updated decamelize@^5.0.0
Updated yargs@~16.2.0