New Case Study:See how Anthropic automated 95% of dependency reviews with Socket.Learn More
Socket
Sign inDemoInstall
Socket

@ovotech/pg-sql-migrate

Package Overview
Dependencies
Maintainers
87
Versions
17
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@ovotech/pg-sql-migrate - npm Package Compare versions

Comparing version 1.0.5 to 2.0.0

CODEOWNERS

4

dist/index.d.ts
export { MigrationError } from './MigrationError';
export { MigrationsReadable } from './MigrationsReadable';
export { MigrationsWritable } from './MigrationsWritable';
export { MigrationsCollectTransform } from './MigrationsCollectTransform';
export { MigrationsLogTransform } from './MigrationsLogTransform';
export { migrate, executeMigrations, loadConfig } from './migrate';
export { Migration, Config, DEFAULT_CONFIG, CONFIG_DEFAULTS } from './types';
export { Migration, Config, DEFAULT_CONFIG_FILE, CONFIG_DEFAULTS } from './types';

@@ -9,2 +9,6 @@ "use strict";

exports.MigrationsWritable = MigrationsWritable_1.MigrationsWritable;
var MigrationsCollectTransform_1 = require("./MigrationsCollectTransform");
exports.MigrationsCollectTransform = MigrationsCollectTransform_1.MigrationsCollectTransform;
var MigrationsLogTransform_1 = require("./MigrationsLogTransform");
exports.MigrationsLogTransform = MigrationsLogTransform_1.MigrationsLogTransform;
var migrate_1 = require("./migrate");

@@ -15,4 +19,4 @@ exports.migrate = migrate_1.migrate;

var types_1 = require("./types");
exports.DEFAULT_CONFIG = types_1.DEFAULT_CONFIG;
exports.DEFAULT_CONFIG_FILE = types_1.DEFAULT_CONFIG_FILE;
exports.CONFIG_DEFAULTS = types_1.CONFIG_DEFAULTS;
//# sourceMappingURL=index.js.map
/// <reference types="node" />
import { ClientBase } from 'pg';
import { Config, Migration } from './types';
export declare const executeMigrations: (clientBase: ClientBase, table: string, dir: string) => Promise<Migration[]>;
import { Config, Migration, PGClient } from './types';
export declare const executeMigrations: (pg: PGClient, table: string, directory: string) => Promise<Migration[]>;
export declare const loadConfig: (file?: string, env?: NodeJS.ProcessEnv) => Config;
export declare const migrate: (config?: string | Config | undefined, env?: NodeJS.ProcessEnv) => Promise<Migration[]>;
export declare const migrate: (config?: string | Partial<Config> | undefined, env?: NodeJS.ProcessEnv) => Promise<Migration[]>;

@@ -7,27 +7,18 @@ "use strict";

const types_1 = require("./types");
exports.executeMigrations = (clientBase, table, dir) => {
return new Promise((resolve, reject) => {
const migrations = new _1.MigrationsReadable(clientBase, table, dir);
const sink = new _1.MigrationsWritable(clientBase, table);
const results = [];
sink.on('finish', () => resolve(results));
sink.on('error', reject);
migrations.on('error', reject);
migrations.on('data', data => results.push(data));
migrations.pipe(sink);
});
const stream_1 = require("stream");
const util_1 = require("util");
const MigrationsCollectTransform_1 = require("./MigrationsCollectTransform");
exports.executeMigrations = async (pg, table, directory) => {
const read = new _1.MigrationsReadable(pg, table, directory);
const sink = new _1.MigrationsWritable(pg, table);
const collect = new MigrationsCollectTransform_1.MigrationsCollectTransform();
await util_1.promisify(stream_1.pipeline)(read, collect, sink);
return collect.migrations;
};
exports.loadConfig = (file = types_1.DEFAULT_CONFIG, env = process.env) => {
return config_file_1.loadConfigFile({
file,
env,
defaults: types_1.CONFIG_DEFAULTS,
required: ['client'],
});
};
exports.loadConfig = (file = types_1.DEFAULT_CONFIG_FILE, env = process.env) => config_file_1.loadConfigFile({ file, env, defaults: types_1.CONFIG_DEFAULTS, required: ['client'] });
exports.migrate = async (config, env = process.env) => {
const { client, table, dir } = typeof config === 'object' ? config : exports.loadConfig(config);
const { client, table, directory } = typeof config === 'object' ? { ...types_1.CONFIG_DEFAULTS, ...config } : exports.loadConfig(config, env);
const pg = new pg_1.Client(client);
await pg.connect();
const results = await exports.executeMigrations(pg, table, dir);
const results = await exports.executeMigrations(pg, table, directory);
await pg.end();

@@ -34,0 +25,0 @@ return results;

/// <reference types="node" />
import { ClientBase } from 'pg';
import { Readable } from 'stream';
import { Migration } from './types';
import { PGClient } from './types';
export declare const nameParts: (name: string) => string[];
export declare class MigrationsReadable extends Readable {
private current;
private migrationFiles?;
private pg;
private table;
private dir;
private current;
private migrationFiles?;
constructor(pg: ClientBase, table: string, dir: string);
initialize(): Promise<void>;
next(): Promise<Migration | null>;
private directory;
constructor(pg: PGClient, table: string, directory: string);
private initialize;
private next;
_read(): Promise<void>;

@@ -16,0 +15,0 @@ private initState;

@@ -8,11 +8,11 @@ "use strict";

class MigrationsReadable extends stream_1.Readable {
constructor(pg, table, dir) {
constructor(pg, table, directory) {
super({ objectMode: true });
this.current = 0;
this.pg = pg;
this.table = table;
this.dir = dir;
this.current = 0;
this.directory = directory;
}
async initialize() {
const migrationFiles = fs_1.readdirSync(this.dir).filter(file => file.endsWith('.pgsql'));
const migrationFiles = fs_1.readdirSync(this.directory).filter(file => file.endsWith('.pgsql'));
await this.initState();

@@ -29,4 +29,5 @@ const completed = await this.loadState();

const [id, name] = exports.nameParts(file);
const content = fs_1.readFileSync(path_1.join(this.dir, file)).toString();
return { id, name, content };
const content = fs_1.readFileSync(path_1.join(this.directory, file)).toString();
const migration = { id, name, content };
return migration;
}

@@ -33,0 +34,0 @@ else {

/// <reference types="node" />
import { ClientBase } from 'pg';
import { Writable } from 'stream';
import { Migration } from './types';
import { Migration, PGClient } from './types';
export declare class MigrationsWritable extends Writable {
private pg;
private table;
constructor(pg: ClientBase, table: string);
constructor(pg: PGClient, table: string);
_write(migration: Migration, encoding: string, callback: (error?: Error | null) => void): Promise<void>;
}

@@ -13,8 +13,15 @@ "use strict";

try {
await this.pg.query('BEGIN');
await this.pg.query(migration.content);
await this.pg.query(`INSERT INTO ${this.table} VALUES ($1)`, [migration.id]);
await this.pg.query('COMMIT');
callback(null);
}
catch (error) {
callback(new MigrationError_1.MigrationError(error.message, migration));
try {
await this.pg.query('ROLLBACK');
}
finally {
callback(new MigrationError_1.MigrationError(error.message, migration));
}
}

@@ -21,0 +28,0 @@ }

@@ -1,5 +0,5 @@

import { ClientConfig } from 'pg';
import { ClientConfig, ClientBase } from 'pg';
export interface Config {
client: ClientConfig | string;
dir: string;
directory: string;
table: string;

@@ -12,6 +12,9 @@ }

}
export declare const DEFAULT_CONFIG = "pg-sql-migrate.config.json";
export interface PGClient {
query: ClientBase['query'];
}
export declare const DEFAULT_CONFIG_FILE = "pg-sql-migrate.config.json";
export declare const CONFIG_DEFAULTS: {
dir: string;
directory: string;
table: string;
};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DEFAULT_CONFIG = 'pg-sql-migrate.config.json';
exports.DEFAULT_CONFIG_FILE = 'pg-sql-migrate.config.json';
exports.CONFIG_DEFAULTS = {
dir: 'migrations',
directory: 'migrations',
table: 'migrations',
};
//# sourceMappingURL=types.js.map
{
"name": "@ovotech/pg-sql-migrate",
"description": "run migration scripts incrementally using postgres",
"version": "1.0.5",
"repository": "git@github.com:ovotech/pg-sql-migrate.git",
"version": "2.0.0",
"main": "dist/index.js",

@@ -11,11 +12,19 @@ "source": "src/index.ts",

"scripts": {
"test": "jest --runInBand",
"lint-prettier": "prettier --list-different {src,test}/**/*.ts",
"lint-tslint": "tslint --config tslint.json '{src,test}/**/*.ts'",
"lint": "yarn lint-prettier && yarn lint-tslint",
"build": "tsc --outDir dist --declaration",
"pg-sql-migrate": "ts-node src/cli.ts"
"test": "jest",
"lint:prettier": "prettier --list-different {src,test}/**/*.ts",
"lint:eslint": "eslint '{src,test}/**/*.ts'",
"lint": "yarn lint:prettier && yarn lint:eslint",
"build": "tsc --declaration",
"pg-migrate": "ts-node src/cli/pg-migrate.ts",
"pg-migrate-create": "ts-node src/cli/pg-migrate-create.ts",
"pg-migrate-execute": "ts-node src/cli/pg-migrate-execute.ts"
},
"bin": {
"pg-migrate": "./scripts/pg-migrate",
"pg-migrate-execute": "./scripts/pg-migrate-execute",
"pg-migrate-create": "./scripts/pg-migrate-create"
},
"dependencies": {
"@ovotech/config-file": "^1.0.0"
"@ovotech/config-file": "^1.0.0",
"commander": "^2.20.0"
},

@@ -26,21 +35,22 @@ "peerDependencies": {

"devDependencies": {
"@types/jest": "^24.0.13",
"@types/node": "^11.11.4",
"@types/jest": "^24.0.11",
"@types/node": "^12.6.9",
"@types/pg": "^7.4.14",
"@types/uuid": "^3.4.4",
"@types/yargs": "^13.0.0",
"jest": "^24.8.0",
"pg": "^7.11.0",
"prettier": "^1.17.1",
"ts-jest": "^24.0.2",
"ts-node": "^8.2.0",
"tslint": "^5.17.0",
"tslint-config-prettier": "^1.18.0",
"typescript": "^3.5.1",
"@typescript-eslint/eslint-plugin": "^1.13.0",
"@typescript-eslint/parser": "^1.13.0",
"eslint": "^6.1.0",
"eslint-config-prettier": "^6.0.0",
"jest": "^24.5.0",
"jest-junit": "^7.0.0",
"pg": "^7.9.0",
"prettier": "^1.16.4",
"ts-jest": "^24.0.0",
"ts-node": "^8.0.3",
"typescript": "^3.5.3",
"uuid": "^3.3.2"
},
"jest": {
"preset": "../../jest-preset.json"
},
"gitHead": "08920fa2c6cc9a25f01f9de4c1b2283291d93b1b"
"preset": "./jest.config.json"
}
}

@@ -15,14 +15,36 @@ # Postgres migration tool with plain sql

{
"client": "postgresql://postgres:dev-pass@0.0.0.0:5432/postgres"
"client": "postgresql://postgres:dev-pass@0.0.0.0:5432/postgres",
"directory": "migrations"
"table" "migrations"
}
```
and place some migration files, with .pgsql extension, into your migrations folder. Like
The default value for both the "directory" and "table" configuration options is `migrations`, but you can override that if you need to.
Instead of a string you can use an object. This is passed directly to pg https://node-postgres.com/features/connecting
```json
{
"client": {
"user": "postgres",
"password": "dev-pass",
"host": "0.0.0.0",
"database": "postgres",
"port": 5432
}
}
```
migrations
|-<timestamp>_somename-1.pgsql
|-<timestamp>_somename-2.pgsql
To create new migrations in the designated directory you can run:
```bash
yarn pg-migrate create my_migration
```
This will create a file `migrations/<timestamp>_my_migration.pgsql` that you can place raw sql into. After that, you can run the migration(s) by calling
```bash
yarn pg-migrate execute
```
Then in your code you can:

@@ -36,9 +58,27 @@

You can choose a different location for the config file, or to just input its contents directly:
## Environment variables
In your config file you can use environment variables.
For example, if you have the env var `PG_USER_PASS` setup, you can access it with:
```json
{
"client": "postgresql://postgres:${PG_USER_PASS}@0.0.0.0:5432/postgres",
"directory": "migrations"
"table" "migrations"
}
```
## Using the library
You can choose a different location for the config file, or to just input its contents directly:
```typescript
import { migrate } from '@ovotech/pg-sql-migrate';
const results = await migrate('my_config.json');
const results = await migrate();
const results = await migrate('custom-config.json');
const results = await migrate({

@@ -49,3 +89,3 @@ client: 'postgresql://postgres:dev-pass@0.0.0.0:5432/postgres',

// Custom directory for migration files
dir: 'migrations_dir',
directory: 'migrations_dir',
});

@@ -70,3 +110,3 @@ ```

migrations.pipe(sink);
migrations.pipe(sink).on('finish', () => console.log('Finished'));
```

@@ -84,3 +124,3 @@

Style is maintained with prettier and tslint
Style is maintained with prettier and eslint

@@ -93,3 +133,3 @@ ```

Deployment is performed by lerna automatically on merge / push to master, but you'll need to bump the package version numbers yourself. Only updated packages with newer versions will be pushed to the npm registry.
Deployment is performed by circleci automatically on merge / push to master, but you'll need to bump the package version numbers yourself.

@@ -96,0 +136,0 @@ ## Contributing

export { MigrationError } from './MigrationError';
export { MigrationsReadable } from './MigrationsReadable';
export { MigrationsWritable } from './MigrationsWritable';
export { MigrationsCollectTransform } from './MigrationsCollectTransform';
export { MigrationsLogTransform } from './MigrationsLogTransform';
export { migrate, executeMigrations, loadConfig } from './migrate';
export { Migration, Config, DEFAULT_CONFIG, CONFIG_DEFAULTS } from './types';
export { Migration, Config, DEFAULT_CONFIG_FILE, CONFIG_DEFAULTS } from './types';
import { loadConfigFile } from '@ovotech/config-file';
import { Client, ClientBase } from 'pg';
import { Client } from 'pg';
import { MigrationsReadable, MigrationsWritable } from './';
import { Config, CONFIG_DEFAULTS, DEFAULT_CONFIG, Migration } from './types';
import { Config, CONFIG_DEFAULTS, DEFAULT_CONFIG_FILE, Migration, PGClient } from './types';
import { pipeline } from 'stream';
import { promisify } from 'util';
import { MigrationsCollectTransform } from './MigrationsCollectTransform';
export const executeMigrations = (clientBase: ClientBase, table: string, dir: string): Promise<Migration[]> => {
return new Promise((resolve, reject) => {
const migrations = new MigrationsReadable(clientBase, table, dir);
const sink = new MigrationsWritable(clientBase, table);
const results: Migration[] = [];
export const executeMigrations = async (
pg: PGClient,
table: string,
directory: string,
): Promise<Migration[]> => {
const read = new MigrationsReadable(pg, table, directory);
const sink = new MigrationsWritable(pg, table);
const collect = new MigrationsCollectTransform();
sink.on('finish', () => resolve(results));
sink.on('error', reject);
migrations.on('error', reject);
migrations.on('data', data => results.push(data));
migrations.pipe(sink);
});
await promisify(pipeline)(read, collect, sink);
return collect.migrations;
};
/**
 * Loads and validates the migration configuration from a JSON file.
 *
 * @param file Path to the config file; defaults to DEFAULT_CONFIG_FILE.
 * @param env  Environment variables made available to the config loader
 *             (used for `${VAR}` substitution inside the file).
 * @returns The resolved Config with CONFIG_DEFAULTS filled in; the `client`
 *          field is required and loading fails without it.
 */
export const loadConfig = (file = DEFAULT_CONFIG_FILE, env = process.env): Config =>
  loadConfigFile<Config>({ file, env, defaults: CONFIG_DEFAULTS, required: ['client'] });
export const migrate = async (config?: Config | string, env = process.env) => {
const { client, table, dir } = typeof config === 'object' ? config : loadConfig(config);
export const migrate = async (
config?: Partial<Config> | string,
env = process.env,
): Promise<Migration[]> => {
const { client, table, directory } =
typeof config === 'object' ? { ...CONFIG_DEFAULTS, ...config } : loadConfig(config, env);
const pg = new Client(client);
await pg.connect();
const results = await executeMigrations(pg, table, dir);
const results = await executeMigrations(pg, table, directory);
await pg.end();
return results;
};
import { Migration } from './types';
export class MigrationError extends Error {
constructor(message: string | undefined, public migration: Migration) {
public migration: Migration;
public constructor(message: string | undefined, migration: Migration) {
super(message);
this.migration = migration;
}
}
import { readdirSync, readFileSync } from 'fs';
import { join } from 'path';
import { ClientBase } from 'pg';
import { QueryResult } from 'pg';
import { Readable } from 'stream';
import { Migration } from './types';
import { Migration, PGClient } from './types';
/**
 * Splits a migration file name into [id, rest] on the first underscore,
 * e.g. "20190101_create.pgsql" -> ["20190101", "create.pgsql"].
 * NOTE(review): the limit of 2 drops anything after a second underscore from
 * the name part ("1_a_b" -> ["1", "a"]) — confirm this is intended.
 */
export const nameParts = (name: string): string[] => name.split('_', 2);

@@ -12,9 +12,15 @@ export class MigrationsReadable extends Readable {

private migrationFiles?: string[];
private pg: PGClient;
private table: string;
private directory: string;
constructor(private pg: ClientBase, private table: string, private dir: string) {
public constructor(pg: PGClient, table: string, directory: string) {
super({ objectMode: true });
this.pg = pg;
this.table = table;
this.directory = directory;
}
async initialize() {
const migrationFiles = readdirSync(this.dir).filter(file => file.endsWith('.pgsql'));
private async initialize(): Promise<void> {
const migrationFiles = readdirSync(this.directory).filter(file => file.endsWith('.pgsql'));
await this.initState();

@@ -26,3 +32,3 @@ const completed = await this.loadState();

async next() {
private async next(): Promise<Migration | null> {
if (!this.migrationFiles) {

@@ -35,4 +41,5 @@ await this.initialize();

const [id, name] = nameParts(file);
const content = readFileSync(join(this.dir, file)).toString();
return { id, name, content } as Migration;
const content = readFileSync(join(this.directory, file)).toString();
const migration: Migration = { id, name, content };
return migration;
} else {

@@ -43,13 +50,13 @@ return null;

async _read() {
public async _read(): Promise<void> {
this.push(await this.next());
}
private async initState() {
private async initState(): Promise<QueryResult> {
return await this.pg.query(`CREATE TABLE IF NOT EXISTS ${this.table} (id VARCHAR PRIMARY KEY)`);
}
private async loadState() {
private async loadState(): Promise<string[]> {
return (await this.pg.query(`SELECT id FROM ${this.table}`)).rows.map(row => row.id);
}
}

@@ -1,20 +0,33 @@

import { ClientBase } from 'pg';
import { Writable } from 'stream';
import { MigrationError } from './MigrationError';
import { Migration } from './types';
import { Migration, PGClient } from './types';
export class MigrationsWritable extends Writable {
constructor(private pg: ClientBase, private table: string) {
private pg: PGClient;
private table: string;
public constructor(pg: PGClient, table: string) {
super({ objectMode: true });
this.pg = pg;
this.table = table;
}
async _write(migration: Migration, encoding: string, callback: (error?: Error | null) => void) {
public async _write(
migration: Migration,
encoding: string,
callback: (error?: Error | null) => void,
): Promise<void> {
try {
await this.pg.query('BEGIN');
await this.pg.query(migration.content);
await this.pg.query(`INSERT INTO ${this.table} VALUES ($1)`, [migration.id]);
await this.pg.query('COMMIT');
callback(null);
} catch (error) {
callback(new MigrationError(error.message, migration));
try {
await this.pg.query('ROLLBACK');
} finally {
callback(new MigrationError(error.message, migration));
}
}
}
}

@@ -1,6 +0,6 @@

import { ClientConfig } from 'pg';
import { ClientConfig, ClientBase } from 'pg';
export interface Config {
client: ClientConfig | string;
dir: string;
directory: string;
table: string;

@@ -15,7 +15,11 @@ }

/**
 * Minimal structural interface for a postgres client: anything exposing pg's
 * `query` method (Client, PoolClient, ClientBase) can run migrations.
 */
export interface PGClient {
  query: ClientBase['query'];
}

/** Config file name used when `migrate()` is called without arguments. */
export const DEFAULT_CONFIG_FILE = 'pg-sql-migrate.config.json';

/** Fallback values merged under any user-supplied configuration. */
export const CONFIG_DEFAULTS = {
  directory: 'migrations',
  table: 'migrations',
};

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc