Comparing version
360
lib/cli.js
@@ -1,285 +0,81 @@ | ||
// Generated by CoffeeScript 2.5.0 | ||
(function() { | ||
var Migrator, MigratorCLI, _, mapEnv, pkg, program, rl; | ||
pkg = require('../package.json'); | ||
_ = require('lodash'); | ||
rl = require('readline'); | ||
program = require('commander'); | ||
mapEnv = require('../env.json'); | ||
Migrator = require('./migrator'); | ||
MigratorCLI = (function() { | ||
class MigratorCLI { | ||
constructor(args, env) { | ||
this.actionFound = false; | ||
this.mapEnv = _.clone(mapEnv); | ||
this.env = env; | ||
this._initProgram(args); | ||
if (!this.actionFound) { | ||
this.migrate(); | ||
} | ||
} | ||
_initProgram(args) { | ||
this.program = program; | ||
program.option('--host <host>', 'Database host (env: MIGRATOR_DB_HOST)').option('--port <number>', 'Database port', Number).option('--user <username>', 'Database user').option('--password <password>', 'Database password').option('--database <base_name>', 'Database schema name').option('--schema-table <name>', 'Migrator table name').option('--schema-name <name>', 'Database schema name').option('-M, --migrations <path>', 'Path to migrations files').option('-L, --list-name <name>', 'Migrations order list file name'); | ||
program.command('run').description('Run all migrations if needed, without asking').action(({parent}) => { | ||
this.actionFound = true; | ||
return this.migrate(parent); | ||
import path from 'node:path'; | ||
import fs from 'node:fs'; | ||
import { fileURLToPath } from 'node:url'; | ||
import { Command, Option } from '@commander-js/extra-typings'; | ||
import Migrator from './index.js'; | ||
import envConfig from './config.js'; | ||
const __dirname = path.dirname(fileURLToPath(import.meta.url)); | ||
const pkg = JSON.parse(fs.readFileSync(path.resolve(__dirname, '../package.json'), 'utf8')); | ||
// CLI definition. Every connection option can also be supplied through the
// MIGRATOR_* environment variable declared on it; CLI flags take precedence.
const program = new Command()
    .version(pkg.version, '-v, --version', 'output the current version')
    .description(pkg.description)
    // Safety switch: without --exec the tool only reports pending actions.
    .addOption(new Option('--exec', 'Execute migration. You must use this flag for run migrations'))
    .addOption(new Option('--host <host>', 'Database hostname')
        .env('MIGRATOR_DB_HOST')
        .default('localhost'))
    .addOption(new Option('--port <port>', 'Database port')
        .env('MIGRATOR_DB_PORT')
        .default(5432)
        .argParser(parseInt))
    .addOption(new Option('--user <username>', 'Database user')
        .env('MIGRATOR_DB_USER'))
    .addOption(new Option('--password <password>', 'Database password')
        .env('MIGRATOR_DB_PASSWORD'))
    .addOption(new Option('--database <name>', 'Database name')
        .env('MIGRATOR_DB_DATABASE'))
    .addOption(new Option('--schema-table <name>', 'Migrator sync table name')
        .env('MIGRATOR_TABLE_NAME')
        .default('schema_versions'))
    .addOption(new Option('--schema-name <name>', 'Database schema name')
        .env('MIGRATOR_DB_SCHEMA_NAME')
        .default('public'))
    .addOption(new Option('--path <path>', 'Path to migrations files dir')
        // NOTE(review): 'MITRATOR_FILES_PATH' looks like a typo for
        // 'MIGRATOR_FILES_PATH'. Renaming is a breaking change for existing
        // deployments — confirm before fixing.
        .env('MITRATOR_FILES_PATH'))
    .parse(process.argv);
// Parsed CLI flags (commander has already merged env-var values into these).
const cliConfig = program.opts();
// Start from the environment-file configuration, then let CLI flags win.
// Overrides apply only for truthy flag values, as before.
const config = {
    db: { ...envConfig.db },
    migrations: { ...envConfig.migrations },
};
for (const key of ['host', 'port', 'user', 'password', 'database']) {
    if (cliConfig[key]) {
        config.db[key] = cliConfig[key];
    }
}
const migrationOverrides = [
    ['schemaTable', 'table'],
    ['schemaName', 'schema'],
    ['path', 'path'],
];
for (const [flag, target] of migrationOverrides) {
    if (cliConfig[flag]) {
        config.migrations[target] = cliConfig[flag];
    }
}
const migragor = new Migrator(config); | ||
/**
 * Upper-case the first character of a string, leaving the rest untouched.
 * @param {string} text
 * @returns {string} capitalized copy (empty string stays empty)
 */
function capitalize(text) {
    return text.charAt(0).toUpperCase() + text.slice(1);
}
(async () => { | ||
if (cliConfig.exec) { | ||
await migragor.migrate(action => { | ||
// eslint-disable-next-line no-console | ||
console.log(`${capitalize(action.action)} migration "${action.name}":\t${action.success ? 'done' : 'failed'}`); | ||
}); | ||
program.command('interactive').description('Run migrations in interactive mode').action(({parent}) => { | ||
this.actionFound = true; | ||
return this.interactive(parent); | ||
}); | ||
program.command('inspect').description('List all migrations with states').action(({parent}) => { | ||
this.actionFound = true; | ||
return this.inspect(parent); | ||
}); | ||
program.command('remove <migration>').description('Remove migration').action((name, {parent}) => { | ||
this.actionFound = true; | ||
return this.remove(parent, name); | ||
}); | ||
return program.version(pkg.version).parse(args); | ||
} | ||
      // Legacy (CoffeeScript-generated) option resolver. Merges parameters
      // from environment variables (keyed by this.mapEnv) first, then CLI
      // flags (keyed by this.mapParams) — so a CLI flag overrides the
      // corresponding environment variable. Only truthy values are copied.
      getOptions(program = this.program, env = this.env) {
        var name, param, params, ref, ref1;
        params = {};
        ref = this.mapEnv;
        for (param in ref) {
          name = ref[param];
          if (env[param]) {
            params[name] = env[param];
          }
        }
        ref1 = this.mapParams;
        for (param in ref1) {
          name = ref1[param];
          if (program[param]) {
            params[name] = program[param];
          }
        }
        return params;
      }
migrate(program = this.program) { | ||
var e, migrator, options, progress; | ||
options = this.getOptions(); | ||
console.log("Migrator started..."); | ||
try { | ||
migrator = new Migrator(options); | ||
} catch (error) { | ||
e = error; | ||
return this._error(e); | ||
} | ||
progress = (action, name) => { | ||
var ops; | ||
ops = this.actions[action]; | ||
if (ops) { | ||
return console.log(`${ops.txt} migration \"${name}\":\tdone`); | ||
} else { | ||
return console.log(`Migration \"${name}\":\t${action}`); | ||
} | ||
}; | ||
return migrator.migrate(progress).then(function(migrations) { | ||
if (!migrations.map || !migrations.map.length > 0) { | ||
console.log('No migrations found'); | ||
} | ||
console.log("\nMigrator ended successful"); | ||
migrator.destroy(); | ||
return process.exit(0); | ||
}).catch((e) => { | ||
this._error(e); | ||
migrator.destroy(); | ||
return process.exit(1); | ||
}); | ||
} | ||
interactive(program = this.program) { | ||
var asker, e, migrator, options; | ||
options = this.getOptions(); | ||
console.log("Interactive migrator started..."); | ||
try { | ||
migrator = new Migrator(options); | ||
} catch (error) { | ||
e = error; | ||
return this._error(e); | ||
} | ||
asker = _.bind(this._ask, this); | ||
return migrator.inspect().then(function(migrations) { | ||
var ask, loader; | ||
if (migrations.map && migrations.map.length > 0) { | ||
ask = rl.createInterface({ | ||
input: process.stdin, | ||
output: process.stdout | ||
}); | ||
loader = Promise.resolve(); | ||
_.forEach(migrations.map, function(migration) { | ||
return loader = loader.then(function() { | ||
return asker(migrator, ask, migration); | ||
}); | ||
}); | ||
loader = loader.then(function() { | ||
return ask.close(); | ||
}); | ||
return loader; | ||
} else { | ||
return console.log('No migrations found'); | ||
} | ||
}).then(function() { | ||
console.log("\nMigrator ended successful"); | ||
migrator.destroy(); | ||
return process.exit(0); | ||
}).catch((e) => { | ||
this._error(e); | ||
migrator.destroy(); | ||
return process.exit(1); | ||
}); | ||
} | ||
inspect(program = this.program) { | ||
var e, migrator, options; | ||
options = this.getOptions(); | ||
console.log("Inspection started..."); | ||
try { | ||
migrator = new Migrator(options); | ||
} catch (error) { | ||
e = error; | ||
return this._error(e); | ||
} | ||
return migrator.inspect().then(function(migrations) { | ||
var i, len, migration, ref; | ||
if (migrations.map && migrations.map.length > 0) { | ||
console.log("\nMigrations list:"); | ||
ref = migrations.map; | ||
for (i = 0, len = ref.length; i < len; i++) { | ||
migration = ref[i]; | ||
console.log(`Migration \"${migration.name}\":\t${migration.state}`); | ||
} | ||
else { | ||
await migragor.inspect().then(actions => { | ||
for (const action of actions) { | ||
// eslint-disable-next-line no-console | ||
console.log(`${capitalize(action.action)} migration "${action.name}"`); | ||
} | ||
} else { | ||
console.log('No migrations found'); | ||
} | ||
console.log("\nMigrator ended successful"); | ||
migrator.destroy(); | ||
return process.exit(0); | ||
}).catch((e) => { | ||
this._error(e); | ||
migrator.destroy(); | ||
return process.exit(1); | ||
}); | ||
} | ||
remove(program = this.program, name) { | ||
var e, migrator, options; | ||
options = this.getOptions(); | ||
console.log(`Remove migration started for \"${name}\"...`); | ||
try { | ||
migrator = new Migrator(options); | ||
} catch (error) { | ||
e = error; | ||
return this._error(e); | ||
} | ||
return migrator.inspect().then(function(migrations) { | ||
var migration; | ||
migration = _.find(migrations.table, {name}); | ||
if (migration) { | ||
return migrator.remove(name, true).then(function() { | ||
return console.log(`\nMigration \"${name}\" removed`); | ||
}); | ||
} else { | ||
return console.log(`\nMigration \"${name}\" not found`); | ||
} | ||
}).then(function() { | ||
console.log("\nMigrator ended successful"); | ||
migrator.destroy(); | ||
return process.exit(0); | ||
}).catch((e) => { | ||
this._error(e); | ||
migrator.destroy(); | ||
return process.exit(1); | ||
}); | ||
} | ||
_ask(migrator, ask, migration) { | ||
var ops; | ||
ops = this.actions[migration.state]; | ||
if (!(ops && ops.command)) { | ||
return console.log(`Skip migration \"${migration.name}\"`); | ||
} | ||
return new Promise(function(resolve, reject) { | ||
return ask.question(`${ops.txt} migration \"${migration.name}\" [yes/no] (no): `, function(accept) { | ||
if (accept !== 'yes') { | ||
console.log(`Skip migration \"${migration.name}\"`); | ||
return resolve(); | ||
} | ||
return migrator[ops.command](migration.name, true).then(function() { | ||
console.log(`${ops.txt} migration \"${migration.name}\":\tdone`); | ||
return resolve(); | ||
}).catch(function(e) { | ||
console.error(`${ops.txt} migration \"${migration.name}\":\tfailed`); | ||
return reject(e); | ||
}); | ||
}); | ||
}); | ||
} | ||
      // Legacy (CoffeeScript-generated) error reporter. Structured migrator
      // errors ({code, name?, err?}) are printed field by field; anything
      // else is dumped raw. All output goes to stderr.
      _error(e) {
        console.error('\n\n');
        if (e.code) {
          console.error(`Error: ${e.code}`);
          if (e.name) {
            console.error(`Migration: ${e.name}`);
          }
          if (e.err) {
            return console.error(e.err);
          }
        } else {
          return console.error(e);
        }
      }
}; | ||
MigratorCLI.prototype.mapParams = { | ||
'host': 'host', | ||
'port': 'port', | ||
'user': 'user', | ||
'password': 'password', | ||
'database': 'database', | ||
'schemaTable': 'schema_table', | ||
'schemaName': 'schema_name', | ||
'migrations': 'migrations', | ||
'listName': 'migrations_list' | ||
}; | ||
MigratorCLI.prototype.actions = { | ||
added: { | ||
txt: 'Add', | ||
command: 'add' | ||
}, | ||
changed: { | ||
txt: 'Change', | ||
command: 'change' | ||
}, | ||
removed: { | ||
txt: 'Remove', | ||
command: 'remove' | ||
}, | ||
executed: { | ||
txt: "Skip" | ||
} | ||
}; | ||
return MigratorCLI; | ||
}).call(this); | ||
new MigratorCLI(process.argv, process.env); | ||
}).call(this); | ||
} | ||
process.exit(0); | ||
})(); | ||
//# sourceMappingURL=cli.js.map |
131
lib/db.js
@@ -1,60 +0,81 @@ | ||
// Generated by CoffeeScript 2.5.0 | ||
(function() { | ||
var DB, _, pg; | ||
_ = require('lodash'); | ||
pg = require('pg'); | ||
DB = class DB { | ||
defaults() { | ||
return { | ||
host: 'localhost', | ||
port: 5432, | ||
user: null, | ||
password: null, | ||
database: null | ||
}; | ||
import pg from 'pg'; | ||
export class DB { | ||
    /**
     * @param {object} config - pg.Pool options, or an already-connected pg
     *   client/pool (distinguished in initialize()).
     * @param {string} [name='dev'] - passed to initialize() as the
     *   Postgres application_name.
     */
    constructor(config, name = 'dev') {
        // Kept on the instance: transaction() instantiates this.DBTransaction
        // to wrap a checked-out client.
        this.DBTransaction = DBTransaction;
        this.initialize(config, name);
    }
constructor(user_config) { | ||
var config; | ||
config = _.assign(this.defaults(), user_config); | ||
if (!(config.host && config.port && config.database)) { | ||
throw { | ||
code: 'no_required_db_params', | ||
err: config | ||
get pool() { | ||
return this._conn || this._pool; | ||
} | ||
    /**
     * Fallback connection settings (local-development defaults).
     * Every key can be overridden by the config passed to initialize().
     */
    get defaults() {
        return {
            host: 'localhost',
            port: 5432,
            user: 'postgres',
            password: 'postgres',
            database: 'postgres',
        };
    }
this._pool = new pg.Pool(config); | ||
} | ||
q(query, values = [], callback) { | ||
if (_.isFunction(values)) { | ||
callback = values; | ||
values = []; | ||
} | ||
if (!_.isArray(values)) { | ||
values = [values]; | ||
} | ||
return new Promise((resolve, reject) => { | ||
return this._pool.query(query, values, function(err = null, result) { | ||
if (err) { | ||
reject(err); | ||
} else { | ||
resolve(result); | ||
} | ||
return typeof callback === "function" ? callback(err, result) : void 0; | ||
}); | ||
}); | ||
initialize(config, application_name) { | ||
if ('on' in config) { | ||
this._conn = config; | ||
} | ||
else { | ||
this._pool = new pg.Pool({ | ||
...this.defaults, | ||
application_name, | ||
...config | ||
}); | ||
} | ||
} | ||
destroy() { | ||
return this._pool.end(); | ||
async q(query, values) { | ||
if (typeof query != 'string') { | ||
values = query.values || values; | ||
query = query.query; | ||
} | ||
const vals = Array.isArray(values) | ||
? values | ||
: []; | ||
return this.pool.query(query, vals); | ||
} | ||
}; | ||
module.exports = DB; | ||
}).call(this); | ||
async r(query, values) { | ||
const { rows } = typeof query === 'string' | ||
? await this.q(query, values) | ||
: await this.q(query); | ||
return rows; | ||
} | ||
async transaction(runner) { | ||
const client = await this._pool.connect(); | ||
try { | ||
await client.query('BEGIN'); | ||
const transaction = new this.DBTransaction(client); | ||
const result = await runner(transaction); | ||
await client.query('COMMIT'); | ||
client.release(); | ||
return result; | ||
} | ||
catch (error) { | ||
await client.query('ROLLBACK'); | ||
client.release(); | ||
throw error; | ||
} | ||
} | ||
} | ||
/**
 * A DB bound to a single client inside an open transaction.
 * Nested transactions are not supported, so transaction() always rejects.
 */
export class DBTransaction extends DB {
    async transaction() {
        throw new Error('Transaction into transaction is not possible');
    }
}
/**
 * Template tag building a parameterized query:
 *   sql`SELECT 1 FROM t WHERE id = ${id}`
 * yields { query: 'SELECT 1 FROM t WHERE id = $1', values: [id] }.
 * @param {TemplateStringsArray} queryParts
 * @param {...*} vars - interpolated values, bound as $1..$n
 * @returns {{query: string, values: Array<*>}}
 */
export function sql(queryParts, ...vars) {
    let query = '';
    const values = [];
    for (let index = 0; index < queryParts.length; index++) {
        query += queryParts[index];
        if (index < vars.length) {
            // Bug fix: push the interpolated value, not the values array
            // itself (push returns the new length, i.e. the 1-based $n).
            const num = values.push(vars[index]);
            query += '$' + num;
        }
    }
    return { query, values };
}
//# sourceMappingURL=db.js.map |
@@ -1,385 +0,125 @@ | ||
// Generated by CoffeeScript 2.5.0 | ||
(function() { | ||
var DB, Initer, Migrator, _; | ||
_ = require('lodash'); | ||
DB = require('./db'); | ||
Initer = require('./initer'); | ||
Migrator = class Migrator { | ||
defaults() { | ||
return { | ||
host: null, | ||
port: null, | ||
user: null, | ||
password: null, | ||
database: null, | ||
migrations: './migrations', | ||
migrations_list: 'order', | ||
schema_table: 'schema_versions', | ||
schema_name: 'public', | ||
actions: { | ||
do: 'do', | ||
undo: 'undo' | ||
} | ||
}; | ||
/**
 * Possible per-migration actions, as produced by Migrator.inspect().
 * (Plain-object replacement for the compiled TypeScript enum IIFE;
 * same exported binding, same keys, same string values.)
 */
export var Action = {
    Skip: 'skip',
    Shrink: 'shrink',
    Remove: 'remove',
    Change: 'change',
    Add: 'add',
};
export class Migrator { | ||
    /**
     * @param {DB} db - database facade used to run migration SQL.
     * @param state - migration state provider; this class reads its
     *   isReady promise, tableByName/migrationsByName lookups and
     *   tableName — confirm the full shape against its definition.
     */
    constructor(db, state) {
        this.db = db;
        this.state = state;
    }
constructor(config) { | ||
var params; | ||
this.params = params = _.assign(this.defaults(), config); | ||
if (!(this.params.host && this.params.port && this.params.database && this.params.migrations)) { | ||
throw { | ||
code: 'no_required_params', | ||
err: this.params | ||
}; | ||
} | ||
this.db = new DB(this.params); | ||
this.initer = new Initer(this.params, this.db); | ||
} | ||
destroy() { | ||
this.db.destroy(); | ||
return this.data = null; | ||
} | ||
// Собирает карту изменений | ||
inspect(callback) { | ||
var migrator; | ||
migrator = this; | ||
return Promise.resolve().then(() => { | ||
return this.initer.start(); | ||
}).then(function(data) { | ||
var inited, map, shrink; | ||
map = []; | ||
inited = false; | ||
shrink = false; | ||
_.forEach(data.order.reverse(), function(name) { | ||
var file, state, table; | ||
if (!name || name[0] === '#') { | ||
return true; | ||
} | ||
if (name === '~inited~') { | ||
inited = true; | ||
return true; | ||
} | ||
if (name === '~shrink~') { | ||
shrink = true; | ||
return true; | ||
} | ||
state = 'executed'; | ||
file = data.files[name]; | ||
table = data.table[name]; | ||
if (inited) { | ||
state = 'inited'; | ||
} else if (shrink) { | ||
state = 'shrink'; | ||
} else if (!table) { | ||
state = 'added'; | ||
} else if (!file) { | ||
throw 'no_file'; | ||
} else if (file.do_hash !== table.do_hash) { | ||
state = 'changed'; | ||
} | ||
return map.unshift({name, state}); | ||
}); | ||
_.forEach(_.sortBy(data.table, 'exec_date'), function(row) { | ||
if (!_.includes(data.order, row.name)) { | ||
return map.unshift({ | ||
name: row.name, | ||
state: 'removed' | ||
async inspect() { | ||
await this.state.isReady; | ||
const table = this.state.table.map(item => item.name); | ||
const migrations = this.state.migrations.map(item => item.name); | ||
const state = []; | ||
if (migrations.length === 0) { | ||
table.forEach(name => { | ||
state.push({ name, action: Action.Remove }); | ||
}); | ||
} | ||
}); | ||
data.map = map; | ||
migrator.data = data; | ||
if (typeof callback === "function") { | ||
callback(null, data); | ||
} | ||
return data; | ||
}).catch(function(e) { | ||
if (typeof callback === "function") { | ||
callback(e); | ||
const shrinkIndex = table.indexOf(migrations[0]); | ||
if (shrinkIndex >= 0) | ||
table.splice(0, shrinkIndex).forEach(name => { | ||
state.push({ name, action: Action.Shrink }); | ||
}); | ||
let differenceFound = false; | ||
let iCur = 0; | ||
let iNew = 0; | ||
while (iCur < table.length && iNew < migrations.length) { | ||
const current = this.state.tableByName[table[iCur]]; | ||
const next = this.state.migrationsByName[migrations[iNew]]; | ||
if (differenceFound || current.name !== next.name || current.hash.do !== next.hash.do) { | ||
differenceFound = true; | ||
state.push({ name: current.name, action: Action.Remove }); | ||
iCur++; | ||
} | ||
else { | ||
state.push({ name: current.name, action: Action.Skip }); | ||
iCur++; | ||
iNew++; | ||
} | ||
} | ||
if (e.code) { | ||
throw e; | ||
} else { | ||
throw { | ||
code: 'inspect_error', | ||
err: e | ||
}; | ||
while (iCur < table.length) { | ||
state.push({ name: table[iCur], action: Action.Remove }); | ||
iCur++; | ||
} | ||
}); | ||
while (iNew < migrations.length) { | ||
state.push({ name: migrations[iNew], action: Action.Add }); | ||
iNew++; | ||
} | ||
return state; | ||
} | ||
// Мигрирует все изменения | ||
migrate(progress) { | ||
var migrator; | ||
migrator = this; | ||
return Promise.resolve().then(() => { | ||
return this.data || this.inspect(); | ||
}).then(function(migrations) { | ||
var add, change, executed, init, loader, remove, runner, shrink; | ||
init = _.filter(migrations.map, { | ||
state: 'inited' | ||
}); | ||
remove = _.filter(migrations.map, { | ||
state: 'removed' | ||
}); | ||
change = _.filter(migrations.map, { | ||
state: 'changed' | ||
}); | ||
add = _.filter(migrations.map, { | ||
state: 'added' | ||
}); | ||
shrink = _.filter(migrations.map, { | ||
state: 'shrink' | ||
}); | ||
executed = _.filter(migrations.map, { | ||
state: 'executed' | ||
}); | ||
runner = function(loader, action, result, name) { | ||
return loader.then(function() { | ||
if (migrator[action]) { | ||
return migrator[action](name, false); | ||
} else { | ||
return true; | ||
async migrate(notify, userActions) { | ||
const actions = userActions || await this.inspect(); | ||
await this.db.transaction(async (db) => { | ||
for (const item of actions) { | ||
try { | ||
switch (item.action) { | ||
case Action.Remove: | ||
await this.remove(item.name, db); | ||
break; | ||
case Action.Add: | ||
await this.add(item.name, db); | ||
break; | ||
case Action.Change: | ||
await this.change(item.name, db); | ||
break; | ||
} | ||
if (notify) | ||
notify({ ...item, success: true }); | ||
} | ||
catch (error) { | ||
if (notify) | ||
notify({ ...item, success: false }); | ||
throw error; | ||
} | ||
} | ||
}).then(function() { | ||
return typeof progress === "function" ? progress(result, name) : void 0; | ||
}).catch(function(err) { | ||
if (typeof progress === "function") { | ||
progress('fail', name); | ||
} | ||
throw err; | ||
}); | ||
}; | ||
loader = Promise.resolve().then(function() { | ||
return migrator.db.q("BEGIN"); | ||
}); | ||
_.forEach(init, function({name}) { | ||
return loader = runner(loader, 'init', 'inited', name); | ||
}); | ||
_.forEach(shrink, function({name}) { | ||
return loader = runner(loader, 'shrink', 'shrink', name); | ||
}); | ||
_.forEach(executed, function({name}) { | ||
return loader = runner(loader, 'execute', 'executed', name); | ||
}); | ||
_.forEach(remove, function({name}) { | ||
return loader = runner(loader, 'remove', 'removed', name); | ||
}); | ||
_.forEach(change, function({name}) { | ||
return loader = runner(loader, 'change', 'changed', name); | ||
}); | ||
_.forEach(add, function({name}) { | ||
return loader = runner(loader, 'add', 'added', name); | ||
}); | ||
loader = loader.then(function() { | ||
return migrator.db.q("COMMIT"); | ||
}).catch(function(err) { | ||
migrator.db.q("ROLLBACK"); | ||
throw err; | ||
}); | ||
return loader; | ||
}).then(function() { | ||
return migrator.data; | ||
}); | ||
} | ||
init(name) { | ||
var schema_name, schema_table; | ||
({schema_name, schema_table} = this.params); | ||
return Promise.resolve().then(() => { | ||
return this.data || this.inspect(); | ||
}).then(() => { | ||
var file; | ||
file = this.data.files[name]; | ||
if (file) { | ||
return file; | ||
} else { | ||
throw 'not_found'; | ||
} | ||
}).then((file) => { | ||
return this.db.q(`INSERT INTO ${schema_name}.${schema_table} (name, do_hash, do_sql, undo_hash, undo_sql) | ||
VALUES ($1, $2, $3, $4, $5)`, [name, file.do_hash, file.do_sql, file.undo_hash, file.undo_sql]); | ||
}).then(function() { | ||
return true; | ||
}).catch(function(e) { | ||
throw { | ||
code: 'migration_init', | ||
name: name, | ||
err: e | ||
}; | ||
}); | ||
async remove(name, db = this.db) { | ||
const current = this.state.tableByName[name]; | ||
if (current) | ||
await db.q(current.sql.undo); | ||
await db.q(`DELETE FROM ${this.state.tableName} WHERE name = $1`, [name]); | ||
} | ||
remove(name, transaction = true) { | ||
var schema_name, schema_table; | ||
({schema_name, schema_table} = this.params); | ||
return Promise.resolve().then(() => { | ||
return this.data || this.inspect(); | ||
}).then(() => { | ||
var table; | ||
table = this.data.table[name]; | ||
if (table) { | ||
return table; | ||
} else { | ||
throw 'not_found'; | ||
} | ||
}).then((data) => { | ||
if (transaction) { | ||
return this.db.q("BEGIN").then(function() { | ||
return data; | ||
}); | ||
} else { | ||
return data; | ||
} | ||
}).then((table) => { | ||
if (table.undo_sql) { | ||
return this.db.q(table.undo_sql); | ||
} else { | ||
return true; | ||
} | ||
}).then(() => { | ||
return this.db.q(`DELETE FROM ${schema_name}.${schema_table} WHERE name = $1`, [name]); | ||
}).then(() => { | ||
if (transaction) { | ||
return this.db.q("COMMIT"); | ||
} else { | ||
return true; | ||
} | ||
}).catch((e) => { | ||
if (transaction) { | ||
return this.db.q("ROLLBACK"); | ||
} | ||
throw { | ||
code: 'migration_remove', | ||
name: name, | ||
err: e | ||
}; | ||
}); | ||
async add(name, db = this.db) { | ||
const migration = this.state.migrationsByName[name]; | ||
if (migration) | ||
await db.q(migration.sql.do); | ||
await db.q(` | ||
INSERT INTO ${this.state.tableName} (name, do_sql, do_hash, undo_sql, undo_hash) | ||
VALUES ($1, $2, $3, $4, $5) | ||
`, [ | ||
name, | ||
migration.do.sql, migration.do.hash, | ||
migration.undo.sql, migration.undo.hash, | ||
]); | ||
} | ||
change(name, transaction = true) { | ||
var schema_name, schema_table; | ||
({schema_name, schema_table} = this.params); | ||
return Promise.resolve().then(() => { | ||
return this.data || this.inspect(); | ||
}).then(() => { | ||
var file, table; | ||
table = this.data.table[name]; | ||
file = this.data.files[name]; | ||
if (table && file) { | ||
return {table, file}; | ||
} else { | ||
throw 'not_found'; | ||
} | ||
}).then((data) => { | ||
if (transaction) { | ||
return this.db.q("BEGIN").then(function() { | ||
return data; | ||
}); | ||
} else { | ||
return data; | ||
} | ||
}).then(({table, file}) => { | ||
if (table.undo_sql) { | ||
return this.db.q(table.undo_sql).then(function() { | ||
return file; | ||
}); | ||
} else { | ||
return file; | ||
} | ||
}).then((file) => { | ||
if (file.do_sql) { | ||
return this.db.q(file.do_sql).then(function() { | ||
return file; | ||
}); | ||
} else { | ||
return file; | ||
} | ||
}).then((file) => { | ||
return this.db.q(`UPDATE ${schema_name}.${schema_table} | ||
SET | ||
do_hash = $2, | ||
do_sql = $3, | ||
undo_hash = $4, | ||
undo_sql = $5, | ||
exec_date = $6 | ||
WHERE name = $1`, [name, file.do_hash, file.do_sql, file.undo_hash, file.undo_sql, new Date()]); | ||
}).then(() => { | ||
if (transaction) { | ||
return this.db.q("COMMIT"); | ||
} else { | ||
return true; | ||
} | ||
}).catch((e) => { | ||
if (transaction) { | ||
return this.db.q("ROLLBACK"); | ||
} | ||
throw { | ||
code: 'migration_change', | ||
name: name, | ||
err: e | ||
}; | ||
}); | ||
async change(name, db = this.db) { | ||
const current = this.state.tableByName[name]; | ||
const migration = this.state.migrationsByName[name]; | ||
if (current) | ||
await db.q(current.sql.undo); | ||
if (migration) | ||
await db.q(migration.sql.do); | ||
await db.q(` | ||
UPDATE ${this.state.tableName} | ||
SET | ||
do_sql = $2, | ||
do_hash = $3, | ||
undo_sql = $4, | ||
undo_hash = $5, | ||
exec_date = now() | ||
WHERE name = $1 | ||
`, [ | ||
name, | ||
migration.do.sql, migration.do.hash, | ||
migration.undo.sql, migration.undo.hash, | ||
]); | ||
} | ||
add(name, transaction = true) { | ||
var schema_name, schema_table; | ||
({schema_name, schema_table} = this.params); | ||
return Promise.resolve().then(() => { | ||
return this.data || this.inspect(); | ||
}).then(() => { | ||
var file; | ||
file = this.data.files[name]; | ||
if (file) { | ||
return file; | ||
} else { | ||
throw 'not_found'; | ||
} | ||
}).then((data) => { | ||
if (transaction) { | ||
return this.db.q("BEGIN").then(function() { | ||
return data; | ||
}); | ||
} else { | ||
return data; | ||
} | ||
}).then((file) => { | ||
if (file.do_sql) { | ||
return this.db.q(file.do_sql).then(function() { | ||
return file; | ||
}); | ||
} else { | ||
return file; | ||
} | ||
}).then((file) => { | ||
return this.db.q(`INSERT INTO ${schema_name}.${schema_table} (name, do_hash, do_sql, undo_hash, undo_sql) | ||
VALUES ($1, $2, $3, $4, $5)`, [name, file.do_hash, file.do_sql, file.undo_hash, file.undo_sql]); | ||
}).then(() => { | ||
if (transaction) { | ||
return this.db.q("COMMIT"); | ||
} else { | ||
return true; | ||
} | ||
}).catch((e) => { | ||
if (transaction) { | ||
return this.db.q("ROLLBACK"); | ||
} | ||
throw { | ||
code: 'migration_add', | ||
name: name, | ||
err: e | ||
}; | ||
}); | ||
} | ||
}; | ||
module.exports = Migrator; | ||
}).call(this); | ||
} | ||
//# sourceMappingURL=migrator.js.map |
{ | ||
"name": "do-migrate", | ||
"version": "1.0.0", | ||
"description": "Postgresql migrator", | ||
"main": "index.js", | ||
"version": "2.0.0", | ||
"description": "PostgreSQL migrator", | ||
"main": "lib/index.js", | ||
"type": "module", | ||
"bin": { | ||
"do-migrate": "./bin/do-migrate" | ||
"do-migrate": "./bin/do-migrate.js" | ||
}, | ||
"scripts": { | ||
"build": "rm -rf lib/ && npx coffee -c -o lib/ src/", | ||
"test": "mocha", | ||
"build": "tsc", | ||
"prepublishOnly": "npm run build", | ||
@@ -21,9 +23,21 @@ "start": "node bin/do-migrate" | ||
"dependencies": { | ||
"commander": "^2.9.0", | ||
"lodash": "^4.17.15", | ||
"pg": "^6.4.2" | ||
"@commander-js/extra-typings": "^11.1.0", | ||
"commander": "^11.1.0", | ||
"dotenv": "^16.4.1", | ||
"lodash": "^4.17.21", | ||
"pg": "^8.11.3" | ||
}, | ||
"devDependencies": { | ||
"coffeescript": "^2.5.0" | ||
"@istanbuljs/nyc-config-typescript": "^1.0.2", | ||
"@types/lodash": "^4.14.202", | ||
"@types/mocha": "^10.0.6", | ||
"@types/node": "^20.11.10", | ||
"@types/pg": "^8.11.0", | ||
"@typescript-eslint/eslint-plugin": "^6.20.0", | ||
"@typescript-eslint/parser": "^6.20.0", | ||
"eslint": "^8.56.0", | ||
"mocha": "^10.2.0", | ||
"ts-node": "^10.9.2", | ||
"typescript": "^5.3.3" | ||
} | ||
} |
315
README.md
@@ -1,2 +0,2 @@ | ||
# Do Migrate! | ||
# Do Migrate | ||
@@ -7,3 +7,22 @@ PostgreSQL migration tool on Node.js. | ||
Table schema: | ||
This SQL migration tool is designed to facilitate seamless database schema evolution. It operates based on a directory containing migration scripts and a defined order of execution. Here’s an overview of its core principles: | ||
## Migration Directory Structure | ||
1. *Migration Files*: The tool relies on a structured folder containing SQL migration files. Each migration consists of two files: `{name}.do.sql` for applying the migration and `{name}.undo.sql` for reverting it. | ||
2. *Order File*: Within the migration folder, an `order` file explicitly lists the names of the migrations in the sequence they should be applied. This ensures controlled and predictable migration execution. | ||
## Migration Execution Logic | ||
1. *Applying New Migrations*: When a new migration is added to the directory and listed in the `order` file, the tool automatically executes the corresponding `.do.sql` file to apply the migration. | ||
2. *Reverting Migrations*: If a migration is removed from the `order` file, the tool finds the associated `.undo.sql` file and executes it to revert the migration. | ||
3. *Modifying Migrations*: Should a migration be altered, the tool first reverts the previous version of the migration using the .undo.sql file and then applies the new version with the `.do.sql` file. | ||
4. *Handling Mid-List Changes*: Any changes (addition, deletion, modification) not at the end of the order list trigger the tool to revert migrations in reverse order up to the point of the earliest change. Subsequently, it applies all new or modified migrations in the correct sequence. | ||
### SQL Migration History Table | ||
Crucially, the tool maintains a table within the database that records the history of applied migrations along with their corresponding undo scripts. This facilitates efficient tracking of changes and enables precise rollback capabilities, ensuring the database schema can be accurately reverted to any previous state as defined by the migration history. | ||
Columns: | ||
* name | ||
@@ -16,55 +35,4 @@ * do_sql — executed migration | ||
Files: | ||
* `{name}.do.sql` — migration file | ||
* `{name}.undo.sql` — undo migration file | ||
* `order` — file with ordered migrations names. | ||
### Possible actions | ||
Inspect data example:
``` js | ||
{ | ||
"table": { | ||
"initial": { | ||
"name": "initial", | ||
"do_hash": "870f2713310e990bd8ed4951fb9560fb8591def24a324abea0d8fd010940752a", | ||
"do_sql": "CREATE TABLE test (\n key varchar(255) UNIQUE,\n value text\n);\n", | ||
"undo_hash": "5111d07169d0ba3c9f4c861fa6076c786f86469e298450c641c3e70ea21df8f6", | ||
"undo_sql": "DROP TABLE test;\n", | ||
"exec_date": "2017-05-10T15:33:40.485Z" | ||
}, | ||
"test_name": { | ||
"name": "test_name", | ||
"do_hash": "bb8d5549c23390eeef2293dc3366e6e97f33102d9326e42af8ee80ec1afba988", | ||
"do_sql": "ALTER TABLE test ADD COLUMN is_json boolean NOT NULL DEFAULT false;\n\nCREATE TABLE test2 (\n key varchar(255) UNIQUE,\n value text\n);\n", | ||
"undo_hash": "0fcb0be684de7480b38486d654c8e89a4a23233dafe6a9bae20ce860c1d78fac", | ||
"undo_sql": "ALTER TABLE test DROP COLUMN IF EXISTS is_json;\n\nDROP TABLE test2;\nDROP TABLE _test2;\n", | ||
"exec_date": "2017-05-10T15:34:17.961Z" | ||
} | ||
}, | ||
"files": { | ||
"initial": { | ||
"do_sql": "CREATE TABLE test (\n key varchar(255) UNIQUE,\n value text\n);\n", | ||
"undo_sql": "DROP TABLE test;\n", | ||
"do_hash": "870f2713310e990bd8ed4951fb9560fb8591def24a324abea0d8fd010940752a", | ||
"undo_hash": "5111d07169d0ba3c9f4c861fa6076c786f86469e298450c641c3e70ea21df8f6" | ||
}, | ||
"test_name": { | ||
"do_sql": "ALTER TABLE test ADD COLUMN is_json boolean NOT NULL DEFAULT false;\n\nCREATE TABLE test2 (\n key varchar(255) UNIQUE,\n value text\n);\n", | ||
"undo_sql": "ALTER TABLE test DROP COLUMN IF EXISTS is_json;\n\nDROP TABLE test2;\nDROP TABLE _test2;\n", | ||
"do_hash": "bb8d5549c23390eeef2293dc3366e6e97f33102d9326e42af8ee80ec1afba988", | ||
"undo_hash": "0fcb0be684de7480b38486d654c8e89a4a23233dafe6a9bae20ce860c1d78fac" | ||
} | ||
}, | ||
"map": [ | ||
{ | ||
"name": "initial", | ||
"state": "executed" | ||
}, { | ||
"name": "test_name", | ||
"state": "executed" | ||
} | ||
] | ||
} | ||
``` | ||
### Statuses | ||
* executed — Already executed migration | ||
@@ -77,158 +45,153 @@ * removed — Migration removed (executed "undo migration") | ||
### Order file | ||
### Ordering and file structure | ||
Migrations order list by names. | ||
Example: | ||
``` | ||
name1 | ||
name2 | ||
name3 | ||
``` | ||
Example `order` file: | ||
**Initiated cursor:** | ||
```txt | ||
migration1 | ||
migration2 | ||
migration3 | ||
``` | ||
~inited~ | ||
``` | ||
Use it if you are integrating the migrator into an existing project. Migrations before this cursor will not be executed if they are not already in the schema table; they are simply recorded in it. The cursor can be placed on any line.
Example migrations dir list: | ||
**Shrinking cursor:** | ||
```txt | ||
migration1.do.sql | ||
migration1.undo.sql | ||
migration2.do.sql | ||
migration2.undo.sql | ||
migration3.do.sql | ||
migration3.undo.sql | ||
order | ||
``` | ||
~shrink~
``` | ||
Use it if you want to remove very old migration files from your project. Migrations before this cursor will be ignored, so you can delete their files and remove their names from the order list. The cursor can be placed on any line.
## Install | ||
Clone repo and run `npm install` or use Docker. | ||
Run `npm install do-migrate` or use Docker.
## CLI usage | ||
Globally:
View action plan: | ||
``` bash | ||
$ do-migrate <args> [command] [params] | ||
npx do-migrate [options] | ||
``` | ||
Locally:
Execute: | ||
``` bash | ||
$ $(npm bin)/do-migrate -- <args> [command] [params] | ||
npx do-migrate --exec [options] | ||
``` | ||
Commands: | ||
* **run** — Run all migrations if needed, without asking | ||
* **interactive** — Run migrations in interactive mode | ||
* **inspect** — List all migrations with states | ||
* **remove** *<migration>* — Remove migration | ||
Options: | ||
* **-h**, **--help** — output usage information | ||
* **--host** *<host>* — Database host | ||
* **--port** *<number>* — Database port | ||
* **--user** *<username>* — Database user | ||
* **--password** *<password>* — Database password | ||
* **--database** *<base_name>* — Database schema name | ||
* **--schema-table** *<name>* — Migrator table name | ||
* **-M**, **--migrations** *<path>* — Path to migrations files | ||
* **-L**, **--list-name** *<name>* — Migrations order list file name | ||
* **-V**, **--version** — output the version number | ||
* **--exec** — Execute migration. You must use this flag for run migrations | ||
* **--host \<host\>** — Database hostname (default: "localhost", env: MIGRATOR_DB_HOST) | ||
* **--port \<port\>** — Database port (default: 5432, env: MIGRATOR_DB_PORT) | ||
* **--user \<username\>** — Database user (env: MIGRATOR_DB_USER) | ||
* **--password \<password\>** — Database password (env: MIGRATOR_DB_PASSWORD) | ||
* **--database \<name\>** — Database name (env: MIGRATOR_DB_DATABASE) | ||
* **--schema-table \<name\>** — Migrator sync table name (default: "schema_versions", env: MIGRATOR_TABLE_NAME) | ||
* **--schema-name \<name\>** — Database schema name (default: "public", env: MIGRATOR_DB_SCHEMA_NAME) | ||
* **--path \<path\>** — Path to migrations files dir (env: MITRATOR_FILES_PATH) | ||
* **-v**, **--version** — Output the current version | ||
* **-h**, **--help** — Display help for command | ||
## Program usage | ||
Env variables: | ||
``` coffee | ||
Migrator = require('do-migrate') | ||
* `MIGRATOR_DB_HOST` — Database host (default: localhost) | ||
* `MIGRATOR_DB_PORT` — Database port (default: 5432) | ||
* `MIGRATOR_DB_USER` — Database user (default: postgres) | ||
* `MIGRATOR_DB_PASSWORD` — Database password (default: empty)
* `MIGRATOR_DB_DATABASE` — Database name (default: postgres) | ||
* `MIGRATOR_DB_SCHEMA_NAME` — Database schema name (default: public) | ||
* `MIGRATOR_DB_SSL_REQUIRED` — SSL required flag (use true/false, default: false)
* `MIGRATOR_DB_SSL_CA` — SSL CA (pem string) | ||
* `MITRATOR_FILES_PATH` — Path to migration dir (default: ./migrations) | ||
* `MITRATOR_ORDER_FILE` — Order file name (default: order) | ||
* `MIGRATOR_TABLE_NAME` — Sync table name (default: schema_versions) | ||
migrator = new Migrator options | ||
migrator.migrate() | ||
``` | ||
## API usage | ||
Options: | ||
* *host* — Database host | ||
* *port* — Database port | ||
* *user* — Database user | ||
* *password* — Database password | ||
* *database* — Database schema name | ||
* *migrations* — Path to migrations files | ||
* *migrations_list* — Migrations order list file name | ||
* *schema_table* — Migrator table name | ||
``` ts | ||
import Migrator, { Config } from 'do-migrate'; | ||
Methods: | ||
* **inspect(callback)** — return promise with info about migrations. | ||
* **migrate(progress)** — migrate all needed migrations. Return promise with info about migrations. In *progress* return migration status (action and migration name). | ||
* **remove(name, transaction=true)** — force remove migration by name (*transaction* is a flag for running in a single transaction). Returns a promise.
* **change(name, transaction=true)** — force change migration by name (*transaction* is a flag for running in a single transaction). Returns a promise.
* **add(name, transaction=true)** — force add migration by name (*transaction* is a flag for running in a single transaction). Returns a promise.
* **destroy()** — close DB connection and destroy migrator | ||
const config: Config = { | ||
db: { | ||
host: get('MIGRATOR_DB_HOST', 'localhost'), | ||
port: get('MIGRATOR_DB_PORT', 5432), | ||
user: get('MIGRATOR_DB_USER', 'postgres'), | ||
... | ||
}, | ||
migrations: { | ||
path: path.resolve(__dirname, './migrations'), | ||
... | ||
} | ||
}; | ||
Inspect data structure: | ||
* table — migrations executed and saved in DB table (name, do_sql, undo_sql, do_hash `sha256`, undo_hash `sha256`, exec_date) | ||
* files — migrations in files (name, do_sql, undo_sql, do_hash `sha256`, undo_hash `sha256`, exec_date) | ||
* map — all migrations (name, state) | ||
const migrator = new Migrator(config); | ||
States: | ||
* executed — already executed | ||
* added — new migration | ||
* changed — changed migration | ||
* removed — removed migration
* inited — initial migration | ||
* shrink — shrink migration | ||
// actions list without execution | ||
console.log( | ||
await migrator.inspect(); | ||
); | ||
// execute migrations | ||
await migrator.migrate((action) => console.log(action)); | ||
``` | ||
## Docker usage | ||
Types: | ||
### Build base image | ||
```ts | ||
type Config = { | ||
db?: { | ||
host?: string; | ||
port?: number; | ||
user?: string; | ||
password?: string; | ||
database?: string; | ||
... // pg.PoolConfig | ||
}; | ||
migrations?: { | ||
path?: string; | ||
order_file?: string; | ||
schema?: string; | ||
table?: string; | ||
}; | ||
}; | ||
Dockerfile: | ||
``` docker | ||
FROM nim579/do-migrate | ||
enum Action { | ||
Skip = 'skip', | ||
Shrink = 'shrink', | ||
Remove = 'remove', | ||
Change = 'change', | ||
Add = 'add', | ||
} | ||
ADD ./migrations /migrator/migrations | ||
ADD ./env.json /migrator/env.json # Optional env variables map file | ||
CMD [ "interactive" ] # Optional command and args for CLI | ||
interface Migrator { | ||
constructor(config: Config): void | ||
inspect(): Promise<{ name: string; action: Action; }[]> | ||
migrate( | ||
notify?: (action: { name: string; action: Action, success: boolean }) => void, | ||
userActions?: { name: string; action: Action }[] | ||
): Promise<void> | ||
} | ||
``` | ||
Command: | ||
``` bash | ||
$ docker run --name migrator -d -e ... nim579/do-migrate | ||
``` | ||
## Docker | ||
Command: | ||
``` | ||
$ docker run --name migrator -it -e ... nim579/do-migrate interactive | ||
``` | ||
Inspect: | ||
Compose: | ||
``` yaml | ||
version: "2" | ||
services: | ||
migrator: | ||
image: app-migrator | ||
build: | ||
context: . | ||
dockerfile: Dockerfile | ||
environment: | ||
      - MIGRATOR_DB_HOST=localhost
      - MIGRATOR_DB_PORT=5432
      - MIGRATOR_DB_USER=postgres
      - MIGRATOR_DB_PASSWORD=admin
      - MIGRATOR_DB_DATABASE=test
``` sh | ||
docker run --name migrator -e MIGRATOR_DB_HOST=localhost -e ... --volume ./migrations:/migrator/migrations nim579/do-migrate | ||
``` | ||
### From original image | ||
Execute: | ||
Command: | ||
``` bash | ||
$ docker run --name migrator -d -e ... --volume ./migrations:/migrator/migrations nim579/do-migrate | ||
``` sh | ||
docker run --name migrator -e MIGRATOR_DB_HOST=localhost -e ... --volume ./migrations:/migrator/migrations nim579/do-migrate --exec | ||
``` | ||
Command for interactive: | ||
``` | ||
$ docker run --name migrator -it -e ... --volume ./migrations:/migrator/migrations nim579/do-migrate interactive | ||
``` | ||
Docker Compose: | ||
Compose: | ||
``` yaml | ||
@@ -238,11 +201,13 @@ version: "2" | ||
migrator: | ||
image: nim579/do-migrate | ||
image: nim579/do-migrate:2 | ||
command: | ||
- "--exec" | ||
environment: | ||
MIGRATOR_DB_HOST=localhost | ||
MIGRATOR_DB_PORT=5432 | ||
MIGRATOR_DB_USER=postgres | ||
MIGRATOR_DB_PASSWORD=admin | ||
MIGRATOR_DB_DATABASE=test | ||
- MIGRATOR_DB_HOST | ||
- MIGRATOR_DB_PORT | ||
- MIGRATOR_DB_USER | ||
- MIGRATOR_DB_PASSWORD | ||
- MIGRATOR_DB_DATABASE | ||
    volumes:
- ./migrations:/migrator/migrations:ro | ||
- ./migrations:/migrator/migrations | ||
``` |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
AI-detected possible typosquat
Supply chain riskAI has identified this package as a potential typosquat of a more popular package. This suggests that the package may be intentionally mimicking another package's name, description, or other metadata.
Found 1 instance in 1 package
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 2 instances in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
708019
1790.82%90
350%2422
148.92%Yes
NaN5
66.67%11
1000%210
-14.29%5
150%2
Infinity%+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
Updated
Updated
Updated