New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@vbilopav/pgmigrations

Package Overview
Dependencies
Maintainers
1
Versions
55
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@vbilopav/pgmigrations - npm Package Compare versions

Comparing version 0.2.10 to 0.2.11

904

migration.js

@@ -72,19 +72,41 @@ const path = require("path");

module.exports = async function(cmd, opt, config) {
var mandatory = [
"upPrefix","downPrefix","repetablePrefix","repetableBeforePrefix",
"beforePrefix","afterPrefix","separatorPrefix",
"historyTableName","historyTableSchema",
"tmpDir","hashFunction"
];
for (let i = 0; i < mandatory.length; i++) {
const key = mandatory[i];
if (!config[key]) {
error(`Config key ${key} is required. Please provide a valid config key.`);
return;
module.exports = {
history: async function(opt, config) {
var schemaQuery = str => formatByName(str, {schema: config.historyTableSchema, name: config.historyTableName});
var exists = (await query(schemaQuery(tableExistsQuery), opt, config)) == 't';
if (exists) {
info(JSON.parse(await query(schemaQuery(historyQuery), opt, config)));
} else {
info("History table does not exist.");
}
}
if (Array.isArray(config.migrationDir)) {
for (let i = 0; i < config.migrationDir.length; i++) {
const migrationDir = config.migrationDir[i];
},
migrate: async function(cmd, opt, config) {
var mandatory = [
"upPrefix","downPrefix","repetablePrefix","repetableBeforePrefix",
"beforePrefix","afterPrefix","separatorPrefix",
"historyTableName","historyTableSchema",
"tmpDir","hashFunction"
];
for (let i = 0; i < mandatory.length; i++) {
const key = mandatory[i];
if (!config[key]) {
error(`Config key ${key} is required. Please provide a valid config key.`);
return;
}
}
if (Array.isArray(config.migrationDir)) {
for (let i = 0; i < config.migrationDir.length; i++) {
const migrationDir = config.migrationDir[i];
if (!fs.existsSync(migrationDir)) {
error(`Migration directory ${migrationDir} does not exist. Please provide a valid migration directory.`);
return;
}
if (opt.verbose) {
console.log("Using migration directory: " + migrationDir);
}
}
}
else {
var migrationDir = path.join(process.cwd(), config.migrationDir);
if (!fs.existsSync(migrationDir)) {

@@ -98,474 +120,466 @@ error(`Migration directory ${migrationDir} does not exist. Please provide a valid migration directory.`);

}
}
else {
var migrationDir = path.join(process.cwd(), config.migrationDir);
if (!fs.existsSync(migrationDir)) {
error(`Migration directory ${migrationDir} does not exist. Please provide a valid migration directory.`);
return;
}
if (opt.verbose) {
console.log("Using migration directory: " + migrationDir);
}
}
var schemaQuery = str => formatByName(str, {schema: config.historyTableSchema, name: config.historyTableName});
try
{
if (!fs.existsSync(config.tmpDir)) {
if (opt.verbose) {
console.log("Creating tmp directory: " + config.tmpDir);
var schemaQuery = str => formatByName(str, {schema: config.historyTableSchema, name: config.historyTableName});
try
{
if (!fs.existsSync(config.tmpDir)) {
if (opt.verbose) {
console.log("Creating tmp directory: " + config.tmpDir);
}
fs.mkdirSync(config.tmpDir);
} else if (!config.keepMigrationDirHistory) {
if (opt.verbose) {
console.log("Clearing tmp directory: " + config.tmpDir);
}
fs.readdirSync(config.tmpDir).forEach(file => {
fs.unlinkSync(path.join(config.tmpDir, file));
});
}
fs.mkdirSync(config.tmpDir);
} else if (!config.keepMigrationDirHistory) {
if (opt.verbose) {
console.log("Clearing tmp directory: " + config.tmpDir);
var history = [];
if (!opt.full) {
var exists = (await query(schemaQuery(tableExistsQuery), opt, config)) == 't';
if (exists) {
history = JSON.parse(await query(schemaQuery(historyQuery), opt, config));
} else {
if (opt.verbose) {
console.log("Creating history table...");
}
var result = await command(formatByName(createHistoryTableScript, {
schema: config.historyTableSchema,
name: config.historyTableName,
types: Object.values(types).map(t => `'${t}'`).join(",")
}), opt, [], config, true);
if (result != 0) {
error("Failed to create history table, exiting...");
return;
}
}
}
fs.readdirSync(config.tmpDir).forEach(file => {
fs.unlinkSync(path.join(config.tmpDir, file));
});
}
var history = [];
if (!opt.full) {
var exists = (await query(schemaQuery(tableExistsQuery), opt, config)) == 't';
if (exists) {
history = JSON.parse(await query(schemaQuery(historyQuery), opt, config));
} else {
if (opt.verbose) {
console.log("Creating history table...");
var repetableHashes = {};
var versionDict = {};
history.forEach(h => {
if (h.type == types.repetable || h.type == types.repetableBefore) {
repetableHashes[h.hash + ";" + h.name] = h;
}
var result = await command(formatByName(createHistoryTableScript, {
schema: config.historyTableSchema,
name: config.historyTableName,
types: Object.values(types).map(t => `'${t}'`).join(",")
}), opt, [], config, true);
if (result != 0) {
error("Failed to create history table, exiting...");
return;
if (h.type == types.up) {
versionDict[h.version] = h;
}
});
const migrationDirs = Array.isArray(config.migrationDir) ? config.migrationDir : [config.migrationDir];
const upDirsHash = {};
const downDirsHash = {};
const repetableDirsHash = {};
const repetableBeforeDirsHash = {};
const beforeDirsHash = {};
const afterDirsHash = {};
if (config.upDirs && config.upDirs.length > 0) {
migrationDirs.push(...config.upDirs);
config.upDirs.forEach(d => upDirsHash[d] = true);
}
}
var repetableHashes = {};
var versionDict = {};
history.forEach(h => {
if (h.type == types.repetable || h.type == types.repetableBefore) {
repetableHashes[h.hash + ";" + h.name] = h;
if (config.downDirs && config.downDirs.length > 0) {
migrationDirs.push(...config.downDirs);
config.downDirs.forEach(d => downDirsHash[d] = true);
}
if (h.type == types.up) {
versionDict[h.version] = h;
if (config.repetableDirs && config.repetableDirs.length > 0) {
migrationDirs.push(...config.repetableDirs);
config.repetableDirs.forEach(d => repetableDirsHash[d] = true);
}
});
const migrationDirs = Array.isArray(config.migrationDir) ? config.migrationDir : [config.migrationDir];
const upDirsHash = {};
const downDirsHash = {};
const repetableDirsHash = {};
const repetableBeforeDirsHash = {};
const beforeDirsHash = {};
const afterDirsHash = {};
if (config.upDirs && config.upDirs.length > 0) {
migrationDirs.push(...config.upDirs);
config.upDirs.forEach(d => upDirsHash[d] = true);
}
if (config.downDirs && config.downDirs.length > 0) {
migrationDirs.push(...config.downDirs);
config.downDirs.forEach(d => downDirsHash[d] = true);
}
if (config.repetableDirs && config.repetableDirs.length > 0) {
migrationDirs.push(...config.repetableDirs);
config.repetableDirs.forEach(d => repetableDirsHash[d] = true);
}
if (config.repetableBeforeDirs && config.repetableBeforeDirs.length > 0) {
migrationDirs.push(...config.repetableBeforeDirs);
config.repetableBeforeDirs.forEach(d => repetableBeforeDirsHash[d] = true);
}
if (config.beforeDirs && config.beforeDirs.length > 0) {
migrationDirs.push(...config.beforeDirs);
config.beforeDirs.forEach(d => beforeDirsHash[d] = true);
}
if (config.afterDirs && config.afterDirs.length > 0) {
migrationDirs.push(...config.afterDirs);
config.afterDirs.forEach(d => afterDirsHash[d] = true);
}
const beforeList = [];
const repetableBeforeList = [];
const repetableList = [];
const upList = [];
const downList = [];
const afterList = [];
const upVersions = {};
const downVersions = {};
const isUp = cmd == "up";
const isDown = cmd == "down";
const versionUpNames = {};
const versionDownNames = {};
if (config.recursiveDirs) {
var migrationDirsTmp = [...migrationDirs];
for (let i = 0; i < migrationDirsTmp.length; i++) {
const migrationDir = migrationDirsTmp[i];
fs.readdirSync(migrationDir, {recursive: true}).forEach(subDir => {
const subDirPath = path.join(migrationDir, subDir);
if (fs.lstatSync(subDirPath).isDirectory()) {
migrationDirs.push(subDirPath);
}
});
if (config.repetableBeforeDirs && config.repetableBeforeDirs.length > 0) {
migrationDirs.push(...config.repetableBeforeDirs);
config.repetableBeforeDirs.forEach(d => repetableBeforeDirsHash[d] = true);
}
}
const hasMultipleDirs = migrationDirs.length > 1;
var parsedDirs = {};
var usedNames = {};
for (let i = 0; i < migrationDirs.length; i++) {
const migrationDir = migrationDirs[i];
if (!migrationDir) {
continue;
if (config.beforeDirs && config.beforeDirs.length > 0) {
migrationDirs.push(...config.beforeDirs);
config.beforeDirs.forEach(d => beforeDirsHash[d] = true);
}
var parsed = migrationDir.replace(/[^a-zA-Z0-9]/g, "");
if (parsedDirs[parsed]) {
continue;
if (config.afterDirs && config.afterDirs.length > 0) {
migrationDirs.push(...config.afterDirs);
config.afterDirs.forEach(d => afterDirsHash[d] = true);
}
parsedDirs[parsed] = true;
if (!fs.existsSync(migrationDir) || !fs.lstatSync(migrationDir).isDirectory()) {
error(`Migration directory ${migrationDir} does not exist or is not a directory. Please provide a valid migration directory.`);
return;
const beforeList = [];
const repetableBeforeList = [];
const repetableList = [];
const upList = [];
const downList = [];
const afterList = [];
const upVersions = {};
const downVersions = {};
const isUp = cmd == "up";
const isDown = cmd == "down";
const versionUpNames = {};
const versionDownNames = {};
if (config.recursiveDirs) {
var migrationDirsTmp = [...migrationDirs];
for (let i = 0; i < migrationDirsTmp.length; i++) {
const migrationDir = migrationDirsTmp[i];
fs.readdirSync(migrationDir, {recursive: true}).forEach(subDir => {
const subDirPath = path.join(migrationDir, subDir);
if (fs.lstatSync(subDirPath).isDirectory()) {
migrationDirs.push(subDirPath);
}
});
}
}
fs.readdirSync(migrationDir).forEach(fileName => {
const filePath = path.join(migrationDir, fileName);
if (fs.lstatSync(filePath).isDirectory()) {
return;
const hasMultipleDirs = migrationDirs.length > 1;
var parsedDirs = {};
var usedNames = {};
for (let i = 0; i < migrationDirs.length; i++) {
const migrationDir = migrationDirs[i];
if (!migrationDir) {
continue;
}
for (let j = 0; j < config.migrationExtensions.length; j++) {
const ext = config.migrationExtensions[j].toLowerCase();
if (!fileName.toLowerCase().endsWith(ext)) {
if (opt.verbose) {
warning(`Skipping file ${fileName} with invalid extension. Valid extensions are ${config.migrationExtensions.join(", ")}.`);
}
return;
}
var parsed = migrationDir.replace(/[^a-zA-Z0-9]/g, "");
if (parsedDirs[parsed]) {
continue;
}
if (fileName.indexOf(config.separatorPrefix) == -1
&& repetableDirsHash[migrationDir] == false
&& repetableBeforeDirsHash[migrationDir] == false
&& beforeDirsHash[migrationDir] == false
&& afterDirsHash[migrationDir] == false
&& upDirsHash[migrationDir] == false
&& downDirsHash[migrationDir] == false) {
warning(`Migration file ${fileName} does not contain separator prefix ${config.separatorPrefix}. Skipping...`);
parsedDirs[parsed] = true;
if (!fs.existsSync(migrationDir) || !fs.lstatSync(migrationDir).isDirectory()) {
error(`Migration directory ${migrationDir} does not exist or is not a directory. Please provide a valid migration directory.`);
return;
}
let parts = fileName.split(config.separatorPrefix);
let prefix = parts[0];
let suffix = parts.slice(1).join(config.separatorPrefix);
let name = suffix.split(".").slice(0, -1).join(".").replace(/[^a-zA-Z0-9]/g, " ").trim().replace(/\s+/g, " ");
if (usedNames[name]) {
let dirParts = migrationDir.replace(/[^a-zA-Z0-9]/g, " ").trim().split(" ");
let nameSet = false;
for (let i = dirParts.length - 1; i >= 0; i--) {
let newName = name + " (" + dirParts.slice(i).join(" ") + ")";
if (!usedNames[newName]) {
name = newName;
nameSet = true;
break;
}
fs.readdirSync(migrationDir).forEach(fileName => {
const filePath = path.join(migrationDir, fileName);
if (fs.lstatSync(filePath).isDirectory()) {
return;
}
if (!nameSet) {
let count = 1;
while(usedNames[name]) {
name = name + ` (${count++})`;
for (let j = 0; j < config.migrationExtensions.length; j++) {
const ext = config.migrationExtensions[j].toLowerCase();
if (!fileName.toLowerCase().endsWith(ext)) {
if (opt.verbose) {
warning(`Skipping file ${fileName} with invalid extension. Valid extensions are ${config.migrationExtensions.join(", ")}.`);
}
return;
}
}
}
usedNames[name] = true;
let version = null;
let type = null;
const meta = {};
const content = fs.readFileSync(filePath).toString();
const hash = config.hashFunction(content);
const script = (hasMultipleDirs ? (migrationDir + "/" + fileName).replace(/\\/g, "/") : fileName).replace(/\/+/g, "/");
let pushTo = null;
if (prefix.startsWith(config.upPrefix) || upDirsHash[migrationDir]) {
if (isUp) {
version = prefix.slice(config.upPrefix.length).trim();
if (upVersions[version]) {
error(`Migration file ${script} contains duplicate version ${version} already present in ${upVersions[version]}. Exiting...`);
process.exit(1);
if (fileName.indexOf(config.separatorPrefix) == -1
&& repetableDirsHash[migrationDir] == false
&& repetableBeforeDirsHash[migrationDir] == false
&& beforeDirsHash[migrationDir] == false
&& afterDirsHash[migrationDir] == false
&& upDirsHash[migrationDir] == false
&& downDirsHash[migrationDir] == false) {
warning(`Migration file ${fileName} does not contain separator prefix ${config.separatorPrefix}. Skipping...`);
return;
}
let parts = fileName.split(config.separatorPrefix);
let prefix = parts[0];
let suffix = parts.slice(1).join(config.separatorPrefix);
let name = suffix.split(".").slice(0, -1).join(".").replace(/[^a-zA-Z0-9]/g, " ").trim().replace(/\s+/g, " ");
if (usedNames[name]) {
let dirParts = migrationDir.replace(/[^a-zA-Z0-9]/g, " ").trim().split(" ");
let nameSet = false;
for (let i = dirParts.length - 1; i >= 0; i--) {
let newName = name + " (" + dirParts.slice(i).join(" ") + ")";
if (!usedNames[newName]) {
name = newName;
nameSet = true;
break;
}
}
upVersions[version] = script;
type = types.up;
if (!version) {
warning(`Migration file ${migrationDir}/${fileName} does not contain version. Skipping...`);
return;
if (!nameSet) {
let count = 1;
while(usedNames[name]) {
name = name + ` (${count++})`;
}
}
if (versionDict[version]) {
return;
}
var count = versionUpNames[name];
count = count ? count + 1 : 1;
if (count > 1) {
name = name + " (" + count + ")";
}
versionUpNames[name] = count;
pushTo = upList;
}
} else if (prefix.startsWith(config.downPrefix) || downDirsHash[migrationDir]) {
if (isDown) {
version = prefix.slice(config.downPrefix.length).trim();
if (downVersions[version]) {
error(`Migration file ${script} contains duplicate version ${version} already present in ${downVersions[version]}. Exiting...`);
process.exit(1);
usedNames[name] = true;
let version = null;
let type = null;
const meta = {};
const content = fs.readFileSync(filePath).toString();
const hash = config.hashFunction(content);
const script = (hasMultipleDirs ? (migrationDir + "/" + fileName).replace(/\\/g, "/") : fileName).replace(/\/+/g, "/");
let pushTo = null;
if (prefix.startsWith(config.upPrefix) || upDirsHash[migrationDir]) {
if (isUp) {
version = prefix.slice(config.upPrefix.length).trim();
if (upVersions[version]) {
error(`Migration file ${script} contains duplicate version ${version} already present in ${upVersions[version]}. Exiting...`);
process.exit(1);
}
upVersions[version] = script;
type = types.up;
if (!version) {
warning(`Migration file ${migrationDir}/${fileName} does not contain version. Skipping...`);
return;
}
if (versionDict[version]) {
return;
}
var count = versionUpNames[name];
count = count ? count + 1 : 1;
if (count > 1) {
name = name + " (" + count + ")";
}
versionUpNames[name] = count;
pushTo = upList;
}
downVersions[version] = script;
type = types.down;
if (!version) {
warning(`Migration file ${migrationDir}/${fileName} does not contain version. Skipping...`);
return;
} else if (prefix.startsWith(config.downPrefix) || downDirsHash[migrationDir]) {
if (isDown) {
version = prefix.slice(config.downPrefix.length).trim();
if (downVersions[version]) {
error(`Migration file ${script} contains duplicate version ${version} already present in ${downVersions[version]}. Exiting...`);
process.exit(1);
}
downVersions[version] = script;
type = types.down;
if (!version) {
warning(`Migration file ${migrationDir}/${fileName} does not contain version. Skipping...`);
return;
}
if (!versionDict[version]) {
return;
}
var count = versionDownNames[name];
count = count ? count + 1 : 1;
if (count > 1) {
name = name + " (" + count + ")";
}
versionDownNames[name] = count;
meta.up = versionDict[version];
pushTo = downList;
}
if (!versionDict[version]) {
return;
} else if (prefix == config.repetablePrefix || repetableDirsHash[migrationDir]) {
if (isUp) {
type = types.repetable;
if (repetableHashes[hash + ";" + name]) {
return;
}
pushTo = repetableList;
}
var count = versionDownNames[name];
count = count ? count + 1 : 1;
if (count > 1) {
name = name + " (" + count + ")";
} else if (prefix == config.repetableBeforePrefix || repetableBeforeDirsHash[migrationDir]) {
if (isUp) {
type = types.repetableBefore;
if (repetableHashes[hash + ";" + name]) {
pushTo = null;
}
pushTo = repetableBeforeList;
}
versionDownNames[name] = count;
meta.up = versionDict[version];
pushTo = downList;
}
} else if (prefix == config.repetablePrefix || repetableDirsHash[migrationDir]) {
if (isUp) {
type = types.repetable;
if (repetableHashes[hash + ";" + name]) {
return;
} else if (prefix == config.beforePrefix || beforeDirsHash[migrationDir]) {
if (isUp) {
type = types.before;
pushTo = beforeList;
}
pushTo = repetableList;
}
} else if (prefix == config.repetableBeforePrefix || repetableBeforeDirsHash[migrationDir]) {
if (isUp) {
type = types.repetableBefore;
if (repetableHashes[hash + ";" + name]) {
pushTo = null;
} else if (prefix == config.afterPrefix || afterDirsHash[migrationDir]) {
if (isUp) {
type = types.after;
pushTo = afterList;
}
pushTo = repetableBeforeList;
} else {
warning(`Migration file ${fileName} does not contain valid prefix. Skipping. Valied prefixes are '${config.upPrefix}', '${config.downPrefix}', '${config.repetablePrefix}', '${config.repetableBeforePrefix}', '${config.beforePrefix}', '${config.afterPrefix}' and separator prefix '${config.separatorPrefix}'.`);
return;
}
} else if (prefix == config.beforePrefix || beforeDirsHash[migrationDir]) {
if (isUp) {
type = types.before;
pushTo = beforeList;
if (pushTo) {
pushTo.push({ name, version, type, script, hash, content, meta });
}
} else if (prefix == config.afterPrefix || afterDirsHash[migrationDir]) {
if (isUp) {
type = types.after;
pushTo = afterList;
}
} else {
warning(`Migration file ${fileName} does not contain valid prefix. Skipping. Valied prefixes are '${config.upPrefix}', '${config.downPrefix}', '${config.repetablePrefix}', '${config.repetableBeforePrefix}', '${config.beforePrefix}', '${config.afterPrefix}' and separator prefix '${config.separatorPrefix}'.`);
});
}
afterList.sort((a, b) => config.sortFunction(a.name, b.name));
beforeList.sort((a, b) => config.sortFunction(a.name, b.name));
repetableList.sort((a, b) => config.sortFunction(a.name, b.name));
repetableBeforeList.sort((a, b) => config.sortFunction(a.name, b.name));
upList.sort((a, b) => config.versionSortFunction(a.version, b.version));
downList.sort((a, b) => config.versionSortFunction(b.version, a.version));
if (opt.list) {
if (isUp) {
beforeList.concat(repetableBeforeList).concat(upList).concat(repetableList).concat(afterList).forEach((m, index) => {
console.log({
rank: index+1,
name: m.name,
version: m.version,
type: names[m.type],
script: m.script,
hash: m.hash
})
});
return;
}
if (pushTo) {
pushTo.push({ name, version, type, script, hash, content, meta });
if (isDown) {
downList.forEach((m, index) => {
console.log({
rank: index+1,
name: m.name,
version: m.version,
type: names[m.type],
script: m.script,
hash: m.hash
})
});
}
});
}
afterList.sort((a, b) => config.sortFunction(a.name, b.name));
beforeList.sort((a, b) => config.sortFunction(a.name, b.name));
repetableList.sort((a, b) => config.sortFunction(a.name, b.name));
repetableBeforeList.sort((a, b) => config.sortFunction(a.name, b.name));
upList.sort((a, b) => config.versionSortFunction(a.version, b.version));
downList.sort((a, b) => config.versionSortFunction(b.version, a.version));
if (opt.list) {
}
if (isUp) {
beforeList.concat(repetableBeforeList).concat(upList).concat(repetableList).concat(afterList).forEach((m, index) => {
console.log({
rank: index+1,
name: m.name,
version: m.version,
type: names[m.type],
script: m.script,
hash: m.hash
})
});
return;
if (beforeList.length == 0 && repetableBeforeList.length == 0 && upList.length == 0 && repetableList.length == 0 && afterList.length == 0) {
warning("Nothing to migrate.");
return;
}
} else if (isDown) {
if (downList.length) {
warning("Nothing to migrate.");
return;
}
}
if (isDown) {
downList.forEach((m, index) => {
console.log({
rank: index+1,
name: m.name,
version: m.version,
type: names[m.type],
const date = new Date();
const ident = date.toISOString().replace(/[-:.ZT]/g, "");
const tmpFile = path.join(config.tmpDir, `migration_${ident}.sql`);
if (opt.verbose) {
console.log("Creating migration file: " + tmpFile);
}
if (fs.existsSync(tmpFile)) {
fs.unlinkSync(tmpFile);
}
const line = l => fs.appendFileSync(tmpFile, l + "\n", { encoding: "utf8", flag: "a" });
let index = 0;
const addMigration = list => list.forEach(m => {
index++;
const cleanUp = m.type == types.down ?
`delete from ${config.historyTableSchema}.${config.historyTableName} where name = '${m.meta.up.name}' and type = '${types.up}';` :
formatByName(upsertHistorySql, {
historySchema: config.historyTableSchema,
historyName: config.historyTableName,
name: m.name,
type: m.type,
version: m.version ? `'${m.version}'` : "null",
script: m.script,
hash: m.hash
})
});
});
line(`--
-- Migration ${index}
-- Script: ${m.script}
-- Type: ${names[m.type]}
--
raise info 'Running migration %: %. Script file: %', ${index}, '${m.name}', '${m.script}';
___clock = clock_timestamp();
-- Migration ${index} start
${m.content}
-- Migration ${index} end
${cleanUp}
`);
});
line(`--
-- Migration file generated by pgmigrations
-- Date: ${date.toISOString()}
--
do
$migration_${ident}$
declare ___clock timestamp with time zone;
begin
`);
if (beforeList.length == 0 &&
repetableBeforeList.length == 0 &&
upList.length == 0 && downList.length == 0 && repetableList.length == 0 && afterList.length == 0) {
}
}
if (isUp) {
if (beforeList.length == 0 && repetableBeforeList.length == 0 && upList.length == 0 && repetableList.length == 0 && afterList.length == 0) {
warning("Nothing to migrate.");
return;
if (isUp) {
addMigration(beforeList);
addMigration(repetableBeforeList);
addMigration(upList);
addMigration(repetableList);
addMigration(afterList);
} else if (isDown) {
addMigration(downList);
}
} else if (isDown) {
if (downList.length) {
warning("Nothing to migrate.");
line(`-- Update ranks
${schemaQuery(`update {schema}.{name}
set rank = t.rank
from (
select name, type, row_number() over (order by
case
when type = 'B' then 1
when type = 'P' then 2
when type = 'U' then 3
when type = 'R' then 4
when type = 'A' then 5
else 6
end,
version,
name) as rank
from {schema}.{name}
) as t
where {schema}.{name}.name = t.name and {schema}.{name}.type = t.type;`)}
`);
if (opt.dry) {
line(`raise info 'Rolling back migration changes...';
rollback;`);
}
line(`end;
$migration_${ident}$;`);
if (opt.dump) {
info("\n" + fs.readFileSync(tmpFile, { encoding: "utf8"}));
} else {
console.log("Running migration...");
var result = await run({
command: config.psql,
config: config,
file: tmpFile,
verbose: opt.verbose
});
if (result != 0) {
error("Migration failed with exit code " + result + ". Changes have been rolled back.");
return;
} else {
console.info("Migration completed successfully.");
console.info("Migration file available: " + tmpFile);
}
}
const date = new Date();
const ident = date.toISOString().replace(/[-:.ZT]/g, "");
const tmpFile = path.join(config.tmpDir, `migration_${ident}.sql`);
if (opt.verbose) {
console.log("Creating migration file: " + tmpFile);
} catch (e) {
error(e);
warning("Migration aborted!");
}
if (fs.existsSync(tmpFile)) {
fs.unlinkSync(tmpFile);
}
const line = l => fs.appendFileSync(tmpFile, l + "\n", { encoding: "utf8", flag: "a" });
}
}
let index = 0;
const addMigration = list => list.forEach(m => {
index++;
const cleanUp = m.type == types.down ?
`delete from ${config.historyTableSchema}.${config.historyTableName} where name = '${m.meta.up.name}' and type = '${types.up}';` :
formatByName(upsertHistorySql, {
historySchema: config.historyTableSchema,
historyName: config.historyTableName,
name: m.name,
type: m.type,
version: m.version ? `'${m.version}'` : "null",
script: m.script,
hash: m.hash
});
line(`--
-- Migration ${index}
-- Script: ${m.script}
-- Type: ${names[m.type]}
--
raise info 'Running migration %: %. Script file: %', ${index}, '${m.name}', '${m.script}';
___clock = clock_timestamp();
-- Migration ${index} start
${m.content}
-- Migration ${index} end
${cleanUp}
`);
});
line(`--
-- Migration file generated by pgmigrations
-- Date: ${date.toISOString()}
--
do
$migration_${ident}$
declare ___clock timestamp with time zone;
begin
`);
if (beforeList.length == 0 &&
repetableBeforeList.length == 0 &&
upList.length == 0 && downList.length == 0 && repetableList.length == 0 && afterList.length == 0) {
}
if (isUp) {
addMigration(beforeList);
addMigration(repetableBeforeList);
addMigration(upList);
addMigration(repetableList);
addMigration(afterList);
} else if (isDown) {
addMigration(downList);
}
line(`-- Update ranks
${schemaQuery(`update {schema}.{name}
set rank = t.rank
from (
select name, type, row_number() over (order by
case
when type = 'B' then 1
when type = 'P' then 2
when type = 'U' then 3
when type = 'R' then 4
when type = 'A' then 5
else 6
end,
version,
name) as rank
from {schema}.{name}
) as t
where {schema}.{name}.name = t.name and {schema}.{name}.type = t.type;`)}
`);
if (opt.dry) {
line(`raise info 'Rolling back migration changes...';
rollback;`);
}
line(`end;
$migration_${ident}$;`);
if (opt.dump) {
info("\n" + fs.readFileSync(tmpFile, { encoding: "utf8"}));
} else {
console.log("Running migration...");
var result = await run({
command: config.psql,
config: config,
file: tmpFile,
verbose: opt.verbose
});
if (result != 0) {
error("Migration failed with exit code " + result + ". Changes have been rolled back.");
return;
} else {
console.info("Migration completed successfully.");
console.info("Migration file available: " + tmpFile);
}
}
} catch (e) {
error(e);
warning("Migration aborted!");
}
}
{
"name": "@vbilopav/pgmigrations",
"version": "0.2.10",
"version": "0.2.11",
"description": "PostgreSQL Migration Tool for Node.js and NPM",

@@ -5,0 +5,0 @@ "author": "vb-consulting",

@@ -8,3 +8,3 @@ #!/usr/bin/env node

const { command: commandRunner, schema, psql } = require("./runner.js");
const migrate = require("./migration.js");
const {migrate, history} = require("./migration.js");
const tests = require("./tests.js");

@@ -121,2 +121,3 @@

{key: "down", value: "Run only down migrations. Optional switches: --list, --dry, --full, --dump."},
{key: "history", value: "console.log the current migration schema history."},
{key: "run | exec", value: "Run a command or a script file with psql. Command text or a script file is required as the second argument. Any additional arguments will be passed to a psql command."},

@@ -356,2 +357,27 @@ {key: "dump | schema", value: "Run pg_dump command with --schema-only --encoding=UTF8 swtiches on (plus schemaDumpAdditionalArgs from the config). Any additional arguments will be passed to pg_dump command."},

} else if (cmd == "history") {
for (let i = 0; i < options.length; i++) {
let opt = options[i];
if (opt.startsWith("-")) {
if (opt == "--verbose") {
verbose = true;
} else if (opt.startsWith("--config")) {
let parts = opt.split("=");
if (parts.length <= 1) {
error("Config file is required. Please provide a valid config file.");
return;
}
userConfigs.push(parts[1]);
} else {
error("Unknown option: " + opt + ". Please provide a valid option");
return;
}
}
}
const config = buildConfig({verbose});
history({verbose}, config);
} else {

@@ -358,0 +384,0 @@

@@ -25,2 +25,3 @@ # PgMigrations

down Run only down migrations. Optional switches: --list, --dry, --full, --dump.
history console.log the current migration schema history.
run | exec Run a command or a script file with psql. Command text or a script file is required as the second argument. Any additional arguments will be passed to a psql command.

@@ -27,0 +28,0 @@ dump | schema Run pg_dump command with --schema-only --encoding=UTF8 swtiches on (plus schemaDumpAdditionalArgs from the config). Any additional arguments will be passed to pg_dump command.

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc