@nyffels/mynodeorm
Advanced tools
Comparing version 1.0.0-alpha76 to 1.0.0-alpha77
282
dist/app.js
@@ -14,6 +14,2 @@ #! /usr/bin/env node | ||
import * as fs from "node:fs"; | ||
import { getAllProperties, getAutoIncrement, getColumn, getDefaultSql, getNullable, getPrimary, getSqlType, getTable, getType, getUnique, getUnsigned } from "./decorators/index.js"; | ||
import { mkdirSync } from "fs"; | ||
import { MigrationFileBuilder } from "./models/index.js"; | ||
import { isEqual } from 'lodash-es'; | ||
import mysql from "mysql2/promise"; | ||
@@ -37,5 +33,26 @@ import { createRequire } from 'module'; | ||
} | ||
// TODO Auto populate dbClasses | ||
let migrationsScript = ` | ||
export const dbClasses = []; | ||
import {createMigration} from '@nyffels/mynodeorm/dist/logic/migration.logic.js'; | ||
export const dbClasses = [ | ||
/* Add your classes here to include them into the migration files */ | ||
]; | ||
const args = process.argv.slice(2); | ||
if (!args.some(e => /^--name=*./.test(e))) { | ||
console.error("❌ Name is required for a migration. Use '--name={{name}}' to declare a name of this migration."); | ||
process.exit(1); | ||
} | ||
const name = args.find((a) => a.includes('--name=')) | ||
?.replace('--name=', '') ?? ""; | ||
const migrationLocationPath = args.find((a) => a.includes('--migration-location=')) | ||
?.replace('--migration-location=', '') ?? "./"; | ||
const configurationLocationPath = args.find((a) => a.includes('--config-location=')) | ||
?.replace('--config-location=', '') ?? "./"; | ||
createMigration(name, dbClasses, migrationLocationPath, configurationLocationPath); | ||
`; | ||
@@ -45,255 +62,2 @@ fs.writeFileSync(schemaScriptPath, migrationsScript, { encoding: "utf8" }); | ||
} | ||
else if (args.includes("--migration")) { | ||
const runMigration = () => __awaiter(void 0, void 0, void 0, function* () { | ||
var _a, _b, _c, _d, _e, _f, _g; | ||
if (!args.some(e => /^--name=*./.test(e))) { | ||
console.error("❌ Name is required for a migration. Use '--name={{name}}' to declare a name of this migration."); | ||
process.exit(1); | ||
} | ||
const name = (_b = (_a = args.find((a) => a.includes('--name='))) === null || _a === void 0 ? void 0 : _a.replace('--name=', '')) !== null && _b !== void 0 ? _b : ""; | ||
const migrationLocationPath = (_d = (_c = args.find((a) => a.includes('--migration-location='))) === null || _c === void 0 ? void 0 : _c.replace('--migration-location=', '')) !== null && _d !== void 0 ? _d : "./"; | ||
const migrationLocation = path.join(process.cwd(), migrationLocationPath, "migrations"); | ||
const configurationLocationPath = (_f = (_e = args.find((a) => a.includes('--config-location='))) === null || _e === void 0 ? void 0 : _e.replace('--config-location=', '')) !== null && _f !== void 0 ? _f : "./"; | ||
const configurationLocation = path.join(process.cwd(), configurationLocationPath, "mynodeorm-migration-config.ts"); | ||
if (!fs.existsSync(configurationLocation)) { | ||
console.error(`❌ Configuration not found on location ${configurationLocation}`); | ||
process.exit(1); | ||
} | ||
if (!fs.existsSync(migrationLocation)) { | ||
console.log("• Migration location does not exists... Creating folder."); | ||
fs.mkdirSync(migrationLocation, { recursive: true }); | ||
} | ||
const folders = fs.readdirSync(migrationLocation, { withFileTypes: true }) | ||
.filter(f => f.isDirectory()) | ||
.map(f => f.name); | ||
let version = 0; | ||
let oldSchema; | ||
if ((folders).length > 0) { | ||
// @ts-ignore | ||
version = (folders.map(f => +f.split(".")[0]) | ||
.sort() | ||
.reverse()[0]) + 1; | ||
oldSchema = JSON.parse(fs.readFileSync(path.join(migrationLocation, "schema.json")) | ||
.toString()); | ||
} | ||
const migrationName = `${version}.${getDateFormat()}_${name}`; | ||
const dbClasses = require(configurationLocation).dbClasses; | ||
console.log("• Creating schema..."); | ||
const schema = {}; | ||
for (const dbClass of dbClasses) { | ||
const table = getTable(dbClass); | ||
if (!table) { | ||
continue; | ||
} | ||
schema[table] = { | ||
columns: {} | ||
}; | ||
const properties = getAllProperties(dbClass); | ||
for (let property of properties) { | ||
const type = getType(dbClass, property); | ||
schema[table].columns[getColumn(dbClass, property)] = { | ||
type: getSqlType(dbClass, property), | ||
primary: getPrimary(dbClass, property), | ||
nullable: getNullable(dbClass, property), | ||
unique: getUnique(dbClass, property), | ||
unsigned: ['number', 'bignumber'].includes(type) ? getUnsigned(dbClass, property) : false, | ||
autoIncrement: getAutoIncrement(dbClass, property), | ||
defaultSql: (_g = getDefaultSql(dbClass, property)) !== null && _g !== void 0 ? _g : null, | ||
foreignKey: null // TODO | ||
}; | ||
} | ||
} | ||
fs.writeFileSync(path.join(migrationLocation, "schema.json"), JSON.stringify(schema)); | ||
console.log("• Schema created."); | ||
if (version === 0) { | ||
console.log("• Creating migration file..."); | ||
let migrationFileContent = MigrationFileBuilder.GetFileTemplate(); | ||
migrationFileContent = migrationFileContent.replace("{{{{TEMPLATE-DATA-DOWN}}}}", "// First migration plan starts from empty database, down should mean destroy database. Database not empty? Use rebase function for integration of existing database to the migration flow."); | ||
let uplogic = ''; | ||
Object.keys(schema) | ||
.forEach((table, index) => { | ||
if (index != 0) { | ||
uplogic += `\n\n `; | ||
} | ||
uplogic += `const table_${index} = this._builder.addTable('${table}');\n`; | ||
// @ts-ignore | ||
Object.keys(schema[table].columns) | ||
.forEach((column, cIndex) => { | ||
var _a; | ||
if (cIndex !== 0) { | ||
uplogic += `\n`; | ||
} | ||
// @ts-ignore | ||
const sColumn = schema[table].columns[column]; | ||
// @ts-ignore | ||
uplogic += ` table_${index}.addColumn('${column}', '${sColumn.type}')`; | ||
// @ts-ignore | ||
if (sColumn.primary) { | ||
uplogic += `.primary()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.nullable) { | ||
uplogic += `.nullable()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.unique) { | ||
uplogic += `.unique()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.unsigned) { | ||
uplogic += `.unsigned()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.autoIncrement) { | ||
uplogic += `.autoIncrement()`; | ||
} | ||
// @ts-ignore | ||
if (((_a = sColumn.defaultSql) !== null && _a !== void 0 ? _a : "").trim().length > 0) { | ||
// @ts-ignore | ||
uplogic += `.defaultSql('${sColumn.defaultSql}')`; | ||
} | ||
uplogic += `;`; | ||
}); | ||
uplogic += `\n\n table_${index}.commit();`; | ||
}); | ||
uplogic += `\n\n await this._builder.execute();`; | ||
migrationFileContent = migrationFileContent.replace("{{{{TEMPLATE-DATA-UP}}}}", uplogic); | ||
mkdirSync(path.join(migrationLocation, migrationName), { recursive: true }); | ||
fs.writeFileSync(path.join(migrationLocation, migrationName, "migration-plan.ts"), migrationFileContent); | ||
console.log("• Migration file created."); | ||
} | ||
else { | ||
console.log("• Creating migration file..."); | ||
let migrationFileContent = MigrationFileBuilder.GetFileTemplate(); | ||
let downlogic = ''; // TODO Create up logic | ||
let uplogic = ''; // TODO Create up logic | ||
if (isEqual(oldSchema, schema)) { | ||
const oldSchemaTables = Object.keys(oldSchema); | ||
const schemaTables = Object.keys(schema); | ||
const addedTables = schemaTables.filter(t => oldSchemaTables.indexOf(t) < 0); | ||
const removedTables = oldSchemaTables.filter(t => schemaTables.indexOf(t) < 0); | ||
const existingTables = schemaTables.filter(t => !addedTables.concat(removedTables) | ||
.includes(t)); | ||
let isFirstEntry = true; | ||
let tIndex = 0; | ||
(addedTables !== null && addedTables !== void 0 ? addedTables : []).forEach((table) => { | ||
if (!isFirstEntry) { | ||
uplogic += `\n\n `; | ||
downlogic += `\n\n `; | ||
} | ||
else { | ||
isFirstEntry = false; | ||
} | ||
uplogic += `const table_${tIndex} = this._builder.addTable('${table}');\n`; | ||
// @ts-ignore | ||
Object.keys(schema[table].columns) | ||
.forEach((column, cIndex) => { | ||
var _a; | ||
if (cIndex !== 0) { | ||
uplogic += `\n`; | ||
} | ||
// @ts-ignore | ||
const sColumn = schema[table].columns[column]; | ||
// @ts-ignore | ||
uplogic += ` table_${tIndex}.addColumn('${column}', '${sColumn.type}')`; | ||
// @ts-ignore | ||
if (sColumn.primary) { | ||
uplogic += `.primary()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.nullable) { | ||
uplogic += `.nullable()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.unique) { | ||
uplogic += `.unique()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.unsigned) { | ||
uplogic += `.unsigned()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.autoIncrement) { | ||
uplogic += `.autoIncrement()`; | ||
} | ||
// @ts-ignore | ||
if (((_a = sColumn.defaultSql) !== null && _a !== void 0 ? _a : "").trim().length > 0) { | ||
// @ts-ignore | ||
uplogic += `.defaultSql('${sColumn.defaultSql}')`; | ||
} | ||
uplogic += `;`; | ||
}); | ||
downlogic += `this._builder.dropTable('${table}')`; | ||
tIndex += 1; | ||
}); | ||
(removedTables !== null && removedTables !== void 0 ? removedTables : []).forEach((table) => { | ||
if (!isFirstEntry) { | ||
uplogic += `\n\n `; | ||
downlogic += `\n\n `; | ||
} | ||
else { | ||
isFirstEntry = false; | ||
} | ||
downlogic += `const table_${tIndex} = this._builder.addTable('${table}');\n`; | ||
// @ts-ignore | ||
Object.keys(oldSchema[table].columns) | ||
.forEach((column, cIndex) => { | ||
var _a; | ||
if (cIndex !== 0) { | ||
downlogic += `\n`; | ||
} | ||
// @ts-ignore | ||
const sColumn = oldSchema[table].columns[column]; | ||
// @ts-ignore | ||
downlogic += ` table_${tIndex}.addColumn('${column}', '${sColumn.type}')`; | ||
// @ts-ignore | ||
if (sColumn.primary) { | ||
downlogic += `.primary()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.nullable) { | ||
downlogic += `.nullable()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.unique) { | ||
downlogic += `.unique()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.unsigned) { | ||
downlogic += `.unsigned()`; | ||
} | ||
// @ts-ignore | ||
if (sColumn.autoIncrement) { | ||
downlogic += `.autoIncrement()`; | ||
} | ||
// @ts-ignore | ||
if (((_a = sColumn.defaultSql) !== null && _a !== void 0 ? _a : "").trim().length > 0) { | ||
// @ts-ignore | ||
downlogic += `.defaultSql('${sColumn.defaultSql}')`; | ||
} | ||
downlogic += `;`; | ||
}); | ||
uplogic += `this._builder.dropTable('${table}')`; | ||
tIndex += 1; | ||
}); | ||
if (uplogic.trim().length > 0) { | ||
uplogic += `\n\n this._builder.execute();`; | ||
} | ||
if (downlogic.trim().length > 0) { | ||
downlogic += `\n\n this._builder.execute();`; | ||
} | ||
} | ||
else { | ||
console.log("⚠ Schema has no differences. Creating empty migration file..."); | ||
} | ||
migrationFileContent = migrationFileContent.replace("{{{{TEMPLATE-DATA-DOWN}}}}", downlogic); | ||
migrationFileContent = migrationFileContent.replace("{{{{TEMPLATE-DATA-UP}}}}", uplogic); | ||
mkdirSync(path.join(migrationLocation, migrationName), { recursive: true }); | ||
fs.writeFileSync(path.join(migrationLocation, migrationName, "migration-plan.ts"), migrationFileContent); | ||
console.log("• Migration file created."); | ||
} | ||
console.log("✅ Migration completed."); | ||
}); | ||
runMigration(); | ||
} | ||
else if (args.includes("--migrate")) { | ||
@@ -300,0 +64,0 @@ // TODO |
@@ -17,2 +17,3 @@ import 'reflect-metadata'; | ||
export declare function getObjectById(id: string): Object | null; | ||
export declare function getObjectsInIdTable(): ITableIdentification[]; | ||
export {}; |
@@ -30,2 +30,5 @@ import 'reflect-metadata'; | ||
} | ||
export function getObjectsInIdTable() { | ||
return TableIdentifications; | ||
} | ||
//# sourceMappingURL=class.decorator.js.map |
{ | ||
"name": "@nyffels/mynodeorm", | ||
"version": "1.0.0-alpha76", | ||
"version": "1.0.0-alpha77", | ||
"description": "A full-fledged ORM framework for NodeJS and MySQL with develop friendly code aimed to handle database migrations, MySQL Query builder / helper and property mapping.", | ||
@@ -5,0 +5,0 @@ "private": false, |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution. | ||
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data. | ||
Found 1 instance in 1 package
6
131498
1988