remult
Advanced tools
Comparing version 0.25.7 to 0.25.8-exp.0
@@ -38,2 +38,14 @@ import pg from 'pg'; | ||
} | ||
provideMigrationBuilder(builder) { | ||
var db = new SqlDatabase(this); | ||
var sb = new PostgresSchemaBuilder(db, this.options?.schema); | ||
return { | ||
addColumn: async (meta, field) => { | ||
builder.addSql(await sb.getAddColumnScript(meta, field)); | ||
}, | ||
createTable: async (meta) => { | ||
builder.addSql(await sb.createTableScript(meta)); | ||
}, | ||
}; | ||
} | ||
wrapIdentifier = (name) => name | ||
@@ -40,0 +52,0 @@ .split('.') |
@@ -1,2 +0,2 @@ | ||
import { dbNamesOf, shouldNotCreateField, } from '../src/filter/filter-consumer-bridge-to-sql-request.js'; | ||
import { dbNamesOf, shouldCreateEntity, shouldNotCreateField, } from '../src/filter/filter-consumer-bridge-to-sql-request.js'; | ||
import { remult as defaultRemult } from '../src/remult-proxy.js'; | ||
@@ -22,3 +22,3 @@ import { remultStatic } from '../src/remult-static.js'; | ||
else | ||
result += ' timestamp'; | ||
result += ' timestamptz'; | ||
else | ||
@@ -113,7 +113,5 @@ result += ' ' + x.valueConverter.fieldTypeInDb; | ||
try { | ||
if (!entity.options.sqlExpression) { | ||
if (e.$entityName.toLowerCase().indexOf('from ') < 0) { | ||
await this.createIfNotExist(entity); | ||
await this.verifyAllColumns(entity); | ||
} | ||
if (shouldCreateEntity(entity, e)) { | ||
await this.createIfNotExist(entity); | ||
await this.verifyAllColumns(entity); | ||
} | ||
@@ -135,19 +133,3 @@ } | ||
if (r.rows.length == 0) { | ||
let result = ''; | ||
for (const x of entity.fields) { | ||
if (!shouldNotCreateField(x, e) || isAutoIncrement(x)) { | ||
if (result.length != 0) | ||
result += ','; | ||
result += '\r\n '; | ||
if (isAutoIncrement(x)) | ||
result += e.$dbNameOf(x) + ' serial'; | ||
else { | ||
result += postgresColumnSyntax(x, e.$dbNameOf(x)); | ||
} | ||
if (x == entity.idMetadata.field) | ||
result += ' primary key'; | ||
} | ||
} | ||
let sql = `CREATE SCHEMA IF NOT EXISTS ${this.schemaOnly(e)}; | ||
CREATE table ${this.schemaAndName(e)} (${result}\r\n)`; | ||
const sql = await this.createTableScript(entity); | ||
if (PostgresSchemaBuilder.logToConsole) | ||
@@ -159,2 +141,30 @@ console.info(sql); | ||
} | ||
/* @internal*/ | ||
async getAddColumnScript(entity, field) { | ||
let e = await dbNamesOf(entity, this.pool.wrapIdentifier); | ||
return (`ALTER table ${this.schemaAndName(e)} ` + | ||
`ADD column ${postgresColumnSyntax(field, e.$dbNameOf(field))}`); | ||
} | ||
/* @internal*/ | ||
async createTableScript(entity) { | ||
let result = ''; | ||
let e = await dbNamesOf(entity, this.pool.wrapIdentifier); | ||
for (const x of entity.fields) { | ||
if (!shouldNotCreateField(x, e) || isAutoIncrement(x)) { | ||
if (result.length != 0) | ||
result += ','; | ||
result += '\r\n '; | ||
if (isAutoIncrement(x)) | ||
result += e.$dbNameOf(x) + ' serial'; | ||
else { | ||
result += postgresColumnSyntax(x, e.$dbNameOf(x)); | ||
} | ||
if (x == entity.idMetadata.field) | ||
result += ' primary key'; | ||
} | ||
} | ||
let sql = `CREATE SCHEMA IF NOT EXISTS ${this.schemaOnly(e)}; | ||
CREATE table ${this.schemaAndName(e)} (${result}\r\n)`; | ||
return sql; | ||
} | ||
async addColumnIfNotExist(entity, c) { | ||
@@ -170,4 +180,3 @@ let e = await dbNamesOf(entity, this.pool.wrapIdentifier); | ||
`AND column_name=${cmd.param(colName.toLocaleLowerCase())}`)).rows.length == 0) { | ||
let sql = `ALTER table ${this.schemaAndName(e)} ` + | ||
`ADD column ${postgresColumnSyntax(c(entity), colName)}`; | ||
let sql = await this.getAddColumnScript(entity, c(entity)); | ||
if (PostgresSchemaBuilder.logToConsole) | ||
@@ -174,0 +183,0 @@ console.info(sql); |
@@ -1,2 +0,2 @@ | ||
import { dbNamesOf, isDbReadonly, shouldNotCreateField, } from '../src/filter/filter-consumer-bridge-to-sql-request.js'; | ||
import { dbNamesOf, isDbReadonly, shouldCreateEntity, shouldNotCreateField, } from '../src/filter/filter-consumer-bridge-to-sql-request.js'; | ||
import { customDatabaseFilterToken, Filter, } from '../src/filter/filter-interfaces.js'; | ||
@@ -16,2 +16,26 @@ import { CompoundIdField } from '../src/CompoundIdField.js'; | ||
} | ||
provideMigrationBuilder(builder) { | ||
var sb = new KnexSchemaBuilder(this.knex); | ||
return { | ||
async createTable(entity) { | ||
let e = await dbNamesOf(entity, (x) => x); | ||
sb.createTableKnexCommand(entity, e) | ||
.toSQL() | ||
.forEach((sql) => builder.addSql(sql.sql)); | ||
}, | ||
async addColumn(entity, field) { | ||
let e = await dbNamesOf(entity, (x) => x); | ||
await sb | ||
.createColumnKnexCommand(e, field, e.$dbNameOf(field)) | ||
.toSQL() | ||
.forEach((sql) => builder.addSql(sql.sql)); | ||
}, | ||
}; | ||
} | ||
createCommand() { | ||
return new KnexBridgeToSQLCommand(this.knex); | ||
} | ||
execute(sql) { | ||
return this.createCommand().execute(sql); | ||
} | ||
static getDb(remult) { | ||
@@ -354,7 +378,5 @@ const r = (remult || remultContext).dataProvider; | ||
try { | ||
if (!entity.options.sqlExpression) { | ||
if (e.$entityName.toLowerCase().indexOf('from ') < 0) { | ||
await this.createIfNotExist(entity); | ||
await this.verifyAllColumns(entity); | ||
} | ||
if (shouldCreateEntity(entity, e)) { | ||
await this.createIfNotExist(entity); | ||
await this.verifyAllColumns(entity); | ||
} | ||
@@ -371,23 +393,26 @@ } | ||
if (!(await this.knex.schema.hasTable(e.$entityName))) { | ||
let cols = new Map(); | ||
for (const f of entity.fields) { | ||
cols.set(f, { | ||
name: e.$dbNameOf(f), | ||
readonly: shouldNotCreateField(f, e), | ||
}); | ||
} | ||
await logSql(this.knex.schema.createTable(e.$entityName, (b) => { | ||
for (const x of entity.fields) { | ||
if (!cols.get(x).readonly || isAutoIncrement(x)) { | ||
if (isAutoIncrement(x)) | ||
b.increments(cols.get(x).name); | ||
else { | ||
buildColumn(x, cols.get(x).name, b, supportsJsonDataStorage(this.knex)); | ||
if (x == entity.idMetadata.field) | ||
b.primary([cols.get(x).name]); | ||
} | ||
await logSql(this.createTableKnexCommand(entity, e)); | ||
} | ||
} | ||
createTableKnexCommand(entity, e) { | ||
let cols = new Map(); | ||
for (const f of entity.fields) { | ||
cols.set(f, { | ||
name: e.$dbNameOf(f), | ||
readonly: shouldNotCreateField(f, e), | ||
}); | ||
} | ||
return this.knex.schema.createTable(e.$entityName, (b) => { | ||
for (const x of entity.fields) { | ||
if (!cols.get(x).readonly || isAutoIncrement(x)) { | ||
if (isAutoIncrement(x)) | ||
b.increments(cols.get(x).name); | ||
else { | ||
buildColumn(x, cols.get(x).name, b, supportsJsonDataStorage(this.knex)); | ||
if (x == entity.idMetadata.field) | ||
b.primary([cols.get(x).name]); | ||
} | ||
} | ||
})); | ||
} | ||
} | ||
}); | ||
} | ||
@@ -401,7 +426,10 @@ async addColumnIfNotExist(entity, c) { | ||
if (!(await this.knex.schema.hasColumn(e.$entityName, colName))) { | ||
await logSql(this.knex.schema.alterTable(e.$entityName, (b) => { | ||
buildColumn(col, colName, b, supportsJsonDataStorage(this.knex)); | ||
})); | ||
await logSql(this.createColumnKnexCommand(e, col, colName)); | ||
} | ||
} | ||
createColumnKnexCommand(e, col, colName) { | ||
return this.knex.schema.alterTable(e.$entityName, (b) => { | ||
buildColumn(col, colName, b, supportsJsonDataStorage(this.knex)); | ||
}); | ||
} | ||
async verifyAllColumns(entity) { | ||
@@ -509,1 +537,57 @@ let e = await dbNamesOf(entity, (x) => x); | ||
} | ||
class KnexBridgeToSQLCommand { | ||
source; | ||
constructor(source) { | ||
this.source = source; | ||
} | ||
values = []; | ||
addParameterAndReturnSqlToken(val) { | ||
return this.param(val); | ||
} | ||
param(val) { | ||
if (Array.isArray(val)) | ||
val = JSON.stringify(val); | ||
this.values.push(val); | ||
return '?'; | ||
} | ||
async execute(sql) { | ||
return await this.source.raw(sql, this.values).then((r) => { | ||
switch (this.source.client.config.client) { | ||
case 'mysql': | ||
case 'mysql2': | ||
return new KnexPostgresBridgeToSQLQueryResult({ | ||
fields: r[1], | ||
rows: r[0], | ||
}); | ||
case 'pg': | ||
return new KnexPostgresBridgeToSQLQueryResult(r); | ||
default: | ||
case 'better-sqlite3': | ||
case 'mssql': | ||
return new KnexPostgresBridgeToSQLQueryResult({ | ||
rows: r, | ||
}); | ||
} | ||
}); | ||
} | ||
} | ||
class KnexPostgresBridgeToSQLQueryResult { | ||
r; | ||
getColumnKeyInResultForIndexInSelect(index) { | ||
if (this.r.fields) | ||
return this.r.fields[index].name; | ||
if (this.rows.length == 0) | ||
throw Error('No rows'); | ||
let i = 0; | ||
for (let m in this.rows[0]) { | ||
if (i++ == index) | ||
return m; | ||
} | ||
throw Error('index not found'); | ||
} | ||
constructor(r) { | ||
this.r = r; | ||
this.rows = r.rows; | ||
} | ||
rows; | ||
} |
import { SqlDatabase } from './src/data-providers/sql-database.js'; | ||
import { dbNamesOf, isDbReadonly, } from './src/filter/filter-consumer-bridge-to-sql-request.js'; | ||
import { dbNamesOf, shouldNotCreateField, } from './src/filter/filter-consumer-bridge-to-sql-request.js'; | ||
import { isAutoIncrement } from './src/remult3/RepositoryImplementation.js'; | ||
@@ -55,3 +55,3 @@ export class SqlJsDataProvider { | ||
for (const x of entity.fields) { | ||
if (!isDbReadonly(x, e) || isAutoIncrement(x)) { | ||
if (!shouldNotCreateField(x, e) || isAutoIncrement(x)) { | ||
if (result.length != 0) | ||
@@ -58,0 +58,0 @@ result += ','; |
@@ -236,10 +236,16 @@ import { RestDataProvider } from './data-providers/rest-data-provider.js'; | ||
let ok = true; | ||
const result = await remult.dataProvider.transaction(async (ds) => { | ||
remult.dataProvider = ds; | ||
remult.liveQueryPublisher = trans; | ||
await what(); | ||
ok = true; | ||
}); | ||
if (ok) | ||
await trans.flush(); | ||
const prev = remult.dataProvider; | ||
try { | ||
await remult.dataProvider.transaction(async (ds) => { | ||
remult.dataProvider = ds; | ||
remult.liveQueryPublisher = trans; | ||
await what(ds); | ||
ok = true; | ||
}); | ||
if (ok) | ||
await trans.flush(); | ||
} | ||
finally { | ||
remult.dataProvider = prev; | ||
} | ||
} | ||
@@ -246,0 +252,0 @@ class transactionLiveQueryPublisher { |
@@ -9,2 +9,3 @@ import { CompoundIdField } from '../CompoundIdField.js'; | ||
import { getRepositoryInternals } from '../remult3/repository-internals.js'; | ||
import { isOfType } from '../isOfType.js'; | ||
// @dynamic | ||
@@ -15,5 +16,6 @@ export class SqlDatabase { | ||
const r = (remult || defaultRemult).dataProvider; | ||
if (!r.createCommand) | ||
throw 'the data provider is not an SqlDatabase'; | ||
return r; | ||
if (isOfType(r, 'createCommand')) | ||
return r; | ||
else | ||
throw 'the data provider is not an SqlCommandFactory'; | ||
} | ||
@@ -121,3 +123,8 @@ createCommand() { | ||
this.wrapIdentifier = (x) => sql.wrapIdentifier(x); | ||
if (isOfType(sql, 'provideMigrationBuilder')) { | ||
this.provideMigrationBuilder = sql.provideMigrationBuilder; | ||
} | ||
} | ||
provideMigrationBuilder; | ||
isProxy; | ||
createdEntities = []; | ||
@@ -124,0 +131,0 @@ } |
@@ -167,2 +167,6 @@ import { SqlDatabase } from '../data-providers/sql-database.js'; | ||
} | ||
export function shouldCreateEntity(entity, e) { | ||
return (!entity.options.sqlExpression && | ||
e.$entityName.toLowerCase().indexOf('from ') < 0); | ||
} | ||
export async function dbNamesOf(repo, wrapIdentifierOrOptions) { | ||
@@ -169,0 +173,0 @@ let options = typeof wrapIdentifierOrOptions === 'function' |
export {}; | ||
/*y1 - workshop | ||
//p1 - migrations - in docs empesize that mysql doesn't support transactions in ddl, in docs empesize destroy for knex - // await dataProvider.knex.destroy() | ||
//https://gitkraken.dev/link/dnNjb2RlOi8vZWFtb2Rpby5naXRsZW5zL2xpbmsvci9mMDgzMWU0OWIyODJkMDlkNTA0NTcxYjYwNmUzNTMwODQ3NGIwY2M2L2YvcHJvamVjdHMvcGxheS13aXRoLW1pZ3JhdGlvbnMvc3JjL3NlcnZlci9idWlsZC1taWdyYXRpb25zLnRzP3VybD1odHRwcyUzQSUyRiUyRmdpdGh1Yi5jb20lMkZyZW11bHQlMkZyZW11bHQuZ2l0?origin=gitlens | ||
/*p1 - processError in remult express | ||
- Should we merge (notFound,error,forbidden) into one method in `DataApiResponse` type? | ||
https://gitkraken.dev/link/dnNjb2RlOi8vZWFtb2Rpby5naXRsZW5zL2xpbmsvci9mMDgzMWU0OWIyODJkMDlkNTA0NTcxYjYwNmUzNTMwODQ3NGIwY2M2L2YvcHJvamVjdHMvY29yZS9zcmMvZGF0YS1hcGkudHM%2FdXJsPWh0dHBzJTNBJTJGJTJGZ2l0aHViLmNvbSUyRnJlbXVsdCUyRnJlbXVsdC5naXQ%3D?origin=gitlens | ||
- The api of the `processError` should recieve an `ErrorInfo` object and return an `ErrorInfo` object? or should it return something more in the line, | ||
of httpStatus and errorBody. - currently the `serializeError` method is used to build the response | ||
https://gitkraken.dev/link/dnNjb2RlOi8vZWFtb2Rpby5naXRsZW5zL2xpbmsvci9mMDgzMWU0OWIyODJkMDlkNTA0NTcxYjYwNmUzNTMwODQ3NGIwY2M2L2YvcHJvamVjdHMvY29yZS9zcmMvZGF0YS1hcGkudHM%2FdXJsPWh0dHBzJTNBJTJGJTJGZ2l0aHViLmNvbSUyRnJlbXVsdCUyRnJlbXVsdC5naXQmbGluZXM9NDI1?origin=gitlens | ||
- I think there should be a way to throw a forbidden exception | ||
*/ | ||
//p1 - cleanup root directory of reult | ||
//p1 - in this video I'll use remult to turn a frontend app to a fullstack app | ||
/*y1 - discuss using delete & put - with url query language for deleteMany and updateMany - | ||
- put & delete, similar to get | ||
- add where to count, deleteMany,updateMany, | ||
- prevent delete all and update all - must have meaningful where. | ||
- try forcing this also in typescript | ||
* Experiment with doing this with version control - so the students can follow changes | ||
* Prepare stackblitz | ||
- protect against deleting of all rows by mistake | ||
- https://github.com/remult/remult/issues/221#issuecomment-2016519746 | ||
# influencer: | ||
* review react summit | ||
# | ||
*/ | ||
//y1 - getFields didn't work for kobi in the home component | ||
//p1 - processError in remult express | ||
//p1 - I think there should be a way to throw a forbidden exception | ||
//p1 - add section to Fields doc, explaining field type in db | ||
//p1 - add section about union type | ||
//p1 - add section about value list field type | ||
/*y1 - Talk JYC - JYC - add some integrity checks on delete | ||
@@ -56,3 +64,2 @@ - soft delete | ||
//y1 - select data provider per entity https://discord.com/channels/975754286384418847/976006081748807690/1201415305885397003 | ||
//y1 - migrations | ||
//y1 - live query refresh of view on table update | ||
@@ -59,0 +66,0 @@ //y1 - main vs master |
{ | ||
"name": "remult", | ||
"version": "0.25.7", | ||
"version": "0.25.8-exp.0", | ||
"description": "A CRUD framework for full-stack TypeScript", | ||
@@ -104,2 +104,7 @@ "homepage": "https://remult.dev", | ||
}, | ||
"./migrations": { | ||
"require": "./migrations/index.js", | ||
"import": "./esm/migrations/index.js", | ||
"types": "./migrations/index.d.ts" | ||
}, | ||
"./ably": { | ||
@@ -106,0 +111,0 @@ "require": "./ably.js", |
@@ -6,2 +6,3 @@ import type { ClientBase, PoolConfig, QueryResult } from 'pg'; | ||
import type { SqlCommand, SqlImplementation } from '../src/sql-command.js'; | ||
import type { CanBuildMigrations, MigrationBuilder, MigrationCode } from '../migrations/migration-types.js'; | ||
export interface PostgresPool extends PostgresCommandSource { | ||
@@ -13,3 +14,3 @@ connect(): Promise<PostgresClient>; | ||
} | ||
export declare class PostgresDataProvider implements SqlImplementation { | ||
export declare class PostgresDataProvider implements SqlImplementation, CanBuildMigrations { | ||
private pool; | ||
@@ -28,2 +29,3 @@ private options?; | ||
}); | ||
provideMigrationBuilder(builder: MigrationCode): MigrationBuilder; | ||
wrapIdentifier: (name: any) => any; | ||
@@ -30,0 +32,0 @@ ensureSchema(entities: EntityMetadata<any>[]): Promise<void>; |
@@ -52,2 +52,36 @@ "use strict"; | ||
}; | ||
PostgresDataProvider.prototype.provideMigrationBuilder = function (builder) { | ||
var _this = this; | ||
var _a; | ||
var db = new sql_database_js_1.SqlDatabase(this); | ||
var sb = new schema_builder_js_1.PostgresSchemaBuilder(db, (_a = this.options) === null || _a === void 0 ? void 0 : _a.schema); | ||
return { | ||
addColumn: function (meta, field) { return tslib_1.__awaiter(_this, void 0, void 0, function () { | ||
var _a, _b; | ||
return tslib_1.__generator(this, function (_c) { | ||
switch (_c.label) { | ||
case 0: | ||
_b = (_a = builder).addSql; | ||
return [4 /*yield*/, sb.getAddColumnScript(meta, field)]; | ||
case 1: | ||
_b.apply(_a, [_c.sent()]); | ||
return [2 /*return*/]; | ||
} | ||
}); | ||
}); }, | ||
createTable: function (meta) { return tslib_1.__awaiter(_this, void 0, void 0, function () { | ||
var _a, _b; | ||
return tslib_1.__generator(this, function (_c) { | ||
switch (_c.label) { | ||
case 0: | ||
_b = (_a = builder).addSql; | ||
return [4 /*yield*/, sb.createTableScript(meta)]; | ||
case 1: | ||
_b.apply(_a, [_c.sent()]); | ||
return [2 /*return*/]; | ||
} | ||
}); | ||
}); }, | ||
}; | ||
}; | ||
PostgresDataProvider.prototype.ensureSchema = function (entities) { | ||
@@ -54,0 +88,0 @@ var _a; |
@@ -26,3 +26,3 @@ "use strict"; | ||
else | ||
result += ' timestamp'; | ||
result += ' timestamptz'; | ||
else | ||
@@ -180,4 +180,3 @@ result += ' ' + x.valueConverter.fieldTypeInDb; | ||
_b.trys.push([3, 7, , 8]); | ||
if (!!entity.options.sqlExpression) return [3 /*break*/, 6]; | ||
if (!(e.$entityName.toLowerCase().indexOf('from ') < 0)) return [3 /*break*/, 6]; | ||
if (!(0, filter_consumer_bridge_to_sql_request_js_1.shouldCreateEntity)(entity, e)) return [3 /*break*/, 6]; | ||
return [4 /*yield*/, this.createIfNotExist(entity)]; | ||
@@ -229,41 +228,17 @@ case 4: | ||
.then(function (r) { return tslib_1.__awaiter(_this, void 0, void 0, function () { | ||
var result, _a, _b, x, sql; | ||
var e_3, _c; | ||
return tslib_1.__generator(this, function (_d) { | ||
switch (_d.label) { | ||
var sql; | ||
return tslib_1.__generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
if (!(r.rows.length == 0)) return [3 /*break*/, 2]; | ||
result = ''; | ||
try { | ||
for (_a = tslib_1.__values(entity.fields), _b = _a.next(); !_b.done; _b = _a.next()) { | ||
x = _b.value; | ||
if (!(0, filter_consumer_bridge_to_sql_request_js_1.shouldNotCreateField)(x, e) || (0, RepositoryImplementation_js_1.isAutoIncrement)(x)) { | ||
if (result.length != 0) | ||
result += ','; | ||
result += '\r\n '; | ||
if ((0, RepositoryImplementation_js_1.isAutoIncrement)(x)) | ||
result += e.$dbNameOf(x) + ' serial'; | ||
else { | ||
result += postgresColumnSyntax(x, e.$dbNameOf(x)); | ||
} | ||
if (x == entity.idMetadata.field) | ||
result += ' primary key'; | ||
} | ||
} | ||
} | ||
catch (e_3_1) { e_3 = { error: e_3_1 }; } | ||
finally { | ||
try { | ||
if (_b && !_b.done && (_c = _a.return)) _c.call(_a); | ||
} | ||
finally { if (e_3) throw e_3.error; } | ||
} | ||
sql = "CREATE SCHEMA IF NOT EXISTS ".concat(this.schemaOnly(e), ";\nCREATE table ").concat(this.schemaAndName(e), " (").concat(result, "\r\n)"); | ||
if (!(r.rows.length == 0)) return [3 /*break*/, 3]; | ||
return [4 /*yield*/, this.createTableScript(entity)]; | ||
case 1: | ||
sql = _a.sent(); | ||
if (PostgresSchemaBuilder.logToConsole) | ||
console.info(sql); | ||
return [4 /*yield*/, this.pool.execute(sql)]; | ||
case 1: | ||
_d.sent(); | ||
_d.label = 2; | ||
case 2: return [2 /*return*/]; | ||
case 2: | ||
_a.sent(); | ||
_a.label = 3; | ||
case 3: return [2 /*return*/]; | ||
} | ||
@@ -279,2 +254,59 @@ }); | ||
}; | ||
/* @internal*/ | ||
PostgresSchemaBuilder.prototype.getAddColumnScript = function (entity, field) { | ||
return tslib_1.__awaiter(this, void 0, void 0, function () { | ||
var e; | ||
return tslib_1.__generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, (0, filter_consumer_bridge_to_sql_request_js_1.dbNamesOf)(entity, this.pool.wrapIdentifier)]; | ||
case 1: | ||
e = _a.sent(); | ||
return [2 /*return*/, ("ALTER table ".concat(this.schemaAndName(e), " ") + | ||
"ADD column ".concat(postgresColumnSyntax(field, e.$dbNameOf(field))))]; | ||
} | ||
}); | ||
}); | ||
}; | ||
/* @internal*/ | ||
PostgresSchemaBuilder.prototype.createTableScript = function (entity) { | ||
return tslib_1.__awaiter(this, void 0, void 0, function () { | ||
var result, e, _a, _b, x, sql; | ||
var e_3, _c; | ||
return tslib_1.__generator(this, function (_d) { | ||
switch (_d.label) { | ||
case 0: | ||
result = ''; | ||
return [4 /*yield*/, (0, filter_consumer_bridge_to_sql_request_js_1.dbNamesOf)(entity, this.pool.wrapIdentifier)]; | ||
case 1: | ||
e = _d.sent(); | ||
try { | ||
for (_a = tslib_1.__values(entity.fields), _b = _a.next(); !_b.done; _b = _a.next()) { | ||
x = _b.value; | ||
if (!(0, filter_consumer_bridge_to_sql_request_js_1.shouldNotCreateField)(x, e) || (0, RepositoryImplementation_js_1.isAutoIncrement)(x)) { | ||
if (result.length != 0) | ||
result += ','; | ||
result += '\r\n '; | ||
if ((0, RepositoryImplementation_js_1.isAutoIncrement)(x)) | ||
result += e.$dbNameOf(x) + ' serial'; | ||
else { | ||
result += postgresColumnSyntax(x, e.$dbNameOf(x)); | ||
} | ||
if (x == entity.idMetadata.field) | ||
result += ' primary key'; | ||
} | ||
} | ||
} | ||
catch (e_3_1) { e_3 = { error: e_3_1 }; } | ||
finally { | ||
try { | ||
if (_b && !_b.done && (_c = _a.return)) _c.call(_a); | ||
} | ||
finally { if (e_3) throw e_3.error; } | ||
} | ||
sql = "CREATE SCHEMA IF NOT EXISTS ".concat(this.schemaOnly(e), ";\nCREATE table ").concat(this.schemaAndName(e), " (").concat(result, "\r\n)"); | ||
return [2 /*return*/, sql]; | ||
} | ||
}); | ||
}); | ||
}; | ||
PostgresSchemaBuilder.prototype.addColumnIfNotExist = function (entity, c) { | ||
@@ -292,3 +324,3 @@ return tslib_1.__awaiter(this, void 0, void 0, function () { | ||
case 2: | ||
_a.trys.push([2, 6, , 7]); | ||
_a.trys.push([2, 7, , 8]); | ||
cmd = this.pool.createCommand(); | ||
@@ -300,17 +332,18 @@ colName = e.$dbNameOf(c(entity)); | ||
case 3: | ||
if (!((_a.sent()).rows.length == 0)) return [3 /*break*/, 5]; | ||
sql = "ALTER table ".concat(this.schemaAndName(e), " ") + | ||
"ADD column ".concat(postgresColumnSyntax(c(entity), colName)); | ||
if (!((_a.sent()).rows.length == 0)) return [3 /*break*/, 6]; | ||
return [4 /*yield*/, this.getAddColumnScript(entity, c(entity))]; | ||
case 4: | ||
sql = _a.sent(); | ||
if (PostgresSchemaBuilder.logToConsole) | ||
console.info(sql); | ||
return [4 /*yield*/, this.pool.execute(sql)]; | ||
case 4: | ||
case 5: | ||
_a.sent(); | ||
_a.label = 5; | ||
case 5: return [3 /*break*/, 7]; | ||
case 6: | ||
_a.label = 6; | ||
case 6: return [3 /*break*/, 8]; | ||
case 7: | ||
err_2 = _a.sent(); | ||
console.error(err_2); | ||
return [3 /*break*/, 7]; | ||
case 7: return [2 /*return*/]; | ||
return [3 /*break*/, 8]; | ||
case 8: return [2 /*return*/]; | ||
} | ||
@@ -317,0 +350,0 @@ }); |
@@ -8,19 +8,13 @@ <div align="center"> | ||
<a href="https://circleci.com/gh/remult/remult/tree/master" rel="nofollow"> | ||
<img alt="CircleCI" src="https://circleci.com/gh/remult/remult/tree/master.svg?style=shield"> | ||
</a> | ||
<img alt="CircleCI" src="https://circleci.com/gh/remult/remult/tree/master.svg?style=shield"></a> | ||
<a href="https://codecov.io/gh/remult/remult" rel="nofollow"> | ||
<img src="https://codecov.io/gh/remult/remult/branch/master/graph/badge.svg?token=LYWQRUN3D1"/> | ||
</a> | ||
<img src="https://codecov.io/gh/remult/remult/branch/master/graph/badge.svg?token=LYWQRUN3D1"/></a> | ||
<a href="https://raw.githubusercontent.com/remult/remult/master/LICENSE" rel="nofollow"> | ||
<img alt="GitHub license" src="https://img.shields.io/badge/license-MIT-blue.svg"> | ||
</a> | ||
<img alt="GitHub license" src="https://img.shields.io/badge/license-MIT-blue.svg"></a> | ||
<a href="https://www.npmjs.com/package/remult" rel="nofollow"> | ||
<img alt="npm version" src="https://badge.fury.io/js/remult.svg"> | ||
</a> | ||
<img alt="npm version" src="https://badge.fury.io/js/remult.svg"></a> | ||
<a href="https://www.npmjs.com/package/remult" rel="nofollow"> | ||
<img alt="npm downloads" src="https://img.shields.io/npm/dm/remult"> | ||
</a> | ||
<img alt="npm downloads" src="https://img.shields.io/npm/dm/remult"></a> | ||
<a href="https://discord.gg/GXHk7ZfuG5" rel="nofollow"> | ||
<img alt="Join Discord" src="https://badgen.net/discord/online-members/GXHk7ZfuG5?icon=discord&label=Discord"/> | ||
</a> | ||
<img alt="Join Discord" src="https://badgen.net/discord/members/GXHk7ZfuG5?icon=discord&label=Discord"/></a> | ||
</div> | ||
@@ -27,0 +21,0 @@ |
import type { Knex } from 'knex'; | ||
import type { Remult } from '../src/context.js'; | ||
import type { EntityDbNamesBase } from '../src/filter/filter-consumer-bridge-to-sql-request.js'; | ||
import type { FieldMetadata } from '../src/column-interfaces.js'; | ||
@@ -7,6 +8,10 @@ import type { DataProvider, EntityDataProvider } from '../src/data-interfaces.js'; | ||
import type { RepositoryOverloads } from '../src/remult3/RepositoryImplementation.js'; | ||
import type { HasWrapIdentifier } from '../src/sql-command.js'; | ||
export declare class KnexDataProvider implements DataProvider, HasWrapIdentifier { | ||
import type { HasWrapIdentifier, SqlCommand, SqlCommandFactory, SqlResult } from '../src/sql-command.js'; | ||
import type { CanBuildMigrations, MigrationBuilder, MigrationCode } from '../migrations/migration-types.js'; | ||
export declare class KnexDataProvider implements DataProvider, HasWrapIdentifier, SqlCommandFactory, CanBuildMigrations { | ||
knex: Knex; | ||
constructor(knex: Knex); | ||
provideMigrationBuilder(builder: MigrationCode): MigrationBuilder; | ||
createCommand(): SqlCommand; | ||
execute(sql: string): Promise<SqlResult>; | ||
static getDb(remult?: Remult): Knex<any, any[]>; | ||
@@ -27,3 +32,5 @@ wrapIdentifier: (name: string) => string; | ||
createIfNotExist(entity: EntityMetadata): Promise<void>; | ||
addColumnIfNotExist<T extends EntityMetadata>(entity: T, c: (e: T) => FieldMetadata): Promise<void>; | ||
createTableKnexCommand(entity: EntityMetadata<any>, e: EntityDbNamesBase): Knex.SchemaBuilder; | ||
addColumnIfNotExist(entity: EntityMetadata, c: (e: EntityMetadata) => FieldMetadata): Promise<void>; | ||
createColumnKnexCommand(e: EntityDbNamesBase, col: FieldMetadata<any, any>, colName: string): Knex.SchemaBuilder; | ||
verifyAllColumns<T extends EntityMetadata>(entity: T): Promise<void>; | ||
@@ -30,0 +37,0 @@ additionalWhere: string; |
@@ -21,2 +21,48 @@ "use strict"; | ||
} | ||
KnexDataProvider.prototype.provideMigrationBuilder = function (builder) { | ||
var sb = new KnexSchemaBuilder(this.knex); | ||
return { | ||
createTable: function (entity) { | ||
return tslib_1.__awaiter(this, void 0, void 0, function () { | ||
var e; | ||
return tslib_1.__generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, (0, filter_consumer_bridge_to_sql_request_js_1.dbNamesOf)(entity, function (x) { return x; })]; | ||
case 1: | ||
e = _a.sent(); | ||
sb.createTableKnexCommand(entity, e) | ||
.toSQL() | ||
.forEach(function (sql) { return builder.addSql(sql.sql); }); | ||
return [2 /*return*/]; | ||
} | ||
}); | ||
}); | ||
}, | ||
addColumn: function (entity, field) { | ||
return tslib_1.__awaiter(this, void 0, void 0, function () { | ||
var e; | ||
return tslib_1.__generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, (0, filter_consumer_bridge_to_sql_request_js_1.dbNamesOf)(entity, function (x) { return x; })]; | ||
case 1: | ||
e = _a.sent(); | ||
return [4 /*yield*/, sb | ||
.createColumnKnexCommand(e, field, e.$dbNameOf(field)) | ||
.toSQL() | ||
.forEach(function (sql) { return builder.addSql(sql.sql); })]; | ||
case 2: | ||
_a.sent(); | ||
return [2 /*return*/]; | ||
} | ||
}); | ||
}); | ||
}, | ||
}; | ||
}; | ||
KnexDataProvider.prototype.createCommand = function () { | ||
return new KnexBridgeToSQLCommand(this.knex); | ||
}; | ||
KnexDataProvider.prototype.execute = function (sql) { | ||
return this.createCommand().execute(sql); | ||
}; | ||
KnexDataProvider.getDb = function (remult) { | ||
@@ -691,4 +737,3 @@ var r = (remult || remult_proxy_js_1.remult).dataProvider; | ||
_b.trys.push([3, 7, , 8]); | ||
if (!!entity.options.sqlExpression) return [3 /*break*/, 6]; | ||
if (!(e.$entityName.toLowerCase().indexOf('from ') < 0)) return [3 /*break*/, 6]; | ||
if (!(0, filter_consumer_bridge_to_sql_request_js_1.shouldCreateEntity)(entity, e)) return [3 /*break*/, 6]; | ||
return [4 /*yield*/, this.createIfNotExist(entity)]; | ||
@@ -727,57 +772,15 @@ case 4: | ||
return tslib_1.__awaiter(this, void 0, void 0, function () { | ||
var e, cols_1, _a, _b, f; | ||
var e_9, _c; | ||
var _this = this; | ||
return tslib_1.__generator(this, function (_d) { | ||
switch (_d.label) { | ||
var e; | ||
return tslib_1.__generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, (0, filter_consumer_bridge_to_sql_request_js_1.dbNamesOf)(entity, function (x) { return x; })]; | ||
case 1: | ||
e = _d.sent(); | ||
e = _a.sent(); | ||
return [4 /*yield*/, this.knex.schema.hasTable(e.$entityName)]; | ||
case 2: | ||
if (!!(_d.sent())) return [3 /*break*/, 4]; | ||
cols_1 = new Map(); | ||
try { | ||
for (_a = tslib_1.__values(entity.fields), _b = _a.next(); !_b.done; _b = _a.next()) { | ||
f = _b.value; | ||
cols_1.set(f, { | ||
name: e.$dbNameOf(f), | ||
readonly: (0, filter_consumer_bridge_to_sql_request_js_1.shouldNotCreateField)(f, e), | ||
}); | ||
} | ||
} | ||
catch (e_9_1) { e_9 = { error: e_9_1 }; } | ||
finally { | ||
try { | ||
if (_b && !_b.done && (_c = _a.return)) _c.call(_a); | ||
} | ||
finally { if (e_9) throw e_9.error; } | ||
} | ||
return [4 /*yield*/, logSql(this.knex.schema.createTable(e.$entityName, function (b) { | ||
var e_10, _a; | ||
try { | ||
for (var _b = tslib_1.__values(entity.fields), _c = _b.next(); !_c.done; _c = _b.next()) { | ||
var x = _c.value; | ||
if (!cols_1.get(x).readonly || (0, RepositoryImplementation_js_1.isAutoIncrement)(x)) { | ||
if ((0, RepositoryImplementation_js_1.isAutoIncrement)(x)) | ||
b.increments(cols_1.get(x).name); | ||
else { | ||
buildColumn(x, cols_1.get(x).name, b, supportsJsonDataStorage(_this.knex)); | ||
if (x == entity.idMetadata.field) | ||
b.primary([cols_1.get(x).name]); | ||
} | ||
} | ||
} | ||
} | ||
catch (e_10_1) { e_10 = { error: e_10_1 }; } | ||
finally { | ||
try { | ||
if (_c && !_c.done && (_a = _b.return)) _a.call(_b); | ||
} | ||
finally { if (e_10) throw e_10.error; } | ||
} | ||
}))]; | ||
if (!!(_a.sent())) return [3 /*break*/, 4]; | ||
return [4 /*yield*/, logSql(this.createTableKnexCommand(entity, e))]; | ||
case 3: | ||
_d.sent(); | ||
_d.label = 4; | ||
_a.sent(); | ||
_a.label = 4; | ||
case 4: return [2 /*return*/]; | ||
@@ -788,6 +791,50 @@ } | ||
}; | ||
KnexSchemaBuilder.prototype.createTableKnexCommand = function (entity, e) { | ||
var e_9, _a; | ||
var _this = this; | ||
var cols = new Map(); | ||
try { | ||
for (var _b = tslib_1.__values(entity.fields), _c = _b.next(); !_c.done; _c = _b.next()) { | ||
var f = _c.value; | ||
cols.set(f, { | ||
name: e.$dbNameOf(f), | ||
readonly: (0, filter_consumer_bridge_to_sql_request_js_1.shouldNotCreateField)(f, e), | ||
}); | ||
} | ||
} | ||
catch (e_9_1) { e_9 = { error: e_9_1 }; } | ||
finally { | ||
try { | ||
if (_c && !_c.done && (_a = _b.return)) _a.call(_b); | ||
} | ||
finally { if (e_9) throw e_9.error; } | ||
} | ||
return this.knex.schema.createTable(e.$entityName, function (b) { | ||
var e_10, _a; | ||
try { | ||
for (var _b = tslib_1.__values(entity.fields), _c = _b.next(); !_c.done; _c = _b.next()) { | ||
var x = _c.value; | ||
if (!cols.get(x).readonly || (0, RepositoryImplementation_js_1.isAutoIncrement)(x)) { | ||
if ((0, RepositoryImplementation_js_1.isAutoIncrement)(x)) | ||
b.increments(cols.get(x).name); | ||
else { | ||
buildColumn(x, cols.get(x).name, b, supportsJsonDataStorage(_this.knex)); | ||
if (x == entity.idMetadata.field) | ||
b.primary([cols.get(x).name]); | ||
} | ||
} | ||
} | ||
} | ||
catch (e_10_1) { e_10 = { error: e_10_1 }; } | ||
finally { | ||
try { | ||
if (_c && !_c.done && (_a = _b.return)) _a.call(_b); | ||
} | ||
finally { if (e_10) throw e_10.error; } | ||
} | ||
}); | ||
}; | ||
KnexSchemaBuilder.prototype.addColumnIfNotExist = function (entity, c) { | ||
return tslib_1.__awaiter(this, void 0, void 0, function () { | ||
var e, col, colName; | ||
var _this = this; | ||
return tslib_1.__generator(this, function (_a) { | ||
@@ -805,5 +852,3 @@ switch (_a.label) { | ||
if (!!(_a.sent())) return [3 /*break*/, 4]; | ||
return [4 /*yield*/, logSql(this.knex.schema.alterTable(e.$entityName, function (b) { | ||
buildColumn(col, colName, b, supportsJsonDataStorage(_this.knex)); | ||
}))]; | ||
return [4 /*yield*/, logSql(this.createColumnKnexCommand(e, col, colName))]; | ||
case 3: | ||
@@ -817,2 +862,8 @@ _a.sent(); | ||
}; | ||
// Builds (but does not run) the knex ALTER TABLE command that adds a single
// column to an existing entity table. Split out from addColumnIfNotExist so
// migration builders can capture the SQL instead of executing it.
KnexSchemaBuilder.prototype.createColumnKnexCommand = function (e, col, colName) {
    var self = this;
    var defineColumn = function (tableBuilder) {
        buildColumn(col, colName, tableBuilder, supportsJsonDataStorage(self.knex));
    };
    return self.knex.schema.alterTable(e.$entityName, defineColumn);
};
KnexSchemaBuilder.prototype.verifyAllColumns = function (entity) { | ||
@@ -981,1 +1032,64 @@ return tslib_1.__awaiter(this, void 0, void 0, function () { | ||
} | ||
// Adapts a knex connection/transaction to remult's SqlCommand interface:
// collects positional parameters via param() and normalizes the
// driver-specific result of knex.raw() into a common query-result shape.
var KnexBridgeToSQLCommand = /** @class */ (function () {
    function KnexBridgeToSQLCommand(source) {
        // source: a knex instance (or knex transaction) that exposes .raw().
        this.source = source;
        // Parameter values accumulated in the order their '?' tokens were issued.
        this.values = [];
    }
    KnexBridgeToSQLCommand.prototype.addParameterAndReturnSqlToken = function (val) {
        return this.param(val);
    };
    // Registers a parameter value and returns the knex placeholder token '?'.
    KnexBridgeToSQLCommand.prototype.param = function (val) {
        // Arrays are stored as JSON strings — knex would otherwise expand an
        // array into multiple bindings.
        if (Array.isArray(val))
            val = JSON.stringify(val);
        this.values.push(val);
        return '?';
    };
    // Runs the SQL with the collected bindings and wraps the raw result.
    // The shape of knex.raw()'s resolution differs per client driver, hence
    // the switch on this.source.client.config.client.
    KnexBridgeToSQLCommand.prototype.execute = function (sql) {
        return tslib_1.__awaiter(this, void 0, void 0, function () {
            var _this = this;
            return tslib_1.__generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, this.source.raw(sql, this.values).then(function (r) {
                            switch (_this.source.client.config.client) {
                                case 'mysql':
                                case 'mysql2':
                                    // mysql drivers resolve to [rows, fields].
                                    return new KnexPostgresBridgeToSQLQueryResult({
                                        fields: r[1],
                                        rows: r[0],
                                    });
                                case 'pg':
                                    // pg already resolves to { rows, fields }.
                                    return new KnexPostgresBridgeToSQLQueryResult(r);
                                // NOTE: default intentionally shares the branch below —
                                // unknown clients are treated like better-sqlite3/mssql,
                                // whose raw() resolves to the rows array itself.
                                default:
                                case 'better-sqlite3':
                                case 'mssql':
                                    return new KnexPostgresBridgeToSQLQueryResult({
                                        rows: r,
                                    });
                            }
                        })];
                    case 1: return [2 /*return*/, _a.sent()];
                }
            });
        });
    };
    return KnexBridgeToSQLCommand;
}());
// Wraps a normalized knex query result as remult's SqlResult shape:
// exposes the rows directly and maps a select-list position to the key
// under which that column appears in each row object.
var KnexPostgresBridgeToSQLQueryResult = /** @class */ (function () {
    function KnexPostgresBridgeToSQLQueryResult(r) {
        this.r = r;
        this.rows = r.rows;
    }
    // Resolves the row-object key for the index-th selected column.
    // Prefers driver-supplied field metadata when present; otherwise falls
    // back to the enumeration order of the first row's keys.
    // Throws when there are no rows to inspect or the index is out of range.
    KnexPostgresBridgeToSQLQueryResult.prototype.getColumnKeyInResultForIndexInSelect = function (index) {
        if (this.r.fields)
            return this.r.fields[index].name;
        if (!this.rows.length)
            throw Error('No rows');
        var position = 0;
        for (var key in this.rows[0]) {
            if (position === index)
                return key;
            position++;
        }
        throw Error('index not found');
    };
    return KnexPostgresBridgeToSQLQueryResult;
}());
@@ -119,3 +119,3 @@ "use strict"; | ||
x = _b.value; | ||
if (!(0, filter_consumer_bridge_to_sql_request_js_1.isDbReadonly)(x, e) || (0, RepositoryImplementation_js_1.isAutoIncrement)(x)) { | ||
if (!(0, filter_consumer_bridge_to_sql_request_js_1.shouldNotCreateField)(x, e) || (0, RepositoryImplementation_js_1.isAutoIncrement)(x)) { | ||
if (result.length != 0) | ||
@@ -122,0 +122,0 @@ result += ','; |
@@ -7,3 +7,3 @@ import type { ClassType } from '../classType.js'; | ||
import { IdEntity } from '../src/remult3/IdEntity.js'; | ||
import type { Repository } from '../src/remult3/remult3.js'; | ||
import type { EntityMetadata, Repository } from '../src/remult3/remult3.js'; | ||
import type { queuedJobInfoResponse } from '../src/server-action.js'; | ||
@@ -59,2 +59,10 @@ export interface RemultServerOptions<RequestType> { | ||
queueStorage?: QueueStorage; | ||
error?: (info: { | ||
req: RequestType; | ||
entity?: EntityMetadata; | ||
exception?: any; | ||
httpStatusCode: number; | ||
responseBody: any; | ||
sendError: (httpStatusCode: number, body: any) => void; | ||
}) => Promise<void>; | ||
} | ||
@@ -61,0 +69,0 @@ export interface InitRequestOptions { |
@@ -186,3 +186,3 @@ import type { ClassType } from '../classType.js'; | ||
} | ||
export declare function doTransaction(remult: Remult, what: () => Promise<void>): Promise<void>; | ||
export declare function doTransaction(remult: Remult, what: (dp: DataProvider) => Promise<void>): Promise<void>; | ||
export declare function withRemult<T>(callback: (remult: any) => Promise<T>, options?: { | ||
@@ -189,0 +189,0 @@ dataProvider?: DataProvider; |
@@ -325,3 +325,3 @@ "use strict"; | ||
return tslib_1.__awaiter(this, void 0, void 0, function () { | ||
var trans, ok, result; | ||
var trans, ok, prev; | ||
var _this = this; | ||
@@ -333,2 +333,6 @@ return tslib_1.__generator(this, function (_a) { | ||
ok = true; | ||
prev = remult.dataProvider; | ||
_a.label = 1; | ||
case 1: | ||
_a.trys.push([1, , 5, 6]); | ||
return [4 /*yield*/, remult.dataProvider.transaction(function (ds) { return tslib_1.__awaiter(_this, void 0, void 0, function () { | ||
@@ -340,3 +344,3 @@ return tslib_1.__generator(this, function (_a) { | ||
remult.liveQueryPublisher = trans; | ||
return [4 /*yield*/, what()]; | ||
return [4 /*yield*/, what(ds)]; | ||
case 1: | ||
@@ -349,10 +353,14 @@ _a.sent(); | ||
}); })]; | ||
case 1: | ||
result = _a.sent(); | ||
if (!ok) return [3 /*break*/, 3]; | ||
return [4 /*yield*/, trans.flush()]; | ||
case 2: | ||
_a.sent(); | ||
_a.label = 3; | ||
case 3: return [2 /*return*/]; | ||
if (!ok) return [3 /*break*/, 4]; | ||
return [4 /*yield*/, trans.flush()]; | ||
case 3: | ||
_a.sent(); | ||
_a.label = 4; | ||
case 4: return [3 /*break*/, 6]; | ||
case 5: | ||
remult.dataProvider = prev; | ||
return [7 /*endfinally*/]; | ||
case 6: return [2 /*return*/]; | ||
} | ||
@@ -359,0 +367,0 @@ }); |
import type { DataProvider, EntityDataProvider } from '../data-interfaces.js'; | ||
import type { HasWrapIdentifier, SqlCommand, SqlCommandWithParameters, SqlImplementation, SqlResult } from '../sql-command.js'; | ||
import type { HasWrapIdentifier, SqlCommand, SqlCommandFactory, SqlCommandWithParameters, SqlImplementation, SqlResult } from '../sql-command.js'; | ||
import type { Remult } from '../context.js'; | ||
@@ -7,3 +7,4 @@ import type { CustomSqlFilterBuilderFunction, EntityDbNamesBase } from '../filter/filter-consumer-bridge-to-sql-request.js'; | ||
import type { RepositoryOverloads } from '../remult3/RepositoryImplementation.js'; | ||
export declare class SqlDatabase implements DataProvider, HasWrapIdentifier { | ||
import type { CanBuildMigrations, MigrationBuilder, MigrationCode } from '../../migrations/migration-types.js'; | ||
export declare class SqlDatabase implements DataProvider, HasWrapIdentifier, CanBuildMigrations, SqlCommandFactory { | ||
private sql; | ||
@@ -34,3 +35,5 @@ static getDb(remult?: Remult): SqlDatabase; | ||
constructor(sql: SqlImplementation); | ||
provideMigrationBuilder: (builder: MigrationCode) => MigrationBuilder; | ||
isProxy?: boolean; | ||
private createdEntities; | ||
} |
@@ -13,2 +13,3 @@ "use strict"; | ||
var repository_internals_js_1 = require("../remult3/repository-internals.js"); | ||
var isOfType_js_1 = require("../isOfType.js"); | ||
// @dynamic | ||
@@ -22,8 +23,12 @@ var SqlDatabase = /** @class */ (function () { | ||
this.wrapIdentifier = function (x) { return sql.wrapIdentifier(x); }; | ||
if ((0, isOfType_js_1.isOfType)(sql, 'provideMigrationBuilder')) { | ||
this.provideMigrationBuilder = sql.provideMigrationBuilder; | ||
} | ||
} | ||
SqlDatabase.getDb = function (remult) { | ||
var r = (remult || remult_proxy_js_1.remult).dataProvider; | ||
if (!r.createCommand) | ||
throw 'the data provider is not an SqlDatabase'; | ||
return r; | ||
if ((0, isOfType_js_1.isOfType)(r, 'createCommand')) | ||
return r; | ||
else | ||
throw 'the data provider is not an SqlCommandFactory'; | ||
}; | ||
@@ -30,0 +35,0 @@ SqlDatabase.prototype.createCommand = function () { |
@@ -46,2 +46,3 @@ import type { FieldMetadata } from '../column-interfaces.js'; | ||
export declare function shouldNotCreateField<entityType>(field: FieldMetadata, dbNames: EntityDbNames<entityType>): boolean; | ||
export declare function shouldCreateEntity(entity: EntityMetadata<any>, e: EntityDbNamesBase): boolean; | ||
export declare type EntityDbNamesBase = { | ||
@@ -48,0 +49,0 @@ $entityName: string; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.fieldDbName = exports.entityDbName = exports.dbNamesOf = exports.shouldNotCreateField = exports.isDbReadonly = exports.CustomSqlFilterBuilder = exports.FilterConsumerBridgeToSqlRequest = void 0; | ||
exports.fieldDbName = exports.entityDbName = exports.dbNamesOf = exports.shouldCreateEntity = exports.shouldNotCreateField = exports.isDbReadonly = exports.CustomSqlFilterBuilder = exports.FilterConsumerBridgeToSqlRequest = void 0; | ||
var tslib_1 = require("tslib"); | ||
@@ -275,2 +275,7 @@ var sql_database_js_1 = require("../data-providers/sql-database.js"); | ||
exports.shouldNotCreateField = shouldNotCreateField; | ||
// An entity warrants a physical table only when it is not backed by a
// sqlExpression and its resolved db name is a plain table name rather than
// an inline "select ... from ..." sub-query used as a virtual table.
function shouldCreateEntity(entity, e) {
    var hasSqlExpression = !!entity.options.sqlExpression;
    var dbName = e.$entityName.toLowerCase();
    var looksLikeQuery = dbName.indexOf('from ') >= 0;
    return !hasSqlExpression && !looksLikeQuery;
}
exports.shouldCreateEntity = shouldCreateEntity; | ||
function dbNamesOf(repo, wrapIdentifierOrOptions) { | ||
@@ -277,0 +282,0 @@ return tslib_1.__awaiter(this, void 0, void 0, function () { |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
/*y1 - workshop | ||
//p1 - migrations - in docs emphasize that mysql doesn't support transactions in ddl, in docs emphasize destroy for knex - // await dataProvider.knex.destroy()
//https://gitkraken.dev/link/dnNjb2RlOi8vZWFtb2Rpby5naXRsZW5zL2xpbmsvci9mMDgzMWU0OWIyODJkMDlkNTA0NTcxYjYwNmUzNTMwODQ3NGIwY2M2L2YvcHJvamVjdHMvcGxheS13aXRoLW1pZ3JhdGlvbnMvc3JjL3NlcnZlci9idWlsZC1taWdyYXRpb25zLnRzP3VybD1odHRwcyUzQSUyRiUyRmdpdGh1Yi5jb20lMkZyZW11bHQlMkZyZW11bHQuZ2l0?origin=gitlens | ||
/*p1 - processError in remult express | ||
- Should we merge (notFound,error,forbidden) into one method in `DataApiResponse` type? | ||
https://gitkraken.dev/link/dnNjb2RlOi8vZWFtb2Rpby5naXRsZW5zL2xpbmsvci9mMDgzMWU0OWIyODJkMDlkNTA0NTcxYjYwNmUzNTMwODQ3NGIwY2M2L2YvcHJvamVjdHMvY29yZS9zcmMvZGF0YS1hcGkudHM%2FdXJsPWh0dHBzJTNBJTJGJTJGZ2l0aHViLmNvbSUyRnJlbXVsdCUyRnJlbXVsdC5naXQ%3D?origin=gitlens | ||
- The api of the `processError` should receive an `ErrorInfo` object and return an `ErrorInfo` object? or should it return something more along the lines
of httpStatus and errorBody. - currently the `serializeError` method is used to build the response | ||
https://gitkraken.dev/link/dnNjb2RlOi8vZWFtb2Rpby5naXRsZW5zL2xpbmsvci9mMDgzMWU0OWIyODJkMDlkNTA0NTcxYjYwNmUzNTMwODQ3NGIwY2M2L2YvcHJvamVjdHMvY29yZS9zcmMvZGF0YS1hcGkudHM%2FdXJsPWh0dHBzJTNBJTJGJTJGZ2l0aHViLmNvbSUyRnJlbXVsdCUyRnJlbXVsdC5naXQmbGluZXM9NDI1?origin=gitlens | ||
- I think there should be a way to throw a forbidden exception | ||
*/ | ||
//p1 - cleanup root directory of remult
//p1 - in this video I'll use remult to turn a frontend app to a fullstack app | ||
/*y1 - discuss using delete & put - with url query language for deleteMany and updateMany - | ||
- put & delete, similar to get | ||
- add where to count, deleteMany,updateMany, | ||
- prevent delete all and update all - must have meaningful where. | ||
- try forcing this also in typescript | ||
* Experiment with doing this with version control - so the students can follow changes | ||
* Prepare stackblitz | ||
- protect against deleting of all rows by mistake | ||
- https://github.com/remult/remult/issues/221#issuecomment-2016519746 | ||
# influencer: | ||
* review react summit | ||
# | ||
*/ | ||
//y1 - getFields didn't work for kobi in the home component | ||
//p1 - processError in remult express | ||
//p1 - I think there should be a way to throw a forbidden exception | ||
//p1 - add section to Fields doc, explaining field type in db | ||
//p1 - add section about union type | ||
//p1 - add section about value list field type | ||
/*y1 - Talk JYC - JYC - add some integrity checks on delete | ||
@@ -57,3 +65,2 @@ - soft delete | ||
//y1 - select data provider per entity https://discord.com/channels/975754286384418847/976006081748807690/1201415305885397003 | ||
//y1 - migrations | ||
//y1 - live query refresh of view on table update | ||
@@ -60,0 +67,0 @@ //y1 - main vs master |
@@ -15,2 +15,6 @@ import type { EntityMetadata } from './remult3/remult3.js'; | ||
} | ||
export interface SqlCommandFactory extends HasWrapIdentifier { | ||
createCommand(): SqlCommand; | ||
execute(sql: string): Promise<SqlResult>; | ||
} | ||
export interface SqlCommand extends SqlCommandWithParameters { | ||
@@ -17,0 +21,0 @@ execute(sql: string): Promise<SqlResult>; |
Dynamic require
Supply chain riskDynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
4913550
259
48808
303
16