kmore-types
Comparing version 2.0.5 to 3.0.0
@@ -6,2 +6,10 @@ # Change Log
# 3.0.0 (2021-04-23)
**Note:** Version bump only for package kmore-types
## [2.0.5](https://github.com/waitingsong/kmore/compare/v2.0.4...v2.0.5) (2021-01-26)
@@ -8,0 +16,0 @@
@@ -5,3 +5,3 @@ /**
*
* @version 2.0.4
* @version 2.0.5
* @author waiting
@@ -16,1647 +16,17 @@ * @license MIT
var sharedCore = require('@waiting/shared-core'); | ||
var operators = require('rxjs/operators'); | ||
var ts = require('typescript'); | ||
var rxjs = require('rxjs'); | ||
var rxwalker = require('rxwalker'); | ||
var sourceMapSupport = require('source-map-support'); | ||
var fs = require('fs'); | ||
var sharedTypesDev = require('@waiting/shared-types-dev'); | ||
const globalCallerFuncNameSet = new Set(['genDbDictFromType', 'kmore']); | ||
const initGenDbDictFromTypeOpts = { | ||
callerDistance: 0, | ||
}; | ||
const initOptions = { | ||
...initGenDbDictFromTypeOpts, | ||
exportVarPrefix: 'dict_', | ||
forceLoadDbDictJs: false, | ||
forceLoadDbDictJsPathReplaceRules: null, | ||
outputBanner: `/** | ||
* Dynamically generated by Kmore, do NOT modify me. | ||
* This file contains variables of database model necessary for running production. | ||
* @link https://www.npmjs.com/package/kmore | ||
*/\n/* eslint-disable */\n/* tslint-disalbe */\n`, | ||
outputFileNameSuffix: '__built-dict', | ||
refTablesPrefix: 'reftb_', | ||
DictTypeSuffix: 'Dict', | ||
DictTypeFolder: false, | ||
DictTypeFileName: '.kmore.ts', | ||
DictTypeBanner: `/** | ||
* Dynamically generated by Kmore, do NOT modify me. | ||
* This file contains types of database model for auto-complete help in IDE during code development. | ||
* @link https://www.npmjs.com/package/kmore | ||
*/\n/* eslint-disable */\n/* tslint-disalbe */\n`, | ||
}; | ||
const initBuildSrcOpts = { | ||
...initOptions, | ||
path: [], | ||
concurrent: 5, | ||
excludePathKeys: ['node_modules'], | ||
maxScanLines: 128, | ||
columnNameCreationFn: defaultCreateScopedColumnName, | ||
}; | ||
const reservedTbListKeys = [ | ||
'constructor', | ||
'__proto__', | ||
]; | ||
const defaultPropDescriptor = { | ||
configurable: true, | ||
enumerable: true, | ||
writable: false, | ||
}; | ||
const cacheMap = { | ||
/** CallerId -> TbListParam */ | ||
dbMap: new Map(), | ||
dbColsMap: new Map(), | ||
/** CallerId -> LocalTypeId */ | ||
callerIdToLocalTypeIdMap: new Map(), | ||
/** LocalTypeId -> TableListTagMap */ | ||
localTypeMap: new Map(), | ||
}; | ||
function defaultCreateScopedColumnName(options) { | ||
const { tableName, columnName } = options; | ||
return `${tableName}.${columnName}`; | ||
} | ||
(function (KmorePropKeys) { | ||
KmorePropKeys["dbh"] = "dbh"; | ||
KmorePropKeys["tables"] = "tables"; | ||
KmorePropKeys["columns"] = "columns"; | ||
KmorePropKeys["scopedColumns"] = "scopedColumns"; | ||
KmorePropKeys["aliasColumns"] = "aliasColumns"; | ||
KmorePropKeys["refTables"] = "rb"; | ||
/** | ||
* Type ref to generics param Db only, do NOT access as variable! | ||
* @example ```ts | ||
* const km = kmore<Db, KDD>({ config }) | ||
* type DbRef = typeof km.DbModel | ||
* type User = DbRef['tb_user'] // equal to Db['tb_user'] | ||
* ``` | ||
*/ | ||
KmorePropKeys["DbModel"] = "DbModel"; | ||
KmorePropKeys["DbModelAlias"] = "DbModelAlias"; | ||
KmorePropKeys["dummy"] = "dymmy"; | ||
})(exports.KmorePropKeys || (exports.KmorePropKeys = {})); | ||
/* eslint-disable import/no-extraneous-dependencies */ | ||
async function updateDbDictFile(targetPath, code, dbDictExportNameToCheck) { | ||
await checkDbDictNameDup(targetPath, dbDictExportNameToCheck); | ||
const str = `\n${code.trim()}\n`.replace(/ {4}/ug, ' '); | ||
await sharedCore.writeFileAsync(targetPath, str, { flag: 'a' }); | ||
return targetPath; | ||
} | ||
async function checkDbDictNameDup(targetPath, dbDictExportName) { | ||
if (!await sharedCore.isFileExists(targetPath)) { | ||
return; | ||
} | ||
const buf = await sharedCore.readFileAsync(targetPath); | ||
const code = buf.toString(); | ||
const needle = `export interface ${dbDictExportName}`; | ||
if (code.length && code.includes(needle)) { | ||
throw new Error(`type name dbDictExportName: "${dbDictExportName}" already exists in the file: "${targetPath}", | ||
file content: ${code} | ||
`); | ||
} | ||
} | ||
/* eslint-disable node/no-unpublished-import */ | ||
/** | ||
* Generate DbDict type declaration code from dbDict variable | ||
* | ||
* @returns ```ts | ||
* export interface DbDict {....} | ||
* ``` | ||
* Transformer needle. | ||
* Should running expression only under development. | ||
* Will be transformed to js literal object in js file under production | ||
*/ | ||
function genDbDictTypeDeclaration(dbDict, dbDictExportName = 'DbDict') { | ||
// const targetPath = './.kmore-debug.ts' | ||
// const srcCode = 'const dbDict = ' + JSON.stringify(dbDict) | ||
// const ast2 = ts.createSourceFile( | ||
// targetPath, | ||
// srcCode, | ||
// ts.ScriptTarget.ESNext, | ||
// false, | ||
// ) | ||
// const str2 = JSON.stringify(ast2.statements, null, 2) | ||
// const printer = ts.createPrinter() | ||
// const codeAfterTransform = printer.printFile(ast2) | ||
// console.info(codeAfterTransform) | ||
const srcCode = `export interface ${dbDictExportName} {}`; | ||
let ast = ts.createSourceFile('', srcCode, ts.ScriptTarget.ESNext, false); | ||
const tablesTypeNode = genTablesNode(dbDict.tables); | ||
ast = updateSourceFile('tables', tablesTypeNode, ast, dbDictExportName); | ||
const colsTypeNode = genColsNode(dbDict.columns); | ||
ast = updateSourceFile('columns', colsTypeNode, ast, dbDictExportName); | ||
const scopedColsTypeNode = genColsNode(dbDict.scopedColumns); | ||
ast = updateSourceFile('scopedColumns', scopedColsTypeNode, ast, dbDictExportName); | ||
const aliasColsTypeNode = genAliasColsNode(dbDict.aliasColumns); | ||
ast = updateSourceFile('aliasColumns', aliasColsTypeNode, ast, dbDictExportName); | ||
const code = ts.createPrinter().printFile(ast); | ||
const ret = code.replace(/\r\n/ug, '\n').replace(/\r/ug, '\n'); | ||
function genDbDict() { | ||
const needle = 'genDbDict'; | ||
const ret = sharedTypesDev.computeCallExpressionToLiteralObj(needle); | ||
return ret; | ||
} | ||
function genTablesNode(tables) { | ||
const ret = genLiteralTypeElements(tables); | ||
return ret; | ||
} | ||
/** | ||
* LiteralTypeElement of returns | ||
* @returns ```ts | ||
* key: value | ||
* ``` | ||
*/ | ||
function genLiteralTypeElement(item) { | ||
const [key, value] = item; | ||
const token = ts.createStringLiteral(value); | ||
const typeNode = ts.createLiteralTypeNode(token); | ||
const typeElm = ts.createPropertySignature(undefined, ts.createIdentifier(key), undefined, typeNode, undefined); | ||
return typeElm; | ||
} | ||
/** | ||
* @returns ```ts | ||
* tb_user: { | ||
* uid: 'uid', | ||
* name: 'name', | ||
* }, | ||
* tb_user_detail: {}, | ||
* ``` | ||
*/ | ||
function genColsNode(rows) { | ||
const nodes = []; | ||
Object.entries(rows).forEach(([key, items]) => { | ||
const itemType = genLiteralTypeElements(items); | ||
const node = ts.createPropertySignature(undefined, ts.createIdentifier(key), undefined, itemType, undefined); | ||
nodes.push(node); | ||
}); | ||
const ret = ts.createTypeLiteralNode(nodes); | ||
if (!ts.isTypeLiteralNode(ret)) { | ||
throw new TypeError('Result is NOT TypeLiteralNode'); | ||
} | ||
return ret; | ||
} | ||
/** | ||
* @returns ```ts | ||
* tb_user: { | ||
* uid: { | ||
* tbUserUid: 'tb_user.uid' | ||
* } | ||
* name: { | ||
* tbUserName: 'tb_user.name' | ||
* } | ||
* }, | ||
* tb_user_detail: {}, | ||
* ``` | ||
*/ | ||
function genAliasColsNode(rows) { | ||
const nodes = []; | ||
Object.entries(rows).forEach(([key, items]) => { | ||
const itemType = genColsNode(items); | ||
const node = ts.createPropertySignature(undefined, ts.createIdentifier(key), undefined, itemType, undefined); | ||
nodes.push(node); | ||
}); | ||
const ret = ts.createTypeLiteralNode(nodes); | ||
if (!ts.isTypeLiteralNode(ret)) { | ||
throw new TypeError('Result is NOT TypeLiteralNode'); | ||
} | ||
return ret; | ||
} | ||
/** | ||
* | ||
* { | ||
* uid: 'uid', | ||
* name: 'name', | ||
* } | ||
*/ | ||
function genLiteralTypeElements(row) { | ||
const typeElms = []; | ||
Object.entries(row).forEach((item) => { | ||
const typeElm = genLiteralTypeElement(item); | ||
typeElms.push(typeElm); | ||
}); | ||
const ret = ts.createTypeLiteralNode(typeElms); | ||
return ret; | ||
} | ||
function updateSourceFile(key, value, ast, dbDictExportName) { | ||
const transformerFactory = (ctx) => { | ||
const visitor = (node) => { | ||
if (ts.isInterfaceDeclaration(node)) { | ||
if (node.name.text !== dbDictExportName) { | ||
return node; | ||
} | ||
const typeElm = ts.createPropertySignature(undefined, ts.createIdentifier(key), undefined, value, undefined); | ||
const decla = ts.updateInterfaceDeclaration(node, undefined, node.modifiers, // [ts.createModifier(ts.SyntaxKind.ExportKeyword)], | ||
node.name, [], [], [...node.members, typeElm]); | ||
return decla; | ||
} | ||
return ts.visitEachChild(node, visitor, ctx); | ||
}; | ||
return visitor; | ||
}; | ||
const ret = ts.transform(ast, [transformerFactory]); | ||
const sourceFileRet = ret.transformed[0]; | ||
if (!sourceFileRet) { | ||
throw new Error('result error'); | ||
} | ||
// const printer = ts.createPrinter() | ||
// const codeAfterTransform = printer.printNode(ts.EmitHint.Unspecified, sourceFileRet, ast) | ||
// console.info({ codeAfterTransform }) | ||
return sourceFileRet; | ||
} | ||
function genAliasColumns(scopedColumns) { | ||
const ret = {}; | ||
Object.entries(scopedColumns).forEach((item) => { | ||
const tbAlias = item[0]; | ||
const cols = item[1]; | ||
const tableFlds = {}; | ||
Object.entries(cols).forEach((field) => { | ||
const [colAlias, scopedColName] = field; | ||
// tb_user.uid -> tbUserUid | ||
const output = scopedSnakeToCamel(scopedColName); | ||
const value = { | ||
[output]: scopedColName, | ||
}; | ||
Object.defineProperty(tableFlds, colAlias, { | ||
configurable: false, | ||
enumerable: true, | ||
writable: true, | ||
value, | ||
}); | ||
}); | ||
Object.defineProperty(ret, tbAlias, { | ||
configurable: false, | ||
enumerable: true, | ||
writable: true, | ||
value: tableFlds, | ||
}); | ||
}); | ||
return ret; | ||
} | ||
/** | ||
* | ||
* @returns - DbCols<T> ``` | ||
* { | ||
* tb_user: { | ||
* uid: "tb_user.uid", | ||
* name: "tb_user.name", | ||
* }, | ||
* tb_user_detail: {...}, | ||
* } | ||
* ``` | ||
*/ | ||
function genDbScopedCols(dbDictBase, | ||
/** false will use original col name w/o table name prefix */ | ||
createColumnNameFn = defaultCreateScopedColumnName) { | ||
const ret = {}; | ||
const props = { | ||
configurable: false, | ||
enumerable: true, | ||
writable: false, | ||
}; | ||
const { tables, columns } = dbDictBase; | ||
Object.entries(columns).forEach((tb) => { | ||
const [tbAlias, tbFields] = tb; | ||
const tableName = tables[tbAlias]; | ||
const tmpTableFields = {}; | ||
Object.entries(tbFields).forEach((field) => { | ||
const [colKey, columnName] = field; | ||
let value = columnName; | ||
if (typeof createColumnNameFn === 'function') { | ||
value = createColumnNameFn({ | ||
tableName, | ||
columnName, | ||
}); | ||
} | ||
Object.defineProperty(tmpTableFields, colKey, { | ||
...props, | ||
value, | ||
}); | ||
}); | ||
Object.defineProperty(ret, tbAlias, { | ||
...props, | ||
value: tmpTableFields, | ||
}); | ||
}); | ||
return ret; | ||
} | ||
/** Allow empty Object */ | ||
function validateParamTables(tbs) { | ||
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition | ||
if (tbs === null) { | ||
throw new TypeError('Parameter tables of DbFacrory() invalid. Values is null.'); | ||
} | ||
if (typeof tbs === 'symbol') { | ||
throw new TypeError('Parameter tables of DbFacrory() invalid. Values is symbol.'); | ||
} | ||
else if (Array.isArray(tbs)) { | ||
throw new TypeError('Parameter tables of DbFacrory() invalid, value is Array.'); | ||
// tbs.forEach((tb) => { | ||
// if (typeof tb !== 'string') { | ||
// throw new TypeError(`TableName in parameter of DbFacrory() invalid: "${tb}"`) | ||
// } | ||
// validateTbName(tb) | ||
// }) | ||
} | ||
if (typeof tbs === 'object') { | ||
if (!tbs || !Object.keys(tbs).length) { | ||
return; | ||
} | ||
Object.entries(tbs).forEach((item) => { | ||
const [aliasOri, tbName] = item; | ||
validateTbName(aliasOri); | ||
validateTbName(tbName); | ||
if (typeof aliasOri !== 'string') { | ||
throw new TypeError('TableModel name in parameter of DbFacrory() invalid.' + item.toString()); | ||
} | ||
else if (!aliasOri.trim()) { | ||
throw new RangeError('Value of Parameter alias/tbl of DbFacrory() invalid.'); | ||
} | ||
}); | ||
} | ||
else { | ||
throw new TypeError('Parameter tables of DbFacrory() invalid.'); | ||
} | ||
} | ||
function validateTbName(tb) { | ||
if (typeof tb !== 'string') { | ||
throw new TypeError('TableName in parameter of DbFacrory() empty.'); | ||
} | ||
const tbName = tb.trim(); | ||
if (!tbName) { | ||
throw new RangeError('Value of Parameter alias/tbl of DbFacrory() invalid.'); | ||
} | ||
else if (reservedTbListKeys.includes(tbName)) { | ||
throw new TypeError(`tableName "${tbName}" of param tables is in reservedTbListKeys`); | ||
} | ||
} | ||
function validateDuplicateProp(tbs, key) { | ||
if (typeof key === 'string' && typeof tbs[key] !== 'undefined') { | ||
throw Error(`Object has duplicate key: "${key}" to assign`); | ||
} | ||
} | ||
function getCallerStack(callerDistance) { | ||
const depth = callerDistance + 2; | ||
const caller = getStack(depth); | ||
return caller; | ||
} | ||
/** | ||
* @see https://stackoverflow.com/a/13227808 | ||
*/ | ||
function getStack(depth = 0) { | ||
// Save original Error.prepareStackTrace | ||
let origPrepareStackTrace = Error.prepareStackTrace; | ||
/* istanbul ignore else */ | ||
if (!origPrepareStackTrace) { | ||
// MUST install source-map-support inside getStack()
sourceMapSupport.install(); | ||
/* istanbul ignore else */ | ||
if (!Error.prepareStackTrace) { | ||
throw new Error('Error.prepareStackTrace not defined'); | ||
} | ||
origPrepareStackTrace = Error.prepareStackTrace; | ||
} | ||
// void else in debug hooked by source-map-support already | ||
// Override with function that just returns `stack` | ||
Error.prepareStackTrace = function (_err, stack) { | ||
const target = stack[depth + 1]; | ||
// @ts-expect-error | ||
const ret = origPrepareStackTrace(_err, [target]); | ||
return ret; | ||
}; | ||
const limit = Error.stackTraceLimit; | ||
Error.stackTraceLimit = depth + 2; | ||
const err = new Error(); | ||
const { stack } = err; | ||
// Restore original `Error.prepareStackTrace` | ||
Error.prepareStackTrace = origPrepareStackTrace; | ||
Error.stackTraceLimit = limit; | ||
if (!stack) { | ||
throw new Error('stack EMPTY!'); | ||
} | ||
const arr = stack.split('\n'); | ||
const line = arr.pop(); // one StackFrame, but may be all stacks sometimes
if (!line) { | ||
throw new Error('Retrieve stack of caller failed, line empty.'); | ||
} | ||
const path = line.slice(line.indexOf('(') + 1, -1); | ||
if (!path) { | ||
throw new Error('Retrieve stack of caller failed'); | ||
} | ||
const matched = /^(.+):(\d+):(\d+)$/u.exec(path); | ||
if (!matched || matched.length !== 4) { | ||
throw new Error('Retrieve stack of caller failed. ' + (matched ? JSON.stringify(matched) : '')); | ||
} | ||
const caller = { | ||
path: matched[1].replace(/\\/gu, '/'), | ||
line: +matched[2], | ||
column: +matched[3], | ||
}; | ||
return caller; | ||
} | ||
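// Illustrative result shape only (hypothetical values): a successful getCallerStack() call
// resolves to a CallerInfo object such as
//   { path: '/kmore-mono/packages/kmore-types/test/test.config.ts', line: 13, column: 23 }
// matching the "<path>:<line>:<column>" portion of the callerId built further below.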
function isTsFile(path) { | ||
return !!path.endsWith('.ts'); | ||
} | ||
/** Build DbTables from TableListTagMap */ | ||
function buildDbParam(tagMap) { | ||
const ret = createNullObject(); | ||
if (tagMap.size) { | ||
tagMap.forEach((_tags, key) => { | ||
Object.defineProperty(ret, key, { | ||
...defaultPropDescriptor, | ||
value: key, | ||
}); | ||
}); | ||
} | ||
else { | ||
throw new TypeError('Value of tagMap invalid.'); | ||
} | ||
return ret; | ||
} | ||
/** Build DbTableCols from TableColListTagMap */ | ||
function buildDbColsParam(tagMap) { | ||
const ret = createNullObject(); | ||
if (tagMap.size) { | ||
tagMap.forEach((colListTagMap, tb) => { | ||
const cols = createNullObject(); | ||
colListTagMap.forEach((_tags, col) => { | ||
Object.defineProperty(cols, col, { | ||
...defaultPropDescriptor, | ||
value: col, | ||
}); | ||
}); | ||
Object.defineProperty(ret, tb, { | ||
...defaultPropDescriptor, | ||
value: cols, | ||
}); | ||
}); | ||
} | ||
else { | ||
throw new TypeError('Value of tagMap invalid.'); | ||
} | ||
return ret; | ||
} | ||
/** Build DbTableScopedCols from TableColListTagMap */ | ||
// export function buildTbScopedColListParam<T extends DbModel>( | ||
// tagMap: TbColListTagMap, | ||
// tables: DbTables<T>, | ||
// ): DbTableScopedCols<T> { | ||
// const ret = createNullObject() | ||
// if (! tables || ! Object.keys(tables).length) { | ||
// return ret | ||
// } | ||
// if (tagMap && tagMap.size) { | ||
// tagMap.forEach((colListTagMap, tbAlias) => { | ||
// const tb = tbAlias as keyof DbTables<T> | ||
// if (typeof tables[tb] !== 'string') { | ||
// return | ||
// } | ||
// const tbName = tables[tb] | ||
// const cols = createNullObject() | ||
// colListTagMap.forEach((_tags, colAlias) => { | ||
// Object.defineProperty(cols, colAlias, { | ||
// ...defaultPropDescriptor, | ||
// value: `${tbName}.${colAlias}`, | ||
// }) | ||
// }) | ||
// Object.defineProperty(ret, tbAlias, { | ||
// ...defaultPropDescriptor, | ||
// value: cols, | ||
// }) | ||
// }) | ||
// } | ||
// else { | ||
// throw new TypeError('Value of tagMap invalid.') | ||
// } | ||
// return ret | ||
// } | ||
function isCallerNameMatched(name, matchFuncNameSet) { | ||
if (!name) { | ||
return false; | ||
} | ||
else if (matchFuncNameSet.has(name)) { | ||
return true; | ||
} | ||
// else if (typeof matchFuncNameSet === 'string' && matchFuncNameSet === name) { | ||
// return true | ||
// } | ||
// else if (Array.isArray(matchFuncNameSet) && matchFuncNameSet.includes(name)) { | ||
// return true | ||
// } | ||
else { | ||
return false; | ||
} | ||
} | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
function createNullObject() { | ||
return Object.create(null); | ||
} | ||
function genDbDictTsFilePath(srcPath, outputFileNameSuffix) { | ||
const ret = srcPath.slice(0, -3) + `.${outputFileNameSuffix}.ts`; | ||
return ret.replace(/\\/ug, '/'); | ||
} | ||
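// e.g. genDbDictTsFilePath('src/test.config.ts', '__built-dict') returns 'src/test.config.__built-dict.ts'
// (illustrative path; slice(0, -3) strips the trailing '.ts')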
/** | ||
* Return relative or absolute path | ||
*/ | ||
async function genDbDictTypeTsFilePath(srcPath, targetDir, targetFileName) { | ||
if (!targetFileName) { | ||
throw new TypeError('Value of targetFileName empty.'); | ||
} | ||
const arr = ['.', './']; | ||
let ret = ''; | ||
if (targetDir === false || arr.includes(targetDir)) { | ||
const dir = sharedCore.dirname(srcPath); | ||
ret = sharedCore.join(dir, targetFileName); | ||
} | ||
else if (await sharedCore.isDirExists(targetDir)) { | ||
ret = sharedCore.join(targetDir, targetFileName); | ||
} | ||
else { | ||
const dir = sharedCore.dirname(srcPath); | ||
ret = sharedCore.join(dir, targetFileName); | ||
} | ||
if (!ret) { | ||
throw new TypeError('Result path empty.'); | ||
} | ||
return ret.replace(/\\/ug, '/'); | ||
} | ||
/** | ||
* Generate DbDict Type name from generics name of DbModel, | ||
* validate by the content to avoid confliction. | ||
* | ||
* @example 'DbModelDict' => 'DbModelDictAlias{1|2|3|...}' | ||
*/ | ||
function genValidDictTypeAliasName(content, dictTypeName) { | ||
const id = `${dictTypeName}Alias`; | ||
if (!includeExportTypeName(content, id)) { | ||
return id; | ||
} | ||
for (let i = 1; i < 10000; i += 1) { | ||
const name = `${id}${i}`; | ||
if (!includeExportTypeName(content, name)) { | ||
return id; | ||
} | ||
} | ||
return id + Math.random().toString().slice(10); | ||
} | ||
/** Generate dict var name to output */ | ||
function genVarName(exportVarPrefix, line, column) { | ||
const varName = `${exportVarPrefix}${line}_${column}`; | ||
return varName; | ||
} | ||
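// e.g. with the default exportVarPrefix 'dict_', genVarName('dict_', 13, 23) returns 'dict_13_23'
// (illustrative caller line/column values)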
/** | ||
* Generate DbDict Type name from generics name of DbModel | ||
* @example 'DbModel' => 'DbModelDict' | ||
*/ | ||
function genDictTypeName(dbModelName, nameSuffix) { | ||
return `${dbModelName}${nameSuffix}`; | ||
} | ||
/** | ||
* Generate dict var name from DictTypeName, will be write to the file together with DictType declaration | ||
* @example 'DbModelDict' => 'dbModelDict' | ||
*/ | ||
function genDictVarNameFromDictTypeName(dictTypeName) { | ||
const ret = dictTypeName.slice(0, 1).toLowerCase() + dictTypeName.slice(1); | ||
return ret; | ||
} | ||
/** | ||
* Generate dict var name from DictTypeName, will be write to the file together with DictType declaration | ||
* @example 'DbModel' => 'dbModelDict' | ||
*/ | ||
function genDictVarNameFromDbName(dbModelName, nameSuffix) { | ||
const typeName = genDictTypeName(dbModelName, nameSuffix); | ||
const ret = typeName.slice(0, 1).toLowerCase() + typeName.slice(1); | ||
return ret; | ||
} | ||
/** | ||
* Generate DbDict Type name from generics name of DbModel | ||
* @example 'DbModel' => 'DbModelDict' | ||
*/ | ||
function genDictTypeNameFromCallerId(id, nameSuffix) { | ||
const { typeId } = pickInfoFromCallerTypeId(id); | ||
const ret = genDictTypeName(typeId, nameSuffix); | ||
return ret; | ||
} | ||
function reWriteLoadingPath(path, rules) { | ||
let ret = path; | ||
/* istanbul ignore else */ | ||
if (rules && rules.length) { | ||
rules.forEach(([re, str]) => { | ||
ret = ret.replace(re, str); | ||
}); | ||
} | ||
return ret; | ||
} | ||
/** | ||
* Load dbDictBase var from a js file | ||
*/ | ||
function loadDbDictVarFromFile(loadOpts) { | ||
const { path, caller, options } = loadOpts; | ||
const dbDictVarName = genVarName(options.exportVarPrefix, caller.line, caller.column); | ||
const mods = loadFile(path); | ||
if (!mods) { | ||
throw new TypeError(`Loaded mods empty, path: "${path}"`); | ||
} | ||
else if (typeof mods[dbDictVarName] === 'object') { | ||
return mods[dbDictVarName]; | ||
} | ||
throw new TypeError(`Loaded mods[${dbDictVarName}] not object, path: "${path}"`); | ||
} | ||
function loadFile(path) { | ||
// eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports | ||
const mods = require(path); | ||
return mods; | ||
} | ||
/** | ||
* Scan and emit .ts type files containing keywords of Options['callerFuncNames'] | ||
*/ | ||
function walkDirForCallerFuncTsFiles(options) { | ||
const opts = { | ||
...initBuildSrcOpts, | ||
...options, | ||
}; | ||
const { path: basePath, excludePathKeys, maxScanLines } = opts; | ||
const maxDepth = 99; | ||
const concurrent = opts.concurrent && opts.concurrent > 0 | ||
? opts.concurrent | ||
: 5; | ||
const matchFuncNameSet = new Set(globalCallerFuncNameSet); | ||
const dir$ = rxjs.iif(() => { | ||
if (typeof basePath === 'string') { | ||
return true; | ||
} | ||
else if (Array.isArray(basePath)) { | ||
return false; | ||
} | ||
else { | ||
throw new TypeError('Value of baseDir invalid, should be String or Array.'); | ||
} | ||
}, rxjs.of(basePath), rxjs.from(basePath)); | ||
const path$ = dir$.pipe(operators.mergeMap(path => rxwalker.walk(path, { maxDepth }), concurrent), operators.filter((ev) => { | ||
const { path } = ev; | ||
return path ? !ifPathContainsKey(path, excludePathKeys) : false; | ||
}), operators.filter(ev => ev.type === "file" /* file */ | ||
&& ev.path.endsWith('.ts') | ||
&& !ev.path.endsWith('.d.ts')), operators.map(ev => ev.path), operators.mergeMap((path) => { | ||
const flag$ = ifFileContentContainsCallerFuncNames(matchFuncNameSet, maxScanLines, path); | ||
return flag$.pipe(operators.map((contains) => { | ||
return contains ? path : ''; | ||
})); | ||
}, concurrent), operators.filter(path => path.length > 0)); | ||
const ret$ = path$.pipe(operators.reduce((acc, val) => { | ||
const path = sharedCore.pathResolve(val); | ||
if (!acc.includes(path)) { | ||
acc.push(path); | ||
} | ||
return acc; | ||
}, []), operators.mergeMap(paths => rxjs.from(paths))); | ||
return ret$; | ||
} | ||
function ifPathContainsKey(path, keys) { | ||
if (!path) { | ||
return false; | ||
} | ||
if (typeof keys === 'string' && keys) { | ||
return path.includes(keys); | ||
} | ||
else if (Array.isArray(keys)) { | ||
for (const key of keys) { | ||
if (key && path.includes(key)) { | ||
return true; | ||
} | ||
} | ||
} | ||
return false; | ||
} | ||
function ifFileContentContainsCallerFuncNames(matchFuncNameSet, maxLines, path) { | ||
const line$ = sharedCore.readFileLineRx(path); | ||
const scan$ = line$.pipe(operators.take(maxLines >= 0 ? maxLines : 128), operators.map((content) => { | ||
return hasContainsCallerFuncNames(matchFuncNameSet, content); | ||
}), operators.filter(exists => !!exists), operators.catchError(() => rxjs.of(false))); | ||
const notExists$ = rxjs.of(false); | ||
const ret$ = rxjs.concat(scan$, notExists$).pipe(operators.take(1)); | ||
return ret$; | ||
} | ||
function hasContainsCallerFuncNames(matchFuncNameSet, content) { | ||
if (content) { | ||
for (const key of matchFuncNameSet.keys()) { | ||
if (content.includes(key)) { | ||
return true; | ||
} | ||
} | ||
} | ||
return false; | ||
} | ||
function parseCallerFuncNames(callerFuncNameSet, names) { | ||
const st = new Set(callerFuncNameSet); | ||
if (!names) { | ||
return st; | ||
} | ||
else if (typeof names === 'string') { | ||
st.add(names); | ||
} | ||
else if (Array.isArray(names) && names.length) { | ||
names.forEach(name => st.add(name)); | ||
} | ||
else { | ||
throw new TypeError('Value of param invalid.'); | ||
} | ||
return st; | ||
} | ||
/** | ||
* Generate DbDict from generics type T, | ||
* Loading compiled js file if prod env. | ||
* Param columnNameCreationFn ignored if dbDictBase is type DbDict<D>. | ||
*/ | ||
function genDbDictFromBase(dbDictBase, | ||
/** false will use original col name w/o table name prefix */ | ||
columnNameCreationFn = defaultCreateScopedColumnName) { | ||
const ret = { ...dbDictBase }; | ||
if (!hasExtColumns(dbDictBase, exports.KmorePropKeys.scopedColumns)) { | ||
ret.scopedColumns = genDbScopedCols(dbDictBase, columnNameCreationFn); | ||
} | ||
if (!hasExtColumns(dbDictBase, exports.KmorePropKeys.aliasColumns)) { | ||
ret.aliasColumns = genAliasColumns(ret.scopedColumns); | ||
} | ||
return ret; | ||
} | ||
function hasExtColumns(dict, key) { | ||
if (!Object.prototype.hasOwnProperty.call(dict, exports.KmorePropKeys.tables)) { | ||
throw new TypeError('Value of parameter dbDictBase of has no tables property'); | ||
} | ||
else if (!Object.prototype.hasOwnProperty.call(dict, exports.KmorePropKeys.columns)) { | ||
throw new TypeError('Value of parameter dbDictBase of has no columns property'); | ||
} | ||
return !!Object.prototype.hasOwnProperty.call(dict, key); | ||
} | ||
/** | ||
* tb_user => tbUser, | ||
* tb-user => tbUser | ||
*/ | ||
function snakeToCamel(string) { | ||
return string.replace(/([-_][a-z])/uig, ($1) => { | ||
return $1.toUpperCase() | ||
.replace('-', '') | ||
.replace('_', ''); | ||
}); | ||
} | ||
/** | ||
* tb_user.uid => tbUserUid, | ||
* tb-user.uid => tbUserUid | ||
*/ | ||
function scopedSnakeToCamel(input) { | ||
return snakeToCamel(input.replace(/\./ug, '_')); | ||
} | ||
function includeExportTypeName(content, typeName) { | ||
const name = typeName.trim(); | ||
if (!content || !name) { | ||
return false; | ||
} | ||
const needles = [ | ||
`export interface ${name}`, | ||
`export interface '${name}'`, | ||
`export interface "${name}"`, | ||
]; | ||
// const ret = needles.some(needle => content.includes(needle)) | ||
const ret = needles.some((needle) => { | ||
const bb = content.includes(needle); | ||
return bb; | ||
}); | ||
return ret; | ||
} | ||
/** | ||
* Retrieve dict var name and type name from the content by the dict const | ||
*/ | ||
function retrieveDictInfoByDictConst(content, dbDict) { | ||
const ret = { dictVarName: '', dictTypeName: '' }; | ||
if (!content.trim().length) { | ||
return ret; | ||
} | ||
const dictMap = retrieveDictVarMapFrom(content); | ||
if (!dictMap.size) { | ||
return ret; | ||
} | ||
for (const [id, dict] of dictMap.entries()) { | ||
if (dictObjectEquals(dict, dbDict)) { | ||
ret.dictVarName = id.trim(); | ||
break; | ||
} | ||
} | ||
if (ret.dictVarName) { | ||
const dictTypeName = ret.dictVarName.slice(0, 1).toUpperCase() + ret.dictVarName.slice(1).trim(); | ||
if (includeExportTypeName(content, dictTypeName)) { | ||
ret.dictTypeName = dictTypeName; | ||
} | ||
} | ||
return ret; | ||
} | ||
/** | ||
* Whether content has the same dict variable | ||
*/ | ||
function hasSameDictVar(content, dbDict) { | ||
if (!content.trim().length) { | ||
return false; | ||
} | ||
const dicts = retrieveDictVarsFrom(content); | ||
if (!dicts.length) { | ||
return false; | ||
} | ||
const ret = dicts.some(dict => dictObjectEquals(dict, dbDict)); | ||
return ret; | ||
} | ||
function retrieveDictVarMapFrom(content) { | ||
const ret = new Map(); | ||
if (!content.trim().length) { | ||
return ret; | ||
} | ||
const re = /^export const\s+(\S+?)\s+=.+$/ugm; | ||
let arr = re.exec(content); | ||
while (arr) { | ||
const [pick, id] = arr; | ||
if (!pick || !id || !pick.includes('{') || !pick.includes('}')) { | ||
continue; | ||
} | ||
const start = pick.indexOf('{'); | ||
const end = pick.lastIndexOf('}'); | ||
const json = pick.slice(start, end + 1); | ||
const dict = JSON.parse(json); | ||
if (typeof dict === 'object') { | ||
ret.set(id, dict); | ||
} | ||
arr = re.exec(content); | ||
} | ||
return ret; | ||
} | ||
function retrieveDictVarsFrom(content) { | ||
const dictMap = retrieveDictVarMapFrom(content); | ||
const ret = Array.from(dictMap.values()); | ||
return ret; | ||
} | ||
/** | ||
* If deep equal of two Dict object | ||
*/ | ||
function dictObjectEquals(d1, d2) { | ||
if (!d1 || !d2) { | ||
throw new TypeError('invalid param 1'); | ||
} | ||
else if (typeof d1 !== 'object') { | ||
throw new TypeError('d1 invalid param'); | ||
} | ||
else if (typeof d2 !== 'object') { | ||
throw new TypeError('d2 invalid param'); | ||
} | ||
else if (Array.isArray(d1) || Array.isArray(d2)) { | ||
throw new TypeError('invalid param array'); | ||
} | ||
// recursive object equality check | ||
const keys1 = Object.keys(d1); | ||
const keys2 = Object.keys(d2); | ||
const r1 = keys2.every(key => keys1.includes(key)); | ||
if (!r1) { | ||
return false; | ||
} | ||
const r2 = dictElementEquals(d1.tables, d2.tables); | ||
const r3 = dictColsEquals(d1.columns, d2.columns); | ||
const r4 = dictColsEquals(d1.scopedColumns, d2.scopedColumns); | ||
const r5 = dictAliasColsEquals(d1.aliasColumns, d2.aliasColumns); | ||
const ret = r1 && r2 && r3 && r4 && r5; | ||
return ret; | ||
} | ||
function dictElementEquals(t1, t2) { | ||
if (!t1 || !t2) { | ||
throw new TypeError('Invalid value of param to compare.'); | ||
} | ||
const keys1 = Object.keys(t1); | ||
const keys2 = Object.keys(t2); | ||
if (keys1.length === 0 && keys2.length === 0) { | ||
return true; | ||
} | ||
else if (keys1.length !== keys2.length) { | ||
return false; | ||
} | ||
const ret = keys1.every((key) => { | ||
if (!keys2.includes(key)) { | ||
return false; | ||
} | ||
const v1 = t1[key]; | ||
const v2 = t2[key]; | ||
return typeof v1 === 'string' && v1 === v2; | ||
}); | ||
return ret; | ||
} | ||
function dictColsEquals(cols1, cols2) { | ||
if (!cols1 || !cols2) { | ||
throw new TypeError('Invalid value of param to compare'); | ||
} | ||
const keys1 = Object.keys(cols1); | ||
const keys2 = Object.keys(cols2); | ||
if (keys1.length === 0 && keys2.length === 0) { | ||
return true; | ||
} | ||
else if (keys1.length !== keys2.length) { | ||
return false; | ||
} | ||
const ret = keys1.every((key) => { | ||
if (!keys2.includes(key)) { | ||
return false; | ||
} | ||
return dictElementEquals(cols1[key], cols2[key]); | ||
}); | ||
return ret; | ||
} | ||
function dictAliasColsEquals(cols1, cols2) { | ||
if (!cols1 || !cols2) { | ||
throw new TypeError('Invalid value of dictAliasColsEquals() param to compare.'); | ||
} | ||
const keys1 = Object.keys(cols1); | ||
const keys2 = Object.keys(cols2); | ||
if (keys1.length === 0 && keys2.length === 0) { | ||
return true; | ||
} | ||
else if (keys1.length !== keys2.length) { | ||
return false; | ||
} | ||
const ret = keys1.every((key) => { | ||
if (!keys2.includes(key)) { | ||
return false; | ||
} | ||
return dictColsEquals(cols1[key], cols2[key]); | ||
}); | ||
return ret; | ||
} | ||
/* eslint-disable import/no-extraneous-dependencies */ | ||
/** | ||
* | ||
* @param id <path>:<line>:<col>:typeid-<typeName> | ||
*/ | ||
function pickInfoFromCallerTypeId(id) { | ||
const matched = /^(.+):(\d+):(\d+):typeid-([\d\w]+)$/u.exec(id); | ||
if (matched && matched.length === 5) { | ||
const ret = { | ||
path: matched[1], | ||
line: +matched[2], | ||
column: +matched[3], | ||
typeId: matched[4], | ||
}; | ||
return ret; | ||
} | ||
throw new TypeError('CallerTypeId value invalid'); | ||
} | ||
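// e.g. pickInfoFromCallerTypeId('/kmore-mono/packages/kmore-types/test/config/test.config2.ts:4:1:typeid-Db')
// returns { path: '/kmore-mono/packages/kmore-types/test/config/test.config2.ts', line: 4, column: 1, typeId: 'Db' }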
function genCallerTypeMapFromNodeSet(nodes, checker, sourceFile, // sourceFileObject | ||
path) { | ||
const retMap = new Map(); | ||
nodes.forEach((node) => { | ||
const obj = genInfoFromNode({ | ||
checker, | ||
node, | ||
path, | ||
sourceFile, | ||
}); | ||
if (obj) { | ||
const { callerTypeId, dbTagMap: tbTagMap, dbColsTagMap: tbColTagMap } = obj; | ||
retMap.set(callerTypeId, [tbTagMap, tbColTagMap]); | ||
} | ||
}); | ||
return retMap; | ||
} | ||
function genInfoFromNode(options) { | ||
const { node, checker, sourceFile, path, } = options; | ||
if (!node.typeArguments) { | ||
return; | ||
} | ||
// console.info(node.getSourceFile().fileName) | ||
// if (ts.isIdentifier(node)) { | ||
// const sym = checker.getSymbolAtLocation(node) | ||
// } | ||
const typeRefNode = retrieveTypeRefNodeFromGenerics(node); | ||
const gType = checker.getTypeAtLocation(typeRefNode.typeName); | ||
// const gType = checker.getTypeFromTypeNode(refTypeNode) | ||
// const props = checker.getPropertiesOfType(type2) | ||
const sym = gType.getSymbol(); | ||
/* istanbul ignore else */ | ||
if (!sym) { | ||
return; | ||
} | ||
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart()); | ||
const inputTypeName = sym.getName(); | ||
// "/kmore-mono/packages/kmore-types/test/config/test.config2.ts:4:1:typeid-Db" | ||
const callerTypeId = `${path}:${line + 1}:${character + 1}:typeid-${inputTypeName}`; | ||
// const gTypeId: number = typeof gType.id === 'number' ? gType.id : Math.random() | ||
// "/kmore-mono/packages/kmore-types/test/config/test.config2.ts:typeid-76" | ||
// "/kmore-mono/packages/kmore-types/test/config/test.config2.ts:typeid-Db" | ||
const localTypeId = `${path}:typeid-${inputTypeName}`; | ||
const { dbTagMap, dbColsTagMap } = genTbListTagMapFromSymbol(checker, sym); | ||
/* istanbul ignore else */ | ||
if (dbTagMap.size) { | ||
return { | ||
callerTypeId, | ||
localTypeId, | ||
dbTagMap, | ||
dbColsTagMap, | ||
}; | ||
} | ||
} | ||
function genTbListTagMapFromSymbol(checker, symbol) { | ||
const { members } = symbol; | ||
// Map<TableAlias, Map<TagName, TagComment> > | ||
const dbTagMap = new Map(); | ||
const dbColsTagMap = new Map(); | ||
/* istanbul ignore else */ | ||
if (members) { | ||
members.forEach((tbNameSym) => { | ||
const { name: tbName, tags } = retrieveInfoFromSymbolObject(tbNameSym); | ||
// tags can be empty array | ||
dbTagMap.set(tbName, tags); | ||
// fields | ||
const colTagMap = genColListTagMapFromTbSymbol(checker, tbNameSym); | ||
dbColsTagMap.set(tbName, colTagMap); | ||
}); | ||
} | ||
return { dbTagMap, dbColsTagMap }; | ||
} | ||
function genColListTagMapFromTbSymbol(checker, tbNameSym) { | ||
const ret = new Map(); | ||
const tbType = checker.getTypeOfSymbolAtLocation(tbNameSym, tbNameSym.valueDeclaration); | ||
const sym = tbType.getSymbol(); | ||
/* istanbul ignore else */ | ||
if (sym && sym.members) { | ||
sym.members.forEach((member) => { | ||
const { name: colName, tags } = retrieveInfoFromSymbolObject(member); | ||
ret.set(colName, tags); | ||
}); | ||
} | ||
return ret; | ||
} | ||
function retrieveInfoFromSymbolObject(symbol) { | ||
return { | ||
name: symbol.getName(), | ||
tags: symbol.getJsDocTags(), | ||
}; | ||
} | ||
/** | ||
* Retrieve TypeReferenceNode | ||
* @example genDbDictFromType<Db>() | ||
*/ | ||
function retrieveTypeRefNodeFromGenerics(node) { | ||
const { isTypeReferenceNode, isTypeLiteralNode, } = require('typescript'); | ||
if (!node.typeArguments) { | ||
throw new TypeError('Generics param required, like kmore<Db>() genDbDictFromType<Db>() '); | ||
} | ||
else if (node.typeArguments.length !== 1 && node.typeArguments.length !== 2) { | ||
throw new TypeError(`Generics param required, like kmore<Db>() or genDbDictFromType<Db>(), | ||
node.typeArguments.length: ${node.typeArguments.length}, should be 1 or 2 | ||
`); | ||
} | ||
const [typeNode] = node.typeArguments; | ||
// typeNode TypeReference = 169 | ||
if (isTypeReferenceNode(typeNode)) { | ||
return typeNode; | ||
} | ||
else if (isTypeLiteralNode(typeNode)) { | ||
throw new TypeError(`Literal Type param not supported, such as | ||
genDbDictFromType<{ tb_user: { uid: number } }>(), | ||
should be an TypeReference like: genDbDictFromType<Db>(), | ||
`); | ||
} | ||
else { | ||
throw new TypeError(`Not supported TypeNode Kind: ${node.kind}`); | ||
} | ||
} | ||
function matchSourceFileWithFilePath(path) { | ||
const { createProgram } = require('typescript'); | ||
const srcPath = sharedCore.pathResolve(path).replace(/\\/gu, '/'); | ||
const srcLower = srcPath.toLowerCase(); | ||
const program = createProgram([srcPath], { | ||
noEmitOnError: true, | ||
noImplicitAny: true, | ||
// target: ScriptTarget.ESNext, | ||
target: 99, | ||
inlineSourceMap: false, | ||
// module: ModuleKind.CommonJS, | ||
module: 1, | ||
}); | ||
const ret = { | ||
// ! otherwise node.getText() will fail | ||
checker: program.getTypeChecker(), | ||
sourceFile: null, | ||
}; | ||
for (const sourceFile of program.getSourceFiles()) { | ||
/* istanbul ignore else */ | ||
if (!sourceFile.isDeclarationFile) { | ||
/* istanbul ignore else */ | ||
if (sourceFile.fileName.toLowerCase() === srcLower) { | ||
ret.sourceFile = sourceFile; | ||
break; | ||
} | ||
} | ||
} | ||
return ret; | ||
} | ||
/** Retrieve node with specified position from caller */ | ||
function walkNodeWithPosition(options) { | ||
const { isCallExpression, forEachChild } = require('typescript'); | ||
const visit = (node, opts) => { | ||
const { line, character } = opts.sourceFile.getLineAndCharacterOfPosition(node.getStart()); | ||
/* istanbul ignore else */ | ||
if (line + 1 === opts.matchLine && character + 1 === opts.matchColumn) { | ||
/* istanbul ignore else */ | ||
if (isCallExpression(node)) { | ||
return node; // stop walk | ||
} | ||
} | ||
// void else continue walk | ||
/* istanbul ignore else */ | ||
if (node.getChildCount()) { | ||
return forEachChild(node, childNode => visit(childNode, opts)); | ||
} | ||
}; // End of visit | ||
const targetNode = forEachChild(options.sourceFile, node => visit(node, options)); | ||
return targetNode; | ||
} | ||
/** Retrieve node with specified matchFuncName */ | ||
function walkNode(options) { | ||
const { isCallExpression, forEachChild } = require('typescript'); | ||
const ret = new Set(); | ||
const visitor = (node, opts) => { | ||
/* istanbul ignore else */ | ||
if (isCallExpression(node)) { | ||
/* istanbul ignore else */ | ||
if (isCallerNameMatched(node.expression.getText(), options.matchFuncNameSet)) { | ||
ret.add(node); | ||
return; | ||
} // void else | ||
} // void else continue walk | ||
/* istanbul ignore else */ | ||
if (node.getChildCount()) { | ||
forEachChild(node, childNode => visitor(childNode)); | ||
} | ||
}; // End of visit | ||
forEachChild(options.sourceFile, node => visitor(node)); | ||
return ret; | ||
} | ||
/** | ||
* Generate dbDict .ts files, for CLI | ||
* include extra scopedColumns, aliasColumns, | ||
* for testing. | ||
* no path value emitted if no file generated. | ||
*/ | ||
function buildSource(options) { | ||
const opts = { | ||
...initBuildSrcOpts, | ||
...options, | ||
}; | ||
const walk$ = walkDirForCallerFuncTsFiles(opts); | ||
const build$ = walk$.pipe(operators.mergeMap(path => buildDbDict(path, opts), opts.concurrent), operators.filter(paths => paths.dictPath.length > 0 && paths.DictTypePath.length > 0)); | ||
return build$; | ||
} | ||
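// Usage sketch (CLI-side, assumed invocation; the observable emits one result per generated file pair):
//   buildSource({ path: ['./src'], concurrent: 5 }).subscribe(({ dictPath, DictTypePath }) => {
//     console.info(dictPath, DictTypePath)
//   })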
/** | ||
* Build dbDict const and type code | ||
*/ | ||
async function buildDbDict(file, options) { | ||
let dictPath = ''; | ||
let DictTypePath = ''; | ||
const srcFile = file.replace(/\\/ug, '/'); | ||
const dbMap = retrieveTypeFromTsFile(file); | ||
if (dbMap.size) { | ||
dictPath = await buildDbDictFile(srcFile, options, dbMap); | ||
if (dictPath.length) { | ||
DictTypePath = await buildDbDictTypeFile(srcFile, options, dbMap); | ||
} | ||
} | ||
return { dictPath, DictTypePath }; | ||
} | ||
/** | ||
* Build dbdict code from generics type for ts source file | ||
* | ||
* @returns file path if src file need parsed | ||
*/ | ||
async function buildDbDictFile(file, options, callerDbMap) { | ||
const opts = { | ||
...initBuildSrcOpts, | ||
...options, | ||
}; | ||
await unlinkBuildFile(file, opts); | ||
let path = ''; | ||
let content = ''; | ||
const map = callerDbMap ? callerDbMap : retrieveTypeFromTsFile(file); | ||
if (map.size) { | ||
map.forEach((arr, key) => { | ||
const kdd = genDbDict(arr); | ||
const [str, dbDictCode] = genDbDictConst(key, kdd, opts); | ||
if (!path) { | ||
path = str; // all value are the same one | ||
} | ||
content += `${dbDictCode}\n\n`; | ||
}); | ||
if (!path) { | ||
throw new Error('path value is empty'); | ||
} | ||
await appendDataToFile(path, content, opts.outputBanner); | ||
path = path.replace(/\\/ug, '/'); | ||
} | ||
return path; | ||
} | ||
/** | ||
* Build dbdict code from generics type for ts source file | ||
* | ||
* @returns file path if src file need parsed | ||
*/ | ||
async function buildDbDictTypeFile(srcFile, options, callerDbMap) { | ||
const opts = { | ||
...initBuildSrcOpts, | ||
...options, | ||
}; | ||
const targetPath = await genDbDictTypeTsFilePath(srcFile, opts.DictTypeFolder, opts.DictTypeFileName); | ||
let targetFileContent = ''; | ||
if (await sharedCore.isFileExists(targetPath)) { | ||
targetFileContent = (await sharedCore.readFileAsync(targetPath)).toString(); | ||
} | ||
const DictTypeBanner = targetFileContent.includes(opts.DictTypeBanner.trim()) | ||
? '' | ||
: opts.DictTypeBanner; | ||
let content = ''; | ||
const map = callerDbMap ? callerDbMap : retrieveTypeFromTsFile(srcFile); | ||
if (map.size) { | ||
map.forEach((tablesMapArr, callerId) => { | ||
content = content + genDbDictTypeCode({ | ||
srcFile, | ||
callerId, | ||
tablesMapArr, | ||
DictTypeSuffix: opts.DictTypeSuffix, | ||
targetPath, | ||
targetFileContent, | ||
content, | ||
}) + '\n'; | ||
}); | ||
if (content.trim().length) { | ||
await appendDataToFile(targetPath, content, DictTypeBanner); | ||
} | ||
} | ||
return targetPath.replace(/\\/ug, '/'); | ||
} | ||
function retrieveTypeFromTsFile(file) { | ||
const path = sharedCore.pathResolve(file).replace(/\\/ug, '/'); | ||
const { checker, sourceFile } = matchSourceFileWithFilePath(path); | ||
const ret = new Map(); | ||
if (sourceFile) { | ||
const nodeSet = walkNode({ | ||
sourceFile, | ||
matchFuncNameSet: globalCallerFuncNameSet, | ||
}); | ||
const callerTypeMap = genCallerTypeMapFromNodeSet(nodeSet, checker, sourceFile, path); | ||
callerTypeMap.forEach(([dbTagMap, dbColsTagMap], callerTypeId) => { | ||
const tbs = buildDbParam(dbTagMap); | ||
const mtCols = buildDbColsParam(dbColsTagMap); | ||
ret.set(callerTypeId, [tbs, mtCols]); | ||
}); | ||
} | ||
return ret; | ||
} | ||
function genDbDictConst(callerTypeId, dbDict, options) { | ||
const { path, line, column } = pickInfoFromCallerTypeId(callerTypeId); | ||
// const relativePath = relative(base, path) | ||
const targetPath = genDbDictTsFilePath(path, options.outputFileNameSuffix); | ||
const dbDictVarName = genVarName(options.exportVarPrefix, line, column); | ||
const code = `export const ${dbDictVarName} = ${JSON.stringify(dbDict, null, 2)} as const`; | ||
return [targetPath, code]; | ||
} | ||
function genDbDict(arr) { | ||
const [tables, columns] = arr; | ||
const base = { | ||
tables, | ||
columns, | ||
}; | ||
const kdd = genDbDictFromBase(base); | ||
return kdd; | ||
} | ||
/** Save (k)tables of one file */ | ||
async function appendDataToFile(path, code, outputPrefix) { | ||
const retCode = outputPrefix | ||
? `${outputPrefix}\n\n${code}\n\n` | ||
: `${code}\n\n`; | ||
await sharedCore.writeFileAsync(path, retCode, { flag: 'a' }); | ||
return path.replace(/\\/gu, '/'); | ||
} | ||
/** | ||
* Unlink dbDict const file starting with path, | ||
* create if not exists, empty if exists | ||
*/ | ||
async function unlinkBuildFile(path, options) { | ||
const target = genDbDictTsFilePath(path, options.outputFileNameSuffix); | ||
if (await sharedCore.isFileExists(target)) { | ||
await sharedCore.unlinkAsync(target); | ||
} | ||
return path; | ||
} | ||
function validateContentHasDupTypeName(opts) { | ||
if (includeExportTypeName(opts.targetFileContent, opts.typeName)) { | ||
const msg = `Build warn: | ||
Target file has same typeName: "${opts.typeName}" but different type declaration in the source file, use other generics input name, or create type alias of the type and pass the alias as generics param. Like: | ||
" | ||
type DbAlias = Db | ||
export const dbDict = genDbDictFromType<DbAlias>() | ||
", | ||
source: "${opts.srcFile}", | ||
target file name: "${opts.targetPath}", | ||
target file content: | ||
------- target content start ------------ | ||
${opts.targetFileContent} | ||
------- target content end ------------ | ||
`; | ||
throw new TypeError(msg); | ||
} | ||
else if (includeExportTypeName(opts.content, opts.typeName)) { | ||
const msg = `Build warn: | ||
Duplicate typeName: "${opts.typeName}" but different type declaration in the source file, use other generics input name, or create type alias of the type and pass the alias as generics param. Like:, | ||
" | ||
type DbAlias = Db | ||
export const dbDict = genDbDictFromType<DbAlias>() | ||
", | ||
source: "${opts.srcFile}", | ||
content: | ||
------- content start ------------ | ||
${opts.content} | ||
------- content end ------------ | ||
`; | ||
throw new TypeError(msg); | ||
} | ||
} | ||
function genDbDictTypeCode(options) { | ||
const { callerId, tablesMapArr, DictTypeSuffix, content, targetFileContent, srcFile, targetPath, } = options; | ||
let ret = ''; | ||
const kdd = genDbDict(tablesMapArr); | ||
const dictTypeName = genDictTypeNameFromCallerId(callerId, DictTypeSuffix); | ||
const dictVarName = genDictVarNameFromDictTypeName(dictTypeName); | ||
let writeDictConst = true; | ||
if (hasSameDictVar(targetFileContent, kdd)) { | ||
if (includeExportTypeName(targetFileContent, dictTypeName)) { | ||
console.info(`Build notice: | ||
message: skip write data, taregetFileContent already has same DictType: "${dictTypeName}", | ||
srcFile: "${srcFile}", | ||
targetPath: "${targetPath}". | ||
`); | ||
return ''; | ||
} | ||
else { | ||
// same generics type declaration but different type name, so create an ref type instead of write same type once more | ||
// const dictTypeNameAlias = genValidDictTypeAliasName( | ||
// targetFileContent, | ||
// dictTypeName, | ||
// ) | ||
const info = retrieveDictInfoByDictConst(targetFileContent, kdd); | ||
if (info.dictTypeName) { | ||
return `export type ${dictTypeName} = ${info.dictTypeName}`; | ||
} | ||
else { | ||
// only dict var, no DictType, strange. go down to create type declaration | ||
writeDictConst = false; | ||
} | ||
} | ||
} | ||
else if (hasSameDictVar(content, kdd)) { | ||
return ''; | ||
} | ||
else { | ||
validateContentHasDupTypeName({ | ||
srcFile, targetPath, targetFileContent, typeName: dictTypeName, content, | ||
}); | ||
} | ||
const code = genDbDictTypeDeclaration(kdd, dictTypeName).trim(); | ||
const code2 = code.replace(/ {4}/ug, ' '); | ||
if (writeDictConst) { | ||
// ! code must one line w/o new line | ||
const code3 = `export const ${dictVarName} = ` + JSON.stringify(kdd, null, 0).replace(/\n|\r/ug, ''); | ||
ret += `${code2}\n${code3}`; | ||
} | ||
else { | ||
ret += `${code2}`; | ||
} | ||
return ret.trim(); | ||
} | ||
// eslint-disable-next-line import/no-extraneous-dependencies | ||
/** | ||
* Generate DbDict from generics type T | ||
* Loading compiled js file if prod env | ||
*/ | ||
function genDbDictFromType(options) { | ||
const opts = options | ||
? { ...initOptions, ...options } | ||
: { ...initOptions }; | ||
if (process.env.NODE_ENV === 'production') { | ||
opts.forceLoadDbDictJs = true; | ||
if (opts.forceLoadDbDictJsPathReplaceRules === null) { | ||
opts.forceLoadDbDictJsPathReplaceRules = [[/\/src\//u, '/dist/']]; | ||
} | ||
} | ||
const caller = getCallerStack(opts.callerDistance); | ||
const kdd = loadDbDictParamFromCallerInfo(opts, caller, opts.columnNameCreationFn); | ||
return kdd; | ||
} | ||
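// Usage sketch (consumer-side, with an assumed table type):
//   interface Db { tb_user: { uid: number, name: string } }
//   const dict = genDbDictFromType<Db>()
//   // dict.tables.tb_user            === 'tb_user'
//   // dict.columns.tb_user.uid       === 'uid'
//   // dict.scopedColumns.tb_user.uid === 'tb_user.uid'   (via defaultCreateScopedColumnName)
//   // dict.aliasColumns.tb_user.uid  deep-equals { tbUserUid: 'tb_user.uid' }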
function loadDbDictParamFromCallerInfo(options, caller, | ||
/** false will use original col name w/o table name prefix */ | ||
columnNameCreationFn = defaultCreateScopedColumnName) { | ||
if (!options.forceLoadDbDictJs && isTsFile(caller.path)) { | ||
return loadDbDictFromTsTypeFile(options.callerDistance + 3, columnNameCreationFn); | ||
} | ||
else { // run in js or debug in ts | ||
return loadDbDictFromJsBuiltFile(options, caller); | ||
} | ||
} | ||
function loadDbDictFromTsTypeFile(callerDistance, | ||
/** false will use original col name w/o table name prefix */ | ||
columnNameCreationFn = defaultCreateScopedColumnName) { | ||
const base = genDbDictBaseFromType({ callerDistance }); | ||
const ret = genDbDictFromBase(base, columnNameCreationFn); | ||
return ret; | ||
} | ||
function loadDbDictFromJsBuiltFile(options, caller) { | ||
const { outputFileNameSuffix, forceLoadDbDictJsPathReplaceRules } = options; | ||
let path = `${caller.path.slice(0, -3)}.${outputFileNameSuffix}.js`; | ||
path = reWriteLoadingPath(path, forceLoadDbDictJsPathReplaceRules); | ||
fs.accessSync(path, fs.constants.R_OK); | ||
const ret = loadDbDictVarFromFile({ path, caller, options }); | ||
return ret; | ||
} | ||
/** | ||
* Generate DbDictBase from generics type T | ||
*/ | ||
function genDbDictBaseFromType(options) { | ||
const opts = options | ||
? { ...initGenDbDictFromTypeOpts, ...options } | ||
: { ...initGenDbDictFromTypeOpts }; | ||
const depth = typeof opts.callerDistance === 'number' && opts.callerDistance > 0 | ||
? opts.callerDistance | ||
: 0; | ||
const caller = getCallerStack(depth); | ||
if (isTsFile(caller.path)) { | ||
return genDbDictBaseFromCaller(caller, opts); | ||
} | ||
else { | ||
throw TypeError('Not .ts file'); | ||
} | ||
} | ||
function genDbDictBaseFromCaller(caller, options) { | ||
const opts = { | ||
// callerDistance: initOptions.callerDistance, | ||
// callerFuncNames: initOptions.callerFuncNames, | ||
...options, | ||
caller, | ||
cacheMap: cacheMap, | ||
}; | ||
// "/kmore-mono/packages/kmore-types/test/test.config.ts:13:23" | ||
const callerId = `${caller.path}:${caller.line}:${caller.column}`; | ||
const localTypeId = opts.cacheMap.callerIdToLocalTypeIdMap.get(callerId); | ||
if (localTypeId) { // from cache | ||
const tagsMapArr = opts.cacheMap.localTypeMap.get(localTypeId); | ||
if (tagsMapArr) { | ||
return buildDbDictBaseFromTagsMapArr(tagsMapArr); | ||
} | ||
else { | ||
throw new Error(`cacheMap.localTypeMap not contains key: "${localTypeId}".`); | ||
} | ||
} | ||
else { | ||
const localTypeItem = retrieveLocalTypeItemFromType(opts); | ||
if (!localTypeItem) { | ||
throw new Error(`retrieveLocalTypeMapFromType() return empty with key: "${localTypeId ? localTypeId : 'N/A'}".`); | ||
} | ||
// id is localTypeId | ||
// map maybe empty, so try from cache | ||
const { localTypeId: id, tagsMapArr } = localTypeItem; | ||
opts.cacheMap.callerIdToLocalTypeIdMap.set(callerId, id); | ||
if (tagsMapArr && tagsMapArr[0].size) { | ||
opts.cacheMap.localTypeMap.set(id, tagsMapArr); | ||
return buildDbDictBaseFromTagsMapArr(tagsMapArr); | ||
} | ||
else { // retrieved only localTypeId, then try from cache | ||
const tagsMapArr2 = opts.cacheMap.localTypeMap.get(id); | ||
if (!tagsMapArr2) { | ||
throw new Error(`cacheMap.localTypeMap not contains key: "${id}" or value empty.`); | ||
} | ||
else if (!tagsMapArr2[0].size) { | ||
throw new Error(`cacheMap.localTypeMap key: "${id}" value empty.`); | ||
} | ||
return buildDbDictBaseFromTagsMapArr(tagsMapArr2); | ||
} | ||
} | ||
} | ||
function buildDbDictBaseFromTagsMapArr(tagsMapArr) { | ||
const [dbTagMap, dbColsTagMap] = tagsMapArr; | ||
const ret = { | ||
tables: buildDbParam(dbTagMap), | ||
columns: buildDbColsParam(dbColsTagMap), | ||
}; | ||
return ret; | ||
} | ||
function retrieveLocalTypeItemFromType(options) { | ||
const { caller } = options; | ||
const { checker, sourceFile } = matchSourceFileWithFilePath(caller.path); | ||
/* istanbul ignore else */ | ||
if (!sourceFile) { | ||
// throw new Error(`Can not retrieve generics type info from file: "${caller.path}"`) | ||
return; | ||
} | ||
// genDbDictFromType<Db>() | ||
const node = walkNodeWithPosition({ | ||
sourceFile, | ||
matchLine: caller.line, | ||
matchColumn: caller.column, | ||
matchFuncNameSet: globalCallerFuncNameSet, | ||
}); | ||
/* istanbul ignore else */ | ||
if (node) { | ||
const nodeInfo = genInfoFromNode({ | ||
checker, | ||
node, | ||
path: caller.path, | ||
sourceFile, | ||
}); | ||
if (nodeInfo) { | ||
const { localTypeId, dbTagMap, dbColsTagMap } = nodeInfo; | ||
if (dbTagMap.size) { | ||
// localTypeId: "/kmore-mono/packages/kmore-types/test/test.config.ts:typeid-Db" | ||
return { | ||
localTypeId, | ||
tagsMapArr: [dbTagMap, dbColsTagMap], | ||
}; | ||
} | ||
else { | ||
return { | ||
localTypeId, | ||
}; | ||
} | ||
} | ||
} | ||
} | ||
// function transformerFactory<T extends ts.Node>(ctx: ts.TransformationContext): ts.Transformer<T> { | ||
// const transformer = (rootNode: T) => { | ||
// const visit: ts.Visitor = (nodeParam: ts.Node): ts.Node => { | ||
// const node = ts.visitEachChild(nodeParam, visit, ctx) | ||
// return node | ||
// } | ||
// return ts.visitNode(rootNode, visit) | ||
// } | ||
// return transformer | ||
// } | ||
exports.buildDbColsParam = buildDbColsParam; | ||
exports.buildDbDict = buildDbDict; | ||
exports.buildDbParam = buildDbParam; | ||
exports.buildSource = buildSource; | ||
exports.cacheMap = cacheMap; | ||
exports.createNullObject = createNullObject; | ||
exports.defaultCreateScopedColumnName = defaultCreateScopedColumnName; | ||
exports.defaultPropDescriptor = defaultPropDescriptor; | ||
exports.dictObjectEquals = dictObjectEquals; | ||
exports.genCallerTypeMapFromNodeSet = genCallerTypeMapFromNodeSet; | ||
exports.genDbDictFromBase = genDbDictFromBase; | ||
exports.genDbDictFromType = genDbDictFromType; | ||
exports.genDbDictTsFilePath = genDbDictTsFilePath; | ||
exports.genDbDictTypeDeclaration = genDbDictTypeDeclaration; | ||
exports.genDbDictTypeTsFilePath = genDbDictTypeTsFilePath; | ||
exports.genDictTypeName = genDictTypeName; | ||
exports.genDictTypeNameFromCallerId = genDictTypeNameFromCallerId; | ||
exports.genDictVarNameFromDbName = genDictVarNameFromDbName; | ||
exports.genDictVarNameFromDictTypeName = genDictVarNameFromDictTypeName; | ||
exports.genInfoFromNode = genInfoFromNode; | ||
exports.genValidDictTypeAliasName = genValidDictTypeAliasName; | ||
exports.genVarName = genVarName; | ||
exports.getCallerStack = getCallerStack; | ||
exports.getStack = getStack; | ||
exports.globalCallerFuncNameSet = globalCallerFuncNameSet; | ||
exports.hasContainsCallerFuncNames = hasContainsCallerFuncNames; | ||
exports.hasExtColumns = hasExtColumns; | ||
exports.hasSameDictVar = hasSameDictVar; | ||
exports.includeExportTypeName = includeExportTypeName; | ||
exports.initBuildSrcOpts = initBuildSrcOpts; | ||
exports.initGenDbDictFromTypeOpts = initGenDbDictFromTypeOpts; | ||
exports.initOptions = initOptions; | ||
exports.isCallerNameMatched = isCallerNameMatched; | ||
exports.isTsFile = isTsFile; | ||
exports.loadDbDictParamFromCallerInfo = loadDbDictParamFromCallerInfo; | ||
exports.loadDbDictVarFromFile = loadDbDictVarFromFile; | ||
exports.loadFile = loadFile; | ||
exports.matchSourceFileWithFilePath = matchSourceFileWithFilePath; | ||
exports.parseCallerFuncNames = parseCallerFuncNames; | ||
exports.pickInfoFromCallerTypeId = pickInfoFromCallerTypeId; | ||
exports.reWriteLoadingPath = reWriteLoadingPath; | ||
exports.reservedTbListKeys = reservedTbListKeys; | ||
exports.retrieveDictInfoByDictConst = retrieveDictInfoByDictConst; | ||
exports.retrieveDictVarMapFrom = retrieveDictVarMapFrom; | ||
exports.retrieveDictVarsFrom = retrieveDictVarsFrom; | ||
exports.retrieveLocalTypeItemFromType = retrieveLocalTypeItemFromType; | ||
exports.scopedSnakeToCamel = scopedSnakeToCamel; | ||
exports.snakeToCamel = snakeToCamel; | ||
exports.updateDbDictFile = updateDbDictFile; | ||
exports.validateDuplicateProp = validateDuplicateProp; | ||
exports.validateParamTables = validateParamTables; | ||
exports.validateTbName = validateTbName; | ||
exports.walkDirForCallerFuncTsFiles = walkDirForCallerFuncTsFiles; | ||
exports.walkNode = walkNode; | ||
exports.walkNodeWithPosition = walkNodeWithPosition; | ||
exports.genDbDict = genDbDict; | ||
//# sourceMappingURL=index.cjs.js.map |
@@ -1,7 +0,1 @@
export * from './lib/config';
export * from './lib/model';
export * from './lib/build';
export * from './lib/compiler';
export * from './lib/ts-util';
export * from './lib/util';
export * from './lib/db-dict';
export * from './lib/index';
@@ -1,1 +0,2 @@
"use strict";
export * from './dict';
export * from './types';
{
"name": "kmore-types",
"author": "waiting",
"version": "2.0.5",
"version": "3.0.0",
"description": "Retrieve types info from ts file",
@@ -17,3 +17,3 @@ "keywords": [
"module": "./dist/index.js",
"types": "./dist/index.d.ts",
"types": "./src/index.ts",
"bugs": {
@@ -29,29 +29,31 @@ "url": "https://github.com/waitingsong/kmore/issues"
"dependencies": {
"@waiting/shared-core": "5",
"@waiting/shared-types": "5",
"rxjs": "6",
"rxwalker": "^3.0.0",
"source-map-support": "^0.5.13"
"@waiting/shared-core": "8",
"@waiting/shared-types": "8"
},
"devDependencies": {
"@types/source-map-support": "*",
"@rollup/plugin-node-resolve": "11",
"@waiting/shared-types-dev": "2",
"cross-env": "7",
"rxrunscript": "5",
"typescript": "4"
"rollup-plugin-typescript2": "^0.30.0"
},
"peerDependencies": {
"typescript": "^4.1"
},
"engines": {
"node": ">=12.13.0"
"node": ">=14.15.0"
},
"files": [
"src/**/*.ts",
"bin/*.js",
"dist"
],
"publishConfig": {
"access": "public"
},
"scripts": {
"build": "npm run tsc && npm run rp",
"clean": "rm -rf dist/* .vscode/.tsbuildinfo .vscode/.tsbuildinfo.* ",
"cov": "cross-env TS_NODE_PROJECT=test/tsconfig.json nyc mocha",
"clean": "rm -rf dist/* -rf && npm run clean:lock && npm run clean:cache",
"clean:cache": "rm -rf .eslintcache .vscode/.tsbuildinfo .vscode/.tsbuildinfo.*",
"clean:lock": "rm package-lock.json -f",
"cov": "cross-env TS_NODE_PROJECT=test/tsconfig.json nyc mocha --parallel=false",
"lint": "eslint --fix --cache {src,test}/**/*.ts",
"lint:nofix": "eslint --cache {src,test}/**/*.ts",
"purge": "npm run clean && rm node_modules -rf",
"rp": "rollup -c rollup.config.js",
@@ -92,3 +94,3 @@ "rp:w": "npm run tsc:w | rollup -wc rollup.config.js",
},
"gitHead": "6b32c45aa6785e39ef8bffb807b45f2ba03eba13"
"gitHead": "35a1501ffd04f87eae83654fcf8c09a449b1ab88"
}
Sorry, the diff of this file is not supported yet
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
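The dynamic require, environment variable access, and filesystem access flags correspond to patterns visible in the 2.0.5 bundle shown above; a condensed, illustrative extract (not the exact shipped lines):

// Dynamic require: the module path is computed at runtime (loadFile)
const mods = require(path);
// Environment variable access: production mode switches to loading the prebuilt dict js (genDbDictFromType)
if (process.env.NODE_ENV === 'production') { opts.forceLoadDbDictJs = true; }
// Filesystem access: verifies the generated dict file is readable before loading it (loadDbDictFromJsBuiltFile)
fs.accessSync(path, fs.constants.R_OK);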
+ Added @waiting/shared-core@8.6.0 (transitive)
+ Added @waiting/shared-types@8.6.0 (transitive)
+ Added typescript@5.7.3 (transitive)
- Removed rxjs@6
- Removed rxwalker@^3.0.0
- Removed source-map-support@^0.5.13
- Removed @waiting/shared-core@5.9.1 (transitive)
- Removed @waiting/shared-types@5.9.1 (transitive)
- Removed buffer-from@1.1.2 (transitive)
- Removed rxwalker@3.1.0 (transitive)
- Removed source-map@0.6.1 (transitive)
- Removed source-map-support@0.5.21 (transitive)
- Removed typescript@4.9.5 (transitive)
Updated @waiting/shared-core@8
Updated @waiting/shared-types@8