Comparing sqltyper version 1.1.0 to 1.2.0
{ | ||
"name": "sqltyper", | ||
"version": "1.1.0", | ||
"version": "1.2.0", | ||
"description": "Typed SQL queries in PostgreSQL", | ||
@@ -34,6 +34,6 @@ "main": "dist/src/index.js", | ||
"@types/jest": "^26.0.10", | ||
"@types/node": "^14.6.0", | ||
"@types/node": "^15.0.1", | ||
"@types/ramda": "^0.27.3", | ||
"@types/wrap-ansi": "^3.0.0", | ||
"@types/yargs": "^16.0.0", | ||
"@types/yargs": "^17.0.0", | ||
"@typescript-eslint/eslint-plugin": "^4.13.0", | ||
@@ -47,3 +47,3 @@ "@typescript-eslint/parser": "^4.13.0", | ||
"ts-jest": "^26.2.0", | ||
"ts-node": "^9.1.1" | ||
"ts-node": "^10.0.0" | ||
}, | ||
@@ -59,4 +59,4 @@ "dependencies": { | ||
"wrap-ansi": "^7.0.0", | ||
"yargs": "^16.2.0" | ||
"yargs": "^17.0.0" | ||
} | ||
} |
@@ -36,3 +36,3 @@ #!/usr/bin/env node | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const args = parseArgs(); | ||
const args = yield parseArgs(); | ||
if (!args._.length) { | ||
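The CLI entry point now awaits `parseArgs()`, in line with the bump from yargs 16 to 17 in package.json. Below is a minimal sketch of what an async argument parser could look like under that assumption; the flag names and the `parseAsync` wiring are illustrative, not sqltyper's actual interface:

```
// Hypothetical sketch of an async CLI parser with yargs 17.
// The option names below are illustrative, not sqltyper's real flags.
import yargs from 'yargs'
import { hideBin } from 'yargs/helpers'

async function parseArgs() {
  return yargs(hideBin(process.argv))
    .usage('Usage: $0 [options] DIRECTORY...')
    .option('index', {
      type: 'boolean',
      default: true,
      description: 'Write an index.ts to each processed directory',
    })
    .option('watch', { alias: 'w', type: 'boolean', default: false })
    .parseAsync() // resolves to the parsed argument object
}

async function main() {
  const args = await parseArgs()
  if (!args._.length) {
    console.error('No input files. Try with `--help`.')
    process.exit(1)
  }
}

void main()
```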
@@ -198,5 +198,6 @@ console.error('No input files. Try with `--help`.'); | ||
return moduleDirs; | ||
const newModules = yield eventHandler(moduleDir.modules, type, dirPath, fileName); | ||
const newModules = yield eventHandler(moduleDir.nestedDirs, moduleDir.modules, type, dirPath, fileName); | ||
moduleDirs = pipeable_1.pipe(modifyWhere((moduleDir) => moduleDir.dirPath === dirPath, (moduleDir) => ({ | ||
dirPath: moduleDir.dirPath, | ||
nestedDirs: moduleDir.nestedDirs, | ||
modules: newModules, | ||
@@ -210,3 +211,3 @@ hasErrors: moduleDir.hasErrors, | ||
function makeWatchEventHandler(clients, options) { | ||
return (tsModules, type, dirPath, sqlFileName) => __awaiter(this, void 0, void 0, function* () { | ||
return (nestedDirs, tsModules, type, dirPath, sqlFileName) => __awaiter(this, void 0, void 0, function* () { | ||
const sqlFilePath = path.join(dirPath, sqlFileName); | ||
@@ -225,3 +226,3 @@ let result; | ||
} | ||
result = pipeable_1.pipe(result, Task.chain((newModules) => maybeWriteIndexModule(options.index, dirPath, newModules, options.prettify))); | ||
result = pipeable_1.pipe(result, Task.chain((newModules) => maybeWriteIndexModule(options.index, dirPath, nestedDirs, newModules, options.prettify))); | ||
return yield result(); | ||
@@ -242,3 +243,3 @@ }); | ||
console.log('Starting compilation...'); | ||
}), Task.chain(() => mapDirectories(dirPaths, fileExtensions, (filePath) => processSQLFile(clients, filePath, false, options), (dirPath, tsModules) => processSQLDirectory(dirPath, tsModules, options))), Task.map((moduleDirs) => { | ||
}), Task.chain(() => mapDirectories(dirPaths, fileExtensions, (filePath) => processSQLFile(clients, filePath, false, options), (dirPath, nestedDirs, tsModules) => processSQLDirectory(dirPath, nestedDirs, tsModules, options))), Task.map((moduleDirs) => { | ||
if (moduleDirs.some((moduleDir) => moduleDir.hasErrors)) { | ||
@@ -254,3 +255,3 @@ console.log('Compilation failed.'); | ||
function checkDirectories(clients, fileExtensions, dirPaths, options) { | ||
return mapDirectories(dirPaths, fileExtensions, (filePath) => processSQLFile(clients, filePath, true, options), (_dirPath, tsModules) => checkDirectoryResult(tsModules)); | ||
return mapDirectories(dirPaths, fileExtensions, (filePath) => processSQLFile(clients, filePath, true, options), (_dirPath, _nestedDirs, tsModules) => checkDirectoryResult(tsModules)); | ||
} | ||
@@ -264,3 +265,3 @@ function checkDirectoryResult(tsModules) { | ||
function mapDirectory(dirPath, fileExtensions, fileProcessor, dirProcessor) { | ||
return pipeable_1.pipe(findSQLFilePaths(dirPath, fileExtensions), Task.chain((filePaths) => fp_utils_1.traverseATs(filePaths, fileProcessor)), Task.chain((results) => dirProcessor(dirPath, results))); | ||
return pipeable_1.pipe(findSQLFilePaths(dirPath, fileExtensions), Task.chain((res) => pipeable_1.pipe(fp_utils_1.traverseATs(res.nestedDirs, (dirPath) => mapDirectory(dirPath, fileExtensions, fileProcessor, dirProcessor)), Task.chain((processedDirs) => pipeable_1.pipe(fp_utils_1.traverseATs(res.sqlFiles, fileProcessor), Task.chain((processedFiles) => dirProcessor(dirPath, processedDirs, processedFiles))))))); | ||
} | ||
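The one-liner above folds the new recursion into a single fp-ts pipe: list the directory, recurse into every nested directory with the same processors, process the SQL files, then hand both results to `dirProcessor`. A plain async/await sketch of the same control flow, with the listing function and result types simplified into assumptions:

```
// Simplified sketch of the recursive walk; the fp-ts pipeline above does the
// same thing with Tasks. The Listing shape and processor signatures are
// assumptions based on the diff.
interface Listing {
  sqlFiles: string[]
  nestedDirs: string[]
}

async function mapDirectorySketch<F, D>(
  dirPath: string,
  list: (dir: string) => Promise<Listing>,
  fileProcessor: (filePath: string) => Promise<F>,
  dirProcessor: (dir: string, nestedDirs: D[], files: F[]) => Promise<D>
): Promise<D> {
  const { sqlFiles, nestedDirs } = await list(dirPath)

  // Depth-first: finish every nested directory before the current one, so
  // dirProcessor always sees fully built results for its children.
  const processedDirs: D[] = []
  for (const nested of nestedDirs) {
    processedDirs.push(
      await mapDirectorySketch(nested, list, fileProcessor, dirProcessor)
    )
  }

  const processedFiles: F[] = []
  for (const file of sqlFiles) {
    processedFiles.push(await fileProcessor(file))
  }

  return dirProcessor(dirPath, processedDirs, processedFiles)
}
```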
@@ -302,11 +303,15 @@ function processSQLFile(clients, filePath, checkOnly, options) { | ||
} | ||
function processSQLDirectory(dirPath, modules, options) { | ||
function processSQLDirectory(dirPath, nestedDirs, modules, options) { | ||
const successfulModules = pipeable_1.pipe(modules, Array.filterMap(function_1.identity)); | ||
const hasErrors = modules.some(Option.isNone); | ||
return pipeable_1.pipe(maybeWriteIndexModule(options.index, dirPath, successfulModules, options.prettify), Task.map((modules) => ({ hasErrors, dirPath, modules }))); | ||
return pipeable_1.pipe(maybeWriteIndexModule(options.index, dirPath, nestedDirs, successfulModules, options.prettify), Task.map((modules) => ({ hasErrors, dirPath, modules, nestedDirs }))); | ||
} | ||
function maybeWriteIndexModule(write, dirPath, tsModules, prettify) { | ||
function moduleDirContainsSqlFiles(dir) { | ||
return (dir.modules.length > 0 || dir.nestedDirs.some(moduleDirContainsSqlFiles)); | ||
} | ||
function maybeWriteIndexModule(write, dirPath, nestedDirs, tsModules, prettify) { | ||
const tsPath = path.join(dirPath, 'index.ts'); | ||
if (write) { | ||
return pipeable_1.pipe(Task.of(tsModules), Task.map((modules) => pipeable_1.pipe(modules, Array.sort(Ord.fromCompare((a, b) => Ordering.sign(a.tsFileName.localeCompare(b.tsFileName)))))), Task.chain((sortedModules) => index_1.indexModuleTS(sortedModules, { | ||
if (write && | ||
(tsModules.length > 0 || nestedDirs.some(moduleDirContainsSqlFiles))) { | ||
return pipeable_1.pipe(Task.of(tsModules), Task.map((modules) => pipeable_1.pipe(modules, Array.sort(Ord.fromCompare((a, b) => Ordering.sign(a.tsFileName.localeCompare(b.tsFileName)))))), Task.chain((sortedModules) => index_1.indexModuleTS(dirPath, nestedDirs.filter(moduleDirContainsSqlFiles), sortedModules, { | ||
prettierFileName: prettify ? tsPath : null, | ||
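The per-directory result now carries `nestedDirs`, so the module tree mirrors the directory tree, and an `index.ts` is only written when the directory or one of its descendants actually produced modules. A rough sketch of the data shape and the predicate; the field types are inferred from the diff and should be read as assumptions:

```
// Assumed shape of the per-directory result, inferred from the diff.
interface TsModule {
  tsFileName: string
  funcName: string
}

interface ModuleDir {
  dirPath: string
  nestedDirs: ModuleDir[]
  modules: TsModule[]
  hasErrors: boolean
}

// A directory gets an index.ts only if it, or some directory below it,
// produced at least one module.
function moduleDirContainsSqlFiles(dir: ModuleDir): boolean {
  return dir.modules.length > 0 || dir.nestedDirs.some(moduleDirContainsSqlFiles)
}
```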
@@ -360,2 +365,8 @@ })), Task.chain((tsCode) => () => __awaiter(this, void 0, void 0, function* () { | ||
} | ||
function mapPartial(as, f) { | ||
function isNotNull(a) { | ||
return a !== null; | ||
} | ||
return as.map(f).filter(isNotNull); | ||
} | ||
function findSQLFilePaths(dirPath, fileExtensions) { | ||
@@ -365,3 +376,6 @@ return pipeable_1.pipe(() => fs_1.promises.readdir(dirPath, { | ||
withFileTypes: true, | ||
}), Task.chain((dirents) => pipeable_1.pipe(fp_utils_1.traverseATs(dirents, (dirent) => pipeable_1.pipe(isSQLFile(fileExtensions, dirPath, dirent.name), Task.map((is) => (is ? Option.some(dirent) : Option.none)))), Task.map(Array.filterMap(function_1.identity)), Task.map((dirents) => dirents.map((dirent) => path.join(dirPath, dirent.name)))))); | ||
}), Task.chain((dirents) => pipeable_1.pipe(fp_utils_1.traverseATs(dirents, (dirent) => pipeable_1.pipe(categoriseDirEnt(fileExtensions, dirPath, dirent.name), Task.map((typ) => [typ, dirent]))), Task.map((dirents) => ({ | ||
sqlFiles: mapPartial(dirents, ([typ, dirent]) => typ === 'sqlfile' ? path.join(dirPath, dirent.name) : null), | ||
nestedDirs: mapPartial(dirents, ([typ, dirent]) => typ === 'dir' ? path.join(dirPath, dirent.name) : null), | ||
}))))); | ||
} | ||
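`findSQLFilePaths` no longer returns a flat list of file paths; it classifies each directory entry and splits the results into `sqlFiles` and `nestedDirs` using the `mapPartial` helper added above. A simplified async sketch of that splitting step, where `classify` stands in for `categoriseDirEnt` (shown in the next hunk):

```
import { promises as fs } from 'fs'
import * as path from 'path'

// Typed equivalent of the mapPartial helper from the diff: map, then drop nulls.
function mapPartial<A, B>(as: A[], f: (a: A) => B | null): B[] {
  return as.flatMap((a) => {
    const b = f(a)
    return b !== null ? [b] : []
  })
}

// Simplified sketch of the new findSQLFilePaths result shape.
async function listDir(
  dirPath: string,
  classify: (dir: string, name: string) => Promise<'sqlfile' | 'dir' | null>
): Promise<{ sqlFiles: string[]; nestedDirs: string[] }> {
  const dirents = await fs.readdir(dirPath, { withFileTypes: true })
  const classified = await Promise.all(
    dirents.map(async (d) => [await classify(dirPath, d.name), d.name] as const)
  )
  return {
    sqlFiles: mapPartial(classified, ([kind, name]) =>
      kind === 'sqlfile' ? path.join(dirPath, name) : null
    ),
    nestedDirs: mapPartial(classified, ([kind, name]) =>
      kind === 'dir' ? path.join(dirPath, name) : null
    ),
  }
}
```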
@@ -374,3 +388,3 @@ function getOutputPath(filePath) { | ||
} | ||
function isSQLFile(extensions, dirPath, fileName) { | ||
function categoriseDirEnt(extensions, dirPath, fileName) { | ||
return () => __awaiter(this, void 0, void 0, function* () { | ||
@@ -382,5 +396,9 @@ let stats; | ||
catch (_err) { | ||
return false; | ||
return null; | ||
} | ||
return stats.isFile() && hasOneOfExtensions(extensions, fileName); | ||
return stats.isFile() && hasOneOfExtensions(extensions, fileName) | ||
? 'sqlfile' | ||
: stats.isDirectory() | ||
? 'dir' | ||
: null; | ||
}); | ||
@@ -387,0 +405,0 @@ } |
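`isSQLFile` has grown into `categoriseDirEnt`, which classifies an entry as a SQL file, a directory, or something to ignore, based on `fs.stat`. A plain async sketch of that classification without the fp-ts plumbing; `hasOneOfExtensions` is referenced but not shown in the diff, so its body here is an assumption:

```
import { promises as fs } from 'fs'
import * as path from 'path'

type DirEntKind = 'sqlfile' | 'dir' | null

// Assumption: hasOneOfExtensions just checks the file name's extension; its
// real implementation is not part of this diff.
const hasOneOfExtensions = (extensions: string[], fileName: string): boolean =>
  extensions.includes(path.extname(fileName))

// Plain async/await sketch of the classification done by categoriseDirEnt.
async function classifyEntry(
  extensions: string[],
  dirPath: string,
  fileName: string
): Promise<DirEntKind> {
  try {
    const stats = await fs.stat(path.join(dirPath, fileName))
    if (stats.isFile() && hasOneOfExtensions(extensions, fileName)) return 'sqlfile'
    if (stats.isDirectory()) return 'dir'
    return null // sockets, FIFOs, etc. are ignored
  } catch {
    return null // unreadable entries (e.g. broken symlinks) are ignored as well
  }
}
```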
@@ -149,6 +149,13 @@ "use strict"; | ||
} | ||
function generateIndexModule(modules) { | ||
return modules | ||
function generateIndexModule(dirPath, nestedDirs, modules) { | ||
const nestedDirsStr = nestedDirs | ||
.map((dir) => { | ||
const name = path.relative(dirPath, dir.dirPath); | ||
return `export * as ${name} from './${name}';`; | ||
}) | ||
.join('\n'); | ||
const modulesStr = modules | ||
.map(({ tsFileName, funcName }) => `export { ${funcName} } from './${baseNameWithoutExt(tsFileName)}';`) | ||
.join('\n'); | ||
return nestedDirsStr + '\n' + modulesStr; | ||
} | ||
@@ -155,0 +162,0 @@ exports.generateIndexModule = generateIndexModule; |
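`generateIndexModule` now emits namespace re-exports for nested directories in addition to the per-query function re-exports. Assuming a directory containing a nested `reports/` subdirectory and a module derived from `getUser.sql` (both names hypothetical), the generated `index.ts` would look roughly like:

```
// Illustrative index.ts produced for a directory that contains a nested
// reports/ directory and a module generated from getUser.sql.
export * as reports from './reports';
export { getUser } from './getUser';
```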
@@ -23,6 +23,3 @@ "use strict"; | ||
exports.sequenceAIM = sequenceAIM; | ||
const concat2 = () => (arr1) => (arr2) => [ | ||
...arr1, | ||
...arr2, | ||
]; | ||
const concat2 = () => (arr1) => (arr2) => [...arr1, ...arr2]; | ||
exports.concat2 = concat2; |
@@ -17,3 +17,3 @@ "use strict"; | ||
function generateTSCode(clients, sourceFileName, stmt, funcName, options) { | ||
const { prettierFileName = null, target = 'pg', module = 'pg' } = options || {}; | ||
const { prettierFileName = null, target = 'pg', module = 'pg', } = options || {}; | ||
return pipeable_1.pipe(TaskEither.right(stmt), TaskEither.chain((stmt) => TaskEither.rightTask(codegen_1.generateTypeScript(clients.types, sourceFileName, target, module, funcName, stmt))), TaskEither.chain((tsCode) => prettierFileName != null | ||
@@ -24,5 +24,5 @@ ? TaskEither.rightTask(() => prettify_1.runPrettier(prettierFileName, tsCode)) | ||
exports.generateTSCode = generateTSCode; | ||
function indexModuleTS(tsModules, options) { | ||
function indexModuleTS(dirPath, nestedDirs, tsModules, options) { | ||
const { prettierFileName = null } = options || {}; | ||
return pipeable_1.pipe(Task.of(codegen_1.generateIndexModule(tsModules)), Task.chain((tsCode) => prettierFileName != null | ||
return pipeable_1.pipe(Task.of(codegen_1.generateIndexModule(dirPath, nestedDirs, tsModules)), Task.chain((tsCode) => prettierFileName != null | ||
? () => prettify_1.runPrettier(prettierFileName, tsCode) | ||
@@ -29,0 +29,0 @@ : Task.of(tsCode))); |
@@ -25,3 +25,3 @@ "use strict"; | ||
} | ||
const options = yield resolveConfig(filePath); | ||
const options = yield resolveConfig(filePath, { editorconfig: true }); | ||
return yield format(tsCode, Object.assign(Object.assign({}, options), { filepath: filePath })); | ||
@@ -28,0 +28,0 @@ }); |
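Passing `{ editorconfig: true }` tells Prettier's `resolveConfig` to also honour `.editorconfig` settings (indentation, line width, end of line) when resolving options for the file. A minimal sketch of the same call in isolation:

```
import { format, resolveConfig } from 'prettier'

// Resolve Prettier options for the target file, letting .editorconfig
// contribute settings, then format the generated code with them.
async function prettifySketch(filePath: string, tsCode: string): Promise<string> {
  const options = await resolveConfig(filePath, { editorconfig: true })
  return format(tsCode, { ...(options ?? {}), filepath: filePath })
}
```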
@@ -30,3 +30,5 @@ "use strict"; | ||
name: tableName, | ||
columns: result.map((col) => ({ | ||
columns: result | ||
.filter((col) => col.attisdropped === false) | ||
.map((col) => ({ | ||
hidden: col.attnum < 0, | ||
@@ -33,0 +35,0 @@ name: col.attname, |
@@ -17,3 +17,3 @@ "use strict"; | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const result = yield sql.unsafe(`SELECT attnum, attname, atttypid, attnotnull | ||
const result = yield sql.unsafe(`SELECT attnum, attname, atttypid, attnotnull, attisdropped | ||
FROM pg_catalog.pg_attribute attr | ||
@@ -20,0 +20,0 @@ JOIN pg_catalog.pg_class cls on attr.attrelid = cls.oid |
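The catalog query now also selects `attisdropped`, and the column list is filtered so that columns removed with `ALTER TABLE ... DROP COLUMN` (which remain in `pg_catalog.pg_attribute` with `attisdropped = true`) no longer leak into the inferred table type. The same exclusion could be pushed into the SQL itself; a sketch under the assumption that a postgres.js-style tagged template is available (the `WHERE` clause here is illustrative, since the real query's filter is not shown in the diff):

```
// Sketch: push the dropped-column exclusion into the query itself. The `sql`
// tag type and the WHERE clause are illustrative stand-ins.
type SqlTag = (strings: TemplateStringsArray, ...params: unknown[]) => Promise<any[]>

async function tableColumns(sql: SqlTag, tableName: string) {
  return sql`
    SELECT attnum, attname, atttypid, attnotnull
    FROM pg_catalog.pg_attribute attr
    JOIN pg_catalog.pg_class cls ON attr.attrelid = cls.oid
    WHERE cls.relname = ${tableName}
      AND NOT attisdropped  -- columns removed with ALTER TABLE ... DROP COLUMN
  `
}
```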
@@ -94,3 +94,3 @@ "use strict"; | ||
[114, 'any'], | ||
[3802, 'any'], | ||
[3802, 'any'], // jsonb | ||
]); | ||
@@ -125,3 +125,3 @@ exports.nodePgBuiltinArrayTypes = new Map([ | ||
[1183, 'string[]'], | ||
[1270, 'string[]'], | ||
[1270, 'string[]'], // timetz[] | ||
]); | ||
@@ -128,0 +128,0 @@ exports.defaultType = 'string'; |
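These maps pair PostgreSQL type OIDs (114 is `json`, 3802 is `jsonb`, 1270 is `timetz[]`) with the TypeScript types emitted for node-postgres, with `defaultType` as the fallback. Presumably they are consulted with a simple lookup along these lines; the helper below is hypothetical:

```
// Hypothetical lookup helper; the real resolution logic in sqltyper may differ.
const nodePgBuiltinTypes = new Map<number, string>([
  [114, 'any'], // json
  [3802, 'any'], // jsonb
])
const defaultType = 'string'

const tsTypeForOid = (oid: number): string =>
  nodePgBuiltinTypes.get(oid) ?? defaultType

tsTypeForOid(3802) // => 'any'
tsTypeForOid(25) // => 'string' (text, and anything unmapped, falls back to the default)
```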
# sqltyper - Type your SQL queries! | ||
![tests](https://github.com/akheron/sqltyper/workflows/tests/badge.svg) | ||
[![tests](https://github.com/akheron/sqltyper/actions/workflows/tests.yml/badge.svg?branch=main)](https://github.com/akheron/sqltyper/actions/workflows/tests.yml) | ||
@@ -267,4 +267,15 @@ SQL is a typed language, but most solutions for using an SQL database from typed | ||
## Releasing | ||
``` | ||
$ yarn version --new-version <major|minor|patch> | ||
$ yarn publish | ||
$ git push origin main --tags | ||
``` | ||
Open https://github.com/akheron/sqltyper/releases, edit the draft release, | ||
select the newest version tag, adjust the description as needed. | ||
[node-postgres]: https://node-postgres.com/ | ||
[postgres.js]: https://github.com/porsager/postgres | ||
[sqlτyped]: https://github.com/jonifreeman/sqltyped |
License Policy Violation
This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added cliui@8.0.1 (transitive)
+ Added yargs@17.7.2 (transitive)
+ Added yargs-parser@21.1.1 (transitive)
- Removed cliui@7.0.4 (transitive)
- Removed yargs@16.2.0 (transitive)
- Removed yargs-parser@20.2.9 (transitive)
Updated yargs@^17.0.0