@lbu/store
Comparing version 0.0.20 to 0.0.21
@@ -26,3 +26,3 @@ import { dirnameForModule } from "@lbu/stdlib"; | ||
export { | ||
createFile, | ||
createOrUpdateFile, | ||
copyFile, | ||
@@ -29,0 +29,0 @@ getFileById, |
{ | ||
"name": "@lbu/store", | ||
"version": "0.0.20", | ||
"version": "0.0.21", | ||
"description": "Postgres & S3-compatible wrappers for common things", | ||
@@ -17,4 +17,4 @@ "main": "./index.js", | ||
"dependencies": { | ||
"@lbu/insight": "^0.0.20", | ||
"@lbu/stdlib": "^0.0.20", | ||
"@lbu/insight": "^0.0.21", | ||
"@lbu/stdlib": "^0.0.21", | ||
"mime-types": "2.1.27", | ||
@@ -38,3 +38,3 @@ "minio": "7.0.16", | ||
}, | ||
"gitHead": "87bbba853bb050f7897c50ea291233297e2366e1" | ||
"gitHead": "b81b7a87aa31416c815ff2105990c7455de4846b" | ||
} |
@@ -9,15 +9,8 @@ # @lbu/store | ||
## Why | ||
Here at [lightbase](https://lightbase.nl) we had a constantly growing | ||
boilerplate for new projects. To facilitate our needs more and to stop copying | ||
and pasting things around this project was born. This project is for now | ||
tailored at smaller & monolithic projects. | ||
## Features | ||
- Minimal API project boilerplate | ||
- Minimal project boilerplate | ||
- Script runner, can watch & reload almost anything (via nodemon) | ||
- Flexible code generators supporting routers, validators, api clients, mocks | ||
and more in the future. | ||
- Flexible code generators supporting routers, validators, api clients, mocks, | ||
CRUD queries and more in the future. | ||
- Opinionated structured logging | ||
@@ -28,5 +21,88 @@ - Common Koa middleware wrapped in a single function | ||
## Requirements | ||
- Node.js >= 14 | ||
- Yarn 1.x.x | ||
## Why | ||
Here at [lightbase](https://lightbase.nl) we had a constantly growing | ||
boilerplate for new projects. To better facilitate our needs, and to stop | ||
copying and pasting things around, this project was born. For now, this | ||
project is tailored to monolithic projects. | ||
## Features breakdown | ||
**@lbu/cli**: | ||
- Run user scripts (in watch mode) | ||
- Run the linter or tests | ||
- An LBU-based boilerplate | ||
**@lbu/lint-config**: | ||
- All necessary ESLint and Prettier dependencies | ||
- Default configuration for ESLint and Prettier | ||
**@lbu/insight**: | ||
- A structured logger | ||
- Writing newline delimited JSON in production | ||
- Pretty printing for development | ||
- Various utilities to get insight in the running process | ||
- Parser to process production logs in an external process | ||
**@lbu/stdlib**: | ||
- Various lodash inspired utilities (isNil, isPlainObject, ...) | ||
- Wrappers for child_process execution and spawning | ||
- Basic templating system | ||
- A `mainFn` wrapper that reads `.env` and calls the provided function if the | ||
file is the process entrypoint | ||
- Replacements for CommonJS `__dirname` and `__filename` | ||
**@lbu/server**: | ||
- Wrapper around Koa instance creation | ||
- 404 and error handling | ||
- Handle CORS | ||
- Send file helper | ||
- Re-exports koa-session and koa-compose | ||
**@lbu/store**: | ||
- Wrapper around the Minio S3 client | ||
- Wrapper around Postgres connection | ||
- Utilities for providing temporary databases in a test environment | ||
- Postgres migrations | ||
- Postgres and S3 combined for file storage | ||
- Caching files from S3 in memory or on local disk | ||
- Postgres powered JobQueue implementation | ||
- Supports priority, scheduling, multiple async workers | ||
- koa-session compatible SessionStore backed by Postgres | ||
**@lbu/code-gen**: | ||
- Code generators for the following: | ||
- router, with wildcard and path parameter support | ||
- validators, with pre- and postValidate hooks | ||
- queries, CRUD postgres queries | ||
- Axios based api client | ||
- Typescript or JSDoc types | ||
- Generated mocks | ||
- An extendable set of types: | ||
- boolean, number, string; | ||
- object, array, any; | ||
- date, uuid; | ||
- generic, anyOf, reference; | ||
- Remote structure loader | ||
- OpenAPI to LBU converter | ||
- Generate stubs (types and structure only) so packages can also use LBU | ||
- router structure | ||
- api client structure (Typescript supported) | ||
- and of course the normal types generator | ||
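The breakdown above maps onto a small amount of glue code. Below is a minimal sketch of wiring @lbu/store together, assuming the helpers shown in the diffs further down (`newPostgresConnection`, `newMinioClient`, `createOrUpdateFile`, `addJobToQueue`) are all re-exported from the package root; the bucket and job names are illustrative.

```js
import {
  newPostgresConnection,
  newMinioClient,
  createOrUpdateFile,
  addJobToQueue,
} from "@lbu/store";

async function main() {
  // Connect to Postgres, creating the database if it does not exist yet
  const sql = await newPostgresConnection({ createIfNotExists: true });
  // S3-compatible client; credentials come from the options or the environment
  const minio = newMinioClient({});

  // A FileStoreContext is a plain { sql, minio, bucketName } object
  const fc = { sql, minio, bucketName: "uploads" };

  // Store a file from a local path; contentType is derived from the filename
  const file = await createOrUpdateFile(
    fc,
    { filename: "avatar.png" },
    "./avatar.png",
  );

  // Put a job on the Postgres powered queue, scheduled for immediate pickup
  await addJobToQueue(sql, { name: "resizeImage", data: { fileId: file.id } });
}

main().catch((e) => {
  console.error(e);
  process.exit(1);
});
```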
## Docs | ||
See [/docs](/docs/README.md) | ||
See [/docs](/docs/README.md) for all available APIs and various guides. | ||
@@ -33,0 +109,0 @@ ## Development |
@@ -13,15 +13,22 @@ import { isNil } from "@lbu/stdlib"; | ||
* @name FileCacheOptions | ||
* | ||
* @typedef {object} | ||
* @property {number} inMemoryThreshold | ||
* @property {string} cacheControlHeader | ||
* @property {number} inMemoryThreshold Maximum byte size of a file to be stored in | ||
* memory | ||
* @property {string} cacheControlHeader Customize default Cache-Control header to give | ||
* back | ||
*/ | ||
/** | ||
* @class FileCache | ||
* @name FileCache | ||
* | ||
* @class | ||
* A relatively simple local file cache implementation. | ||
* Supports saving files in memory and on local disk. | ||
* Files#content_length smaller than the provided threshold will be stored in memory. | ||
* Files#contentLength smaller than the provided threshold will be stored in memory. | ||
* A file will always be cached in full, and then the range requests will be evaluated | ||
* The FileCache#clear does not remove files from disk, but will overwrite the file when | ||
* added to the cache again | ||
* after. The FileCache#clear does not remove files from disk, but will overwrite the | ||
* file when it is added to the cache again | ||
* | ||
* FileCache#getFileStream is compatible with `sendFile` in @lbu/server | ||
*/ | ||
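A minimal caller-side sketch of the class documented above. The constructor and method signatures are taken from this diff; the export names and the exact shape of the returned file part are assumptions.

```js
import { FileCache, getFileById } from "@lbu/store"; // export names assumed

// fc is a FileStoreContext ({ sql, minio, bucketName }) created elsewhere
export function newThumbnailCache(fc) {
  // Files smaller than 1 MiB live in memory, larger files are cached on disk
  return new FileCache(fc, {
    inMemoryThreshold: 1024 * 1024,
    cacheControlHeader: "public, max-age=120",
  });
}

export async function readFirstKilobyte(cache, fc, fileId) {
  const file = await getFileById(fc, fileId);
  // The whole file is cached first, then the range [0, 1024] is evaluated
  const filePart = await cache.getFileStream(file, 0, 1024);

  // cache.getStreamFn (the pre-bound variant) plugs into sendFile of @lbu/server.
  // Evicting only clears the bookkeeping; the disk copy is overwritten on re-add
  cache.clear(file.id);
  return filePart;
}
```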
@@ -33,2 +40,3 @@ export class FileCache { | ||
* Create a new file cache | ||
* | ||
* @param {FileStoreContext} fileStore | ||
@@ -45,12 +53,19 @@ * @param {FileCacheOptions} [options] | ||
this.fileCache = new Set(); | ||
/** | ||
* Pre-bind call to this#getFileStream | ||
* | ||
* @type {typeof FileCache#getFileStream} | ||
*/ | ||
this.getStreamFn = this.getFileStream.bind(this); | ||
} | ||
/** | ||
* @public | ||
* Get a file(part) from the cache. | ||
* Get a file(part) from the cache. | ||
* If the file(part) does not exist, it will try to fetch it from the FileStore | ||
* If the file store throws an error or the file doesn't exist, the error is propagated | ||
* to the caller | ||
* @param {FileProps} file | ||
* | ||
* @public | ||
* @param {StoreFileStore} file | ||
* @param {number} [start] | ||
@@ -64,4 +79,4 @@ * @param {number} [end] | ||
} | ||
if (isNil(end) || end > file.content_length) { | ||
end = file.content_length; | ||
if (isNil(end) || end > file.contentLength) { | ||
end = file.contentLength; | ||
} | ||
@@ -74,3 +89,3 @@ const cacheKey = file.id; | ||
return this.loadFromDiskCache(cacheKey, file.id, start, end); | ||
} else if (file.content_length > this.inMemoryThreshold) { | ||
} else if (file.contentLength > this.inMemoryThreshold) { | ||
return this.cacheFileOnDisk(cacheKey, file.id, start, end); | ||
@@ -83,4 +98,5 @@ } else { | ||
/** | ||
* Remove a file from cache, but not from local disk | ||
* | ||
* @public | ||
* Remove a file from cache, but not from local disk | ||
* @param {string} fileId | ||
@@ -112,2 +128,7 @@ */ | ||
* Load the file from the disk cache; if it is not cached yet, pull it in. | ||
* | ||
* @param {string} key | ||
* @param {string} id | ||
* @param {number} start | ||
* @param {number} end | ||
*/ | ||
@@ -136,2 +157,7 @@ async loadFromDiskCache(key, id, start, end) { | ||
* Transforms the buffer to a stream for consistency | ||
* | ||
* @param {string} key | ||
* @param {string} id | ||
* @param {number} start | ||
* @param {number} end | ||
*/ | ||
@@ -159,2 +185,7 @@ async cacheFileInMemory(key, id, start, end) { | ||
* Save file on disk and return a new Readable | ||
* | ||
* @param {string} key | ||
* @param {string} id | ||
* @param {number} start | ||
* @param {number} end | ||
*/ | ||
@@ -161,0 +192,0 @@ async cacheFileOnDisk(key, id, start, end) { |
src/files.js
import { uuid } from "@lbu/stdlib"; | ||
import { createReadStream } from "fs"; | ||
import mime from "mime-types"; | ||
import { storeQueries } from "./generated/queries.js"; | ||
import { listObjects } from "./minio.js"; | ||
const queries = { | ||
copyFile: (sql, targetId, targetBucket, sourceId, sourceBucket) => | ||
sql`INSERT INTO file_store (id, bucket_name, content_type, content_length, filename) SELECT ${targetId}, ${targetBucket}, content_type, content_length, filename FROM file_store WHERE id = ${sourceId} AND bucket_name = ${sourceBucket} RETURNING id`, | ||
}; | ||
/** | ||
* @typedef {object} FileStoreContext | ||
* @property {postgres} sql | ||
* @name FileStoreContext | ||
* | ||
* @typedef {object} | ||
* @property {Postgres} sql | ||
* @property {minio.Client} minio | ||
@@ -14,14 +22,3 @@ * @property {string} bucketName | ||
/** | ||
* @typedef {object} FileProps | ||
* @property {string} [id] | ||
* @property {string} bucket_name | ||
* @property {number} content_length | ||
* @property {string} content_type | ||
* @property {string} filename | ||
* @property {Date} created_at | ||
* @property {Date} updated_at | ||
*/ | ||
/** | ||
* @param {postgres} sql | ||
* @param {Postgres} sql | ||
* @param {minio.Client} minio | ||
@@ -39,21 +36,29 @@ * @param {string} bucketName | ||
/** | ||
* Create or update a file | ||
* Create or update a file. | ||
* If you pass in a non-existent id, the function will not error, but it will also not | ||
* update the file | ||
* | ||
* @param {FileStoreContext} fc | ||
* @param {FileProps} props | ||
* @param {StoreFileStoreInsertPartial_Input & { id?: string }} props | ||
* @param {ReadStream|string} streamOrPath | ||
* @return {Promise<FileProps>} | ||
* @returns {Promise<StoreFileStore>} | ||
*/ | ||
export async function createFile(fc, props, streamOrPath) { | ||
if (!props.id) { | ||
props.id = uuid(); | ||
} | ||
export async function createOrUpdateFile(fc, props, streamOrPath) { | ||
if (!props.filename) { | ||
throw new Error("filename is required on file props"); | ||
} | ||
if (!props.content_type) { | ||
props.content_type = mime.lookup(props.filename); | ||
if (!props.contentType) { | ||
props.contentType = mime.lookup(props.filename) || "*/*"; | ||
} | ||
props.updated_at = new Date(); | ||
props.bucket_name = fc.bucketName; | ||
props.bucketName = fc.bucketName; | ||
// Do a manual insert first to get an id | ||
if (!props.id) { | ||
props.contentLength = 0; | ||
const [intermediate] = await storeQueries.fileStoreInsert(fc.sql, props); | ||
props.id = intermediate.id; | ||
} | ||
if (typeof streamOrPath === "string") { | ||
@@ -64,22 +69,10 @@ streamOrPath = createReadStream(streamOrPath); | ||
await fc.minio.putObject(fc.bucketName, props.id, streamOrPath, { | ||
"content-type": props.content_type, | ||
"content-type": props.contentType, | ||
}); | ||
const stat = await fc.minio.statObject(fc.bucketName, props.id); | ||
props.content_length = stat.size; | ||
props.contentLength = stat.size; | ||
const [result] = await fc.sql`INSERT INTO file_store ${fc.sql( | ||
props, | ||
"id", | ||
"bucket_name", | ||
"content_length", | ||
"content_type", | ||
"filename", | ||
"updated_at", | ||
)} ON CONFLICT(id) DO UPDATE SET ${fc.sql( | ||
props, | ||
"content_length", | ||
"content_type", | ||
"filename", | ||
"updated_at", | ||
)} RETURNING *`; | ||
const [result] = await storeQueries.fileStoreUpdate(fc.sql, props, { | ||
id: props.id, | ||
}); | ||
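From the caller's side, the rewritten function behaves as sketched below; the `fc` context and file names are illustrative, and the export of getFileStream from the package root is assumed.

```js
import { createReadStream } from "fs";
import { createOrUpdateFile, getFileStream } from "@lbu/store";

export async function storeAndStream(fc) {
  // Create: without an id, the manual insert above generates one first
  const created = await createOrUpdateFile(
    fc,
    { filename: "report.pdf" },
    createReadStream("./report.pdf"),
  );

  // Update: an existing id overwrites both the S3 object and the row,
  // writing the previous row to file_store_history via fileStoreUpdate
  await createOrUpdateFile(
    fc,
    { id: created.id, filename: "report-v2.pdf" },
    "./report-v2.pdf",
  );

  // Stream a byte range back out of the bucket
  return getFileStream(fc, created.id, { start: 0, end: 500 });
}
```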
@@ -92,8 +85,9 @@ return result; | ||
* @param {string} id | ||
* @return {Promise<FileProps|undefined>} | ||
* @returns {Promise<StoreFileStore|undefined>} | ||
*/ | ||
export async function getFileById(fc, id) { | ||
const [ | ||
result, | ||
] = await fc.sql`SELECT id, bucket_name, content_type, content_length, filename, created_at, updated_at FROM file_store WHERE id = ${id} AND bucket_name = ${fc.bucketName}`; | ||
const [result] = await storeQueries.fileStoreSelect(fc.sql, { | ||
id, | ||
bucketName: fc.bucketName, | ||
}); | ||
@@ -108,3 +102,3 @@ return result; | ||
* @param {number} [end] | ||
* @return {Promise<ReadableStream>} | ||
* @returns {Promise<ReadableStream>} | ||
*/ | ||
@@ -126,24 +120,45 @@ export async function getFileStream(fc, id, { start, end } = {}) { | ||
* @param {string} [targetBucket=fc.bucketName] | ||
* @return {Promise<FileProps>} | ||
* @returns {Promise<StoreFileStore>} | ||
*/ | ||
export async function copyFile(fc, id, targetBucket = fc.bucketName) { | ||
const [ | ||
result, | ||
] = await fc.sql`INSERT INTO file_store (id, bucket_name, content_type, content_length, filename) SELECT ${uuid()}, ${targetBucket}, content_type, content_length, filename FROM file_store WHERE id = ${id} AND bucket_name = ${ | ||
fc.bucketName | ||
} RETURNING *`; | ||
const [intermediate] = await queries.copyFile( | ||
fc.sql, | ||
uuid(), | ||
targetBucket, | ||
id, | ||
fc.bucketName, | ||
); | ||
await fc.minio.copyObject(targetBucket, result.id, `${fc.bucketName}/${id}`); | ||
await fc.minio.copyObject( | ||
targetBucket, | ||
intermediate.id, | ||
`${fc.bucketName}/${id}`, | ||
); | ||
const [result] = await storeQueries.fileStoreSelect(fc.sql, { | ||
id: intermediate.id, | ||
}); | ||
return result; | ||
} | ||
/** | ||
* @param {FileStoreContext} fc | ||
* @param {string} id | ||
*/ | ||
export async function deleteFile(fc, id) { | ||
return fc.sql`DELETE FROM file_store WHERE id = ${id} AND bucket_name = ${fc.bucketName}`; | ||
return storeQueries.fileStoreDelete(fc.sql, { | ||
id, | ||
bucketName: fc.bucketName, | ||
}); | ||
} | ||
/** | ||
* @param {FileStoreContext} fc | ||
*/ | ||
export async function syncDeletedFiles(fc) { | ||
const minioObjectsPromise = listObjects(fc.minio, fc.bucketName); | ||
const knownIds = await fc.sql`SELECT DISTINCT(id) | ||
FROM file_store WHERE bucket_name = ${fc.bucketName}`; | ||
const knownIds = await storeQueries.fileStoreSelect(fc.sql, { | ||
bucketName: fc.bucketName, | ||
}); | ||
@@ -150,0 +165,0 @@ const ids = knownIds.map((it) => it.id); |
@@ -1,4 +0,6 @@ | ||
// Generated by @lbu/code-gen at 2020-05-22T08:17:51.893Z | ||
/* eslint-disable no-unused-vars */ | ||
// Generated by @lbu/code-gen at 2020-06-07T17:30:55.646Z | ||
/* eslint-disable no-unused-vars, jsdoc/valid-types, jsdoc/require-returns-description, jsdoc/no-undefined-types, jsdoc/require-property-description */ | ||
import { isNil, uuid } from "@lbu/stdlib"; | ||
export const storeQueries = { | ||
@@ -10,4 +12,16 @@ /** | ||
*/ | ||
fileStoreSelect: (sql, where) => | ||
sql`SELECT id, bucket_name, content_length, content_type, filename, created_at, updated_at FROM file_store WHERE (COALESCE(${where.id}, NULL) IS NULL OR id = ${where.id}) AND (COALESCE(${where.bucket_name}, NULL) IS NULL OR bucket_name = ${where.bucket_name})`, | ||
fileStoreSelect: ( | ||
sql, | ||
where, | ||
) => sql`SELECT fs.id as "id", fs.bucket_name as "bucketName", fs.content_length as "contentLength", fs.content_type as "contentType", fs.filename as "filename", fs.created_at as "createdAt", fs.updated_at as "updatedAt" FROM file_store fs | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR fs.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR fs.id IN (${ | ||
where.idIn | ||
})) AND (COALESCE(${where.bucketName}, NULL) IS NULL OR fs.bucket_name = ${ | ||
where.bucketName | ||
}) AND (COALESCE(${ | ||
where.bucketNameLike | ||
}, NULL) IS NULL OR fs.bucket_name LIKE ${"%" + where.bucketNameLike + "%"}) | ||
`, | ||
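Every generated where clause uses the same guard, `COALESCE(${where.field}, NULL) IS NULL OR ...`: a filter that is left undefined collapses to a no-op, so only the keys you pass constrain the query. A caller-side sketch follows; the import path is an assumption (inside the package it is `./generated/queries.js`).

```js
import { storeQueries } from "@lbu/store/src/generated/queries.js"; // path assumed

export async function selectExamples(sql, idA, idB) {
  // Only bucketName is bound; the id, idIn and bucketNameLike guards are no-ops
  const all = await storeQueries.fileStoreSelect(sql, { bucketName: "uploads" });

  // idIn feeds the generated `fs.id IN (...)` branch
  const some = await storeQueries.fileStoreSelect(sql, { idIn: [idA, idB] });

  // bucketNameLike becomes a LIKE match; the '%' padding is added by the query
  const like = await storeQueries.fileStoreSelect(sql, { bucketNameLike: "upl" });

  return { all, some, like };
}
```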
@@ -17,14 +31,18 @@ /** | ||
* @param { StoreFileStoreWhere} where | ||
* @param { StoreFileStorePartial} partial | ||
* @returns {Promise<StoreFileStore[]>} | ||
* @returns {Promise<number>} | ||
*/ | ||
fileStoreUpdate: (sql, where, partial) => | ||
sql`UPDATE file_store SET ${sql( | ||
partial, | ||
...Object.keys(partial), | ||
)} WHERE (COALESCE(${where.id}, NULL) IS NULL OR id = ${ | ||
fileStoreCount: async (sql, where) => { | ||
const result = await sql`SELECT count(*) as gen_count FROM file_store fs | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR fs.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.bucket_name}, NULL) IS NULL OR bucket_name = ${ | ||
where.bucket_name | ||
}) RETURNING id, bucket_name, content_length, content_type, filename, created_at, updated_at`, | ||
}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR fs.id IN (${ | ||
where.idIn | ||
})) AND (COALESCE(${where.bucketName}, NULL) IS NULL OR fs.bucket_name = ${ | ||
where.bucketName | ||
}) AND (COALESCE(${ | ||
where.bucketNameLike | ||
}, NULL) IS NULL OR fs.bucket_name LIKE ${"%" + where.bucketNameLike + "%"}) | ||
`; | ||
return result?.[0]?.gen_count ?? 0; | ||
}, | ||
@@ -34,19 +52,43 @@ /** | ||
* @param { StoreFileStoreWhere} where | ||
* @returns {Promise<void>} | ||
* @returns {Promise<*[]>} | ||
*/ | ||
fileStoreDelete: (sql, where) => | ||
sql`DELETE FROM file_store WHERE (COALESCE(${where.id}, NULL) IS NULL OR id = ${where.id}) AND (COALESCE(${where.bucket_name}, NULL) IS NULL OR bucket_name = ${where.bucket_name})`, | ||
fileStoreDelete: (sql, where) => sql`DELETE FROM file_store fs | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR fs.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR fs.id IN (${ | ||
where.idIn | ||
})) AND (COALESCE(${where.bucketName}, NULL) IS NULL OR fs.bucket_name = ${ | ||
where.bucketName | ||
}) AND (COALESCE(${ | ||
where.bucketNameLike | ||
}, NULL) IS NULL OR fs.bucket_name LIKE ${"%" + where.bucketNameLike + "%"}) | ||
`, | ||
/** | ||
* @param sql | ||
* @param { StoreFileStoreCreate} input | ||
* @param { StoreFileStoreInsertPartial_Input|StoreFileStoreInsertPartial_Input[]} insert | ||
* @returns {Promise<StoreFileStore[]>} | ||
*/ | ||
fileStoreCreate: (sql, input) => { | ||
input.created_at = input.created_at || new Date(); | ||
input.updated_at = input.updated_at || new Date(); | ||
fileStoreInsert: (sql, insert) => { | ||
const data = Array.isArray(insert) ? insert : [insert]; | ||
const input = []; | ||
for (const it of data) { | ||
input.push({ | ||
bucket_name: it.bucketName ?? undefined, | ||
content_length: it.contentLength ?? undefined, | ||
content_type: it.contentType ?? undefined, | ||
filename: it.filename ?? undefined, | ||
created_at: it.createdAt ?? new Date(), | ||
updated_at: it.updatedAt ?? new Date(), | ||
}); | ||
} | ||
return sql`INSERT INTO file_store ${sql( | ||
input, | ||
...Object.keys(input), | ||
)} RETURNING id, bucket_name, content_length, content_type, filename, created_at, updated_at`; | ||
"bucket_name", | ||
"content_length", | ||
"content_type", | ||
"filename", | ||
"created_at", | ||
"updated_at", | ||
)} RETURNING id as "id", bucket_name as "bucketName", content_length as "contentLength", content_type as "contentType", filename as "filename", created_at as "createdAt", updated_at as "updatedAt"`; | ||
}, | ||
@@ -56,7 +98,86 @@ | ||
* @param sql | ||
* @param { StoreFileStoreInsertPartial_Input} value | ||
* @param { StoreFileStoreWhere} where | ||
* @returns {Promise<StoreFileStore[]>} | ||
*/ | ||
fileStoreUpdate: async (sql, value, where) => { | ||
const updateValue = {}; | ||
if (!isNil(value["bucketName"])) { | ||
updateValue["bucket_name"] = value["bucketName"]; | ||
} | ||
if (!isNil(value["contentLength"])) { | ||
updateValue["content_length"] = value["contentLength"]; | ||
} | ||
if (!isNil(value["contentType"])) { | ||
updateValue["content_type"] = value["contentType"]; | ||
} | ||
if (!isNil(value["filename"])) { | ||
updateValue["filename"] = value["filename"]; | ||
} | ||
if (!isNil(value["createdAt"])) { | ||
updateValue["created_at"] = value["createdAt"]; | ||
} | ||
if (!isNil(value["updatedAt"])) { | ||
updateValue["updated_at"] = value["updatedAt"]; | ||
} | ||
updateValue.updated_at = new Date(); | ||
await sql`INSERT INTO file_store_history (file_store_id, bucket_name, content_length, content_type, filename, created_at) | ||
SELECT id, bucket_name, content_length, content_type, filename, COALESCE(updated_at, created_at, now()) FROM file_store fs | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR fs.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR fs.id IN (${ | ||
where.idIn | ||
})) AND (COALESCE(${where.bucketName}, NULL) IS NULL OR fs.bucket_name = ${ | ||
where.bucketName | ||
}) AND (COALESCE(${ | ||
where.bucketNameLike | ||
}, NULL) IS NULL OR fs.bucket_name LIKE ${"%" + where.bucketNameLike + "%"}) | ||
`; | ||
return sql`UPDATE file_store fs set ${sql( | ||
updateValue, | ||
...Object.keys(updateValue), | ||
)} | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR fs.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR fs.id IN (${ | ||
where.idIn | ||
})) AND (COALESCE(${where.bucketName}, NULL) IS NULL OR fs.bucket_name = ${ | ||
where.bucketName | ||
}) AND (COALESCE(${ | ||
where.bucketNameLike | ||
}, NULL) IS NULL OR fs.bucket_name LIKE ${"%" + where.bucketNameLike + "%"}) | ||
RETURNING fs.id as "id", fs.bucket_name as "bucketName", fs.content_length as "contentLength", fs.content_type as "contentType", fs.filename as "filename", fs.created_at as "createdAt", fs.updated_at as "updatedAt"`; | ||
}, | ||
/** | ||
* @param sql | ||
* @param { StoreFileStoreWhere} where | ||
* @returns {Promise<(StoreFileStore & (history: StoreFileStore[]))[]>} | ||
*/ | ||
fileStoreSelectHistory: ( | ||
sql, | ||
where, | ||
) => sql`SELECT fs.id as "id", fs.bucket_name as "bucketName", fs.content_length as "contentLength", fs.content_type as "contentType", fs.filename as "filename", fs.created_at as "createdAt", fs.updated_at as "updatedAt", array_agg(fsh.*) as history FROM file_store fs LEFT JOIN file_store_history fsh ON fs.id = fsh.file_store_id | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR fs.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR fs.id IN (${ | ||
where.idIn | ||
})) AND (COALESCE(${where.bucketName}, NULL) IS NULL OR fs.bucket_name = ${ | ||
where.bucketName | ||
}) AND (COALESCE(${ | ||
where.bucketNameLike | ||
}, NULL) IS NULL OR fs.bucket_name LIKE ${"%" + where.bucketNameLike + "%"}) | ||
GROUP BY fs.id`, | ||
/** | ||
* @param sql | ||
* @param { StoreSessionStoreWhere} where | ||
* @returns {Promise<StoreSessionStore[]>} | ||
*/ | ||
sessionStoreSelect: (sql, where) => | ||
sql`SELECT id, expires, data FROM session_store WHERE (COALESCE(${where.id}, NULL) IS NULL OR id = ${where.id})`, | ||
sessionStoreSelect: ( | ||
sql, | ||
where, | ||
) => sql`SELECT ss.id as "id", ss.expires as "expires", ss.data as "data", ss.created_at as "createdAt", ss.updated_at as "updatedAt" FROM session_store ss | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR ss.id = ${where.id}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR ss.id IN (${where.idIn})) AND (COALESCE(${where.expires}, NULL) IS NULL OR ss.expires = ${where.expires}) AND (COALESCE(${where.expiresGreaterThan}, NULL) IS NULL OR ss.expires > ${where.expiresGreaterThan}) AND (COALESCE(${where.expiresLowerThan}, NULL) IS NULL OR ss.expires < ${where.expiresLowerThan}) | ||
`, | ||
@@ -66,12 +187,10 @@ /** | ||
* @param { StoreSessionStoreWhere} where | ||
* @param { StoreSessionStorePartial} partial | ||
* @returns {Promise<StoreSessionStore[]>} | ||
* @returns {Promise<number>} | ||
*/ | ||
sessionStoreUpdate: (sql, where, partial) => | ||
sql`UPDATE session_store SET ${sql( | ||
partial, | ||
...Object.keys(partial), | ||
)} WHERE (COALESCE(${where.id}, NULL) IS NULL OR id = ${ | ||
where.id | ||
}) RETURNING id, expires, data`, | ||
sessionStoreCount: async (sql, where) => { | ||
const result = await sql`SELECT count(*) as gen_count FROM session_store ss | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR ss.id = ${where.id}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR ss.id IN (${where.idIn})) AND (COALESCE(${where.expires}, NULL) IS NULL OR ss.expires = ${where.expires}) AND (COALESCE(${where.expiresGreaterThan}, NULL) IS NULL OR ss.expires > ${where.expiresGreaterThan}) AND (COALESCE(${where.expiresLowerThan}, NULL) IS NULL OR ss.expires < ${where.expiresLowerThan}) | ||
`; | ||
return result?.[0]?.gen_count ?? 0; | ||
}, | ||
@@ -81,18 +200,31 @@ /** | ||
* @param { StoreSessionStoreWhere} where | ||
* @returns {Promise<void>} | ||
* @returns {Promise<*[]>} | ||
*/ | ||
sessionStoreDelete: (sql, where) => | ||
sql`DELETE FROM session_store WHERE (COALESCE(${where.id}, NULL) IS NULL OR id = ${where.id})`, | ||
sessionStoreDelete: (sql, where) => sql`DELETE FROM session_store ss | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR ss.id = ${where.id}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR ss.id IN (${where.idIn})) AND (COALESCE(${where.expires}, NULL) IS NULL OR ss.expires = ${where.expires}) AND (COALESCE(${where.expiresGreaterThan}, NULL) IS NULL OR ss.expires > ${where.expiresGreaterThan}) AND (COALESCE(${where.expiresLowerThan}, NULL) IS NULL OR ss.expires < ${where.expiresLowerThan}) | ||
`, | ||
/** | ||
* @param sql | ||
* @param { StoreSessionStoreCreate} input | ||
* @param { StoreSessionStoreInsertPartial_Input|StoreSessionStoreInsertPartial_Input[]} insert | ||
* @returns {Promise<StoreSessionStore[]>} | ||
*/ | ||
sessionStoreCreate: (sql, input) => { | ||
input.data = input.data || {}; | ||
sessionStoreInsert: (sql, insert) => { | ||
const data = Array.isArray(insert) ? insert : [insert]; | ||
const input = []; | ||
for (const it of data) { | ||
input.push({ | ||
expires: it.expires ?? undefined, | ||
data: JSON.stringify(it.data ?? {}), | ||
created_at: it.createdAt ?? new Date(), | ||
updated_at: it.updatedAt ?? new Date(), | ||
}); | ||
} | ||
return sql`INSERT INTO session_store ${sql( | ||
input, | ||
...Object.keys(input), | ||
)} RETURNING id, expires, data`; | ||
"expires", | ||
"data", | ||
"created_at", | ||
"updated_at", | ||
)} RETURNING id as "id", expires as "expires", data as "data", created_at as "createdAt", updated_at as "updatedAt"`; | ||
}, | ||
@@ -102,7 +234,91 @@ | ||
* @param sql | ||
* @param { StoreSessionStoreInsertPartial_Input} value | ||
* @param { StoreSessionStoreWhere} where | ||
* @returns {Promise<StoreSessionStore[]>} | ||
*/ | ||
sessionStoreUpdate: (sql, value, where) => { | ||
const updateValue = {}; | ||
if (!isNil(value["expires"])) { | ||
updateValue["expires"] = value["expires"]; | ||
} | ||
if (!isNil(value["data"])) { | ||
updateValue["data"] = value["data"]; | ||
} | ||
if (!isNil(value["createdAt"])) { | ||
updateValue["created_at"] = value["createdAt"]; | ||
} | ||
if (!isNil(value["updatedAt"])) { | ||
updateValue["updated_at"] = value["updatedAt"]; | ||
} | ||
updateValue.updated_at = new Date(); | ||
return sql`UPDATE session_store ss set ${sql( | ||
updateValue, | ||
...Object.keys(updateValue), | ||
)} | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR ss.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.idIn}, NULL) IS NULL OR ss.id IN (${ | ||
where.idIn | ||
})) AND (COALESCE(${where.expires}, NULL) IS NULL OR ss.expires = ${ | ||
where.expires | ||
}) AND (COALESCE(${ | ||
where.expiresGreaterThan | ||
}, NULL) IS NULL OR ss.expires > ${ | ||
where.expiresGreaterThan | ||
}) AND (COALESCE(${where.expiresLowerThan}, NULL) IS NULL OR ss.expires < ${ | ||
where.expiresLowerThan | ||
}) | ||
RETURNING ss.id as "id", ss.expires as "expires", ss.data as "data", ss.created_at as "createdAt", ss.updated_at as "updatedAt"`; | ||
}, | ||
/** | ||
* @param sql | ||
* @param { StoreSessionStoreInsertPartial_Input & { id?: string } } it | ||
* @returns {Promise<StoreSessionStore[]>} | ||
*/ | ||
sessionStoreUpsert: (sql, it) => { | ||
const data = { | ||
expires: it.expires ?? undefined, | ||
data: JSON.stringify(it.data ?? {}), | ||
created_at: it.createdAt ?? new Date(), | ||
updated_at: it.updatedAt ?? new Date(), | ||
}; | ||
data.id = it.id || uuid(); | ||
return sql`INSERT INTO session_store ${sql( | ||
data, | ||
"id", | ||
"expires", | ||
"data", | ||
"created_at", | ||
"updated_at", | ||
)} ON CONFLICT (id) DO UPDATE SET ${sql( | ||
data, | ||
"expires", | ||
"data", | ||
"created_at", | ||
"updated_at", | ||
)} RETURNING id as "id", expires as "expires", data as "data", created_at as "createdAt", updated_at as "updatedAt"`; | ||
}, | ||
/** | ||
* @param sql | ||
* @param { StoreJobQueueWhere} where | ||
* @returns {Promise<StoreJobQueue[]>} | ||
*/ | ||
jobQueueSelect: (sql, where) => | ||
sql`SELECT id, is_complete, priority, scheduled_at, created_at, updated_at, name, data FROM job_queue WHERE (COALESCE(${where.id}, NULL) IS NULL OR id = ${where.id}) AND (COALESCE(${where.name}, NULL) IS NULL OR name = ${where.name})`, | ||
jobQueueSelect: ( | ||
sql, | ||
where, | ||
) => sql`SELECT jq.id as "id", jq.is_complete as "isComplete", jq.priority as "priority", jq.scheduled_at as "scheduledAt", jq.name as "name", jq.data as "data", jq.created_at as "createdAt", jq.updated_at as "updatedAt" FROM job_queue jq | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR jq.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.idGreaterThan}, NULL) IS NULL OR jq.id > ${ | ||
where.idGreaterThan | ||
}) AND (COALESCE(${where.idLowerThan}, NULL) IS NULL OR jq.id < ${ | ||
where.idLowerThan | ||
}) AND (COALESCE(${where.name}, NULL) IS NULL OR jq.name = ${ | ||
where.name | ||
}) AND (COALESCE(${where.nameLike}, NULL) IS NULL OR jq.name LIKE ${ | ||
"%" + where.nameLike + "%" | ||
}) | ||
`, | ||
@@ -112,14 +328,20 @@ /** | ||
* @param { StoreJobQueueWhere} where | ||
* @param { StoreJobQueuePartial} partial | ||
* @returns {Promise<StoreJobQueue[]>} | ||
* @returns {Promise<number>} | ||
*/ | ||
jobQueueUpdate: (sql, where, partial) => | ||
sql`UPDATE job_queue SET ${sql( | ||
partial, | ||
...Object.keys(partial), | ||
)} WHERE (COALESCE(${where.id}, NULL) IS NULL OR id = ${ | ||
jobQueueCount: async (sql, where) => { | ||
const result = await sql`SELECT count(*) as gen_count FROM job_queue jq | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR jq.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.name}, NULL) IS NULL OR name = ${ | ||
}) AND (COALESCE(${where.idGreaterThan}, NULL) IS NULL OR jq.id > ${ | ||
where.idGreaterThan | ||
}) AND (COALESCE(${where.idLowerThan}, NULL) IS NULL OR jq.id < ${ | ||
where.idLowerThan | ||
}) AND (COALESCE(${where.name}, NULL) IS NULL OR jq.name = ${ | ||
where.name | ||
}) RETURNING id, is_complete, priority, scheduled_at, created_at, updated_at, name, data`, | ||
}) AND (COALESCE(${where.nameLike}, NULL) IS NULL OR jq.name LIKE ${ | ||
"%" + where.nameLike + "%" | ||
}) | ||
`; | ||
return result?.[0]?.gen_count ?? 0; | ||
}, | ||
@@ -129,23 +351,134 @@ /** | ||
* @param { StoreJobQueueWhere} where | ||
* @returns {Promise<void>} | ||
* @returns {Promise<*[]>} | ||
*/ | ||
jobQueueDelete: (sql, where) => | ||
sql`DELETE FROM job_queue WHERE (COALESCE(${where.id}, NULL) IS NULL OR id = ${where.id}) AND (COALESCE(${where.name}, NULL) IS NULL OR name = ${where.name})`, | ||
jobQueueDelete: (sql, where) => sql`DELETE FROM job_queue jq | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR jq.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.idGreaterThan}, NULL) IS NULL OR jq.id > ${ | ||
where.idGreaterThan | ||
}) AND (COALESCE(${where.idLowerThan}, NULL) IS NULL OR jq.id < ${ | ||
where.idLowerThan | ||
}) AND (COALESCE(${where.name}, NULL) IS NULL OR jq.name = ${ | ||
where.name | ||
}) AND (COALESCE(${where.nameLike}, NULL) IS NULL OR jq.name LIKE ${ | ||
"%" + where.nameLike + "%" | ||
}) | ||
`, | ||
/** | ||
* @param sql | ||
* @param { StoreJobQueueCreate} input | ||
* @param { StoreJobQueueInsertPartial_Input|StoreJobQueueInsertPartial_Input[]} insert | ||
* @returns {Promise<StoreJobQueue[]>} | ||
*/ | ||
jobQueueCreate: (sql, input) => { | ||
input.is_complete = input.is_complete || false; | ||
input.priority = input.priority || 0; | ||
input.scheduled_at = input.scheduled_at || new Date(); | ||
input.created_at = input.created_at || new Date(); | ||
input.data = input.data || {}; | ||
jobQueueInsert: (sql, insert) => { | ||
const data = Array.isArray(insert) ? insert : [insert]; | ||
const input = []; | ||
for (const it of data) { | ||
input.push({ | ||
is_complete: it.isComplete ?? false, | ||
priority: it.priority ?? 0, | ||
scheduled_at: it.scheduledAt ?? new Date(), | ||
name: it.name ?? undefined, | ||
data: JSON.stringify(it.data ?? {}), | ||
created_at: it.createdAt ?? new Date(), | ||
updated_at: it.updatedAt ?? new Date(), | ||
}); | ||
} | ||
return sql`INSERT INTO job_queue ${sql( | ||
input, | ||
...Object.keys(input), | ||
)} RETURNING id, is_complete, priority, scheduled_at, created_at, updated_at, name, data`; | ||
"is_complete", | ||
"priority", | ||
"scheduled_at", | ||
"name", | ||
"data", | ||
"created_at", | ||
"updated_at", | ||
)} RETURNING id as "id", is_complete as "isComplete", priority as "priority", scheduled_at as "scheduledAt", name as "name", data as "data", created_at as "createdAt", updated_at as "updatedAt"`; | ||
}, | ||
/** | ||
* @param sql | ||
* @param { StoreJobQueueInsertPartial_Input} value | ||
* @param { StoreJobQueueWhere} where | ||
* @returns {Promise<StoreJobQueue[]>} | ||
*/ | ||
jobQueueUpdate: (sql, value, where) => { | ||
const updateValue = {}; | ||
if (!isNil(value["isComplete"])) { | ||
updateValue["is_complete"] = value["isComplete"]; | ||
} | ||
if (!isNil(value["priority"])) { | ||
updateValue["priority"] = value["priority"]; | ||
} | ||
if (!isNil(value["scheduledAt"])) { | ||
updateValue["scheduled_at"] = value["scheduledAt"]; | ||
} | ||
if (!isNil(value["name"])) { | ||
updateValue["name"] = value["name"]; | ||
} | ||
if (!isNil(value["data"])) { | ||
updateValue["data"] = value["data"]; | ||
} | ||
if (!isNil(value["createdAt"])) { | ||
updateValue["created_at"] = value["createdAt"]; | ||
} | ||
if (!isNil(value["updatedAt"])) { | ||
updateValue["updated_at"] = value["updatedAt"]; | ||
} | ||
updateValue.updated_at = new Date(); | ||
return sql`UPDATE job_queue jq set ${sql( | ||
updateValue, | ||
...Object.keys(updateValue), | ||
)} | ||
WHERE (COALESCE(${where.id}, NULL) IS NULL OR jq.id = ${ | ||
where.id | ||
}) AND (COALESCE(${where.idGreaterThan}, NULL) IS NULL OR jq.id > ${ | ||
where.idGreaterThan | ||
}) AND (COALESCE(${where.idLowerThan}, NULL) IS NULL OR jq.id < ${ | ||
where.idLowerThan | ||
}) AND (COALESCE(${where.name}, NULL) IS NULL OR jq.name = ${ | ||
where.name | ||
}) AND (COALESCE(${where.nameLike}, NULL) IS NULL OR jq.name LIKE ${ | ||
"%" + where.nameLike + "%" | ||
}) | ||
RETURNING jq.id as "id", jq.is_complete as "isComplete", jq.priority as "priority", jq.scheduled_at as "scheduledAt", jq.name as "name", jq.data as "data", jq.created_at as "createdAt", jq.updated_at as "updatedAt"`; | ||
}, | ||
/** | ||
* @param sql | ||
* @param { StoreJobQueueInsertPartial_Input & { id?: number } } it | ||
* @returns {Promise<StoreJobQueue[]>} | ||
*/ | ||
jobQueueUpsert: (sql, it) => { | ||
const data = { | ||
is_complete: it.isComplete ?? false, | ||
priority: it.priority ?? 0, | ||
scheduled_at: it.scheduledAt ?? new Date(), | ||
name: it.name ?? undefined, | ||
data: JSON.stringify(it.data ?? {}), | ||
created_at: it.createdAt ?? new Date(), | ||
updated_at: it.updatedAt ?? new Date(), | ||
}; | ||
data.id = it.id || uuid(); | ||
return sql`INSERT INTO job_queue ${sql( | ||
data, | ||
"id", | ||
"is_complete", | ||
"priority", | ||
"scheduled_at", | ||
"name", | ||
"data", | ||
"created_at", | ||
"updated_at", | ||
)} ON CONFLICT (id) DO UPDATE SET ${sql( | ||
data, | ||
"is_complete", | ||
"priority", | ||
"scheduled_at", | ||
"name", | ||
"data", | ||
"created_at", | ||
"updated_at", | ||
)} RETURNING id as "id", is_complete as "isComplete", priority as "priority", scheduled_at as "scheduledAt", name as "name", data as "data", created_at as "createdAt", updated_at as "updatedAt"`; | ||
}, | ||
}; |
export const structure = JSON.parse( | ||
'{"store":{"fileStore":{"type":"object","group":"store","name":"fileStore","docString":"","isOptional":false,"validator":{"strict":false},"enableQueries":true,"keys":{"id":{"type":"uuid","docString":"","isOptional":false,"sql":{"searchable":true,"primary":true}},"bucket_name":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false},"sql":{"searchable":true}},"content_length":{"type":"number","docString":"","isOptional":false,"validator":{"convert":false,"integer":false}},"content_type":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false}},"filename":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false}},"created_at":{"type":"date","docString":"","isOptional":true,"defaultValue":"(new Date())"},"updated_at":{"type":"date","docString":"","isOptional":true,"defaultValue":"(new Date())"}},"uniqueName":"StoreFileStore"},"sessionStore":{"type":"object","group":"store","name":"sessionStore","docString":"","isOptional":false,"validator":{"strict":false},"enableQueries":true,"keys":{"id":{"type":"uuid","docString":"","isOptional":false,"sql":{"searchable":true}},"expires":{"type":"date","docString":"","isOptional":false},"data":{"type":"any","docString":"","isOptional":true,"defaultValue":"{}"}},"uniqueName":"StoreSessionStore"},"jobQueue":{"type":"object","group":"store","name":"jobQueue","docString":"","isOptional":false,"validator":{"strict":false},"enableQueries":true,"keys":{"id":{"type":"number","docString":"","isOptional":false,"validator":{"convert":false,"integer":false},"sql":{"searchable":true,"primary":true}},"is_complete":{"type":"boolean","docString":"","isOptional":true,"defaultValue":"false","validator":{"convert":false},"sql":{"searchable":true}},"priority":{"type":"number","docString":"","isOptional":true,"defaultValue":"0","validator":{"convert":false,"integer":false}},"scheduled_at":{"type":"date","docString":"","isOptional":true,"defaultValue":"(new Date())","sql":{"searchable":true}},"created_at":{"type":"date","docString":"","isOptional":true,"defaultValue":"(new Date())"},"updated_at":{"type":"date","docString":"","isOptional":true,"sql":{"searchable":true}},"name":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false},"sql":{"searchable":true}},"data":{"type":"any","docString":"","isOptional":true,"defaultValue":"{}"}},"uniqueName":"StoreJobQueue"}}}', | ||
'{"store":{"fileStore":{"type":"object","group":"store","name":"fileStore","docString":"","isOptional":false,"validator":{"strict":false},"enableQueries":true,"queryOptions":{"withHistory":true},"keys":{"id":{"type":"uuid","docString":"","isOptional":false,"sql":{"searchable":true,"primary":true}},"bucketName":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false},"sql":{"searchable":true}},"contentLength":{"type":"number","docString":"","isOptional":false,"validator":{"convert":false,"integer":true}},"contentType":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false}},"filename":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false}}},"uniqueName":"StoreFileStore"},"sessionStore":{"type":"object","group":"store","name":"sessionStore","docString":"","isOptional":false,"validator":{"strict":false},"enableQueries":true,"queryOptions":{"withDates":true},"keys":{"id":{"type":"uuid","docString":"","isOptional":false,"sql":{"searchable":true,"primary":true}},"expires":{"type":"date","docString":"","isOptional":false,"sql":{"searchable":true}},"data":{"type":"any","docString":"","isOptional":true,"defaultValue":"{}"}},"uniqueName":"StoreSessionStore"},"jobQueue":{"type":"object","group":"store","name":"jobQueue","docString":"","isOptional":false,"validator":{"strict":false},"enableQueries":true,"queryOptions":{"withDates":true},"keys":{"id":{"type":"number","docString":"","isOptional":false,"validator":{"convert":false,"integer":true},"sql":{"searchable":true,"primary":true}},"isComplete":{"type":"boolean","docString":"","isOptional":true,"defaultValue":"false","validator":{"convert":false},"sql":{"searchable":true}},"priority":{"type":"number","docString":"","isOptional":true,"defaultValue":"0","validator":{"convert":false,"integer":true}},"scheduledAt":{"type":"date","docString":"","isOptional":true,"defaultValue":"(new Date())","sql":{"searchable":true}},"name":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false},"sql":{"searchable":true}},"data":{"type":"any","docString":"","isOptional":true,"defaultValue":"{}"}},"uniqueName":"StoreJobQueue"}}}', | ||
); |
@@ -1,3 +0,3 @@ | ||
// Generated by @lbu/code-gen at 2020-05-22T08:17:51.893Z | ||
/* eslint-disable no-unused-vars */ | ||
// Generated by @lbu/code-gen at 2020-06-07T17:30:55.646Z | ||
/* eslint-disable no-unused-vars, jsdoc/valid-types, jsdoc/require-returns-description, jsdoc/no-undefined-types, jsdoc/require-property-description */ | ||
@@ -7,3 +7,3 @@ /** | ||
* | ||
* @typedef { { id:string, bucket_name:string, content_length:number, content_type:string, filename:string, created_at:Date, updated_at:Date, }} | ||
* @typedef { { id:string, bucketName:string, contentLength:number, contentType:string, filename:string, createdAt:Date, updatedAt:Date, }} | ||
*/ | ||
@@ -14,3 +14,3 @@ | ||
* | ||
* @typedef { { id:string, bucket_name:string, content_length:number, content_type:string, filename:string, created_at?:string, updated_at?:string, }} | ||
* @typedef { { id:string, bucketName:string, contentLength:number, contentType:string, filename:string, createdAt?:string, updatedAt?:string, }} | ||
*/ | ||
@@ -21,3 +21,3 @@ | ||
* | ||
* @typedef { { id:string, expires:Date, data:*, }} | ||
* @typedef { { id:string, expires:Date, data:*, createdAt:Date, updatedAt:Date, }} | ||
*/ | ||
@@ -28,3 +28,3 @@ | ||
* | ||
* @typedef { { id:string, expires:string, data?:* , }} | ||
* @typedef { { id:string, expires:string, data?:* , createdAt?:string, updatedAt?:string, }} | ||
*/ | ||
@@ -35,3 +35,3 @@ | ||
* | ||
* @typedef { { id:number, is_complete:boolean, priority:number, scheduled_at:Date, created_at:Date, updated_at?:Date, name:string, data:*, }} | ||
* @typedef { { id:number, isComplete:boolean, priority:number, scheduledAt:Date, name:string, data:*, createdAt:Date, updatedAt:Date, }} | ||
*/ | ||
@@ -42,3 +42,3 @@ | ||
* | ||
* @typedef { { id:number, is_complete?:boolean , priority?:number , scheduled_at?:string, created_at?:string, updated_at?:string, name:string, data?:* , }} | ||
* @typedef { { id:number, isComplete?:boolean , priority?:number , scheduledAt?:string, name:string, data?:* , createdAt?:string, updatedAt?:string, }} | ||
*/ | ||
@@ -49,3 +49,3 @@ | ||
* | ||
* @typedef { { id?:string, bucket_name?:string , }} | ||
* @typedef { { id?:string, idIn?:(string)[] , bucketName?:string , bucketNameLike?:string , }} | ||
*/ | ||
@@ -60,29 +60,17 @@ | ||
/** | ||
* @name StoreFileStorePartial | ||
* @name StoreFileStoreInsertPartial | ||
* | ||
* @typedef { { bucket_name?:string , content_length?:number , content_type?:string , filename?:string , created_at:Date, updated_at:Date, }} | ||
* @typedef { { bucketName:string, contentLength:number, contentType:string, filename:string, createdAt:Date, updatedAt:Date, }} | ||
*/ | ||
/** | ||
* @name StoreFileStorePartial_Input | ||
* @name StoreFileStoreInsertPartial_Input | ||
* | ||
* @typedef { { bucket_name?:string , content_length?:number , content_type?:string , filename?:string , created_at?:string, updated_at?:string, }} | ||
* @typedef { { bucketName:string, contentLength:number, contentType:string, filename:string, createdAt?:string, updatedAt?:string, }} | ||
*/ | ||
/** | ||
* @name StoreFileStoreCreate | ||
* | ||
* @typedef { { bucket_name:string, content_length:number, content_type:string, filename:string, created_at?:Date, updated_at?:Date, }} | ||
*/ | ||
/** | ||
* @name StoreFileStoreCreate_Input | ||
* | ||
* @typedef { { bucket_name:string, content_length:number, content_type:string, filename:string, created_at?:string, updated_at?:string, }} | ||
*/ | ||
/** | ||
* @name StoreSessionStoreWhere | ||
* | ||
* @typedef { { id?:string, }} | ||
* @typedef { { id?:string, idIn?:(string)[] , expires?:Date, expiresGreaterThan?:Date, expiresLowerThan?:Date, }} | ||
*/ | ||
@@ -93,33 +81,21 @@ | ||
* | ||
* @typedef { StoreSessionStoreWhere} | ||
* @typedef { { id?:string, idIn?:(string)[] , expires?:string, expiresGreaterThan?:string, expiresLowerThan?:string, }} | ||
*/ | ||
/** | ||
* @name StoreSessionStorePartial | ||
* @name StoreSessionStoreInsertPartial | ||
* | ||
* @typedef { { id?:string, expires?:Date, data:*, }} | ||
* @typedef { { expires:Date, data:*, createdAt:Date, updatedAt:Date, }} | ||
*/ | ||
/** | ||
* @name StoreSessionStorePartial_Input | ||
* @name StoreSessionStoreInsertPartial_Input | ||
* | ||
* @typedef { { id?:string, expires?:string, data?:* , }} | ||
* @typedef { { expires:string, data?:* , createdAt?:string, updatedAt?:string, }} | ||
*/ | ||
/** | ||
* @name StoreSessionStoreCreate | ||
* | ||
* @typedef { { id:string, expires:Date, data?:* , }} | ||
*/ | ||
/** | ||
* @name StoreSessionStoreCreate_Input | ||
* | ||
* @typedef { { id:string, expires:string, data?:* , }} | ||
*/ | ||
/** | ||
* @name StoreJobQueueWhere | ||
* | ||
* @typedef { { id?:number , name?:string , }} | ||
* @typedef { { id?:number , idGreaterThan?:number , idLowerThan?:number , name?:string , nameLike?:string , }} | ||
*/ | ||
@@ -134,23 +110,11 @@ | ||
/** | ||
* @name StoreJobQueuePartial | ||
* @name StoreJobQueueInsertPartial | ||
* | ||
* @typedef { { is_complete:boolean, priority:number, scheduled_at:Date, created_at:Date, updated_at?:Date, name?:string , data:*, }} | ||
* @typedef { { isComplete:boolean, priority:number, scheduledAt:Date, name:string, data:*, createdAt:Date, updatedAt:Date, }} | ||
*/ | ||
/** | ||
* @name StoreJobQueuePartial_Input | ||
* @name StoreJobQueueInsertPartial_Input | ||
* | ||
* @typedef { { is_complete?:boolean , priority?:number , scheduled_at?:string, created_at?:string, updated_at?:string, name?:string , data?:* , }} | ||
* @typedef { { isComplete?:boolean , priority?:number , scheduledAt?:string, name:string, data?:* , createdAt?:string, updatedAt?:string, }} | ||
*/ | ||
/** | ||
* @name StoreJobQueueCreate | ||
* | ||
* @typedef { { is_complete?:boolean , priority?:number , scheduled_at?:Date, created_at?:Date, updated_at?:Date, name:string, data?:* , }} | ||
*/ | ||
/** | ||
* @name StoreJobQueueCreate_Input | ||
* | ||
* @typedef { { is_complete?:boolean , priority?:number , scheduled_at?:string, created_at?:string, updated_at?:string, name:string, data?:* , }} | ||
*/ |
@@ -7,11 +7,15 @@ import { dirnameForModule } from "@lbu/stdlib"; | ||
/** | ||
* @typedef {object} MigrateContext | ||
* @name MigrateContext | ||
* | ||
* @typedef {object} | ||
* @property {MigrateFile[]} files | ||
* @property {string[]} namespaces | ||
* @property {object<string, string>} storedHashes | ||
* @property {postgres} sql | ||
* @property {Postgres} sql | ||
*/ | ||
/** | ||
* @typedef {object} MigrateFile | ||
* @name MigrateFile | ||
* | ||
* @typedef {object} | ||
* @property {string} namespace | ||
@@ -28,4 +32,6 @@ * @property {number} number | ||
/** | ||
* @property {postgres} sql | ||
* @return {Promise<MigrateContext>} | ||
* @param {Postgres} sql | ||
* @param {string} migrationDirectory | ||
* @returns {Promise<MigrateContext>} | ||
*/ | ||
@@ -65,4 +71,5 @@ export async function newMigrateContext( | ||
* Get a list of migrations to be applied | ||
* | ||
* @param {MigrateContext} mc | ||
* @return {({name: string, number: number, repeatable: boolean}[])|boolean} | ||
* @returns {({name: string, number: number, repeatable: boolean}[])|boolean} | ||
*/ | ||
@@ -95,3 +102,3 @@ export function getMigrationsToBeApplied(mc) { | ||
* @param {MigrateContext} mc | ||
* @return {MigrateFile[]} | ||
* @returns {MigrateFile[]} | ||
*/ | ||
@@ -115,5 +122,5 @@ function filterMigrationsToBeApplied(mc) { | ||
/** | ||
* @param {postgres} sql | ||
* @param {Postgres} sql | ||
* @param {MigrateFile} migration | ||
* @return {Promise<void>} | ||
* @returns {Promise<void>} | ||
*/ | ||
@@ -142,3 +149,3 @@ async function runMigration(sql, migration) { | ||
/** | ||
* @param {postgres} sql | ||
* @param {Postgres} sql | ||
* @param {MigrateFile} migration | ||
@@ -158,3 +165,3 @@ */ | ||
* @param {MigrateContext} mc | ||
* @return {Promise<void>} | ||
* @returns {Promise<void>} | ||
*/ | ||
@@ -196,4 +203,7 @@ async function syncWithSchemaState(mc) { | ||
/** | ||
* @param {Postgres} sql | ||
*/ | ||
async function acquireLock(sql) { | ||
// Should be automatically released by postgres once this connection ends. | ||
// Should be automatically released by Postgres once this connection ends. | ||
// We expect that the user runs this process for migrations only | ||
@@ -214,3 +224,3 @@ let locked = false; | ||
* @param {string[]} namespaces | ||
* @return {Promise<{migrationFiles: [], namespaces: [*]}>} | ||
* @returns {Promise<{migrationFiles: [], namespaces: [*]}>} | ||
*/ | ||
@@ -283,2 +293,6 @@ async function readMigrationsDir( | ||
/** | ||
* @param {string[]} namespaces | ||
* @param {MigrateFile[]} files | ||
*/ | ||
function sortMigrations(namespaces, files) { | ||
@@ -297,2 +311,5 @@ return files.sort((a, b) => { | ||
/** | ||
* @param {string} fileName | ||
*/ | ||
function parseFileName(fileName) { | ||
@@ -299,0 +316,0 @@ const filePattern = /(\d+)(-r)?-([a-zA-Z-]+).sql/g; |
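A sketch of driving the migration helpers; only `newMigrateContext` and `getMigrationsToBeApplied` are visible in this diff, and the meaning of a boolean return value is an assumption based on the annotated return type.

```js
import { newMigrateContext, getMigrationsToBeApplied } from "@lbu/store";

export async function printPendingMigrations(sql) {
  // Reads the migration files and the hashes stored in the database
  const mc = await newMigrateContext(sql, "./migrations");

  // Returns {name, number, repeatable}[] or a boolean (assumed: nothing to do)
  const pending = getMigrationsToBeApplied(mc);
  if (!Array.isArray(pending)) {
    console.log("No migrations to apply");
    return;
  }
  for (const m of pending) {
    console.log(`${m.number}-${m.name}${m.repeatable ? " (repeatable)" : ""}`);
  }
}
```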
@@ -6,3 +6,3 @@ import { merge } from "@lbu/stdlib"; | ||
* @param {object} opts | ||
* @return {minio.Client} | ||
* @returns {minio.Client} | ||
*/ | ||
@@ -34,6 +34,7 @@ export function newMinioClient(opts) { | ||
* List all objects in a bucket | ||
* | ||
* @param {minio.Client} minio | ||
* @param {string} bucketName | ||
* @param {string} [filter] | ||
* @return {Promise<{name: string, prefix: string, size: number, etag: string, | ||
* @returns {Promise<{name: string, prefix: string, size: number, etag: string, | ||
* lastModified: Date}[]>} | ||
@@ -40,0 +41,0 @@ */ |
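Caller-side sketch of the helper documented above; the newMinioClient options are left to the environment and the bucket name is illustrative.

```js
import { newMinioClient, listObjects } from "@lbu/store";

export async function totalBucketSize(bucketName) {
  const minio = newMinioClient({}); // credentials via options or environment
  // The optional third argument filters the listing by object name prefix
  const objects = await listObjects(minio, bucketName);
  // Every entry carries { name, prefix, size, etag, lastModified }
  return objects.reduce((sum, it) => sum + it.size, 0);
}
```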
@@ -5,5 +5,13 @@ import { merge } from "@lbu/stdlib"; | ||
/** | ||
* @name Postgres | ||
* | ||
* See https://github.com/porsager/postgres for docs | ||
* | ||
* @typedef {*} | ||
*/ | ||
/** | ||
* @param {object} [opts] | ||
* @param {boolean} [opts.createIfNotExists] | ||
* @return {postgres} | ||
* @returns {Postgres} | ||
*/ | ||
@@ -43,2 +51,7 @@ export async function newPostgresConnection(opts) { | ||
/** | ||
* @param {Postgres} sql | ||
* @param {string} databaseName | ||
* @param {string} template | ||
*/ | ||
export async function createDatabaseIfNotExists(sql, databaseName, template) { | ||
@@ -45,0 +58,0 @@ if (!sql) { |
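The two helpers combine as sketched below; the database and template names are hypothetical.

```js
import { newPostgresConnection, createDatabaseIfNotExists } from "@lbu/store";

export async function connect() {
  // Connects, creating the configured database first when it is missing
  const sql = await newPostgresConnection({ createIfNotExists: true });

  // Or bootstrap another database from a template by hand
  await createDatabaseIfNotExists(sql, "app_shadow", "app_template");

  return sql;
}
```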
@@ -0,14 +1,4 @@ | ||
import { storeQueries } from "./generated/queries.js"; | ||
const queries = { | ||
insertJob: (sql, name, data, priority, scheduled_at) => | ||
sql`INSERT INTO job_queue ${sql( | ||
{ name, data, priority, scheduled_at }, | ||
"name", | ||
"data", | ||
"priority", | ||
"scheduled_at", | ||
)} RETURNING id`, | ||
getJobById: (sql, id) => | ||
sql`SELECT id, created_at, scheduled_at, name, data FROM job_queue WHERE id = ${id}`, | ||
// Should only run in a transaction | ||
@@ -69,2 +59,5 @@ getAnyJob: (sql) => sql`UPDATE job_queue | ||
* @name JobData | ||
* | ||
* Row data for a specific job | ||
* | ||
* @typedef {object} | ||
@@ -80,2 +73,3 @@ * @property {number} id | ||
* @name JobInput | ||
* | ||
* @typedef {object} | ||
@@ -90,2 +84,3 @@ * @property {number} [priority=0] | ||
* @name JobQueueWorkerOptions | ||
* | ||
* @typedef {object} | ||
@@ -96,8 +91,14 @@ * @property {function(sql: *, data: JobData): (void|Promise<void>)} handler | ||
* @property {number} [parallelCount] Set the amount of parallel jobs to process. | ||
* Defaults to 1 | ||
* Defaults to 1. Make sure it is not higher than the number of Postgres connections | ||
* in the pool | ||
*/ | ||
/** | ||
* @class | ||
* Worker that processes jobs from the Postgres powered job queue | ||
*/ | ||
export class JobQueueWorker { | ||
/** | ||
* Create a new JobQueueWorker | ||
* | ||
* @param {Postgres} sql | ||
@@ -134,4 +135,5 @@ * @param {string|JobQueueWorkerOptions} nameOrOptions | ||
/** | ||
* Start the JobQueueWorker | ||
* | ||
* @public | ||
* Start the JobQueueWorker | ||
*/ | ||
@@ -154,5 +156,6 @@ start() { | ||
/** | ||
* @public | ||
* Stop the JobQueueWorker | ||
* Running jobs will continue to run, but no new jobs are fetched | ||
* | ||
* @public | ||
*/ | ||
@@ -170,5 +173,6 @@ stop() { | ||
/** | ||
* Get the number of jobs that need to run | ||
* | ||
* @public | ||
* Get the number of jobs that need to run | ||
* @return {Promise<{pending_count: number, scheduled_count: number}|undefined>} | ||
* @returns {Promise<{pending_count: number, scheduled_count: number}|undefined>} | ||
*/ | ||
@@ -183,8 +187,9 @@ pendingQueueSize() { | ||
/** | ||
* @public | ||
* Return the average time between scheduled and completed for jobs completed in the | ||
* provided time range in milliseconds | ||
* | ||
* @public | ||
* @param {Date} startDate | ||
* @param {Date} endDate | ||
* @return {Promise<number>} | ||
* @returns {Promise<number>} | ||
*/ | ||
@@ -205,6 +210,7 @@ averageTimeToCompletion(startDate, endDate) { | ||
/** | ||
* Uses this queue name and connection to add a job to the queue | ||
* | ||
* @public | ||
* Uses this queue name and connection to add a job to the queue | ||
* @param {JobInput} job | ||
* @return {Promise<number>} | ||
* @returns {Promise<number>} | ||
*/ | ||
@@ -262,3 +268,5 @@ addJob(job) { | ||
const [jobData] = await queries.getJobById(sql, job.id); | ||
const [jobData] = await storeQueries.jobQueueSelect(sql, { | ||
id: job.id, | ||
}); | ||
@@ -293,14 +301,12 @@ // We need to catch errors to be able to reset the worker. | ||
* Add a new item to the job queue | ||
* | ||
* @param {Postgres} sql | ||
* @param {JobInput} job | ||
* @return {Promise<number>} | ||
* @returns {Promise<number>} | ||
*/ | ||
export async function addJobToQueue(sql, job) { | ||
const [result] = await queries.insertJob( | ||
sql, | ||
job.name ?? process.env.APP_NAME, | ||
JSON.stringify(job.data ?? {}), | ||
job.priority ?? 0, | ||
job.scheduledAt ?? new Date(), | ||
); | ||
const [result] = await storeQueries.jobQueueInsert(sql, { | ||
...job, | ||
name: job.name ?? process.env.APP_NAME, | ||
}); | ||
return result?.id; | ||
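The worker options and `addJobToQueue` fit together as sketched below; only `handler` and `parallelCount` are documented in this diff, so the sketch sticks to those plus the `JobInput` fields used above.

```js
import { JobQueueWorker, addJobToQueue } from "@lbu/store";

export async function startEmailWorker(sql) {
  // nameOrOptions accepts either a queue name or a JobQueueWorkerOptions object
  const worker = new JobQueueWorker(sql, {
    // The handler receives a sql instance and the row data for the job
    handler: async (sql, job) => {
      console.log("processing job", job.id);
    },
    parallelCount: 2, // keep this below the Postgres pool size
  });
  worker.start();

  // Enqueue from anywhere that holds a connection; name defaults to APP_NAME
  await addJobToQueue(sql, {
    name: "sendEmail",
    priority: 5,
    scheduledAt: new Date(Date.now() + 60 * 1000), // run in a minute
    data: { to: "user@example.com" },
  });

  return worker;
}
```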
@@ -311,4 +317,5 @@ } | ||
* Get the number of jobs that need to run | ||
* | ||
* @param {Postgres} sql | ||
* @return {Promise<{pendingCount: number, scheduledCount: number}>} | ||
* @returns {Promise<{pendingCount: number, scheduledCount: number}>} | ||
*/ | ||
@@ -327,5 +334,6 @@ async function getPendingQueueSize(sql) { | ||
* Get the number of jobs that need to run for specified job name | ||
* | ||
* @param {Postgres} sql | ||
* @param {string} name | ||
* @return {Promise<{pendingCount: number, scheduledCount: number}>} | ||
* @returns {Promise<{pendingCount: number, scheduledCount: number}>} | ||
*/ | ||
@@ -345,6 +353,7 @@ async function getPendingQueueSizeForName(sql, name) { | ||
* provided time range | ||
* | ||
* @param {Postgres} sql | ||
* @param {Date} startDate | ||
* @param {Date} endDate | ||
* @return {Promise<number>} | ||
* @returns {Promise<number>} | ||
*/ | ||
@@ -360,2 +369,3 @@ async function getAverageTimeToJobCompletion(sql, startDate, endDate) { | ||
* provided time range | ||
* | ||
* @param {Postgres} sql | ||
@@ -365,3 +375,3 @@ * @param {string} name | ||
* @param {Date} endDate | ||
* @return {Promise<number>} | ||
* @returns {Promise<number>} | ||
*/ | ||
@@ -368,0 +378,0 @@ async function getAverageTimeToJobCompletionForName( |
@@ -0,24 +1,8 @@ | ||
import { storeQueries } from "./generated/queries.js"; | ||
const DELETE_INTERVAL = 45 * 60 * 1000; // 45 minutes | ||
const queries = { | ||
get: (sql, id) => | ||
sql`SELECT data FROM session_store WHERE id = ${id} AND expires > ${new Date()}`, | ||
set: (sql, id, expires, data) => | ||
sql`INSERT INTO session_store ${sql( | ||
{ id, data, expires }, | ||
"id", | ||
"expires", | ||
"data", | ||
)} ON CONFLICT (id) DO UPDATE SET ${sql( | ||
{ data, expires }, | ||
"data", | ||
"expires", | ||
)}`, | ||
delete: (sql, id) => sql`DELETE FROM session_store WHERE id = ${id}`, | ||
deleteExpired: (sql, expires) => | ||
sql`DELETE FROM session_store WHERE expires < ${expires}`, | ||
}; | ||
/** | ||
* @name SessionStore | ||
* | ||
* @typedef {object} | ||
@@ -36,3 +20,3 @@ * @property {function(string): Promise<object|boolean>} get | ||
* @param {boolean} [options.disableInterval] | ||
* @return {SessionStore} | ||
* @returns {SessionStore} | ||
*/ | ||
@@ -47,3 +31,3 @@ export function newSessionStore(sql, options) { | ||
interval = setInterval(() => { | ||
queries.deleteExpired(sql, new Date()); | ||
storeQueries.sessionStoreDelete(sql, { expiresLowerThan: new Date() }); | ||
}, options.cleanupInterval); | ||
@@ -54,15 +38,20 @@ } | ||
get: async (sid) => { | ||
const [data] = await queries.get(sql, sid); | ||
const [data] = await storeQueries.sessionStoreSelect(sql, { id: sid }); | ||
if (!data) { | ||
return false; | ||
} | ||
return data.data; | ||
return JSON.parse(data.data); | ||
}, | ||
set: async (sid, sess, maxAge) => { | ||
const d = new Date(); | ||
d.setMilliseconds(d.getMilliseconds() + maxAge); | ||
await queries.set(sql, sid, d, JSON.stringify(sess)); | ||
const expires = new Date(); | ||
expires.setMilliseconds(expires.getMilliseconds() + maxAge); | ||
await storeQueries.sessionStoreUpsert(sql, { | ||
id: sid, | ||
expires, | ||
data: JSON.stringify(sess), | ||
}); | ||
}, | ||
destroy: async (sid) => { | ||
await queries.delete(sql, sid); | ||
await storeQueries.sessionStoreDelete(sql, { id: sid }); | ||
}, | ||
@@ -69,0 +58,0 @@ kill: () => { |
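The resulting store is koa-session compatible, so it plugs in roughly as below; the `session` export name from @lbu/server is an assumption (the README above only says koa-session is re-exported).

```js
import { newSessionStore } from "@lbu/store";
import { session } from "@lbu/server"; // re-exported koa-session; name assumed

export function addSessionMiddleware(app, sql) {
  // The store cleans up expired rows every cleanupInterval milliseconds;
  // store.kill() stops that interval again, e.g. when tests tear down
  const store = newSessionStore(sql, { cleanupInterval: 45 * 60 * 1000 });

  app.use(session({ store, maxAge: 7 * 24 * 60 * 60 * 1000 }, app));
  return store;
}
```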
@@ -8,2 +8,5 @@ import { log } from "@lbu/insight"; | ||
/** | ||
* @param {boolean} [verboseSql=false] | ||
*/ | ||
export async function createTestPostgresDatabase(verboseSql = false) { | ||
@@ -44,2 +47,5 @@ const name = process.env.APP_NAME + uuid().substring(0, 7); | ||
/** | ||
* @param {Postgres} sql | ||
*/ | ||
export async function cleanupTestPostgresDatabase(sql) { | ||
@@ -46,0 +52,0 @@ const dbName = sql.options.database; |
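A sketch of using the test helpers; that createTestPostgresDatabase resolves to a connected sql client is inferred from cleanupTestPostgresDatabase reading sql.options.database above.

```js
import {
  createTestPostgresDatabase,
  cleanupTestPostgresDatabase,
} from "@lbu/store";

export async function withTestDatabase(fn) {
  // Creates a uniquely named database (APP_NAME plus a short uuid, per the source)
  const sql = await createTestPostgresDatabase();
  try {
    await fn(sql);
  } finally {
    // Drops the test database again; its name is read from sql.options.database
    await cleanupTestPostgresDatabase(sql);
  }
}
```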
+ Added @lbu/insight@0.0.21 (transitive)
+ Added @lbu/stdlib@0.0.21 (transitive)
- Removed @lbu/insight@0.0.20 (transitive)
- Removed @lbu/stdlib@0.0.20 (transitive)
Updated @lbu/insight@^0.0.21
Updated @lbu/stdlib@^0.0.21