@lbu/store
Comparing version 0.0.80 to 0.0.81
@@ -364,2 +364,26 @@ import * as minioVendor from "minio"; | ||
/** | ||
* Add a recurring job, if no existing job with the same name is scheduled. | ||
* Does not throw when a job is already pending with the same name. | ||
*/ | ||
export function addRecurringJobToQueue( | ||
sql: Postgres, | ||
{ | ||
name, | ||
priority, | ||
interval, | ||
}: { | ||
name: string; | ||
priority?: number; | ||
interval: { | ||
years?: number; | ||
months?: number; | ||
days?: number; | ||
hours?: number; | ||
minutes?: number; | ||
seconds?: number; | ||
}; | ||
}, | ||
): Promise<void>; | ||
/** | ||
* Stripped down from @lbu/server SessionStore | ||
@@ -366,0 +390,0 @@ */ |
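For context, a minimal usage sketch of the new export, assuming an existing Postgres connection `sql`; the job name, priority, and interval values are illustrative:

import { addRecurringJobToQueue } from "@lbu/store";

// Enqueue a recurring job once; if a pending job with this name already
// exists, the call returns without inserting anything and does not throw.
export async function scheduleSessionCleanup(sql) {
  await addRecurringJobToQueue(sql, {
    name: "session.cleanup",
    priority: 2,
    interval: { hours: 6 },
  });
}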
{ | ||
"name": "@lbu/store", | ||
"version": "0.0.80", | ||
"version": "0.0.81", | ||
"description": "Postgres & S3-compatible wrappers for common things", | ||
@@ -18,4 +18,4 @@ "main": "./index.js", | ||
"dependencies": { | ||
"@lbu/insight": "0.0.80", | ||
"@lbu/stdlib": "0.0.80", | ||
"@lbu/insight": "0.0.81", | ||
"@lbu/stdlib": "0.0.81", | ||
"@types/minio": "7.0.6", | ||
@@ -47,3 +47,3 @@ "mime-types": "2.1.27", | ||
}, | ||
"gitHead": "5cf8ecaee5bb425f50daf095ba8860436f3d69a8" | ||
"gitHead": "09b2c4d5494313cd21ede7964e2a92e8fe0d3877" | ||
} |
@@ -9,3 +9,3 @@ import { createReadStream } from "fs"; | ||
copyFile: (sql, targetId, targetBucket, sourceId, sourceBucket) => sql` | ||
INSERT INTO "file" ("id", "bucketName", "contentType", "contentLength", "name") | ||
INSERT INTO "file" ("id", "bucketName", "contentType", "contentLength", "name", "meta") | ||
SELECT ${targetId}, | ||
@@ -15,3 +15,4 @@ ${targetBucket}, | ||
"contentLength", | ||
"name" | ||
"name", | ||
"meta" | ||
FROM "file" | ||
@@ -18,0 +19,0 @@ WHERE id = ${sourceId} |
@@ -20,3 +20,3 @@ // Generated by @lbu/code-gen | ||
SELECT | ||
f."id", f."bucketName", f."contentLength", f."contentType", f."name", f."createdAt", f."updatedAt", f."deletedAt" | ||
f."id", f."bucketName", f."contentLength", f."contentType", f."name", f."meta", f."createdAt", f."updatedAt", f."deletedAt" | ||
FROM "file" f | ||
@@ -260,3 +260,3 @@ WHERE (COALESCE(${where?.id ?? null}, NULL) IS NULL OR f."id" = ${ | ||
} | ||
let query = `INSERT INTO "file" ("bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt") VALUES `; | ||
let query = `INSERT INTO "file" ("bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt") VALUES `; | ||
const argList = []; | ||
@@ -270,2 +270,3 @@ let idx = 1; | ||
it.name ?? null, | ||
JSON.stringify(it.meta ?? {}), | ||
it.createdAt ?? new Date(), | ||
@@ -275,7 +276,7 @@ it.updatedAt ?? new Date(), | ||
); | ||
query += `($${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}),`; | ||
query += `($${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}, $${idx++}),`; | ||
} | ||
// Remove trailing comma | ||
query = query.substring(0, query.length - 1); | ||
query += ` RETURNING "id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt"`; | ||
query += ` RETURNING "id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt"`; | ||
return sql.unsafe(query, argList); | ||
@@ -310,2 +311,6 @@ }, | ||
} | ||
if (value["meta"] !== undefined) { | ||
query += `"meta" = $${idx++}, `; | ||
argList.push(JSON.stringify(value["meta"])); | ||
} | ||
if (value["createdAt"] !== undefined) { | ||
@@ -416,3 +421,3 @@ query += `"createdAt" = $${idx++}, `; | ||
query = query.substring(0, query.length - 4); | ||
query += ` RETURNING f."id", f."bucketName", f."contentLength", f."contentType", f."name", f."createdAt", f."updatedAt", f."deletedAt"`; | ||
query += ` RETURNING f."id", f."bucketName", f."contentLength", f."contentType", f."name", f."meta", f."createdAt", f."updatedAt", f."deletedAt"`; | ||
return sql.unsafe(query, argList); | ||
@@ -429,12 +434,12 @@ }, | ||
return sql` | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
) VALUES ( | ||
${it.id ?? uuid()}, ${it.bucketName ?? null}, ${it.contentLength ?? null}, ${ | ||
it.contentType ?? null | ||
}, ${it.name ?? null}, ${it.createdAt ?? new Date()}, ${ | ||
it.updatedAt ?? new Date() | ||
}, ${it.deletedAt ?? null} | ||
}, ${it.name ?? null}, ${JSON.stringify(it.meta ?? {})}, ${ | ||
it.createdAt ?? new Date() | ||
}, ${it.updatedAt ?? new Date()}, ${it.deletedAt ?? null} | ||
) ON CONFLICT("id") DO UPDATE SET | ||
"bucketName" = EXCLUDED."bucketName", "contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "updatedAt" = EXCLUDED."updatedAt", "deletedAt" = EXCLUDED."deletedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
"bucketName" = EXCLUDED."bucketName", "contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "meta" = EXCLUDED."meta", "updatedAt" = EXCLUDED."updatedAt", "deletedAt" = EXCLUDED."deletedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
`; | ||
@@ -451,12 +456,12 @@ }, | ||
return sql` | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
) VALUES ( | ||
${it.id ?? uuid()}, ${it.bucketName ?? null}, ${it.contentLength ?? null}, ${ | ||
it.contentType ?? null | ||
}, ${it.name ?? null}, ${it.createdAt ?? new Date()}, ${ | ||
it.updatedAt ?? new Date() | ||
}, ${it.deletedAt ?? null} | ||
}, ${it.name ?? null}, ${JSON.stringify(it.meta ?? {})}, ${ | ||
it.createdAt ?? new Date() | ||
}, ${it.updatedAt ?? new Date()}, ${it.deletedAt ?? null} | ||
) ON CONFLICT("bucketName") DO UPDATE SET | ||
"contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "updatedAt" = EXCLUDED."updatedAt", "deletedAt" = EXCLUDED."deletedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
"contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "meta" = EXCLUDED."meta", "updatedAt" = EXCLUDED."updatedAt", "deletedAt" = EXCLUDED."deletedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
`; | ||
@@ -473,12 +478,12 @@ }, | ||
return sql` | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
) VALUES ( | ||
${it.id ?? uuid()}, ${it.bucketName ?? null}, ${it.contentLength ?? null}, ${ | ||
it.contentType ?? null | ||
}, ${it.name ?? null}, ${it.createdAt ?? new Date()}, ${ | ||
it.updatedAt ?? new Date() | ||
}, ${it.deletedAt ?? null} | ||
}, ${it.name ?? null}, ${JSON.stringify(it.meta ?? {})}, ${ | ||
it.createdAt ?? new Date() | ||
}, ${it.updatedAt ?? new Date()}, ${it.deletedAt ?? null} | ||
) ON CONFLICT("createdAt") DO UPDATE SET | ||
"bucketName" = EXCLUDED."bucketName", "contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "updatedAt" = EXCLUDED."updatedAt", "deletedAt" = EXCLUDED."deletedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
"bucketName" = EXCLUDED."bucketName", "contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "meta" = EXCLUDED."meta", "updatedAt" = EXCLUDED."updatedAt", "deletedAt" = EXCLUDED."deletedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
`; | ||
@@ -495,12 +500,12 @@ }, | ||
return sql` | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
) VALUES ( | ||
${it.id ?? uuid()}, ${it.bucketName ?? null}, ${it.contentLength ?? null}, ${ | ||
it.contentType ?? null | ||
}, ${it.name ?? null}, ${it.createdAt ?? new Date()}, ${ | ||
it.updatedAt ?? new Date() | ||
}, ${it.deletedAt ?? null} | ||
}, ${it.name ?? null}, ${JSON.stringify(it.meta ?? {})}, ${ | ||
it.createdAt ?? new Date() | ||
}, ${it.updatedAt ?? new Date()}, ${it.deletedAt ?? null} | ||
) ON CONFLICT("updatedAt") DO UPDATE SET | ||
"bucketName" = EXCLUDED."bucketName", "contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "deletedAt" = EXCLUDED."deletedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
"bucketName" = EXCLUDED."bucketName", "contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "meta" = EXCLUDED."meta", "deletedAt" = EXCLUDED."deletedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
`; | ||
@@ -517,12 +522,12 @@ }, | ||
return sql` | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
) VALUES ( | ||
${it.id ?? uuid()}, ${it.bucketName ?? null}, ${it.contentLength ?? null}, ${ | ||
it.contentType ?? null | ||
}, ${it.name ?? null}, ${it.createdAt ?? new Date()}, ${ | ||
it.updatedAt ?? new Date() | ||
}, ${it.deletedAt ?? null} | ||
}, ${it.name ?? null}, ${JSON.stringify(it.meta ?? {})}, ${ | ||
it.createdAt ?? new Date() | ||
}, ${it.updatedAt ?? new Date()}, ${it.deletedAt ?? null} | ||
) ON CONFLICT("deletedAt") DO UPDATE SET | ||
"bucketName" = EXCLUDED."bucketName", "contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "updatedAt" = EXCLUDED."updatedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "createdAt", "updatedAt", "deletedAt" | ||
"bucketName" = EXCLUDED."bucketName", "contentLength" = EXCLUDED."contentLength", "contentType" = EXCLUDED."contentType", "name" = EXCLUDED."name", "meta" = EXCLUDED."meta", "updatedAt" = EXCLUDED."updatedAt" | ||
RETURNING "id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt", "deletedAt" | ||
`; | ||
@@ -547,2 +552,6 @@ }, | ||
}) AND (COALESCE(${ | ||
where?.isComplete ?? null | ||
}, NULL) IS NULL OR j."isComplete" = ${ | ||
where?.isComplete ?? null | ||
}) AND (COALESCE(${ | ||
where?.scheduledAt ?? null | ||
@@ -606,2 +615,6 @@ }, NULL) IS NULL OR j."scheduledAt" = ${ | ||
}) AND (COALESCE(${ | ||
where?.isComplete ?? null | ||
}, NULL) IS NULL OR j."isComplete" = ${ | ||
where?.isComplete ?? null | ||
}) AND (COALESCE(${ | ||
where?.scheduledAt ?? null | ||
@@ -663,2 +676,6 @@ }, NULL) IS NULL OR j."scheduledAt" = ${ | ||
}) AND (COALESCE(${ | ||
where?.isComplete ?? null | ||
}, NULL) IS NULL OR j."isComplete" = ${ | ||
where?.isComplete ?? null | ||
}) AND (COALESCE(${ | ||
where?.scheduledAt ?? null | ||
@@ -791,2 +808,8 @@ }, NULL) IS NULL OR j."scheduledAt" = ${ | ||
} | ||
if (where.isComplete !== undefined) { | ||
query += `j."isComplete" `; | ||
query += `= $${idx++}`; | ||
argList.push(where.isComplete); | ||
query += " AND "; | ||
} | ||
if (where.scheduledAt !== undefined) { | ||
@@ -885,2 +908,23 @@ query += `j."scheduledAt" `; | ||
/** | ||
* Note: Use only when isComplete has a unique constraint | ||
* @param {Postgres} sql | ||
* @param { StoreJobInsertPartial_Input & { id?: number } } it | ||
* @returns {Promise<StoreJob[]>} | ||
*/ | ||
jobUpsertByIsComplete: (sql, it) => { | ||
return sql` | ||
INSERT INTO "job" ("id", "isComplete", "priority", "scheduledAt", "name", "data", "createdAt", "updatedAt" | ||
) VALUES ( | ||
${it.id ?? uuid()}, ${it.isComplete ?? false}, ${it.priority ?? 0}, ${ | ||
it.scheduledAt ?? new Date() | ||
}, ${it.name ?? null}, ${JSON.stringify(it.data ?? {})}, ${ | ||
it.createdAt ?? new Date() | ||
}, ${it.updatedAt ?? new Date()} | ||
) ON CONFLICT("isComplete") DO UPDATE SET | ||
"priority" = EXCLUDED."priority", "scheduledAt" = EXCLUDED."scheduledAt", "name" = EXCLUDED."name", "data" = EXCLUDED."data", "updatedAt" = EXCLUDED."updatedAt" | ||
RETURNING "id", "isComplete", "priority", "scheduledAt", "name", "data", "createdAt", "updatedAt" | ||
`; | ||
}, | ||
/** | ||
* Note: Use only when scheduledAt has a unique constraint | ||
@@ -887,0 +931,0 @@ * @param {Postgres} sql |
export const structureString = | ||
'{"store":{"file":{"type":"object","docString":"","isOptional":false,"uniqueName":"StoreFile","group":"store","name":"file","validator":{"strict":true},"keys":{"id":{"type":"uuid","docString":"","isOptional":false,"sql":{"primary":true,"searchable":true}},"bucketName":{"type":"string","docString":"","isOptional":false,"sql":{"primary":false,"searchable":true},"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false,"min":1}},"contentLength":{"type":"number","docString":"","isOptional":false,"validator":{"convert":false,"floatingPoint":false}},"contentType":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false,"min":1}},"name":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false,"min":1}}},"enableQueries":true,"queryOptions":{"withSoftDeletes":true,"withDates":false}},"session":{"type":"object","docString":"","isOptional":false,"uniqueName":"StoreSession","group":"store","name":"session","validator":{"strict":true},"keys":{"id":{"type":"uuid","docString":"","isOptional":false,"sql":{"primary":true,"searchable":true}},"expires":{"type":"date","docString":"","isOptional":false,"sql":{"primary":false,"searchable":true}},"data":{"type":"any","docString":"","isOptional":true,"defaultValue":"{}"}},"enableQueries":true,"queryOptions":{"withSoftDeletes":false,"withDates":true}},"job":{"type":"object","docString":"","isOptional":false,"uniqueName":"StoreJob","group":"store","name":"job","validator":{"strict":true},"keys":{"id":{"type":"number","docString":"","isOptional":false,"sql":{"primary":true,"searchable":true},"validator":{"convert":false,"floatingPoint":false}},"isComplete":{"type":"boolean","docString":"","isOptional":true,"defaultValue":"false","sql":{"primary":false,"searchable":true},"validator":{"convert":false}},"priority":{"type":"number","docString":"","isOptional":true,"defaultValue":"0","validator":{"convert":false,"floatingPoint":false}},"scheduledAt":{"type":"date","docString":"","isOptional":true,"defaultValue":"(new Date())","sql":{"primary":false,"searchable":true}},"name":{"type":"string","docString":"","isOptional":false,"sql":{"primary":false,"searchable":true},"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false,"min":1}},"data":{"type":"any","docString":"","isOptional":true,"defaultValue":"{}"}},"enableQueries":true,"queryOptions":{"withSoftDeletes":false,"withDates":true}}}}'; | ||
'{"store":{"jobInterval":{"type":"object","docString":"","isOptional":false,"uniqueName":"StoreJobInterval","group":"store","name":"jobInterval","validator":{"strict":true},"keys":{"years":{"type":"number","docString":"","isOptional":true,"validator":{"convert":false,"floatingPoint":false}},"months":{"type":"number","docString":"","isOptional":true,"validator":{"convert":false,"floatingPoint":false}},"days":{"type":"number","docString":"","isOptional":true,"validator":{"convert":false,"floatingPoint":false}},"hours":{"type":"number","docString":"","isOptional":true,"validator":{"convert":false,"floatingPoint":false}},"minutes":{"type":"number","docString":"","isOptional":true,"validator":{"convert":false,"floatingPoint":false}},"seconds":{"type":"number","docString":"","isOptional":true,"validator":{"convert":false,"floatingPoint":false}}},"enableQueries":false},"file":{"type":"object","docString":"","isOptional":false,"uniqueName":"StoreFile","group":"store","name":"file","validator":{"strict":true},"keys":{"id":{"type":"uuid","docString":"","isOptional":false,"sql":{"primary":true,"searchable":true}},"bucketName":{"type":"string","docString":"","isOptional":false,"sql":{"primary":false,"searchable":true},"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false,"min":1}},"contentLength":{"type":"number","docString":"","isOptional":false,"validator":{"convert":false,"floatingPoint":false}},"contentType":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false,"min":1}},"name":{"type":"string","docString":"","isOptional":false,"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false,"min":1}},"meta":{"type":"reference","docString":"","isOptional":false,"reference":{"uniqueName":"StoreFileMeta","group":"store","name":"fileMeta"}}},"enableQueries":true,"queryOptions":{"withSoftDeletes":true,"withDates":false}},"session":{"type":"object","docString":"","isOptional":false,"uniqueName":"StoreSession","group":"store","name":"session","validator":{"strict":true},"keys":{"id":{"type":"uuid","docString":"","isOptional":false,"sql":{"primary":true,"searchable":true}},"expires":{"type":"date","docString":"","isOptional":false,"sql":{"primary":false,"searchable":true}},"data":{"type":"any","docString":"","isOptional":true,"defaultValue":"{}"}},"enableQueries":true,"queryOptions":{"withSoftDeletes":false,"withDates":true}},"job":{"type":"object","docString":"","isOptional":false,"uniqueName":"StoreJob","group":"store","name":"job","validator":{"strict":true},"keys":{"id":{"type":"number","docString":"","isOptional":false,"sql":{"primary":true,"searchable":true},"validator":{"convert":false,"floatingPoint":false}},"isComplete":{"type":"boolean","docString":"","isOptional":true,"defaultValue":"false","sql":{"primary":false,"searchable":true},"validator":{"convert":false}},"priority":{"type":"number","docString":"","isOptional":true,"defaultValue":"0","validator":{"convert":false,"floatingPoint":false}},"scheduledAt":{"type":"date","docString":"","isOptional":true,"defaultValue":"(new 
Date())","sql":{"primary":false,"searchable":true}},"name":{"type":"string","docString":"","isOptional":false,"sql":{"primary":false,"searchable":true},"validator":{"convert":false,"trim":false,"lowerCase":false,"upperCase":false,"min":1}},"data":{"type":"any","docString":"","isOptional":true,"defaultValue":"{}"}},"enableQueries":true,"queryOptions":{"withSoftDeletes":false,"withDates":true}},"fileMeta":{"type":"object","docString":"User definable, optional object to store whatever you want","isOptional":true,"defaultValue":"{}","uniqueName":"StoreFileMeta","group":"store","name":"fileMeta","validator":{"strict":true},"keys":{},"enableQueries":false}}}'; | ||
export const structure = JSON.parse(structureString); |
@@ -7,3 +7,3 @@ // Generated by @lbu/code-gen | ||
* | ||
* @typedef { { "id":string, "bucketName":string, "contentLength":number, "contentType":string, "name":string, "createdAt":Date, "updatedAt":Date, "deletedAt"?:Date, }} | ||
* @typedef { { "id":string, "bucketName":string, "contentLength":number, "contentType":string, "name":string, "meta":StoreFileMeta, "createdAt":Date, "updatedAt":Date, "deletedAt"?:Date, }} | ||
*/ | ||
@@ -14,3 +14,3 @@ | ||
* | ||
* @typedef { { "id":string, "bucketName":string, "contentLength":number, "contentType":string, "name":string, "createdAt"?:string, "updatedAt"?:string, "deletedAt"?:string, }} | ||
* @typedef { { "id":string, "bucketName":string, "contentLength":number, "contentType":string, "name":string, "meta":StoreFileMeta_Input, "createdAt"?:string, "updatedAt"?:string, "deletedAt"?:string, }} | ||
*/ | ||
@@ -21,3 +21,3 @@ | ||
* | ||
* @typedef { { "bucketName":string, "contentLength":number, "contentType":string, "name":string, "createdAt":Date, "updatedAt":Date, "deletedAt"?:Date, }} | ||
* @typedef { { "bucketName":string, "contentLength":number, "contentType":string, "name":string, "meta":StoreFileMeta, "createdAt":Date, "updatedAt":Date, "deletedAt"?:Date, }} | ||
*/ | ||
@@ -28,6 +28,18 @@ | ||
* | ||
* @typedef { { "bucketName":string, "contentLength":number, "contentType":string, "name":string, "createdAt"?:string, "updatedAt"?:string, "deletedAt"?:string, }} | ||
* @typedef { { "bucketName":string, "contentLength":number, "contentType":string, "name":string, "meta":StoreFileMeta_Input, "createdAt"?:string, "updatedAt"?:string, "deletedAt"?:string, }} | ||
*/ | ||
/** | ||
* @name StoreFileMeta | ||
* Docs: User definable, optional object to store whatever you want | ||
* @typedef { { }} | ||
*/ | ||
/** | ||
* @name StoreFileMeta_Input | ||
* Docs: User definable, optional object to store whatever you want | ||
* @typedef { { } |undefined} | ||
*/ | ||
/** | ||
* @name StoreFileWhere | ||
@@ -69,5 +81,17 @@ * Docs: By default 'where.deletedAtInclude' will only include 'null' values. To use the other generated variants like 'deletedAtGreaterThan', set this value to 'true'. | ||
/** | ||
* @name StoreJobInterval | ||
* | ||
* @typedef { { "years"?:number , "months"?:number , "days"?:number , "hours"?:number , "minutes"?:number , "seconds"?:number , }} | ||
*/ | ||
/** | ||
* @name StoreJobInterval_Input | ||
* | ||
* @typedef { StoreJobInterval} | ||
*/ | ||
/** | ||
* @name StoreJobWhere | ||
* | ||
* @typedef { { "id"?:number , "idGreaterThan"?:number , "idLowerThan"?:number , "scheduledAt"?:Date, "scheduledAtGreaterThan"?:Date, "scheduledAtLowerThan"?:Date, "name"?:string , "nameLike"?:string , "createdAt"?:Date, "createdAtGreaterThan"?:Date, "createdAtLowerThan"?:Date, "updatedAt"?:Date, "updatedAtGreaterThan"?:Date, "updatedAtLowerThan"?:Date, }} | ||
* @typedef { { "id"?:number , "idGreaterThan"?:number , "idLowerThan"?:number , "isComplete"?:boolean , "scheduledAt"?:Date, "scheduledAtGreaterThan"?:Date, "scheduledAtLowerThan"?:Date, "name"?:string , "nameLike"?:string , "createdAt"?:Date, "createdAtGreaterThan"?:Date, "createdAtLowerThan"?:Date, "updatedAt"?:Date, "updatedAtGreaterThan"?:Date, "updatedAtLowerThan"?:Date, }} | ||
*/ | ||
@@ -78,3 +102,3 @@ | ||
* | ||
* @typedef { { "id"?:number , "idGreaterThan"?:number , "idLowerThan"?:number , "scheduledAt"?:string, "scheduledAtGreaterThan"?:string, "scheduledAtLowerThan"?:string, "name"?:string , "nameLike"?:string , "createdAt"?:string, "createdAtGreaterThan"?:string, "createdAtLowerThan"?:string, "updatedAt"?:string, "updatedAtGreaterThan"?:string, "updatedAtLowerThan"?:string, }} | ||
* @typedef { { "id"?:number , "idGreaterThan"?:number , "idLowerThan"?:number , "isComplete"?:boolean , "scheduledAt"?:string, "scheduledAtGreaterThan"?:string, "scheduledAtLowerThan"?:string, "name"?:string , "nameLike"?:string , "createdAt"?:string, "createdAtGreaterThan"?:string, "createdAtLowerThan"?:string, "updatedAt"?:string, "updatedAtGreaterThan"?:string, "updatedAtLowerThan"?:string, }} | ||
*/ | ||
@@ -81,0 +105,0 @@ |
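The new `meta` column is typed as `StoreFileMeta`, an open object ("store whatever you want") that the generated queries serialize with `JSON.stringify`. Below is a hand-written insert sketch mirroring the generated SQL above; the bucket, content, and meta values are illustrative, and `uuid` is assumed to come from `@lbu/stdlib`:

import { uuid } from "@lbu/stdlib";

// Insert a file row that carries user-defined metadata in "meta".
async function insertFileWithMeta(sql) {
  const [file] = await sql`
    INSERT INTO "file" ("id", "bucketName", "contentLength", "contentType", "name", "meta", "createdAt", "updatedAt")
    VALUES (${uuid()}, ${"user-uploads"}, ${1024}, ${"image/png"}, ${"avatar.png"},
      ${JSON.stringify({ altText: "Profile picture" })}, ${new Date()}, ${new Date()})
    RETURNING "id", "meta"
  `;
  return file;
}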
import { createHash } from "crypto"; | ||
import { existsSync } from "fs"; | ||
import { readdir, readFile } from "fs/promises"; | ||
import path from "path"; | ||
import { dirnameForModule } from "@lbu/stdlib"; | ||
import { dirnameForModule, pathJoin } from "@lbu/stdlib"; | ||
@@ -257,3 +256,3 @@ /** | ||
for (const f of files) { | ||
const fullPath = path.join(directory, f); | ||
const fullPath = pathJoin(directory, f); | ||
@@ -274,3 +273,37 @@ if (f === "namespaces.txt") { | ||
const exportedItems = await import(sub); | ||
// Either same level in node_modules | ||
const directPath = pathJoin(process.cwd(), "node_modules", sub); | ||
// Or a level deeper | ||
const indirectPath = pathJoin(directory, "../node_modules", sub); | ||
const subPath = !existsSync(directPath) | ||
? existsSync(indirectPath) | ||
? indirectPath | ||
: new Error( | ||
`Could not determine import path of ${sub}, while searching for migration files.`, | ||
) | ||
: directPath; | ||
// Quick hack | ||
if (typeof subPath !== "string") { | ||
throw subPath; | ||
} | ||
// Use the package.json to find the package entrypoint | ||
// Only supporting simple { exports: "file.js" }, { exports: { default: "file.js" } }, or { main: "file.js" } | ||
const subPackageJson = JSON.parse( | ||
await readFile(pathJoin(subPath, "package.json"), "utf8"), | ||
); | ||
const exportedItems = await import( | ||
pathJoin( | ||
subPath, | ||
subPackageJson?.exports?.default ?? | ||
(typeof subPackageJson?.exports === "string" | ||
? subPackageJson?.exports | ||
: undefined) ?? | ||
subPackageJson?.main ?? | ||
"index.js", | ||
) | ||
); | ||
if (exportedItems && exportedItems.migrations) { | ||
@@ -277,0 +310,0 @@ const subResult = await readMigrationsDir( |
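The import-path logic above only understands the simplest package.json shapes. As a standalone sketch of the same resolution (an illustrative helper, not part of the package):

import { readFile } from "fs/promises";
import { pathJoin } from "@lbu/stdlib";

// Resolve a dependency's entry file, supporting only { exports: "./file.js" },
// { exports: { default: "./file.js" } } and { main: "./file.js" }, falling
// back to index.js.
async function resolveEntrypoint(packageDir) {
  const pkg = JSON.parse(await readFile(pathJoin(packageDir, "package.json"), "utf8"));
  const entry =
    pkg?.exports?.default ??
    (typeof pkg?.exports === "string" ? pkg.exports : undefined) ??
    pkg?.main ??
    "index.js";
  return pathJoin(packageDir, entry);
}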
src/queue.js
import { log } from "@lbu/insight"; | ||
import { storeQueries } from "./generated/queries.js"; | ||
const LBU_RECURRING_JOB = "lbu.job.recurring"; | ||
const queries = { | ||
@@ -73,2 +75,15 @@ // Should only run in a transaction | ||
`, | ||
/** | ||
* @param {Postgres} sql | ||
* @param {string} name | ||
* @returns {Promise<{ id: number }[]>} | ||
*/ | ||
getRecurringJobForName: (sql, name) => sql` | ||
SELECT id | ||
FROM "job" | ||
WHERE name = ${LBU_RECURRING_JOB} | ||
AND "isComplete" IS FALSE | ||
AND data ->> 'name' = ${name} | ||
`, | ||
}; | ||
@@ -78,3 +93,3 @@ | ||
/** | ||
* @param sql | ||
* @param {Postgres} sql | ||
* @param {string|JobQueueWorkerOptions} nameOrOptions | ||
@@ -230,3 +245,7 @@ * @param {JobQueueWorkerOptions} [options] | ||
try { | ||
await this.jobHandler(sql, jobData); | ||
if (jobData.name === LBU_RECURRING_JOB) { | ||
await handleLbuRecurring(sql, jobData); | ||
} else { | ||
await this.jobHandler(sql, jobData); | ||
} | ||
} catch (err) { | ||
@@ -254,3 +273,3 @@ error = err; | ||
* | ||
* @param sql | ||
* @param {Postgres} sql | ||
* @param {JobInput} job | ||
@@ -268,5 +287,37 @@ * @returns {Promise<number>} | ||
/** | ||
* Add a recurring job, if no existing job with the same name is scheduled. | ||
* Does not throw when a job is already pending with the same name. | ||
* | ||
* @param {Postgres} sql | ||
* @param {object} options | ||
* @param {string} options.name | ||
* @param {number} [options.priority] | ||
* @param {StoreJobInterval} options.interval | ||
* @returns {Promise<void>} | ||
*/ | ||
export async function addRecurringJobToQueue( | ||
sql, | ||
{ name, priority, interval }, | ||
) { | ||
priority = priority || 1; | ||
const existingJobs = await queries.getRecurringJobForName(sql, name); | ||
if (existingJobs.length > 0) { | ||
return; | ||
} | ||
await addJobToQueue(sql, { | ||
name: LBU_RECURRING_JOB, | ||
priority, | ||
data: { | ||
interval, | ||
name, | ||
}, | ||
}); | ||
} | ||
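// Usage sketch (illustrative, not part of this file): because of the pending-job
// check above, the call is safe to repeat, for example on every application boot.
//
//   await addRecurringJobToQueue(sql, { name: "daily.report", interval: { days: 1 } });
//   // A second call finds the pending "lbu.job.recurring" row via data ->> 'name'
//   // and returns early instead of inserting another job.
//   await addRecurringJobToQueue(sql, { name: "daily.report", interval: { days: 1 } });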
/** | ||
* Get the number of jobs that need to run | ||
* | ||
* @param sql | ||
* @param {Postgres} sql | ||
* @returns {Promise<{pendingCount: number, scheduledCount: number}>} | ||
@@ -287,3 +338,3 @@ */ | ||
* | ||
* @param sql | ||
* @param {Postgres} sql | ||
* @param {string} name | ||
@@ -306,3 +357,3 @@ * @returns {Promise<{pendingCount: number, scheduledCount: number}>} | ||
* | ||
* @param sql | ||
* @param {Postgres} sql | ||
* @param {Date} startDate | ||
@@ -322,3 +373,3 @@ * @param {Date} endDate | ||
* | ||
* @param sql | ||
* @param {Postgres} sql | ||
* @param {string} name | ||
@@ -343,1 +394,43 @@ * @param {Date} startDate | ||
} | ||
/** | ||
* Handles a recurring job by scheduling its 'child' job and re-queueing the recurring job itself | ||
* | ||
* @param {Postgres} sql | ||
* @param {StoreJob} job | ||
*/ | ||
async function handleLbuRecurring(sql, job) { | ||
const { | ||
scheduledAt, | ||
data: { name, interval, priority }, | ||
} = job; | ||
const nextSchedule = new Date(); | ||
nextSchedule.setUTCFullYear( | ||
scheduledAt.getUTCFullYear() + (interval.years ?? 0), | ||
scheduledAt.getUTCMonth() + (interval.months ?? 0), | ||
scheduledAt.getUTCDate() + (interval.days ?? 0), | ||
); | ||
nextSchedule.setUTCHours( | ||
scheduledAt.getUTCHours() + (interval.hours ?? 0), | ||
scheduledAt.getUTCMinutes() + (interval.minutes ?? 0), | ||
scheduledAt.getUTCSeconds() + (interval.seconds ?? 0), | ||
0, | ||
); | ||
// Dispatch 'job' with higher priority | ||
await addJobToQueue(sql, { | ||
name, | ||
priority: (priority ?? 0) + 1, | ||
}); | ||
// Dispatch recurring job again for the next 'schedule' | ||
await addJobToQueue(sql, { | ||
name: LBU_RECURRING_JOB, | ||
scheduledAt: nextSchedule, | ||
data: { | ||
name, | ||
interval, | ||
}, | ||
}); | ||
} |
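To make the rescheduling arithmetic concrete, here is a standalone sketch with a worked example; the dates and interval are illustrative, and the helper is not exported by the package:

// Compute the next run from the previous scheduledAt and a StoreJobInterval,
// treating missing interval fields as 0 (the same behaviour as handleLbuRecurring).
function nextScheduledAt(scheduledAt, interval) {
  const next = new Date();
  next.setUTCFullYear(
    scheduledAt.getUTCFullYear() + (interval.years ?? 0),
    scheduledAt.getUTCMonth() + (interval.months ?? 0),
    scheduledAt.getUTCDate() + (interval.days ?? 0),
  );
  next.setUTCHours(
    scheduledAt.getUTCHours() + (interval.hours ?? 0),
    scheduledAt.getUTCMinutes() + (interval.minutes ?? 0),
    scheduledAt.getUTCSeconds() + (interval.seconds ?? 0),
    0,
  );
  return next;
}

// 2020-10-01T06:00:00.000Z with { days: 1 } yields 2020-10-02T06:00:00.000Z.
nextScheduledAt(new Date("2020-10-01T06:00:00.000Z"), { days: 1 });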
+ Added @lbu/insight@0.0.81 (transitive)
+ Added @lbu/stdlib@0.0.81 (transitive)
+ Added @types/node@14.11.8 (transitive)
- Removed @lbu/insight@0.0.80 (transitive)
- Removed @lbu/stdlib@0.0.80 (transitive)
- Removed @types/node@14.11.4 (transitive)
Updated @lbu/insight@0.0.81
Updated @lbu/stdlib@0.0.81