Comparing version 3.2.4 to 3.3.0
@@ -50,3 +50,3 @@ const size = 256 | ||
raw(x) { | ||
buffer = Buffer.concat([buffer.slice(0, b.i), x]) | ||
buffer = Buffer.concat([buffer.subarray(0, b.i), x]) | ||
b.i = buffer.length | ||
@@ -57,3 +57,3 @@ return b | ||
buffer.writeUInt32BE(b.i - at, at) | ||
const out = buffer.slice(0, b.i) | ||
const out = buffer.subarray(0, b.i) | ||
b.i = 0 | ||
@@ -60,0 +60,0 @@ buffer = Buffer.allocUnsafe(size) |
@@ -312,3 +312,3 @@ const net = require('net') | ||
try { | ||
handle(incoming.slice(0, length + 1)) | ||
handle(incoming.subarray(0, length + 1)) | ||
} catch (e) { | ||
@@ -318,3 +318,3 @@ query && (query.cursorFn || query.describeFirst) && write(Sync) | ||
} | ||
incoming = incoming.slice(length + 1) | ||
incoming = incoming.subarray(length + 1) | ||
remaining = 0 | ||
@@ -359,3 +359,3 @@ incomings = null | ||
socket.on('data', data) | ||
keep_alive && socket.setKeepAlive(true, 1000 * keep_alive) | ||
keep_alive && socket.setKeepAlive && socket.setKeepAlive(true, 1000 * keep_alive) | ||
const s = StartupMessage() | ||
@@ -489,3 +489,3 @@ write(s) | ||
: query.isRaw === true | ||
? x.slice(index, index += length) | ||
? x.subarray(index, index += length) | ||
: column.parser === undefined | ||
@@ -500,4 +500,4 @@ ? x.toString('utf8', index, index += length) | ||
? value | ||
: transform.value.from ? transform.value.from(value) : value) | ||
: (row[column.name] = transform.value.from ? transform.value.from(value) : value) | ||
: transform.value.from ? transform.value.from(value, column) : value) | ||
: (row[column.name] = transform.value.from ? transform.value.from(value, column) : value) | ||
} | ||
@@ -623,2 +623,4 @@ | ||
while (x[index++] !== 0); | ||
const table = x.readUInt32BE(index) | ||
const number = x.readUInt16BE(index + 4) | ||
const type = x.readUInt32BE(index + 6) | ||
@@ -630,2 +632,4 @@ query.statement.columns[i] = { | ||
parser: parsers[type], | ||
table, | ||
number, | ||
type | ||
@@ -662,3 +666,3 @@ } | ||
write( | ||
b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.slice(9)]))).z(1).end() | ||
b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.subarray(9)]))).z(1).end() | ||
) | ||
@@ -862,7 +866,7 @@ } | ||
function CopyData(x) { | ||
stream.push(x.slice(5)) || socket.pause() | ||
stream && (stream.push(x.subarray(5)) || socket.pause()) | ||
} | ||
function CopyDone() { | ||
stream.push(null) | ||
stream && stream.push(null) | ||
stream = null | ||
@@ -869,0 +873,0 @@ } |
@@ -11,4 +11,7 @@ const os = require('os') | ||
toPascal, | ||
pascal, | ||
toCamel, | ||
camel, | ||
toKebab, | ||
kebab, | ||
fromPascal, | ||
@@ -29,4 +32,7 @@ fromCamel, | ||
toPascal, | ||
pascal, | ||
toCamel, | ||
camel, | ||
toKebab, | ||
kebab, | ||
fromPascal, | ||
@@ -167,15 +173,15 @@ fromCamel, | ||
, exists = name in channels | ||
, channel = exists ? channels[name] : (channels[name] = { listeners: [listener] }) | ||
if (exists) { | ||
channel.listeners.push(listener) | ||
channels[name].listeners.push(listener) | ||
listener.onlisten && listener.onlisten() | ||
return Promise.resolve({ ...channel.result, unlisten }) | ||
return Promise.resolve({ ...channels[name].result, unlisten }) | ||
} | ||
channel.result = await sql`listen ${ sql(name) }` | ||
const result = await sql`listen ${ sql(name) }` | ||
channels[name] = { result, listeners: [listener] } | ||
listener.onlisten && listener.onlisten() | ||
channel.result.unlisten = unlisten | ||
result.unlisten = unlisten | ||
return channel.result | ||
return result | ||
@@ -186,3 +192,3 @@ async function unlisten() { | ||
channel.listeners = channel.listeners.filter(x => x !== listener) | ||
channels[name].listeners = channels[name].listeners.filter(x => x !== listener) | ||
if (channels[name].listeners.length) | ||
@@ -189,0 +195,0 @@ return |
@@ -57,5 +57,10 @@ const originCache = new Map() | ||
async readable() { | ||
simple() { | ||
this.options.simple = true | ||
this.options.prepare = false | ||
return this | ||
} | ||
async readable() { | ||
this.simple() | ||
this.streaming = true | ||
@@ -66,4 +71,3 @@ return this | ||
async writable() { | ||
this.options.simple = true | ||
this.options.prepare = false | ||
this.simple() | ||
this.streaming = true | ||
@@ -113,3 +117,4 @@ return this | ||
describe() { | ||
this.onlyDescribe = true | ||
this.options.simple = false | ||
this.onlyDescribe = this.options.prepare = true | ||
return this | ||
@@ -116,0 +121,0 @@ } |
@@ -14,2 +14,3 @@ const noop = () => { /* noop */ } | ||
...options, | ||
transform: { column: {}, value: {}, row: {} }, | ||
max: 1, | ||
@@ -39,3 +40,3 @@ fetch_types: false, | ||
ended = true | ||
stream && (await new Promise(r => (stream.once('end', r), stream.end()))) | ||
stream && (await new Promise(r => (stream.once('close', r), stream.end()))) | ||
return end() | ||
@@ -45,3 +46,3 @@ } | ||
sql.close = async() => { | ||
stream && (await new Promise(r => (stream.once('end', r), stream.end()))) | ||
stream && (await new Promise(r => (stream.once('close', r), stream.end()))) | ||
return close() | ||
@@ -109,3 +110,3 @@ } | ||
if (x[0] === 0x77) | ||
parse(x.slice(25), state, sql.options.parsers, handle) | ||
parse(x.subarray(25), state, sql.options.parsers, handle, options.transform) | ||
else if (x[0] === 0x6b && x[17]) | ||
@@ -143,3 +144,3 @@ pong() | ||
function parse(x, state, parsers, handle) { | ||
function parse(x, state, parsers, handle, transform) { | ||
const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc) | ||
@@ -151,4 +152,4 @@ | ||
const r = state[x.readUInt32BE(i)] = { | ||
schema: String(x.slice(i += 4, i = x.indexOf(0, i))) || 'pg_catalog', | ||
table: String(x.slice(i + 1, i = x.indexOf(0, i + 1))), | ||
schema: x.toString('utf8', i += 4, i = x.indexOf(0, i)) || 'pg_catalog', | ||
table: x.toString('utf8', i + 1, i = x.indexOf(0, i + 1)), | ||
columns: Array(x.readUInt16BE(i += 2)), | ||
@@ -165,3 +166,5 @@ keys: [] | ||
key: x[i++], | ||
name: String(x.slice(i, i = x.indexOf(0, i))), | ||
name: transform.column.from | ||
? transform.column.from(x.toString('utf8', i, i = x.indexOf(0, i))) | ||
: x.toString('utf8', i, i = x.indexOf(0, i)), | ||
type: x.readUInt32BE(i += 1), | ||
@@ -180,3 +183,3 @@ parser: parsers[x.readUInt32BE(i)], | ||
state.date = Time(x.readBigInt64BE(9)) | ||
state.lsn = x.slice(1, 9) | ||
state.lsn = x.subarray(1, 9) | ||
}, | ||
@@ -186,4 +189,3 @@ I: x => { // Insert | ||
const relation = state[x.readUInt32BE(i)] | ||
const row = {} | ||
tuples(x, row, relation.columns, i += 7) | ||
const { row } = tuples(x, relation.columns, i += 7, transform) | ||
@@ -200,9 +202,6 @@ handle(row, { | ||
const key = x[i] === 75 | ||
const row = key || x[i] === 79 | ||
? {} | ||
handle(key || x[i] === 79 | ||
? tuples(x, key ? relation.keys : relation.columns, i += 3, transform).row | ||
: null | ||
tuples(x, row, key ? relation.keys : relation.columns, i += 3) | ||
handle(row, { | ||
, { | ||
command: 'delete', | ||
@@ -218,10 +217,9 @@ relation, | ||
const key = x[i] === 75 | ||
const old = key || x[i] === 79 | ||
? {} | ||
const xs = key || x[i] === 79 | ||
? tuples(x, key ? relation.keys : relation.columns, i += 3, transform) | ||
: null | ||
old && (i = tuples(x, old, key ? relation.keys : relation.columns, i += 3)) | ||
xs && (i = xs.i) | ||
const row = {} | ||
tuples(x, row, relation.columns, i + 3) | ||
const { row } = tuples(x, relation.columns, i + 3, transform) | ||
@@ -232,3 +230,3 @@ handle(row, { | ||
key, | ||
old | ||
old: xs && xs.row | ||
}) | ||
@@ -241,10 +239,12 @@ }, | ||
function tuples(x, row, columns, xi) { | ||
function tuples(x, columns, xi, transform) { | ||
let type | ||
, column | ||
, value | ||
const row = transform.raw ? new Array(columns.length) : {} | ||
for (let i = 0; i < columns.length; i++) { | ||
type = x[xi++] | ||
column = columns[i] | ||
row[column.name] = type === 110 // n | ||
value = type === 110 // n | ||
? null | ||
@@ -258,5 +258,14 @@ : type === 117 // u | ||
: column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))) | ||
transform.raw | ||
? (row[i] = transform.raw === true | ||
? value | ||
: transform.value.from ? transform.value.from(value, column) : value) | ||
: (row[column.name] = transform.value.from | ||
? transform.value.from(value, column) | ||
: value | ||
) | ||
} | ||
return xi | ||
return { i: xi, row: transform.row.from ? transform.row.from(row) : row } | ||
} | ||
@@ -263,0 +272,0 @@ |
@@ -156,3 +156,6 @@ const { Query } = require('./query.js') | ||
values, | ||
in: values, | ||
in: (...xs) => { | ||
const x = values(...xs) | ||
return x === '()' ? '(null)' : x | ||
}, | ||
select, | ||
@@ -327,1 +330,32 @@ as: select, | ||
const fromKebab = module.exports.fromKebab = x => x.replace(/-/g, '_') | ||
function createJsonTransform(fn) { | ||
return function jsonTransform(x, column) { | ||
return column.type === 114 || column.type === 3802 | ||
? Array.isArray(x) | ||
? x.map(jsonTransform) | ||
: Object.entries(x).reduce((acc, [k, v]) => Object.assign(acc, { [fn(k)]: v }), {}) | ||
: x | ||
} | ||
} | ||
toCamel.column = { from: toCamel } | ||
toCamel.value = { from: createJsonTransform(toCamel) } | ||
fromCamel.column = { to: fromCamel } | ||
const camel = module.exports.camel = { ...toCamel } | ||
camel.column.to = fromCamel | ||
toPascal.column = { from: toPascal } | ||
toPascal.value = { from: createJsonTransform(toPascal) } | ||
fromPascal.column = { to: fromPascal } | ||
const pascal = module.exports.pascal = { ...toPascal } | ||
pascal.column.to = fromPascal | ||
toKebab.column = { from: toKebab } | ||
toKebab.value = { from: createJsonTransform(toKebab) } | ||
fromKebab.column = { to: fromKebab } | ||
const kebab = module.exports.kebab = { ...toKebab } | ||
kebab.column.to = fromKebab |
{ | ||
"name": "postgres", | ||
"version": "3.2.4", | ||
"version": "3.3.0", | ||
"description": "Fastest full featured PostgreSQL client for Node.js", | ||
@@ -9,2 +9,3 @@ "type": "module", | ||
"exports": { | ||
"types": "./types/index.d.ts", | ||
"import": "./src/index.js", | ||
@@ -11,0 +12,0 @@ "default": "./cjs/src/index.js" |
README.md
@@ -130,3 +130,3 @@ <img align="left" width="440" height="180" alt="Fastest full PostgreSQL nodejs client" src="https://raw.githubusercontent.com/porsager/postgres/master/postgresjs.svg?sanitize=true"> | ||
> Please note that queries are first executed when `awaited` – or manually by using `.execute()`. | ||
> Please note that queries are first executed when `awaited` – or instantly by using [`.execute()`](#execute). | ||
@@ -487,2 +487,8 @@ ### Query parameters | ||
### Execute | ||
#### ```await sql``.execute()``` | ||
The lazy Promise implementation in Postgres.js is what allows it to distinguish [Nested Fragments](#building-queries) from the main outer query. This also means that queries are always executed at the earliest in the following tick. If you have a specific need to execute the query in the same tick, you can call `.execute()` | ||
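A minimal sketch of the difference, assuming a `users` table exists:
```js
const sql = postgres()

// Lazy: nothing is sent to the server until the promise is awaited
const lazy = sql`select * from users`

// Eager: .execute() starts the query in the current tick;
// the result can still be awaited as usual
const users = await sql`select * from users`.execute()
```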
### Unsafe raw string queries | ||
@@ -580,7 +586,92 @@ | ||
Postgres.js comes with a number of built-in data transformation functions that can be used to transform the data returned from a query or when inserting data. They are available under `transform` option in the `postgres()` function connection options. | ||
Postgres.js allows for transformation of the data passed to or returned from a query by using the `transform` option. | ||
For example: `postgres('connectionURL', { transform: {...} })` | ||
Built in transformation functions are: | ||
### Parameters | ||
* For camelCase - `postgres.camel`, `postgres.toCamel`, `postgres.fromCamel` | ||
* For PascalCase - `postgres.pascal`, `postgres.toPascal`, `postgres.fromPascal` | ||
* For Kebab-Case - `postgres.kebab`, `postgres.toKebab`, `postgres.fromKebab` | ||
By default, using `postgres.camel`, `postgres.pascal` and `postgres.kebab` will perform a two-way transformation - both the data passed to the query and the data returned by the query will be transformed: | ||
```js | ||
// Transform the column names to and from camel case | ||
const sql = postgres({ transform: postgres.camel }) | ||
await sql`CREATE TABLE IF NOT EXISTS camel_case (a_test INTEGER, b_test TEXT)` | ||
await sql`INSERT INTO camel_case ${ sql([{ aTest: 1, bTest: 1 }]) }` | ||
const data = await sql`SELECT ${ sql('aTest', 'bTest') } FROM camel_case` | ||
console.log(data) // [ { aTest: 1, bTest: '1' } ] | ||
``` | ||
To only perform half of the transformation (e.g. only the transformation **to** or **from** camel case), use the other transformation functions: | ||
```js | ||
// Transform the column names only to camel case | ||
// (for the results that are returned from the query) | ||
postgres({ transform: postgres.toCamel }) | ||
await sql`CREATE TABLE IF NOT EXISTS camel_case (a_test INTEGER)` | ||
await sql`INSERT INTO camel_case ${ sql([{ a_test: 1 }]) }` | ||
const data = await sql`SELECT a_test FROM camel_case` | ||
console.log(data) // [ { aTest: 1 } ] | ||
``` | ||
```js | ||
// Transform the column names only from camel case | ||
// (for interpolated inserts, updates, and selects) | ||
const sql = postgres({ transform: postgres.fromCamel }) | ||
await sql`CREATE TABLE IF NOT EXISTS camel_case (a_test INTEGER)` | ||
await sql`INSERT INTO camel_case ${ sql([{ aTest: 1 }]) }` | ||
const data = await sql`SELECT ${ sql('aTest') } FROM camel_case` | ||
console.log(data) // [ { a_test: 1 } ] | ||
``` | ||
> Note that Postgres.js does not rewrite the static parts of the tagged template strings. So to transform column names in your queries, the `sql()` helper must be used - eg. `${ sql('columnName') }` as in the examples above. | ||
### Transform `undefined` Values | ||
By default, Postgres.js will throw the error `UNDEFINED_VALUE: Undefined values are not allowed` when undefined values are passed | ||
```js | ||
// Transform undefined values to null | ||
const sql = postgres({ | ||
transform: { | ||
undefined: null | ||
} | ||
}) | ||
await sql`CREATE TABLE IF NOT EXISTS transform_undefined (a_test INTEGER)` | ||
await sql`INSERT INTO transform_undefined ${ sql([{ a_test: undefined }]) }` | ||
const data = await sql`SELECT a_test FROM transform_undefined` | ||
console.log(data) // [ { a_test: null } ] | ||
``` | ||
To combine with the built in transform functions, spread the transform in the `transform` object: | ||
```js | ||
// Transform column names to and from camel case and map undefined values to null | ||
const sql = postgres({ | ||
transform: { | ||
...postgres.camel, | ||
undefined: null | ||
} | ||
}) | ||
await sql`CREATE TABLE IF NOT EXISTS transform_undefined (a_test INTEGER)` | ||
await sql`INSERT INTO transform_undefined ${ sql([{ aTest: undefined }]) }` | ||
const data = await sql`SELECT ${ sql('aTest') } FROM transform_undefined` | ||
console.log(data) // [ { aTest: null } ] | ||
``` | ||
### Custom Transform Functions | ||
To specify your own transformation functions, you can use the `column`, `value` and `row` options inside of `transform`, each an object possibly including `to` and `from` keys: | ||
* `to`: The function used to transform outgoing column names, i.e. `SELECT ${ sql('aName') }` becomes `SELECT a_name` when using `postgres.fromCamel` (or the two-way `postgres.camel`). | ||
@@ -591,22 +682,21 @@ * `from`: The function to transform the incoming query result column name to, see example below. | ||
Built in transformation functions are: | ||
* For camelCase - `postgres.toCamel` and `postgres.fromCamel` | ||
* For PascalCase - `postgres.toPascal` and `postgres.fromPascal` | ||
* For Kebab-Case - `postgres.toKebab` and `postgres.fromKebab` | ||
```js | ||
// Implement your own functions, look at postgres.toCamel, etc | ||
// as a reference: | ||
// https://github.com/porsager/postgres/blob/4241824ffd7aa94ffb482e54ca9f585d9d0a4eea/src/types.js#L310-L328 | ||
function transformColumnToDatabase() { /* ... */ } | ||
function transformColumnFromDatabase() { /* ... */ } | ||
These functions can be passed in as options when calling `postgres()`. For example - | ||
```js | ||
// this will tranform the column names to camel case back and forth | ||
(async function () { | ||
const sql = postgres('connectionURL', { transform: { column: { to: postgres.fromCamel, from: postgres.toCamel } }}); | ||
await sql`CREATE TABLE IF NOT EXISTS camel_case (a_test INTEGER, b_test TEXT)`; | ||
await sql`INSERT INTO camel_case ${ sql([{ aTest: 1, bTest: 1 }]) }` | ||
const data = await sql`SELECT ${ sql('aTest', 'bTest') } FROM camel_case`; | ||
console.log(data) // [ { aTest: 1, bTest: '1' } ] | ||
process.exit(1) | ||
})(); | ||
const sql = postgres({ | ||
transform: { | ||
column: { | ||
to: transformColumnToDatabase, | ||
from: transformColumnFromDatabase, | ||
}, | ||
value: { /* ... */ }, | ||
row: { /* ... */ } | ||
} | ||
}) | ||
``` | ||
> Note that if a column name is originally registered as snake_case in the database then to tranform it from camelCase to snake_case when querying or inserting, the column camelCase name must be put in `sql('columnName')` as it's done in the above example, Postgres.js does not rewrite anything inside the static parts of the tagged templates. | ||
## Listen & notify | ||
@@ -852,3 +942,3 @@ | ||
Since this library automatically creates prepared statements, it also has a default max lifetime for connections to prevent memory bloat on the database itself. This is a random interval for each connection between 45 and 90 minutes. This allows multiple connections to come up and down seamlessly without user interference. | ||
Since this library automatically creates prepared statements, it also has a default max lifetime for connections to prevent memory bloat on the database itself. This is a random interval for each connection between 45 and 90 minutes. This allows multiple connections to independently come up and down without affecting the service. | ||
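If the default interval doesn't suit a deployment, it can be tuned with the `max_lifetime` connection option (in seconds); a minimal sketch:
```js
// Sketch: recycle connections after at most 30 minutes instead of the
// default random 45-90 minute interval (max_lifetime is in seconds)
const sql = postgres({
  max: 10,
  max_lifetime: 60 * 30
})
```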
@@ -902,3 +992,3 @@ ### Connection timeout | ||
Prepared statements will automatically be created for any queries where it can be inferred that the query is static. This can be disabled by using the `no_prepare` option. For instance — this is useful when [using PGBouncer in `transaction mode`](https://github.com/porsager/postgres/issues/93). | ||
Prepared statements will automatically be created for any queries where it can be inferred that the query is static. This can be disabled by using the `prepare: false` option. For instance — this is useful when [using PGBouncer in `transaction mode`](https://github.com/porsager/postgres/issues/93#issuecomment-656290493). | ||
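For example, a minimal sketch of disabling prepared statements for a PGBouncer (transaction mode) setup:
```js
// Sketch: turn off automatic prepared statements, e.g. when the
// connection goes through PGBouncer in transaction mode
const sql = postgres({
  prepare: false
})
```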
@@ -905,0 +995,0 @@ ## Custom Types |
@@ -50,3 +50,3 @@ const size = 256 | ||
raw(x) { | ||
buffer = Buffer.concat([buffer.slice(0, b.i), x]) | ||
buffer = Buffer.concat([buffer.subarray(0, b.i), x]) | ||
b.i = buffer.length | ||
@@ -57,3 +57,3 @@ return b | ||
buffer.writeUInt32BE(b.i - at, at) | ||
const out = buffer.slice(0, b.i) | ||
const out = buffer.subarray(0, b.i) | ||
b.i = 0 | ||
@@ -60,0 +60,0 @@ buffer = Buffer.allocUnsafe(size) |
@@ -312,3 +312,3 @@ import net from 'net' | ||
try { | ||
handle(incoming.slice(0, length + 1)) | ||
handle(incoming.subarray(0, length + 1)) | ||
} catch (e) { | ||
@@ -318,3 +318,3 @@ query && (query.cursorFn || query.describeFirst) && write(Sync) | ||
} | ||
incoming = incoming.slice(length + 1) | ||
incoming = incoming.subarray(length + 1) | ||
remaining = 0 | ||
@@ -359,3 +359,3 @@ incomings = null | ||
socket.on('data', data) | ||
keep_alive && socket.setKeepAlive(true, 1000 * keep_alive) | ||
keep_alive && socket.setKeepAlive && socket.setKeepAlive(true, 1000 * keep_alive) | ||
const s = StartupMessage() | ||
@@ -489,3 +489,3 @@ write(s) | ||
: query.isRaw === true | ||
? x.slice(index, index += length) | ||
? x.subarray(index, index += length) | ||
: column.parser === undefined | ||
@@ -500,4 +500,4 @@ ? x.toString('utf8', index, index += length) | ||
? value | ||
: transform.value.from ? transform.value.from(value) : value) | ||
: (row[column.name] = transform.value.from ? transform.value.from(value) : value) | ||
: transform.value.from ? transform.value.from(value, column) : value) | ||
: (row[column.name] = transform.value.from ? transform.value.from(value, column) : value) | ||
} | ||
@@ -623,2 +623,4 @@ | ||
while (x[index++] !== 0); | ||
const table = x.readUInt32BE(index) | ||
const number = x.readUInt16BE(index + 4) | ||
const type = x.readUInt32BE(index + 6) | ||
@@ -630,2 +632,4 @@ query.statement.columns[i] = { | ||
parser: parsers[type], | ||
table, | ||
number, | ||
type | ||
@@ -662,3 +666,3 @@ } | ||
write( | ||
b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.slice(9)]))).z(1).end() | ||
b().p().str('md5' + md5(Buffer.concat([Buffer.from(md5((await Pass()) + user)), x.subarray(9)]))).z(1).end() | ||
) | ||
@@ -862,7 +866,7 @@ } | ||
function CopyData(x) { | ||
stream.push(x.slice(5)) || socket.pause() | ||
stream && (stream.push(x.subarray(5)) || socket.pause()) | ||
} | ||
function CopyDone() { | ||
stream.push(null) | ||
stream && stream.push(null) | ||
stream = null | ||
@@ -869,0 +873,0 @@ } |
@@ -11,4 +11,7 @@ import os from 'os' | ||
toPascal, | ||
pascal, | ||
toCamel, | ||
camel, | ||
toKebab, | ||
kebab, | ||
fromPascal, | ||
@@ -29,4 +32,7 @@ fromCamel, | ||
toPascal, | ||
pascal, | ||
toCamel, | ||
camel, | ||
toKebab, | ||
kebab, | ||
fromPascal, | ||
@@ -167,15 +173,15 @@ fromCamel, | ||
, exists = name in channels | ||
, channel = exists ? channels[name] : (channels[name] = { listeners: [listener] }) | ||
if (exists) { | ||
channel.listeners.push(listener) | ||
channels[name].listeners.push(listener) | ||
listener.onlisten && listener.onlisten() | ||
return Promise.resolve({ ...channel.result, unlisten }) | ||
return Promise.resolve({ ...channels[name].result, unlisten }) | ||
} | ||
channel.result = await sql`listen ${ sql(name) }` | ||
const result = await sql`listen ${ sql(name) }` | ||
channels[name] = { result, listeners: [listener] } | ||
listener.onlisten && listener.onlisten() | ||
channel.result.unlisten = unlisten | ||
result.unlisten = unlisten | ||
return channel.result | ||
return result | ||
@@ -186,3 +192,3 @@ async function unlisten() { | ||
channel.listeners = channel.listeners.filter(x => x !== listener) | ||
channels[name].listeners = channels[name].listeners.filter(x => x !== listener) | ||
if (channels[name].listeners.length) | ||
@@ -189,0 +195,0 @@ return |
@@ -57,5 +57,10 @@ const originCache = new Map() | ||
async readable() { | ||
simple() { | ||
this.options.simple = true | ||
this.options.prepare = false | ||
return this | ||
} | ||
async readable() { | ||
this.simple() | ||
this.streaming = true | ||
@@ -66,4 +71,3 @@ return this | ||
async writable() { | ||
this.options.simple = true | ||
this.options.prepare = false | ||
this.simple() | ||
this.streaming = true | ||
@@ -113,3 +117,4 @@ return this | ||
describe() { | ||
this.onlyDescribe = true | ||
this.options.simple = false | ||
this.onlyDescribe = this.options.prepare = true | ||
return this | ||
@@ -116,0 +121,0 @@ } |
@@ -14,2 +14,3 @@ const noop = () => { /* noop */ } | ||
...options, | ||
transform: { column: {}, value: {}, row: {} }, | ||
max: 1, | ||
@@ -39,3 +40,3 @@ fetch_types: false, | ||
ended = true | ||
stream && (await new Promise(r => (stream.once('end', r), stream.end()))) | ||
stream && (await new Promise(r => (stream.once('close', r), stream.end()))) | ||
return end() | ||
@@ -45,3 +46,3 @@ } | ||
sql.close = async() => { | ||
stream && (await new Promise(r => (stream.once('end', r), stream.end()))) | ||
stream && (await new Promise(r => (stream.once('close', r), stream.end()))) | ||
return close() | ||
@@ -109,3 +110,3 @@ } | ||
if (x[0] === 0x77) | ||
parse(x.slice(25), state, sql.options.parsers, handle) | ||
parse(x.subarray(25), state, sql.options.parsers, handle, options.transform) | ||
else if (x[0] === 0x6b && x[17]) | ||
@@ -143,3 +144,3 @@ pong() | ||
function parse(x, state, parsers, handle) { | ||
function parse(x, state, parsers, handle, transform) { | ||
const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc) | ||
@@ -151,4 +152,4 @@ | ||
const r = state[x.readUInt32BE(i)] = { | ||
schema: String(x.slice(i += 4, i = x.indexOf(0, i))) || 'pg_catalog', | ||
table: String(x.slice(i + 1, i = x.indexOf(0, i + 1))), | ||
schema: x.toString('utf8', i += 4, i = x.indexOf(0, i)) || 'pg_catalog', | ||
table: x.toString('utf8', i + 1, i = x.indexOf(0, i + 1)), | ||
columns: Array(x.readUInt16BE(i += 2)), | ||
@@ -165,3 +166,5 @@ keys: [] | ||
key: x[i++], | ||
name: String(x.slice(i, i = x.indexOf(0, i))), | ||
name: transform.column.from | ||
? transform.column.from(x.toString('utf8', i, i = x.indexOf(0, i))) | ||
: x.toString('utf8', i, i = x.indexOf(0, i)), | ||
type: x.readUInt32BE(i += 1), | ||
@@ -180,3 +183,3 @@ parser: parsers[x.readUInt32BE(i)], | ||
state.date = Time(x.readBigInt64BE(9)) | ||
state.lsn = x.slice(1, 9) | ||
state.lsn = x.subarray(1, 9) | ||
}, | ||
@@ -186,4 +189,3 @@ I: x => { // Insert | ||
const relation = state[x.readUInt32BE(i)] | ||
const row = {} | ||
tuples(x, row, relation.columns, i += 7) | ||
const { row } = tuples(x, relation.columns, i += 7, transform) | ||
@@ -200,9 +202,6 @@ handle(row, { | ||
const key = x[i] === 75 | ||
const row = key || x[i] === 79 | ||
? {} | ||
handle(key || x[i] === 79 | ||
? tuples(x, key ? relation.keys : relation.columns, i += 3, transform).row | ||
: null | ||
tuples(x, row, key ? relation.keys : relation.columns, i += 3) | ||
handle(row, { | ||
, { | ||
command: 'delete', | ||
@@ -218,10 +217,9 @@ relation, | ||
const key = x[i] === 75 | ||
const old = key || x[i] === 79 | ||
? {} | ||
const xs = key || x[i] === 79 | ||
? tuples(x, key ? relation.keys : relation.columns, i += 3, transform) | ||
: null | ||
old && (i = tuples(x, old, key ? relation.keys : relation.columns, i += 3)) | ||
xs && (i = xs.i) | ||
const row = {} | ||
tuples(x, row, relation.columns, i + 3) | ||
const { row } = tuples(x, relation.columns, i + 3, transform) | ||
@@ -232,3 +230,3 @@ handle(row, { | ||
key, | ||
old | ||
old: xs && xs.row | ||
}) | ||
@@ -241,10 +239,12 @@ }, | ||
function tuples(x, row, columns, xi) { | ||
function tuples(x, columns, xi, transform) { | ||
let type | ||
, column | ||
, value | ||
const row = transform.raw ? new Array(columns.length) : {} | ||
for (let i = 0; i < columns.length; i++) { | ||
type = x[xi++] | ||
column = columns[i] | ||
row[column.name] = type === 110 // n | ||
value = type === 110 // n | ||
? null | ||
@@ -258,5 +258,14 @@ : type === 117 // u | ||
: column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))) | ||
transform.raw | ||
? (row[i] = transform.raw === true | ||
? value | ||
: transform.value.from ? transform.value.from(value, column) : value) | ||
: (row[column.name] = transform.value.from | ||
? transform.value.from(value, column) | ||
: value | ||
) | ||
} | ||
return xi | ||
return { i: xi, row: transform.row.from ? transform.row.from(row) : row } | ||
} | ||
@@ -263,0 +272,0 @@ |
@@ -156,3 +156,6 @@ import { Query } from './query.js' | ||
values, | ||
in: values, | ||
in: (...xs) => { | ||
const x = values(...xs) | ||
return x === '()' ? '(null)' : x | ||
}, | ||
select, | ||
@@ -327,1 +330,32 @@ as: select, | ||
export const fromKebab = x => x.replace(/-/g, '_') | ||
function createJsonTransform(fn) { | ||
return function jsonTransform(x, column) { | ||
return column.type === 114 || column.type === 3802 | ||
? Array.isArray(x) | ||
? x.map(jsonTransform) | ||
: Object.entries(x).reduce((acc, [k, v]) => Object.assign(acc, { [fn(k)]: v }), {}) | ||
: x | ||
} | ||
} | ||
toCamel.column = { from: toCamel } | ||
toCamel.value = { from: createJsonTransform(toCamel) } | ||
fromCamel.column = { to: fromCamel } | ||
export const camel = { ...toCamel } | ||
camel.column.to = fromCamel | ||
toPascal.column = { from: toPascal } | ||
toPascal.value = { from: createJsonTransform(toPascal) } | ||
fromPascal.column = { to: fromPascal } | ||
export const pascal = { ...toPascal } | ||
pascal.column.to = fromPascal | ||
toKebab.column = { from: toKebab } | ||
toKebab.value = { from: createJsonTransform(toKebab) } | ||
fromKebab.column = { to: fromKebab } | ||
export const kebab = { ...toKebab } | ||
kebab.column.to = fromKebab |
@@ -8,3 +8,3 @@ import { Readable, Writable } from 'node:stream' | ||
*/ | ||
declare function postgres<T extends PostgresTypeList>(options?: postgres.Options<T>): postgres.Sql<PostgresTypeList extends T ? {} : { [type in keyof T]: T[type] extends { | ||
declare function postgres<T extends Record<string, postgres.PostgresType> = {}>(options?: postgres.Options<T> | undefined): postgres.Sql<Record<string, postgres.PostgresType> extends T ? {} : { [type in keyof T]: T[type] extends { | ||
serialize: (value: infer R) => any, | ||
@@ -19,3 +19,3 @@ parse: (raw: any) => infer R | ||
*/ | ||
declare function postgres<T extends PostgresTypeList>(url: string, options?: postgres.Options<T>): postgres.Sql<PostgresTypeList extends T ? {} : { [type in keyof T]: T[type] extends { | ||
declare function postgres<T extends Record<string, postgres.PostgresType> = {}>(url: string, options?: postgres.Options<T> | undefined): postgres.Sql<Record<string, postgres.PostgresType> extends T ? {} : { [type in keyof T]: T[type] extends { | ||
serialize: (value: infer R) => any, | ||
@@ -28,7 +28,7 @@ parse: (raw: any) => infer R | ||
*/ | ||
interface BaseOptions<T extends PostgresTypeList> { | ||
interface BaseOptions<T extends Record<string, postgres.PostgresType>> { | ||
/** Postgres ip address[s] or domain name[s] */ | ||
host: string | string[]; | ||
host: string | string[] | undefined; | ||
/** Postgres server[s] port[s] */ | ||
port: number | number[]; | ||
port: number | number[] | undefined; | ||
/** unix socket path (usually '/tmp') */ | ||
@@ -89,21 +89,19 @@ path: string | undefined; | ||
column?: ((column: string) => string) | { | ||
/** SQL to JS */ | ||
from?: (column: string) => string; | ||
/** JS to SQL */ | ||
to?: (column: string) => string; | ||
}; | ||
/** Transform function for column names in result rows */ | ||
from?: ((column: string) => string) | undefined; | ||
/** Transform function for column names in interpolated values passed to tagged template literal */ | ||
to?: ((column: string) => string) | undefined; | ||
} | undefined; | ||
/** Transforms incoming and outgoing row values */ | ||
value?: ((value: any) => any) | { | ||
/** SQL to JS */ | ||
from?: (value: unknown) => any; | ||
// /** JS to SQL */ | ||
// to?: (value: unknown) => any; // unused | ||
}; | ||
/** Transform function for values in result rows */ | ||
from?: ((value: unknown, column: postgres.Column<string>) => any) | undefined; | ||
// to?: ((value: unknown) => any) | undefined; // unused | ||
} | undefined; | ||
/** Transforms entire rows */ | ||
row?: ((row: postgres.Row) => any) | { | ||
/** SQL to JS */ | ||
from?: (row: postgres.Row) => any; | ||
// /** JS to SQL */ | ||
// to?: (row: postgres.Row) => any; // unused | ||
}; | ||
/** Transform function for entire result rows */ | ||
from?: ((row: postgres.Row) => any) | undefined; | ||
// to?: ((row: postgres.Row) => any) | undefined; // unused | ||
} | undefined; | ||
}; | ||
@@ -133,10 +131,3 @@ /** Connection parameters */ | ||
interface PostgresTypeList { | ||
[name: string]: postgres.PostgresType; | ||
} | ||
interface JSToPostgresTypeMap { | ||
[name: string]: unknown; | ||
} | ||
declare const PRIVATE: unique symbol; | ||
@@ -172,3 +163,3 @@ | ||
number extends K['length'] ? {} : | ||
(Record<Keys & (keyof T) & (K['length'] extends 0 ? string : K[number]), postgres.SerializableParameter<TT> | postgres.JSONValue> & Record<string, any>) | ||
Partial<(Record<Keys & (keyof T) & (K['length'] extends 0 ? string : K[number]), postgres.ParameterOrJSON<TT> | undefined> & Record<string, any>)> | ||
@@ -218,13 +209,13 @@ type First<T, K extends readonly any[], TT> = | ||
detail?: string; | ||
hint?: string; | ||
internal_position?: string; | ||
internal_query?: string; | ||
where?: string; | ||
schema_name?: string; | ||
table_name?: string; | ||
column_name?: string; | ||
data?: string; | ||
type_name?: string; | ||
constraint_name?: string; | ||
detail?: string | undefined; | ||
hint?: string | undefined; | ||
internal_position?: string | undefined; | ||
internal_query?: string | undefined; | ||
where?: string | undefined; | ||
schema_name?: string | undefined; | ||
table_name?: string | undefined; | ||
column_name?: string | undefined; | ||
data?: string | undefined; | ||
type_name?: string | undefined; | ||
constraint_name?: string | undefined; | ||
@@ -243,2 +234,6 @@ /** Only set when debug is enabled */ | ||
function toPascal(str: string): string; | ||
namespace toPascal { | ||
namespace column { function from(str: string): string; } | ||
namespace value { function from(str: unknown, column: Column<string>): string } | ||
} | ||
/** | ||
@@ -250,3 +245,16 @@ * Convert a PascalCase string to snake_case. | ||
function fromPascal(str: string): string; | ||
namespace fromPascal { | ||
namespace column { function to(str: string): string } | ||
} | ||
/** | ||
* Convert snake_case to and from PascalCase. | ||
*/ | ||
namespace pascal { | ||
namespace column { | ||
function from(str: string): string; | ||
function to(str: string): string; | ||
} | ||
namespace value { function from(str: unknown, column: Column<string>): string } | ||
} | ||
/** | ||
* Convert a snake_case string to camelCase. | ||
@@ -257,2 +265,6 @@ * @param str The string from snake_case to convert | ||
function toCamel(str: string): string; | ||
namespace toCamel { | ||
namespace column { function from(str: string): string; } | ||
namespace value { function from(str: unknown, column: Column<string>): string } | ||
} | ||
/** | ||
@@ -264,3 +276,16 @@ * Convert a camelCase string to snake_case. | ||
function fromCamel(str: string): string; | ||
namespace fromCamel { | ||
namespace column { function to(str: string): string } | ||
} | ||
/** | ||
* Convert snake_case to and from camelCase. | ||
*/ | ||
namespace camel { | ||
namespace column { | ||
function from(str: string): string; | ||
function to(str: string): string; | ||
} | ||
namespace value { function from(str: unknown, column: Column<string>): string } | ||
} | ||
/** | ||
* Convert a snake_case string to kebab-case. | ||
@@ -271,2 +296,6 @@ * @param str The string from snake_case to convert | ||
function toKebab(str: string): string; | ||
namespace toKebab { | ||
namespace column { function from(str: string): string; } | ||
namespace value { function from(str: unknown, column: Column<string>): string } | ||
} | ||
/** | ||
@@ -278,2 +307,15 @@ * Convert a kebab-case string to snake_case. | ||
function fromKebab(str: string): string; | ||
namespace fromKebab { | ||
namespace column { function to(str: string): string } | ||
} | ||
/** | ||
* Convert snake_case to and from kebab-case. | ||
*/ | ||
namespace kebab { | ||
namespace column { | ||
function from(str: string): string; | ||
function to(str: string): string; | ||
} | ||
namespace value { function from(str: unknown, column: Column<string>): string } | ||
} | ||
@@ -299,11 +341,11 @@ const BigInt: PostgresType<bigint>; | ||
interface Options<T extends PostgresTypeList> extends Partial<BaseOptions<T>> { | ||
interface Options<T extends Record<string, postgres.PostgresType>> extends Partial<BaseOptions<T>> { | ||
/** @inheritdoc */ | ||
host?: string; | ||
host?: string | undefined; | ||
/** @inheritdoc */ | ||
port?: number; | ||
port?: number | undefined; | ||
/** @inheritdoc */ | ||
path?: string; | ||
path?: string | undefined; | ||
/** Password of database user (an alias for `password`) */ | ||
pass?: Options<T>['password']; | ||
pass?: Options<T>['password'] | undefined; | ||
/** | ||
@@ -313,9 +355,9 @@ * Password of database user | ||
*/ | ||
password?: string | (() => string | Promise<string>); | ||
password?: string | (() => string | Promise<string>) | undefined; | ||
/** Name of database to connect to (an alias for `database`) */ | ||
db?: Options<T>['database']; | ||
db?: Options<T>['database'] | undefined; | ||
/** Username of database user (an alias for `user`) */ | ||
username?: Options<T>['user']; | ||
username?: Options<T>['user'] | undefined; | ||
/** Postgres ip address or domain name (an alias for `host`) */ | ||
hostname?: Options<T>['host']; | ||
hostname?: Options<T>['host'] | undefined; | ||
/** | ||
@@ -325,3 +367,3 @@ * Disable prepared mode | ||
*/ | ||
no_prepare?: boolean; | ||
no_prepare?: boolean | undefined; | ||
/** | ||
@@ -331,6 +373,6 @@ * Idle connection timeout in seconds | ||
*/ | ||
timeout?: Options<T>['idle_timeout']; | ||
timeout?: Options<T>['idle_timeout'] | undefined; | ||
} | ||
interface ParsedOptions<T extends JSToPostgresTypeMap> extends BaseOptions<{ [name in keyof T]: PostgresType<T[name]> }> { | ||
interface ParsedOptions<T extends Record<string, unknown> = {}> extends BaseOptions<{ [name in keyof T]: PostgresType<T[name]> }> { | ||
/** @inheritdoc */ | ||
@@ -352,14 +394,16 @@ host: string[]; | ||
/** Transforms incoming column names */ | ||
column: { | ||
/** Transform function for column names in result rows */ | ||
from: ((column: string) => string) | undefined; | ||
/** Transform function for column names in interpolated values passed to tagged template literal */ | ||
to: ((column: string) => string) | undefined; | ||
}; | ||
/** Transforms incoming row values */ | ||
value: { | ||
from: ((value: any) => any) | undefined; | ||
/** Transform function for values in result rows */ | ||
from: ((value: any, column?: Column<string>) => any) | undefined; | ||
/** Transform function for interpolated values passed to tagged template literal */ | ||
to: undefined; // (value: any) => any | ||
}; | ||
/** Transforms entire rows */ | ||
row: { | ||
/** Transform function for entire result rows */ | ||
from: ((row: postgres.Row) => any) | undefined; | ||
@@ -401,3 +445,3 @@ to: undefined; // (row: postgres.Row) => any | ||
address: string; | ||
port?: number; | ||
port?: number | undefined; | ||
} | ||
@@ -459,10 +503,10 @@ | ||
writable(options?: { | ||
highWaterMark?: number, | ||
start?: number | ||
}): Promise<Writable>; | ||
highWaterMark?: number | undefined, | ||
start?: number | undefined | ||
} | undefined): Promise<Writable>; | ||
readable(options?: { | ||
highWaterMark?: number, | ||
start?: number, | ||
end?: number | ||
}): Promise<Readable>; | ||
highWaterMark?: number | undefined, | ||
start?: number | undefined, | ||
end?: number | undefined | ||
} | undefined): Promise<Readable>; | ||
@@ -474,3 +518,3 @@ close(): Promise<void>; | ||
truncate(size: number): Promise<void>; | ||
seek(offset: number, whence?: number): Promise<void>; | ||
seek(offset: number, whence?: number | undefined): Promise<void>; | ||
size(): Promise<[{ position: bigint, size: bigint }]>; | ||
@@ -504,3 +548,3 @@ } | ||
| readonly JSONValue[] | ||
| { toJSON(): any } // `toJSON` called by `JSON.stringify`; not typing the return type, typings is strict enough anyway | ||
| { toJSON(): any } // `toJSON` called by `JSON.stringify`; not typing the return type, types definition is strict enough anyway | ||
| { | ||
@@ -519,12 +563,8 @@ readonly [prop: string | number]: | ||
type TransformRow<T> = T extends Serializable | ||
? { '?column?': T; } | ||
: T; | ||
type AsRowList<T extends readonly any[]> = { [k in keyof T]: TransformRow<T[k]> }; | ||
interface Column<T extends string> { | ||
name: T; | ||
type: number; | ||
parser?(raw: string): unknown; | ||
table: number; | ||
number: number; | ||
parser?: ((raw: string) => unknown) | undefined; | ||
} | ||
@@ -562,2 +602,3 @@ | ||
type ExecutionResult<T> = [] & ResultQueryMeta<number, keyof NonNullable<T>>; | ||
type ValuesRowList<T extends readonly any[]> = T[number][keyof T[number]][][] & ResultQueryMeta<T['length'], keyof T[number]>; | ||
type RawRowList<T extends readonly any[]> = Buffer[][] & Iterable<Buffer[][]> & ResultQueryMeta<T['length'], keyof T[number]>; | ||
@@ -580,3 +621,3 @@ type RowList<T extends readonly any[]> = T & Iterable<NonNullable<T[number]>> & ResultQueryMeta<T['length'], keyof T[number]>; | ||
cursor(rows?: number): AsyncIterable<NonNullable<TRow[number]>[]>; | ||
cursor(rows?: number | undefined): AsyncIterable<NonNullable<TRow[number]>[]>; | ||
cursor(cb: (row: [NonNullable<TRow[number]>]) => void): Promise<ExecutionResult<TRow[number]>>; | ||
@@ -589,2 +630,6 @@ cursor(rows: number, cb: (rows: NonNullable<TRow[number]>[]) => void): Promise<ExecutionResult<TRow[number]>>; | ||
interface PendingValuesQuery<TRow extends readonly MaybeRow[]> extends Promise<ValuesRowList<TRow>>, PendingQueryModifiers<TRow[number][keyof TRow[number]][][]> { | ||
describe(): PendingDescribeQuery; | ||
} | ||
interface PendingRawQuery<TRow extends readonly MaybeRow[]> extends Promise<RawRowList<TRow>>, PendingQueryModifiers<Buffer[][]> { | ||
@@ -595,2 +640,3 @@ } | ||
describe(): PendingDescribeQuery; | ||
values(): PendingValuesQuery<TRow>; | ||
raw(): PendingRawQuery<TRow>; | ||
@@ -611,3 +657,13 @@ } | ||
interface Sql<TTypes extends JSToPostgresTypeMap> { | ||
type Fragment = PendingQuery<any> | ||
type ParameterOrJSON<T> = | ||
| SerializableParameter<T> | ||
| JSONValue | ||
type ParameterOrFragment<T> = | ||
| SerializableParameter<T> | ||
| Fragment | ||
interface Sql<TTypes extends Record<string, unknown> = {}> { | ||
/** | ||
@@ -627,3 +683,3 @@ * Query helper | ||
*/ | ||
<T extends readonly (object | undefined)[] = Row[]>(template: TemplateStringsArray, ...parameters: readonly (SerializableParameter<TTypes[keyof TTypes]> | PendingQuery<any>)[]): PendingQuery<AsRowList<T>>; | ||
<T extends readonly (object | undefined)[] = Row[]>(template: TemplateStringsArray, ...parameters: readonly (ParameterOrFragment<TTypes[keyof TTypes]>)[]): PendingQuery<T>; | ||
@@ -641,11 +697,11 @@ CLOSE: {}; | ||
unsafe<T extends any[] = (Row & Iterable<Row>)[]>(query: string, parameters?: SerializableParameter<TTypes[keyof TTypes]>[], queryOptions?: UnsafeQueryOptions): PendingQuery<AsRowList<T>>; | ||
end(options?: { timeout?: number }): Promise<void>; | ||
unsafe<T extends any[] = (Row & Iterable<Row>)[]>(query: string, parameters?: (ParameterOrJSON<TTypes[keyof TTypes]>)[] | undefined, queryOptions?: UnsafeQueryOptions | undefined): PendingQuery<T>; | ||
end(options?: { timeout?: number | undefined } | undefined): Promise<void>; | ||
listen(channel: string, onnotify: (value: string) => void, onlisten?: () => void): ListenRequest; | ||
listen(channel: string, onnotify: (value: string) => void, onlisten?: (() => void) | undefined): ListenRequest; | ||
notify(channel: string, payload: string): PendingRequest; | ||
subscribe(event: string, cb: (row: Row | null, info: ReplicationEvent) => void, onsubscribe?: () => void): Promise<SubscriptionHandle>; | ||
subscribe(event: string, cb: (row: Row | null, info: ReplicationEvent) => void, onsubscribe?: (() => void) | undefined): Promise<SubscriptionHandle>; | ||
largeObject(oid?: number, /** @default 0x00020000 | 0x00040000 */ mode?: number): Promise<LargeObject>; | ||
largeObject(oid?: number | undefined, /** @default 0x00020000 | 0x00040000 */ mode?: number | undefined): Promise<LargeObject>; | ||
@@ -655,5 +711,5 @@ begin<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>; | ||
array<T extends SerializableParameter<TTypes[keyof TTypes]>[] = SerializableParameter<TTypes[keyof TTypes]>[]>(value: T, type?: number): ArrayParameter<T>; | ||
file<T extends readonly any[] = Row[]>(path: string | Buffer | URL | number, options?: { cache?: boolean }): PendingQuery<AsRowList<T>>; | ||
file<T extends readonly any[] = Row[]>(path: string | Buffer | URL | number, args: SerializableParameter<TTypes[keyof TTypes]>[], options?: { cache?: boolean }): PendingQuery<AsRowList<T>>; | ||
array<T extends SerializableParameter<TTypes[keyof TTypes]>[] = SerializableParameter<TTypes[keyof TTypes]>[]>(value: T, type?: number | undefined): ArrayParameter<T>; | ||
file<T extends readonly any[] = Row[]>(path: string | Buffer | URL | number, options?: { cache?: boolean | undefined } | undefined): PendingQuery<T>; | ||
file<T extends readonly any[] = Row[]>(path: string | Buffer | URL | number, args: (ParameterOrJSON<TTypes[keyof TTypes]>)[], options?: { cache?: boolean | undefined } | undefined): PendingQuery<T>; | ||
json(value: JSONValue): Parameter; | ||
@@ -667,6 +723,6 @@ } | ||
*/ | ||
prepare?: boolean; | ||
prepare?: boolean | undefined; | ||
} | ||
interface TransactionSql<TTypes extends JSToPostgresTypeMap> extends Sql<TTypes> { | ||
interface TransactionSql<TTypes extends Record<string, unknown> = {}> extends Sql<TTypes> { | ||
savepoint<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>; | ||
@@ -673,0 +729,0 @@ savepoint<T>(name: string, cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>; |
@@ -11,4 +11,5 @@ { | ||
"strict": true, | ||
"noImplicitAny": true | ||
"noImplicitAny": true, | ||
"exactOptionalPropertyTypes": true | ||
} | ||
} |