postgres - npm Package Compare versions

Comparing version 2.0.0-beta.11 to 3.0.0-rc.1

cjs/package.json (new file)

+{
+  "type": "commonjs"
+}

package.json

 {
   "name": "postgres",
-  "version": "2.0.0-beta.11",
+  "version": "3.0.0-rc.1",
   "description": "Fastest full featured PostgreSQL client for Node.js",
-  "main": "lib/index.js",
-  "types": "types/index.d.ts",
+  "type": "module",
+  "module": "src/index.js",
+  "main": "cjs/src/index.js",
+  "exports": {
+    "import": "./src/index.js",
+    "default": "./cjs/src/index.js"
+  },
+  "typings": "types/index.d.ts",
   "scripts": {
-    "test": "node tests/index.js",
-    "lint": "eslint lib && eslint tests",
-    "prepublishOnly": "npm run lint && npm test"
+    "build": "npm run build:cjs && npm run build:deno",
+    "build:cjs": "node transpile.cjs",
+    "build:deno": "node transpile.deno.js",
+    "test": "npm run test:esm && npm run test:cjs && npm run test:deno",
+    "test:esm": "node tests/index.js",
+    "test:cjs": "npm run build:cjs && pushd cjs/tests && node index.js && popd",
+    "test:deno": "npm run build:deno && pushd deno/tests && deno run --unstable --allow-all --unsafely-ignore-certificate-errors index.js && popd",
+    "lint": "eslint src && eslint tests",
+    "prepare": "npm run build",
+    "prepublishOnly": "npm run lint"
   },
   "files": [
-    "/lib",
+    "/cjs/src",
+    "/cjs/package.json",
+    "/src",
     "/types"
   ],
-  "author": "Rasmus Porsager <rasmus@porsager.com>",
+  "author": "Rasmus Porsager <rasmus@porsager.com> (https://www.porsager.com)",
   "funding": {
     "type": "individual",
     "url": "https://github.com/sponsors/porsager"
   },
   "license": "Unlicense",
   "repository": "porsager/postgres",
   "homepage": "https://github.com/porsager/postgres",
   "bugs": "https://github.com/porsager/postgres/issues",
"keywords": [

@@ -22,0 +42,0 @@ "driver",

@@ -1,8 +0,8 @@

<img align="left" width="440" height="140" alt="Fastest full PostgreSQL nodejs client" src="https://raw.githubusercontent.com/porsager/postgres/master/postgresjs.svg?sanitize=true" />
<img align="left" width="440" height="180" alt="Fastest full PostgreSQL nodejs client" src="https://raw.githubusercontent.com/porsager/postgres/master/postgresjs.svg?sanitize=true" />
- [🚀 Fastest full featured PostgreSQL node client](https://github.com/porsager/postgres-benchmarks#results)
- 🚯 1250 LOC - 0 dependencies
- [🚀 Fastest full-featured node & deno client](https://github.com/porsager/postgres-benchmarks#results)
- 🏷 ES6 Tagged Template Strings at the core
- 🏄‍♀️ Simple surface API
- 💬 Chat on [Gitter](https://gitter.im/porsager/postgres)
- 🖊️ Dynamic query support
- 💬 Chat and help on [Gitter](https://gitter.im/porsager/postgres)

@@ -17,3 +17,3 @@ <br>

**Install**
### Installation
```bash

@@ -23,26 +23,69 @@ $ npm install postgres

**Use**
### Usage
Create your `sql` database instance
```js
// db.js
const postgres = require('postgres')
import postgres from 'postgres'
const sql = postgres({ ...options }) // will default to the same as psql
const sql = postgres({ /* options */ }) // will use psql environment variables
module.exports = sql
export default sql
```
Simply import for use elsewhere
```js
// other.js
const sql = require('./db.js')
// users.js
import sql from './db.js'
const users = await sql`
select name, age from users
`
// users: [{ name: 'Murray', age: 68 }, { name: 'Walter', age: 78 }]
async function getUsersOver(age) {
const users = await sql`
select
name,
age
from users
where age > ${ age }
`
// users = Result [{ name: "Walter", age: 80 }, { name: 'Murray', age: 68 }, ...]
return users
}
async function insertUser({ name, age }) {
const users = await sql`
insert into users
(name, age)
values
(${ name }, ${ age })
returning name, age
`
// users = Result [{ name: "Murray", age: 68 }]
return users
}
```
## Connection options `postgres([url], [options])`
You can use either a `postgres://` url connection string or the options to define your database connection properties. Options in the object will override any present in the url.
## Table of Contents
* [Connection](#connection)
* [Queries](#queries)
* [Building queries](#building-queries)
* [Advanced query methods](#advanced-query-methods)
* [Transactions](#transactions)
* [Listen & notify](#listen--notify)
* [Realtime subscribe](#realtime-subscribe)
* [Numbers, bigint, numeric](#numbers-bigint-numeric)
* [Connection details](#connection-details)
* [Custom Types](#custom-types)
* [Teardown / Cleanup](#teardown--cleanup)
* [Error handling](#error-handling)
* [TypeScript support](#typescript-support)
## Connection
### `postgres([url], [options])`
You can use either a `postgres://` url connection string or the options to define your database connection properties. Options in the object will override any present in the url. Options will fall back to the same environment variables as psql.
```js

@@ -52,86 +95,27 @@ const sql = postgres('postgres://username:password@host:port/database', {

port : 5432, // Postgres server port[s]
path : '', // unix socket path (usually '/tmp')
database : '', // Name of database to connect to
username : '', // Username of database user
password : '', // Password of database user
ssl : false, // true, prefer, require, tls.connect options
max : 10, // Max number of connections
idle_timeout : 0, // Idle connection timeout in seconds
connect_timeout : 30, // Connect timeout in seconds
no_prepare : false, // No automatic creation of prepared statements
types : [], // Array of custom types, see more below
onnotice : fn, // Defaults to console.log
onparameter : fn, // (key, value) when server param change
debug : fn, // Is called with (connection, query, params)
transform : {
column : fn, // Transforms incoming column names
value : fn, // Transforms incoming row values
row : fn // Transforms entire rows
},
connection : {
application_name : 'postgres.js', // Default application_name
... // Other connection parameters
},
target_session_attrs : null, // Use 'read-write' with multiple hosts to
// ensure only connecting to primary
fetch_array_types : true, // Disable automatically fetching array types
// on initial connection.
...and more
})
```
### SSL
More info for the `ssl` option can be found in the [Node.js docs for tls connect options](https://nodejs.org/dist/latest-v10.x/docs/api/tls.html#tls_new_tls_tlssocket_socket_options).
Although it is [vulnerable to MITM attacks](https://security.stackexchange.com/a/229297/174913), a common configuration for the `ssl` option for some cloud providers like Heroku is to set `rejectUnauthorized` to `false` (if `NODE_ENV` is `production`):
```js
const sql =
  process.env.NODE_ENV === 'production'
    ? // "Unless you're using a Private or Shield Heroku Postgres database, Heroku Postgres does not currently support verifiable certificates"
      // https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl
      postgres({ ssl: { rejectUnauthorized: false } })
    : postgres();
```
More options can be found in the [Connection details section](#connection-details).
### Multi host connections - High Availability (HA)
Connection uri strings with multiple hosts work as in [`psql` multiple host URIs](https://www.postgresql.org/docs/13/libpq-connect.html#LIBPQ-MULTIPLE-HOSTS)
Connecting to the specified hosts/ports will be tried in order, and on a successful connection retries will be reset. This ensures that hosts can come up and down seamlessly to your application.
If you specify `target_session_attrs: 'read-write'` or `PGTARGETSESSIONATTRS=read-write` Postgres.js will only connect to a writeable host, allowing for zero downtime failovers.
## Queries
### ```await sql`...` -> Result[]```
Postgres.js utilizes [Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) to process query parameters **before** interpolation. Using tagged template literals benefits developers by:
1. **Enforcing** safe query generation
2. Giving the ` sql`` ` function powerful [utility](#dynamic-inserts) and [query building](#building-queries) features.
Any generic value will be serialized according to an inferred type, and replaced by a PostgreSQL protocol placeholder `$1, $2, ...`. The parameters are then sent separately to the database which handles escaping & casting.
All queries will return a `Result` array, with objects mapping column names to each row.
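For illustration only, a sketch of what the database ends up receiving (the table and values are made up):
```js
const age = 68
const users = await sql`select name from users where age > ${ age }`
// Conceptually, the database receives:
//   query:      select name from users where age > $1
//   parameters: [68]
```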
### Auto fetching of array types
When Postgres.js first connects to the database it automatically fetches array type information.
If you have revoked access to `pg_catalog` this feature will no longer work and will need to be disabled.
You can disable fetching array types by setting `fetch_array_types` to `false` when creating an instance.
### Environment Variables for Options
It is also possible to connect to the database without a connection string or any options. Postgres.js will fall back to the common environment variables used by `psql` as in the table below:
```js
const sql = postgres()
```
| Option | Environment Variables |
| ----------------- | ------------------------ |
| `host` | `PGHOST` |
| `port` | `PGPORT` |
| `database` | `PGDATABASE` |
| `username` | `PGUSERNAME` or `PGUSER` |
| `password` | `PGPASSWORD` |
| `idle_timeout` | `PGIDLE_TIMEOUT` |
| `connect_timeout` | `PGCONNECT_TIMEOUT` |
## Query ```sql` ` -> Promise```
A query will always return a `Promise` which resolves to a results array `[...]{ count, command, columns }`. Destructuring is great to immediately access the first element.
```js
const [new_user] = await sql`
const xs = await sql`
insert into users (

@@ -146,13 +130,15 @@ name, age

// new_user = { user_id: 1, name: 'Murray', age: 68 }
// xs = [{ user_id: 1, name: 'Murray', age: 68 }]
```
#### Query parameters
Parameters are automatically inferred and handled by Postgres so that SQL injection isn't possible. No special handling is necessary, simply use JS tagged template literals as usual.
### Query parameters
Parameters are automatically extracted and handled by the database so that SQL injection isn't possible. No special handling is necessary, simply use tagged template literals as usual. **Dynamic queries and query building can be seen in the [building queries section](#building-queries).**
> Please note that queries are first executed when `awaited` – or manually by using `.execute()`.
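Since queries are lazy, a minimal sketch of kicking one off explicitly (illustrative only):
```js
// Nothing is sent to the database until the query is awaited or .execute() is called
const query = sql`select name, age from users`.execute() // starts executing right away
const users = await query                                // then await the in-flight result
```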
```js
const name = 'Mur'
, age = 60
let search = 'Mur'
const users = await sql`

@@ -164,164 +150,28 @@ select

where
name like ${ search + '%' }
name like ${ name + '%' }
and age > ${ age }
`
// users = [{ name: 'Murray', age: 68 }]
```
> Be careful with quotation marks here. Because Postgres infers the types, you don't need to wrap your interpolated parameters in quotes like `'${name}'`. In fact, this will cause an error because the tagged template replaces `${name}` with `$1` in the query string, leaving Postgres to do the interpolation. If you wrap that in a string, Postgres will see `'$1'` and interpret it as a string as opposed to a parameter.
> Be careful with quotation marks here. Because Postgres infers column types, you do not need to wrap your interpolated parameters in quotes like `'${name}'`. This will cause an error because the tagged template replaces `${name}` with `$1` in the query string, leaving Postgres to do the interpolation. If you wrap that in a string, Postgres will see `'$1'` and interpret it as a string as opposed to a parameter.
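To illustrate the difference (a sketch with a hypothetical `name` parameter):
```js
const name = 'Murray'

// Correct - the value is sent as a separate parameter ($1):
await sql`select * from users where name = ${ name }`

// Wrong - Postgres receives the literal string '$1', not a parameter:
await sql`select * from users where name = '${ name }'`
```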
#### Arrays
Arrays will be handled by replacement parameters too, so `where in` queries are also simple.
```js
const users = await sql`
  select
    *
  from users
  where age in (${ [68, 75, 23] })
`
```
### Dynamic column selection
```js
const columns = ['name', 'age']

sql`
  select
    ${ sql(columns) }
  from users
`
// Which results in:
select "name", "age" from users
```
### TypeScript support
### Dynamic inserts
`postgres` has TypeScript support. You can pass a row list type for your queries in this way:
```ts
interface User {
id: number
name: string
}
const users = await sql<User[]>`SELECT * FROM users`
users[0].id // ok => number
users[1].name // ok => string
users[0].invalid // fails: `invalid` does not exist on `User`
```
However, be sure to check the array length to avoid accessing properties of `undefined` rows:
```ts
const users = await sql<User[]>`SELECT * FROM users WHERE id = ${id}`
if (!users.length)
throw new Error('Not found')
return users[0]
```
You can also use destructuring when you only care about a fixed number of rows.
In this case, we recommend using tuples to handle `undefined` properly:
```ts
const [user]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}`
if (!user) // => User | undefined
throw new Error('Not found')
return user // => User
// NOTE:
const [first, second]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` // fails: `second` does not exist on `[User?]`
// vs
const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` // ok but should fail
```
All the public API is typed. Also, TypeScript support is still in beta. Feel free to open an issue if you have trouble with types.
## Stream ```sql` `.stream(fn) -> Promise```
If you want to handle rows returned by a query one by one, you can use `.stream` which returns a promise that resolves once there are no more rows.
```js
await sql`
select created_at, name from events
`.stream(row => {
// row = { created_at: '2019-11-22T14:22:00Z', name: 'connected' }
})
// No more rows
```
## Cursor ```sql` `.cursor([rows = 1], fn) -> Promise```
Use cursors if you need to throttle the amount of rows being returned from a query. New results won't be requested until the promise / async callback function has resolved.
```js
await sql`
select * from generate_series(1,4) as x
`.cursor(async row => {
// row = { x: 1 }
await http.request('https://example.com/wat', { row })
})
// No more rows
```
A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as the first argument. That is useful if you can process the rows in parallel, like in this example:
```js
await sql`
select * from generate_series(1,1000) as x
`.cursor(10, async rows => {
// rows = [{ x: 1 }, { x: 2 }, ... ]
await Promise.all(rows.map(row =>
http.request('https://example.com/wat', { row })
))
})
```
If an error is thrown inside the callback function no more rows will be requested and the promise will reject with the thrown error.
You can also stop receiving any more rows early by returning an end token `sql.END` from the callback function.
```js
await sql`
select * from generate_series(1,1000) as x
`.cursor(row => {
return Math.random() > 0.9 && sql.END
})
```
## Raw ```sql``.raw()```
Using `.raw()` will return rows as an array with `Buffer` values for each column, instead of objects.
This can be useful to receive identically named columns, or for specific performance / transformation reasons. The column definitions are still included on the result array with access to parsers for each column.
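A minimal sketch (the duplicate column name is deliberate):
```js
const rows = await sql`select 1 as x, 2 as x`.raw()
// rows[0] = [ Buffer, Buffer ] - one Buffer per column instead of an object,
// so identically named columns no longer collide
```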
## Listen and notify
When you call listen, a dedicated connection will automatically be made to ensure that you receive notifications in real time. This connection will be used for any further calls to listen. Listen returns a promise which resolves once the `LISTEN` query to Postgres completes, or if there is already a listener active.
```js
await sql.listen('news', payload => {
const json = JSON.parse(payload)
console.log(json.this) // logs 'is'
})
```
Notify can be done as usual in sql, or by using the `sql.notify` method.
```js
sql.notify('news', JSON.stringify({ no: 'this', is: 'news' }))
```
## Tagged template function ``` sql`` ```
[Tagged template functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#Tagged_templates) are not just ordinary template literal strings. They allow the function to handle any parameters within before interpolation. This means that they can be used to enforce a safe way of writing queries, which is what Postgres.js does. Any generic value will be serialized according to an inferred type, and replaced by PostgreSQL protocol placeholders `$1, $2, ...`, then sent to the database as parameters to let it handle any need for escaping / casting.
This also means you cannot write dynamic queries or concatenate queries together by simple string manipulation. To enable dynamic queries in a safe way, the `sql` function doubles as a regular function which escapes any value properly. It also includes overloads for common cases of inserting, selecting, updating and querying.
## Dynamic query helpers - `sql()` inside tagged template
Postgres.js has a safe, ergonomic way to aid you in writing queries. This makes it easier to write dynamic `insert`, `select` and `update` queries, and pass `where` parameters.
#### Insert
```js
const user = {

@@ -338,8 +188,7 @@ name: 'Murray',

// Is translated into this query:
insert into users (name, age) values ($1, $2)
// Which results in:
insert into users ("name", "age") values ($1, $2)
```
You can leave out the column names and simply do `sql(user)` if you want to get all fields from the object as columns, but be careful not to allow users to supply columns you don't want.
**You can omit column names and simply execute `sql(user)` to get all the fields from the object as columns**. Be careful not to allow users to supply columns that you do not want to be inserted.

@@ -350,3 +199,2 @@ #### Multiple inserts in one query

```js
const users = [{

@@ -356,22 +204,27 @@ name: 'Murray',

garbage: 'ignore'
}, {
},
{
name: 'Walter',
age: 78
age: 80
}]
sql`
insert into users ${
sql(users, 'name', 'age')
}
`
sql`insert into users ${ sql(users, 'name', 'age') }`
// Is translated to:
insert into users ("name", "age") values ($1, $2), ($3, $4)
// Here you can also omit column names which will use object keys as columns
sql`insert into users ${ sql(users) }`
// Which results in:
insert into users ("name", "age") values ($1, $2), ($3, $4)
```
#### Update
### Dynamic columns in updates
This is also useful for update queries
```js
const user = {
id: 1,
name: 'Muray'
name: 'Murray',
age: 68
}

@@ -381,147 +234,237 @@

update users set ${
sql(user, 'name')
} where
id = ${ user.id }
sql(user, 'name', 'age')
}
where user_id = ${ user.id }
`
// Is translated into this query:
update users set name = $1 where id = $2
// Which results in:
update users set "name" = $1, "age" = $2 where user_id = $3
```
#### Select
```js
const columns = ['name', 'age']

sql`
  select ${
    sql(columns)
  } from users
`
// Is translated into this query:
select name, age from users
```
### Dynamic values and `where in`
Value lists can also be created dynamically, making `where in` queries simple too.
```js
const users = await sql`
  select
    *
  from users
  where age in ${ sql([68, 75, 23]) }
`
```
or
```js
const [{ a, b, c }] = await sql`
  select
    *
  from (values ${ sql(['a', 'b', 'c']) }) as x(a, b, c)
`
```
## Building queries
Postgres.js features a simple dynamic query builder by conditionally appending/omitting query fragments.
It works by nesting ` sql`` ` fragments within other ` sql`` ` calls or fragments. This allows you to build dynamic queries safely without risking sql injections through usual string concatenation.
### Partial queries
```js
const olderThan = x => sql`and age > ${ x }`

const filterAge = true

sql`
  select
    *
  from users
  where name is not null ${
    filterAge
      ? olderThan(50)
      : sql``
  }
`
// Which results in:
select * from users where name is not null
// Or
select * from users where name is not null and age > 50
```
### Dynamic filters
```js
sql`
  select
    *
  from users ${
    id
      ? sql`where user_id = ${ id }`
      : sql``
  }
`
// Which results in:
select * from users
// Or
select * from users where user_id = $1
```
#### Dynamic table name
```js
const table = 'users'

sql`
  select id from ${ sql(table) }
`
// Is translated into this query:
select id from users
```
### SQL functions
Using keywords or calling functions dynamically is also possible by using ``` sql`` ``` fragments.
```js
const date = null

sql`
  update users set updated_at = ${ date || sql`now()` }
`
// Which results in:
update users set updated_at = now()
```
### Table names
Dynamic identifiers like table names and column names are also supported like so:
```js
const table = 'users'
    , column = 'id'

sql`
  select ${ sql(column) } from ${ sql(table) }
`
// Which results in:
select "id" from "users"
```
## Advanced query methods
### .cursor()
#### ```await sql``.cursor([rows = 1], [fn])```
Use cursors if you need to throttle the amount of rows being returned from a query. You can use a cursor either as an [async iterable](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of) or with a callback function. For a callback function new results won't be requested until the promise / async callback function has resolved.
##### callback function
```js
await sql`
  select
    *
  from generate_series(1,4) as x
`.cursor(async ([row]) => {
  // row = { x: 1 }
  await http.request('https://example.com/wat', { row })
})
```
##### for await...of
```js
// for await...of
const cursor = sql`select * from generate_series(1,4) as x`.cursor()
for await (const [row] of cursor) {
  // row = { x: 1 }
  await http.request('https://example.com/wat', { row })
}
```
A single row will be returned by default, but you can also request batches by setting the number of rows desired in each batch as the first argument to `.cursor`:
```js
await sql`
  select
    *
  from generate_series(1,1000) as x
`.cursor(10, async rows => {
  // rows = [{ x: 1 }, { x: 2 }, ... ]
  await Promise.all(rows.map(row =>
    http.request('https://example.com/wat', { row })
  ))
})
```
If an error is thrown inside the callback function no more rows will be requested and the outer promise will reject with the thrown error.
You can close the cursor early either by calling `break` in the `for await...of` loop, or by returning the token `sql.CLOSE` from the callback function.
```js
await sql`
  select * from generate_series(1,1000) as x
`.cursor(row => {
  return Math.random() > 0.9 && sql.CLOSE
})
```
#### Arrays `sql.array(Array)`
PostgreSQL has a native array type which is similar to js arrays, but only allows the same type and shape for nested items. This method automatically infers the item type and serializes js arrays into PostgreSQL arrays.
```js
const types = sql`
  insert into types (
    integers,
    strings,
    dates,
    buffers,
    multi
  ) values (
    ${ sql.array([1,2,3,4,5]) },
    ${ sql.array(['Hello', 'Postgres']) },
    ${ sql.array([new Date(), new Date(), new Date()]) },
    ${ sql.array([Buffer.from('Hello'), Buffer.from('Postgres')]) },
    ${ sql.array([[[1,2],[3,4]],[[5,6],[7,8]]]) }
  )
`
```
#### JSON `sql.json(object)`
```js
const body = { hello: 'postgres' }

const [{ json }] = await sql`
  insert into json (
    body
  ) values (
    ${ sql.json(body) }
  )
  returning body
`
// json = { hello: 'postgres' }
```
## File query `sql.file(path, [args], [options]) -> Promise`
Using an `.sql` file for a query. The contents will be cached in memory so that the file is only read once.
```js
sql.file(path.join(__dirname, 'query.sql'), [], {
  cache: true // Default true - disable for single shot queries or memory reasons
})
```
## Subscribe / Realtime
Postgres.js implements the logical replication protocol of PostgreSQL to support subscription to realtime updates of `insert`, `update` and `delete` operations.
> **NOTE** To make this work you must [create the proper publications in your database](https://www.postgresql.org/docs/current/sql-createpublication.html), enable logical replication by setting `wal_level = logical` in `postgresql.conf` and connect using either a replication or superuser.
### Quick start
#### Create a publication (eg. in migration)
```sql
CREATE PUBLICATION alltables FOR ALL TABLES
```
#### Subscribe to updates
```js
const sql = postgres({ publications: 'alltables' })

const { unsubscribe } = await sql.subscribe('insert:events', row =>
  // tell about new event row over eg. websockets or do something else
)
```
### Subscribe pattern
You can subscribe to specific operations, tables or even rows with primary keys.
### `operation` `:` `schema` `.` `table` `=` `primary_key`
**`operation`** is one of ``` * | insert | update | delete ``` and defaults to `*`
**`schema`** defaults to `public.`
**`table`** is a specific table name and defaults to `*`
**`primary_key`** can be used to only subscribe to specific rows
#### Examples
```js
sql.subscribe('*', () => /* everything */ )
sql.subscribe('insert', () => /* all inserts */ )
sql.subscribe('*:users', () => /* all operations on the public.users table */ )
sql.subscribe('delete:users', () => /* all deletes on the public.users table */ )
sql.subscribe('update:users=1', () => /* all updates on the users row with a primary key = 1 */ )
```
### .forEach()
#### ```await sql``.forEach(fn)```
If you want to handle rows returned by a query one by one, you can use `.forEach` which returns a promise that resolves once there are no more rows.
```js
await sql`
  select created_at, name from events
`.forEach(row => {
  // row = { created_at: '2019-11-22T14:22:00Z', name: 'connected' }
})
// No more rows
```
### describe
#### ```await sql``.describe([rows = 1], fn) -> Result[]```
Rather than executing a given query, `.describe` will return information utilized in the query process. This information can include the query identifier, column types, etc.
This is useful for debugging and analyzing your Postgres queries. Furthermore, **`.describe` will give you access to the final generated query string that would be executed.**
### Raw
#### ```sql``.raw()```
Using `.raw()` will return rows as an array with `Buffer` values for each column, instead of objects.
This can be useful to receive identically named columns, or for specific performance/transformation reasons. The column definitions are still included on the result array, plus access to parsers for each column.
### File
#### `await sql.file(path, [args], [options]) -> Result[]`
Using a `.sql` file for a query is also supported with optional parameters to use if the file includes `$1, $2, etc`
```js
const result = await sql.file('query.sql', ['Murray', 68])
```
### Canceling Queries in Progress
Postgres.js supports [canceling queries in progress](https://www.postgresql.org/docs/7.1/protocol-protocol.html#AEN39000). It works by opening a new connection with a protocol-level startup message to cancel the current query running on a specific connection. That means there is no guarantee that the query will be canceled, and due to possible race conditions it might even result in canceling another query. This is fine for long-running queries, but in the case of high load and fast queries it might be better to simply ignore results instead of canceling.
```js
const query = sql`select pg_sleep(100)`.execute()
setTimeout(() => query.cancel(), 100)
const result = await query
```
### Unsafe raw string queries
<details>
<summary>Advanced unsafe use cases</summary>

### `await sql.unsafe(query, [args], [options]) -> Result[]`
If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful.
```js
sql.unsafe('select ' + danger + ' from users where id = ' + dragons)
```
</details>
## Transactions
#### BEGIN / COMMIT `sql.begin(fn) -> Promise`
Calling begin with a function will return a Promise which resolves with the returned value from the function. The function provides a single argument which is `sql` with a context of the newly created transaction. `BEGIN` is automatically called, and if the Promise fails `ROLLBACK` will be called. If it succeeds `COMMIT` will be called.
#### BEGIN / COMMIT `await sql.begin([options = ''], fn) -> fn()`
Use `sql.begin` to start a new transaction. Postgres.js will reserve a connection for the transaction and supply a scoped `sql` instance for all transaction uses in the callback function. `sql.begin` will resolve with the returned value from the callback function.
`BEGIN` is automatically sent with the optional options, and if anything fails `ROLLBACK` will be called so the connection can be released and execution can continue.
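For example, a sketch passing an options string through to `BEGIN` (table and values are illustrative):
```js
// 'read write' is appended to the BEGIN statement, i.e. BEGIN read write
await sql.begin('read write', async sql => {
  await sql`insert into users (name, age) values ('Murray', 68)`
})
```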

@@ -535,3 +478,3 @@ ```js

) values (
'Alice'
'Murray'
)

@@ -553,8 +496,19 @@ `

It's also possible to pipeline the requests in a transaction if needed by returning an array with queries from the callback function like this:
```js
const result = await sql.begin(sql => [
  sql`update ...`,
  sql`update ...`,
  sql`insert ...`
])
```
#### SAVEPOINT `sql.savepoint([name], fn) -> Promise`
#### SAVEPOINT `await sql.savepoint([name], fn) -> fn()`
```js
sql.begin(async sql => {
sql.begin('read write', async sql => {
const [user] = await sql`

@@ -564,3 +518,3 @@ insert into users (

) values (
'Alice'
'Murray'
)

@@ -594,62 +548,68 @@ `

## Custom Types
You can add ergonomic support for custom types, or simply pass an object with a `{ type, value }` signature that contains the Postgres `oid` for the type and the correctly serialized value. _(`oid` values for types can be found in the `pg_catalog.pg_types` table.)_
Adding Query helpers is the recommended approach which can be done like this:
```js
const sql = postgres({
  types: {
    rect: {
      // The pg_types oid to pass to the db along with the serialized value.
      to        : 1337,
      // An array of pg_types oids to handle when parsing values coming from the db.
      from      : [1337],
      // Function that transforms values before sending them to the db.
      serialize : ({ x, y, width, height }) => [x, y, width, height],
      // Function that transforms values coming from the db.
      parse     : ([x, y, width, height]) => ({ x, y, width, height })
    }
  }
})

// Now you can use sql.types.rect() as specified above
const [custom] = sql`
  insert into rectangles (
    name,
    rect
  ) values (
    'wat',
    ${ sql.types.rect({ x: 13, y: 37, width: 42, height: 80 }) }
  )
  returning *
`
// custom = { name: 'wat', rect: { x: 13, y: 37, width: 42, height: 80 } }
```
## Teardown / Cleanup
To ensure proper teardown and cleanup on server restarts use `sql.end({ timeout: 0 })` before `process.exit()`.
Calling `sql.end()` will reject new queries and return a Promise which resolves when all queries are finished and the underlying connections are closed. If a timeout is provided any pending queries will be rejected once the timeout is reached and the connections will be destroyed.
#### Sample shutdown using [Prexit](http://npmjs.com/prexit)
```js
import prexit from 'prexit'

prexit(async () => {
  await sql.end({ timeout: 5 })
  await new Promise(r => server.close(r))
})
```
## Listen & notify
When you call `.listen`, a dedicated connection will be created to ensure that you receive notifications in real-time. This connection will be used for any further calls to `.listen`.
`.listen` returns a promise which resolves once the `LISTEN` query to Postgres completes, or if there is already a listener active.
```js
await sql.listen('news', payload => {
  const json = JSON.parse(payload)
  console.log(json.this) // logs 'is'
})
```
Notify can be done as usual in sql, or by using the `sql.notify` method.
```js
sql.notify('news', JSON.stringify({ no: 'this', is: 'news' }))
```
## Realtime subscribe
Postgres.js implements the logical replication protocol of PostgreSQL to support subscription to real-time updates of `insert`, `update` and `delete` operations.
> **NOTE** To make this work you must [create the proper publications in your database](https://www.postgresql.org/docs/current/sql-createpublication.html), enable logical replication by setting `wal_level = logical` in `postgresql.conf` and connect using either a replication or superuser.
### Quick start
#### Create a publication (eg. in migration)
```sql
CREATE PUBLICATION alltables FOR ALL TABLES
```
#### Subscribe to updates
```js
const sql = postgres({ publications: 'alltables' })

const { unsubscribe } = await sql.subscribe('insert:events', (row, { command, relation, key, old }) =>
  // tell about new event row over eg. websockets or do something else
)
```
### Subscribe pattern
You can subscribe to specific operations, tables, or even rows with primary keys.
#### `operation` `:` `schema` `.` `table` `=` `primary_key`
**`operation`** is one of ``` * | insert | update | delete ``` and defaults to `*`
**`schema`** defaults to `public.`
**`table`** is a specific table name and defaults to `*`
**`primary_key`** can be used to only subscribe to specific rows
### Examples
```js
sql.subscribe('*', () => /* everything */ )
sql.subscribe('insert', () => /* all inserts */ )
sql.subscribe('*:users', () => /* all operations on the public.users table */ )
sql.subscribe('delete:users', () => /* all deletes on the public.users table */ )
sql.subscribe('update:users=1', () => /* all updates on the users row with a primary key = 1 */ )
```

@@ -661,3 +621,3 @@

Since Node.js v10.4 we can use [`BigInt`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) to match the PostgreSQL type `bigint` which is returned for eg. `count(*)`. Unfortunately it doesn't work with `JSON.stringify` out of the box, so Postgres.js will return it as a string.
Since Node.js v10.4 we can use [`BigInt`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) to match the PostgreSQL type `bigint` which is returned for eg. `count(*)`. Unfortunately, it doesn't work with `JSON.stringify` out of the box, so Postgres.js will return it as a string.

@@ -674,6 +634,71 @@ If you want to use `BigInt` you can add this custom type:

There is currently no way to handle `numeric / decimal` in a native way in Javascript, so these and similar will be returned as `string`. You can also handle types like these using [custom types](#types) if you want to.
There is currently no guaranteed way to handle `numeric / decimal` types in native Javascript. **These [and similar] types will be returned as a `string`**. The best way in this case is to use [custom types](#custom-types).
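For `bigint` specifically, the built-in `postgres.BigInt` type mentioned above can be registered as a custom type (a minimal sketch):
```js
import postgres from 'postgres'

const sql = postgres({
  types: {
    bigint: postgres.BigInt // parse bigint columns into js BigInt instead of string
  }
})
```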
## The Connection Pool
## Connection details
### All Postgres options
```js
const sql = postgres('postgres://username:password@host:port/database', {
host : '', // Postgres ip address[s] or domain name[s]
port : 5432, // Postgres server port[s]
path : '', // unix socket path (usually '/tmp')
database : '', // Name of database to connect to
username : '', // Username of database user
password : '', // Password of database user
ssl : false, // true, prefer, require, tls.connect options
max : 10, // Max number of connections
max_lifetime : null, // Max lifetime in seconds (more info below)
idle_timeout : 0, // Idle connection timeout in seconds
connect_timeout : 30, // Connect timeout in seconds
no_prepare : false, // No automatic creation of prepared statements
types : [], // Array of custom types, see more below
onnotice : fn, // Defaults to console.log
onparameter : fn, // (key, value) when server param change
debug : fn, // Is called with (connection, query, params)
transform : {
column : fn, // Transforms incoming column names
value : fn, // Transforms incoming row values
row : fn // Transforms entire rows
},
connection : {
application_name : 'postgres.js', // Default application_name
... // Other connection parameters
},
target_session_attrs : null, // Use 'read-write' with multiple hosts to
// ensure only connecting to primary
fetch_types : true, // Automatically fetches types on initial connection
})
```
Note that `max_lifetime = 60 * (30 + Math.random() * 30)` by default. This resolves to an interval between 45 and 90 minutes to optimize for the benefits of prepared statements **and** working nicely with Linux's OOM killer.
### SSL
Although [vulnerable to MITM attacks](https://security.stackexchange.com/a/229297/174913), a common configuration for the `ssl` option for some cloud providers is to set `rejectUnauthorized` to `false` (if `NODE_ENV` is `production`):
```js
const sql =
process.env.NODE_ENV === 'production'
? // "Unless you're using a Private or Shield Heroku Postgres database, Heroku Postgres does not currently support verifiable certificates"
// https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl
postgres({ ssl: { rejectUnauthorized: false } })
: postgres()
```
For more information regarding `ssl` with `postgres`, check out the [Node.js documentation for tls](https://nodejs.org/dist/latest-v16.x/docs/api/tls.html#new-tlstlssocketsocket-options).
### Multi-host connections - High Availability (HA)
Multiple connection strings can be passed to `postgres()` in the form of `postgres('postgres://localhost:5432,localhost:5433', ...)`. This works the same as the native `psql` command. Read more at [multiple host URIs](https://www.postgresql.org/docs/13/libpq-connect.html#LIBPQ-MULTIPLE-HOSTS).
Connections will be attempted in order of the specified hosts/ports. On a successful connection, all retries will be reset. This ensures that hosts can come up and down seamlessly.
If you specify `target_session_attrs: 'primary'` or `PGTARGETSESSIONATTRS=primary` Postgres.js will only connect to the primary host, allowing for zero downtime failovers.
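For example (host names and the database here are illustrative):
```js
const sql = postgres('postgres://localhost:5432,localhost:5433/mydb', {
  target_session_attrs: 'primary' // only ever connect to the primary host
})
```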
### The Connection Pool
Connections are created lazily once a query is created. This means that simply doing `const sql = postgres(...)` won't have any effect other than instantiating a new `sql` instance.

@@ -689,47 +714,128 @@

### Idle timeout
Since this library automatically creates prepared statements, it also has a default max lifetime for connections to prevent memory bloat on the database itself. This is a random interval for each connection between 45 and 90 minutes. This allows multiple connections to come up and down seamlessly without user interference.
### Connection timeout
By default, connections will not close until `.end()` is called. However, it may be useful to have them close automatically when:
- there is no activity for some period of time
- if using Postgres.js in Lamdas / Serverless environments
- if using Postgres.js with a database service that automatically closes the connection after some time (see [`ECONNRESET` issue](https://github.com/porsager/postgres/issues/179))
- re-instantiating multiple ` sql`` ` instances
- using Postgres.js in a Serverless environment (Lambda, etc.)
- using Postgres.js with a database service that automatically closes connections after some time (see [`ECONNRESET` issue](https://github.com/porsager/postgres/issues/179))
This can be done using the `idle_timeout` option to specify the amount of seconds to wait before automatically closing an idle connection.
This can be done using the `idle_timeout` or `max_lifetime` options. These configuration options specify the number of seconds to wait before automatically closing an idle connection and the maximum time a connection can exist, respectively.
For example, to close idle connections after 2 seconds:
For example, to close a connection that has either been idle for 20 seconds or existed for more than 30 minutes:
```js
const sql = postgres({
idle_timeout: 2
idle_timeout: 20,
max_lifetime: 60 * 30
})
```
## Prepared statements
### Auto fetching of array types
Postgres.js will automatically fetch table/array-type information when it first connects to a database.
If you have revoked access to `pg_catalog` this feature will no longer work and will need to be disabled.
You can disable this feature by setting `fetch_types` to `false`.
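For example (a minimal sketch):
```js
const sql = postgres({
  fetch_types: false // skip querying pg_catalog for type information on connect
})
```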
### Environmental variables
It is also possible to connect to the database without a connection string or any options. Postgres.js will fall back to the common environment variables used by `psql` as in the table below:
```js
const sql = postgres()
```
| Option | Environment Variables |
| ----------------- | ------------------------ |
| `host` | `PGHOST` |
| `port` | `PGPORT` |
| `database` | `PGDATABASE` |
| `username` | `PGUSERNAME` or `PGUSER` |
| `password` | `PGPASSWORD` |
| `idle_timeout` | `PGIDLE_TIMEOUT` |
| `connect_timeout` | `PGCONNECT_TIMEOUT` |
### Prepared statements
Prepared statements will automatically be created for any queries where it can be inferred that the query is static. This can be disabled by using the `no_prepare` option, which is useful, for instance, when [using PGBouncer in `transaction mode`](https://github.com/porsager/postgres/issues/93).
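A sketch of disabling it (the typings also expose a newer `prepare: false` option, with `no_prepare` marked deprecated):
```js
const sql = postgres({
  no_prepare: true // disable automatic prepared statements, e.g. for PGBouncer in transaction mode
})
```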
<details><summary><code>sql.unsafe</code> - Advanced unsafe use cases</summary>

### Unsafe queries `sql.unsafe(query, [args], [options]) -> promise`
If you know what you're doing, you can use `unsafe` to pass any string you'd like to postgres. Please note that this can lead to sql injection if you're not careful.
```js
sql.unsafe('select ' + danger + ' from users where id = ' + dragons)
```
</details>

## Custom Types
You can add ergonomic support for custom types, or simply use `sql.typed(value, type)` inline, where type is the PostgreSQL `oid` for the type and the correctly serialized string. _(`oid` values for types can be found in the `pg_catalog.pg_types` table.)_
Adding Query helpers is the cleanest approach which can be done like this:
```js
const sql = postgres({
  types: {
    rect: {
      // The pg_types oid to pass to the db along with the serialized value.
      to        : 1337,
      // An array of pg_types oids to handle when parsing values coming from the db.
      from      : [1337],
      // Function that transforms values before sending them to the db.
      serialize : ({ x, y, width, height }) => [x, y, width, height],
      // Function that transforms values coming from the db.
      parse     : ([x, y, width, height]) => ({ x, y, width, height })
    }
  }
})

// Now you can use sql.typed.rect() as specified above
const [custom] = sql`
  insert into rectangles (
    name,
    rect
  ) values (
    'wat',
    ${ sql.typed.rect({ x: 13, y: 37, width: 42, height: 80 }) }
  )
  returning *
`
// custom = { name: 'wat', rect: { x: 13, y: 37, width: 42, height: 80 } }
```
## Errors
Errors are all thrown to related queries and never globally. Errors coming from PostgreSQL itself are always in the [native Postgres format](https://www.postgresql.org/docs/current/errcodes-appendix.html), and the same goes for any [Node.js errors](https://nodejs.org/api/errors.html#errors_common_system_errors) eg. coming from the underlying connection.
Query errors will also contain the `query` string and the `parameters` which are not enumerable to avoid accidentally leaking confidential information in logs. To log these it is required to specifically access `error.query` and `error.parameters`.
## Teardown / Cleanup
To ensure proper teardown and cleanup on server restarts use `await sql.end()` before `process.exit()`.
Calling `sql.end()` will reject new queries and return a Promise which resolves when all queries are finished and the underlying connections are closed. If a `{ timeout }` option is provided any pending queries will be rejected once the timeout (in seconds) is reached and the connections will be destroyed.
#### Sample shutdown using [Prexit](https://github.com/porsager/prexit)
```js
import prexit from 'prexit'

prexit(async () => {
  await sql.end({ timeout: 5 })
  await new Promise(r => server.close(r))
})
```
## Error handling
Errors are all thrown to related queries and never globally. Errors coming from the database itself are always in the [native Postgres format](https://www.postgresql.org/docs/current/errcodes-appendix.html), and the same goes for any [Node.js errors](https://nodejs.org/api/errors.html#errors_common_system_errors) eg. coming from the underlying connection.
Query errors will contain a stored error with the origin of the query to aid in tracing errors.
Query errors will also contain the `query` string and the `parameters`. These are not enumerable to avoid accidentally leaking confidential information in logs. To log these it is required to specifically access `error.query` and `error.parameters`, or set `debug: true` in options.
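For example (a sketch; `wat` is a deliberately invalid column):
```js
try {
  await sql`select wat from users`
} catch (error) {
  console.error(error.query)      // the failed query string (not enumerable)
  console.error(error.parameters) // the bound parameters (not enumerable)
}
```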
There are also the following errors specifically for this library.
##### UNSAFE_TRANSACTION
> Only use sql.begin or max: 1
To ensure statements in a transaction run on the same connection (which is required for them to run inside the transaction), you must use [`sql.begin(...)`](#transactions) or only allow a single connection in options (`max: 1`).
##### UNDEFINED_VALUE

@@ -753,3 +859,3 @@ > Undefined values are not allowed

When using SASL authentication the server responds with a signature at the end of the authentication flow which needs to match the one on the client. This is to avoid [man in the middle attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack). If you receive this error the connection was cancelled because the server did not reply with the expected signature.
When using SASL authentication the server responds with a signature at the end of the authentication flow which needs to match the one on the client. This is to avoid [man-in-the-middle attacks](https://en.wikipedia.org/wiki/Man-in-the-middle_attack). If you receive this error the connection was canceled because the server did not reply with the expected signature.

@@ -769,3 +875,3 @@ ##### NOT_TAGGED_CALL

This error is thrown if the connection was closed without an error. This should not happen during normal operation, so please create an issue if this was unexpected.
This error is thrown if the connection was closed without an error. This should not happen during normal operations, so please create an issue if this was unexpected.

@@ -775,3 +881,3 @@ ##### CONNECTION_ENDED

This error is thrown if the user has called [`sql.end()`](#sql_end) and performed a query afterwards.
This error is thrown if the user has called [`sql.end()`](#teardown--cleanup) and performed a query afterward.

@@ -781,3 +887,3 @@ ##### CONNECTION_DESTROYED

This error is thrown for any queries that were pending when the timeout to [`sql.end({ timeout: X })`](#sql_destroy) was reached.
This error is thrown for any queries that were pending when the timeout to [`sql.end({ timeout: X })`](#teardown--cleanup) was reached.

@@ -787,8 +893,47 @@ ##### CONNECTION_CONNECT_TIMEOUT

This error is thrown if the startup phase of the connection (tcp, protocol negotiation and auth) took more than the default 30 seconds or what was specified using `connect_timeout` or `PGCONNECT_TIMEOUT`.
This error is thrown if the startup phase of the connection (tcp, protocol negotiation, and auth) took more than the default 30 seconds or what was specified using `connect_timeout` or `PGCONNECT_TIMEOUT`.
## TypeScript support
`postgres` has TypeScript support. You can pass a row list type for your queries in this way:
```ts
interface User {
id: number
name: string
}
const users = await sql<User[]>`SELECT * FROM users`
users[0].id // ok => number
users[1].name // ok => string
users[0].invalid // fails: `invalid` does not exist on `User`
```
However, be sure to check the array length to avoid accessing properties of `undefined` rows:
```ts
const users = await sql<User[]>`SELECT * FROM users WHERE id = ${id}`
if (!users.length)
throw new Error('Not found')
return users[0]
```
You can also use destructuring when you only care about a fixed number of rows.
In this case, we recommend using tuples to handle `undefined` properly:
```ts
const [user]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}`
if (!user) // => User | undefined
throw new Error('Not found')
return user // => User
// NOTE:
const [first, second]: [User?] = await sql`SELECT * FROM users WHERE id = ${id}` // fails: `second` does not exist on `[User?]`
const [first, second] = await sql<[User?]>`SELECT * FROM users WHERE id = ${id}` // does not fail: `second` is `User | undefined`
```
We do our best to type all the public API, however types are not always updated when features are added or changed. Feel free to open an issue if you have trouble with types.
## Migration tools
Postgres.js doesn't come with any migration solution since it's way out of scope, but here are some modules that supports Postgres.js for migrations:
Postgres.js doesn't come with any migration solution since it's way out of scope, but here are some modules that support Postgres.js for migrations:
- https://github.com/porsager/postgres-shift
- https://github.com/lukeed/ley

@@ -795,0 +940,0 @@

@@ -19,17 +19,37 @@ /**

interface BaseOptions<T extends JSToPostgresTypeMap> {
/** Postgres ip address or domain name */
/** Postgres ip address[s] or domain name[s] */
host: string | string[];
/** Postgres server port */
/** Postgres server[s] port[s] */
port: number | number[];
/** Name of database to connect to */
/** unix socket path (usually '/tmp') */
path: string | undefined;
/**
* Name of database to connect to
* @default process.env['PGDATABASE'] || options.user
*/
database: string;
/** Username of database user */
/**
* Username of database user
* @default process.env['PGUSERNAME'] || process.env['PGUSER'] || require('os').userInfo().username
*/
user: string;
/** True; or options for tls.connect */
ssl: 'require' | 'prefer' | boolean | object;
/** Max number of connections */
/**
* true, prefer, require or tls.connect options
* @default false
*/
ssl: 'require' | 'allow' | 'prefer' | boolean | object;
/**
* Max number of connections
* @default 10
*/
max: number;
/** Idle connection timeout in seconds */
/**
* Idle connection timeout in seconds
* @default process.env['PGIDLE_TIMEOUT']
*/
idle_timeout: number | undefined;
/** Connect timeout in seconds */
/**
* Connect timeout in seconds
* @default process.env['PGCONNECT_TIMEOUT']
*/
connect_timeout: number;

@@ -39,7 +59,2 @@ /** Array of custom types; see more below */

/**
* Disable prepared mode
* @deprecated use "prepare" option instead
*/
no_prepare: boolean;
/**
* Enables prepare mode.

@@ -49,5 +64,8 @@ * @default true

prepare: boolean;
/** Defaults to console.log */
/**
* Called when a notice is received
* @default console.log
*/
onnotice: (notice: postgres.Notice) => void;
/** (key; value) when server param change */
/** (key; value) when a server param change */
onparameter: (key: string, value: any) => void;

@@ -58,17 +76,47 @@ /** Is called with (connection; query; parameters) */

transform: {
/** Transforms incoming column names */
column?: (column: string) => string;
/** Transforms incoming row values */
value?: (value: any) => any;
/** Transforms incoming and outgoing column names */
column?: ((column: string) => string) | {
/** SQL to JS */
from?: (column: string) => string;
/** JS to SQL */
to?: (column: string) => string;
};
/** Transforms incoming and outgoing row values */
value?: ((value: any) => any) | {
/** SQL to JS */
from?: (value: unknown) => any;
// /** JS to SQL */
// to?: (value: unknown) => any; // unused
};
/** Transforms entire rows */
row?: (row: postgres.Row) => any;
row?: ((row: postgres.Row) => any) | {
/** SQL to JS */
from?: (row: postgres.Row) => any;
// /** JS to SQL */
// to?: (row: postgres.Row) => any; // unused
};
};
/** Connection parameters */
connection: Partial<postgres.ConnectionParameters>;
/**
* Use 'read-write' with multiple hosts to ensure only connecting to primary
* @default process.env['PGTARGETSESSIONATTRS']
*/
target_session_attrs: undefined | 'read-write' | 'read-only' | 'primary' | 'standby' | 'prefer-standby';
/**
* Automatically fetches types on connect
* @default true
*/
fetch_types: boolean;
/**
* Publications to subscribe to (only relevant when calling `sql.subscribe()`)
* @default 'alltables'
*/
publications: string
}
type PostgresTypeList<T> = {
[name in keyof T]: T[name] extends (...args: any) => unknown
[name in keyof T]: T[name] extends (...args: any) => postgres.SerializableParameter
? postgres.PostgresType<T[name]>
: postgres.PostgresType;
: postgres.PostgresType<(...args: any) => postgres.SerializableParameter>;
};

@@ -80,26 +128,22 @@

declare class PostgresError extends Error {
name: 'PostgresError';
severity_local: string;
severity: string;
code: string;
position: string;
file: string;
line: string;
routine: string;
declare const PRIVATE: unique symbol;
detail?: string;
hint?: string;
internal_position?: string;
internal_query?: string;
where?: string;
schema_name?: string;
table_name?: string;
column_name?: string;
data?: string;
type_name?: string;
constraint_name?: string;
declare class NotAPromise {
private [PRIVATE]: never; // prevent user-side interface implementation
// Disable user-side creation of PostgresError
private constructor();
/**
* @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;```
* @throws NOT_TAGGED_CALL
*/
private then(): never;
/**
* @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;```
* @throws NOT_TAGGED_CALL
*/
private catch(): never;
/**
* @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;```
* @throws NOT_TAGGED_CALL
*/
private finally(): never;
}

@@ -111,33 +155,88 @@

type PostgresErrorType = typeof PostgresError
declare namespace postgres {
export const PostgresError: PostgresErrorType;
class PostgresError extends Error {
name: 'PostgresError';
severity_local: string;
severity: string;
code: string;
position: string;
file: string;
line: string;
routine: string;
detail?: string;
hint?: string;
internal_position?: string;
internal_query?: string;
where?: string;
schema_name?: string;
table_name?: string;
column_name?: string;
data?: string;
type_name?: string;
constraint_name?: string;
/** Only set when debug is enabled */
query: string;
/** Only set when debug is enabled */
parameters: any[];
// Disable user-side creation of PostgresError
private constructor();
}
/**
* Convert a string to Pascal case.
* @param str THe string to convert
* @returns The new string in Pascal case
* Convert a snake_case string to PascalCase.
* @param str The string from snake_case to convert
* @returns The new string in PascalCase
*/
function toPascal(str: string): string;
/**
* Convert a string to Camel case.
* @param str THe string to convert
* @returns The new string in Camel case
* Convert a PascalCase string to snake_case.
* @param str The string from snake_case to convert
* @returns The new string in snake_case
*/
function fromPascal(str: string): string;
/**
* Convert a snake_case string to camelCase.
* @param str The string from snake_case to convert
* @returns The new string in camelCase
*/
function toCamel(str: string): string;
/**
* Convert a string to Kebab case.
* @param str THe string to convert
* @returns The new string in Kebab case
* Convert a camelCase string to snake_case.
* @param str The string from snake_case to convert
* @returns The new string in snake_case
*/
function fromCamel(str: string): string;
/**
* Convert a snake_case string to kebab-case.
* @param str The string from snake_case to convert
* @returns The new string in kebab-case
*/
function toKebab(str: string): string;
/**
* Convert a kebab-case string to snake_case.
* @param str The string from snake_case to convert
* @returns The new string in snake_case
*/
function fromKebab(str: string): string;
const BigInt: PostgresType<(number: bigint) => string>;
interface PostgresType<T extends (...args: any[]) => unknown> {
to: number;
from: number[];
serialize: T;
parse: (raw: string) => unknown;
}
interface ConnectionParameters {
/** Default application_name */
/**
* Default application_name
* @default 'postgres.js'
*/
application_name: string;
/** Other connection parameters */
[name: string]: any;
[name: string]: string;
}

@@ -150,14 +249,27 @@

port?: number;
/** unix socket path (usually '/tmp') */
path?: string | (() => string);
/** @inheritdoc */
path?: string;
/** Password of database user (an alias for `password`) */
pass?: Options<T>['password'];
/** Password of database user */
/**
* Password of database user
* @default process.env['PGPASSWORD']
*/
password?: string | (() => string | Promise<string>);
/** Name of database to connect to (an alias for `database`) */
db?: Options<T>['database'];
/** Username of database user (an alias for `username`) */
/** Username of database user (an alias for `user`) */
username?: Options<T>['user'];
/** Postgres ip address or domain name (an alias for `host`) */
hostname?: Options<T>['host'];
/**
* Disable prepared mode
* @deprecated use "prepare" option instead
*/
no_prepare?: boolean;
/**
* Idle connection timeout in seconds
* @deprecated use "idle_timeout" option instead
*/
timeout?: Options<T>['idle_timeout'];
}

@@ -172,6 +284,26 @@

pass: null;
serializers: { [oid: number]: T[keyof T] };
parsers: { [oid: number]: T[keyof T] };
/** @inheritdoc */
transform: Transform;
serializers: Record<number, (...args: any) => SerializableParameter>;
parsers: Record<number, (value: string) => unknown>;
}
interface Transform {
/** Transforms incoming column names */
column: {
from: ((column: string) => string) | undefined;
to: ((column: string) => string) | undefined;
};
/** Transforms incoming row values */
value: {
from: ((value: any) => any) | undefined;
to: undefined; // (value: any) => any
};
/** Transforms entire rows */
row: {
from: ((row: postgres.Row) => any) | undefined;
to: undefined; // (row: postgres.Row) => any
};
}
interface Notice {

@@ -181,10 +313,3 @@ [field: string]: string;

interface PostgresType<T extends (...args: any) => any = (...args: any) => any> {
to: number;
from: number[];
serialize: T;
parse: (raw: ReturnType<T>) => unknown;
}
interface Parameter<T = SerializableParameter> {
interface Parameter<T = SerializableParameter> extends NotAPromise {
/**

@@ -209,3 +334,3 @@ * PostgreSQL OID of the type

interface ConnectionError extends globalThis.Error {
code: never
code:
| 'CONNECTION_DESTROYED'

@@ -222,13 +347,8 @@ | 'CONNECT_TIMEOUT'

code: 'MESSAGE_NOT_SUPPORTED';
name: never
| 'CopyInResponse'
| 'CopyOutResponse'
| 'ParameterDescription'
| 'FunctionCallResponse'
| 'NegotiateProtocolVersion'
| 'CopyBothResponse';
name: string;
}
interface GenericError extends globalThis.Error {
code: never
code:
| '57014' // canceling statement due to user request
| 'NOT_TAGGED_CALL'

@@ -243,13 +363,3 @@ | 'UNDEFINED_VALUE'

code: 'AUTH_TYPE_NOT_IMPLEMENTED';
type: number
| 'KerberosV5'
| 'CleartextPassword'
| 'MD5Password'
| 'SCMCredential'
| 'GSS'
| 'GSSContinue'
| 'SSPI'
| 'SASL'
| 'SASLContinue'
| 'SASLFinal';
type: number | string;
message: string;
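
With `code` and `type` widened from `never`, thrown errors can be branched on directly. A hedged sketch; the query is only there to have something cancellable:

```js
try {
  await sql`select pg_sleep(60)`
} catch (err) {
  if (err.code === '57014') {
    // canceling statement due to user request, e.g. after query.cancel()
  } else if (err.code === 'CONNECT_TIMEOUT') {
    // the connection never became ready
  } else {
    throw err
  }
}
```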

@@ -265,2 +375,46 @@ }

interface ColumnInfo {
key: number;
name: string;
type: number;
parser?(raw: string): unknown;
atttypmod: number;
}
interface RelationInfo {
schema: string;
table: string;
columns: ColumnInfo[];
keys: ColumnInfo[];
}
type ReplicationEvent =
| { command: 'insert', relation: RelationInfo }
| { command: 'delete', relation: RelationInfo, key: boolean }
| { command: 'update', relation: RelationInfo, key: boolean, old: Row | null };
interface SubscriptionHandle {
unsubscribe(): void;
}
interface LargeObject {
writable(options?: {
highWaterMark?: number,
start?: number
}): Promise<import('node:stream').Writable>;
readable(options?: {
highWaterMark?: number,
start?: number,
end?: number
}): Promise<import('node:stream').Readable>;
close(): Promise<void>;
tell(): Promise<void>;
read(size: number): Promise<void>;
write(buffer: Uint8Array): Promise<[{ data: Uint8Array }]>;
truncate(size: number): Promise<void>;
seek(offset: number, whence?: number): Promise<void>;
size(): Promise<[{ position: bigint, size: bigint }]>;
}
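
`ReplicationEvent`, `SubscriptionHandle` and `LargeObject` back the new `subscribe` and `largeObject` methods. A sketch, assuming the server has logical replication enabled (`wal_level = logical`) and a hypothetical `users` table:

```js
// Realtime changes: pattern is `operation[:schema.table[=primary_key]]`
const { unsubscribe } = await sql.subscribe('insert:users', (row, { command, relation }) => {
  console.log(command, relation.schema, relation.table, row)
})

// Large objects: omitting the oid should create a new object (assumption)
const lo = await sql.largeObject()
await lo.write(Buffer.from('hello'))
await lo.seek(0)
await lo.close()
```
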
type Serializable = null

@@ -277,3 +431,4 @@ | boolean

| ArrayParameter
| SerializableParameter[];
| Record<string, any> // implicit JSON
| readonly SerializableParameter[];
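
The added `Record<string, any> // implicit JSON` member means a plain object can be interpolated directly and is serialized as JSON. A sketch against a hypothetical `events` table with a `jsonb` column:

```js
const payload = { type: 'click', x: 10 }

// Implicit: the object is serialized to JSON on the way out
await sql`insert into events (payload) values (${ payload })`

// Equivalent explicit form via the json helper
await sql`insert into events (payload) values (${ sql.json(payload) })`
```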

@@ -294,6 +449,2 @@ type HelperSerializable = { [index: string]: SerializableParameter } | { [index: string]: SerializableParameter }[];

interface UnlabeledRow<T = any> {
'?column?': T;
}
type MaybeRow = Row | undefined;

@@ -310,3 +461,3 @@

type: number;
parser(raw: string): string;
parser?(raw: string): unknown;
}

@@ -317,3 +468,3 @@

interface State {
state: 'I';
status: string;
pid: number;

@@ -323,5 +474,16 @@ secret: number;

interface Statement {
/** statement unique name */
name: string;
/** sql query */
string: string;
/** parameters types */
types: number[];
columns: ColumnList<string>;
}
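
`Statement` is the shape the new `.describe()` resolves to, exposing the generated query text plus parameter and column types without executing anything. A small sketch:

```js
const stmt = await sql`select * from users where id = ${ 1 }`.describe()

stmt.string   // the generated SQL text
stmt.types    // parameter type oids, e.g. [23] for int4
stmt.columns  // column descriptions with name and type
```
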
interface ResultMeta<T extends number | null> {
count: T; // For tuples
command: string;
statement: Statement;
state: State;

@@ -335,11 +497,35 @@ }

type ExecutionResult<T> = [] & ResultQueryMeta<number, keyof NonNullable<T>>;
type RowList<T extends MaybeRow[]> = T & Iterable<NonNullable<T[number]>> & ResultQueryMeta<T['length'], keyof T[number]>;
type RawRowList<T extends readonly any[]> = Buffer[][] & Iterable<Buffer[][]> & ResultQueryMeta<T['length'], keyof T[number]>;
type RowList<T extends readonly any[]> = T & Iterable<NonNullable<T[number]>> & ResultQueryMeta<T['length'], keyof T[number]>;
interface PendingQuery<TRow extends MaybeRow[]> extends Promise<RowList<TRow>> {
stream(cb: (row: NonNullable<TRow[number]>, result: ExecutionResult<TRow[number]>) => void): Promise<ExecutionResult<TRow[number]>>;
cursor(cb: (row: NonNullable<TRow[number]>) => void): Promise<ExecutionResult<TRow[number]>>;
cursor(size: 1, cb: (row: NonNullable<TRow[number]>) => void): Promise<ExecutionResult<TRow[number]>>;
cursor(size: number, cb: (rows: NonNullable<TRow[number]>[]) => void): Promise<ExecutionResult<TRow[number]>>;
interface PendingQueryModifiers<TRow extends readonly any[]> {
readable(): import('node:stream').Readable;
writable(): import('node:stream').Writable;
execute(): this;
cancel(): void;
/**
* @deprecated `.stream` has been renamed to `.forEach`
* @throws
*/
stream(cb: (row: NonNullable<TRow[number]>, result: ExecutionResult<TRow[number]>) => void): never;
forEach(cb: (row: NonNullable<TRow[number]>, result: ExecutionResult<TRow[number]>) => void): Promise<ExecutionResult<TRow[number]>>;
cursor(rows?: number): AsyncIterable<NonNullable<TRow[number]>[]>;
cursor(cb: (row: [NonNullable<TRow[number]>]) => void): Promise<ExecutionResult<TRow[number]>>;
cursor(rows: number, cb: (rows: NonNullable<TRow[number]>[]) => void): Promise<ExecutionResult<TRow[number]>>;
}
interface PendingDescribeQuery extends Promise<Statement> {
}
interface PendingRawQuery<TRow extends readonly MaybeRow[]> extends Promise<RawRowList<TRow>>, PendingQueryModifiers<Buffer[][]> {
}
interface PendingQuery<TRow extends readonly MaybeRow[]> extends Promise<RowList<TRow>>, PendingQueryModifiers<TRow> {
describe(): PendingDescribeQuery;
raw(): PendingRawQuery<TRow>;
}
interface PendingRequest extends Promise<[] & ResultMeta<null>> { }
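
`.stream` now throws in favor of `.forEach`, and `.cursor` gains an async-iterable form alongside the callback variants. A sketch of both:

```js
// Row-by-row callback (the rename of .stream)
await sql`select n from generate_series(1, 5) n`.forEach(row => {
  console.log(row.n)
})

// Batched async iteration: each `rows` holds up to 2 rows
for await (const rows of sql`select n from generate_series(1, 5) n`.cursor(2)) {
  console.log(rows.length)
}
```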

@@ -352,3 +538,3 @@

interface Helper<T, U extends any[] = T[]> {
interface Helper<T, U extends any[] = T[]> extends NotAPromise {
first: T;

@@ -366,3 +552,3 @@ rest: U;

*/
<T extends any[] = Row[]>(template: TemplateStringsArray, ...args: SerializableParameter[]): PendingQuery<AsRowList<T>>;
<T extends readonly any[] = Row[]>(template: TemplateStringsArray, ...args: SerializableParameter[]): PendingQuery<AsRowList<T>>;

@@ -385,14 +571,6 @@ /**

END: {}; // FIXME unique symbol ?
CLOSE: {};
END: this['CLOSE'];
PostgresError: typeof PostgresError;
array<T extends SerializableParameter[] = SerializableParameter[]>(value: T): ArrayParameter<T>;
begin<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
begin<T>(options: string, cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
end(options?: { timeout?: number }): Promise<void>;
file<T extends any[] = Row[]>(path: string, options?: { cache?: boolean }): PendingQuery<AsRowList<T>>;
file<T extends any[] = Row[]>(path: string, args: SerializableParameter[], options?: { cache?: boolean }): PendingQuery<AsRowList<T>>;
json(value: any): Parameter;
listen(channel: string, cb: (value?: string) => void): ListenRequest;
notify(channel: string, payload: string): PendingRequest;
options: ParsedOptions<TTypes>;
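
`listen` and `notify` stay on the main `Sql` interface; note the listen callback's payload is now a non-optional `string`. A round-trip sketch with an arbitrary channel name:

```js
await sql.listen('events', payload => {
  console.log('received', payload)
})

await sql.notify('events', 'hello')
```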

@@ -405,5 +583,30 @@ parameters: ConnectionParameters;

};
unsafe<T extends any[] = Row[]>(query: string, parameters?: SerializableParameter[], queryOptions?: UnsafeQueryOptions): PendingQuery<AsRowList<T>>;
unsafe<T extends any[] = (Row & Iterable<Row>)[]>(query: string, parameters?: SerializableParameter[], queryOptions?: UnsafeQueryOptions): PendingQuery<AsRowList<T>>;
end(options?: { timeout?: number }): Promise<void>;
listen(channel: string, cb: (value: string) => void): ListenRequest;
notify(channel: string, payload: string): PendingRequest;
subscribe(event: string, cb: (row: Row | null, info: ReplicationEvent) => void): Promise<SubscriptionHandle>;
largeObject(oid?: number, /** @default 0x00020000 | 0x00040000 */ mode?: number): Promise<LargeObject>;
begin<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
begin<T>(options: string, cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
array<T extends SerializableParameter[] = SerializableParameter[]>(value: T, type?: number): ArrayParameter<T>;
file<T extends readonly any[] = Row[]>(path: string | Buffer | URL | number, options?: { cache?: boolean }): PendingQuery<AsRowList<T>>;
file<T extends readonly any[] = Row[]>(path: string | Buffer | URL | number, args: SerializableParameter[], options?: { cache?: boolean }): PendingQuery<AsRowList<T>>;
json(value: any): Parameter;
}
interface UnsafeQueryOptions {
/**
* When executes query as prepared statement.
* @default false
*/
prepare?: boolean;
}
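
`UnsafeQueryOptions.prepare` defaults to `false`, so `sql.unsafe` skips prepared statements unless asked. A sketch with a hypothetical `users` table:

```js
// Dynamic SQL with positional parameters; not prepared by default
const rows = await sql.unsafe('select * from users where id = $1', [1])

// Opt in to a prepared statement for hot, repeated queries
await sql.unsafe('select * from users where id = $1', [1], { prepare: true })
```
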
interface TransactionSql<TTypes extends JSToPostgresTypeMap> extends Sql<TTypes> {

@@ -413,13 +616,4 @@ savepoint<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;

}
}
interface UnsafeQueryOptions {
/**
* When executes query as prepared statement.
* @default false
*/
prepare?: boolean;
}
export = postgres;
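
The final hunk keeps `savepoint` on `TransactionSql`, so partial rollbacks compose inside `begin`. A sketch with a hypothetical `accounts` table:

```js
await sql.begin(async sql => {
  await sql`update accounts set balance = balance - 100 where id = 1`

  await sql.savepoint(async sql => {
    await sql`update accounts set balance = balance + 100 where id = 2`
    // throwing in here rolls back to the savepoint, not the whole transaction
  })
})
```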