@ff00ff/mammoth
Advanced tools
Comparing version 1.0.0-beta.11 to 1.0.0-rc.1
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const dataTypes_1 = require("./../columns/dataTypes"); | ||
const database_1 = require("../database"); | ||
const table_1 = require("../defines/table"); | ||
const __1 = require(".."); | ||
const helpers_1 = require("./helpers"); | ||
describe(`delete`, () => { | ||
const itemTable = table_1.defineTable({ | ||
id: dataTypes_1.uuid() | ||
.primary() | ||
.notNull() | ||
.default(`gen_random_uuid()`), | ||
createdAt: dataTypes_1.timestamptz() | ||
.notNull() | ||
.default(`now()`), | ||
name: dataTypes_1.text().notNull(), | ||
value: dataTypes_1.integer(), | ||
const foo = __1.defineTable(`foo`, { | ||
id: __1.uuid().primaryKey().default(`gen_random_id()`), | ||
createDate: __1.timestampWithTimeZone().notNull().default(`now()`), | ||
name: __1.text().notNull(), | ||
value: __1.integer(), | ||
}); | ||
const db = database_1.createDatabase(process.env.DATABASE_URL, { | ||
item: itemTable, | ||
const bar = __1.defineTable(`bar`, { | ||
id: __1.uuid().primaryKey().default(`gen_random_id()`), | ||
}); | ||
beforeEach(async () => { | ||
await db.sql `CREATE EXTENSION IF NOT EXISTS "pgcrypto"`; | ||
await db.sql `CREATE TABLE item ( | ||
id UUID PRIMARY KEY NOT NULL DEFAULT gen_random_uuid(), | ||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), | ||
name TEXT NOT NULL, | ||
value INTEGER | ||
)`; | ||
const baz = __1.defineTable(`baz`, { | ||
id: __1.uuid().primaryKey().default(`gen_random_id()`), | ||
}); | ||
afterEach(async () => { | ||
await db.sql `DROP TABLE item`; | ||
const db = __1.defineDb(() => Promise.resolve({ rows: [], affectedRowsCount: 0 })); | ||
it(`should delete`, () => { | ||
const query = db | ||
.deleteFrom(foo) | ||
.using(bar, baz) | ||
.where(foo.id.ne(bar.id)) | ||
.returning(`id`, `name`, `createDate`); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "DELETE FROM foo USING bar, baz WHERE foo.id <> bar.id RETURNING id, name, create_date \\"createDate\\"", | ||
} | ||
`); | ||
}); | ||
afterAll(async () => { | ||
await db.destroy(); | ||
}); | ||
it(`should delete item using where clause`, async () => { | ||
const rows = await db | ||
.deleteFrom(db.item) | ||
.where(db.item.name.eq(`Test`)) | ||
.returning(`name`); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
}); | ||
//# sourceMappingURL=delete.test.js.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const dataTypes_1 = require("./../columns/dataTypes"); | ||
const database_1 = require("../database"); | ||
const table_1 = require("../defines/table"); | ||
const keywords_1 = require("../keywords"); | ||
const __1 = require(".."); | ||
const helpers_1 = require("./helpers"); | ||
describe(`insert`, () => { | ||
const item = table_1.defineTable({ | ||
id: dataTypes_1.uuid() | ||
.primaryKey() | ||
.notNull() | ||
.default(`gen_random_uuid()`), | ||
createdAt: dataTypes_1.timestamptz() | ||
.notNull() | ||
.default(`now()`), | ||
name: dataTypes_1.text().notNull(), | ||
value: dataTypes_1.integer(), | ||
const foo = __1.defineTable(`foo`, { | ||
id: __1.uuid().primaryKey().default(`gen_random_id()`), | ||
createDate: __1.timestampWithTimeZone().notNull().default(`now()`), | ||
name: __1.text().notNull(), | ||
value: __1.integer(), | ||
}); | ||
const entry = table_1.defineTable({ | ||
id: dataTypes_1.uuid() | ||
.primaryKey() | ||
.notNull() | ||
.default(`gen_random_uuid()`), | ||
createdAt: dataTypes_1.timestamptz() | ||
.notNull() | ||
.default(`now()`), | ||
value: dataTypes_1.integer().default(1), | ||
}); | ||
const db = database_1.createDatabase(process.env.DATABASE_URL, { | ||
item, | ||
entry, | ||
}); | ||
beforeEach(async () => { | ||
await db.sql `CREATE EXTENSION IF NOT EXISTS "pgcrypto"`; | ||
await db.sql `CREATE TABLE item ( | ||
id UUID PRIMARY KEY NOT NULL DEFAULT gen_random_uuid(), | ||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), | ||
name TEXT NOT NULL, | ||
value INTEGER | ||
)`; | ||
await db.sql `CREATE TABLE entry ( | ||
id UUID PRIMARY KEY NOT NULL DEFAULT gen_random_uuid(), | ||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), | ||
value INTEGER DEFAULT 1 | ||
)`; | ||
}); | ||
afterEach(async () => { | ||
await db.sql `DROP TABLE item, entry`; | ||
}); | ||
afterAll(async () => { | ||
await db.destroy(); | ||
}); | ||
it(`should insert single row when using returning`, async () => { | ||
const item = await db | ||
.insertInto(db.item) | ||
const db = __1.defineDb(() => Promise.resolve({ rows: [], affectedRowsCount: 0 })); | ||
it(`should insert foo on conflict do update set`, () => { | ||
const query = db | ||
.insertInto(foo) | ||
.values({ | ||
name: `Item #1`, | ||
name: `Test`, | ||
}) | ||
.returning(`name`); | ||
expect(item).toMatchInlineSnapshot(` | ||
.onConflict(`id`) | ||
.doUpdateSet({ | ||
name: `Test 2`, | ||
}); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"name": "Item #1", | ||
"parameters": Array [ | ||
"Test", | ||
"Test 2", | ||
], | ||
"text": "INSERT INTO foo (name) VALUES ($1) ON CONFLICT (id) DO UPDATE SET name = $2", | ||
} | ||
`); | ||
}); | ||
it(`should insert and return using column syntax`, async () => { | ||
const item = await db | ||
.insertInto(db.item) | ||
.values({ | ||
name: `Item #1`, | ||
}) | ||
.returning(db.item.name); | ||
expect(item).toMatchInlineSnapshot(` | ||
Object { | ||
"name": "Item #1", | ||
} | ||
`); | ||
it(`should insert with default values`, () => { | ||
const query = db.insertInto(foo).defaultValues(); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "INSERT INTO foo DEFAULT VALUES", | ||
} | ||
`); | ||
}); | ||
it(`should insert multiple rows and return them when using returning`, async () => { | ||
const items = await db | ||
.insertInto(db.item) | ||
.values([ | ||
{ | ||
name: `Item #1`, | ||
}, | ||
{ | ||
name: `Item #2`, | ||
}, | ||
]) | ||
.returning(`name`); | ||
expect(items).toMatchInlineSnapshot(` | ||
Array [ | ||
Object { | ||
"name": "Item #1", | ||
}, | ||
Object { | ||
"name": "Item #2", | ||
}, | ||
] | ||
it(`should insert with returning`, () => { | ||
const query = db.insertInto(foo).values({ name: `Test` }).returning(`id`); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [ | ||
"Test", | ||
], | ||
"text": "INSERT INTO foo (name) VALUES ($1) RETURNING id", | ||
} | ||
`); | ||
}); | ||
it(`should upsert and update using a keyword`, async () => { | ||
const result = await db | ||
.insertInto(db.item) | ||
.values({ | ||
name: `Item #1`, | ||
}) | ||
.onConflict(`id`) | ||
.doUpdateSet({ | ||
createdAt: keywords_1.now(), | ||
}); | ||
expect(result).toEqual(1); | ||
it(`should insert into select`, () => { | ||
const query = db | ||
.insertInto(foo, [`name`, `value`, `createDate`]) | ||
.select(foo.id, foo.name, foo.createDate) | ||
.from(foo); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "INSERT INTO foo (name, value, create_date) SELECT foo.id, foo.name, foo.create_date \\"createDate\\" FROM foo", | ||
} | ||
`); | ||
}); | ||
it(`should upsert and update using a subquery`, async () => { | ||
const result = await db | ||
.insertInto(db.item) | ||
.values({ | ||
name: `Item #1`, | ||
}) | ||
.onConflict(`id`) | ||
.doUpdateSet({ | ||
value: db | ||
.select(db.item.value) | ||
.from(db.item) | ||
.orderBy(db.item.value.desc()) | ||
.limit(1), | ||
}); | ||
expect(result).toEqual(1); | ||
it(`should insert update returning`, () => { | ||
const query = db | ||
.insertInto(foo, [`name`, `value`, `createDate`]) | ||
.update(foo) | ||
.set({ value: 123 }) | ||
.returning(`name`, `value`, `createDate`); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [ | ||
123, | ||
], | ||
"text": "INSERT INTO foo (name, value, create_date) UPDATE foo SET value = $1 RETURNING name, value, create_date \\"createDate\\"", | ||
} | ||
`); | ||
}); | ||
it(`should upsert using where clause`, async () => { | ||
const result = await db | ||
.insertInto(db.item) | ||
.values({ name: `Item #1` }) | ||
.onConflict(`id`) | ||
.doUpdateSet({ value: db.item.value.plus(1) }) | ||
.where(db.item.value.lt(100)); | ||
expect(result).toEqual(1); | ||
it(`should insert delete returning`, () => { | ||
const query = db | ||
.insertInto(foo, [`name`, `value`, `createDate`]) | ||
.deleteFrom(foo) | ||
.where(foo.value.lt(123)) | ||
.returning(`name`, `value`, `createDate`); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [ | ||
123, | ||
], | ||
"text": "INSERT INTO foo (name, value, create_date) DELETE FROM foo WHERE foo.value < $1 RETURNING name, value, create_date \\"createDate\\"", | ||
} | ||
`); | ||
}); | ||
it(`should err when inserting multiple rows with default values`, async () => { | ||
expect.assertions(1); | ||
try { | ||
await db.insertInto(db.entry).values([{ value: null }, { value: null }]); | ||
} | ||
catch (e) { | ||
expect(e).toMatchInlineSnapshot(`[Error: Cannot insert multiple rows with only default values]`); | ||
} | ||
it(`insert into on conflict do nothing`, () => { | ||
const query = db | ||
.insertInto(foo) | ||
.values({ name: `Test` }) | ||
.onConflict() | ||
.doNothing(); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [ | ||
"Test", | ||
], | ||
"text": "INSERT INTO foo (name) VALUES ($1) ON CONFLICT DO NOTHING", | ||
} | ||
`); | ||
}); | ||
}); | ||
//# sourceMappingURL=insert.test.js.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const keywords_1 = require("../keywords"); | ||
const columns_1 = require("../columns"); | ||
const dataTypes_1 = require("./../columns/dataTypes"); | ||
const database_1 = require("../database"); | ||
const defines_1 = require("../defines"); | ||
const __1 = require(".."); | ||
const helpers_1 = require("./helpers"); | ||
describe(`select`, () => { | ||
const item = defines_1.defineTable({ | ||
id: new columns_1.UuidColumn() | ||
.primary() | ||
.notNull() | ||
.default(new keywords_1.GenRandomUuid()), | ||
createdAt: new columns_1.TimestampWithTimeZoneColumn().notNull().default(new keywords_1.Now()), | ||
name: new columns_1.TextColumn().notNull(), | ||
value: new columns_1.IntegerColumn(), | ||
const foo = __1.defineTable(`foo`, { | ||
id: __1.uuid().primaryKey().default(`gen_random_id()`), | ||
createDate: __1.timestampWithTimeZone().notNull().default(`now()`), | ||
name: __1.text().notNull(), | ||
value: __1.integer(), | ||
}); | ||
const itemFoo = defines_1.defineTable({ | ||
id: dataTypes_1.uuid() | ||
.primary() | ||
.notNull() | ||
.default(new keywords_1.GenRandomUuid()), | ||
itemId: dataTypes_1.uuid() | ||
.notNull() | ||
.references(item, `id`), | ||
name: dataTypes_1.text().notNull(), | ||
const db = __1.defineDb(() => Promise.resolve({ rows: [], affectedRowsCount: 0 })); | ||
it(`should select foo`, () => { | ||
const query = db.select(foo.id).from(foo); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "SELECT foo.id FROM foo", | ||
} | ||
`); | ||
}); | ||
const db = database_1.createDatabase({ databaseUrl: process.env.DATABASE_URL, debug: true }, { | ||
item, | ||
itemFoo, | ||
it(`should alias a column`, () => { | ||
const query = db.select(foo.id.as(`fooId`)).from(foo); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "SELECT foo.id \\"fooId\\" FROM foo", | ||
} | ||
`); | ||
}); | ||
beforeEach(async () => { | ||
await db.sql `CREATE EXTENSION IF NOT EXISTS "pgcrypto"`; | ||
await db.sql `CREATE TABLE item ( | ||
id UUID PRIMARY KEY NOT NULL DEFAULT gen_random_uuid(), | ||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), | ||
name TEXT NOT NULL, | ||
value INTEGER | ||
)`; | ||
await db.sql `CREATE TABLE item_foo ( | ||
id UUID PRIMARY KEY NOT NULL DEFAULT gen_random_uuid(), | ||
item_id UUID NOT NULL REFERENCES item (id), | ||
name TEXT NOT NULL | ||
)`; | ||
it(`should alias a table plus reference it in a condition `, () => { | ||
const baz = foo.as(`baz`); | ||
const query = db.select(baz.id).from(baz).where(baz.value.eq(1)); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [ | ||
1, | ||
], | ||
"text": "SELECT baz.id FROM foo \\"baz\\" WHERE baz.value = $1", | ||
} | ||
`); | ||
}); | ||
afterEach(async () => { | ||
await db.sql `DROP TABLE item, item_foo`; | ||
it(`should plus a column as expression`, () => { | ||
const query = db.select(foo.id, foo.value.plus(1).as(`test`)).from(foo); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [ | ||
1, | ||
], | ||
"text": "SELECT foo.id, ((foo.value + $1) \\"test\\") FROM foo", | ||
} | ||
`); | ||
}); | ||
afterAll(async () => { | ||
await db.destroy(); | ||
it(`should select subquery`, () => { | ||
const query = db.select(foo.id, db.select(foo.value).from(foo)).from(foo); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "SELECT foo.id, (SELECT foo.value FROM foo) FROM foo", | ||
} | ||
`); | ||
}); | ||
it(`should select and limit with a number`, async () => { | ||
const rows = await db | ||
.select(db.item.name) | ||
.from(db.item) | ||
.limit(10); | ||
expect(rows).toHaveLength(0); | ||
it(`should select IN with subquery`, () => { | ||
const query = db | ||
.select(foo.id) | ||
.from(foo) | ||
.where(foo.id.in(db.select(foo.id).from(foo))); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "SELECT foo.id FROM foo WHERE foo.id IN (SELECT foo.id FROM foo)", | ||
} | ||
`); | ||
}); | ||
it(`should select and limit with ALL`, async () => { | ||
const rows = await db | ||
.select(db.item.name) | ||
.from(db.item) | ||
.limit(`ALL`); | ||
expect(rows).toHaveLength(0); | ||
it(`should convert column to snake case`, () => { | ||
const query = db.select(foo.createDate).from(foo); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "SELECT foo.create_date \\"createDate\\" FROM foo", | ||
} | ||
`); | ||
}); | ||
it(`should select for update`, async () => { | ||
const rows = await db | ||
.select(db.item.name) | ||
.from(db.item) | ||
.forUpdate(); | ||
expect(rows).toHaveLength(0); | ||
it(`should select aggregate with as`, () => { | ||
const query = db.select(foo.id, __1.sum(foo.value).as(`total`)).from(foo); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "SELECT foo.id, ((SUM (foo.value)) \\"total\\") FROM foo", | ||
} | ||
`); | ||
}); | ||
it(`should select for no key update`, async () => { | ||
const rows = await db | ||
.select(db.item.name) | ||
.from(db.item) | ||
.forNoKeyUpdate(); | ||
expect(rows).toHaveLength(0); | ||
it(`should select with in`, () => { | ||
const query = db.select(foo.id).where(foo.name.in([`A`, `B`, `C`])); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [ | ||
"A", | ||
"B", | ||
"C", | ||
], | ||
"text": "SELECT foo.id WHERE foo.name IN ($1, $2, $3)", | ||
} | ||
`); | ||
}); | ||
it(`should select for share`, async () => { | ||
const rows = await db | ||
.select(db.item.name) | ||
.from(db.item) | ||
.forShare(); | ||
expect(rows).toHaveLength(0); | ||
it(`should select with order by`, () => { | ||
const query = db.select(foo.id).orderBy(foo.name.asc().nullsFirst()); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "SELECT foo.id ORDER BY foo.name ASC NULLS FIRST", | ||
} | ||
`); | ||
}); | ||
it(`should select for no key share`, async () => { | ||
const rows = await db | ||
.select(db.item.name) | ||
.from(db.item) | ||
.forNoKeyShare(); | ||
expect(rows).toHaveLength(0); | ||
it(`should select where is not null`, () => { | ||
const query = db.select(foo.id).where(foo.value.isNotNull()); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "SELECT foo.id WHERE foo.value IS NOT NULL", | ||
} | ||
`); | ||
}); | ||
it(`should select for update of tables`, async () => { | ||
const rows = await db | ||
.select(db.item.name) | ||
.from(db.item) | ||
.crossJoin(db.itemFoo) | ||
.forUpdate() | ||
.of(db.item, db.itemFoo); | ||
expect(rows).toHaveLength(0); | ||
it(`should basic math`, () => { | ||
const query = db | ||
.select(foo.id) | ||
.from(foo) | ||
.where(foo.value | ||
.plus(1) | ||
.multiply(2) | ||
.minus(3) | ||
.divide(4) | ||
.modulo(5) | ||
.between(-10) | ||
.and(10)); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [ | ||
1, | ||
2, | ||
3, | ||
4, | ||
5, | ||
-10, | ||
10, | ||
], | ||
"text": "SELECT foo.id FROM foo WHERE foo.value + $1 * $2 - $3 / $4 % $5 BETWEEN $6 AND $7", | ||
} | ||
`); | ||
}); | ||
it(`should select skip locked`, async () => { | ||
const rows = await db | ||
.select(db.item.name) | ||
.from(db.item) | ||
.forUpdate() | ||
.skipLocked(); | ||
expect(rows).toHaveLength(0); | ||
it(`should select camel cased`, () => { | ||
const query = db.select(foo.id).from(foo).where(foo.createDate.isNotNull()); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [], | ||
"text": "SELECT foo.id FROM foo WHERE foo.create_date IS NOT NULL", | ||
} | ||
`); | ||
}); | ||
it(`should select nowait`, async () => { | ||
const rows = await db | ||
.select(db.item.name) | ||
.from(db.item) | ||
.forUpdate() | ||
.nowait(); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
it(`should left join`, async () => { | ||
const rows = await db | ||
.select(db.itemFoo.id, db.itemFoo.name, db.item.value) | ||
.from(db.itemFoo) | ||
.leftJoin(db.item) | ||
.on(db.itemFoo.itemId.eq(db.item.id)); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
it(`should cross join`, async () => { | ||
const rows = await db | ||
.select(db.itemFoo.id, db.itemFoo.name, db.item.value) | ||
.from(db.itemFoo) | ||
.crossJoin(db.item); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
it(`should join`, async () => { | ||
const rows = await db | ||
.select(db.itemFoo.id, db.itemFoo.name, db.item.value) | ||
.from(db.itemFoo) | ||
.join(db.item) | ||
.on(db.itemFoo.itemId.eq(db.item.id)); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
it(`should right join`, async () => { | ||
const rows = await db | ||
.select(db.itemFoo.id, db.itemFoo.name, db.item.value) | ||
.from(db.itemFoo) | ||
.rightJoin(db.item) | ||
.on(db.itemFoo.itemId.eq(db.item.id)); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
it(`should left outer join`, async () => { | ||
const rows = await db | ||
.select(db.itemFoo.id, db.itemFoo.name, db.item.value) | ||
.from(db.itemFoo) | ||
.leftOuterJoin(db.item) | ||
.on(db.itemFoo.itemId.eq(db.item.id)); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
it(`should right outer join join`, async () => { | ||
const rows = await db | ||
.select(db.itemFoo.id, db.itemFoo.name, db.item.value) | ||
.from(db.itemFoo) | ||
.rightOuterJoin(db.item) | ||
.on(db.itemFoo.itemId.eq(db.item.id)); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
it(`should full outer join`, async () => { | ||
const rows = await db | ||
.select(db.itemFoo.id, db.itemFoo.name, db.item.value) | ||
.from(db.itemFoo) | ||
.fullOuterJoin(db.item) | ||
.on(db.itemFoo.itemId.eq(db.item.id)); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
it(`should full join`, async () => { | ||
const rows = await db | ||
.select(db.itemFoo.id, db.itemFoo.name, db.item.value) | ||
.from(db.itemFoo) | ||
.fullJoin(db.item) | ||
.on(db.itemFoo.itemId.eq(db.item.id)); | ||
expect(rows).toHaveLength(0); | ||
}); | ||
it(`should select count in subquery`, async () => { | ||
const result = await db | ||
.insertInto(db.item) | ||
.values({ name: `Test` }) | ||
.returning(`id`); | ||
const { id: itemId } = result; | ||
await db.insertInto(db.itemFoo).values([ | ||
{ | ||
itemId, | ||
name: `A`, | ||
}, | ||
{ | ||
itemId, | ||
name: `B`, | ||
}, | ||
]); | ||
const [row] = await db | ||
.select(db.item.id, db | ||
.select(db.itemFoo.id.count()) | ||
.from(db.itemFoo) | ||
.where(db.itemFoo.itemId.eq(db.item.id)) | ||
.as(`test`)) | ||
.from(db.item); | ||
expect(row).toMatchInlineSnapshot({ id: expect.any(String) }, ` | ||
it(`should select aggregate on camel cased column`, () => { | ||
const query = db.select(__1.count(foo.createDate)).from(foo); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"id": Any<String>, | ||
"test": "2", | ||
"parameters": Array [], | ||
"text": "SELECT (COUNT (foo.create_date \\"createDate\\")) FROM foo", | ||
} | ||
@@ -213,2 +170,1 @@ `); | ||
}); | ||
//# sourceMappingURL=select.test.js.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const dataTypes_1 = require("./../columns/dataTypes"); | ||
const defines_1 = require("../defines"); | ||
const database_1 = require("../database"); | ||
const keywords_1 = require("../keywords"); | ||
const __1 = require(".."); | ||
const helpers_1 = require("./helpers"); | ||
describe(`update`, () => { | ||
const itemTable = defines_1.defineTable({ | ||
id: dataTypes_1.uuid() | ||
.primary() | ||
.notNull() | ||
.default(`gen_random_uuid()`), | ||
createdAt: dataTypes_1.timestamptz() | ||
.notNull() | ||
.default(keywords_1.now()), | ||
name: dataTypes_1.text().notNull(), | ||
value: dataTypes_1.integer(), | ||
const foo = __1.defineTable(`foo`, { | ||
id: __1.uuid().primaryKey().default(`gen_random_id()`), | ||
createDate: __1.timestampWithTimeZone().notNull().default(`now()`), | ||
name: __1.text().notNull(), | ||
value: __1.integer(), | ||
}); | ||
const db = database_1.createDatabase(process.env.DATABASE_URL, { | ||
item: itemTable, | ||
const db = __1.defineDb(() => Promise.resolve({ rows: [], affectedRowsCount: 0 })); | ||
it(`should update foo`, () => { | ||
const query = db | ||
.update(foo) | ||
.set({ name: `Test` }) | ||
.where(foo.value.isNull()) | ||
.returning(`id`, `createDate`); | ||
expect(helpers_1.toSnap(query)).toMatchInlineSnapshot(` | ||
Object { | ||
"parameters": Array [ | ||
"Test", | ||
], | ||
"text": "UPDATE foo SET name = $1 WHERE foo.value IS NULL RETURNING id, create_date \\"createDate\\"", | ||
} | ||
`); | ||
}); | ||
beforeEach(async () => { | ||
await db.sql `CREATE EXTENSION IF NOT EXISTS "pgcrypto"`; | ||
await db.sql `CREATE TABLE item ( | ||
id UUID PRIMARY KEY NOT NULL DEFAULT gen_random_uuid(), | ||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), | ||
name TEXT NOT NULL, | ||
value INTEGER | ||
)`; | ||
}); | ||
afterEach(async () => { | ||
await db.sql `DROP TABLE item`; | ||
}); | ||
afterAll(async () => { | ||
await db.destroy(); | ||
}); | ||
it(`should update and return affect rows`, async () => { | ||
const affectedRows = await db.update(db.item).set({ value: undefined }); | ||
expect(affectedRows).toEqual(0); | ||
}); | ||
it(`should update with returning`, async () => { | ||
const result = await db | ||
.update(db.item) | ||
.set({ value: undefined }) | ||
.returning(db.item.value); | ||
expect(result).toHaveLength(0); | ||
}); | ||
}); | ||
//# sourceMappingURL=update.test.js.map |
@@ -1,8 +0,4 @@ | ||
import { PartialQuery, Tokenable } from './query'; | ||
export * from './database'; | ||
export * from './columns'; | ||
export * from './columns/dataTypes'; | ||
export * from './keywords'; | ||
export * from './defines'; | ||
export * from './types'; | ||
export declare const not: (tokenable: Tokenable) => PartialQuery; | ||
export * from './data-types'; | ||
export * from './db'; | ||
export * from './sql-functions'; | ||
export { defineTable } from './table'; |
"use strict"; | ||
function __export(m) { | ||
for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; | ||
} | ||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | ||
}) : (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
o[k2] = m[k]; | ||
})); | ||
var __exportStar = (this && this.__exportStar) || function(m, exports) { | ||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const query_1 = require("./query"); | ||
const tokens_1 = require("./tokens"); | ||
__export(require("./database")); | ||
__export(require("./columns")); | ||
__export(require("./columns/dataTypes")); | ||
__export(require("./keywords")); | ||
__export(require("./defines")); | ||
// TODO: move this to expressions. | ||
exports.not = (tokenable) => new query_1.PartialQuery(new tokens_1.StringToken(`NOT`), new tokens_1.GroupToken(tokenable.toTokens())); | ||
//# sourceMappingURL=index.js.map | ||
exports.defineTable = void 0; | ||
__exportStar(require("./data-types"), exports); | ||
__exportStar(require("./db"), exports); | ||
__exportStar(require("./sql-functions"), exports); | ||
var table_1 = require("./table"); | ||
Object.defineProperty(exports, "defineTable", { enumerable: true, get: function () { return table_1.defineTable; } }); |
@@ -1,17 +0,11 @@ | ||
import { Column } from './columns'; | ||
export declare class TableType { | ||
[columnName: string]: Column<any>; | ||
import { Column, ColumnDefinition } from './column'; | ||
export declare type Table<TableName, Columns> = InternalTable<TableName, Columns> & Columns; | ||
interface InternalTable<TableName, Columns> { | ||
as<T>(alias: T): Table<T, { | ||
[K in keyof Columns]: Columns[K] extends Column<infer Name, string, infer DataType, infer IsNotNull, infer HasDefault, infer JoinType> ? Column<Name, T, DataType, IsNotNull, HasDefault, JoinType> : never; | ||
}>; | ||
} | ||
export declare class Table<Row, InsertRow = Row, UpdateRow = Row> { | ||
private readonly $name; | ||
private readonly $columnNames; | ||
private readonly $userDefinedTable; | ||
[`*`]: keyof Row; | ||
$row: Row; | ||
$insertRow: InsertRow; | ||
$updateRow: UpdateRow; | ||
['~']: string; | ||
[' ']: string; | ||
z: string; | ||
constructor(userDefinedTable: TableType, name: string); | ||
} | ||
export declare const defineTable: <T extends { | ||
[column: string]: ColumnDefinition<any, any, any>; | ||
}, TableName extends string>(tableName: TableName, tableDefinition: T) => Table<TableName, { [K in keyof T]: Column<K, TableName, T[K] extends ColumnDefinition<infer DataType, any, any> ? DataType : never, T[K] extends ColumnDefinition<any, infer IsNotNull, any> ? IsNotNull : never, T[K] extends ColumnDefinition<any, any, infer HasDefault> ? HasDefault : never, undefined>; }>; | ||
export {}; |
"use strict"; | ||
// type InternalTab<TableDefinitionName> = Nominal<TableDefinitionName>; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const columns_1 = require("./columns"); | ||
class TableType { | ||
} | ||
exports.TableType = TableType; | ||
class Table { | ||
constructor(userDefinedTable, name) { | ||
this.$userDefinedTable = userDefinedTable; | ||
this.$name = columns_1.toSnakeCase(name); | ||
this.$columnNames = Object.keys(userDefinedTable); | ||
this[`*`] = `${this.$name}.*`; | ||
const self = this; | ||
this.$columnNames.forEach(camelCaseName => { | ||
const column = userDefinedTable[camelCaseName]; | ||
if (!(column instanceof columns_1.Column)) { | ||
throw new Error(`Invalid column at ${name}#${camelCaseName}`); | ||
} | ||
const snakeCaseName = column.getSnakeCaseName(camelCaseName); | ||
if (self[camelCaseName]) { | ||
throw new Error(`Column \`${camelCaseName}\` in table \`${name}\` collides with property of the same name in TableWrapper class.`); | ||
} | ||
// TODO: change ColumnWrapper to Column instead ?! | ||
self[camelCaseName] = new columns_1.ColumnWrapper(this, column, camelCaseName, snakeCaseName); | ||
exports.defineTable = void 0; | ||
const column_1 = require("./column"); | ||
const data_1 = require("./data"); | ||
const snake_case_1 = require("./naming/snake-case"); | ||
const makeTable = (tableName, originalTableName, tableDefinition) => { | ||
const columnNames = Object.keys(tableDefinition); | ||
const columns = columnNames.reduce((map, columnName) => { | ||
const column = column_1.makeColumn(columnName, tableName, undefined); | ||
data_1.internalColumnData.set(column, { | ||
snakeCaseName: snake_case_1.toSnakeCase(columnName), | ||
}); | ||
this.$row = undefined; | ||
this.$insertRow = undefined; | ||
this.$updateRow = undefined; | ||
} | ||
/** @internal */ | ||
init(db) { | ||
this.$columnNames.forEach(columnName => { | ||
const column = this.$userDefinedTable[columnName]; | ||
column.createReference(db); | ||
}); | ||
} | ||
/** @internal */ | ||
getUserDefinedTable() { | ||
return this.$userDefinedTable; | ||
} | ||
/** @internal */ | ||
getColumns() { | ||
return this.$columnNames.map(columnName => this.getColumn(columnName)); | ||
} | ||
/** @internal */ | ||
getColumn(columnName) { | ||
return this[columnName]; | ||
} | ||
/** @internal */ | ||
getName() { | ||
return this.$name; | ||
} | ||
} | ||
exports.Table = Table; | ||
//# sourceMappingURL=table.js.map | ||
map[columnName] = column; | ||
return map; | ||
}, {}); | ||
const table = { | ||
...columns, | ||
as(alias) { | ||
return makeTable(alias, tableName, tableDefinition); | ||
}, | ||
}; | ||
data_1.internalTableData.set(table, { | ||
name: tableName, | ||
originalName: originalTableName, | ||
}); | ||
return table; | ||
}; | ||
exports.defineTable = (tableName, tableDefinition) => { | ||
return makeTable(tableName, undefined, tableDefinition); | ||
}; |
@@ -1,3 +0,2 @@ | ||
import { State } from '../query/base'; | ||
import { Token } from './token'; | ||
import { State, Token } from './token'; | ||
export declare class CollectionToken extends Token { | ||
@@ -4,0 +3,0 @@ tokens: Token[]; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const base_1 = require("../query/base"); | ||
exports.CollectionToken = void 0; | ||
const token_1 = require("./token"); | ||
@@ -11,3 +11,3 @@ class CollectionToken extends token_1.Token { | ||
reduce(state, numberOfParameters) { | ||
const tokensState = base_1.createState(this.tokens, numberOfParameters); | ||
const tokensState = token_1.createQueryState(this.tokens, numberOfParameters); | ||
state.parameters.push(...tokensState.parameters); | ||
@@ -19,2 +19,1 @@ state.text.push(...tokensState.text); | ||
exports.CollectionToken = CollectionToken; | ||
//# sourceMappingURL=collection-token.js.map |
@@ -1,3 +0,2 @@ | ||
import { State } from '../query/base'; | ||
import { Token } from './token'; | ||
import { State, Token } from './token'; | ||
export declare class GroupToken extends Token { | ||
@@ -4,0 +3,0 @@ tokens: Token[]; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const base_1 = require("../query/base"); | ||
exports.GroupToken = void 0; | ||
const token_1 = require("./token"); | ||
@@ -13,3 +13,3 @@ class GroupToken extends token_1.Token { | ||
reduce(state, numberOfParameters) { | ||
const tokensState = base_1.createState(this.tokens, numberOfParameters); | ||
const tokensState = token_1.createQueryState(this.tokens, numberOfParameters); | ||
state.parameters.push(...tokensState.parameters); | ||
@@ -29,2 +29,1 @@ const index = tokensState.text.length - 1; | ||
exports.GroupToken = GroupToken; | ||
//# sourceMappingURL=group-token.js.map |
export * from './collection-token'; | ||
export * from './empty-token'; | ||
export * from './group-token'; | ||
@@ -3,0 +4,0 @@ export * from './no-space-token'; |
"use strict"; | ||
function __export(m) { | ||
for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; | ||
} | ||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); | ||
}) : (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
o[k2] = m[k]; | ||
})); | ||
var __exportStar = (this && this.__exportStar) || function(m, exports) { | ||
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
__export(require("./collection-token")); | ||
__export(require("./group-token")); | ||
__export(require("./no-space-token")); | ||
__export(require("./parameter-token")); | ||
__export(require("./raw-token")); | ||
__export(require("./separator-token")); | ||
__export(require("./string-token")); | ||
__export(require("./token")); | ||
//# sourceMappingURL=index.js.map | ||
__exportStar(require("./collection-token"), exports); | ||
__exportStar(require("./empty-token"), exports); | ||
__exportStar(require("./group-token"), exports); | ||
__exportStar(require("./no-space-token"), exports); | ||
__exportStar(require("./parameter-token"), exports); | ||
__exportStar(require("./raw-token"), exports); | ||
__exportStar(require("./separator-token"), exports); | ||
__exportStar(require("./string-token"), exports); | ||
__exportStar(require("./token"), exports); |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.NoSpaceToken = void 0; | ||
const token_1 = require("./token"); | ||
@@ -16,2 +17,1 @@ class NoSpaceToken extends token_1.Token { | ||
exports.NoSpaceToken = NoSpaceToken; | ||
//# sourceMappingURL=no-space-token.js.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.ParameterToken = void 0; | ||
const token_1 = require("./token"); | ||
@@ -16,2 +17,1 @@ class ParameterToken extends token_1.Token { | ||
exports.ParameterToken = ParameterToken; | ||
//# sourceMappingURL=parameter-token.js.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.RawToken = void 0; | ||
const token_1 = require("./token"); | ||
@@ -16,2 +17,1 @@ class RawToken extends token_1.Token { | ||
exports.RawToken = RawToken; | ||
//# sourceMappingURL=raw-token.js.map |
@@ -1,3 +0,2 @@ | ||
import { State } from '../query/base'; | ||
import { Token } from './token'; | ||
import { State, Token } from './token'; | ||
export declare type Separator = ','; | ||
@@ -4,0 +3,0 @@ export declare class SeparatorToken extends Token { |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const base_1 = require("../query/base"); | ||
exports.SeparatorToken = void 0; | ||
const token_1 = require("./token"); | ||
@@ -16,3 +16,3 @@ class SeparatorToken extends token_1.Token { | ||
const last = index === length - 1; | ||
const tokenState = base_1.createState([token], parameterIndex); | ||
const tokenState = token_1.createQueryState([token], parameterIndex); | ||
if (tokenState.text.length > 0) { | ||
@@ -31,2 +31,1 @@ if (!last) { | ||
exports.SeparatorToken = SeparatorToken; | ||
//# sourceMappingURL=separator-token.js.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.StringToken = void 0; | ||
const token_1 = require("./token"); | ||
@@ -15,2 +16,1 @@ class StringToken extends token_1.Token { | ||
exports.StringToken = StringToken; | ||
//# sourceMappingURL=string-token.js.map |
export declare abstract class Token { | ||
abstract reduce(state: any, numberOfParameters: number): any; | ||
} | ||
export interface State { | ||
text: string[]; | ||
parameters: any[]; | ||
} | ||
export declare const createQueryState: (tokens: Token[], currentParameterIndex?: number) => State; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.createQueryState = exports.Token = void 0; | ||
class Token { | ||
} | ||
exports.Token = Token; | ||
//# sourceMappingURL=token.js.map | ||
exports.createQueryState = (tokens, currentParameterIndex = 0) => { | ||
const initialState = { | ||
text: [], | ||
parameters: [], | ||
}; | ||
return tokens.reduce((tokenState, token) => token.reduce(tokenState, tokenState.parameters.length + currentParameterIndex), initialState); | ||
}; |
@@ -1,27 +0,22 @@ | ||
import { ColumnWrapper } from './columns'; | ||
import { PickByValue } from 'utility-types'; | ||
export interface Null<T> { | ||
nullType: T; | ||
import { Column } from './column'; | ||
import { DeleteQuery } from './delete'; | ||
import { InsertQuery } from './insert'; | ||
import { NamedExpression } from './expression'; | ||
import { SelectQuery } from './select'; | ||
import { UpdateQuery } from './update'; | ||
export declare type ResultType = 'ROWS' | 'AFFECTED_COUNT'; | ||
export declare type PickByValue<T, ValueType> = Pick<T, { | ||
[Key in keyof T]-?: T[Key] extends ValueType ? Key : never; | ||
}[keyof T]>; | ||
export declare type Query = SelectQuery<any> | UpdateQuery<any, any> | InsertQuery<any, any> | DeleteQuery<any, any>; | ||
declare type GetColumnDataType<C> = C extends Column<any, any, infer DataType, infer IsNotNull, any, any> ? IsNotNull extends true ? DataType : DataType | undefined : never; | ||
export declare type GetReturning<TableColumns, ColumnName extends keyof TableColumns> = { | ||
[K in ColumnName]: GetColumnDataType<TableColumns[K]>; | ||
}; | ||
declare class DataType<Type, IsNull> { | ||
private _; | ||
} | ||
export interface NotNull<T> { | ||
notNullType: T; | ||
} | ||
export declare type toNotNull<T> = T extends Null<any> ? NotNull<T['nullType']> : T extends NotNull<any> ? T : NotNull<T>; | ||
export declare type toNull<T> = T extends NotNull<any> ? Null<T['notNullType']> : T extends Null<any> ? T : Null<T>; | ||
export declare type toType<T> = T extends never ? never : T extends NotNull<any> ? T['notNullType'] : T extends Null<any> ? T['nullType'] | undefined | null : T; | ||
export declare type TypeOf<T> = { | ||
[K in keyof T]?: toType<T[K]>; | ||
}; | ||
export declare type TypeOfNotNullable<T> = { | ||
[K in keyof T]: toType<T[K]>; | ||
}; | ||
export declare type Nullable<T> = { | ||
[P in keyof T]?: T[P] | null | undefined; | ||
}; | ||
export declare type SplitOptionalAndRequired<Table extends { | ||
[columnName: string]: ColumnWrapper<any, any, any, any, any>; | ||
}, ColumnType extends 'insertType' | 'selectType' | 'updateType'> = Nullable<TypeOf<PickByValue<{ | ||
[K in keyof Table]: Table[K][ColumnType]; | ||
}, Null<any>>>> & TypeOfNotNullable<PickByValue<{ | ||
[K in keyof Table]: Table[K][ColumnType]; | ||
}, NotNull<any>>>; | ||
export declare type ResultSet<T extends Query> = T extends SelectQuery<infer Selectables> ? { | ||
[K in keyof Selectables]: Selectables[K] extends Column<any, any, infer D, infer N, any, infer JoinType> ? Extract<JoinType, 'left-join'> extends never ? Extract<JoinType, 'left-side-of-right-join'> extends never ? Extract<JoinType, 'full-join'> extends never ? N extends true ? DataType<D, true> : DataType<D, false> : DataType<D, false> : DataType<D, false> : DataType<D, false> : Selectables[K] extends NamedExpression<any, infer D, infer IsNotNull> ? DataType<D, IsNotNull> : Selectables[K] extends SelectQuery<{}> ? ResultSet<Selectables[K]>[keyof ResultSet<Selectables[K]>] : never; | ||
} : T extends UpdateQuery<any, infer C> ? C : T extends InsertQuery<any, infer C> ? C : T extends DeleteQuery<any, infer C> ? C : never; | ||
export {}; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
//# sourceMappingURL=types.js.map | ||
class DataType { | ||
} |
{ | ||
"name": "@ff00ff/mammoth", | ||
"license": "MIT", | ||
"version": "1.0.0-beta.11", | ||
"version": "1.0.0-rc.1", | ||
"main": "./.build/index.js", | ||
@@ -18,49 +18,38 @@ "types": "./.build/index.d.ts", | ||
"files": [ | ||
".build", | ||
"bin" | ||
".build" | ||
], | ||
"devDependencies": { | ||
"@types/jest": "^26.0.13", | ||
"@types/node": "^14.6.4", | ||
"dts-jest": "^23.3.0", | ||
"jest": "^26.4.2", | ||
"prettier": "^2.1.1", | ||
"ts-jest": "^26.3.0", | ||
"typescript": "^4.0.2" | ||
}, | ||
"scripts": { | ||
"test": "jest --runInBand --setupFiles dotenv/config", | ||
"test": "jest", | ||
"prebuild": "rm -rf .build && rm -rf coverage", | ||
"build": "tsc", | ||
"publish-beta": "npm publish --tag beta" | ||
"build": "tsc" | ||
}, | ||
"dependencies": { | ||
"lodash": "^4.17.15", | ||
"minimist": "^1.2.2", | ||
"pg": "^7.18.1", | ||
"utility-types": "^3.10.0" | ||
}, | ||
"jest": { | ||
"moduleFileExtensions": [ | ||
"ts", | ||
"tsx", | ||
"js" | ||
], | ||
"coveragePathIgnorePatterns": [ | ||
"__tests__", | ||
"__checks__" | ||
], | ||
"testPathIgnorePatterns": [ | ||
"/\\.build/", | ||
"/node_modules/" | ||
"/node_modules/", | ||
"helpers" | ||
], | ||
"testRegex": "/__tests__|__checks__/.*\\.(test|check)\\.tsx?$", | ||
"testRegex": "/__tests__|__checks__/.*\\.(test|check)\\.ts$", | ||
"transform": { | ||
"\\.check\\.ts$": "dts-jest/transform", | ||
"\\.tsx?$": "ts-jest" | ||
"\\.ts$": "ts-jest" | ||
} | ||
}, | ||
"devDependencies": { | ||
"@types/jest": "^25.1.2", | ||
"@types/lodash": "^4.14.149", | ||
"@types/minimist": "^1.2.0", | ||
"@types/node": "^13.7.0", | ||
"@types/pg": "^7.14.1", | ||
"@types/uuid": "^3.4.7", | ||
"dotenv": "^8.2.0", | ||
"dts-jest": "^23.0.0", | ||
"jest": "^25.1.0", | ||
"prettier": "^1.19.1", | ||
"ts-jest": "^25.2.0", | ||
"ts-node": "^8.6.2", | ||
"typescript": "^3.8.3", | ||
"uuid": "^3.4.0" | ||
} | ||
} |
428
README.md
@@ -1,13 +0,13 @@ | ||
> This is `mammoth@next` which is available as beta in npm only. Switch to [mammoth@master](https://github.com/Ff00ff/mammoth/tree/master) to see the latest non-beta version. | ||
> This is `mammoth@next` which just had a major revamp. I'm about to publish a blog post with some more info. This will move to master soon. | ||
![Mammoth](https://s3-eu-west-1.amazonaws.com/mammoth-static.ff00ff.nl/mammoth-logo.png) | ||
# Mammoth: A type-safe Postgres query builder for TypeScript. | ||
# Mammoth: A type-safe Postgres query builder pur sang for TypeScript. | ||
[![Build Status](https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fff00ff%2Fmammoth%2Fbadge%3Fref%3Dnext&style=flat)](https://actions-badge.atrox.dev/ff00ff/mammoth/goto?ref=next) | ||
[![Coverage Status](https://coveralls.io/repos/github/Ff00ff/mammoth/badge.svg?branch=next)](https://coveralls.io/github/Ff00ff/mammoth?branch=next) | ||
[![Build Status](https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fff00ff%2Fmammoth%2Fbadge%3Fref%3Dmaster&style=flat)](https://actions-badge.atrox.dev/ff00ff/mammoth/goto?ref=master) | ||
[![Coverage Status](https://coveralls.io/repos/github/Ff00ff/mammoth/badge.svg?branch=master)](https://coveralls.io/github/Ff00ff/mammoth?branch=master) | ||
[![MIT License](https://img.shields.io/github/license/ff00ff/mammoth.svg)](https://raw.githubusercontent.com/Ff00ff/mammoth/master/LICENSE) | ||
``` | ||
npm i @ff00ff/mammoth@1.0.0-beta.9 | ||
npm i @ff00ff/mammoth@1.0.0-rc.1 | ||
``` | ||
@@ -17,114 +17,56 @@ | ||
Mammoth is a type-safe query builder. It only supports Postgres which we consider a feature. It's syntax is as close to SQL as possible so you already know how to use it. It's autocomplete features are great. It helps you avoid mistakes so you can develop applications faster. It comes with all features you need to create production ready apps. | ||
Mammoth is a type-safe query builder. It only supports Postgres which we consider a feature. It's syntax is as close to SQL as possible so you already know how to use it. It's autocomplete features are great. It helps you avoid mistakes so you can develop applications faster. | ||
```ts | ||
const rows = await db | ||
.select(db.list.id, db.list.createdAt) | ||
.from(db.list) | ||
.where(db.list.createdAt.gt(now().minus(days(2))).or(db.list.value.eq(0))) | ||
.limit(10); | ||
.select(foo.id, bar.name) | ||
.from(foo) | ||
.leftJoin(bar) | ||
.on(foo.barId.eq(bar.id)) | ||
.where(foo.id.eq(`1`)); | ||
``` | ||
--- | ||
The above query produces the following SQL: | ||
## Features | ||
- Type-safe query builder | ||
- Supports Postgres only | ||
- Excellent autocomplete | ||
- Transactions | ||
- Connection pooling | ||
- Automatic camelCase to/from snake_case conversion | ||
- No build step or watch needed | ||
--- | ||
<br/> | ||
### Quick start | ||
First of all it's important to define your schema. This means you have to define all your tables. | ||
```ts | ||
import mammoth from '@ff00ff/mammoth'; | ||
export const list = mammoth.defineTable({ | ||
id: mammoth | ||
.uuid() | ||
.primary() | ||
.notNull() | ||
.default(`gen_random_uuid()`), | ||
createdAt: mammoth | ||
.timestamptz() | ||
.notNull() | ||
.default(`now()`), | ||
name: mammoth.text().notNull(), | ||
value: mammoth.integer(), | ||
}); | ||
```sql | ||
SELECT | ||
foo.id, | ||
bar.name | ||
FROM foo | ||
LEFT JOIN bar ON (foo.bar_id = bar.id) | ||
WHERE | ||
foo.id = $1 | ||
``` | ||
> For now, it's only possible to define tables. But in the future you'll be able to define other types likes indices, functions, triggers, etc. | ||
More importantly, the resulting type of rows is `{ id: string; name: string | undefined }[]`. Notice how the name is automatically nullable because of the left join. | ||
Once your tables are defined you can create your database instance where you pass in all your tables. | ||
### Query examples | ||
```ts | ||
import mammoth from '@ff00ff/mammoth'; | ||
<details> | ||
<summary>Basic update</summary> | ||
export const db = mammoth.createDatabase(process.env.DATABASE_URL!, { | ||
list, | ||
}); | ||
``` | ||
Using your database instance you can access Mammoth's type safe query builder. Your db instance is a singleton which you can share throughout your app. | ||
```ts | ||
const rows = await db | ||
.select(db.list.id, db.list.createdAt) | ||
.from(db.list) | ||
.where(db.list.createdAt.gt(now().minus(days(2))).or(db.list.value.eq(0))) | ||
.limit(10); | ||
const updateCount = await db.update(foo).set({ name: `Test` }).where(foo.value.gt(0)); | ||
``` | ||
```sql | ||
SELECT list.id, list.created_at FROM list WHERE list.created_at > NOW() - $1::interval OR list.value = $2 LIMIT 10; | ||
UPDATE foo | ||
SET | ||
name = $1 | ||
WHERE | ||
value > $2 | ||
``` | ||
_A select should not require declaring an additional interface explicitly._ | ||
</details> | ||
The type of rows is automatically derived from the table. `.notNull()` columns are automatically required and the other columns are all optional. | ||
<details> | ||
<summary>Basic insert</summary> | ||
```ts | ||
const rows: { | ||
id: string; | ||
createdAt: Date; | ||
}[]; | ||
``` | ||
### Update | ||
When executing an update query, by default, the return type is `number` which indicates the number of affected rows. | ||
```ts | ||
const numberOfUpdates = await db | ||
.update(db.list) | ||
.set({ | ||
name: `New Name`, | ||
}) | ||
.where(db.list.id.eq(`acb82ff3-3311-430e-9d1d-8ff600abee31`)); | ||
``` | ||
```sql | ||
UPDATE list SET name = $1 WHERE list.id = $2 | ||
``` | ||
But when you use `.returning(..)` the return type is changed to an array of rows. | ||
```ts | ||
// { id: string }[] | ||
const rows = await db | ||
.update(db.list) | ||
.set({ | ||
name: `New Name`, | ||
.insertInto(foo) | ||
.values({ | ||
name: `Test`, | ||
value: 123, | ||
}) | ||
.where(db.list.id.eq(`acb82ff3-3311-430e-9d1d-8ff600abee31`)) | ||
.returning(`id`); | ||
@@ -134,186 +76,122 @@ ``` | ||
```sql | ||
UPDATE list SET name = $1 WHERE list.id = $2 RETURNING id | ||
INSERT INTO foo ( | ||
name, | ||
value | ||
) VALUES ( | ||
$1, | ||
$2 | ||
) | ||
RETURNING | ||
id | ||
``` | ||
### Insert | ||
</details> | ||
To insert a row you only have to specify the `.notNull()` without a `.default()`. The other columns are optional. | ||
<details> | ||
<summary>Insert into select</summary> | ||
```ts | ||
const numberOfRows = await db.insertInto(db.list).values({ | ||
name: `My List`, | ||
}); | ||
const affectedCount = await db | ||
.insertInto(foo, ['name']) | ||
.select(bar.name) | ||
.from(bar) | ||
.where(bar.name.isNotNull()); | ||
``` | ||
```sql | ||
INSERT INTO list (name) VALUES ($1) | ||
``` | ||
</details> | ||
> In an earlier version of Mammoth you still had to pass `undefined` for nullable columns, but with some type magic this is now fixed! | ||
--- | ||
Again, if you use `.returning(..)` the return type is changed automatically. | ||
<br/> | ||
```ts | ||
// { id: string, createdAt: Date, name: string } | ||
const list = await db | ||
.insertInto(db.list) | ||
.values({ | ||
name: `My List`, | ||
}) | ||
.returning(`id`, `createdAt`, `name`); | ||
``` | ||
### Quick start | ||
```sql | ||
INSERT INTO list (name) VALUES ($1) RETURNING id, created_at, name | ||
``` | ||
Mammoth is a query builder pur sang so it doesn't include a database driver. You need to create a db and pass a callback to execute the query. | ||
If you insert an array of rows and you use the `.returning(..)` the return type will change to an array as well. | ||
```ts | ||
// { id: string, createdAt: Date, name: string }[] | ||
const lists = await db | ||
.insertInto(db.list) | ||
.values([ | ||
{ | ||
name: `List #1`, | ||
}, | ||
{ | ||
name: `List #2`, | ||
}, | ||
]) | ||
.returning(`id`, `createdAt`, `name`); | ||
``` | ||
import { defineDb } from '@ff00ff/mammoth'; | ||
```sql | ||
INSERT INTO list (name) VALUES ($1), ($2) RETURNING id, created_at, name; | ||
``` | ||
const db = defineDb(async (query, parameters) => { | ||
const result = await pool.query(query, parameters); | ||
### Transactions | ||
You can call `transaction(callback)` which begins a transaction and depending on the promise you return in the transaction will commit or rollback the transaction. | ||
It's important you use the `db` passed in the transaction's callback, if not, you're effectively executing statements outside the transaction. | ||
```ts | ||
const result = await db.transaction(db => { | ||
const row = await db | ||
.insertInto(db.list) | ||
.values({ | ||
name: `My List`, | ||
}) | ||
.returning(`id`); | ||
await db.insertInto(db.listItem).values({ | ||
listId: row.id, | ||
name: `My Item`, | ||
}); | ||
return row; | ||
return { | ||
affectedRowCount: result.rowCount, | ||
rows: result.rows, | ||
}; | ||
}); | ||
``` | ||
### Schema | ||
To | ||
Before Mammoth can offer type safety features you have to define the schema in Mammoth's syntax. The syntax is designed to be as close to SQL as possible. | ||
```ts | ||
import mammoth from '@ff00ff/mammoth'; | ||
export const list = mammoth.defineTable({ | ||
id: mammoth.dataType(`UUID`).primary().notNull().default(`gen_random_uuid()`); | ||
createdAt = mammoth.dataType<Date>(`TIMESTAMP WITH TIME ZONE`).notNull().default(`NOW()`); | ||
name = mammoth.dataType(`TEXT`).notNull(); | ||
value = mammoth.dataType<number>(`INTEGER`); | ||
}) | ||
``` | ||
But to make things easier, there are data type specific functions. When using auto import this should be a breeze. | ||
```ts | ||
import mammoth from '@ff00ff/mammoth'; | ||
export const list = mammoth.defineTable({ | ||
id: mammoth | ||
.uuid() | ||
.primary() | ||
.notNull() | ||
.default(`gen_random_uuid()`), | ||
createdAt: mammoth | ||
.timestampWithTimeZone() | ||
.notNull() | ||
.default(`NOW()`), | ||
name: mammoth.text().notNull(), | ||
value: mammoth.integer(), | ||
const foo = defineTable(`foo`, { | ||
id: uuid().primaryKey().default(`gen_random_id()`), | ||
createDate: timestampWithTimeZone().notNull().default(`now()`), | ||
name: text().notNull(), | ||
value: integer(), | ||
}); | ||
export const listItem = mammoth.defineTable({ | ||
id: mammoth | ||
.uuid() | ||
.primary() | ||
.notNull() | ||
.default(`gen_random_uuid()`), | ||
createdAt: mammoth | ||
.timestampWithTimeZone() | ||
.notNull() | ||
.default(`now()`), | ||
listId: mammoth | ||
.uuid() | ||
.notNull() | ||
.references(list, 'id'), | ||
name: mammoth.text().notNull(), | ||
}); | ||
``` | ||
Which matches the below schema in SQL. | ||
> You should keep your column names camelCase in the defineTable call as they are automatically transformed to train_case throughout Mammoth. | ||
```sql | ||
CREATE TABLE list ( | ||
id UUID PRIMARY KEY NOT NULL DEFAULT gen_random_uuid(), | ||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), | ||
name TEXT NOT NULL, | ||
value INTEGER | ||
); | ||
## Compatibility | ||
CREATE TABLE list_item ( | ||
id UUID PRIMARY KEY NOT NULL DEFAULT gen_random_uuid(), | ||
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), | ||
list_id UUID NOT NULL REFERENCES list (id), | ||
name TEXT NOT NULL | ||
); | ||
``` | ||
Below is a list of clauses per query and a short description on what we Mammoth supports. | ||
> We want to avoid a build step (even if there is some smart watch going on) so the schema must be defined in TypeScript. | ||
<details> | ||
<summary>SELECT</summary> | ||
### Migrations | ||
- [ WITH [ RECURSIVE ] with_query [, ...] ] — Not supported yet | ||
- SELECT [ ALL | DISTINCT [ ON ( expression [, ...] ) ] ] — Mostly supported. Distinct not yet. | ||
- [ \* | expression [ [ AS ] output_name ] [, ...] ] — mostly supported. Selecting certain expressions like update queries, insert and delete queries are not supported yet. Select queries are though. | ||
- [ FROM from_item [, ...] ] — partially supported. Only 1 table is currently supported in the from. | ||
- [ WHERE condition ] — mostly supported. The condition concept is pretty broad but it should contain a lot of cases. | ||
- [ GROUP BY grouping_element [, ...] ] — supported. | ||
- [ HAVING condition [, ...] ] — supported. | ||
- [ WINDOW window_name AS ( window_definition ) [, ...] ] — not supported. | ||
- [ { UNION | INTERSECT | EXCEPT } [ ALL | DISTINCT ] select ] — not supported yet | ||
- [ ORDER BY expression [ ASC | DESC | USING operator ] [ NULLS { FIRST | LAST } ] [, ...] ] — supported, but expressions are pretty broad and there might be cases not covered yet. | ||
- [ LIMIT { count | ALL } ] — supported. | ||
- [ OFFSET start [ ROW | ROWS ] ] — supported. | ||
- [ FETCH { FIRST | NEXT } [ count ] { ROW | ROWS } ONLY ] — supported | ||
- [ FOR { UPDATE | NO KEY UPDATE | SHARE | KEY SHARE } [ OF table_name [, ...] ] [ NOWAIT | SKIP LOCKED ] [...] ] — supported | ||
The accompanying `mammoth-cli` helps you generate migrations based on your schema and existing migrations. | ||
</details> | ||
### Raw queries | ||
<details> | ||
<summary>UPDATE</summary> | ||
When a new keyword is introduced in Postgres which you want to use badly but is not supported in this library yet, you can always fall back to raw sql. You can mix the type-safe functions with raw sql: | ||
- [ WITH [ RECURSIVE ] with_query [, ...] ] — not supported yet. | ||
- UPDATE [ ONLY ] table_name [ * ] [ [ AS ] alias ] — supported | ||
- SET { column_name = { expression | DEFAULT } | — supported, but expression concept is very broad and might be incomplete | ||
- ( column_name [, ...] ) = [ ROW ] ( { expression | DEFAULT } [, ...] ) | — supported, but expression concept is very broad and might be incomplete in some cases | ||
- ( column_name [, ...] ) = ( sub-SELECT ) — not supported | ||
- } [, ...] | ||
- [ FROM from_item [, ...] ] — partially supported. Only 1 table as from item is supported | ||
- [ WHERE condition | WHERE CURRENT OF cursor_name ] — supported, but the condition concept is very broad and is incomplete in some cases. | ||
- [ RETURNING \* | output_expression [ [ AS ] output_name ] [, ...] ] — supported, but up to 10 expressions | ||
```ts | ||
db.select(db.list.id).from(db.list).append`MAGIC NEW ORDER BY`; | ||
``` | ||
</details> | ||
```sql | ||
SELECT list.id FROM list MAGIC NEW ORDER BY | ||
``` | ||
<details> | ||
<summary>DELETE</summary> | ||
You can also write raw sql completely. This is not advised, obviously, because it defeats the whole purpose of this library. | ||
- [ WITH [ RECURSIVE ] with_query [, ...] ] — not supported yet | ||
- DELETE FROM [ ONLY ] table_name [ * ] [ [ AS ] alias ] — supported | ||
- [ USING from_item [, ...] ] — supported | ||
- [ WHERE condition | WHERE CURRENT OF cursor_name ] — supported, but the condition concept is very broad and might be incomplete | ||
- [ RETURNING \* | output_expression [ [ AS ] output_name ] [, ... ] ] — supported, but up to 10 expressions | ||
</details> | ||
```ts | ||
const result = await db.sql`SELECT * FROM account WHERE account.name = ${name}`; | ||
``` | ||
<details> | ||
<summary>INSERT</summary> | ||
Because type information is lost when using raw queries, you can pass in a type | ||
- [ WITH [ RECURSIVE ] with_query [, ...] ] — not supported | ||
- INSERT INTO table_name [ AS alias ] [ ( column_name [, ...] ) ] — supported | ||
- [ OVERRIDING { SYSTEM | USER } VALUE ] — not supported | ||
- { DEFAULT VALUES | VALUES ( { expression | DEFAULT } [, ...] ) [, ...] | query } - supported, but expression is a broad concept and may not be complete | ||
- [ ON CONFLICT [ conflict_target ] conflict_action ] — supported | ||
- [ RETURNING \* | output_expression [ [ AS ] output_name ] [, ...] ] — supported, but limited to 10 expressions | ||
</details> | ||
```ts | ||
const result = await db.sql<{ name: string }>`SELECT name FROM list`; | ||
result.rows.forEach(row => { | ||
// row.name | ||
}); | ||
``` | ||
### Column data type | ||
@@ -330,7 +208,6 @@ | ||
| decimal() | DECIMAL | | ||
| enum() | _Creates an enum type_ | | ||
| integer() | INTEGER | | ||
| interval() | INTERVAL | | ||
| jsonb<T>() | JSONB | | ||
| json<T>() | JSON | | ||
| jsonb\<T>() | JSONB | | ||
| json\<T>() | JSON | | ||
| money() | MONEY | | ||
@@ -341,3 +218,3 @@ | number() | IntegerColumn | | ||
| text() | TEXT | | ||
| text<T>() | TEXT | | ||
| text\<T>() | TEXT | | ||
| time() | TIME | | ||
@@ -351,61 +228,8 @@ | timestamp() | TIMESTAMP | | ||
### Enum alternative | ||
## Contribute | ||
Instead of using an `EnumColumn`, [because you cannot remove values (only add or rename)](https://www.postgresql.org/docs/current/sql-altertype.html), you can also opt to use a | ||
`text<T>()` which allows enforcing a type in your application e.g. | ||
`text<'ONE' | 'TWO' | 'THREE'>()`. | ||
Once you clone the repo, do a `npm install` you should be able to run `npm test` seeing everything turn green. Feel free to pick up one of the open issues — in particular you can pick up one labeled with "good first issue". Be sure to claim the issue before you start so we avoid two or more people working on the same thing. | ||
```ts | ||
export const item = mammoth.defineTable({ | ||
id: mammoth.uuid() | ||
.primaryKey() | ||
.notNull() | ||
.default(`gen_random_uuid()`), | ||
value: mammoth.text<'FOO' | 'BAR' | 'BAZ'>().notNull(), | ||
}); | ||
``` | ||
Which enforces type checking of the value column in TypeScript land. | ||
```ts | ||
// Allowed | ||
await db.insertInto(db.item).values({ value: `FOO` }); | ||
// Not allowed | ||
await db.insertInto(db.item).values({ value: `another string value` }); | ||
``` | ||
Of course it doesn't create any constraints on the database level like `enum()` is doing. If that's something you desire you should pick enum instead. | ||
### Documents & JSON(B) | ||
You can use `jsonb<T>()` to store json data. By using the `T` parameter you can specify the | ||
type e.g. `jsonb<{ foo: number }>()`. This makes it easier to work with json columns. | ||
> There is currently limited support for the different json(b) functions and operators. This is planned for a next release. | ||
You do need to be careful when your type needs to evolve (change). | ||
## Contribute / Set up locally | ||
To contribute to this library, you first need to do a few things to get set up. | ||
First make sure you have a test postgres database. For example, `mammoth_test`: | ||
$ createdb mammoth_test | ||
If you installed postgres using homebrew, make sure you have a postgres user named `postgres`. You can create one using this command: `createuser -s postgres` | ||
Finally, make sure all the tests run and pass before making any changes. Create a `.env` file with the following contents. | ||
``` | ||
DATABASE_URL=postgres://postgres@localhost/mammoth_test | ||
``` | ||
> Replace the database url connection string with a string to your local database | ||
$ npm test | ||
--- | ||
##### Mammoth logo created by Eucalyp from the Noun Project. |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: The package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: The package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 4 instances in 1 package
Filesystem access
Supply chain risk: Accesses the file system and could potentially read sensitive data.
Found 1 instance in 1 package
0
7
1
127032
75
2318
231
2
- Removed lodash@^4.17.15
- Removed minimist@^1.2.2
- Removed pg@^7.18.1
- Removed utility-types@^3.10.0
- Removed buffer-writer@2.0.0 (transitive)
- Removed lodash@4.17.21 (transitive)
- Removed minimist@1.2.8 (transitive)
- Removed packet-reader@1.0.0 (transitive)
- Removed pg@7.18.2 (transitive)
- Removed pg-connection-string@0.1.3 (transitive)
- Removed pg-int8@1.0.1 (transitive)
- Removed pg-packet-stream@1.1.0 (transitive)
- Removed pg-pool@2.0.10 (transitive)
- Removed pg-types@2.2.0 (transitive)
- Removed pgpass@1.0.5 (transitive)
- Removed postgres-array@2.0.0 (transitive)
- Removed postgres-bytea@1.0.0 (transitive)
- Removed postgres-date@1.0.7 (transitive)
- Removed postgres-interval@1.2.0 (transitive)
- Removed semver@4.3.2 (transitive)
- Removed split2@4.2.0 (transitive)
- Removed utility-types@3.11.0 (transitive)
- Removed xtend@4.0.2 (transitive)