Comparing version 1.0.50 to 1.0.54
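Almost every hunk in this comparison is the same mechanical change in the compiled CommonJS output: calls through the exports namespace, such as `exports.deep_merge(...)`, are now emitted as indirect calls, `(0, exports.deep_merge)(...)`. This is the comma-operator pattern newer TypeScript releases use so that an exported function is invoked without the `exports` object as its `this` receiver, which leaves the behaviour of helpers that never read `this` unchanged. A small standalone sketch of what the `(0, fn)` form changes; the `who_am_i` helper below is invented purely for illustration:

```ts
// `this: any` keeps the sketch free of strict-mode type errors; only the receiver matters here.
function who_am_i(this: any) {
    return this === undefined ? 'no receiver' : this.name
}
const namespace_like = { name: 'exports object', who_am_i }

console.log(namespace_like.who_am_i())      // 'exports object' — called as a method, `this` is the object
console.log((0, namespace_like.who_am_i)()) // 'no receiver' — indirect call, `this` is undefined in ES modules / strict mode
```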
@@ -8,3 +8,3 @@ "use strict"; | ||
if (source[key] instanceof Object) | ||
Object.assign(source[key], exports.deep_merge(target[key], source[key])); | ||
Object.assign(source[key], (0, exports.deep_merge)(target[key], source[key])); | ||
} | ||
@@ -11,0 +11,0 @@ // Join `target` and modified `source` |
@@ -19,3 +19,3 @@ "use strict"; | ||
else if (path_template[recursion_depth] === 0) { | ||
(helpers_1.deep_get(current_path, obj) || []) | ||
((0, helpers_1.deep_get)(current_path, obj) || []) | ||
.forEach((el, i) => get_subpaths_recursive(path_template, obj, recursion_depth + 1, [...current_path, i])); | ||
@@ -22,0 +22,0 @@ } |
@@ -23,5 +23,5 @@ "use strict"; | ||
const next_path_el = i !== path_array.length - 1 ? path_array[i + 1] : undefined; | ||
const next_el_default = exports.type(next_path_el) === 'Number' ? [] : {}; | ||
const next_el_default = (0, exports.type)(next_path_el) === 'Number' ? [] : {}; | ||
const is_array = Array.isArray(pointer); | ||
const is_object = exports.is_simple_object(pointer); | ||
const is_object = (0, exports.is_simple_object)(pointer); | ||
// if (is_array && type(path_el) !== 'Number') { | ||
@@ -42,3 +42,3 @@ // throw new Error('Trying to path into an array without a number index') | ||
} | ||
const child_is_primitive = !exports.is_simple_object(pointer[path_el]) && | ||
const child_is_primitive = !(0, exports.is_simple_object)(pointer[path_el]) && | ||
!Array.isArray(pointer[path_el]); | ||
@@ -62,3 +62,3 @@ if (!contains_path_el || child_is_primitive) { | ||
const is_array = Array.isArray(pointer); | ||
const is_object = exports.is_simple_object(pointer); | ||
const is_object = (0, exports.is_simple_object)(pointer); | ||
// if (is_array && type(path_el) !== 'Number') { | ||
@@ -95,10 +95,10 @@ // throw new Error('Trying to path into an array without a number index') | ||
const new_path = [...current_path, i]; | ||
const subitem = exports.deep_map(el, processor, new_path); | ||
const subitem = (0, exports.deep_map)(el, processor, new_path); | ||
return subitem; | ||
}); | ||
} | ||
else if (exports.is_simple_object(item)) { | ||
else if ((0, exports.is_simple_object)(item)) { | ||
mapped_item = Object.keys(item).reduce((acc, key) => { | ||
const new_path = [...current_path, key]; | ||
const subitem = exports.deep_map(item[key], processor, new_path); | ||
const subitem = (0, exports.deep_map)(item[key], processor, new_path); | ||
acc[key] = subitem; | ||
@@ -125,3 +125,3 @@ return acc; | ||
const deep_for_each = (item, processor, current_path = []) => { | ||
const is_object = exports.is_simple_object(item); | ||
const is_object = (0, exports.is_simple_object)(item); | ||
const is_array = Array.isArray(item); | ||
@@ -132,3 +132,3 @@ const is_primitive = !is_object && !is_array; | ||
for (const prop in item) { | ||
exports.deep_for_each(item[prop], processor, [...current_path, prop]); | ||
(0, exports.deep_for_each)(item[prop], processor, [...current_path, prop]); | ||
} | ||
@@ -139,3 +139,3 @@ } | ||
item.forEach((el, i) => { | ||
exports.deep_for_each(el, processor, [...current_path, i]); | ||
(0, exports.deep_for_each)(el, processor, [...current_path, i]); | ||
}); | ||
@@ -178,3 +178,3 @@ } | ||
if (type == 'Array' || type == 'Object') { | ||
result[key] = exports.clone(value); | ||
result[key] = (0, exports.clone)(value); | ||
} | ||
@@ -181,0 +181,0 @@ else if (type == 'Date') { |
@@ -17,16 +17,16 @@ "use strict"; | ||
const [pth, list] = data[i]; | ||
const array_mode = helpers_1.last(pth) === 0; | ||
const path = array_mode ? helpers_1.drop_last(1, pth) : pth; | ||
const array_mode = (0, helpers_1.last)(pth) === 0; | ||
const path = array_mode ? (0, helpers_1.drop_last)(1, pth) : pth; | ||
if (!edges[i]) | ||
helpers_1.deep_set(path, list, result); | ||
(0, helpers_1.deep_set)(path, list, result); | ||
else { | ||
const left_list = extract_subpaths_1.extract_subpaths(helpers_1.drop_last(1, path), result); | ||
const { left, inner, right } = lir_join_1.lir_join(left_list, result, list, el => helpers_1.deep_get([...el, edges[i][0]], result), (l, i, r) => { | ||
const left_list = (0, extract_subpaths_1.extract_subpaths)((0, helpers_1.drop_last)(1, path), result); | ||
const { left, inner, right } = (0, lir_join_1.lir_join)(left_list, result, list, el => (0, helpers_1.deep_get)([...el, edges[i][0]], result), (l, i, r) => { | ||
r.forEach(right_adjacent => { | ||
l.forEach(left_adjacent => { | ||
if (array_mode) { | ||
push_path_1.push_path([...left_adjacent, helpers_1.last(path)], right_adjacent, i); | ||
(0, push_path_1.push_path)([...left_adjacent, (0, helpers_1.last)(path)], right_adjacent, i); | ||
} | ||
else { | ||
helpers_1.deep_set([...left_adjacent, helpers_1.last(path)], right_adjacent, i); | ||
(0, helpers_1.deep_set)([...left_adjacent, (0, helpers_1.last)(path)], right_adjacent, i); | ||
} | ||
@@ -33,0 +33,0 @@ }); |
@@ -9,8 +9,8 @@ "use strict"; | ||
const push_path = (path_to_arr, val, obj) => { | ||
const current_value = helpers_1.deep_get(path_to_arr, obj, null); | ||
const current_value = (0, helpers_1.deep_get)(path_to_arr, obj, null); | ||
if (!Array.isArray(current_value)) { | ||
helpers_1.deep_set(path_to_arr, [val], obj); | ||
(0, helpers_1.deep_set)(path_to_arr, [val], obj); | ||
} | ||
else { | ||
helpers_1.deep_get(path_to_arr, obj).push(val); | ||
(0, helpers_1.deep_get)(path_to_arr, obj).push(val); | ||
} | ||
@@ -17,0 +17,0 @@ return obj; |
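Throughout these hunks the deep helpers take the path first and the target object last, and `deep_set` mutates the target in place (`push_path` above builds directly on it, and its tests further down exercise the same shapes). A small sketch with the signatures inferred from this usage only; none of this is a published API description:

```ts
// Signatures inferred from the calls in this diff (push_path, nester); paths are arrays
// of keys/indices, intermediate containers are created as needed, and the object being
// read or mutated comes last.
declare const deep_set: (path: (string | number)[], value: unknown, obj: unknown) => unknown
declare const deep_get: (path: (string | number)[], obj: unknown, default_value?: unknown) => any

const obj: Record<string, any> = {}
deep_set(['my', 'nested', 'arr'], ['my_val'], obj) // obj -> { my: { nested: { arr: ['my_val'] } } }
deep_get(['my', 'nested', 'arr'], obj)             // -> ['my_val']
deep_get(['my', 'missing'], obj, null)             // -> null (third argument is the fallback)
```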
@@ -72,2 +72,2 @@ /** | ||
export declare const get_unique_field_groups: (entity_name: string, exclude_nullable: boolean, orma_schema: orma_schema) => string[][]; | ||
export declare const field_exists: (entity: string, field: string | number, schema: orma_schema) => string | import("../introspector/introspector").orma_index_schema[] | orma_field_schema; | ||
export declare const field_exists: (entity: string, field: string | number, schema: orma_schema) => string | orma_field_schema | import("../introspector/introspector").orma_index_schema[]; |
@@ -36,3 +36,3 @@ "use strict"; | ||
var _a, _b; | ||
if (exports.is_reserved_keyword(field_name)) { | ||
if ((0, exports.is_reserved_keyword)(field_name)) { | ||
return []; // could be $comment, which is not actually a field | ||
@@ -75,6 +75,6 @@ } | ||
} | ||
const entity_names = exports.get_entity_names(orma_schema); | ||
const entity_names = (0, exports.get_entity_names)(orma_schema); | ||
const cache = {}; | ||
for (const entity_name of entity_names) { | ||
const parent_edges = exports.get_parent_edges(entity_name, orma_schema); | ||
const parent_edges = (0, exports.get_parent_edges)(entity_name, orma_schema); | ||
const child_edges = parent_edges.map(exports.reverse_edge); | ||
@@ -105,4 +105,4 @@ for (const child_edge of child_edges) { | ||
const get_all_edges = (entity_name, orma_schema) => { | ||
const parent_edges = exports.get_parent_edges(entity_name, orma_schema); | ||
const child_edges = exports.get_child_edges(entity_name, orma_schema); | ||
const parent_edges = (0, exports.get_parent_edges)(entity_name, orma_schema); | ||
const child_edges = (0, exports.get_child_edges)(entity_name, orma_schema); | ||
return [...parent_edges, ...child_edges]; | ||
@@ -120,3 +120,3 @@ }; | ||
const get_direct_edges = (from_entity, to_entity, orma_schema) => { | ||
const possible_edges = exports.get_all_edges(from_entity, orma_schema); | ||
const possible_edges = (0, exports.get_all_edges)(from_entity, orma_schema); | ||
const edges = possible_edges.filter(el => el.to_entity === to_entity); | ||
@@ -130,3 +130,3 @@ return edges; | ||
const get_direct_edge = (from_entity, to_entity, orma_schema) => { | ||
const edges = exports.get_direct_edges(from_entity, to_entity, orma_schema); | ||
const edges = (0, exports.get_direct_edges)(from_entity, to_entity, orma_schema); | ||
if (edges.length !== 1) { | ||
@@ -159,3 +159,3 @@ throw Error(`Did not find exactly one edge from ${from_entity} to ${to_entity}`); | ||
const to_entity = entities[i]; | ||
const edge = exports.get_direct_edge(from_entity, to_entity, orma_schema); | ||
const edge = (0, exports.get_direct_edge)(from_entity, to_entity, orma_schema); | ||
return edge; | ||
@@ -170,3 +170,3 @@ }); | ||
const is_parent_entity = (entity1, entity2, orma_schema) => { | ||
const edges = exports.get_child_edges(entity1, orma_schema); | ||
const edges = (0, exports.get_child_edges)(entity1, orma_schema); | ||
return edges.some(edge => edge.to_entity === entity2); | ||
@@ -179,3 +179,3 @@ }; | ||
const get_primary_keys = (entity_name, orma_schema) => { | ||
const fields = exports.get_field_names(entity_name, orma_schema); | ||
const fields = (0, exports.get_field_names)(entity_name, orma_schema); | ||
const primary_key_fields = fields.filter(field => { | ||
@@ -182,0 +182,0 @@ const field_schema = orma_schema[entity_name][field]; |
@@ -6,4 +6,4 @@ "use strict"; | ||
const extract_subpaths_1 = require("../extract_subpaths"); | ||
mocha_1.describe('Extract subpaths', () => { | ||
mocha_1.test('Extracts subpaths from arrays', () => { | ||
(0, mocha_1.describe)('Extract subpaths', () => { | ||
(0, mocha_1.test)('Extracts subpaths from arrays', () => { | ||
const products = [{ | ||
@@ -31,5 +31,5 @@ id: 1, | ||
]; | ||
const subpaths = extract_subpaths_1.extract_subpaths(template_path, products); | ||
chai_1.expect(subpaths).to.deep.equal(goal); | ||
const subpaths = (0, extract_subpaths_1.extract_subpaths)(template_path, products); | ||
(0, chai_1.expect)(subpaths).to.deep.equal(goal); | ||
}); | ||
}); |
@@ -9,4 +9,4 @@ "use strict"; | ||
const push_path_1 = require("../push_path"); | ||
mocha_1.describe('lir_join', () => { | ||
mocha_1.test('Merges flat lists', () => { | ||
(0, mocha_1.describe)('lir_join', () => { | ||
(0, mocha_1.test)('Merges flat lists', () => { | ||
const products = [{ id: 1, title: 'Laptop' }]; | ||
@@ -18,8 +18,8 @@ const product_in_stores = [{ product_id: 1, store_id: 1 }]; | ||
}]; | ||
const { left, inner, right } = lir_join_1.lir_join(products, [], product_in_stores, el => el.id, (l, i, r) => { i.push(Object.assign(Object.assign({}, l[0]), { product_in_stores: r })); return i; }, el => el.product_id); | ||
chai_1.expect(left).to.deep.equal([]); | ||
chai_1.expect(inner).to.deep.equal(goal); | ||
chai_1.expect(right).to.deep.equal([]); | ||
const { left, inner, right } = (0, lir_join_1.lir_join)(products, [], product_in_stores, el => el.id, (l, i, r) => { i.push(Object.assign(Object.assign({}, l[0]), { product_in_stores: r })); return i; }, el => el.product_id); | ||
(0, chai_1.expect)(left).to.deep.equal([]); | ||
(0, chai_1.expect)(inner).to.deep.equal(goal); | ||
(0, chai_1.expect)(right).to.deep.equal([]); | ||
}); | ||
mocha_1.test('Nests many entities to one location (right to left)', () => { | ||
(0, mocha_1.test)('Nests many entities to one location (right to left)', () => { | ||
const products = [{ id: 1, title: 'Laptop' }]; | ||
@@ -31,8 +31,8 @@ const product_in_stores = [{ product_id: 1, store_id: 1 }, { product_id: 1, store_id: 2 }]; | ||
}]; | ||
const { left, inner, right } = lir_join_1.lir_join(products, [], product_in_stores, el => el.id, (l, i, r) => { i.push(Object.assign(Object.assign({}, l[0]), { product_in_stores: r })); return i; }, el => el.product_id); | ||
chai_1.expect(left).to.deep.equal([]); | ||
chai_1.expect(inner).to.deep.equal(goal); | ||
chai_1.expect(right).to.deep.equal([]); | ||
const { left, inner, right } = (0, lir_join_1.lir_join)(products, [], product_in_stores, el => el.id, (l, i, r) => { i.push(Object.assign(Object.assign({}, l[0]), { product_in_stores: r })); return i; }, el => el.product_id); | ||
(0, chai_1.expect)(left).to.deep.equal([]); | ||
(0, chai_1.expect)(inner).to.deep.equal(goal); | ||
(0, chai_1.expect)(right).to.deep.equal([]); | ||
}); | ||
mocha_1.test('Nests many entities to one location (left to right)', () => { | ||
(0, mocha_1.test)('Nests many entities to one location (left to right)', () => { | ||
const products = [{ id: 1, title: 'Laptop' }]; | ||
@@ -44,8 +44,8 @@ const product_in_stores = [{ product_id: 1, store_id: 1 }, { product_id: 1, store_id: 2 }]; | ||
}]; | ||
const { left, inner, right } = lir_join_1.lir_join(product_in_stores, [], products, el => el.product_id, (l, i, r) => { i.push(Object.assign(Object.assign({}, r[0]), { product_in_stores: l })); return i; }, el => el.id); | ||
chai_1.expect(left).to.deep.equal([]); | ||
chai_1.expect(inner).to.deep.equal(goal); | ||
chai_1.expect(right).to.deep.equal([]); | ||
const { left, inner, right } = (0, lir_join_1.lir_join)(product_in_stores, [], products, el => el.product_id, (l, i, r) => { i.push(Object.assign(Object.assign({}, r[0]), { product_in_stores: l })); return i; }, el => el.id); | ||
(0, chai_1.expect)(left).to.deep.equal([]); | ||
(0, chai_1.expect)(inner).to.deep.equal(goal); | ||
(0, chai_1.expect)(right).to.deep.equal([]); | ||
}); | ||
mocha_1.test('Deeply nests', () => { | ||
(0, mocha_1.test)('Deeply nests', () => { | ||
// This is a deep nest because images are going on the variant which is on the product | ||
@@ -101,29 +101,29 @@ // The example includes a scenario with multiple images on a variant | ||
}]; | ||
const { left, inner, right } = lir_join_1.lir_join(extract_subpaths_1.extract_subpaths([0, 'variants', 0], products), products, images, (left_el) => helpers_1.deep_get([...left_el, 'id'], products), (l, i, r) => { | ||
const { left, inner, right } = (0, lir_join_1.lir_join)((0, extract_subpaths_1.extract_subpaths)([0, 'variants', 0], products), products, images, (left_el) => (0, helpers_1.deep_get)([...left_el, 'id'], products), (l, i, r) => { | ||
// for each image, mutate inner | ||
// placing the element at correct subpath with images appended to path | ||
r.forEach(right_adjacent => push_path_1.push_path([...l[0], 'images'], right_adjacent, i)); | ||
r.forEach(right_adjacent => (0, push_path_1.push_path)([...l[0], 'images'], right_adjacent, i)); | ||
return i; | ||
}, el => el.variant_id); | ||
chai_1.expect(left.map(left_el => helpers_1.deep_get(left_el, products))).to.deep.equal([{ id: 44, sku: 'tssu1-pink' }]); | ||
chai_1.expect(inner).to.deep.equal(goal); | ||
chai_1.expect(right).to.deep.equal([{ id: 555, variant_id: 55, bucket_url: 'http://www.stray.purple.jpg' }]); | ||
(0, chai_1.expect)(left.map(left_el => (0, helpers_1.deep_get)(left_el, products))).to.deep.equal([{ id: 44, sku: 'tssu1-pink' }]); | ||
(0, chai_1.expect)(inner).to.deep.equal(goal); | ||
(0, chai_1.expect)(right).to.deep.equal([{ id: 555, variant_id: 55, bucket_url: 'http://www.stray.purple.jpg' }]); | ||
}); | ||
mocha_1.test('Accepts undefined as if it was a string', () => { | ||
(0, mocha_1.test)('Accepts undefined as if it was a string', () => { | ||
// obj[undefined] in js is the same as obj['undefined'] | ||
const list1 = [1, 2, 3]; | ||
const list2 = [2, 4, 6, undefined]; | ||
const { left, inner, right } = lir_join_1.lir_join(list1, [], list2, el => el, (l, i, r) => { | ||
const { left, inner, right } = (0, lir_join_1.lir_join)(list1, [], list2, el => el, (l, i, r) => { | ||
i.push(l[0]); | ||
return i; | ||
}, el => el); | ||
chai_1.expect(left).to.deep.equal([1, 3]); | ||
chai_1.expect(inner).to.deep.equal([2]); | ||
chai_1.expect(right).to.deep.equal([4, 6, undefined]); | ||
(0, chai_1.expect)(left).to.deep.equal([1, 3]); | ||
(0, chai_1.expect)(inner).to.deep.equal([2]); | ||
(0, chai_1.expect)(right).to.deep.equal([4, 6, undefined]); | ||
}); | ||
mocha_1.test('Accepts undefined as if it was a string in object', () => { | ||
(0, mocha_1.test)('Accepts undefined as if it was a string in object', () => { | ||
// obj[undefined] in js is the same as obj['undefined'] | ||
const original = [{ id: 6, quantity: 1, reason: 'customer' }]; | ||
const modified = [{ quantity: 1, reason: 'customer' }]; | ||
const { left, inner, right } = lir_join_1.lir_join(original, [], modified, x => x.id, (l, i, r) => { | ||
const { left, inner, right } = (0, lir_join_1.lir_join)(original, [], modified, x => x.id, (l, i, r) => { | ||
// if (l.length !== 1 || r.length !== 1) throw new Error('You must not have arrays where id is same across more than one entry eg [{id:2},{id:2}]') | ||
@@ -138,5 +138,5 @@ // const left_obj = l[0] | ||
}, x => x.id); | ||
chai_1.expect(left).to.deep.equal(original); | ||
chai_1.expect(inner).to.deep.equal([]); | ||
chai_1.expect(right).to.deep.equal(modified); | ||
(0, chai_1.expect)(left).to.deep.equal(original); | ||
(0, chai_1.expect)(inner).to.deep.equal([]); | ||
(0, chai_1.expect)(right).to.deep.equal(modified); | ||
}); | ||
@@ -143,0 +143,0 @@ mocha_1.test.skip('preserves order of elements in array'); |
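The `lir_join` contract exercised by the tests above: left and right elements that find no match by key pass through unchanged, and each group of matched elements is folded into the inner accumulator by the callback. Restating the flat-list case with a signature inferred from those calls (not a published type):

```ts
// Signature inferred from the test usage above; the real export lives in the package's
// lir_join helper module.
declare const lir_join: <L, I, R>(
    left: L[], inner: I, right: R[],
    left_key: (el: L) => unknown,
    on_match: (matching_left: L[], inner: I, matching_right: R[]) => I,
    right_key: (el: R) => unknown
) => { left: L[]; inner: I; right: R[] }

const { left, inner, right } = lir_join(
    [1, 2, 3],            // left
    [] as number[],       // inner accumulator
    [2, 4, 6],            // right
    el => el,             // key for left elements
    (l, i, r) => { i.push(l[0]); return i }, // fold each matched group into inner
    el => el              // key for right elements
)
// left: [1, 3], inner: [2], right: [4, 6]
```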
@@ -6,4 +6,4 @@ "use strict"; | ||
const nester_1 = require("../nester"); | ||
mocha_1.describe('nester', () => { | ||
mocha_1.test('basic deep nesting', async () => { | ||
(0, mocha_1.describe)('nester', () => { | ||
(0, mocha_1.test)('basic deep nesting', async () => { | ||
const data = [ | ||
@@ -35,6 +35,6 @@ [['vendors', 0], [{ id: 1 }, { id: 2 }]], | ||
}; | ||
let result = nester_1.nester(data, edges); | ||
chai_1.expect(result).to.deep.equal(goal); | ||
let result = (0, nester_1.nester)(data, edges); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles entities with no children', async () => { | ||
(0, mocha_1.test)('handles entities with no children', async () => { | ||
const data = [ | ||
@@ -65,6 +65,6 @@ [['vendors', 0], [{ id: 1 }]], | ||
}; | ||
let result = nester_1.nester(data, edges); | ||
chai_1.expect(result).to.deep.equal(goal); | ||
let result = (0, nester_1.nester)(data, edges); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles sibling nesting', async () => { | ||
(0, mocha_1.test)('handles sibling nesting', async () => { | ||
const data = [ | ||
@@ -104,6 +104,6 @@ [['vendors', 0], [{ id: 1 }]], | ||
}; | ||
let result = nester_1.nester(data, edges); | ||
chai_1.expect(result).to.deep.equal(goal); | ||
let result = (0, nester_1.nester)(data, edges); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('object nesting', () => { | ||
(0, mocha_1.test)('object nesting', () => { | ||
const data = [ | ||
@@ -128,6 +128,6 @@ [['vendors', 0], [{ id: 1 }, { id: 2 }]], | ||
}; | ||
let result = nester_1.nester(data, edges); | ||
chai_1.expect(result).to.deep.equal(goal); | ||
let result = (0, nester_1.nester)(data, edges); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles nesting same object multiple times', () => { | ||
(0, mocha_1.test)('handles nesting same object multiple times', () => { | ||
const data = [ | ||
@@ -156,4 +156,4 @@ [['variants', 0], [{ id: 10, product_id: 1 }, { id: 11, product_id: 1 }]], | ||
}; | ||
let result = nester_1.nester(data, edges); | ||
chai_1.expect(result).to.deep.equal(goal); | ||
let result = (0, nester_1.nester)(data, edges); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
@@ -205,3 +205,3 @@ mocha_1.test.skip('breadth speed test', async () => { | ||
// for (let i = 0; i < 10000; i++) { | ||
result = nester_1.nester(data, edges); | ||
result = (0, nester_1.nester)(data, edges); | ||
// } | ||
@@ -208,0 +208,0 @@ console.timeEnd('t'); |
@@ -6,13 +6,13 @@ "use strict"; | ||
const push_path_1 = require("../push_path"); | ||
mocha_1.describe('push_path', () => { | ||
mocha_1.test('Creates [] when nothing at path', () => { | ||
(0, mocha_1.describe)('push_path', () => { | ||
(0, mocha_1.test)('Creates [] when nothing at path', () => { | ||
let obj = { my: { nested: {} } }; | ||
push_path_1.push_path(['my', 'nested', 'arr'], 'my_val', obj); | ||
chai_1.expect(obj).to.deep.equal({ my: { nested: { arr: ['my_val'] } } }); | ||
(0, push_path_1.push_path)(['my', 'nested', 'arr'], 'my_val', obj); | ||
(0, chai_1.expect)(obj).to.deep.equal({ my: { nested: { arr: ['my_val'] } } }); | ||
}); | ||
mocha_1.test('Pushes to array when something is at path', () => { | ||
(0, mocha_1.test)('Pushes to array when something is at path', () => { | ||
let obj = { my: { nested: { arr: [1] } } }; | ||
push_path_1.push_path(['my', 'nested', 'arr'], 2, obj); | ||
chai_1.expect(obj).to.deep.equal({ my: { nested: { arr: [1, 2] } } }); | ||
(0, push_path_1.push_path)(['my', 'nested', 'arr'], 2, obj); | ||
(0, chai_1.expect)(obj).to.deep.equal({ my: { nested: { arr: [1, 2] } } }); | ||
}); | ||
}); |
@@ -6,5 +6,5 @@ "use strict"; | ||
const schema_helpers_1 = require("../schema_helpers"); | ||
mocha_1.describe('schema_helpers', () => { | ||
mocha_1.describe('get_entity_names', () => { | ||
mocha_1.test('gets entity names', () => { | ||
(0, mocha_1.describe)('schema_helpers', () => { | ||
(0, mocha_1.describe)('get_entity_names', () => { | ||
(0, mocha_1.test)('gets entity names', () => { | ||
const orma_schema = { | ||
@@ -14,8 +14,8 @@ vendors: {}, | ||
}; | ||
const entity_names = schema_helpers_1.get_entity_names(orma_schema); | ||
chai_1.expect(entity_names.sort()).to.deep.equal(['vendors', 'products'].sort()); | ||
const entity_names = (0, schema_helpers_1.get_entity_names)(orma_schema); | ||
(0, chai_1.expect)(entity_names.sort()).to.deep.equal(['vendors', 'products'].sort()); | ||
}); | ||
}); | ||
mocha_1.describe('get_field_names', () => { | ||
mocha_1.test('gets field names', () => { | ||
(0, mocha_1.describe)('get_field_names', () => { | ||
(0, mocha_1.test)('gets field names', () => { | ||
const orma_schema = { | ||
@@ -28,8 +28,8 @@ vendors: {}, | ||
}; | ||
const field_names = schema_helpers_1.get_field_names('products', orma_schema); | ||
chai_1.expect(field_names.sort()).to.deep.equal(['id', 'title'].sort()); | ||
const field_names = (0, schema_helpers_1.get_field_names)('products', orma_schema); | ||
(0, chai_1.expect)(field_names.sort()).to.deep.equal(['id', 'title'].sort()); | ||
}); | ||
}); | ||
mocha_1.describe('get_parent_edges', () => { | ||
mocha_1.test('gets parent edges', () => { | ||
(0, mocha_1.describe)('get_parent_edges', () => { | ||
(0, mocha_1.test)('gets parent edges', () => { | ||
const orma_schema = { | ||
@@ -50,11 +50,11 @@ vendors: { | ||
}; | ||
const parent_edges = schema_helpers_1.get_parent_edges('products', orma_schema); | ||
const parent_edges = (0, schema_helpers_1.get_parent_edges)('products', orma_schema); | ||
const goal = [ | ||
{ from_entity: 'products', from_field: 'vendor_id', to_entity: 'vendors', to_field: 'id' } | ||
]; | ||
chai_1.expect(parent_edges.sort()).to.deep.equal(goal.sort()); | ||
(0, chai_1.expect)(parent_edges.sort()).to.deep.equal(goal.sort()); | ||
}); | ||
}); | ||
mocha_1.describe('get_child_edges', () => { | ||
mocha_1.test('gets child edges', () => { | ||
(0, mocha_1.describe)('get_child_edges', () => { | ||
(0, mocha_1.test)('gets child edges', () => { | ||
const orma_schema = { | ||
@@ -75,11 +75,11 @@ vendors: { | ||
}; | ||
const parent_edges = schema_helpers_1.get_child_edges('vendors', orma_schema); | ||
const parent_edges = (0, schema_helpers_1.get_child_edges)('vendors', orma_schema); | ||
const goal = [ | ||
{ from_entity: 'vendors', from_field: 'id', to_entity: 'products', to_field: 'vendor_id' } | ||
]; | ||
chai_1.expect(parent_edges.sort()).to.deep.equal(goal.sort()); | ||
(0, chai_1.expect)(parent_edges.sort()).to.deep.equal(goal.sort()); | ||
}); | ||
}); | ||
mocha_1.describe('get_all_edges', () => { | ||
mocha_1.test('gets all edges', () => { | ||
(0, mocha_1.describe)('get_all_edges', () => { | ||
(0, mocha_1.test)('gets all edges', () => { | ||
const orma_schema = { | ||
@@ -109,3 +109,3 @@ vendors: { | ||
}; | ||
const all_edges = schema_helpers_1.get_all_edges('products', orma_schema); | ||
const all_edges = (0, schema_helpers_1.get_all_edges)('products', orma_schema); | ||
const goal = [ | ||
@@ -115,5 +115,5 @@ { from_entity: 'products', from_field: 'vendor_id', to_entity: 'vendors', to_field: 'id' }, | ||
]; | ||
chai_1.expect(all_edges.sort()).to.deep.equal(goal.sort()); | ||
(0, chai_1.expect)(all_edges.sort()).to.deep.equal(goal.sort()); | ||
}); | ||
}); | ||
}); |
@@ -7,10 +7,10 @@ "use strict"; | ||
describe(string_to_path_1.string_to_path.name, () => { | ||
mocha_1.test('can stringify and parse', () => { | ||
chai_1.expect(string_to_path_1.string_to_path(string_to_path_1.path_to_string(1))).to.deep.equal(1); | ||
chai_1.expect(string_to_path_1.string_to_path(string_to_path_1.path_to_string(['test.0']))).to.deep.equal(['test.0']); | ||
chai_1.expect(string_to_path_1.string_to_path(string_to_path_1.path_to_string([1, 2, 'test.0']))).to.deep.equal([1, 2, 'test.0']); | ||
chai_1.expect(string_to_path_1.string_to_path(string_to_path_1.path_to_string(false))).to.deep.equal(false); | ||
chai_1.expect(string_to_path_1.string_to_path(string_to_path_1.path_to_string(true))).to.deep.equal(true); | ||
chai_1.expect(string_to_path_1.string_to_path(string_to_path_1.path_to_string(null))).to.deep.equal(null); | ||
chai_1.expect(string_to_path_1.string_to_path(string_to_path_1.path_to_string('null'))).to.deep.equal('null'); | ||
(0, mocha_1.test)('can stringify and parse', () => { | ||
(0, chai_1.expect)((0, string_to_path_1.string_to_path)((0, string_to_path_1.path_to_string)(1))).to.deep.equal(1); | ||
(0, chai_1.expect)((0, string_to_path_1.string_to_path)((0, string_to_path_1.path_to_string)(['test.0']))).to.deep.equal(['test.0']); | ||
(0, chai_1.expect)((0, string_to_path_1.string_to_path)((0, string_to_path_1.path_to_string)([1, 2, 'test.0']))).to.deep.equal([1, 2, 'test.0']); | ||
(0, chai_1.expect)((0, string_to_path_1.string_to_path)((0, string_to_path_1.path_to_string)(false))).to.deep.equal(false); | ||
(0, chai_1.expect)((0, string_to_path_1.string_to_path)((0, string_to_path_1.path_to_string)(true))).to.deep.equal(true); | ||
(0, chai_1.expect)((0, string_to_path_1.string_to_path)((0, string_to_path_1.path_to_string)(null))).to.deep.equal(null); | ||
(0, chai_1.expect)((0, string_to_path_1.string_to_path)((0, string_to_path_1.path_to_string)('null'))).to.deep.equal('null'); | ||
// expect(string_to_path(path_to_string(undefined))).to.deep.equal(undefined) | ||
@@ -17,0 +17,0 @@ // expect(string_to_path(path_to_string(Infinity))).to.deep.equal(Infinity) |
@@ -6,8 +6,8 @@ "use strict"; | ||
const mocha_1 = require("mocha"); | ||
mocha_1.describe('toposort', () => { | ||
mocha_1.it('toposorts an empty graph', () => { | ||
chai_1.expect(toposort_1.toposort({})).to.deep.equal([]); | ||
(0, mocha_1.describe)('toposort', () => { | ||
(0, mocha_1.it)('toposorts an empty graph', () => { | ||
(0, chai_1.expect)((0, toposort_1.toposort)({})).to.deep.equal([]); | ||
}); | ||
mocha_1.it('toposorts a simple DAG', () => { | ||
chai_1.expect(toposort_1.toposort({ | ||
(0, mocha_1.it)('toposorts a simple DAG', () => { | ||
(0, chai_1.expect)((0, toposort_1.toposort)({ | ||
a: ['b'], | ||
@@ -18,4 +18,4 @@ b: ['c'], | ||
}); | ||
mocha_1.it('toposorts a richer DAG', () => { | ||
chai_1.expect(toposort_1.toposort({ | ||
(0, mocha_1.it)('toposorts a richer DAG', () => { | ||
(0, chai_1.expect)((0, toposort_1.toposort)({ | ||
a: ['c'], | ||
@@ -26,4 +26,4 @@ b: ['c'], | ||
}); | ||
mocha_1.it('toposorts a complex DAG', () => { | ||
const result = toposort_1.toposort({ | ||
(0, mocha_1.it)('toposorts a complex DAG', () => { | ||
const result = (0, toposort_1.toposort)({ | ||
a: ['c', 'f'], | ||
@@ -40,3 +40,3 @@ b: ['d', 'e'], | ||
}); | ||
chai_1.expect(result).to.deep.equal([ | ||
(0, chai_1.expect)(result).to.deep.equal([ | ||
['a', 'b'], | ||
@@ -48,3 +48,3 @@ ['c', 'd', 'e'], | ||
}); | ||
mocha_1.it('errors on a small cyclic graph', () => { | ||
(0, mocha_1.it)('errors on a small cyclic graph', () => { | ||
const dg = { | ||
@@ -56,7 +56,7 @@ a: ['b'], | ||
const sortCyclicGraph = () => { | ||
toposort_1.toposort(dg); | ||
(0, toposort_1.toposort)(dg); | ||
}; | ||
chai_1.expect(sortCyclicGraph).to.throw(Error); | ||
(0, chai_1.expect)(sortCyclicGraph).to.throw(Error); | ||
}); | ||
mocha_1.it('errors on a larger cyclic graph', () => { | ||
(0, mocha_1.it)('errors on a larger cyclic graph', () => { | ||
const dg = { | ||
@@ -70,11 +70,11 @@ a: ['b', 'c'], | ||
const sortCyclicGraph = () => { | ||
toposort_1.toposort(dg); | ||
(0, toposort_1.toposort)(dg); | ||
}; | ||
chai_1.expect(sortCyclicGraph).to.throw(Error); | ||
(0, chai_1.expect)(sortCyclicGraph).to.throw(Error); | ||
}); | ||
mocha_1.it('counts in-degrees for an empty DAG', () => { | ||
(0, mocha_1.it)('counts in-degrees for an empty DAG', () => { | ||
const DAG = {}; | ||
chai_1.expect(toposort_1.countInDegrees(DAG)).to.deep.equal({}); | ||
(0, chai_1.expect)((0, toposort_1.countInDegrees)(DAG)).to.deep.equal({}); | ||
}); | ||
mocha_1.it('counts in-degrees for a small DAG', () => { | ||
(0, mocha_1.it)('counts in-degrees for a small DAG', () => { | ||
const DAG = { | ||
@@ -84,3 +84,3 @@ a: ['b'], | ||
}; | ||
chai_1.expect(toposort_1.countInDegrees(DAG)).to.deep.equal({ | ||
(0, chai_1.expect)((0, toposort_1.countInDegrees)(DAG)).to.deep.equal({ | ||
a: 0, | ||
@@ -90,3 +90,3 @@ b: 1, | ||
}); | ||
mocha_1.it('counts in-degrees for a medium DAG', () => { | ||
(0, mocha_1.it)('counts in-degrees for a medium DAG', () => { | ||
const DAG = { | ||
@@ -98,3 +98,3 @@ a: ['b', 'c'], | ||
}; | ||
chai_1.expect(toposort_1.countInDegrees(DAG)).to.deep.equal({ | ||
(0, chai_1.expect)((0, toposort_1.countInDegrees)(DAG)).to.deep.equal({ | ||
a: 0, | ||
@@ -106,3 +106,3 @@ b: 1, | ||
}); | ||
mocha_1.it('counts in-degrees for a bigger DAG', () => { | ||
(0, mocha_1.it)('counts in-degrees for a bigger DAG', () => { | ||
const DAG = { | ||
@@ -120,3 +120,3 @@ a: ['c', 'f'], | ||
}; | ||
chai_1.expect(toposort_1.countInDegrees(DAG)).to.deep.equal({ | ||
(0, chai_1.expect)((0, toposort_1.countInDegrees)(DAG)).to.deep.equal({ | ||
a: 0, | ||
@@ -123,0 +123,0 @@ b: 0, |
@@ -12,3 +12,3 @@ "use strict"; | ||
const toposort = (dag) => { | ||
const indegrees = exports.countInDegrees(dag); | ||
const indegrees = (0, exports.countInDegrees)(dag); | ||
const sorted = []; | ||
@@ -15,0 +15,0 @@ let roots = getRoots(indegrees); |
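As the tests above show, `toposort` takes a map from each node to the nodes it points to and returns the nodes grouped into dependency tiers, throwing on cyclic graphs; `countInDegrees` is the helper that seeds that process. A minimal sketch with the signature inferred from those tests; the expected output is my reading of the tiered results asserted above, not an assertion from the package docs:

```ts
// Signature inferred from the tests above; the real export lives in the package's
// toposort helper module.
declare const toposort: (dag: Record<string, string[]>) => string[][]

const tiers = toposort({ a: ['c'], b: ['c'], c: [] })
// expected: [['a', 'b'], ['c']] — each tier holds the nodes whose incoming edges have
// all been emitted in earlier tiers; cyclic graphs throw instead of returning.
```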
export declare const orma_introspect: (db: string, fn: (s: string[]) => Promise<Record<string, unknown>[][]>) => Promise<import("./introspector/introspector").orma_schema>; | ||
export declare const orma_query: (raw_query: any, orma_schema: any, query_function: (sql_string: string[]) => Promise<Record<string, unknown>[][]>, escaping_function: (value: any) => any) => Promise<{}>; | ||
export declare const orma_query: <Schema extends import("./types/schema_types").DeepReadonlyObject<import("./introspector/introspector").orma_schema>, Query extends import("./types/query/query_types").OrmaQuery<Schema>>(raw_query: Query, orma_schema_input: Schema, query_function: (sql_string: string[]) => Promise<Record<string, unknown>[][]>, escaping_function: (value: any) => any) => Promise<import("./types/query/query_result_types").StripKeywords<import("./types/query/query_result_types").WrapInArrays<import("./types/query/query_result_types").AddSchemaTypes<Schema, Query, unknown>>>>; | ||
export declare const orma_mutate: (mutation: any, mutate_fn: import("./mutate/mutate").mutate_fn, escape_fn: import("./mutate/mutate").escape_fn, orma_schema: import("./introspector/introspector").orma_schema) => Promise<any>; |
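In this entry-point hunk the declaration change is to `orma_query`: it is now generic over the schema and the query, so the resolved value is a typed result derived from both rather than the previous `Promise<{}>`. The shapes of the two callbacks are unchanged; below is a minimal pair of stubs that satisfy the declared callback types (placeholders only, not a real database driver):

```ts
// Types copied from the declaration above; the stubs are illustrative placeholders.
type query_function = (sql_strings: string[]) => Promise<Record<string, unknown>[][]>
type escaping_function = (value: any) => any

const query_fn: query_function = async sql_strings =>
    sql_strings.map(() => [])            // one (here empty) array of row objects per SQL string
const escape_fn: escaping_function = value => value // no real escaping in this sketch
```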
@@ -114,3 +114,3 @@ /** | ||
export declare const generate_database_schema: (mysql_tables: mysql_table[], mysql_columns: mysql_column[], mysql_foreign_keys: mysql_foreign_key[], mysql_indexes: mysql_index[]) => orma_schema; | ||
declare const mysql_to_simple_types: { | ||
export declare const mysql_to_simple_types: { | ||
readonly bigint: "number"; | ||
@@ -146,5 +146,5 @@ readonly binary: "string"; | ||
}; | ||
export declare const as_orma_schema: <Schema extends import("../types/schema_types").DeepReadonlyObject<orma_schema>>(schema: Schema) => Schema; | ||
export declare const generate_field_schema: (mysql_column: mysql_column) => orma_field_schema; | ||
export declare const generate_index_schemas: (mysql_indexes: mysql_index[]) => Record<string, orma_index_schema[]>; | ||
export declare const orma_introspect: (db: string, fn: (s: string[]) => Promise<Record<string, unknown>[][]>) => Promise<orma_schema>; | ||
export {}; |
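Two members of the introspector declaration file become public here: the `mysql_to_simple_types` lookup table is now exported, and `as_orma_schema` is added. The compiled body further down shows `as_orma_schema` is an identity function whose generic signature preserves the literal type of a hand-written schema, which is what feeds the new generic `orma_query` above. A sketch; the generic constraint is omitted and the entity/field names are invented:

```ts
// Declaration simplified from the .d.ts above (the DeepReadonlyObject<orma_schema>
// constraint is dropped for brevity); the schema contents are made up.
declare const as_orma_schema: <Schema>(schema: Schema) => Schema

const my_schema = as_orma_schema({
    products: {
        id: { data_type: 'number' },
        title: { data_type: 'string' },
    },
} as const)
// typeof my_schema keeps the narrow literal types above instead of widening to orma_schema.
```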
@@ -7,3 +7,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.orma_introspect = exports.generate_index_schemas = exports.generate_field_schema = exports.generate_database_schema = exports.get_introspect_sqls = void 0; | ||
exports.orma_introspect = exports.generate_index_schemas = exports.generate_field_schema = exports.as_orma_schema = exports.mysql_to_simple_types = exports.generate_database_schema = exports.get_introspect_sqls = void 0; | ||
const helpers_1 = require("../helpers/helpers"); | ||
@@ -96,3 +96,3 @@ /** | ||
for (const mysql_column of mysql_columns) { | ||
const field_schema = exports.generate_field_schema(mysql_column); | ||
const field_schema = (0, exports.generate_field_schema)(mysql_column); | ||
database_schema[mysql_column.table_name][mysql_column.column_name] = field_schema; | ||
@@ -109,5 +109,5 @@ } | ||
]; | ||
helpers_1.deep_set(reference_path, {}, database_schema); | ||
(0, helpers_1.deep_set)(reference_path, {}, database_schema); | ||
} | ||
const index_schemas = exports.generate_index_schemas(mysql_indexes); | ||
const index_schemas = (0, exports.generate_index_schemas)(mysql_indexes); | ||
for (const table_name of Object.keys(index_schemas)) { | ||
@@ -119,3 +119,3 @@ database_schema[table_name].$indexes = index_schemas[table_name]; | ||
exports.generate_database_schema = generate_database_schema; | ||
const mysql_to_simple_types = { | ||
exports.mysql_to_simple_types = { | ||
bigint: 'number', | ||
@@ -151,6 +151,8 @@ binary: 'string', | ||
}; | ||
const as_orma_schema = (schema) => schema; | ||
exports.as_orma_schema = as_orma_schema; | ||
const generate_field_schema = (mysql_column) => { | ||
const { table_name, column_name, ordinal_position, column_default, is_nullable, data_type, character_maximum_length, numeric_precision, numeric_scale, datetime_precision, column_key, extra, generation_expression, column_comment } = mysql_column; | ||
const field_schema = { | ||
data_type: mysql_to_simple_types[data_type], | ||
data_type: exports.mysql_to_simple_types[data_type], | ||
ordinal_position | ||
@@ -196,7 +198,7 @@ }; | ||
const generate_index_schemas = (mysql_indexes) => { | ||
const mysql_indexes_by_table = helpers_1.group_by(mysql_indexes, index => index.table_name); | ||
const mysql_indexes_by_table = (0, helpers_1.group_by)(mysql_indexes, index => index.table_name); | ||
const table_names = Object.keys(mysql_indexes_by_table); | ||
const index_schemas_by_table = table_names.reduce((acc, table_name) => { | ||
const mysql_indexes = mysql_indexes_by_table[table_name]; | ||
const mysql_indexes_by_name = helpers_1.group_by(mysql_indexes, index => index.index_name); | ||
const mysql_indexes_by_name = (0, helpers_1.group_by)(mysql_indexes, index => index.index_name); | ||
const index_schemas = Object.keys(mysql_indexes_by_name).map(index_name => { | ||
@@ -223,3 +225,3 @@ const index = mysql_indexes_by_name[index_name][0]; | ||
const orma_introspect = async (db, fn) => { | ||
const sql_strings = exports.get_introspect_sqls(db); | ||
const sql_strings = (0, exports.get_introspect_sqls)(db); | ||
// @ts-ignore | ||
@@ -232,5 +234,5 @@ const [mysql_tables, mysql_columns, mysql_foreign_keys, mysql_indexes] = await fn(sql_strings); | ||
}, {}); | ||
const orma_schema = exports.generate_database_schema(mysql_tables.map(transform_keys_to_lower), mysql_columns.map(transform_keys_to_lower), mysql_foreign_keys.map(transform_keys_to_lower), mysql_indexes.map(transform_keys_to_lower)); | ||
const orma_schema = (0, exports.generate_database_schema)(mysql_tables.map(transform_keys_to_lower), mysql_columns.map(transform_keys_to_lower), mysql_foreign_keys.map(transform_keys_to_lower), mysql_indexes.map(transform_keys_to_lower)); | ||
return orma_schema; | ||
}; | ||
exports.orma_introspect = orma_introspect; |
@@ -7,9 +7,9 @@ "use strict"; | ||
const introspector_1 = require("./introspector"); | ||
mocha_1.describe('introspector', () => { | ||
mocha_1.test('introspect sqls are string', () => { | ||
const introspect_sqls = introspector_1.get_introspect_sqls('international'); | ||
chai_1.expect(introspect_sqls.length).to.equal(4); | ||
chai_1.expect(helpers_1.type(introspect_sqls[0])).to.equal('String'); | ||
(0, mocha_1.describe)('introspector', () => { | ||
(0, mocha_1.test)('introspect sqls are string', () => { | ||
const introspect_sqls = (0, introspector_1.get_introspect_sqls)('international'); | ||
(0, chai_1.expect)(introspect_sqls.length).to.equal(4); | ||
(0, chai_1.expect)((0, helpers_1.type)(introspect_sqls[0])).to.equal('String'); | ||
}); | ||
mocha_1.test('primary key field schema', () => { | ||
(0, mocha_1.test)('primary key field schema', () => { | ||
const mysql_column = { | ||
@@ -24,4 +24,4 @@ table_name: 'users', | ||
}; | ||
const field_schema = introspector_1.generate_field_schema(mysql_column); | ||
chai_1.expect(field_schema).to.deep.equal({ | ||
const field_schema = (0, introspector_1.generate_field_schema)(mysql_column); | ||
(0, chai_1.expect)(field_schema).to.deep.equal({ | ||
data_type: 'number', | ||
@@ -35,3 +35,3 @@ default: 'auto_increment', | ||
}); | ||
mocha_1.test('unique key field schema', () => { | ||
(0, mocha_1.test)('unique key field schema', () => { | ||
const mysql_column = { | ||
@@ -45,4 +45,4 @@ table_name: 'users', | ||
}; | ||
const field_schema = introspector_1.generate_field_schema(mysql_column); | ||
chai_1.expect(field_schema).to.deep.equal({ | ||
const field_schema = (0, introspector_1.generate_field_schema)(mysql_column); | ||
(0, chai_1.expect)(field_schema).to.deep.equal({ | ||
data_type: 'string', | ||
@@ -54,3 +54,3 @@ indexed: true, | ||
}); | ||
mocha_1.test('decimal precision field schema', () => { | ||
(0, mocha_1.test)('decimal precision field schema', () => { | ||
const mysql_column = { | ||
@@ -65,4 +65,4 @@ table_name: 'users', | ||
}; | ||
const field_schema = introspector_1.generate_field_schema(mysql_column); | ||
chai_1.expect(field_schema).to.deep.equal({ | ||
const field_schema = (0, introspector_1.generate_field_schema)(mysql_column); | ||
(0, chai_1.expect)(field_schema).to.deep.equal({ | ||
data_type: 'number', | ||
@@ -75,3 +75,3 @@ ordinal_position: 3, | ||
}); | ||
mocha_1.test('full schema test', () => { | ||
(0, mocha_1.test)('full schema test', () => { | ||
const mysql_tables = [ | ||
@@ -129,4 +129,4 @@ { | ||
]; | ||
const database_schema = introspector_1.generate_database_schema(mysql_tables, mysql_columns, mysql_foreign_keys, mysql_indexes); | ||
chai_1.expect(database_schema).to.deep.equal({ | ||
const database_schema = (0, introspector_1.generate_database_schema)(mysql_tables, mysql_columns, mysql_foreign_keys, mysql_indexes); | ||
(0, chai_1.expect)(database_schema).to.deep.equal({ | ||
posts: { | ||
@@ -163,3 +163,3 @@ $comment: 'user posts', | ||
}); | ||
mocha_1.test('generates index schemas', () => { | ||
(0, mocha_1.test)('generates index schemas', () => { | ||
const mysql_indexes = [ | ||
@@ -235,4 +235,4 @@ { | ||
]; | ||
const index_schemas_by_table = introspector_1.generate_index_schemas(mysql_indexes); | ||
chai_1.expect(index_schemas_by_table).to.deep.equal({ | ||
const index_schemas_by_table = (0, introspector_1.generate_index_schemas)(mysql_indexes); | ||
(0, chai_1.expect)(index_schemas_by_table).to.deep.equal({ | ||
users: [ | ||
@@ -239,0 +239,0 @@ { |
@@ -7,3 +7,3 @@ "use strict"; | ||
const paths_by_entity = {}; | ||
exports.mutation_entity_deep_for_each(mutation, (record, path, entity_name) => { | ||
(0, exports.mutation_entity_deep_for_each)(mutation, (record, path, entity_name) => { | ||
if (!paths_by_entity[entity_name]) { | ||
@@ -21,5 +21,5 @@ paths_by_entity[entity_name] = []; | ||
const mutation_entity_deep_for_each = (mutation, processor) => { | ||
helpers_1.deep_for_each(mutation, (value, path) => { | ||
(0, helpers_1.deep_for_each)(mutation, (value, path) => { | ||
if (path.length > 1 && | ||
typeof helpers_1.last(path) === 'number' && | ||
typeof (0, helpers_1.last)(path) === 'number' && | ||
typeof path[path.length - 2] === 'string') { | ||
@@ -26,0 +26,0 @@ // we are on an entity object |
@@ -13,3 +13,3 @@ "use strict"; | ||
// [[{"operation":"create","paths":[...]]}],[{"operation":"create","paths":[...]}]] | ||
const mutate_plan = exports.get_mutate_plan(mutation, orma_schema); | ||
const mutate_plan = (0, exports.get_mutate_plan)(mutation, orma_schema); | ||
let tier_results = { | ||
@@ -22,3 +22,3 @@ // Will be built up as each phase of the mutate_plan is executed | ||
const statements = planned_statements.flatMap(({ operation, paths }) => { | ||
const command_json_paths = exports.get_mutation_ast(operation, paths, mutation, tier_results, orma_schema, escape_fn); | ||
const command_json_paths = (0, exports.get_mutation_ast)(operation, paths, mutation, tier_results, orma_schema, escape_fn); | ||
return command_json_paths; | ||
@@ -29,10 +29,10 @@ }); | ||
const { path, row } = result; | ||
tier_results[string_to_path_1.path_to_string(path)] = row; | ||
tier_results[(0, string_to_path_1.path_to_string)(path)] = row; | ||
}); | ||
} | ||
const mutation_response = Object.entries(tier_results).reduce((acc, [path_string, row], i) => { | ||
const path = string_to_path_1.string_to_path(path_string); | ||
const mutation_obj = helpers_1.deep_get(path, mutation); | ||
const merged = deep_merge_1.deep_merge(mutation_obj, row); | ||
helpers_1.deep_set(path, merged, acc); | ||
const path = (0, string_to_path_1.string_to_path)(path_string); | ||
const mutation_obj = (0, helpers_1.deep_get)(path, mutation); | ||
const merged = (0, deep_merge_1.deep_merge)(mutation_obj, row); | ||
(0, helpers_1.deep_set)(path, merged, acc); | ||
return acc; | ||
@@ -92,4 +92,4 @@ }, {}); | ||
const update_asts = paths.map(path => { | ||
const record = helpers_1.deep_get(path, mutation); | ||
const identifying_keys = exports.get_identifying_keys(entity_name, record, orma_schema); | ||
const record = (0, helpers_1.deep_get)(path, mutation); | ||
const identifying_keys = (0, exports.get_identifying_keys)(entity_name, record, orma_schema); | ||
throw_identifying_key_errors('update', identifying_keys, path, mutation); | ||
@@ -99,5 +99,5 @@ const where = generate_record_where_clause(identifying_keys, record, escape_fn); | ||
.filter(key => !identifying_keys.includes(key)) | ||
.filter(key => !helpers_1.is_simple_object(record[key]) && | ||
.filter(key => !(0, helpers_1.is_simple_object)(record[key]) && | ||
!Array.isArray(record[key])) | ||
.filter(key => !schema_helpers_1.is_reserved_keyword(key)); | ||
.filter(key => !(0, schema_helpers_1.is_reserved_keyword)(key)); | ||
return { | ||
@@ -116,3 +116,3 @@ $update: entity_name, | ||
command_json_escaped: update_ast_escaped, | ||
command_sql: json_sql_1.json_to_sql(update_ast_escaped), | ||
command_sql: (0, json_sql_1.json_to_sql)(update_ast_escaped), | ||
}; | ||
@@ -127,4 +127,4 @@ }); | ||
const jsons = paths.map(path => { | ||
const record = helpers_1.deep_get(path, mutation); | ||
const identifying_keys = exports.get_identifying_keys(entity_name, record, orma_schema); | ||
const record = (0, helpers_1.deep_get)(path, mutation); | ||
const identifying_keys = (0, exports.get_identifying_keys)(entity_name, record, orma_schema); | ||
throw_identifying_key_errors('delete', identifying_keys, path, mutation); | ||
@@ -138,4 +138,4 @@ const where = generate_record_where_clause(identifying_keys, record, escape_fn); | ||
const wheres = paths.map(path => { | ||
const record = helpers_1.deep_get(path, mutation); | ||
const identifying_keys = exports.get_identifying_keys(entity_name, record, orma_schema); | ||
const record = (0, helpers_1.deep_get)(path, mutation); | ||
const identifying_keys = (0, exports.get_identifying_keys)(entity_name, record, orma_schema); | ||
throw_identifying_key_errors('delete', identifying_keys, path, mutation); | ||
@@ -145,3 +145,3 @@ const where = generate_record_where_clause(identifying_keys, record, escape_fn); | ||
}); | ||
const $where = query_helpers_1.combine_wheres(wheres, '$or'); | ||
const $where = (0, query_helpers_1.combine_wheres)(wheres, '$or'); | ||
const ast = { | ||
@@ -157,3 +157,3 @@ $delete_from: entity_name, | ||
command_json_escaped: ast, | ||
command_sql: json_sql_1.json_to_sql(ast), | ||
command_sql: (0, json_sql_1.json_to_sql)(ast), | ||
}, | ||
@@ -173,5 +173,5 @@ ]; | ||
const keys_to_insert = Object.keys(record) | ||
.filter(key => !helpers_1.is_simple_object(record[key]) && | ||
.filter(key => !(0, helpers_1.is_simple_object)(record[key]) && | ||
!Array.isArray(record[key])) | ||
.filter(key => !schema_helpers_1.is_reserved_keyword(key)); | ||
.filter(key => !(0, schema_helpers_1.is_reserved_keyword)(key)); | ||
keys_to_insert.forEach(key => acc.add(key)); | ||
@@ -196,3 +196,3 @@ return acc; | ||
command_json_escaped, | ||
command_sql: json_sql_1.json_to_sql(command_json_escaped), | ||
command_sql: (0, json_sql_1.json_to_sql)(command_json_escaped), | ||
}, | ||
@@ -207,6 +207,6 @@ ]; | ||
const entity_name = path_to_entity(record_path); | ||
const record = helpers_1.deep_get(record_path, mutation); | ||
const record = (0, helpers_1.deep_get)(record_path, mutation); | ||
// get a list of the above path, as well as any below paths. | ||
// Some of these might by parents and some might be children. | ||
const above_path = helpers_1.drop_last(2, record_path); | ||
const above_path = (0, helpers_1.drop_last)(2, record_path); | ||
const below_paths = Object.keys(record) | ||
@@ -222,3 +222,3 @@ .filter(key => Array.isArray(record[key])) | ||
// dont do anything for the child paths (foreign keys only come from parents by definition) | ||
if (!schema_helpers_1.is_parent_entity(parent_entity_name, entity_name, orma_schema)) { | ||
if (!(0, schema_helpers_1.is_parent_entity)(parent_entity_name, entity_name, orma_schema)) { | ||
return obj; | ||
@@ -229,8 +229,8 @@ } | ||
// This function throws an error if there is not exactly one edge | ||
const edge = schema_helpers_1.get_direct_edge(entity_name, parent_entity_name, orma_schema); | ||
const edge = (0, schema_helpers_1.get_direct_edge)(entity_name, parent_entity_name, orma_schema); | ||
// we take the combined parent record as it is in the original mutation (this might have some of the foreign keys) | ||
// and also the same parent record from the previous results (e.g. autogenerated primiary keys from the database). | ||
// The combination of these will contain all the possible foreign key values from this specific parent. | ||
const parent_record = helpers_1.deep_get(parent_path, mutation); | ||
const previous_result = results_by_path[string_to_path_1.path_to_string(parent_path)]; | ||
const parent_record = (0, helpers_1.deep_get)(parent_path, mutation); | ||
const previous_result = results_by_path[(0, string_to_path_1.path_to_string)(parent_path)]; | ||
const parent_record_with_results = Object.assign(Object.assign({}, parent_record), previous_result); | ||
@@ -257,8 +257,8 @@ // now we just set the foreign key to whatever is in the combined parent object | ||
const path_to_entity = (path) => { | ||
return typeof helpers_1.last(path) === 'number' | ||
return typeof (0, helpers_1.last)(path) === 'number' | ||
? path[path.length - 2] | ||
: helpers_1.last(path); | ||
: (0, helpers_1.last)(path); | ||
}; | ||
const get_identifying_keys = (entity_name, record, orma_schema) => { | ||
const primary_keys = schema_helpers_1.get_primary_keys(entity_name, orma_schema); | ||
const primary_keys = (0, schema_helpers_1.get_primary_keys)(entity_name, orma_schema); | ||
const has_primary_keys = primary_keys.every(primary_key => record[primary_key] !== undefined); | ||
@@ -270,3 +270,3 @@ if (has_primary_keys && primary_keys.length > 0) { | ||
// all having null so that column wouldnt uniquely identify a record | ||
const unique_field_groups = schema_helpers_1.get_unique_field_groups(entity_name, true, orma_schema); | ||
const unique_field_groups = (0, schema_helpers_1.get_unique_field_groups)(entity_name, true, orma_schema); | ||
const included_unique_keys = unique_field_groups.filter(unique_fields => unique_fields.every(field => record[field] !== undefined)); | ||
@@ -353,5 +353,5 @@ if (included_unique_keys.length === 1) { | ||
const route_graph = {}; | ||
helpers_1.deep_for_each(mutation, (value, path) => { | ||
(0, helpers_1.deep_for_each)(mutation, (value, path) => { | ||
var _a; | ||
if (!helpers_1.is_simple_object(value) || path.length === 0) { | ||
if (!(0, helpers_1.is_simple_object)(value) || path.length === 0) { | ||
return; // not pointing to a single row | ||
@@ -363,3 +363,3 @@ } | ||
.concat([[]]) // this is because we can also inherit operation from the root | ||
.map(path_slice => helpers_1.deep_get([...path_slice, '$operation'], mutation)) // check the operation at each path slice | ||
.map(path_slice => (0, helpers_1.deep_get)([...path_slice, '$operation'], mutation)) // check the operation at each path slice | ||
.find((el) => typeof el === 'string'); // find the first defined operation | ||
@@ -371,3 +371,3 @@ if (!operation) { | ||
const route = [operation, ...path_template]; | ||
const route_string = string_to_path_1.path_to_string(route); | ||
const route_string = (0, string_to_path_1.path_to_string)(route); | ||
// add path to paths_by_route | ||
@@ -382,3 +382,3 @@ if (!paths_by_route[route_string]) { | ||
} | ||
const higher_path = typeof helpers_1.last(path) === 'number' | ||
const higher_path = typeof (0, helpers_1.last)(path) === 'number' | ||
? path.slice(0, path.length - 2) // need to remove the entity name and index template (the number 0) | ||
@@ -390,18 +390,18 @@ : path.slice(0, path.length - 1); // only remove the entity name | ||
// if higher row does not have $operation, it inherits the same operation as the current row | ||
const higher_operation = (_a = helpers_1.deep_get([...higher_path, '$operation'], mutation)) !== null && _a !== void 0 ? _a : operation; | ||
const higher_path_template = typeof helpers_1.last(path_template) === 'number' | ||
const higher_operation = (_a = (0, helpers_1.deep_get)([...higher_path, '$operation'], mutation)) !== null && _a !== void 0 ? _a : operation; | ||
const higher_path_template = typeof (0, helpers_1.last)(path_template) === 'number' | ||
? path_template.slice(0, path_template.length - 2) // need to remove the entity name and index template (the number 0) | ||
: path_template.slice(0, path_template.length - 1); // only remove the entity name | ||
const higher_route = [higher_operation, ...higher_path_template]; | ||
const higher_route_string = string_to_path_1.path_to_string(higher_route); | ||
const higher_route_string = (0, string_to_path_1.path_to_string)(higher_route); | ||
if (!route_graph[higher_route_string]) { | ||
route_graph[higher_route_string] = new Set(); | ||
} | ||
const entity = typeof helpers_1.last(path) === 'number' | ||
const entity = typeof (0, helpers_1.last)(path) === 'number' | ||
? path[path.length - 2] | ||
: helpers_1.last(path); | ||
const higher_entity = typeof helpers_1.last(higher_path) === 'number' | ||
: (0, helpers_1.last)(path); | ||
const higher_entity = typeof (0, helpers_1.last)(higher_path) === 'number' | ||
? higher_path[higher_path.length - 2] | ||
: helpers_1.last(higher_path); | ||
const higher_entity_is_parent = schema_helpers_1.is_parent_entity(higher_entity, entity, orma_schema); // in cases of self-referenced entities, this will return true. So they will be processed in regular order (top -> bottom for creates, bottom -> top for deletes) | ||
: (0, helpers_1.last)(higher_path); | ||
const higher_entity_is_parent = (0, schema_helpers_1.is_parent_entity)(higher_entity, entity, orma_schema); // in cases of self-referenced entities, this will return true. So they will be processed in regular order (top -> bottom for creates, bottom -> top for deletes) | ||
const parent_route_string = higher_entity_is_parent | ||
@@ -428,5 +428,5 @@ ? higher_route_string | ||
}, {}); | ||
const topological_ordering = toposort_1.toposort(toposort_graph); | ||
const topological_ordering = (0, toposort_1.toposort)(toposort_graph); | ||
const mutate_plan = topological_ordering.map(route_strings => route_strings.map(route_string => ({ | ||
operation: string_to_path_1.string_to_path(route_string)[0], | ||
operation: (0, string_to_path_1.string_to_path)(route_string)[0], | ||
paths: paths_by_route[route_string], | ||
@@ -433,0 +433,0 @@ }))); |
@@ -7,3 +7,3 @@ "use strict"; | ||
const escape_fn = el => (typeof el === 'string' ? `\`${el}\`` : el); | ||
mocha_1.describe('mutate', () => { | ||
(0, mocha_1.describe)('mutate', () => { | ||
const orma_schema = { | ||
@@ -81,4 +81,4 @@ grandparents: { | ||
}; | ||
mocha_1.describe(mutate_1.get_mutate_plan.name, () => { | ||
mocha_1.test('simple mutation', () => { | ||
(0, mocha_1.describe)(mutate_1.get_mutate_plan.name, () => { | ||
(0, mocha_1.test)('simple mutation', () => { | ||
const mutation = { | ||
@@ -99,3 +99,3 @@ parents: [ | ||
}; | ||
const mutate_plan = mutate_1.get_mutate_plan(mutation, orma_schema); | ||
const mutate_plan = (0, mutate_1.get_mutate_plan)(mutation, orma_schema); | ||
const goal = [ | ||
@@ -113,5 +113,5 @@ [{ operation: 'create', paths: [['parents', 0]] }], | ||
]; | ||
chai_1.expect(mutate_plan).to.deep.equal(goal); | ||
(0, chai_1.expect)(mutate_plan).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('respects operation precedence', () => { | ||
(0, mocha_1.test)('respects operation precedence', () => { | ||
const mutation = { | ||
@@ -130,3 +130,3 @@ parents: [ | ||
}; | ||
const mutate_plan = mutate_1.get_mutate_plan(mutation, orma_schema); | ||
const mutate_plan = (0, mutate_1.get_mutate_plan)(mutation, orma_schema); | ||
const goal = [ | ||
@@ -139,5 +139,5 @@ [ | ||
]; | ||
chai_1.expect(mutate_plan).to.deep.equal(goal); | ||
(0, chai_1.expect)(mutate_plan).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('respects topological ordering for create', () => { | ||
(0, mocha_1.test)('respects topological ordering for create', () => { | ||
const mutation = { | ||
@@ -155,3 +155,3 @@ parents: [ | ||
}; | ||
const mutate_plan = mutate_1.get_mutate_plan(mutation, orma_schema); | ||
const mutate_plan = (0, mutate_1.get_mutate_plan)(mutation, orma_schema); | ||
const goal = [ | ||
@@ -166,5 +166,5 @@ [{ operation: 'create', paths: [['parents', 0]] }], | ||
]; | ||
chai_1.expect(mutate_plan).to.deep.equal(goal); | ||
(0, chai_1.expect)(mutate_plan).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('respects topological ordering for update', () => { | ||
(0, mocha_1.test)('respects topological ordering for update', () => { | ||
const mutation = { | ||
@@ -182,3 +182,3 @@ parents: [ | ||
}; | ||
const mutate_plan = mutate_1.get_mutate_plan(mutation, orma_schema); | ||
const mutate_plan = (0, mutate_1.get_mutate_plan)(mutation, orma_schema); | ||
// update order is not guaranteed | ||
@@ -194,5 +194,5 @@ const goal = [ | ||
]; | ||
chai_1.expect(mutate_plan).to.deep.equal(goal); | ||
(0, chai_1.expect)(mutate_plan).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('respects topological ordering for delete', () => { | ||
(0, mocha_1.test)('respects topological ordering for delete', () => { | ||
const mutation = { | ||
@@ -210,3 +210,3 @@ parents: [ | ||
}; | ||
const mutate_plan = mutate_1.get_mutate_plan(mutation, orma_schema); | ||
const mutate_plan = (0, mutate_1.get_mutate_plan)(mutation, orma_schema); | ||
const goal = [ | ||
@@ -221,5 +221,5 @@ [ | ||
]; | ||
chai_1.expect(mutate_plan).to.deep.equal(goal); | ||
(0, chai_1.expect)(mutate_plan).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles mixed operation requests', () => { | ||
(0, mocha_1.test)('handles mixed operation requests', () => { | ||
const mutation = { | ||
@@ -250,3 +250,3 @@ grandparents: [ | ||
}; | ||
const mutate_plan = mutate_1.get_mutate_plan(mutation, orma_schema); | ||
const mutate_plan = (0, mutate_1.get_mutate_plan)(mutation, orma_schema); | ||
const goal = [ | ||
@@ -274,5 +274,5 @@ [ | ||
]; | ||
chai_1.expect(mutate_plan).to.deep.equal(goal); | ||
(0, chai_1.expect)(mutate_plan).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles entity with no children', () => { | ||
(0, mocha_1.test)('handles entity with no children', () => { | ||
const mutation = { | ||
@@ -285,7 +285,7 @@ parents: [ | ||
}; | ||
const mutate_plan = mutate_1.get_mutate_plan(mutation, orma_schema); | ||
const mutate_plan = (0, mutate_1.get_mutate_plan)(mutation, orma_schema); | ||
const goal = [[{ operation: 'update', paths: [['parents', 0]] }]]; | ||
chai_1.expect(mutate_plan).to.deep.equal(goal); | ||
(0, chai_1.expect)(mutate_plan).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles reverse nesting', () => { | ||
(0, mocha_1.test)('handles reverse nesting', () => { | ||
const mutation = { | ||
@@ -303,3 +303,3 @@ children: [ | ||
}; | ||
const mutate_plan = mutate_1.get_mutate_plan(mutation, orma_schema); | ||
const mutate_plan = (0, mutate_1.get_mutate_plan)(mutation, orma_schema); | ||
const goal = [ | ||
@@ -314,7 +314,7 @@ [ | ||
]; | ||
chai_1.expect(mutate_plan).to.deep.equal(goal); | ||
(0, chai_1.expect)(mutate_plan).to.deep.equal(goal); | ||
}); | ||
}); | ||
mocha_1.describe(mutate_1.get_mutation_ast.name, () => { | ||
mocha_1.test('update/delete by id', () => { | ||
(0, mocha_1.describe)(mutate_1.get_mutation_ast.name, () => { | ||
(0, mocha_1.test)('update/delete by id', () => { | ||
const mutation = { | ||
@@ -329,3 +329,3 @@ grandparents: [ | ||
}; | ||
const result = mutate_1.get_mutation_ast('update', [['grandparents', 0]], mutation, {}, orma_schema, escape_fn); | ||
const result = (0, mutate_1.get_mutation_ast)('update', [['grandparents', 0]], mutation, {}, orma_schema, escape_fn); | ||
const goal = [ | ||
@@ -351,5 +351,5 @@ { | ||
]; | ||
chai_1.expect(result).to.deep.equal(goal); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('foreign key has precedence over unique', () => { | ||
(0, mocha_1.test)('foreign key has precedence over unique', () => { | ||
const mutation = { | ||
@@ -363,3 +363,3 @@ parents: [ | ||
}; | ||
const result = mutate_1.get_mutation_ast('update', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
const result = (0, mutate_1.get_mutation_ast)('update', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
const goal = [ | ||
@@ -385,5 +385,5 @@ { | ||
]; | ||
chai_1.expect(result).to.deep.equal(goal); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('update/delete by unique', () => { | ||
(0, mocha_1.test)('update/delete by unique', () => { | ||
const mutation = { | ||
@@ -397,3 +397,3 @@ parents: [ | ||
}; | ||
const result = mutate_1.get_mutation_ast('update', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
const result = (0, mutate_1.get_mutation_ast)('update', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
const goal = [ | ||
@@ -419,5 +419,5 @@ { | ||
]; | ||
chai_1.expect(result).to.deep.equal(goal); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('throws on no update key', () => { | ||
(0, mocha_1.test)('throws on no update key', () => { | ||
const mutation = { | ||
@@ -431,8 +431,8 @@ parents: [ | ||
try { | ||
const result = mutate_1.get_mutation_ast('update', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
chai_1.expect('should have thrown an error').to.equal(true); | ||
const result = (0, mutate_1.get_mutation_ast)('update', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
(0, chai_1.expect)('should have thrown an error').to.equal(true); | ||
} | ||
catch (error) { } | ||
}); | ||
mocha_1.test('throws on multiple unique update keys', () => { | ||
(0, mocha_1.test)('throws on multiple unique update keys', () => { | ||
const mutation = { | ||
@@ -448,8 +448,8 @@ parents: [ | ||
try { | ||
const result = mutate_1.get_mutation_ast('update', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
chai_1.expect('should have thrown an error').to.equal(true); | ||
const result = (0, mutate_1.get_mutation_ast)('update', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
(0, chai_1.expect)('should have thrown an error').to.equal(true); | ||
} | ||
catch (error) { } | ||
}); | ||
mocha_1.test('handles compound primary key', () => { | ||
(0, mocha_1.test)('handles compound primary key', () => { | ||
const mutation = { | ||
@@ -464,3 +464,3 @@ children: [ | ||
}; | ||
const result = mutate_1.get_mutation_ast('update', [['children', 0]], mutation, {}, orma_schema, escape_fn); | ||
const result = (0, mutate_1.get_mutation_ast)('update', [['children', 0]], mutation, {}, orma_schema, escape_fn); | ||
const goal = { | ||
@@ -480,5 +480,5 @@ $update: 'children', | ||
}; | ||
chai_1.expect(result[0].command_json_escaped).to.deep.equal(goal); | ||
(0, chai_1.expect)(result[0].command_json_escaped).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles deletes', () => { | ||
(0, mocha_1.test)('handles deletes', () => { | ||
const mutation = { | ||
@@ -491,3 +491,3 @@ parents: [ | ||
}; | ||
const result = mutate_1.get_mutation_ast('delete', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
const result = (0, mutate_1.get_mutation_ast)('delete', [['parents', 0]], mutation, {}, orma_schema, escape_fn); | ||
const goal = [ | ||
@@ -511,7 +511,7 @@ { | ||
]; | ||
chai_1.expect(result).to.deep.equal(goal); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
}); | ||
mocha_1.describe(mutate_1.orma_mutate.name, () => { | ||
mocha_1.test('integrates orma mutation components', async () => { | ||
(0, mocha_1.describe)(mutate_1.orma_mutate.name, () => { | ||
(0, mocha_1.test)('integrates orma mutation components', async () => { | ||
const mutation = { | ||
@@ -556,3 +556,3 @@ parents: [ | ||
// const escape_fn = val => typeof val === 'string' ? `"${val}"` : val | ||
const results = await mutate_1.orma_mutate(mutation, mutate_fn, escape_fn, orma_schema); | ||
const results = await (0, mutate_1.orma_mutate)(mutation, mutate_fn, escape_fn, orma_schema); | ||
const goal = { | ||
@@ -577,5 +577,5 @@ parents: [ | ||
}; | ||
chai_1.expect(results).to.deep.equal(goal); | ||
(0, chai_1.expect)(results).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('integrates orma mutation components reverse nesting', async () => { | ||
(0, mocha_1.test)('integrates orma mutation components reverse nesting', async () => { | ||
const mutation = { | ||
@@ -605,3 +605,3 @@ $operation: 'create', | ||
// const escape_fn = val => typeof val === 'string' ? `"${val}"` : val | ||
const results = await mutate_1.orma_mutate(mutation, mutate_fn, escape_fn, orma_schema); | ||
const results = await (0, mutate_1.orma_mutate)(mutation, mutate_fn, escape_fn, orma_schema); | ||
const goal = { | ||
@@ -620,8 +620,8 @@ children: [ | ||
}; | ||
chai_1.expect(results).to.deep.equal(goal); | ||
(0, chai_1.expect)(results).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('escapes values'); | ||
mocha_1.test('can nest in keys from parent table results'); | ||
mocha_1.test('can nest in keys from child table results'); | ||
mocha_1.test('can throw an error if there is not a single foreign key'); | ||
(0, mocha_1.test)('escapes values'); | ||
(0, mocha_1.test)('can nest in keys from parent table results'); | ||
(0, mocha_1.test)('can nest in keys from child table results'); | ||
(0, mocha_1.test)('can throw an error if there is not a single foreign key'); | ||
}); | ||
@@ -628,0 +628,0 @@ }); |
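The recurring mechanical change in this diff is that compiled calls such as `mocha_1.test(...)` become `(0, mocha_1.test)(...)`. This appears to be the indirect-call form that newer TypeScript compilers emit for calls to imported or re-exported bindings, so the callee is no longer invoked with the module namespace object as `this`. A minimal sketch of that difference, assuming strict mode; `helpers` and `whoAmI` are made-up names standing in for a namespace object like `exports` or `mocha_1`, not orma code:

```ts
// `helpers` stands in for a CommonJS namespace object such as `exports` or `helpers_1`;
// `whoAmI` is a made-up function used only for this demonstration.
const helpers = {
    whoAmI(this: unknown): string {
        // In strict mode a plain method call receives the namespace object as `this`,
        // while the comma-operator (indirect) call receives `undefined`.
        return this === helpers ? 'called with the namespace as this' : 'called detached';
    },
};

console.log(helpers.whoAmI());      // 'called with the namespace as this'
console.log((0, helpers.whoAmI)()); // 'called detached' (matches ES module call semantics)
```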
@@ -31,7 +31,7 @@ "use strict"; | ||
*/ | ||
const pathed_records_by_entity = mutate_helpers_1.split_mutation_by_entity(mutation); | ||
const query = exports.get_verify_uniqueness_query(pathed_records_by_entity, orma_schema, escape_function); | ||
const pathed_records_by_entity = (0, mutate_helpers_1.split_mutation_by_entity)(mutation); | ||
const query = (0, exports.get_verify_uniqueness_query)(pathed_records_by_entity, orma_schema, escape_function); | ||
const results = await orma_query(query); | ||
const database_errors = exports.get_database_uniqueness_errors(pathed_records_by_entity, results, mutation, orma_schema); | ||
const mutation_errors = exports.get_mutation_uniqueness_errors(pathed_records_by_entity, mutation, orma_schema); | ||
const database_errors = (0, exports.get_database_uniqueness_errors)(pathed_records_by_entity, results, mutation, orma_schema); | ||
const mutation_errors = (0, exports.get_mutation_uniqueness_errors)(pathed_records_by_entity, mutation, orma_schema); | ||
return [...database_errors, ...mutation_errors]; | ||
@@ -54,7 +54,7 @@ }; | ||
const search_fields = new Set([ | ||
...schema_helpers_1.get_primary_keys(entity, orma_schema), | ||
...schema_helpers_1.get_unique_field_groups(entity, false, orma_schema).flatMap(el => el), | ||
...(0, schema_helpers_1.get_primary_keys)(entity, orma_schema), | ||
...(0, schema_helpers_1.get_unique_field_groups)(entity, false, orma_schema).flatMap(el => el), | ||
]); | ||
// searches all records for the entity | ||
const $where = query_helpers_1.get_search_records_where(searchable_pathed_records.map(({ record }) => record), record => mutate_1.get_identifying_keys(entity, record, orma_schema), escape_function); | ||
const $where = (0, query_helpers_1.get_search_records_where)(searchable_pathed_records.map(({ record }) => record), record => (0, mutate_1.get_identifying_keys)(entity, record, orma_schema), escape_function); | ||
if (!$where) { | ||
@@ -84,4 +84,4 @@ throw new Error('There should be a where clause. Something went wrong.'); | ||
const field_groups = [ | ||
schema_helpers_1.get_primary_keys(entity, orma_schema), | ||
...schema_helpers_1.get_unique_field_groups(entity, false, orma_schema), | ||
(0, schema_helpers_1.get_primary_keys)(entity, orma_schema), | ||
...(0, schema_helpers_1.get_unique_field_groups)(entity, false, orma_schema), | ||
]; | ||
@@ -92,3 +92,3 @@ const entity_errors = field_groups.flatMap(field_group => { | ||
const mutation_records = mutation_pathed_records.map(({ record }) => record); | ||
const database_duplicate_indices = exports.get_duplicate_record_indices(database_records, mutation_records, field_group); | ||
const database_duplicate_indices = (0, exports.get_duplicate_record_indices)(database_records, mutation_records, field_group); | ||
const database_duplicate_errors = database_duplicate_indices.map(([database_record_index, mutation_record_index]) => { | ||
@@ -125,4 +125,4 @@ const database_record = database_records[database_record_index]; | ||
const field_groups = [ | ||
schema_helpers_1.get_primary_keys(entity, orma_schema), | ||
...schema_helpers_1.get_unique_field_groups(entity, false, orma_schema), | ||
(0, schema_helpers_1.get_primary_keys)(entity, orma_schema), | ||
...(0, schema_helpers_1.get_unique_field_groups)(entity, false, orma_schema), | ||
]; | ||
@@ -132,3 +132,3 @@ const entity_errors = field_groups.flatMap(field_group => { | ||
const records = pathed_records.map(({ record }) => record); | ||
const duplicate_indices = exports.get_duplicate_record_indices(records, records, field_group); | ||
const duplicate_indices = (0, exports.get_duplicate_record_indices)(records, records, field_group); | ||
// we get false positives since a record always matches itself, so basically every record is picked up as a duplicate | ||
@@ -175,3 +175,3 @@ // to prevent this we need to filter out all the entries with only 1 duplicate | ||
// create an index of records by their identifying values. This allows fast lookups for duplicates | ||
const records1_indices_by_value = helpers_1.key_by(records1_indices, index => { | ||
const records1_indices_by_value = (0, helpers_1.key_by)(records1_indices, index => { | ||
const record1 = records1[index]; | ||
@@ -178,0 +178,0 @@ const values = identifying_fields.map(field => record1[field]); |
@@ -6,3 +6,3 @@ "use strict"; | ||
const chai_1 = require("chai"); | ||
mocha_1.describe('verify_uniqueness', () => { | ||
(0, mocha_1.describe)('verify_uniqueness', () => { | ||
const orma_schema = { | ||
@@ -47,4 +47,4 @@ users: { | ||
}; | ||
mocha_1.describe(verify_uniqueness_1.get_verify_uniqueness_query.name, () => { | ||
mocha_1.test('searches on primary key', () => { | ||
(0, mocha_1.describe)(verify_uniqueness_1.get_verify_uniqueness_query.name, () => { | ||
(0, mocha_1.test)('searches on primary key', () => { | ||
const pathed_records_by_entity = { | ||
@@ -61,4 +61,4 @@ products: [ | ||
}; | ||
const result = verify_uniqueness_1.get_verify_uniqueness_query(pathed_records_by_entity, orma_schema, el => el); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, verify_uniqueness_1.get_verify_uniqueness_query)(pathed_records_by_entity, orma_schema, el => el); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
products: { | ||
@@ -73,3 +73,3 @@ id: true, | ||
}); | ||
mocha_1.test('searches on unique keys', () => { | ||
(0, mocha_1.test)('searches on unique keys', () => { | ||
const pathed_records_by_entity = { | ||
@@ -87,4 +87,4 @@ users: [ | ||
}; | ||
const result = verify_uniqueness_1.get_verify_uniqueness_query(pathed_records_by_entity, orma_schema, el => el); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, verify_uniqueness_1.get_verify_uniqueness_query)(pathed_records_by_entity, orma_schema, el => el); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
users: { | ||
@@ -107,3 +107,3 @@ id: true, | ||
}); | ||
mocha_1.test('only searches updates and deletes', () => { | ||
(0, mocha_1.test)('only searches updates and deletes', () => { | ||
const pathed_records_by_entity = { | ||
@@ -133,4 +133,4 @@ products: [ | ||
}; | ||
const result = verify_uniqueness_1.get_verify_uniqueness_query(pathed_records_by_entity, orma_schema, el => el); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, verify_uniqueness_1.get_verify_uniqueness_query)(pathed_records_by_entity, orma_schema, el => el); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
products: { | ||
@@ -145,3 +145,3 @@ id: true, | ||
}); | ||
mocha_1.test('searches multiple entities and fields', () => { | ||
(0, mocha_1.test)('searches multiple entities and fields', () => { | ||
const pathed_records_by_entity = { | ||
@@ -174,4 +174,4 @@ products: [ | ||
}; | ||
const result = verify_uniqueness_1.get_verify_uniqueness_query(pathed_records_by_entity, orma_schema, el => el); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, verify_uniqueness_1.get_verify_uniqueness_query)(pathed_records_by_entity, orma_schema, el => el); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
products: { | ||
@@ -202,4 +202,4 @@ id: true, | ||
}); | ||
mocha_1.describe(verify_uniqueness_1.get_duplicate_record_indices.name, () => { | ||
mocha_1.test('gets duplicates', () => { | ||
(0, mocha_1.describe)(verify_uniqueness_1.get_duplicate_record_indices.name, () => { | ||
(0, mocha_1.test)('gets duplicates', () => { | ||
const records1 = [ | ||
@@ -226,9 +226,9 @@ { | ||
]; | ||
const result = verify_uniqueness_1.get_duplicate_record_indices(records1, records2, [ | ||
const result = (0, verify_uniqueness_1.get_duplicate_record_indices)(records1, records2, [ | ||
'field1', | ||
'field2', | ||
]); | ||
chai_1.expect(result).to.deep.equal([[0, 1]]); | ||
(0, chai_1.expect)(result).to.deep.equal([[0, 1]]); | ||
}); | ||
mocha_1.test('works for no duplicates', () => { | ||
(0, mocha_1.test)('works for no duplicates', () => { | ||
const records1 = [ | ||
@@ -240,11 +240,11 @@ { | ||
const records2 = []; | ||
const result = verify_uniqueness_1.get_duplicate_record_indices(records1, records2, [ | ||
const result = (0, verify_uniqueness_1.get_duplicate_record_indices)(records1, records2, [ | ||
'field1', | ||
'field2', | ||
]); | ||
chai_1.expect(result).to.deep.equal([]); | ||
(0, chai_1.expect)(result).to.deep.equal([]); | ||
}); | ||
}); | ||
mocha_1.describe(verify_uniqueness_1.get_database_uniqueness_errors.name, () => { | ||
mocha_1.test('gets uniqueness errors', () => { | ||
(0, mocha_1.describe)(verify_uniqueness_1.get_database_uniqueness_errors.name, () => { | ||
(0, mocha_1.test)('gets uniqueness errors', () => { | ||
const mutation_pathed_records_by_id = { | ||
@@ -280,8 +280,8 @@ users: [ | ||
}; | ||
const errors = verify_uniqueness_1.get_database_uniqueness_errors(mutation_pathed_records_by_id, database_records_by_id, {}, orma_schema); | ||
chai_1.expect(errors.length).to.equal(3); | ||
const errors = (0, verify_uniqueness_1.get_database_uniqueness_errors)(mutation_pathed_records_by_id, database_records_by_id, {}, orma_schema); | ||
(0, chai_1.expect)(errors.length).to.equal(3); | ||
}); | ||
}); | ||
mocha_1.describe(verify_uniqueness_1.get_mutation_uniqueness_errors.name, () => { | ||
mocha_1.test('gets uniqueness errors', () => { | ||
(0, mocha_1.describe)(verify_uniqueness_1.get_mutation_uniqueness_errors.name, () => { | ||
(0, mocha_1.test)('gets uniqueness errors', () => { | ||
const mutation_pathed_records_by_id = { | ||
@@ -320,6 +320,6 @@ users: [ | ||
}; | ||
const errors = verify_uniqueness_1.get_mutation_uniqueness_errors(mutation_pathed_records_by_id, {}, orma_schema); | ||
chai_1.expect(errors.length).to.equal(2); | ||
const errors = (0, verify_uniqueness_1.get_mutation_uniqueness_errors)(mutation_pathed_records_by_id, {}, orma_schema); | ||
(0, chai_1.expect)(errors.length).to.equal(2); | ||
}); | ||
}); | ||
}); |
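The `get_duplicate_record_indices` tests above match records across two lists by a set of identifying fields and return the matching index pairs. A self-contained sketch of that idea, assuming a plain JSON-keyed `Map` in place of orma's `key_by` helper and ignoring the self-comparison false positives the source comments mention; the records in the usage example are made up:

```ts
// Sketch only, not the orma implementation.
type Rec = Record<string, unknown>;

const get_duplicate_indices_sketch = (
    records1: Rec[],
    records2: Rec[],
    identifying_fields: string[]
): [number, number][] => {
    // Index records1 by the JSON of its identifying values for O(1) lookups.
    const index_by_value = new Map<string, number>();
    records1.forEach((record, i) =>
        index_by_value.set(JSON.stringify(identifying_fields.map(f => record[f])), i)
    );

    const duplicates: [number, number][] = [];
    records2.forEach((record, i2) => {
        const i1 = index_by_value.get(JSON.stringify(identifying_fields.map(f => record[f])));
        if (i1 !== undefined) duplicates.push([i1, i2]);
    });
    return duplicates;
};

// Made-up records whose only cross-list match is records1[0] <-> records2[1]:
console.log(
    get_duplicate_indices_sketch(
        [{ field1: 'a', field2: 'b' }, { field1: 'x', field2: 'y' }],
        [{ field1: 'q', field2: 'r' }, { field1: 'a', field2: 'b' }],
        ['field1', 'field2']
    )
); // -> [[0, 1]]
```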
@@ -31,3 +31,3 @@ "use strict"; | ||
// strings and other non-objects are returned as-is | ||
const is_object = helpers_1.is_simple_object(expression); | ||
const is_object = (0, helpers_1.is_simple_object)(expression); | ||
if (!is_object) { | ||
@@ -54,4 +54,4 @@ return expression; | ||
const parsed_args = Array.isArray(args) | ||
? args.map((arg, i) => exports.json_to_sql(arg, [...path, command, i])) | ||
: exports.json_to_sql(args, [...path, command]); | ||
? args.map((arg, i) => (0, exports.json_to_sql)(arg, [...path, command, i])) | ||
: (0, exports.json_to_sql)(args, [...path, command]); | ||
return command_parser(parsed_args, path); | ||
@@ -89,3 +89,3 @@ }) | ||
$having: args => `HAVING ${args}`, | ||
$in: (args, path) => `${args[0]}${helpers_1.last(path) === '$not' ? ' NOT' : ''} IN (${args[1]})`, | ||
$in: (args, path) => `${args[0]}${(0, helpers_1.last)(path) === '$not' ? ' NOT' : ''} IN (${args[1]})`, | ||
$group_by: args => `GROUP BY ${args.join(', ')}`, | ||
@@ -97,7 +97,7 @@ $order_by: args => `ORDER BY ${args.join(', ')}`, | ||
const res = `(${args.join(') AND (')})`; | ||
return helpers_1.last(path) === '$not' ? `NOT (${res})` : res; | ||
return (0, helpers_1.last)(path) === '$not' ? `NOT (${res})` : res; | ||
}, | ||
$or: (args, path) => { | ||
const res = `(${args.join(') OR (')})`; | ||
return helpers_1.last(path) === '$not' ? `NOT (${res})` : res; | ||
return (0, helpers_1.last)(path) === '$not' ? `NOT (${res})` : res; | ||
}, | ||
@@ -107,9 +107,9 @@ $any: args => `ANY (${args})`, | ||
$eq: (args, path) => args[1] === null | ||
? `${args[0]}${helpers_1.last(path) === '$not' ? ' NOT' : ''} IS NULL` | ||
: `${args[0]} ${helpers_1.last(path) === '$not' ? '!' : ''}= ${args[1]}`, | ||
$gt: (args, path) => `${args[0]} ${helpers_1.last(path) === '$not' ? '<=' : '>'} ${args[1]}`, | ||
$lt: (args, path) => `${args[0]} ${helpers_1.last(path) === '$not' ? '>=' : '<'} ${args[1]}`, | ||
$gte: (args, path) => `${args[0]} ${helpers_1.last(path) === '$not' ? '<' : '>='} ${args[1]}`, | ||
$lte: (args, path) => `${args[0]} ${helpers_1.last(path) === '$not' ? '>' : '<='} ${args[1]}`, | ||
$exists: (args, path) => `${helpers_1.last(path) === '$not' ? 'NOT ' : ''}EXISTS (${args})`, | ||
? `${args[0]}${(0, helpers_1.last)(path) === '$not' ? ' NOT' : ''} IS NULL` | ||
: `${args[0]} ${(0, helpers_1.last)(path) === '$not' ? '!' : ''}= ${args[1]}`, | ||
$gt: (args, path) => `${args[0]} ${(0, helpers_1.last)(path) === '$not' ? '<=' : '>'} ${args[1]}`, | ||
$lt: (args, path) => `${args[0]} ${(0, helpers_1.last)(path) === '$not' ? '>=' : '<'} ${args[1]}`, | ||
$gte: (args, path) => `${args[0]} ${(0, helpers_1.last)(path) === '$not' ? '<' : '>='} ${args[1]}`, | ||
$lte: (args, path) => `${args[0]} ${(0, helpers_1.last)(path) === '$not' ? '>' : '<='} ${args[1]}`, | ||
$exists: (args, path) => `${(0, helpers_1.last)(path) === '$not' ? 'NOT ' : ''}EXISTS (${args})`, | ||
$limit: args => `LIMIT ${args}`, | ||
@@ -120,3 +120,3 @@ $offset: args => `OFFSET ${args}`, | ||
const search_value = string_arg.replace(/^\'/, '').replace(/\'$/, ''); // get rid of quotes if they were put there by escape() | ||
return `${args[0]}${helpers_1.last(path) === '$not' ? ' NOT' : ''} LIKE '%${search_value}%'`; | ||
return `${args[0]}${(0, helpers_1.last)(path) === '$not' ? ' NOT' : ''} LIKE '%${search_value}%'`; | ||
}, | ||
@@ -123,0 +123,0 @@ $not: args => args, |
@@ -7,5 +7,5 @@ "use strict"; | ||
const json_sql_1 = require("./json_sql"); | ||
mocha_1.describe('query', () => { | ||
mocha_1.describe('json_to_sql', () => { | ||
mocha_1.test('joins commands', () => { | ||
(0, mocha_1.describe)('query', () => { | ||
(0, mocha_1.describe)('json_to_sql', () => { | ||
(0, mocha_1.test)('joins commands', () => { | ||
const json = { | ||
@@ -15,7 +15,7 @@ $select: ['a'], | ||
}; | ||
const sql = sql_formatter_1.format(json_sql_1.json_to_sql(json)); | ||
const goal = sql_formatter_1.format(`SELECT a FROM b`); | ||
chai_1.expect(sql).to.equal(goal); | ||
const sql = (0, sql_formatter_1.format)((0, json_sql_1.json_to_sql)(json)); | ||
const goal = (0, sql_formatter_1.format)(`SELECT a FROM b`); | ||
(0, chai_1.expect)(sql).to.equal(goal); | ||
}); | ||
mocha_1.test('nested command work', () => { | ||
(0, mocha_1.test)('nested commands work', () => { | ||
const json = { | ||
@@ -26,7 +26,7 @@ $where: { | ||
}; | ||
const sql = sql_formatter_1.format(json_sql_1.json_to_sql(json)); | ||
const goal = sql_formatter_1.format('WHERE a = b'); | ||
chai_1.expect(sql).to.equal(goal); | ||
const sql = (0, sql_formatter_1.format)((0, json_sql_1.json_to_sql)(json)); | ||
const goal = (0, sql_formatter_1.format)('WHERE a = b'); | ||
(0, chai_1.expect)(sql).to.equal(goal); | ||
}); | ||
mocha_1.test("'not' command works", () => { | ||
(0, mocha_1.test)("'not' command works", () => { | ||
const json = { | ||
@@ -37,15 +37,15 @@ $not: { | ||
}; | ||
const sql = sql_formatter_1.format(json_sql_1.json_to_sql(json)); | ||
const goal = sql_formatter_1.format('a NOT IN (1, 2)'); | ||
chai_1.expect(sql).to.equal(goal); | ||
const sql = (0, sql_formatter_1.format)((0, json_sql_1.json_to_sql)(json)); | ||
const goal = (0, sql_formatter_1.format)('a NOT IN (1, 2)'); | ||
(0, chai_1.expect)(sql).to.equal(goal); | ||
}); | ||
mocha_1.test('ignores undefined properties', () => { | ||
(0, mocha_1.test)('ignores undefined properties', () => { | ||
const json = { | ||
$having: undefined | ||
}; | ||
const sql = sql_formatter_1.format(json_sql_1.json_to_sql(json)); | ||
const goal = sql_formatter_1.format(''); | ||
chai_1.expect(sql).to.equal(goal); | ||
const sql = (0, sql_formatter_1.format)((0, json_sql_1.json_to_sql)(json)); | ||
const goal = (0, sql_formatter_1.format)(''); | ||
(0, chai_1.expect)(sql).to.equal(goal); | ||
}); | ||
}); | ||
}); |
@@ -23,3 +23,3 @@ "use strict"; | ||
let paths_to_any = []; | ||
helpers_1.deep_for_each(query, (clause, path) => { | ||
(0, helpers_1.deep_for_each)(query, (clause, path) => { | ||
if (clause.$any_path) { | ||
@@ -35,3 +35,3 @@ paths_to_any.push([clause, path]); | ||
const filter_type = get_filter_type(clause_path); | ||
const processed_clause = exports.process_any_clause(clause, current_entity, filter_type, orma_schema); | ||
const processed_clause = (0, exports.process_any_clause)(clause, current_entity, filter_type, orma_schema); | ||
Object.keys(clause).forEach(key => delete clause[key]); | ||
@@ -49,4 +49,4 @@ Object.keys(processed_clause).forEach(key => clause[key] = processed_clause[key]); | ||
const path_segment = path.slice(0, i + 1); | ||
const previous_any = helpers_1.deep_get(path_segment, query); | ||
return helpers_1.last(previous_any[0]); | ||
const previous_any = (0, helpers_1.deep_get)(path_segment, query); | ||
return (0, helpers_1.last)(previous_any[0]); | ||
} | ||
@@ -57,7 +57,7 @@ else { | ||
}); | ||
const current_entity = helpers_1.last(previous_entities); | ||
const current_entity = (0, helpers_1.last)(previous_entities); | ||
return current_entity; | ||
}; | ||
const get_filter_type = path => { | ||
const filter_type = helpers_1.last(path.filter(path_el => path_el === '$having' || path_el === '$where')); | ||
const filter_type = (0, helpers_1.last)(path.filter(path_el => path_el === '$having' || path_el === '$where')); | ||
return filter_type; | ||
@@ -68,3 +68,3 @@ }; | ||
const full_path = [initial_entity].concat(any_path); | ||
const edge_path = schema_helpers_1.get_edge_path(full_path, orma_schema).reverse(); | ||
const edge_path = (0, schema_helpers_1.get_edge_path)(full_path, orma_schema).reverse(); | ||
const clause = edge_path.reduce((acc, edge) => { | ||
@@ -71,0 +71,0 @@ return { |
@@ -6,3 +6,3 @@ "use strict"; | ||
const chai_1 = require("chai"); | ||
mocha_1.describe('query_macros', () => { | ||
(0, mocha_1.describe)('query_macros', () => { | ||
const orma_schema = { | ||
@@ -42,4 +42,4 @@ products: { | ||
}; | ||
mocha_1.describe(any_path_macro_1.apply_any_path_macro.name, () => { | ||
mocha_1.test('multiple any clauses', () => { | ||
(0, mocha_1.describe)(any_path_macro_1.apply_any_path_macro.name, () => { | ||
(0, mocha_1.test)('multiple any clauses', () => { | ||
const query = { | ||
@@ -69,3 +69,3 @@ products: { | ||
}; | ||
any_path_macro_1.apply_any_path_macro(query, orma_schema); | ||
(0, any_path_macro_1.apply_any_path_macro)(query, orma_schema); | ||
const goal = { | ||
@@ -103,5 +103,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('deep any path', () => { | ||
(0, mocha_1.test)('deep any path', () => { | ||
const query = { | ||
@@ -119,3 +119,3 @@ products: { | ||
}; | ||
any_path_macro_1.apply_any_path_macro(query, orma_schema); | ||
(0, any_path_macro_1.apply_any_path_macro)(query, orma_schema); | ||
const goal = { | ||
@@ -146,5 +146,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('nested anys', () => { | ||
(0, mocha_1.test)('nested anys', () => { | ||
const query = { | ||
@@ -167,3 +167,3 @@ products: { | ||
}; | ||
any_path_macro_1.apply_any_path_macro(query, orma_schema); | ||
(0, any_path_macro_1.apply_any_path_macro)(query, orma_schema); | ||
const goal = { | ||
@@ -194,5 +194,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('uses having', () => { | ||
(0, mocha_1.test)('uses having', () => { | ||
const query = { | ||
@@ -210,3 +210,3 @@ products: { | ||
}; | ||
any_path_macro_1.apply_any_path_macro(query, orma_schema); | ||
(0, any_path_macro_1.apply_any_path_macro)(query, orma_schema); | ||
const goal = { | ||
@@ -228,5 +228,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
}); | ||
}); |
@@ -7,3 +7,3 @@ "use strict"; | ||
let raw_paths = []; | ||
helpers_1.deep_for_each(query, (value, path) => { | ||
(0, helpers_1.deep_for_each)(query, (value, path) => { | ||
if (value === null || value === void 0 ? void 0 : value.$escape) { | ||
@@ -22,5 +22,5 @@ raw_paths.push([path, value]); | ||
} | ||
helpers_1.deep_set(path, escaping_function(value.$escape), query); | ||
(0, helpers_1.deep_set)(path, escaping_function(value.$escape), query); | ||
}); | ||
}; | ||
exports.apply_escape_macro = apply_escape_macro; |
@@ -6,5 +6,5 @@ "use strict"; | ||
const chai_1 = require("chai"); | ||
mocha_1.describe('escaping_macros', () => { | ||
mocha_1.describe(escaping_macros_1.apply_escape_macro.name, () => { | ||
mocha_1.test('escapes primitives', () => { | ||
(0, mocha_1.describe)('escaping_macros', () => { | ||
(0, mocha_1.describe)(escaping_macros_1.apply_escape_macro.name, () => { | ||
(0, mocha_1.test)('escapes primitives', () => { | ||
const query = { | ||
@@ -24,3 +24,3 @@ my_products: { | ||
}; | ||
escaping_macros_1.apply_escape_macro(query, value => { | ||
(0, escaping_macros_1.apply_escape_macro)(query, value => { | ||
if (typeof value === 'string') { | ||
@@ -36,3 +36,3 @@ return `"${value}"`; | ||
}); | ||
chai_1.expect(query).to.deep.equal({ | ||
(0, chai_1.expect)(query).to.deep.equal({ | ||
my_products: { | ||
@@ -46,3 +46,3 @@ $from: 'products', | ||
}); | ||
mocha_1.test('handles nested $escapes', () => { | ||
(0, mocha_1.test)('handles nested $escapes', () => { | ||
const query = { | ||
@@ -53,3 +53,3 @@ in: ['column', { $escape: [{ | ||
}; | ||
escaping_macros_1.apply_escape_macro(query, value => { | ||
(0, escaping_macros_1.apply_escape_macro)(query, value => { | ||
if (Array.isArray(value)) { | ||
@@ -62,3 +62,3 @@ return value; | ||
}); | ||
chai_1.expect(query).to.deep.equal({ | ||
(0, chai_1.expect)(query).to.deep.equal({ | ||
in: ['column', ['"val"']], | ||
@@ -65,0 +65,0 @@ }); |
@@ -13,5 +13,5 @@ "use strict"; | ||
const apply_nesting_macro = (query, subquery_path, previous_results, orma_schema) => { | ||
const subquery = helpers_1.deep_get(subquery_path, query); | ||
const nesting_where = exports.get_nesting_where(query, subquery_path, previous_results, orma_schema); | ||
const combined_where = query_helpers_1.combine_wheres([subquery.$where, nesting_where], '$and'); | ||
const subquery = (0, helpers_1.deep_get)(subquery_path, query); | ||
const nesting_where = (0, exports.get_nesting_where)(query, subquery_path, previous_results, orma_schema); | ||
const combined_where = (0, query_helpers_1.combine_wheres)([subquery.$where, nesting_where], '$and'); | ||
subquery.$where = combined_where; | ||
@@ -48,3 +48,3 @@ }; | ||
const subpath = subquery_path.slice(0, i + 1); | ||
const subquery = helpers_1.deep_get(subpath, query); | ||
const subquery = (0, helpers_1.deep_get)(subpath, query); | ||
if (subquery.$where || subquery.$having) { | ||
@@ -62,7 +62,7 @@ return i; | ||
const get_ancestor_where_clause = (ancestor_rows, ancestor_path, ancestor_to_entity_path, query, orma_schema) => { | ||
const ancestor_name = query_1.get_real_entity_name(ancestor_path, query); | ||
const ancestor_name = (0, query_1.get_real_entity_name)(ancestor_path, query); | ||
const under_ancestor_path = [...ancestor_path, ancestor_to_entity_path[0]]; | ||
const entity_under_ancestor = query_1.get_real_entity_name(under_ancestor_path, query); | ||
const entity_under_ancestor = (0, query_1.get_real_entity_name)(under_ancestor_path, query); | ||
// TODO refactor this so it works | ||
const edge_under_ancestor = schema_helpers_1.get_direct_edge(entity_under_ancestor, ancestor_name, orma_schema); | ||
const edge_under_ancestor = (0, schema_helpers_1.get_direct_edge)(entity_under_ancestor, ancestor_name, orma_schema); | ||
if (ancestor_rows === undefined || ancestor_rows.length === 0) { | ||
@@ -75,3 +75,3 @@ throw Error(`No ancestor rows provided for ${ancestor_name}`); | ||
const entity_name = entity_to_ancestor_path[0]; | ||
const ancestor_query = any_path_macro_1.process_any_clause({ | ||
const ancestor_query = (0, any_path_macro_1.process_any_clause)({ | ||
$any_path: [ | ||
@@ -78,0 +78,0 @@ any_path, |
@@ -6,3 +6,3 @@ "use strict"; | ||
const nesting_macro_1 = require("./nesting_macro"); | ||
mocha_1.describe('query_macros', () => { | ||
(0, mocha_1.describe)('query_macros', () => { | ||
const orma_schema = { | ||
@@ -42,4 +42,4 @@ products: { | ||
}; | ||
mocha_1.describe(nesting_macro_1.apply_nesting_macro.name, () => { | ||
mocha_1.test('handles root nesting', () => { | ||
(0, mocha_1.describe)(nesting_macro_1.apply_nesting_macro.name, () => { | ||
(0, mocha_1.test)('handles root nesting', () => { | ||
const query = { | ||
@@ -55,3 +55,3 @@ products: { | ||
const previous_results = [[['products'], [{ id: 1 }, { id: 2 }]]]; | ||
nesting_macro_1.apply_nesting_macro(query, ['products', 'images'], previous_results, orma_schema); | ||
(0, nesting_macro_1.apply_nesting_macro)(query, ['products', 'images'], previous_results, orma_schema); | ||
const goal = { | ||
@@ -69,5 +69,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles deep nesting', () => { | ||
(0, mocha_1.test)('handles deep nesting', () => { | ||
const query = { | ||
@@ -83,3 +83,3 @@ products: { | ||
const previous_results = [[['products'], [{ id: 1 }, { id: 2 }]]]; | ||
nesting_macro_1.apply_nesting_macro(query, ['products', 'images', 'image_urls'], previous_results, orma_schema); | ||
(0, nesting_macro_1.apply_nesting_macro)(query, ['products', 'images', 'image_urls'], previous_results, orma_schema); | ||
const goal = { | ||
@@ -106,5 +106,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles nesting under where clause', () => { | ||
(0, mocha_1.test)('handles nesting under where clause', () => { | ||
const query = { | ||
@@ -123,3 +123,3 @@ products: { | ||
]; | ||
nesting_macro_1.apply_nesting_macro(query, ['products', 'images', 'image_urls'], previous_results, orma_schema); | ||
(0, nesting_macro_1.apply_nesting_macro)(query, ['products', 'images', 'image_urls'], previous_results, orma_schema); | ||
const goal = { | ||
@@ -137,5 +137,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('nests based on from clause', () => { | ||
(0, mocha_1.test)('nests based on from clause', () => { | ||
const query = { | ||
@@ -155,4 +155,4 @@ my_products: { | ||
]; | ||
nesting_macro_1.apply_nesting_macro(query, ['my_products', 'my_images'], previous_results, orma_schema); | ||
nesting_macro_1.apply_nesting_macro(query, ['my_products', 'my_images', 'image_urls'], previous_results, orma_schema); | ||
(0, nesting_macro_1.apply_nesting_macro)(query, ['my_products', 'my_images'], previous_results, orma_schema); | ||
(0, nesting_macro_1.apply_nesting_macro)(query, ['my_products', 'my_images', 'image_urls'], previous_results, orma_schema); | ||
const goal = { | ||
@@ -181,5 +181,5 @@ my_products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('ignores undefined where/having clauses', () => { | ||
(0, mocha_1.test)('ignores undefined where/having clauses', () => { | ||
const query = { | ||
@@ -198,3 +198,3 @@ products: { | ||
]; | ||
nesting_macro_1.apply_nesting_macro(query, ['products', 'images', 'image_urls'], previous_results, orma_schema); | ||
(0, nesting_macro_1.apply_nesting_macro)(query, ['products', 'images', 'image_urls'], previous_results, orma_schema); | ||
const goal = { | ||
@@ -222,5 +222,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
}); | ||
}); |
@@ -11,8 +11,8 @@ "use strict"; | ||
const apply_select_macro = (query, orma_schema) => { | ||
query_helpers_1.query_for_each(query, (value, path) => { | ||
(0, query_helpers_1.query_for_each)(query, (value, path) => { | ||
var _a, _b; | ||
const new_select = exports.get_select(value, path, orma_schema); | ||
const new_select = (0, exports.get_select)(value, path, orma_schema); | ||
const existing_select = (_a = query.$select) !== null && _a !== void 0 ? _a : []; | ||
const $select = [...new_select, ...existing_select]; | ||
const $from = (_b = value.$from) !== null && _b !== void 0 ? _b : helpers_1.last(path); | ||
const $from = (_b = value.$from) !== null && _b !== void 0 ? _b : (0, helpers_1.last)(path); | ||
if ($select) { | ||
@@ -32,6 +32,6 @@ value.$select = $select; | ||
const get_select = (subquery, subquery_path, orma_schema) => { | ||
const entity_name = helpers_1.last(subquery_path); | ||
const entity_name = (0, helpers_1.last)(subquery_path); | ||
const $select = Object.keys(subquery).flatMap(key => { | ||
var _a; | ||
if (schema_helpers_1.is_reserved_keyword(key)) { | ||
if ((0, schema_helpers_1.is_reserved_keyword)(key)) { | ||
return []; | ||
@@ -45,9 +45,9 @@ } | ||
} | ||
if (helpers_1.is_simple_object(subquery[key]) && !query_helpers_1.is_subquery(subquery[key])) { | ||
if ((0, helpers_1.is_simple_object)(subquery[key]) && !(0, query_helpers_1.is_subquery)(subquery[key])) { | ||
return { $as: [subquery[key], key] }; | ||
} | ||
if (helpers_1.is_simple_object(subquery[key]) && query_helpers_1.is_subquery(subquery[key])) { | ||
if ((0, helpers_1.is_simple_object)(subquery[key]) && (0, query_helpers_1.is_subquery)(subquery[key])) { | ||
const lower_subquery = subquery[key]; | ||
const lower_subquery_entity = (_a = lower_subquery.$from) !== null && _a !== void 0 ? _a : key; | ||
const edge_to_lower_table = schema_helpers_1.get_direct_edge(entity_name, lower_subquery_entity, orma_schema); | ||
const edge_to_lower_table = (0, schema_helpers_1.get_direct_edge)(entity_name, lower_subquery_entity, orma_schema); | ||
return edge_to_lower_table.from_field; | ||
@@ -59,3 +59,3 @@ } | ||
const higher_entity = subquery_path[subquery_path.length - 2]; | ||
const edge_to_higher_entity = schema_helpers_1.get_direct_edge(entity_name, higher_entity, orma_schema); | ||
const edge_to_higher_entity = (0, schema_helpers_1.get_direct_edge)(entity_name, higher_entity, orma_schema); | ||
$select.push(edge_to_higher_entity.from_field); | ||
@@ -62,0 +62,0 @@ } |
@@ -6,3 +6,3 @@ "use strict"; | ||
const chai_1 = require("chai"); | ||
mocha_1.describe('select_macro', () => { | ||
(0, mocha_1.describe)('select_macro', () => { | ||
const orma_schema = { | ||
@@ -42,4 +42,4 @@ products: { | ||
}; | ||
mocha_1.describe(select_macro_1.apply_select_macro.name, () => { | ||
mocha_1.test('handles selects/handles root', () => { | ||
(0, mocha_1.describe)(select_macro_1.apply_select_macro.name, () => { | ||
(0, mocha_1.test)('handles selects/handles root', () => { | ||
const query = { | ||
@@ -54,3 +54,3 @@ products: { | ||
}; | ||
select_macro_1.apply_select_macro(query, orma_schema); | ||
(0, select_macro_1.apply_select_macro)(query, orma_schema); | ||
const goal = { | ||
@@ -66,5 +66,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles adding foreign keys', () => { | ||
(0, mocha_1.test)('handles adding foreign keys', () => { | ||
const query = { | ||
@@ -76,3 +76,3 @@ products: { | ||
}; | ||
select_macro_1.apply_select_macro(query, orma_schema); | ||
(0, select_macro_1.apply_select_macro)(query, orma_schema); | ||
const goal = { | ||
@@ -92,5 +92,5 @@ products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
mocha_1.test("respects 'from' clause", () => { | ||
(0, mocha_1.test)("respects 'from' clause", () => { | ||
const query = { | ||
@@ -102,3 +102,3 @@ my_products: { | ||
}; | ||
select_macro_1.apply_select_macro(query, orma_schema); | ||
(0, select_macro_1.apply_select_macro)(query, orma_schema); | ||
const goal = { | ||
@@ -110,5 +110,5 @@ my_products: { | ||
}; | ||
chai_1.expect(query).to.deep.equal(goal); | ||
(0, chai_1.expect)(query).to.deep.equal(goal); | ||
}); | ||
}); | ||
}); |
/** | ||
* Returns true if the parameter is a subquery. A subquery is an object that has at least one data fetching prop | ||
* (i.e. a prop that doesnt start with a $) or is an empty object. | ||
* (i.e. a prop that doesn't start with a $) or has a $from keyword as a property. An empty object is not a subquery. | ||
* @param subquery | ||
@@ -5,0 +5,0 @@ * @returns |
@@ -8,3 +8,3 @@ "use strict"; | ||
* Returns true if the parameter is a subquery. A subquery is an object that has at least one data fetching prop | ||
* (i.e. a prop that doesnt start with a $) or is an empty object. | ||
* (i.e. a prop that doesn't start with a $) or has a $from keyword as a property. An empty object is not a subquery. | ||
* @param subquery | ||
@@ -14,3 +14,3 @@ * @returns | ||
const is_subquery = (subquery) => { | ||
const is_object = helpers_1.is_simple_object(subquery); | ||
const is_object = (0, helpers_1.is_simple_object)(subquery); | ||
if (!is_object) { | ||
@@ -20,4 +20,5 @@ return false; | ||
const subquery_keys = Object.keys(subquery); | ||
const has_data_prop = subquery_keys.some(key => !schema_helpers_1.is_reserved_keyword(key)); | ||
return has_data_prop || subquery_keys.length === 0; | ||
const has_data_prop = subquery_keys.some(key => !(0, schema_helpers_1.is_reserved_keyword)(key)); | ||
const has_from = subquery.$from !== undefined; | ||
return has_data_prop || has_from; | ||
}; | ||
@@ -34,4 +35,4 @@ exports.is_subquery = is_subquery; | ||
const path = queue.shift(); | ||
const subquery = helpers_1.deep_get(path, query); | ||
const subquery_keys = Object.keys(subquery).filter(key => exports.is_subquery(subquery[key]) && !schema_helpers_1.is_reserved_keyword(key)); | ||
const subquery = (0, helpers_1.deep_get)(path, query); | ||
const subquery_keys = Object.keys(subquery).filter(key => (0, exports.is_subquery)(subquery[key]) && !(0, schema_helpers_1.is_reserved_keyword)(key)); | ||
const subquery_paths = subquery_keys.map(key => [...path, key]); | ||
@@ -112,5 +113,5 @@ queue.push(...subquery_paths); | ||
}); | ||
const where = exports.combine_wheres(ors, '$or'); | ||
const where = (0, exports.combine_wheres)(ors, '$or'); | ||
return where; | ||
}; | ||
exports.get_search_records_where = get_search_records_where; |
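The behavioral change in this file is `is_subquery`: in 1.0.50 an empty object counted as a subquery, while in 1.0.54 an object qualifies only if it has at least one non-`$` data prop or an explicit `$from`. A minimal restatement of the new rule, assuming `is_reserved_keyword` can be approximated as "key starts with `$`"; the tests in the next file exercise the same cases:

```ts
// Sketch of the new rule, not the orma source; `is_reserved_keyword` is approximated
// here as "key starts with $".
const is_subquery_sketch = (subquery: unknown): boolean => {
    const is_simple_object =
        typeof subquery === 'object' && subquery !== null && !Array.isArray(subquery);
    if (!is_simple_object) {
        return false;
    }
    const keys = Object.keys(subquery as Record<string, unknown>);
    const has_data_prop = keys.some(key => !key.startsWith('$'));
    const has_from = (subquery as Record<string, unknown>).$from !== undefined;
    return has_data_prop || has_from;
};

console.log(is_subquery_sketch({ $from: 'products', id: {} })); // true  - has a data prop
console.log(is_subquery_sketch({ $from: 'products' }));         // true  - $from alone now qualifies
console.log(is_subquery_sketch({}));                            // false - empty object no longer qualifies
console.log(is_subquery_sketch({ $limit: 10 }));                // false - only reserved keywords
```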
@@ -6,20 +6,30 @@ "use strict"; | ||
const query_helpers_1 = require("./query_helpers"); | ||
mocha_1.describe('query helpers', () => { | ||
mocha_1.describe(query_helpers_1.is_subquery.name, () => { | ||
mocha_1.test('is subquery', () => { | ||
const result = query_helpers_1.is_subquery({ | ||
(0, mocha_1.describe)('query helpers', () => { | ||
(0, mocha_1.describe)(query_helpers_1.is_subquery.name, () => { | ||
(0, mocha_1.test)('data prop makes subquery', () => { | ||
const result = (0, query_helpers_1.is_subquery)({ | ||
$from: 'products', | ||
id: {}, | ||
}); | ||
chai_1.expect(result).to.equal(true); | ||
(0, chai_1.expect)(result).to.equal(true); | ||
}); | ||
mocha_1.test('not subquery', () => { | ||
const result = query_helpers_1.is_subquery({ | ||
$from: 'products', | ||
(0, mocha_1.test)('$from makes subquery', () => { | ||
const result = (0, query_helpers_1.is_subquery)({ | ||
$from: 'products' | ||
}); | ||
chai_1.expect(result).to.equal(false); | ||
(0, chai_1.expect)(result).to.equal(true); | ||
}); | ||
(0, mocha_1.test)('empty object is not subquery', () => { | ||
const result = (0, query_helpers_1.is_subquery)({}); | ||
(0, chai_1.expect)(result).to.equal(false); | ||
}); | ||
(0, mocha_1.test)('not subquery', () => { | ||
const result = (0, query_helpers_1.is_subquery)({ | ||
$limit: 'products', | ||
}); | ||
(0, chai_1.expect)(result).to.equal(false); | ||
}); | ||
}); | ||
mocha_1.describe(query_helpers_1.query_for_each.name, () => { | ||
mocha_1.test('maps over a query', () => { | ||
(0, mocha_1.describe)(query_helpers_1.query_for_each.name, () => { | ||
(0, mocha_1.test)('maps over a query', () => { | ||
const query = { | ||
@@ -33,6 +43,6 @@ root1: { | ||
const results = []; | ||
query_helpers_1.query_for_each(query, (value, path) => { | ||
(0, query_helpers_1.query_for_each)(query, (value, path) => { | ||
results.push([value, path]); | ||
}); | ||
chai_1.expect(results).to.deep.equal([ | ||
(0, chai_1.expect)(results).to.deep.equal([ | ||
[{ child: {}, $keyword: {} }, ['root1']], | ||
@@ -44,21 +54,21 @@ [{}, ['root2']], | ||
}); | ||
mocha_1.describe(query_helpers_1.combine_wheres.name, () => { | ||
mocha_1.test('returns undefined for no wheres', () => { | ||
(0, mocha_1.describe)(query_helpers_1.combine_wheres.name, () => { | ||
(0, mocha_1.test)('returns undefined for no wheres', () => { | ||
const wheres = []; | ||
const result = query_helpers_1.combine_wheres(wheres, '$and'); | ||
chai_1.expect(result).to.equal(undefined); | ||
const result = (0, query_helpers_1.combine_wheres)(wheres, '$and'); | ||
(0, chai_1.expect)(result).to.equal(undefined); | ||
}); | ||
mocha_1.test('works for 1 where', () => { | ||
(0, mocha_1.test)('works for 1 where', () => { | ||
const wheres = [{ $eq: ['id', 1] }]; | ||
const result = query_helpers_1.combine_wheres(wheres, '$and'); | ||
chai_1.expect(result).to.deep.equal({ $eq: ['id', 1] }); | ||
const result = (0, query_helpers_1.combine_wheres)(wheres, '$and'); | ||
(0, chai_1.expect)(result).to.deep.equal({ $eq: ['id', 1] }); | ||
}); | ||
mocha_1.test('works for multiple wheres without connective', () => { | ||
(0, mocha_1.test)('works for multiple wheres without connective', () => { | ||
const wheres = [{ $eq: ['id', 1] }, { $eq: ['id', 2] }]; | ||
const result = query_helpers_1.combine_wheres(wheres, '$and'); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, query_helpers_1.combine_wheres)(wheres, '$and'); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
$and: [{ $eq: ['id', 1] }, { $eq: ['id', 2] }], | ||
}); | ||
}); | ||
mocha_1.test('works for multiple wheres with connective', () => { | ||
(0, mocha_1.test)('works for multiple wheres with connective', () => { | ||
const wheres = [ | ||
@@ -68,4 +78,4 @@ { $and: [{ $eq: ['id', 1] }, { $eq: ['id', 2] }] }, | ||
]; | ||
const result = query_helpers_1.combine_wheres(wheres, '$and'); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, query_helpers_1.combine_wheres)(wheres, '$and'); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
$and: [ | ||
@@ -79,3 +89,3 @@ { $eq: ['id', 1] }, | ||
}); | ||
mocha_1.test('works for no initial connetive but added connective', () => { | ||
(0, mocha_1.test)('works for no initial connective but added connective', () => { | ||
const wheres = [ | ||
@@ -85,4 +95,4 @@ { $eq: ['id', 1] }, | ||
]; | ||
const result = query_helpers_1.combine_wheres(wheres, '$and'); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, query_helpers_1.combine_wheres)(wheres, '$and'); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
$and: [ | ||
@@ -96,4 +106,4 @@ { $eq: ['id', 1] }, | ||
}); | ||
mocha_1.describe(query_helpers_1.get_search_records_where.name, () => { | ||
mocha_1.test('handles single field', () => { | ||
(0, mocha_1.describe)(query_helpers_1.get_search_records_where.name, () => { | ||
(0, mocha_1.test)('handles single field', () => { | ||
const records = [ | ||
@@ -105,8 +115,8 @@ { | ||
]; | ||
const result = query_helpers_1.get_search_records_where(records, record => ['field1'], el => el); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, query_helpers_1.get_search_records_where)(records, record => ['field1'], el => el); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
$in: ['field1', ['hi']], | ||
}); | ||
}); | ||
mocha_1.test('handles escaping', () => { | ||
(0, mocha_1.test)('handles escaping', () => { | ||
const records = [ | ||
@@ -117,8 +127,8 @@ { | ||
]; | ||
const result = query_helpers_1.get_search_records_where(records, record => ['field1'], el => `"${el}"`); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, query_helpers_1.get_search_records_where)(records, record => ['field1'], el => `"${el}"`); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
$in: ['field1', ['"hi"']], | ||
}); | ||
}); | ||
mocha_1.test('handles multiple fields', () => { | ||
(0, mocha_1.test)('handles multiple fields', () => { | ||
const records = [ | ||
@@ -131,4 +141,4 @@ { | ||
]; | ||
const result = query_helpers_1.get_search_records_where(records, record => ['field1', 'field2'], el => el); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, query_helpers_1.get_search_records_where)(records, record => ['field1', 'field2'], el => el); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
$and: [ | ||
@@ -144,3 +154,3 @@ { | ||
}); | ||
mocha_1.test('handles multiple records', () => { | ||
(0, mocha_1.test)('handles multiple records', () => { | ||
const records = [ | ||
@@ -166,4 +176,4 @@ { | ||
]; | ||
const result = query_helpers_1.get_search_records_where(records, record => record.type === 1 ? ['field1'] : ['field1', 'field2'], el => el); | ||
chai_1.expect(result).to.deep.equal({ | ||
const result = (0, query_helpers_1.get_search_records_where)(records, record => record.type === 1 ? ['field1'] : ['field1', 'field2'], el => el); | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
$or: [ | ||
@@ -170,0 +180,0 @@ { |
@@ -29,3 +29,3 @@ "use strict"; | ||
const tier_by_path = {}; | ||
query_helpers_1.query_for_each(query, (value, path) => { | ||
(0, query_helpers_1.query_for_each)(query, (value, path) => { | ||
// we create a new tier for 2 reasons: | ||
@@ -42,3 +42,3 @@ // 1. Entity is directly below a root entity and there is only one tier in the query plan. | ||
: undefined; | ||
const parent_value = helpers_1.deep_get(parent_path, query); | ||
const parent_value = (0, helpers_1.deep_get)(parent_path, query); | ||
const parent_has_filter = parent_value.$where || parent_value.$having; | ||
@@ -50,3 +50,3 @@ const tier = get_tier(path.length, parent_tier, parent_has_filter); | ||
tier_by_path[JSON.stringify(path)] = tier; | ||
helpers_1.last(query_plan).push(path); | ||
(0, helpers_1.last)(query_plan).push(path); | ||
}); | ||
@@ -53,0 +53,0 @@ return query_plan; |
@@ -6,5 +6,5 @@ "use strict"; | ||
const query_plan_1 = require("./query_plan"); | ||
mocha_1.describe('query_plan', () => { | ||
mocha_1.describe(query_plan_1.get_query_plan.name, () => { | ||
mocha_1.test('splits by $where clause and $having', () => { | ||
(0, mocha_1.describe)('query_plan', () => { | ||
(0, mocha_1.describe)(query_plan_1.get_query_plan.name, () => { | ||
(0, mocha_1.test)('splits by $where clause and $having', () => { | ||
const query = { | ||
@@ -26,3 +26,3 @@ vendors: { | ||
}; | ||
const result = query_plan_1.get_query_plan(query); | ||
const result = (0, query_plan_1.get_query_plan)(query); | ||
// the split happens at variants because it has a where clause | ||
@@ -38,5 +38,5 @@ const goal = [ | ||
]; | ||
chai_1.expect(result).to.deep.equal(goal); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles multiple top level props', () => { | ||
(0, mocha_1.test)('handles multiple top level props', () => { | ||
const query = { | ||
@@ -50,7 +50,7 @@ vendors: { | ||
}; | ||
const result = query_plan_1.get_query_plan(query); | ||
const result = (0, query_plan_1.get_query_plan)(query); | ||
const goal = [[['vendors'], ['products']]]; | ||
chai_1.expect(result).to.deep.equal(goal); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
mocha_1.test('handles renamed queries', () => { | ||
(0, mocha_1.test)('handles renamed queries', () => { | ||
const query = { | ||
@@ -62,7 +62,7 @@ my_products: { | ||
}; | ||
const result = query_plan_1.get_query_plan(query); | ||
const result = (0, query_plan_1.get_query_plan)(query); | ||
const goal = [[['my_products']]]; | ||
chai_1.expect(result).to.deep.equal(goal); | ||
(0, chai_1.expect)(result).to.deep.equal(goal); | ||
}); | ||
}); | ||
}); |
import { orma_schema } from '../introspector/introspector'; | ||
import { QueryResult } from '../types/query/query_result_types'; | ||
import { OrmaQuery } from '../types/query/query_types'; | ||
import { DeepReadonlyObject } from '../types/schema_types'; | ||
export declare const get_real_parent_name: (path: (string | number)[], query: any) => any; | ||
@@ -6,2 +9,4 @@ export declare const get_real_entity_name: (path: (string | number)[], query: any) => any; | ||
export declare const orma_nester: (results: [string[], Record<string, unknown>[]][], query: any, orma_schema: orma_schema) => {}; | ||
export declare const orma_query: (raw_query: any, orma_schema: any, query_function: (sql_string: string[]) => Promise<Record<string, unknown>[][]>, escaping_function: (value: any) => any) => Promise<{}>; | ||
export declare const orma_query: <Schema extends DeepReadonlyObject<orma_schema>, Query extends OrmaQuery<Schema>>(raw_query: Query, orma_schema_input: Schema, query_function: (sql_string: string[]) => Promise<Record<string, unknown>[][]>, escaping_function: (value: any) => any) => Promise<import("../types/query/query_result_types").StripKeywords<import("../types/query/query_result_types").WrapInArrays<import("../types/query/query_result_types").AddSchemaTypes<Schema, Query, unknown>>>>; | ||
export declare const as_orma_schema: <Schema extends DeepReadonlyObject<orma_schema>>(schema: Schema) => Schema; | ||
export declare const as_orma_query: <Schema extends DeepReadonlyObject<orma_schema>, T extends OrmaQuery<Schema>>(schema: Schema, query: T) => T; |
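These new declarations type the query entry point against a concrete schema via `as_orma_schema` and `as_orma_query`. A hypothetical usage sketch follows; the import specifier, the literal schema shape, and the stubbed query and escaping functions are assumptions for illustration, not taken from orma's documentation:

```ts
// Hypothetical usage; the import path and the literal schema/field shapes are assumptions.
import { as_orma_schema, as_orma_query, orma_query } from 'orma';

const schema = as_orma_schema({
    products: { id: {}, vendor_id: {} },
    vendors: { id: {} },
} as const);

const query = as_orma_query(schema, {
    products: { id: true },
} as const);

const run = async () => {
    // query_function receives one SQL string per element of the current query-plan tier
    // and must return one array of rows per SQL string (stubbed out here).
    const results = await orma_query(
        query,
        schema,
        async sql_strings => sql_strings.map(() => []),
        value => value // escaping_function: identity, for the sketch only
    );
    return results; // result type is derived from the schema and query by the new generics
};
```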
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.orma_query = exports.orma_nester = exports.having_to_json_sql = exports.get_real_entity_name = exports.get_real_parent_name = void 0; | ||
exports.as_orma_query = exports.as_orma_schema = exports.orma_query = exports.orma_nester = exports.having_to_json_sql = exports.get_real_entity_name = exports.get_real_parent_name = void 0; | ||
const helpers_1 = require("../helpers/helpers"); | ||
@@ -17,3 +17,3 @@ const nester_1 = require("../helpers/nester"); | ||
return null; | ||
return helpers_1.deep_get([...helpers_1.drop_last(1, path), '$from'], query, null) || path[path.length - 2]; | ||
return (0, helpers_1.deep_get)([...(0, helpers_1.drop_last)(1, path), '$from'], query, null) || path[path.length - 2]; | ||
}; | ||
@@ -23,3 +23,3 @@ exports.get_real_parent_name = get_real_parent_name; | ||
const get_real_entity_name = (path, query) => { | ||
return helpers_1.deep_get([...path, '$from'], query, null) || helpers_1.last(path); | ||
return (0, helpers_1.deep_get)([...path, '$from'], query, null) || (0, helpers_1.last)(path); | ||
}; | ||
@@ -70,3 +70,3 @@ exports.get_real_entity_name = get_real_entity_name; | ||
const having_to_json_sql = (query, subquery_path, orma_schema) => { | ||
const subquery = helpers_1.deep_get(subquery_path, query); | ||
const subquery = (0, helpers_1.deep_get)(subquery_path, query); | ||
const $having = subquery.$having; | ||
@@ -83,5 +83,5 @@ return $having; | ||
} | ||
const entity = exports.get_real_entity_name(path, query); | ||
const higher_entity = exports.get_real_entity_name(path.slice(0, -1), query); | ||
const edge = schema_helpers_1.get_direct_edge(higher_entity, entity, orma_schema); | ||
const entity = (0, exports.get_real_entity_name)(path, query); | ||
const higher_entity = (0, exports.get_real_entity_name)(path.slice(0, -1), query); | ||
const edge = (0, schema_helpers_1.get_direct_edge)(higher_entity, entity, orma_schema); | ||
return [edge.from_field, edge.to_field]; | ||
@@ -94,11 +94,12 @@ }); | ||
}); | ||
return nester_1.nester(data, edges); | ||
return (0, nester_1.nester)(data, edges); | ||
}; | ||
exports.orma_nester = orma_nester; | ||
// export const orma_query = async <schema>(raw_query: validate_query<schema>, orma_schema: validate_orma_schema<schema>, query_function: (sql_string: string) => Promise<Record<string, unknown>[]>) => { | ||
const orma_query = async (raw_query, orma_schema, query_function, escaping_function) => { | ||
const query = helpers_1.clone(raw_query); // clone query so we can apply macros without mutating the actual input query | ||
any_path_macro_1.apply_any_path_macro(query, orma_schema); | ||
select_macro_1.apply_select_macro(query, orma_schema); | ||
const query_plan = query_plan_1.get_query_plan(query); | ||
const orma_query = async (raw_query, orma_schema_input, query_function, escaping_function) => { | ||
const query = (0, helpers_1.clone)(raw_query); // clone query so we can apply macros without mutating the actual input query | ||
const orma_schema = orma_schema_input; // this is just because the codebase isn't properly typed | ||
(0, any_path_macro_1.apply_any_path_macro)(query, orma_schema); | ||
(0, select_macro_1.apply_select_macro)(query, orma_schema); | ||
const query_plan = (0, query_plan_1.get_query_plan)(query); | ||
let results = []; | ||
@@ -109,6 +110,6 @@ // Sequential for query plan | ||
// the nesting macro needs previous results, so we can't apply it at the beginning | ||
nesting_macro_1.apply_nesting_macro(query, path, results, orma_schema); | ||
const subquery = helpers_1.deep_get(path, query); | ||
escaping_macros_1.apply_escape_macro(subquery, escaping_function); | ||
return json_sql_1.json_to_sql(subquery); | ||
(0, nesting_macro_1.apply_nesting_macro)(query, path, results, orma_schema); | ||
const subquery = (0, helpers_1.deep_get)(path, query); | ||
(0, escaping_macros_1.apply_escape_macro)(subquery, escaping_function); | ||
return (0, json_sql_1.json_to_sql)(subquery); | ||
}); | ||
@@ -120,5 +121,10 @@ // Promise.all for each element in query plan | ||
} | ||
const output = exports.orma_nester(results, query, orma_schema); | ||
const output = (0, exports.orma_nester)(results, query, orma_schema); | ||
return output; | ||
}; | ||
exports.orma_query = orma_query; | ||
const as_orma_schema = (schema) => schema; | ||
exports.as_orma_schema = as_orma_schema; | ||
const as_orma_query = (schema, query) => query; | ||
exports.as_orma_query = as_orma_query; | ||
// export const as_orma_query_result = <Schema extends OrmaSchema, Query extends OrmaQuery<Schema>>(orma_schema: Schema, query: Query): QueryResult => |
@@ -6,3 +6,3 @@ "use strict"; | ||
const query_1 = require("./query"); | ||
mocha_1.describe('query', () => { | ||
(0, mocha_1.describe)('query', () => { | ||
const orma_schema = { | ||
@@ -42,5 +42,5 @@ products: { | ||
}; | ||
mocha_1.describe(query_1.orma_nester.name, () => { | ||
mocha_1.test('nests results', () => { | ||
const result = query_1.orma_nester([ | ||
(0, mocha_1.describe)(query_1.orma_nester.name, () => { | ||
(0, mocha_1.test)('nests results', () => { | ||
const result = (0, query_1.orma_nester)([ | ||
[['products'], [{ vendor_id: 1 }]], | ||
@@ -53,3 +53,3 @@ [['products', 'vendors'], [{ id: 1 }]] | ||
}, orma_schema); | ||
chai_1.expect(result).to.deep.equal({ | ||
(0, chai_1.expect)(result).to.deep.equal({ | ||
products: [ | ||
@@ -69,3 +69,3 @@ { | ||
mocha_1.describe.skip('dev', () => { | ||
mocha_1.test('dev', async () => { | ||
(0, mocha_1.test)('dev', async () => { | ||
const query = { | ||
@@ -171,7 +171,8 @@ calls: { | ||
var actual_query = ''; | ||
const test = await query_1.orma_query(query, orma_schema, sql_strings => { | ||
// @ts-ignore | ||
const test = await (0, query_1.orma_query)(query, orma_schema, sql_strings => { | ||
actual_query = sql_strings[0]; | ||
return Promise.resolve([]); | ||
}, el => el); | ||
chai_1.expect(actual_query).to.deep.equal('SELECT id FROM calls'); | ||
(0, chai_1.expect)(actual_query).to.deep.equal('SELECT id FROM calls'); | ||
}); | ||
@@ -178,0 +179,0 @@ }); |
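The dev test above drives `orma_query` with two callbacks: a query function that receives the generated SQL strings and resolves to result rows, and an escaping function (`el => el` here). The `// @ts-ignore` added in this diff suggests the stub does not line up exactly with the declared signature, so the sketch below is an approximation inferred from the compiled test, not a documented contract:

    let actual_query = ''

    // assumed shape: takes the batch of SQL strings for one tier and resolves to row sets;
    // the real contract may expect one row set per SQL string
    const stub_query_function = (sql_strings: string[]): Promise<Record<string, unknown>[]> => {
        actual_query = sql_strings[0] // capture the generated SQL for the assertion
        return Promise.resolve([])    // pretend the database returned no rows
    }

    // identity escaping, as in the test; real usage would escape values for the target database
    const escaping_function = (el: unknown): unknown => el

    // later: expect(actual_query).to.equal('SELECT id FROM calls')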
@@ -11,6 +11,6 @@ "use strict"; | ||
// Walk the query | ||
helpers_1.deep_for_each(query, (val, path) => { | ||
(0, helpers_1.deep_for_each)(query, (val, path) => { | ||
const is_boolean_resolver = val === true; | ||
const is_virtual_column_resolver = helpers_1.is_simple_object(val) && !query_helpers_1.is_subquery(val); | ||
const is_subquery_resolver = helpers_1.is_simple_object(val) && query_helpers_1.is_subquery(val); | ||
const is_virtual_column_resolver = (0, helpers_1.is_simple_object)(val) && !(0, query_helpers_1.is_subquery)(val); | ||
const is_subquery_resolver = (0, helpers_1.is_simple_object)(val) && (0, query_helpers_1.is_subquery)(val); | ||
const is_clauses_resolver = '?'; | ||
@@ -86,3 +86,3 @@ if (is_boolean_resolver) { | ||
const first_arg = args[0]; | ||
const parent_entity = query_1.get_real_parent_name(path, query); | ||
const parent_entity = (0, query_1.get_real_parent_name)(path, query); | ||
if (typeof first_arg !== 'string' || !schema[parent_entity][first_arg]) { | ||
@@ -102,3 +102,3 @@ const error = { | ||
const last_path_equals = (desired_key, path) => { | ||
const is_valid = helpers_1.last(path) === desired_key; | ||
const is_valid = (0, helpers_1.last)(path) === desired_key; | ||
return is_valid; | ||
@@ -108,5 +108,5 @@ }; | ||
if (path.length > 1) { | ||
const parent_name = query_1.get_real_parent_name(path, query); | ||
const entity_name = query_1.get_real_entity_name(path, query); | ||
const direct_edges = schema_helpers_1.get_direct_edges(parent_name, entity_name, schema); | ||
const parent_name = (0, query_1.get_real_parent_name)(path, query); | ||
const entity_name = (0, query_1.get_real_entity_name)(path, query); | ||
const direct_edges = (0, schema_helpers_1.get_direct_edges)(parent_name, entity_name, schema); | ||
if (direct_edges.length === 0) { | ||
@@ -123,3 +123,3 @@ const error = { | ||
const ensure_field_exists = (val, path, query, schema) => { | ||
const parent_entity = query_1.get_real_parent_name(path, query); | ||
const parent_entity = (0, query_1.get_real_parent_name)(path, query); | ||
const original_field = val['$field']; | ||
@@ -137,5 +137,5 @@ if (typeof original_field !== 'string' || !schema[parent_entity][original_field]) { | ||
// User is requesting a specific field be in the response | ||
const parent_entity = query_1.get_real_parent_name(path, query); | ||
const requested_field = helpers_1.last(path); | ||
if (!schema_helpers_1.field_exists(parent_entity, requested_field, schema)) { | ||
const parent_entity = (0, query_1.get_real_parent_name)(path, query); | ||
const requested_field = (0, helpers_1.last)(path); | ||
if (!(0, schema_helpers_1.field_exists)(parent_entity, requested_field, schema)) { | ||
const error = { | ||
@@ -142,0 +142,0 @@ message: `Field ${requested_field} does not exist on entity ${parent_entity}`, |
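The validator walks the whole query with `deep_for_each` and classifies every node it visits: `true` leaves are boolean resolvers, plain objects that are not subqueries are treated as virtual columns, and plain objects that are subqueries get field and connection checks. A simplified sketch of that classification; `is_subquery` is not shown in this diff, so the non-`$`-key heuristic below is an assumption, not orma's actual test:

    const is_simple_object = (val: unknown): val is Record<string, unknown> =>
        typeof val === 'object' && val !== null && !Array.isArray(val)

    // assumption: a subquery nests at least one non-keyword (non-$) property
    const is_subquery = (val: Record<string, unknown>): boolean =>
        Object.keys(val).some(key => !key.startsWith('$'))

    const classify_query_node = (val: unknown) => {
        if (val === true) return 'boolean_resolver'                               // e.g. { id: true }
        if (is_simple_object(val) && is_subquery(val)) return 'subquery_resolver' // e.g. { products: { id: true } }
        if (is_simple_object(val)) return 'virtual_column_resolver'               // e.g. { $field: 'first_name' }
        return 'other'
    }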
@@ -51,5 +51,5 @@ "use strict"; | ||
*/ | ||
mocha_1.describe('validate_query.ts', () => { | ||
mocha_1.describe('boolean resolver', () => { | ||
mocha_1.test('validates boolean resolver column names', () => { | ||
(0, mocha_1.describe)('validate_query.ts', () => { | ||
(0, mocha_1.describe)('boolean resolver', () => { | ||
(0, mocha_1.test)('validates boolean resolver column names', () => { | ||
const query = { | ||
@@ -68,9 +68,9 @@ grand_parent: { | ||
}; | ||
const errors = validate_query_1.validator(query, schema); | ||
chai_1.expect(errors.length).to.deep.equal(1); | ||
chai_1.expect(errors[0].path).to.deep.equal(['grand_parent', 'parent', 'child', 'child_column_oops']); | ||
const errors = (0, validate_query_1.validator)(query, schema); | ||
(0, chai_1.expect)(errors.length).to.deep.equal(1); | ||
(0, chai_1.expect)(errors[0].path).to.deep.equal(['grand_parent', 'parent', 'child', 'child_column_oops']); | ||
}); | ||
}); | ||
mocha_1.describe('virtual columns', () => { | ||
mocha_1.test('validates $field virtual column', () => { | ||
(0, mocha_1.describe)('virtual columns', () => { | ||
(0, mocha_1.test)('validates $field virtual column', () => { | ||
const query = { | ||
@@ -82,9 +82,9 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(query, schema); | ||
chai_1.expect(errors[0].path).to.deep.equal(['parent', 'custom_name2']); | ||
chai_1.expect(errors.length).to.deep.equal(1); | ||
const errors = (0, validate_query_1.validator)(query, schema); | ||
(0, chai_1.expect)(errors[0].path).to.deep.equal(['parent', 'custom_name2']); | ||
(0, chai_1.expect)(errors.length).to.deep.equal(1); | ||
}); | ||
}); | ||
mocha_1.describe('subqueries', () => { | ||
mocha_1.test('validates the from clause', () => { | ||
(0, mocha_1.describe)('subqueries', () => { | ||
(0, mocha_1.test)('validates the from clause', () => { | ||
const data = { | ||
@@ -102,8 +102,8 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
const from_clause_errors = errors.filter(err => JSON.stringify(err.path) === | ||
JSON.stringify(['parent', 'bad', 'id'])); | ||
chai_1.expect(from_clause_errors.length).to.equal(1); | ||
(0, chai_1.expect)(from_clause_errors.length).to.equal(1); | ||
}); | ||
mocha_1.test('validates subquery connections', () => { | ||
(0, mocha_1.test)('validates subquery connections', () => { | ||
const data = { | ||
@@ -122,9 +122,9 @@ grand_parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
chai_1.expect(errors.length).to.equal(1); | ||
chai_1.expect(errors[0].path).to.deep.equal(['grand_parent', 'john', 'child', 'grand_parent']); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
(0, chai_1.expect)(errors.length).to.equal(1); | ||
(0, chai_1.expect)(errors[0].path).to.deep.equal(['grand_parent', 'john', 'child', 'grand_parent']); | ||
}); | ||
}); | ||
mocha_1.describe.skip('clauses', () => { | ||
mocha_1.test('Requires $eq to have valid field name', () => { | ||
(0, mocha_1.test)('Requires $eq to have valid field name', () => { | ||
const data = { | ||
@@ -140,7 +140,7 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
chai_1.expect(errors.length).to.equal(1); | ||
chai_1.expect(errors[0].path).to.deep.equal(['parent', '$where']); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
(0, chai_1.expect)(errors.length).to.equal(1); | ||
(0, chai_1.expect)(errors[0].path).to.deep.equal(['parent', '$where']); | ||
}); | ||
mocha_1.test('Lets $having have aliased fields', () => { | ||
(0, mocha_1.test)('Lets $having have aliased fields', () => { | ||
const data = { | ||
@@ -156,6 +156,6 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
chai_1.expect(errors.length).to.equal(0); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
(0, chai_1.expect)(errors.length).to.equal(0); | ||
}); | ||
mocha_1.test('Lets operators have functions', () => { | ||
(0, mocha_1.test)('Lets operators have functions', () => { | ||
const data = { | ||
@@ -169,6 +169,6 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
chai_1.expect(errors.length).to.equal(0); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
(0, chai_1.expect)(errors.length).to.equal(0); | ||
}); | ||
mocha_1.test('Lets operators have functions', () => { | ||
(0, mocha_1.test)('Lets operators have functions', () => { | ||
const data = { | ||
@@ -182,6 +182,6 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
chai_1.expect(errors.length).to.equal(0); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
(0, chai_1.expect)(errors.length).to.equal(0); | ||
}); | ||
mocha_1.test('Lets $and', () => { | ||
(0, mocha_1.test)('Lets $and', () => { | ||
const data = { | ||
@@ -197,6 +197,6 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
chai_1.expect(errors.length).to.equal(0); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
(0, chai_1.expect)(errors.length).to.equal(0); | ||
}); | ||
mocha_1.test('Changes scope for subqueries', () => { | ||
(0, mocha_1.test)('Changes scope for subqueries', () => { | ||
const data = { | ||
@@ -216,6 +216,6 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
chai_1.expect(errors.length).to.equal(1); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
(0, chai_1.expect)(errors.length).to.equal(1); | ||
}); | ||
mocha_1.test('Requires subqueries to have valid fields', () => { | ||
(0, mocha_1.test)('Requires subqueries to have valid fields', () => { | ||
const data = { | ||
@@ -232,6 +232,6 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
chai_1.expect(errors.length).to.equal(1); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
(0, chai_1.expect)(errors.length).to.equal(1); | ||
}); | ||
mocha_1.test('Requires subqueries to have valid fields', () => { | ||
(0, mocha_1.test)('Requires subqueries to have valid fields', () => { | ||
const data = { | ||
@@ -248,6 +248,6 @@ parent: { | ||
}; | ||
const errors = validate_query_1.validator(data, schema); | ||
chai_1.expect(errors.length).to.equal(1); | ||
const errors = (0, validate_query_1.validator)(data, schema); | ||
(0, chai_1.expect)(errors.length).to.equal(1); | ||
}); | ||
}); | ||
}); |
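Across these tests the validator's return value is treated as an array of error objects, each carrying at least a `path` into the offending part of the query and, per the message built in validate_query.js, a human-readable `message`. A small sketch of consuming that shape; the exact error type is an assumption drawn from the assertions above:

    // assumed error shape, inferred from the tests rather than an exported orma type
    type ValidationError = { message: string; path: (string | number)[] }

    const format_errors = (errors: ValidationError[]): string[] =>
        errors.map(error => `${error.path.join('.')}: ${error.message}`)

    // e.g. an invalid column would produce something like
    // 'grand_parent.parent.child.child_column_oops: Field child_column_oops does not exist on entity child'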
@@ -11,5 +11,13 @@ /** | ||
declare type PluckKey<Obj extends Record<string, any>, Keys extends string, DesiredKeys extends string> = Keys extends DesiredKeys ? Obj[Keys] : never; | ||
export declare type IsEqual<T1, T2> = T1 extends T2 ? T2 extends T1 ? true : false : false; | ||
export declare type IsEqual<T, U> = (<G>() => G extends T ? 1 : 2) extends <G>() => G extends U ? 1 : 2 ? true : false; | ||
export declare type IsExtends<T, U> = T extends U ? true : false; | ||
declare type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never; | ||
export declare type IsUnion<T> = [T] extends [UnionToIntersection<T>] ? false : true; | ||
export declare type Expand<T> = T extends (...args: infer A) => infer R ? (...args: Expand<A>) => Expand<R> : T extends infer O ? { | ||
[K in keyof O]: O[K]; | ||
} : never; | ||
export declare type ExpandRecursively<T> = T extends (...args: infer A) => infer R ? (...args: ExpandRecursively<A>) => ExpandRecursively<R> : T extends Object ? T extends infer O ? { | ||
[K in keyof O]: ExpandRecursively<O[K]>; | ||
} : never : T; | ||
export declare type BooleanOr<A extends boolean, B extends boolean> = true extends A | B ? true : false; | ||
export declare type LowercaseChar = 'a' | 'b' | 'c' | 'd' | 'e' | 'f' | 'g' | 'h' | 'i' | 'j' | 'k' | 'l' | 'm' | 'n' | 'o' | 'p' | 'q' | 'r' | 's' | 't' | 'u' | 'v' | 'w' | 'x' | 'y' | 'z'; | ||
@@ -16,0 +24,0 @@ export declare type UppercaseChar = 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | 'P' | 'Q' | 'R' | 'S' | 'T' | 'U' | 'V' | 'W' | 'X' | 'Y' | 'Z'; |
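The reworked `IsEqual` swaps the old pair of `extends` checks for the generic-signature trick, which compares two types for exact identity instead of mutual assignability, and the new `IsUnion` builds on `UnionToIntersection`. A few type-level probes of the declarations above, repeated here only so the sketch stands alone:

    type IsEqual<T, U> =
        (<G>() => G extends T ? 1 : 2) extends (<G>() => G extends U ? 1 : 2) ? true : false
    type UnionToIntersection<U> =
        (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never
    type IsUnion<T> = [T] extends [UnionToIntersection<T>] ? false : true

    type T1 = IsEqual<{ a: 1 }, { a: 1 }> // true
    type T2 = IsEqual<1 | 2, 1>           // false: no distribution over the union
    type T3 = IsEqual<any, string>        // false: `any` is treated as its own type, not a wildcard
    type T4 = IsUnion<1 | 2>              // true
    type T5 = IsUnion<string>             // false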
@@ -5,1 +5,6 @@ "use strict"; | ||
} | ||
{ | ||
const test1 = true; | ||
const test2 = true; | ||
const test3 = false; | ||
} |
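The bare `const testN = true` statements in this compiled file are what type-level assertions look like after compilation: the interesting check happens entirely in the type annotation, so only a boolean literal survives in the JavaScript. A hedged sketch of the source pattern these blocks most likely come from; the annotations below are illustrative, not the package's actual tests:

    type IsExtends<T, U> = T extends U ? true : false

    {
        // compiles only when the type-level result is exactly `true`
        const test1: IsExtends<'a', string> = true
        // const bad: IsExtends<string, 'a'> = true // would not compile: the annotation is `false`
    }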
import { Edge } from '../helpers/schema_helpers'; | ||
import { orma_schema } from '../introspector/introspector'; | ||
import { mysql_to_simple_types, orma_schema } from '../introspector/introspector'; | ||
import { LowercaseChar, NumericChar, UppercaseChar } from './helper_types'; | ||
export declare type DeepReadonly<T> = T extends (infer R)[] ? DeepReadonlyArray<R> : T extends Function ? T : T extends object ? DeepReadonlyObject<T> : T; | ||
interface DeepReadonlyArray<T> extends ReadonlyArray<DeepReadonly<T>> { | ||
export interface DeepReadonlyArray<T> extends ReadonlyArray<DeepReadonly<T>> { | ||
} | ||
declare type DeepReadonlyObject<T> = { | ||
export declare type DeepReadonlyObject<T> = { | ||
readonly [P in keyof T]: DeepReadonly<T[P]>; | ||
@@ -12,3 +12,3 @@ }; | ||
export declare type Keyword = `$${string}`; | ||
export declare type IsKeyword<Field extends `$${string}`> = Field; | ||
export declare type IsKeyword<Field> = Field extends Keyword ? true : false; | ||
/** | ||
@@ -43,2 +43,6 @@ * Non keywords cannot start with a $ | ||
} ? EdgeParams : never; | ||
export declare type GetFieldType<Schema extends OrmaSchema, Entity extends GetAllEntities<Schema>, Field extends GetFields<Schema, Entity>> = Schema[Entity][Field] extends { | ||
data_type: any; | ||
} ? FieldTypeStringToType<Schema[Entity][Field]['data_type']> : any; | ||
declare type FieldTypeStringToType<TypeString extends typeof mysql_to_simple_types[keyof typeof mysql_to_simple_types]> = TypeString extends 'string' ? string : TypeString extends 'number' ? number : TypeString extends 'boolean' ? boolean : TypeString extends 'date' ? Date : any; | ||
export {}; |
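The new `GetFieldType` and `FieldTypeStringToType` declarations map a field's `data_type` string to a concrete TypeScript type. A standalone sketch of that mapping; the `SimpleType` union below stands in for `typeof mysql_to_simple_types[keyof typeof mysql_to_simple_types]`, whose exact members are not visible in this diff:

    // assumed stand-in for the values of mysql_to_simple_types
    type SimpleType = 'string' | 'number' | 'boolean' | 'date'

    type FieldTypeStringToType<TypeString extends SimpleType> =
        TypeString extends 'string' ? string
        : TypeString extends 'number' ? number
        : TypeString extends 'boolean' ? boolean
        : TypeString extends 'date' ? Date
        : any

    type IdType = FieldTypeStringToType<'string'>      // string
    type PriceType = FieldTypeStringToType<'number'>   // number
    type CreatedAtType = FieldTypeStringToType<'date'> // Date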
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const getA = (a) => a; | ||
const test_schema = getA({ | ||
const introspector_1 = require("../introspector/introspector"); | ||
const test_schema = (0, introspector_1.as_orma_schema)({ | ||
products: { | ||
id: {}, | ||
id: { | ||
data_type: 'string', | ||
}, | ||
vendor_id: { | ||
@@ -143,1 +145,7 @@ references: { | ||
} | ||
{ | ||
// reads the type from the schema | ||
const good = true; | ||
// unknown types are cast as any | ||
const good2 = true; | ||
} |
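The comments in this compiled test ('reads the type from the schema', 'unknown types are cast as any') describe the two branches of `GetFieldType`: a field that declares a `data_type` resolves to the matching TypeScript type, and anything else falls back to `any`. A self-contained sketch with a simplified `GetFieldType` standing in for the full declaration in this diff:

    type DemoSchema = {
        products: {
            id: { data_type: 'string' }
            vendor_id: {} // no data_type declared
        }
    }

    // simplified stand-in for the declared GetFieldType
    type GetFieldType<S, E extends keyof S, F extends keyof S[E]> =
        S[E][F] extends { data_type: infer D }
            ? D extends 'string' ? string : D extends 'number' ? number : any
            : any

    type IdType = GetFieldType<DemoSchema, 'products', 'id'>              // string: read from the schema
    type VendorIdType = GetFieldType<DemoSchema, 'products', 'vendor_id'> // any: no data_type, cast as any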
{ | ||
"name": "orma", | ||
"version": "1.0.50", | ||
"version": "1.0.54", | ||
"description": "A declarative relational syncronous orm", | ||
@@ -55,3 +55,3 @@ "main": "build/index.js", | ||
"sucrase": "^3.19.0", | ||
"typescript": "^4.3.5" | ||
"typescript": "^4.6.0-dev.20211218" | ||
}, | ||
@@ -58,0 +58,0 @@ "mocha": { |