@evidence-dev/db-commons
Advanced tools
+38
export * from './index.cjs';

/**
 * Runs a query string against an open datasource connection.
 * @typeParam T - driver-specific connection/options object
 * `batchSize`, when given, controls how many rows each streamed batch holds.
 */
export type RunQuery<T extends Record<string, unknown>> = (
	queryString: string,
	database: T,
	batchSize?: number
) => Promise<QueryResult>;

/** Primitive value types a result cell may contain. */
export type EvidenceColumnType = number | boolean | string | Date;

/**
 * Builds a query runner bound to a datasource's options and source directory.
 * The returned function executes one query file (content + path) with the
 * given batch size.
 */
export type GetRunner<T extends Record<string, unknown>> = (
	opts: T,
	directory: string
) => (queryContent: string, queryPath: string, batchSize: number) => Promise<QueryResult>;

/**
 * Verifies that a connection can be established with the given options.
 * Resolves true on success, false otherwise.
 */
export type ConnectionTester<T extends Record<string, unknown>> = (
	opts: T,
	directory: string
) => Promise<boolean>;

/**
 * Lazily reads a source file's content.
 * @param {boolean} [disableInterpolation = false] Disables the automatic injection of Evidence Source Variables
 */
type FileContent = (disableInterpolation?: boolean) => Promise<string>;

/**
 * Recursive view of a source directory: each key is a file or directory name,
 * each value is either a nested directory or a lazy file reader.
 */
export interface SourceDirectory {
	[filename: string]: SourceDirectory | FileContent;
}

/**
 * Processes an entire source directory, yielding one result per query.
 * `utils` exposes cache/filter hooks — presumably keyed by query name and
 * content (TODO confirm against the caller).
 */
export type ProcessSource<T extends Record<string, unknown>> = (
	opts: T,
	files: SourceDirectory,
	utils: {
		isCached: (name: string, content: string) => boolean;
		isFiltered: (name: string) => boolean;
		shouldRun: (name: string, content: string) => boolean;
		addToCache: (name: string, content: string) => void;
	}
) => AsyncIterable<QueryResult & { name: string; content: string }>;
+71
-0
| # @evidence-dev/db-commons | ||
| ## 1.0.4 | ||
| ### Patch Changes | ||
| - 1da26c4e: add more leniency for comments in queries | ||
| ## 1.0.3 | ||
| ### Patch Changes | ||
| - 31381835: Add support for EVIDENCE_VAR\_\_ interpolation in source queries | ||
| ## 1.0.2 | ||
| ### Patch Changes | ||
| - fc7fe470: Add support for closeConnection callback when async generator has completed | ||
| ## 1.0.1 | ||
| ### Patch Changes | ||
| - Fix incorrectly published version | ||
| ## 1.0.0 | ||
| ### Patch Changes | ||
| - bf4a112a: Update package.json to use new datasource field | ||
| - cd57ba69: Add new interface for datasources for fine-grained control of output | ||
| - c4822852: Support for streaming results | ||
| - 781d2677: exhaust testconnection streams, improve type inference, add trino/databricks adapters | ||
| - 20127231: Bump all versions so version pinning works | ||
| - 29c149d6: added stricter types to db adapters | ||
| ## 0.2.1-usql.5 | ||
| ### Patch Changes | ||
| - 781d2677: exhaust testconnection streams, improve type inference, add trino/databricks adapters | ||
| ## 0.2.1-usql.4 | ||
| ### Patch Changes | ||
| - Update package.json to use new datasource field | ||
| ## 0.2.1-usql.3 | ||
| ### Patch Changes | ||
| - cd57ba69: Add new interface for datasources for fine-grained control of output | ||
| ## 0.2.1-usql.2 | ||
| ### Patch Changes | ||
| - Support for streaming results | ||
| ## 0.2.1-usql.1 | ||
| ### Patch Changes | ||
| - 20127231: Bump all versions so version pinning works | ||
| ## 0.2.1-usql.0 | ||
| ### Patch Changes | ||
| - 29c149d6: added stricter types to db adapters | ||
| ## 0.2.0 | ||
@@ -4,0 +75,0 @@ |
+163
-43
@@ -1,15 +0,36 @@ | ||
/**
 * Enum for evidence types
 * @readonly
 * @enum {'boolean' | 'number' | 'string' | 'date' | 'bigint'}
 */
const EvidenceType = /** @type {const} */ ({
	BOOLEAN: 'boolean',
	NUMBER: 'number',
	STRING: 'string',
	DATE: 'date',
	BIGINT: 'bigint'
});

/**
 * Enum for evidence type fidelity: whether a column type was guessed from
 * row values or reported precisely by the datasource.
 * @readonly
 * @enum {'inferred' | 'precise'}
 */
const TypeFidelity = /** @type {const} */ ({
	INFERRED: 'inferred',
	PRECISE: 'precise'
});
| /** | ||
| * @typedef {Object} ColumnDefinition | ||
| * @property {string} name | ||
| * @property {EvidenceType} evidenceType | ||
| * @property {TypeFidelity} typeFidelity | ||
| */ | ||
| /** | ||
| * Infers the evidence type of a column value | ||
| * @param {unknown} columnValue | ||
| * @returns {EvidenceType} | ||
| */ | ||
| const inferValueType = function (columnValue) { | ||
@@ -21,2 +42,3 @@ if (typeof columnValue === 'number') { | ||
| } else if (typeof columnValue === 'string') { | ||
| /** @type {EvidenceType} */ | ||
| let result = EvidenceType.STRING; | ||
@@ -48,44 +70,138 @@ if (columnValue && (columnValue.match(/-/g) || []).length === 2) { | ||
/**
 * Infers the evidence type of each column in a set of rows.
 *
 * Each column's type comes from the first row holding a non-null value for
 * that column; a column that is null in every row falls back to STRING.
 * All results are marked as INFERRED fidelity.
 * @param {Record<string, unknown>[]} rows
 * @returns {ColumnDefinition[] | undefined} undefined when rows is nullish,
 *   an empty array when rows is empty
 */
const inferColumnTypes = function (rows) {
	if (!rows) return undefined;
	if (rows.length === 0) return [];
	// Column set is taken from the first row only; later rows are consulted
	// solely to find a non-null sample value per column.
	const columns = Object.keys(rows[0]);
	return columns.map((column) => {
		const firstRowWithColumnValue = rows.find((element) => element[column] != null);
		const inferredType = firstRowWithColumnValue
			? inferValueType(firstRowWithColumnValue[column])
			: EvidenceType.STRING;
		return { name: column, evidenceType: inferredType, typeFidelity: TypeFidelity.INFERRED };
	});
};
/**
 * Normalizes raw driver output into a QueryResult.
 *
 * Accepts either a full QueryResult ({ rows, columnTypes }) or a bare rows
 * array. Column types are taken from the input when present; otherwise they
 * are inferred from the row values.
 * @param {QueryResult | QueryResult["rows"]} queryResults
 * @returns {QueryResult}
 */
const processQueryResults = function (queryResults) {
	// A bare array has no .rows property, so ?? falls through to the array itself
	const rows = queryResults.rows ?? queryResults;
	const columnTypes = queryResults.columnTypes ?? inferColumnTypes(rows);
	return { rows, columnTypes };
};
| /** | ||
| * @typedef {Object} AsyncIterableToBatchedAsyncGeneratorOptions | ||
| * @property {(rows: Record<string, unknown>[]) => QueryResult["columnTypes"]} [mapResultsToEvidenceColumnTypes] | ||
| * @property {(row: unknown) => Record<string, unknown>} [standardizeRow] | ||
| * @property {() => void | Promise<void>} [closeConnection] | ||
| */ | ||
| /** | ||
| * Converts an async iterable to a QueryResult | ||
| * @param {AsyncIterable<unknown>} iterable | ||
| * @param {number} batchSize | ||
| * @param {AsyncIterableToBatchedAsyncGeneratorOptions} options additional optional parameters | ||
| * @returns {Promise<QueryResult>} | ||
| */ | ||
| const asyncIterableToBatchedAsyncGenerator = async function ( | ||
| iterable, | ||
| batchSize, | ||
| { | ||
| // @ts-ignore | ||
| standardizeRow = (x) => x, | ||
| mapResultsToEvidenceColumnTypes, | ||
| closeConnection = () => {} | ||
| } = {} | ||
| ) { | ||
| /** @type {Record<string, unknown>[]} */ | ||
| const preread_rows = []; | ||
| /** @type {QueryResult["columnTypes"]} */ | ||
| let columnTypes = []; | ||
| if (mapResultsToEvidenceColumnTypes) { | ||
| const iterator = iterable[Symbol.asyncIterator](); | ||
| const firstRow = await iterator.next().then((x) => x.value); | ||
| const column_names = Object.keys(firstRow); | ||
| preread_rows.push(standardizeRow(firstRow)); | ||
| let null_columns = column_names.filter((column) => firstRow[column] == null); | ||
| while (null_columns.length > 0) { | ||
| const next = await iterator.next().then((x) => x.value); | ||
| preread_rows.push(standardizeRow(next)); | ||
| null_columns = null_columns.filter((column) => next[column] == null); | ||
| } | ||
| columnTypes = mapResultsToEvidenceColumnTypes(preread_rows); | ||
| } | ||
| const rows = async function* () { | ||
| let batch = []; | ||
| batch.push(...preread_rows); | ||
| for await (const row of iterable) { | ||
| batch.push(standardizeRow(row)); | ||
| if (batch.length >= batchSize) { | ||
| yield batch; | ||
| batch = []; | ||
| } | ||
| } | ||
| // No more batches, safe to close connection now | ||
| await closeConnection(); | ||
| if (batch.length > 0) { | ||
| yield batch; | ||
| } | ||
| // Clean up | ||
| }; | ||
| return { rows, columnTypes }; | ||
| }; | ||
/**
 * Flattens a batched async generator into one array of rows.
 * @param {() => AsyncIterable<Array<Record<string, unknown>>>} asyncGenerator
 * @returns {Promise<Record<string, unknown>[]>}
 */
const batchedAsyncGeneratorToArray = async (asyncGenerator) => {
	let flattened = [];
	for await (const chunk of asyncGenerator()) {
		flattened = flattened.concat(chunk);
	}
	return flattened;
};
/**
 * Normalizes a query string: trims surrounding whitespace, drops a single
 * trailing semicolon, and appends a newline so a trailing line comment
 * cannot swallow text appended after the query.
 * @param {string} query
 * @returns {string}
 */
const cleanQuery = (query) => {
	let normalized = query.trim();
	if (normalized.endsWith(';')) {
		normalized = normalized.slice(0, -1);
	}
	// terminate a possible trailing line comment
	return `${normalized}\n`;
};
/**
 * Pulls every batch out of a streamed QueryResult and discards it, ensuring
 * the underlying source runs to completion. Any error raised while draining
 * is deliberately swallowed (best-effort drain).
 * @param {QueryResult} stream
 * @returns {Promise<void>}
 */
const exhaustStream = async ({ rows }) => {
	try {
		const batches = rows();
		for await (const ignoredBatch of batches) {
			// discard each batch; only completion matters
		}
	} catch {
		// intentionally ignored
	}
};
| exports.EvidenceType = EvidenceType; | ||
@@ -95,3 +211,7 @@ exports.TypeFidelity = TypeFidelity; | ||
| exports.inferColumnTypes = inferColumnTypes; | ||
| exports.asyncIterableToBatchedAsyncGenerator = asyncIterableToBatchedAsyncGenerator; | ||
| exports.batchedAsyncGeneratorToArray = batchedAsyncGeneratorToArray; | ||
| exports.cleanQuery = cleanQuery; | ||
| exports.exhaustStream = exhaustStream; | ||
| exports.getEnv = require('./src/getEnv.cjs').getEnv; |
+3
-3
| { | ||
| "name": "@evidence-dev/db-commons", | ||
| "version": "0.0.0-84cf5211", | ||
| "description": "Shared modules for Evidence Database Drivers ", | ||
| "version": "0.0.0-854e2246", | ||
| "description": "Shared modules for Evidence Datasource Drivers", | ||
| "main": "index.cjs", | ||
@@ -9,3 +9,3 @@ "author": "evidence.dev", | ||
| "dependencies": { | ||
| "fs-extra": "10.0.0" | ||
| "fs-extra": "11.2.0" | ||
| }, | ||
@@ -12,0 +12,0 @@ "type": "module", |
+1
-1
@@ -10,3 +10,3 @@ /** | ||
| while (keyPathParts.length) { | ||
| location = location[keyPathParts.shift()]; | ||
| location = location[/** @type {string} */ (keyPathParts.shift())]; | ||
| if (Array.isArray(location) && keyPathParts.length) { | ||
@@ -13,0 +13,0 @@ // We're too soon |
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
19246
45.63%8
14.29%448
46.41%+ Added
- Removed
Updated