@evidence-dev/bigquery
Comparing version 0.0.0-f05698e5 to 0.0.0-f06eabfd
CHANGELOG.md
# @evidence-dev/bigquery

## 2.0.5

### Patch Changes

- Updated dependencies [1da26c4e]
  - @evidence-dev/db-commons@1.0.4

## 2.0.4

### Patch Changes

- 2bcbf0ed: Add keywords to improve searchability for datasources
- Updated dependencies [31381835]
  - @evidence-dev/db-commons@1.0.3

## 2.0.3

### Patch Changes

- e023deb0: Make sure credentials are properly transformed

## 2.0.2

### Patch Changes

- 0e0a4392: Add skeleton README files for adapters
- d4fc618e:
  - Removed legacy environment variable configuration
  - Adjusted the connection to use a function
  - The connection test now uses a connection directly rather than runQuery (a minimal sketch follows this entry)
- Updated dependencies [fc7fe470]
  - @evidence-dev/db-commons@1.0.2
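The connection-test change above is described only in prose; the following is a hedged sketch of what testing a BigQuery connection directly might look like, assuming the same credential options the package's tests use. The `testConnection` name and the `getDatasets` probe call are illustrative assumptions, not the adapter's published implementation.

```js
// Hypothetical sketch only: build a client from the credential options and
// make one cheap authenticated call, instead of routing through runQuery.
import { BigQuery } from '@google-cloud/bigquery';

async function testConnection(options) {
	const client = new BigQuery({
		projectId: options.project_id,
		credentials: {
			client_email: options.client_email,
			private_key: options.private_key
		}
	});
	// Any lightweight authenticated request verifies the credentials;
	// listing a single dataset avoids running a query.
	await client.getDatasets({ maxResults: 1 });
	return true;
}
```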
## 2.0.1

### Patch Changes

- Updated dependencies
  - @evidence-dev/db-commons@1.0.1

## 2.0.0

### Major Changes
- cb0fc468: This update includes major changes to the way Evidence interacts with data.
  Instead of running queries against the production database and shipping the results
  with the project as pre-rendered, static JSON, query results are now stored as .parquet files.
  Parquet enables the use of DuckDB on the client, allowing for much greater interactivity
  on pages, and interoperability between different data sources (e.g. joins across Postgres and MySQL).
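  As an illustration of that interoperability claim, here is a minimal sketch of a cross-source join over two parquet extracts, written against the duckdb Node package directly; the file names are hypothetical, and this is not Evidence's own client-side wiring.

  ```js
  // Minimal sketch: DuckDB can query parquet files by path and join extracts
  // that originally came from different databases. File names are hypothetical.
  import duckdb from 'duckdb';

  const db = new duckdb.Database(':memory:');
  db.all(
  	`select u.name, count(*) as orders
  	 from 'postgres_orders.parquet' o
  	 join 'mysql_users.parquet' u using (user_id)
  	 group by u.name`,
  	(err, rows) => {
  		if (err) throw err;
  		console.table(rows);
  	}
  );
  ```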
### Patch Changes

- 9ff614d2: fix bigquery types timing out for large queries
- bf4a112a: Update package.json to use new datasource field
- bdf8e08a: revamp value standardization in bigquery adapter
- c4822852: Support for streaming results
- 781d2677: exhaust testconnection streams, improve type inference, add trino/databricks adapters
- 20127231: Bump all versions so version pinning works
- 29c149d6: added stricter types to db adapters
- Updated dependencies [bf4a112a]
- Updated dependencies [cd57ba69]
- Updated dependencies [c4822852]
- Updated dependencies [781d2677]
- Updated dependencies [20127231]
- Updated dependencies [29c149d6]
  - @evidence-dev/db-commons@0.2.1

## 2.0.0-usql.7

### Patch Changes

- 9ff614d2: fix bigquery types timing out for large queries

## 2.0.0-usql.6

### Patch Changes

- 781d2677: exhaust testconnection streams, improve type inference, add trino/databricks adapters
- Updated dependencies [781d2677]
  - @evidence-dev/db-commons@0.2.1-usql.5

## 2.0.0-usql.5

### Patch Changes

- Update package.json to use new datasource field
- Updated dependencies
  - @evidence-dev/db-commons@0.2.1-usql.4

## 2.0.0-usql.4

### Patch Changes

- Updated dependencies [cd57ba69]
  - @evidence-dev/db-commons@0.2.1-usql.3

## 2.0.0-usql.3

### Patch Changes

- Support for streaming results
- Updated dependencies
  - @evidence-dev/db-commons@0.2.1-usql.2

## 2.0.0-usql.2

### Patch Changes

- 20127231: Bump all versions so version pinning works
- Updated dependencies [20127231]
  - @evidence-dev/db-commons@0.2.1-usql.1

## 2.0.0-usql.1

### Patch Changes

- bdf8e08a: revamp value standardization in bigquery adapter
- 29c149d6: added stricter types to db adapters
- Updated dependencies [29c149d6]
  - @evidence-dev/db-commons@0.2.1-usql.0

## 2.0.0-usql.0

### Major Changes

- cb0fc468: This update includes major changes to the way Evidence interacts with data.
  Instead of running queries against the production database and shipping the results
  with the project as pre-rendered, static JSON, query results are now stored as .parquet files.
  Parquet enables the use of DuckDB on the client, allowing for much greater interactivity
  on pages, and interoperability between different data sources (e.g. joins across Postgres and MySQL).

## 1.3.2
@@ -4,0 +136,0 @@
 {
 	"name": "@evidence-dev/bigquery",
-	"version": "0.0.0-f05698e5",
+	"version": "0.0.0-f06eabfd",
 	"description": "BigQuery driver for Evidence projects",
@@ -10,5 +10,5 @@ "main": "index.cjs",
 	"@google-cloud/bigquery": "6.2.0",
-	"fs-extra": "9.1.0",
-	"google-auth-library": "^8.8.0",
-	"@evidence-dev/db-commons": "^0.0.0-f05698e5"
+	"fs-extra": "11.2.0",
+	"google-auth-library": "^9.6.3",
+	"@evidence-dev/db-commons": "0.0.0-f06eabfd"
 	},
@@ -19,2 +19,15 @@ "devDependencies": {
 	"type": "module",
+	"evidence": {
+		"datasources": [
+			[
+				"bigquery",
+				"bq"
+			]
+		]
+	},
+	"keywords": [
+		"evidence",
+		"evidence-datasource",
+		"bigquery"
+	],
 	"scripts": {
@@ -21,0 +34,0 @@ "test": "uvu test test.js"
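The new `evidence.datasources` field declares the source types this package serves, including the `bq` alias. As a hedged sketch of how such a field could be consumed when resolving a project's configured source type to an adapter (`adapterProvides` is a hypothetical helper, not Evidence's actual loader):

```js
// Hypothetical helper: check whether an adapter's package.json declares a
// given datasource type (or alias) in its "evidence.datasources" field.
import { readFile } from 'node:fs/promises';

async function adapterProvides(pkgPath, sourceType) {
	const pkg = JSON.parse(await readFile(pkgPath, 'utf8'));
	const datasources = pkg.evidence?.datasources ?? [];
	// Each entry is a name or a list of aliases, e.g. ["bigquery", "bq"].
	return datasources.some((entry) =>
		Array.isArray(entry) ? entry.includes(sourceType) : entry === sourceType
	);
}

console.log(await adapterProvides('./package.json', 'bq')); // true for this package
```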
test/test.js
 import { test } from 'uvu';
 import * as assert from 'uvu/assert';
 import runQuery from '../index.cjs';
-import { TypeFidelity } from '@evidence-dev/db-commons';
+import { TypeFidelity, batchedAsyncGeneratorToArray } from '@evidence-dev/db-commons';
 import 'dotenv/config';
-let results;
 test('query runs', async () => {
-	results = await runQuery(
-		"select 100 as number_col, current_date as date_col, current_timestamp as timestamp_col, 'Evidence' as string_col, true as bool_col"
+	const { rows: row_generator, columnTypes } = await runQuery(
+		`select
+			INTERVAL 5 DAY AS interval_col,
+			NUMERIC '123456' as numeric_col,
+			BIGNUMERIC '1234567890123456789012345678' as bignumeric_col,
+			'hello!' as string_col,
+			B"abc" as bytes_col,
+			123456 as int_col,
+			123456.789 as float_col,
+			true as bool_col,
+			DATE '2021-01-01' as date_col,
+			TIME '12:34:56' as time_col,
+			TIMESTAMP '2021-01-01 12:34:56' as timestamp_col,
+			DATETIME '2021-01-01 12:34:56' as datetime_col
+		`,
+		{
+			project_id: process.env.BIGQUERY_PROJECT_ID,
+			client_email: process.env.BIGQUERY_CLIENT_EMAIL,
+			private_key: process.env.BIGQUERY_PRIVATE_KEY
+		}
 	);
-	assert.instance(results.rows, Array);
-	assert.instance(results.columnTypes, Array);
-	assert.type(results.rows[0], 'object');
-	assert.equal(results.rows[0].number_col, 100);
+	const rows = await batchedAsyncGeneratorToArray(row_generator);
-	let actualColumnTypes = results.columnTypes.map((columnType) => columnType.evidenceType);
-	let actualColumnNames = results.columnTypes.map((columnType) => columnType.name);
-	let actualTypePrecisions = results.columnTypes.map((columnType) => columnType.typeFidelity);
+	assert.instance(rows, Array);
+	assert.instance(columnTypes, Array);
+	assert.type(rows[0], 'object');
-	let expectedColumnTypes = ['number', 'date', 'date', 'string', 'boolean'];
-	let expectedColumnNames = ['number_col', 'date_col', 'timestamp_col', 'string_col', 'bool_col'];
-	let expectedTypePrecision = Array(5).fill(TypeFidelity.PRECISE);
+	const result = rows[0];
+	assert.equal(result.int_col, 123456, 'INT types should be converted to JS Numbers');
+	assert.equal(result.float_col, 123456.789, 'FLOAT types should be converted to JS Numbers');
+	assert.equal(result.bool_col, true, 'BOOL types should be converted to JS Booleans');
+	assert.equal(result.string_col, 'hello!', 'STRING types should be converted to JS Strings');
+	assert.equal(result.bytes_col, 'YWJj', 'BYTES types should be converted to base64 strings');
+	assert.equal(
+		result.date_col.getTime(),
+		new Date('2021-01-01').getTime(),
+		'DATE types should be converted to JS Date objects'
+	);
+	assert.equal(result.time_col, '12:34:56', 'TIME types should be converted to strings');
+	assert.equal(
+		result.timestamp_col.getTime(),
+		new Date('2021-01-01T12:34:56.000Z').getTime(),
+		'TIMESTAMP types should be converted to JS Date objects'
+	);
+	assert.equal(
+		result.datetime_col.getTime(),
+		new Date('2021-01-01T12:34:56.000Z').getTime(),
+		'DATETIME types should be converted to JS Date objects'
+	);
+	assert.equal(result.interval_col, '0-0 5 0:0:0', 'INTERVAL types should be converted to strings');
+	assert.equal(result.numeric_col, 123456, 'NUMERIC types should be converted to JS Numbers');
+	assert.equal(
+		result.bignumeric_col,
+		1.2345678901234569e27,
+		'BIGNUMERIC types should be converted to JS Numbers'
+	);
+	const actualColumnTypes = columnTypes.map((columnType) => columnType.evidenceType);
+	const actualColumnNames = columnTypes.map((columnType) => columnType.name);
+	const expectedColumnTypes = [
+		'string',
+		'number',
+		'number',
+		'string',
+		'string',
+		'number',
+		'number',
+		'boolean',
+		'date',
+		'string',
+		'date',
+		'date'
+	];
+	const expectedColumnNames = [
+		'interval_col',
+		'numeric_col',
+		'bignumeric_col',
+		'string_col',
+		'bytes_col',
+		'int_col',
+		'float_col',
+		'bool_col',
+		'date_col',
+		'time_col',
+		'timestamp_col',
+		'datetime_col'
+	];
 	assert.equal(
 		true,
@@ -36,18 +108,18 @@ expectedColumnTypes.length === actualColumnTypes.length &&
 	);
 	assert.equal(
 		true,
 		expectedTypePrecision.length === actualTypePrecisions.length &&
 			expectedTypePrecision.every((value, index) => value === actualTypePrecisions[index])
 	);
 });
 test('numeric types are retrieved correctly', async () => {
-	results = await runQuery(
-		'select CAST(1.23456789 AS NUMERIC) as numeric_number, CAST(1.23456789 AS FLOAT64) as float64_number, CAST(1.23456789 AS DECIMAL) as decimal_number, CAST(1.23456789 AS STRING) as string_number'
+	const { rows: row_generator, columnTypes } = await runQuery(
+		'select CAST(1.23456789 AS NUMERIC) as numeric_number, CAST(1.23456789 AS FLOAT64) as float64_number, CAST(1.23456789 AS DECIMAL) as decimal_number, CAST(1.23456789 AS STRING) as string_number',
+		{
+			project_id: process.env.BIGQUERY_PROJECT_ID,
+			client_email: process.env.BIGQUERY_CLIENT_EMAIL,
+			private_key: process.env.BIGQUERY_PRIVATE_KEY
+		}
 	);
-	let actualColumnTypes = results.columnTypes.map((columnType) => columnType.evidenceType);
-	let actualColumnNames = results.columnTypes.map((columnType) => columnType.name);
-	let actualTypePrecisions = results.columnTypes.map((columnType) => columnType.typeFidelity);
-	let actualValues = Object.keys(results.rows[0]).map((key) => results.rows[0][key]);
+	const rows = await batchedAsyncGeneratorToArray(row_generator);
+	let actualColumnTypes = columnTypes.map((columnType) => columnType.evidenceType);
+	let actualColumnNames = columnTypes.map((columnType) => columnType.name);
+	let actualTypePrecisions = columnTypes.map((columnType) => columnType.typeFidelity);
+	let actualValues = Object.keys(rows[0]).map((key) => rows[0][key]);
@@ -77,2 +149,24 @@ let expectedColumnTypes = ['number', 'number', 'number', 'string'];
+test('query batches results properly', async () => {
+	const { rows, expectedRowCount } = await runQuery(
+		'select 1 union all select 2 union all select 3 union all select 4 union all select 5',
+		{
+			project_id: process.env.BIGQUERY_PROJECT_ID,
+			client_email: process.env.BIGQUERY_CLIENT_EMAIL,
+			private_key: process.env.BIGQUERY_PRIVATE_KEY
+		},
+		2
+	);
+	const arr = [];
+	for await (const batch of rows()) {
+		arr.push(batch);
+	}
+	for (const batch of arr.slice(0, -1)) {
+		assert.equal(batch.length, 2);
+	}
+	assert.equal(arr[arr.length - 1].length, 1);
+	assert.equal(expectedRowCount, 5);
+});
 test.run();
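Taken together, these tests exercise the new streaming contract: runQuery now resolves to `{ rows, columnTypes, expectedRowCount }`, where `rows()` returns an async generator yielding arrays of at most `batchSize` rows rather than one fully materialized array. A minimal consumer sketch under those assumptions; the table name is hypothetical:

```js
// Sketch of consuming the streaming interface the tests above exercise.
import runQuery from '../index.cjs';

const batchSize = 1000; // optional third argument, as in the batching test above
const { rows, expectedRowCount } = await runQuery(
	'select * from my_dataset.my_table', // hypothetical table
	{
		project_id: process.env.BIGQUERY_PROJECT_ID,
		client_email: process.env.BIGQUERY_CLIENT_EMAIL,
		private_key: process.env.BIGQUERY_PRIVATE_KEY
	},
	batchSize
);

let seen = 0;
for await (const batch of rows()) {
	seen += batch.length; // each batch is an array of at most batchSize rows
}
console.log(`streamed ${seen} of ${expectedRowCount} rows`);
```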
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft. Found 3 instances in 1 package.

No README
Quality: Package does not have a README. This may indicate a failed publish or a low-quality package. Found 1 instance in 1 package.
+ Added @evidence-dev/db-commons@0.0.0-f06eabfd (transitive)
+ Added agent-base@7.1.3 (transitive)
+ Added fs-extra@11.2.0 (transitive)
+ Added gaxios@6.7.1 (transitive)
+ Added gcp-metadata@6.1.1 (transitive)
+ Added google-auth-library@9.15.1 (transitive)
+ Added google-logging-utils@0.0.2 (transitive)
+ Added gtoken@7.1.0 (transitive)
+ Added https-proxy-agent@7.0.6 (transitive)
- Removed @evidence-dev/db-commons@0.0.0-ffa649e3 (transitive)
- Removed at-least-node@1.0.0 (transitive)
- Removed fs-extra@9.1.0, fs-extra@10.0.0 (transitive)
Updated fs-extra@11.2.0
Updated google-auth-library@^9.6.3