Socket
Socket
Sign in · Demo · Install

clickhouse-ts

Package Overview
Dependencies
Maintainers
1
Versions
70
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

clickhouse-ts - npm Package Compare versions

Comparing version 1.4.5 to 1.5.0

10

dist/src/caching/NodeJSCacheManager.js

@@ -31,7 +31,7 @@ "use strict";

_NodeJSCacheManager_checkChunks.set(this, async () => {
Debug_1.debug.log('chunksContent', { tableChunks: __classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f") });
Debug_1.debug.log('chunk.content', { tableChunks: __classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f") });
for await (const [table, chunkIds] of Object.entries(__classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f"))) {
for await (const chunkId of Object.keys(chunkIds)) {
const chunkLen = __classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f")[table][chunkId].length;
Debug_1.debug.log('chunkInfo', { table, chunkId, chunkLen });
Debug_1.debug.log('chunk.info', { table, chunkId, chunkLen });
}

@@ -42,3 +42,3 @@ }

for (const chunkNamespace of Object.keys(chunkNamespaces)) {
Debug_1.debug.log('checkChunk', { table, chunkNamespace });
Debug_1.debug.log('chunk.check', { table, chunkNamespace });
const [_chunk_, _table, _id, _strExpiresAtUnix] = chunkNamespace.split(__classPrivateFieldGet(this, _NodeJSCacheManager_splitter, "f"));

@@ -76,3 +76,3 @@ const expiresAt = Number(_strExpiresAtUnix);

__classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f")[table][newChunk] = [];
Debug_1.debug.log('createChunk', { table, newChunk, tableChunks: __classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f") });
Debug_1.debug.log('chunk.create', { table, newChunk, tableChunks: __classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f") });
return newChunk;

@@ -82,3 +82,3 @@ });

delete __classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f")[table][chunk];
Debug_1.debug.log('deleteChunk', { table, chunk, tableChunks: __classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f") });
Debug_1.debug.log('chunk.delete', { table, chunk, tableChunks: __classPrivateFieldGet(this, _NodeJSCacheManager_tableChunks, "f") });
});

@@ -85,0 +85,0 @@ _NodeJSCacheManager_getChunk.set(this, (table) => {

@@ -33,7 +33,7 @@ "use strict";

this.checkInstance();
Debug_1.debug.log('chunksContent', { tableChunks: __classPrivateFieldGet(this, _RedisCacheManager_tableChunks, "f") });
Debug_1.debug.log('chunk.content', { tableChunks: __classPrivateFieldGet(this, _RedisCacheManager_tableChunks, "f") });
for await (const [table, chunkIds] of Object.entries(__classPrivateFieldGet(this, _RedisCacheManager_tableChunks, "f"))) {
for await (const chunkId of chunkIds) {
const chunkLen = await __classPrivateFieldGet(this, _RedisCacheManager_instance, "f").llen(chunkId);
Debug_1.debug.log('chunkInfo', { table, chunkId, chunkLen });
Debug_1.debug.log('chunk.info', { table, chunkId, chunkLen });
}

@@ -44,3 +44,3 @@ }

for (const chunkNamespace of chunkNamespaces) {
Debug_1.debug.log('checkChunk', { table, chunkNamespace });
Debug_1.debug.log('chunk.check', { table, chunkNamespace });
const [_chunk_, _table, _id, _strExpiresAtUnix] = chunkNamespace.split(__classPrivateFieldGet(this, _RedisCacheManager_splitter, "f"));

@@ -91,3 +91,3 @@ const expiresAt = Number(_strExpiresAtUnix);

__classPrivateFieldGet(this, _RedisCacheManager_tableChunks, "f")[table].push(newChunk);
Debug_1.debug.log('createChunk', { table, newChunk, tableChunks: __classPrivateFieldGet(this, _RedisCacheManager_tableChunks, "f") });
Debug_1.debug.log('chunk.create', { table, newChunk, tableChunks: __classPrivateFieldGet(this, _RedisCacheManager_tableChunks, "f") });
return newChunk;

@@ -101,3 +101,3 @@ });

await __classPrivateFieldGet(this, _RedisCacheManager_instance, "f").del(chunk);
Debug_1.debug.log('deleteChunk', { table, chunk, tableChunks: __classPrivateFieldGet(this, _RedisCacheManager_tableChunks, "f") });
Debug_1.debug.log('chunk.delete', { table, chunk, tableChunks: __classPrivateFieldGet(this, _RedisCacheManager_tableChunks, "f") });
});

@@ -134,3 +134,3 @@ _RedisCacheManager_getChunk.set(this, (table) => {

const cachedChunkTables = await __classPrivateFieldGet(this, _RedisCacheManager_instance, "f").keys(`chunk${__classPrivateFieldGet(this, _RedisCacheManager_splitter, "f")}*`);
Debug_1.debug.log('useRedisInstance', { cachedChunkTables });
Debug_1.debug.log('hook.useInstance', { cachedChunkTables });
for await (const chunkTable of cachedChunkTables) {

@@ -137,0 +137,0 @@ const [_, table] = chunkTable.split(__classPrivateFieldGet(this, _RedisCacheManager_splitter, "f"));

@@ -35,41 +35,2 @@ "use strict";

_Clickhouse_isFirstInsert.set(this, void 0);
this.formatInsertRows = (rows) => {
    // The first row fixes the column order; every row is then rendered
    // as a parenthesised tuple of formatted values, and the tuples are
    // joined into a single comma-separated VALUES payload.
    const columns = Object.keys(rows[0]);
    const renderRow = (row) => `(${columns.map((column) => this.formatInsertValue(row[column])).join(',')})`;
    return { keysArr: columns, valuesSqlFormat: rows.map(renderRow).join(',') };
};
this.formatInsertValue = (rowValue) => {
    /**
     * Formats one row value into its ClickHouse SQL literal form:
     * arrays -> [v1,v2,...] (recursively), plain objects -> map(k,v,...),
     * numbers pass through unescaped, strings are SQL-escaped.
     * Any other type raises PreprocessInsertQueryError.
     */
    /**
     * is Array
     */
    if (Array.isArray(rowValue)) {
        return `[${rowValue.map(this.formatInsertValue).join(',')}]`;
    }
    /**
     * is Map
     */
    if (lodash_1.isObject(rowValue)) {
        const mapValues = Object
            .entries(rowValue)
            .map(([mapKey, mapValue]) => {
            // FIX: original tested `rowValue` here, which is always an object
            // inside this branch, so every map() insert threw. The guard must
            // reject nested objects among the map VALUES instead.
            if (lodash_1.isObject(mapValue))
                throw new PreprocessInsertQueryError_1.PreprocessInsertQueryError(`Value of map() has denied type, ${mapValue}, ${typeof mapValue}`);
            return [sqlstring_1.default.escape(mapKey), sqlstring_1.default.escape(mapValue)];
        });
        return `map(${mapValues.join(',')})`;
    }
    /**
     * is Number
     */
    if (typeof rowValue === 'number') {
        return rowValue;
    }
    /**
     * is String
     */
    if (typeof rowValue === 'string') {
        return sqlstring_1.default.escape(rowValue);
    }
    throw new PreprocessInsertQueryError_1.PreprocessInsertQueryError('Unknown type of row: ' + rowValue);
};
__classPrivateFieldSet(this, _Clickhouse_isFirstInsert, true, "f");

@@ -93,3 +54,3 @@ __classPrivateFieldSet(this, _Clickhouse_onChunkCb, [], "f");

useInsert: async (table, rows) => {
Debug_1.debug.log('hooks.useInsert', { table, rows });
Debug_1.debug.log('hook.useInsert', { table, rows });
this.insert(table, rows);

@@ -105,2 +66,48 @@ }

}
formatInsertRows(rows) {
const keysArr = Object.keys(rows[0]);
const valuesSqlArr = rows.map(row => `(${keysArr.map(key => this.formatInsertValue(row[key])).join(',')})`);
return { keysArr, valuesSqlFormat: valuesSqlArr.join(',') };
}
formatInsertValue(rowValue) {
Debug_1.debug.log('row.value', { rowValue });
/**
* is Array
*/
if (Array.isArray(rowValue)) {
return `[${rowValue.map(this.formatInsertValue).join(',')}]`;
}
/**
* is Map
*/
if (lodash_1.isObject(rowValue)) {
const mapValues = Object
.entries(rowValue)
.map(([mapKey, mapValue]) => {
if (lodash_1.isObject(rowValue))
throw new PreprocessInsertQueryError_1.PreprocessInsertQueryError(`Value of map() has denied type, ${mapValue}, ${typeof mapValue}`);
return [sqlstring_1.default.escape(mapKey), sqlstring_1.default.escape(mapValue)];
});
return `map(${mapValues.join(',')})`;
}
/**
* is Number
*/
if (typeof rowValue === 'number') {
return rowValue;
}
/**
* is String
*/
if (typeof rowValue === 'string') {
return sqlstring_1.default.escape(rowValue);
}
/**
* is Null
*/
if (lodash_1.isNull(rowValue)) {
return sqlstring_1.default.escape('NULL');
}
throw new PreprocessInsertQueryError_1.PreprocessInsertQueryError('Unknown type of row: ' + rowValue);
}
/**

@@ -132,3 +139,3 @@ *

const request = `${query} FORMAT ${options.responseFormat ?? __classPrivateFieldGet(this, _Clickhouse_options, "f").defaultResponseFormat}`;
Debug_1.debug.log('Clickhouse.query', request);
Debug_1.debug.log('ch.query', request);
return __classPrivateFieldGet(this, _Clickhouse_httpClient, "f").request({ data: request });

@@ -147,3 +154,3 @@ }

__classPrivateFieldGet(this, _Clickhouse_cacheManager, "f").on('chunk', (chunkId, table, rows) => {
Debug_1.debug.log('Clickhouse.useCaching', 'received event \'chunk\'', { chunkId, table, rowsCount: rows.length, firstRow: rows[0] });
Debug_1.debug.log('ch.useCaching', `received event 'chunk'`, { chunkId, table, rowsCount: rows.length, firstRow: rows[0] });
__classPrivateFieldGet(this, _Clickhouse_onChunkCb, "f").forEach(cb => cb(chunkId, table, rows));

@@ -160,3 +167,3 @@ });

if (__classPrivateFieldGet(this, _Clickhouse_isFirstInsert, "f")) {
Debug_1.debug.log('Clickhouse.cache', 'Implementing redis cache instance');
Debug_1.debug.log('ch.cache', 'Implementing redis cache instance');
__classPrivateFieldSet(this, _Clickhouse_isFirstInsert, false, "f");

@@ -163,0 +170,0 @@ if (__classPrivateFieldGet(this, _Clickhouse_cacheManager, "f") instanceof RedisCacheManager_1.RedisCacheManager) {

@@ -1,2 +0,2 @@

export declare type DebugProvider = 'deleteChunk' | 'checkChunk' | 'createChunk' | 'chunksContent' | 'chunkInfo' | 'useRedisInstance' | 'cache' | 'hooks.useInsert' | 'Clickhouse.insert' | 'Clickhouse.query' | 'Clickhouse.cache' | 'Clickhouse.useCaching' | 'ClickhouseHttpClient.request';
export declare type DebugProvider = 'chunk.delete' | 'chunk.check' | 'chunk.create' | 'chunk.content' | 'chunk.info' | 'hook.useInsert' | 'hook.useInstance' | 'cache' | 'ch.insert' | 'ch.query' | 'ch.cache' | 'ch.useCaching' | 'http.request' | 'row.value';
export declare class Debug {

@@ -3,0 +3,0 @@ #private;

@@ -60,3 +60,3 @@ "use strict";

};
Debug_1.debug.log('ClickhouseHttpClient.request', 'Http request', { config });
Debug_1.debug.log('http.request', 'Http request', { config });
__classPrivateFieldGet(this, _ClickhouseHttpClient_axios, "f")

@@ -63,0 +63,0 @@ .request(config)

@@ -12,3 +12,3 @@ {

],
"version": "1.4.5",
"version": "1.5.0",
"license": "ISC",

@@ -15,0 +15,0 @@ "description": "Clickhouse client on TypeScript using redis caching queries",

@@ -41,3 +41,3 @@ import dayjs from "dayjs";

readonly #checkChunks = async () => {
debug.log('chunksContent', { tableChunks: this.#tableChunks })
debug.log('chunk.content', { tableChunks: this.#tableChunks })

@@ -47,3 +47,3 @@ for await (const [table, chunkIds] of Object.entries(this.#tableChunks)) {

const chunkLen = this.#tableChunks[table][chunkId].length
debug.log('chunkInfo', { table, chunkId, chunkLen });
debug.log('chunk.info', { table, chunkId, chunkLen });
}

@@ -55,3 +55,3 @@ }

for (const chunkNamespace of Object.keys(chunkNamespaces)) {
debug.log('checkChunk', { table, chunkNamespace })
debug.log('chunk.check', { table, chunkNamespace })
const [_chunk_, _table, _id, _strExpiresAtUnix] = chunkNamespace.split(this.#splitter)

@@ -96,3 +96,3 @@ const expiresAt = Number(_strExpiresAtUnix)

debug.log('createChunk', { table, newChunk, tableChunks: this.#tableChunks })
debug.log('chunk.create', { table, newChunk, tableChunks: this.#tableChunks })

@@ -104,3 +104,3 @@ return newChunk

delete this.#tableChunks[table][chunk]
debug.log('deleteChunk', { table, chunk, tableChunks: this.#tableChunks })
debug.log('chunk.delete', { table, chunk, tableChunks: this.#tableChunks })
}

@@ -107,0 +107,0 @@

@@ -41,3 +41,3 @@ import dayjs from "dayjs";

debug.log('chunksContent', { tableChunks: this.#tableChunks })
debug.log('chunk.content', { tableChunks: this.#tableChunks })

@@ -47,3 +47,3 @@ for await (const [table, chunkIds] of Object.entries(this.#tableChunks)) {

const chunkLen = await this.#instance!.llen(chunkId)
debug.log('chunkInfo', { table, chunkId, chunkLen })
debug.log('chunk.info', { table, chunkId, chunkLen })
}

@@ -56,3 +56,3 @@ }

debug.log('checkChunk', { table, chunkNamespace })
debug.log('chunk.check', { table, chunkNamespace })

@@ -117,3 +117,3 @@ const [_chunk_, _table, _id, _strExpiresAtUnix] = chunkNamespace.split(this.#splitter)

this.#tableChunks[table].push(newChunk)
debug.log('createChunk', { table, newChunk, tableChunks: this.#tableChunks })
debug.log('chunk.create', { table, newChunk, tableChunks: this.#tableChunks })
return newChunk

@@ -130,3 +130,3 @@ }

debug.log('deleteChunk', { table, chunk, tableChunks: this.#tableChunks })
debug.log('chunk.delete', { table, chunk, tableChunks: this.#tableChunks })
}

@@ -165,3 +165,3 @@

debug.log('useRedisInstance', { cachedChunkTables })
debug.log('hook.useInstance', { cachedChunkTables })

@@ -168,0 +168,0 @@ for await (const chunkTable of cachedChunkTables) {

@@ -48,3 +48,3 @@ import sqlstring from 'sqlstring'

useInsert: async (table: string, rows: ClickhouseNamespace.InsertRows) => {
debug.log('hooks.useInsert', { table, rows })
debug.log('hook.useInsert', { table, rows })
this.insert(table, rows)

@@ -63,3 +63,3 @@ }

private formatInsertRows = (rows: ClickhouseNamespace.InsertRows) => {
private formatInsertRows (rows: ClickhouseNamespace.InsertRows) {
const keysArr = Object.keys(rows[0])

@@ -70,3 +70,4 @@ const valuesSqlArr = rows.map(row => `(${keysArr.map(key => this.formatInsertValue(row[key])).join(',')})`)

private formatInsertValue = (rowValue: any): ClickhouseNamespace.FormattedRowType => {
private formatInsertValue (rowValue: any): ClickhouseNamespace.FormattedRowType {
debug.log('row.value', { rowValue })
/**

@@ -153,3 +154,3 @@ * is Array

debug.log('Clickhouse.query', request)
debug.log('ch.query', request)

@@ -177,4 +178,4 @@ return this.#httpClient.request({ data: request })

debug.log(
'Clickhouse.useCaching',
'received event \'chunk\'',
'ch.useCaching',
`received event 'chunk'`,
{ chunkId, table, rowsCount: rows.length, firstRow: rows[0] }

@@ -201,3 +202,3 @@ )

if (this.#isFirstInsert) {
debug.log('Clickhouse.cache', 'Implementing redis cache instance')
debug.log('ch.cache', 'Implementing redis cache instance')

@@ -204,0 +205,0 @@ this.#isFirstInsert = false

export type DebugProvider =
'deleteChunk' |
'checkChunk' |
'createChunk' |
'chunksContent' |
'chunkInfo' |
'useRedisInstance' |
'chunk.delete' |
'chunk.check' |
'chunk.create' |
'chunk.content' |
'chunk.info' |
'hook.useInsert' |
'hook.useInstance' |
'cache' |
'hooks.useInsert' |
'Clickhouse.insert' |
'Clickhouse.query' |
'Clickhouse.cache' |
'Clickhouse.useCaching' |
'ClickhouseHttpClient.request'
'ch.insert' |
'ch.query' |
'ch.cache' |
'ch.useCaching' |
'http.request' |
'row.value'

@@ -17,0 +18,0 @@ export class Debug {

@@ -77,3 +77,3 @@ import axios, { AxiosRequestConfig, AxiosError } from 'axios'

debug.log('ClickhouseHttpClient.request', 'Http request', { config })
debug.log('http.request', 'Http request', { config })

@@ -80,0 +80,0 @@ this.#axios

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc