Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

clickhouse-ts

Package Overview
Dependencies
Maintainers
1
Versions
70
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

clickhouse-ts - npm Package Compare versions

Comparing version 1.3.0 to 1.3.1

2

dist/src/caching/CacheFactory.d.ts

@@ -9,3 +9,3 @@ import { RedisCache } from "./RedisCache";

chunkCollectorIntervalMs?: number;
}): RedisCache | SystemCache | undefined;
}): SystemCache | RedisCache | undefined;
}

@@ -36,3 +36,3 @@ import { ClickhouseNamespace } from './interface';

}>;
useCaching(type: 'redis', client: Redis): Promise<void>;
useCaching(type: 'redis', client: Redis): void;
onChunk(onChunkCb: (rows: ClickhouseNamespace.InsertRows) => void): void;

@@ -39,0 +39,0 @@ cache(table: string, rows: ClickhouseNamespace.InsertRows): Promise<{

@@ -16,3 +16,3 @@ "use strict";

};
var _Clickhouse_httpClient, _Clickhouse_options, _Clickhouse_cacheClient, _Clickhouse_cacheManager, _Clickhouse_onChunkCb;
var _Clickhouse_httpClient, _Clickhouse_options, _Clickhouse_cacheClient, _Clickhouse_cacheManager, _Clickhouse_onChunkCb, _Clickhouse_redisClient, _Clickhouse_isFirstInsert;
Object.defineProperty(exports, "__esModule", { value: true });

@@ -31,9 +31,8 @@ exports.Clickhouse = void 0;

_Clickhouse_onChunkCb.set(this, void 0);
_Clickhouse_redisClient.set(this, void 0);
_Clickhouse_isFirstInsert.set(this, void 0);
__classPrivateFieldSet(this, _Clickhouse_isFirstInsert, true, "f");
__classPrivateFieldSet(this, _Clickhouse_onChunkCb, () => undefined, "f");
__classPrivateFieldSet(this, _Clickhouse_options, options, "f");
__classPrivateFieldSet(this, _Clickhouse_httpClient, new ClickhouseHttpClient_1.ClickhouseHttpClient({ context, options: options.clickhouseOptions }), "f");
// this.#cacheClient = new CacheFactory(options.cache?.client).getCacheClient({
// chunkSize: options.cache?.chunkSize,
// chunkCollectorIntervalMs: options.cache?.chunkCollectorIntervalMs
// })
__classPrivateFieldSet(this, _Clickhouse_cacheManager, __classPrivateFieldGet(this, _Clickhouse_options, "f").cache ?

@@ -55,13 +54,2 @@ new RedisCacheManager_1.RedisCacheManager({

undefined, "f");
// this.#cacheClient?.useSave(async (table, rows) => {
// const keys = Object.keys(rows[0]).join(',')
// const values = rows.map(row => `(${Object.values(row).map(v => sqlstring.escape(v)).join(',')})`).join(',')
// await this.#httpClient.request({
// params: { query: `INSERT INTO ${table} (${keys}) VALUES` },
// data: values
// })
// })
// if (this.#options.cache?.redis?.instance && this.#cacheClient instanceof RedisCache) {
// this.#cacheClient.useInstance(this.#options.cache!.redis!)
// }
}

@@ -76,2 +64,7 @@ /**

if (__classPrivateFieldGet(this, _Clickhouse_cacheManager, "f")) {
if (__classPrivateFieldGet(this, _Clickhouse_isFirstInsert, "f")) {
console.log('first insert');
await __classPrivateFieldGet(this, _Clickhouse_cacheManager, "f").useRedisInstance(__classPrivateFieldGet(this, _Clickhouse_redisClient, "f"));
__classPrivateFieldSet(this, _Clickhouse_isFirstInsert, false, "f");
}
const result = await __classPrivateFieldGet(this, _Clickhouse_cacheManager, "f")

@@ -101,9 +94,7 @@ .cache(table, rows.map(row => JSON.stringify(row)));

}
async useCaching(type, client) {
useCaching(type, client) {
if (!__classPrivateFieldGet(this, _Clickhouse_cacheManager, "f")) {
throw new Error('Cache manager is not initialized!');
}
if (type === 'redis') {
await __classPrivateFieldGet(this, _Clickhouse_cacheManager, "f").useRedisInstance(client);
}
__classPrivateFieldSet(this, _Clickhouse_redisClient, client, "f");
__classPrivateFieldGet(this, _Clickhouse_cacheManager, "f").on('chunk', rows => __classPrivateFieldGet(this, _Clickhouse_onChunkCb, "f").call(this, rows));

@@ -123,2 +114,2 @@ }

exports.Clickhouse = Clickhouse;
_Clickhouse_httpClient = new WeakMap(), _Clickhouse_options = new WeakMap(), _Clickhouse_cacheClient = new WeakMap(), _Clickhouse_cacheManager = new WeakMap(), _Clickhouse_onChunkCb = new WeakMap();
_Clickhouse_httpClient = new WeakMap(), _Clickhouse_options = new WeakMap(), _Clickhouse_cacheClient = new WeakMap(), _Clickhouse_cacheManager = new WeakMap(), _Clickhouse_onChunkCb = new WeakMap(), _Clickhouse_redisClient = new WeakMap(), _Clickhouse_isFirstInsert = new WeakMap();

@@ -12,3 +12,3 @@ {

],
"version": "1.3.0",
"version": "1.3.1",
"license": "MIT",

@@ -15,0 +15,0 @@ "description": "Clickhouse client on TypeScript using nodejs ot redis caching queries",

@@ -22,91 +22,80 @@ # clickhouse-ts

async function initClient () {
const clickhouseInstance = new Clickhouse(
{
url: 'url',
port: 8443,
user: 'user',
password: 'password',
database: 'database',
ca: fs.readFileSync('cert.crt')
const clickhouseInstance = new Clickhouse(
{
url: 'url',
port: 8443,
user: 'user',
password: 'password',
database: 'database',
ca: fs.readFileSync('cert.crt')
},
{
cache: { // cache can be undefined
chunkTTLSeconds: 3600,
/* after this time chunk will be completed */
chunkResolverIntervalSeconds: 180,
/* interval of checking chunks */
chunkSizeLimit: 10_000,
/* count of rows in one chunk */
chunkResolveType: 'events'
/*
'events': on completed chunk emits event 'chunk'. You can save rows as you want
'autoInsert': on completed chunk inserts rows automatically
*/
},
{
cache: { // cache can be undefined
chunkTTLSeconds: 3600,
/* after this time chunk will be completed */
chunkResolverIntervalSeconds: 180,
/* interval of checking chunks */
chunkSizeLimit: 10_000,
/* count of rows in one chunk */
chunkResolveType: 'events'
/*
'events': on completed chunk emits event 'chunk'. You can save rows as you want
'autoInsert': on completed chunk inserts rows automatically
*/
},
defaultResponseFormat: 'JSON',
clickhouseOptions: {
/*
any clickhouse options
https://clickhouse.tech/docs/en/operations/settings/settings/
*/
send_progress_in_http_headers: '1'
}
defaultResponseFormat: 'JSON',
clickhouseOptions: {
/*
any clickhouse options
https://clickhouse.tech/docs/en/operations/settings/settings/
*/
send_progress_in_http_headers: '1'
}
)
await clickhouseInstance.useCaching('redis', new Redis())
clickhouseInstance.onChunk(rows => {
// do what you want
})
return {
query: clickhouseInstance.query,
insert: clickhouseInstance.insert
}
}
)
await clickhouseInstance.useCaching('redis', new Redis())
initClient().then(client => {
clickhouseInstance.onChunk(rows => {
// do what you want
})
client
.query('WITH now() as t SELECT t', {
responseFormat: 'TSV',
// ...another query options
})
.then(result => result.data.data)
.catch(e => serializeError(e))
clickhouseInstance
.query('WITH now() as t SELECT t', {
responseFormat: 'TSV',
// ...another query options
})
.then(result => result.data.data)
.catch(e => serializeError(e))
client.query(`
CREATE TABLE strings (
date DateTime('UTC'),
string String
) Engine = ReplacingMergeTree()
PARTITION BY toMonday(date)
ORDER BY (date, string)
`)
clickhouseInstance.query(`
CREATE TABLE strings (
date DateTime('UTC'),
string String
) Engine = ReplacingMergeTree()
PARTITION BY toMonday(date)
ORDER BY (date, string)
`)
const response = client
.insert(
'strings',
[{ date: '2021-01-01', string: 'str1' }, { date: '2021-01-02', string: 'str2' }],
{
responseFormat: 'CSVWithNames' // or another format
// another query options
}
)
.then(response => response)
.catch(e => serializeError(e))
response
/*
const response = clickhouseInstance
.insert(
'strings',
[{ date: '2021-01-01', string: 'str1' }, { date: '2021-01-02', string: 'str2' }],
{
r: 1,
result: {
cached: 2,
chunk: 'strings-1624746778066-4ebdfda07dc6e73a73a3c87490b8ebf0'
}
responseFormat: 'CSVWithNames' // or another format
// another query options
}
*/
})
)
.then(response => response)
.catch(e => serializeError(e))
response
/*
{
r: 1,
result: {
cached: 2,
chunk: 'strings-1624746778066-4ebdfda07dc6e73a73a3c87490b8ebf0'
}
}
*/
```
import { ClickhouseHttpClient } from '../httpClient/ClickhouseHttpClient'
import sqlstring from 'sqlstring'
import { CacheFactory } from '../caching/CacheFactory'
import { SystemCache } from '../caching/SystemCache'

@@ -19,2 +18,4 @@ import { RedisCache } from '../caching/RedisCache'

#onChunkCb: (rows: ClickhouseNamespace.InsertRows) => void
#redisClient?: Redis
#isFirstInsert: boolean

@@ -25,9 +26,6 @@ constructor(

) {
this.#isFirstInsert = true
this.#onChunkCb = () => undefined
this.#options = options
this.#httpClient = new ClickhouseHttpClient({ context, options: options.clickhouseOptions })
// this.#cacheClient = new CacheFactory(options.cache?.client).getCacheClient({
// chunkSize: options.cache?.chunkSize,
// chunkCollectorIntervalMs: options.cache?.chunkCollectorIntervalMs
// })

@@ -50,15 +48,2 @@ this.#cacheManager = this.#options.cache ?

undefined
// this.#cacheClient?.useSave(async (table, rows) => {
// const keys = Object.keys(rows[0]).join(',')
// const values = rows.map(row => `(${Object.values(row).map(v => sqlstring.escape(v)).join(',')})`).join(',')
// await this.#httpClient.request({
// params: { query: `INSERT INTO ${table} (${keys}) VALUES` },
// data: values
// })
// })
// if (this.#options.cache?.redis?.instance && this.#cacheClient instanceof RedisCache) {
// this.#cacheClient.useInstance(this.#options.cache!.redis!)
// }
}

@@ -78,2 +63,8 @@

if (this.#cacheManager) {
if (this.#isFirstInsert) {
console.log('first insert')
await this.#cacheManager.useRedisInstance(this.#redisClient!)
this.#isFirstInsert = false
}
const result = await this.#cacheManager

@@ -114,3 +105,3 @@ .cache(

public async useCaching(type: 'redis', client: Redis) {
public useCaching(type: 'redis', client: Redis) {
if (!this.#cacheManager) {

@@ -120,5 +111,3 @@ throw new Error('Cache manager is not initialized!')

if (type === 'redis') {
await this.#cacheManager.useRedisInstance(client)
}
this.#redisClient = client

@@ -125,0 +114,0 @@ this.#cacheManager.on('chunk', rows => this.#onChunkCb(rows))

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc