Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@axiomhq/js

Package Overview
Dependencies
Maintainers
7
Versions
13
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@axiomhq/js - npm Package Compare versions

Comparing version 0.1.2 to 0.1.3

8

dist/cjs/batch.js

@@ -72,6 +72,7 @@ "use strict";

this.flush = function () { return __awaiter(_this, void 0, void 0, function () {
var res;
var events, res;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
events = this.events.splice(0, this.events.length);
clearTimeout(this.nextFlush);

@@ -81,10 +82,9 @@ return [4 /*yield*/, this.activeFlush];

_a.sent();
if (this.events.length === 0) {
if (events.length === 0) {
this.lastFlush = new Date(); // we tried
return [2 /*return*/];
}
return [4 /*yield*/, this.ingestFn(this.id, this.events, this.options)];
return [4 /*yield*/, this.ingestFn(this.id, events, this.options)];
case 2:
res = _a.sent();
this.events = [];
this.lastFlush = new Date();

@@ -91,0 +91,0 @@ return [2 /*return*/, res];

@@ -67,2 +67,20 @@ "use strict";

_this.localPath = '/v1';
/**
* Ingest events into the provided dataset using raw data types, e.g: string, buffer or a stream.
*
* @param dataset - name of the dataset to ingest events into
* @param data - data to be ingested
* @param contentType - optional content type, defaults to JSON
* @param contentEncoding - optional content encoding, defaults to Identity
* @param options - optional ingest options
* @returns result a promise of ingest and its status, check: {@link IngestStatus}
*
* @example
* ```
* import { AxiomWithoutBatching } from '@axiomhq/js';
*
* const axiom = new AxiomWithoutBatching();
* ```
*
*/
_this.ingestRaw = function (dataset, data, contentType, contentEncoding, options) {

@@ -91,2 +109,15 @@ if (contentType === void 0) { contentType = ContentType.JSON; }

};
/**
* Executes APL query using the provided APL and returns the result
*
* @param apl - the apl query
* @param options - optional query options
* @returns result of the query, check: {@link QueryResult}
*
* @example
* ```
* await axiom.query("['dataset'] | count");
* ```
*
*/
_this.query = function (apl, options) {

@@ -108,2 +139,15 @@ var req = { apl: apl };

};
/**
* Executes APL query using the provided APL and returns the result.
* This is just an alias for the `query()` method, please use that instead.
*
* @param apl - the apl query
* @param options - optional query options
* @returns Promise<QueryResult>
*
* @example
* ```
* await axiom.aplQuery("['dataset'] | count");
* ```
*/
_this.aplQuery = function (apl, options) { return _this.query(apl, options); };

@@ -116,2 +160,10 @@ _this.datasets = new datasets_1.datasets.Service(options);

}(httpClient_1.default));
/**
* Axiom's client without batching events in the background.
* In most cases you'll want to use the {@link Axiom} client instead.
*
*
* @param options - The {@link ClientOptions} to configure authentication
*
*/
var AxiomWithoutBatching = /** @class */ (function (_super) {

@@ -122,2 +174,19 @@ __extends(AxiomWithoutBatching, _super);

}
/**
* Ingest event(s) asynchronously
*
* @param dataset - name of the dataset to ingest events into
* @param events - list of events to be ingested, could be a single object as well
* @param options - optional ingest options
* @returns the result of the ingest, check: {@link IngestStatus}
*
* @example
* ```
* import { AxiomWithoutBatching } from '@axiomhq/js';
*
* const axiom = new AxiomWithoutBatching();
* await axiom.ingest('dataset-name', [{ foo: 'bar' }])
* ```
*
*/
AxiomWithoutBatching.prototype.ingest = function (dataset, events, options) {

@@ -131,2 +200,9 @@ var array = Array.isArray(events) ? events : [events];

exports.AxiomWithoutBatching = AxiomWithoutBatching;
/**
* Axiom's default client that queues events in the background,
* sends them asynchronously to the server every 1s or every 1000 events.
*
* @param options - The options passed to the client
*
*/
var Axiom = /** @class */ (function (_super) {

@@ -137,2 +213,15 @@ __extends(Axiom, _super);

_this.batch = {};
/**
* Ingest events asynchronously
*
* @remarks
* Events passed to ingest method will be queued in a batch and sent
* in the background every second or every 1000 events.
*
* @param dataset - name of the dataset to ingest events into
* @param events - list of events to be ingested, could be a single object as well
* @param options - optional ingest options
* @returns void, as the events are sent in the background
*
*/
_this.ingest = function (dataset, events, options) {

@@ -149,2 +238,9 @@ var key = (0, batch_1.createBatchKey)(dataset, options);

};
/**
* Flushes all the events that have been queued in the background
*
* @remarks
* calling `await flush()` will wait for all the events to be sent to the server
* and is necessary to ensure data delivery.
*/
_this.flush = function () { return __awaiter(_this, void 0, void 0, function () {

@@ -176,3 +272,3 @@ var promises, key;

ContentType["CSV"] = "text/csv";
})(ContentType = exports.ContentType || (exports.ContentType = {}));
})(ContentType || (exports.ContentType = ContentType = {}));
var ContentEncoding;

@@ -182,3 +278,3 @@ (function (ContentEncoding) {

ContentEncoding["GZIP"] = "gzip";
})(ContentEncoding = exports.ContentEncoding || (exports.ContentEncoding = {}));
})(ContentEncoding || (exports.ContentEncoding = ContentEncoding = {}));
var AggregationOp;

@@ -203,3 +299,3 @@ (function (AggregationOp) {

AggregationOp["CountDistinctIf"] = "distinctif";
})(AggregationOp = exports.AggregationOp || (exports.AggregationOp = {}));
})(AggregationOp || (exports.AggregationOp = AggregationOp = {}));
var FilterOp;

@@ -230,3 +326,3 @@ (function (FilterOp) {

FilterOp["NotRegexp"] = "not-regexp";
})(FilterOp = exports.FilterOp || (exports.FilterOp = {}));
})(FilterOp || (exports.FilterOp = FilterOp = {}));
//# sourceMappingURL=client.js.map

@@ -49,3 +49,3 @@ "use strict";

datasets.Service = Service;
})(datasets = exports.datasets || (exports.datasets = {}));
})(datasets || (exports.datasets = datasets = {}));
//# sourceMappingURL=datasets.js.map
"use strict";
// CommonJS interop helper emitted by the TypeScript compiler.
// Re-exports property `k` of source module `m` on the target `o` (optionally
// renamed to `k2`). When property descriptors are supported, a getter is used
// so the re-export stays a live binding into `m`.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
// Fall back to a fresh enumerable getter when no descriptor exists, when the
// source is not an ES module exposing an accessor, or when the property is
// writable/configurable (so later mutations of m[k] are still observed).
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
// Legacy environments without Object.create: plain copy (no live binding).
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
// Implements `export * from "…"`: copies every own property of `m` onto
// `exports`, skipping `default` and any name already defined locally.
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.users = exports.datasets = void 0;
__exportStar(require("./client"), exports);
exports.users = exports.datasets = exports.FilterOp = exports.AggregationOp = exports.ContentEncoding = exports.ContentType = exports.Axiom = exports.AxiomWithoutBatching = void 0;
var client_1 = require("./client");
Object.defineProperty(exports, "AxiomWithoutBatching", { enumerable: true, get: function () { return client_1.AxiomWithoutBatching; } });
Object.defineProperty(exports, "Axiom", { enumerable: true, get: function () { return client_1.Axiom; } });
Object.defineProperty(exports, "ContentType", { enumerable: true, get: function () { return client_1.ContentType; } });
Object.defineProperty(exports, "ContentEncoding", { enumerable: true, get: function () { return client_1.ContentEncoding; } });
Object.defineProperty(exports, "AggregationOp", { enumerable: true, get: function () { return client_1.AggregationOp; } });
Object.defineProperty(exports, "FilterOp", { enumerable: true, get: function () { return client_1.FilterOp; } });
var datasets_1 = require("./datasets");

@@ -20,0 +12,0 @@ Object.defineProperty(exports, "datasets", { enumerable: true, get: function () { return datasets_1.datasets; } });

@@ -20,3 +20,3 @@ "use strict";

LimitScope["anonymous"] = "anonymous";
})(LimitScope = exports.LimitScope || (exports.LimitScope = {}));
})(LimitScope || (exports.LimitScope = LimitScope = {}));
var LimitType;

@@ -27,3 +27,3 @@ (function (LimitType) {

LimitType["ingest"] = "ingest";
})(LimitType = exports.LimitType || (exports.LimitType = {}));
})(LimitType || (exports.LimitType = LimitType = {}));
var Limit = /** @class */ (function () {

@@ -30,0 +30,0 @@ function Limit(scope, type, value, remaining, reset) {

@@ -35,3 +35,3 @@ "use strict";

users.Service = Service;
})(users = exports.users || (exports.users = {}));
})(users || (exports.users = users = {}));
//# sourceMappingURL=users.js.map

@@ -38,10 +38,10 @@ export function createBatchKey(id, options) {

flush = async () => {
const events = this.events.splice(0, this.events.length);
clearTimeout(this.nextFlush);
await this.activeFlush;
if (this.events.length === 0) {
if (events.length === 0) {
this.lastFlush = new Date(); // we tried
return;
}
const res = await this.ingestFn(this.id, this.events, this.options);
this.events = [];
const res = await this.ingestFn(this.id, events, this.options);
this.lastFlush = new Date();

@@ -48,0 +48,0 @@ return res;

@@ -14,2 +14,20 @@ import { datasets } from './datasets';

}
/**
* Ingest events into the provided dataset using raw data types, e.g: string, buffer or a stream.
*
* @param dataset - name of the dataset to ingest events into
* @param data - data to be ingested
* @param contentType - optional content type, defaults to JSON
* @param contentEncoding - optional content encoding, defaults to Identity
* @param options - optional ingest options
* @returns result a promise of ingest and its status, check: {@link IngestStatus}
*
* @example
* ```
* import { AxiomWithoutBatching } from '@axiomhq/js';
*
* const axiom = new AxiomWithoutBatching();
* ```
*
*/
ingestRaw = (dataset, data, contentType = ContentType.JSON, contentEncoding = ContentEncoding.Identity, options) => this.client.post(this.localPath + '/datasets/' + dataset + '/ingest', {

@@ -32,2 +50,15 @@ headers: {

});
/**
* Executes APL query using the provided APL and returns the result
*
* @param apl - the apl query
* @param options - optional query options
* @returns result of the query, check: {@link QueryResult}
*
* @example
* ```
* await axiom.query("['dataset'] | count");
* ```
*
*/
query = (apl, options) => {

@@ -49,5 +80,43 @@ const req = { apl: apl };

};
/**
* Executes APL query using the provided APL and returns the result.
* This is just an alias for the `query()` method, please use that instead.
*
* @param apl - the apl query
* @param options - optional query options
* @returns Promise<QueryResult>
*
* @example
* ```
* await axiom.aplQuery("['dataset'] | count");
* ```
*/
aplQuery = (apl, options) => this.query(apl, options);
}
/**
* Axiom's client without batching events in the background.
* In most cases you'll want to use the {@link Axiom} client instead.
*
*
* @param options - The {@link ClientOptions} to configure authentication
*
*/
export class AxiomWithoutBatching extends BaseClient {
/**
* Ingest event(s) asynchronously
*
* @param dataset - name of the dataset to ingest events into
* @param events - list of events to be ingested, could be a single object as well
* @param options - optional ingest options
* @returns the result of the ingest, check: {@link IngestStatus}
*
* @example
* ```
* import { AxiomWithoutBatching } from '@axiomhq/js';
*
* const axiom = new AxiomWithoutBatching();
* await axiom.ingest('dataset-name', [{ foo: 'bar' }])
* ```
*
*/
ingest(dataset, events, options) {

@@ -59,4 +128,24 @@ const array = Array.isArray(events) ? events : [events];

}
/**
* Axiom's default client that queues events in the background,
* sends them asynchronously to the server every 1s or every 1000 events.
*
* @param options - The options passed to the client
*
*/
export class Axiom extends BaseClient {
batch = {};
/**
* Ingest events asynchronously
*
* @remarks
* Events passed to ingest method will be queued in a batch and sent
* in the background every second or every 1000 events.
*
* @param dataset - name of the dataset to ingest events into
* @param events - list of events to be ingested, could be a single object as well
* @param options - optional ingest options
* @returns void, as the events are sent in the background
*
*/
ingest = (dataset, events, options) => {

@@ -73,2 +162,9 @@ const key = createBatchKey(dataset, options);

};
/**
* Flushes all the events that have been queued in the background
*
* @remarks
* calling `await flush()` will wait for all the events to be sent to the server
* and is necessary to ensure data delivery.
*/
flush = async () => {

@@ -75,0 +171,0 @@ let promises = [];

@@ -1,4 +0,4 @@

export * from './client';
export { AxiomWithoutBatching, Axiom, ContentType, ContentEncoding, AggregationOp, FilterOp } from './client';
export { datasets } from './datasets';
export { users } from './users';
//# sourceMappingURL=index.js.map

@@ -11,10 +11,86 @@ /// <reference types="node" />

constructor(options?: ClientOptions);
/**
* Ingest events into the provided dataset using raw data types, e.g: string, buffer or a stream.
*
* @param dataset - name of the dataset to ingest events into
* @param data - data to be ingested
* @param contentType - optional content type, defaults to JSON
* @param contentEncoding - optional content encoding, defaults to Identity
* @param options - optional ingest options
* @returns result a promise of ingest and its status, check: {@link IngestStatus}
*
* @example
* ```
* import { AxiomWithoutBatching } from '@axiomhq/js';
*
* const axiom = new AxiomWithoutBatching();
* ```
*
*/
ingestRaw: (dataset: string, data: string | Buffer | ReadableStream, contentType?: ContentType, contentEncoding?: ContentEncoding, options?: IngestOptions) => Promise<IngestStatus>;
queryLegacy: (dataset: string, query: QueryLegacy, options?: QueryOptions) => Promise<QueryLegacyResult>;
/**
* Executes APL query using the provided APL and returns the result
*
* @param apl - the apl query
* @param options - optional query options
* @returns result of the query, check: {@link QueryResult}
*
* @example
* ```
* await axiom.query("['dataset'] | count");
* ```
*
*/
query: (apl: string, options?: QueryOptions) => Promise<QueryResult>;
/**
* Executes APL query using the provided APL and returns the result.
* This is just an alias for the `query()` method, please use that instead.
*
* @param apl - the apl query
* @param options - optional query options
* @returns Promise<QueryResult>
*
* @example
* ```
* await axiom.aplQuery("['dataset'] | count");
* ```
*/
aplQuery: (apl: string, options?: QueryOptions) => Promise<QueryResult>;
}
/**
* Axiom's client without batching events in the background.
* In most cases you'll want to use the {@link Axiom} client instead.
*
*
* @param options - The {@link ClientOptions} to configure authentication
*
*/
export declare class AxiomWithoutBatching extends BaseClient {
/**
* Ingest event(s) asynchronously
*
* @param dataset - name of the dataset to ingest events into
* @param events - list of events to be ingested, could be a single object as well
* @param options - optional ingest options
* @returns the result of the ingest, check: {@link IngestStatus}
*
* @example
* ```
* import { AxiomWithoutBatching } from '@axiomhq/js';
*
* const axiom = new AxiomWithoutBatching();
* await axiom.ingest('dataset-name', [{ foo: 'bar' }])
* ```
*
*/
ingest(dataset: string, events: Array<object> | object, options?: IngestOptions): Promise<IngestStatus>;
}
/**
* Axiom's default client that queues events in the background,
* sends them asynchronously to the server every 1s or every 1000 events.
*
* @param options - The options passed to the client
*
*/
export declare class Axiom extends BaseClient {

@@ -24,3 +100,23 @@ batch: {

};
/**
* Ingest events asynchronously
*
* @remarks
* Events passed to ingest method will be queued in a batch and sent
* in the background every second or every 1000 events.
*
* @param dataset - name of the dataset to ingest events into
* @param events - list of events to be ingested, could be a single object as well
* @param options - optional ingest options
* @returns void, as the events are sent in the background
*
*/
ingest: (dataset: string, events: Array<object> | object, options?: IngestOptions) => void;
/**
* Flushes all the events that have been queued in the background
*
* @remarks
* calling `await flush()` will wait for all the events to be sent to the server
* and is necessary to ensure data delivery.
*/
flush: () => Promise<void>;

@@ -37,13 +133,48 @@ }

}
/**
* Ingest options
*
*/
export interface IngestOptions {
/**
* name of the field that contains the timestamp
*/
timestampField?: string;
/**
* format of the timestamp
*/
timestampFormat?: string;
/**
* delimiter used in the csv file
*/
csvDelimiter?: string;
}
/**
* Query result
*
*/
export interface IngestStatus {
/**
* number of ingested events
*/
ingested: number;
/**
* number of failed events
*/
failed: number;
/**
* list of failed events
*/
failures?: Array<IngestFailure>;
/**
* number of processed bytes
*/
processedBytes: number;
/**
* number of blocks created
*/
blocksCreated: number;
/**
* length of the write ahead log
*/
walLength: number;

@@ -50,0 +181,0 @@ }

import { FetchClient } from './fetchClient';
/**
* ClientOptions is used to configure the HTTPClient and provide the necessary
* authentication information.
*
* @remarks
*
 * If no options are passed to the client, it will fall back to reading its values from the environment variables:
* AXIOM_TOKEN and AXIOM_ORG_ID for token and orgId respectively.
*
* @example
* ```
* const axiom = new Axiom({
* token: "my-token",
* orgId: "my-org-id",
* })
* ```
*/
export interface ClientOptions {
/**
* an API or personal token to use for authentication, you can get one
 * from {@link https://app.axiom.co/profile | Axiom settings}.
*
* @defaultValue reads from the AXIOM_TOKEN environment variable
*/
token?: string;
/**
* the URL of the Axiom API, defaults to https://api.axiom.co. You should not
* need to change this unless you are using a self-hosted version of Axiom.
*
* @defaultValue reads from the AXIOM_URL environment variable
*/
url?: string;
/**
* the ID of the organization to use, you can get this from Axiom settings page of your
* organization. This is only needed if you are using a personal token.
*
* @defaultValue reads from the AXIOM_ORG_ID environment variable
*/
orgId?: string;

@@ -6,0 +41,0 @@ }

@@ -1,2 +0,2 @@

export * from './client';
export { AxiomWithoutBatching, Axiom, ContentType, ContentEncoding, IngestOptions, IngestStatus, IngestFailure, QueryOptionsBase, QueryOptions, QueryLegacy, Aggregation, AggregationOp, Filter, FilterOp, Order, Projection, VirtualColumn, QueryLegacyResult, QueryResult, Timeseries, Interval, EntryGroup, EntryGroupAgg, Entry, Status, Message, Query } from './client';
export { ClientOptions } from './httpClient';

@@ -3,0 +3,0 @@ export { datasets } from './datasets';

{
"name": "@axiomhq/js",
"description": "The official javascript bindings for the Axiom API",
"version": "0.1.2",
"version": "0.1.3",
"author": "Axiom, Inc.",

@@ -42,6 +42,6 @@ "license": "MIT",

"exports": {
"types": "./dist/types/index.d.ts",
"import": "./dist/esm/index.js",
"require": "./dist/cjs/index.js",
"default": "./dist/cjs/index.js",
"types": "./dist/types/index.d.ts"
"default": "./dist/cjs/index.js"
},

@@ -48,0 +48,0 @@ "scripts": {

import { IngestOptions, IngestStatus } from './client';
export type IngestFunction = (id: string, events: Array<object> | object, options?: IngestOptions) => Promise<IngestStatus>;
export type IngestFunction = (
id: string,
events: Array<object> | object,
options?: IngestOptions,
) => Promise<IngestStatus>;

@@ -47,6 +51,8 @@ export function createBatchKey(id: string, options?: IngestOptions): string {

flush = async (): Promise<IngestStatus | undefined> => {
const events = this.events.splice(0, this.events.length);
clearTimeout(this.nextFlush);
await this.activeFlush;
if (this.events.length === 0) {
if (events.length === 0) {
this.lastFlush = new Date(); // we tried

@@ -56,4 +62,3 @@ return;

const res = await this.ingestFn(this.id, this.events, this.options);
this.events = [];
const res = await this.ingestFn(this.id, events, this.options);
this.lastFlush = new Date();

@@ -60,0 +65,0 @@ return res;

@@ -17,2 +17,20 @@ import { datasets } from './datasets';

/**
* Ingest events into the provided dataset using raw data types, e.g: string, buffer or a stream.
*
* @param dataset - name of the dataset to ingest events into
* @param data - data to be ingested
* @param contentType - optional content type, defaults to JSON
* @param contentEncoding - optional content encoding, defaults to Identity
* @param options - optional ingest options
* @returns result a promise of ingest and its status, check: {@link IngestStatus}
*
* @example
* ```
* import { AxiomWithoutBatching } from '@axiomhq/js';
*
* const axiom = new AxiomWithoutBatching();
* ```
*
*/
ingestRaw = (

@@ -53,2 +71,15 @@ dataset: string,

/**
* Executes APL query using the provided APL and returns the result
*
* @param apl - the apl query
* @param options - optional query options
* @returns result of the query, check: {@link QueryResult}
*
* @example
* ```
* await axiom.query("['dataset'] | count");
* ```
*
*/
query = (apl: string, options?: QueryOptions): Promise<QueryResult> => {

@@ -75,6 +106,44 @@ const req: Query = { apl: apl };

/**
* Executes APL query using the provided APL and returns the result.
* This is just an alias for the `query()` method, please use that instead.
*
* @param apl - the apl query
* @param options - optional query options
* @returns Promise<QueryResult>
*
* @example
* ```
* await axiom.aplQuery("['dataset'] | count");
* ```
*/
aplQuery = (apl: string, options?: QueryOptions): Promise<QueryResult> => this.query(apl, options);
}
/**
* Axiom's client without batching events in the background.
* In most cases you'll want to use the {@link Axiom} client instead.
*
*
* @param options - The {@link ClientOptions} to configure authentication
*
*/
export class AxiomWithoutBatching extends BaseClient {
/**
* Ingest event(s) asynchronously
*
* @param dataset - name of the dataset to ingest events into
* @param events - list of events to be ingested, could be a single object as well
* @param options - optional ingest options
* @returns the result of the ingest, check: {@link IngestStatus}
*
* @example
* ```
* import { AxiomWithoutBatching } from '@axiomhq/js';
*
* const axiom = new AxiomWithoutBatching();
* await axiom.ingest('dataset-name', [{ foo: 'bar' }])
* ```
*
*/
ingest(dataset: string, events: Array<object> | object, options?: IngestOptions): Promise<IngestStatus> {

@@ -87,5 +156,25 @@ const array = Array.isArray(events) ? events : [events];

/**
* Axiom's default client that queues events in the background,
* sends them asynchronously to the server every 1s or every 1000 events.
*
* @param options - The options passed to the client
*
*/
export class Axiom extends BaseClient {
batch: { [id: string]: Batch } = {};
/**
* Ingest events asynchronously
*
* @remarks
* Events passed to ingest method will be queued in a batch and sent
* in the background every second or every 1000 events.
*
* @param dataset - name of the dataset to ingest events into
* @param events - list of events to be ingested, could be a single object as well
* @param options - optional ingest options
* @returns void, as the events are sent in the background
*
*/
ingest = (dataset: string, events: Array<object> | object, options?: IngestOptions) => {

@@ -107,2 +196,9 @@ const key = createBatchKey(dataset, options);

/**
* Flushes all the events that have been queued in the background
*
* @remarks
* calling `await flush()` will wait for all the events to be sent to the server
* and is necessary to ensure data delivery.
*/
flush = async (): Promise<void> => {

@@ -128,14 +224,49 @@ let promises: Array<Promise<IngestStatus | void>> = [];

/**
* Ingest options
*
*/
export interface IngestOptions {
/**
* name of the field that contains the timestamp
*/
timestampField?: string;
/**
* format of the timestamp
*/
timestampFormat?: string;
/**
* delimiter used in the csv file
*/
csvDelimiter?: string;
}
/**
* Query result
*
*/
export interface IngestStatus {
/**
* number of ingested events
*/
ingested: number;
/**
* number of failed events
*/
failed: number;
/**
* list of failed events
*/
failures?: Array<IngestFailure>;
/**
* number of processed bytes
*/
processedBytes: number;
/**
* number of blocks created
*/
blocksCreated: number;
/**
* length of the write ahead log
*/
walLength: number;

@@ -142,0 +273,0 @@ }

@@ -6,5 +6,40 @@ import { FetchClient } from './fetchClient';

/**
* ClientOptions is used to configure the HTTPClient and provide the necessary
* authentication information.
*
* @remarks
*
 * If no options are passed to the client, it will fall back to reading its values from the environment variables:
* AXIOM_TOKEN and AXIOM_ORG_ID for token and orgId respectively.
*
* @example
* ```
* const axiom = new Axiom({
* token: "my-token",
* orgId: "my-org-id",
* })
* ```
*/
export interface ClientOptions {
/**
* an API or personal token to use for authentication, you can get one
 * from {@link https://app.axiom.co/profile | Axiom settings}.
*
* @defaultValue reads from the AXIOM_TOKEN environment variable
*/
token?: string;
/**
* the URL of the Axiom API, defaults to https://api.axiom.co. You should not
* need to change this unless you are using a self-hosted version of Axiom.
*
* @defaultValue reads from the AXIOM_URL environment variable
*/
url?: string;
/**
* the ID of the organization to use, you can get this from Axiom settings page of your
* organization. This is only needed if you are using a personal token.
*
* @defaultValue reads from the AXIOM_ORG_ID environment variable
*/
orgId?: string;

@@ -11,0 +46,0 @@ }

@@ -1,4 +0,4 @@

export * from './client';
export { AxiomWithoutBatching, Axiom, ContentType, ContentEncoding, IngestOptions, IngestStatus, IngestFailure, QueryOptionsBase, QueryOptions, QueryLegacy, Aggregation, AggregationOp, Filter, FilterOp, Order, Projection, VirtualColumn, QueryLegacyResult, QueryResult, Timeseries, Interval, EntryGroup, EntryGroupAgg, Entry, Status, Message, Query } from './client';
export { ClientOptions } from './httpClient';
export { datasets } from './datasets';
export { users } from './users';

@@ -1,67 +0,120 @@

import { describe, expect, it, jest } from '@jest/globals'
import { describe, expect, it, jest } from '@jest/globals';
import { Batch, IngestFunction } from '../../src/batch';
import { IngestStatus } from '../../src/client';
async function sleep(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
return new Promise((resolve) => setTimeout(resolve, ms));
}
describe('Batch', () => {
it('sends events after 1s', async () => {
jest.useFakeTimers();
const sendFn = jest.fn() as IngestFunction;
it('sends events after 1s', async () => {
jest.useFakeTimers();
const sendFn = jest.fn() as IngestFunction;
const batch = new Batch(sendFn, 'my-dataset', { timestampField: 'foo' });
batch.ingest({ foo: 'bar' });
batch.ingest({ foo: 'baz' });
const batch = new Batch(sendFn, 'my-dataset', { timestampField: 'foo' });
batch.ingest({ foo: 'bar' });
batch.ingest({ foo: 'baz' });
expect(sendFn).not.toHaveBeenCalled();
jest.runAllTimers();
jest.useRealTimers();
await sleep(100); // async code yay
expect(sendFn).not.toHaveBeenCalled();
jest.runAllTimers();
jest.useRealTimers();
await sleep(100); // async code yay
expect(sendFn).toHaveBeenCalledTimes(1);
});
expect(sendFn).toHaveBeenCalledWith('my-dataset', [{ foo: 'bar' }, { foo: 'baz' }], { timestampField: 'foo' });
expect(batch.events).toEqual([]);
});
it('sends events after 1k events', async () => {
const sendFn = jest.fn() as IngestFunction;;
it('sends events after 1k events', async () => {
const sendFn = jest.fn() as IngestFunction;
const batch = new Batch(sendFn, 'my-dataset', { timestampField: 'foo' });
const batch = new Batch(sendFn, 'my-dataset', { timestampField: 'foo' });
for (let i = 0; i < 1000; i++) {
batch.ingest({ foo: 'bar' });
}
for (let i = 0; i < 1000; i++) {
batch.ingest({ foo: 'bar' });
}
await sleep(100); // just make sure we have enough time
expect(sendFn).toHaveBeenCalledTimes(1);
});
await sleep(100); // just make sure we have enough time
expect(sendFn).toHaveBeenCalledTimes(1);
});
it('sends events after 1s when ingesting one event every 100ms', async () => {
jest.useFakeTimers();
const sendFn = jest.fn() as IngestFunction;;
it('sends events after 1s when ingesting one event every 100ms', async () => {
jest.useFakeTimers();
const sendFn = jest.fn() as IngestFunction;
const batch = new Batch(sendFn, 'my-dataset', { timestampField: 'foo' });
const batch = new Batch(sendFn, 'my-dataset', { timestampField: 'foo' });
for (let i = 0; i < 10; i++) {
batch.ingest({ foo: 'bar' });
jest.advanceTimersByTime(120);
}
for (let i = 0; i < 10; i++) {
batch.ingest({ foo: 'bar' });
jest.advanceTimersByTime(120);
}
jest.useRealTimers();
await sleep(100); // just make sure we have enough time
expect(sendFn).toHaveBeenCalledTimes(1);
});
jest.useRealTimers();
await sleep(100); // just make sure we have enough time
expect(sendFn).toHaveBeenCalledTimes(1);
});
it('sends events on flush', async () => {
const sendFn = jest.fn() as IngestFunction;;
it('sends events on flush', async () => {
const sendFn = jest.fn() as IngestFunction;
const batch = new Batch(sendFn, 'my-dataset', { timestampField: 'foo' });
const batch = new Batch(sendFn, 'my-dataset', { timestampField: 'foo' });
for (let i = 0; i < 10; i++) {
batch.ingest({ foo: 'bar' });
}
for (let i = 0; i < 10; i++) {
batch.ingest({ foo: 'bar' });
}
expect(sendFn).toHaveBeenCalledTimes(0);
await batch.flush();
expect(sendFn).toHaveBeenCalledTimes(1);
});
expect(sendFn).toHaveBeenCalledTimes(0);
await batch.flush();
expect(sendFn).toHaveBeenCalledTimes(1);
});
it("doesn't drop events after multiple flushes in a row", async () => {
// This test is a regression test for a bug that could lead to dropped
// events.
//
// A timeline:
// 1. The first event is ingested and flush is called automatically
// 2. A second event is ingested, flush is called again and waits for
// the first flush to complete
// 3. The first flush ingests the first event and clears the events array
// 4. The second flush continues execution but the events array is empty
// We want to wait for the first ingest to be called before adding the
// second event. Without waiting we'd add our event to the events array ref
// and the first flush would ingest both events.
let sentEventsCalled: (value: any) => void;
let sentEventsCalledPromise = new Promise((resolve) => (sentEventsCalled = resolve));
let sentEvents: Array<object> = [];
const sendFn = async (_id: string, events: Array<object> | object) => {
sentEventsCalled({});
if (Array.isArray(events)) {
sentEvents = sentEvents.concat(events);
} else {
sentEvents.push(events);
}
return {};
};
// @ts-ignore
const batch = new Batch(sendFn, 'my-dataset', { timestampField: 'foo' });
let secondFlushDone: Promise<IngestStatus | void>;
// 1. Ingest one event
batch.events.push({ foo: 'bar' });
// 2. Flush that event and wait for it to be sent (in reality this would be
// network time)
batch.activeFlush = batch.flush();
await sentEventsCalledPromise;
// 3. Ingest a second event
batch.events.push({ foo: 'baz' });
// 4. Flush the second event
secondFlushDone = batch.activeFlush = batch.flush();
// Wait for the second flush to complete (it'll abort early because of the
// empty array).
await secondFlushDone;
// Make sure we ingested both events
expect(sentEvents).toEqual([{ foo: 'bar' }, { foo: 'baz' }]);
});
});

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc