Launch Week Day 5: Introducing Reachability for PHP.Learn More
Socket
Book a DemoSign in
Socket

axiom-ai

Package Overview
Dependencies
Maintainers
1
Versions
4
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

axiom-ai - npm Package Compare versions

Comparing version
1.1.0
to
2.0.0
+21
dist/shared.d.ts
/**
 * Common contract for clients that send events to an Axiom dataset.
 * Implemented below by ImmediateAxiomClient (send-as-you-go) and
 * BatchedAxiomClient (buffer and flush).
 */
export interface AxiomClient {
/** Sends a single event or an array of events to the configured dataset. */
ingestEvents(events: Array<object> | object): Promise<void>;
/** Ensures any buffered events have been handed to Axiom. */
flush(): Promise<void>;
}
/**
 * AxiomClient that forwards every ingestEvents() call straight to Axiom
 * with no local buffering; flush() is therefore a no-op.
 */
export declare class ImmediateAxiomClient implements AxiomClient {
private readonly client;
private readonly dataset;
/** `token` may be undefined, in which case the Axiom client resolves it from its environment. */
constructor(token: string | undefined, dataset: string);
ingestEvents(events: Array<object> | object): Promise<void>;
flush(): Promise<void>;
}
/**
 * AxiomClient that accumulates events in an in-memory batch and sends them
 * either when the batch reaches a size threshold or via a throttled,
 * interval-based flush (see the implementation's FLUSH_SIZE / FLUSH_INTERVAL).
 */
export declare class BatchedAxiomClient implements AxiomClient {
private readonly client;
private readonly dataset;
// Pending events waiting to be sent.
private batch;
// Time-throttled wrapper around flush().
private throttledFlush;
// Promise of the flush currently in flight, if any.
private activeFlush;
/** `token` may be undefined, in which case the Axiom client resolves it from its environment. */
constructor(token: string | undefined, dataset: string);
ingestEvents(events: Array<object> | object): Promise<void>;
flush(): Promise<void>;
}
"use strict";
// TypeScript compiler-emitted helper: emulates async/await for down-level
// targets by driving a generator function. Each yielded value is "adopted"
// into a Promise and, on settlement, stepped back into the generator until
// it completes (resolving) or throws (rejecting).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a yielded value in the Promise implementation P unless it already is one.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Either finish with the generator's return value or await the next yielded value.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
// TypeScript compiler-emitted helper: interop shim so a plain CommonJS
// module can be consumed via `.default` like an ES-module default export.
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.BatchedAxiomClient = exports.ImmediateAxiomClient = void 0;
const axiom_node_1 = __importDefault(require("@axiomhq/axiom-node"));
function throttle(fn, wait) {
let lastFn, lastTime;
return function () {
const context = this, args = arguments;
// First call, set lastTime
if (lastTime == null) {
lastTime = Date.now();
}
clearTimeout(lastFn);
lastFn = setTimeout(() => {
if (Date.now() - lastTime >= wait) {
fn.apply(context, args);
lastTime = Date.now();
}
}, Math.max(wait - (Date.now() - lastTime), 0));
};
}
// AxiomClient that sends each ingestEvents() call straight to Axiom;
// nothing is buffered locally.
class ImmediateAxiomClient {
    constructor(token, dataset) {
        this.dataset = dataset;
        this.client = new axiom_node_1.default({ token });
    }
    // Forward the events directly to the Axiom ingest API.
    async ingestEvents(events) {
        await this.client.ingestEvents(this.dataset, events);
    }
    // Nothing is buffered, so there is nothing to flush.
    async flush() {
    }
}
exports.ImmediateAxiomClient = ImmediateAxiomClient;
// Throttle window: a batch is flushed at most once per this many millis.
const FLUSH_INTERVAL = 1000;
// Size trigger: flush as soon as this many events have accumulated.
const FLUSH_SIZE = 1000;
// AxiomClient that buffers events in memory and ships them in batches:
// as soon as FLUSH_SIZE events have accumulated, or (via the throttled
// flush) after FLUSH_INTERVAL millis.
class BatchedAxiomClient {
    constructor(token, dataset) {
        this.client = new axiom_node_1.default({ token });
        this.dataset = dataset;
        this.batch = [];
        this.activeFlush = null;
        this.throttledFlush = throttle(this.flush.bind(this), FLUSH_INTERVAL);
    }
    // Ingests events into Axiom asynchronously every FLUSH_SIZE events or
    // FLUSH_INTERVAL millis
    async ingestEvents(events) {
        if (Array.isArray(events)) {
            this.batch.push(...events);
        }
        else {
            this.batch.push(events);
        }
        if (this.batch.length >= FLUSH_SIZE) {
            // Await so a failed ingest surfaces to the caller instead of
            // becoming an unhandled rejection.
            await this.flush();
        }
        else {
            this.throttledFlush();
        }
    }
    async flush() {
        // Wait for an in-flight flush first so batches arrive in order.
        if (this.activeFlush) {
            await this.activeFlush;
        }
        if (this.batch.length === 0) {
            return;
        }
        // Snapshot and swap the batch *before* the network call; events pushed
        // while the request is in flight land in the fresh array instead of
        // being dropped by a post-await reset.
        const events = this.batch;
        this.batch = [];
        this.activeFlush = (async () => {
            try {
                await this.client.ingestEvents(this.dataset, events);
            }
            finally {
                this.activeFlush = null;
            }
        })();
        // Await the send so `await flush()` (e.g. in a beforeExit hook)
        // guarantees the events actually left the process.
        await this.activeFlush;
    }
}
exports.BatchedAxiomClient = BatchedAxiomClient;
import * as dotenv from 'dotenv';
import { Configuration, OpenAIApi } from "openai";
import { withAxiom } from "axiom-ai/openai";
// Example: wrap an OpenAIApi instance with Axiom instrumentation and run
// a single completion; the event is logged to Axiom as it happens.
(async function() {
// Load OPENAI_API_KEY (and any Axiom settings) from a .env file.
dotenv.config()
const configuration = new Configuration({
apiKey: process.env.OPENAI_API_KEY,
});
// sendType "immediate" sends each event to Axiom right away (no batching),
// so no flush before exit is needed.
const { openai } = withAxiom(new OpenAIApi(configuration), { sendType: "immediate" });
const completion = await openai.createCompletion({
model: "text-davinci-003",
prompt: "Hello world",
});
console.log(completion.data.choices[0].text);
})()
import Client from "@axiomhq/axiom-node";
/**
 * Returns a throttled wrapper around `fn`: invocations are coalesced so that
 * `fn` runs at most once per `wait` milliseconds, firing on the trailing
 * edge of each burst of calls.
 */
function throttle(fn: Function, wait: number) {
  let pendingTimer: ReturnType<typeof setTimeout>;
  let lastRun: number;
  return function (this: any, ...callArgs: any[]) {
    // First invocation ever: start the clock now.
    if (lastRun == null) {
      lastRun = Date.now();
    }
    // A newer call supersedes any timer that is still pending.
    clearTimeout(pendingTimer);
    const remaining = Math.max(wait - (Date.now() - lastRun), 0);
    pendingTimer = setTimeout(() => {
      // Only fire if a full `wait` interval has elapsed since the last run.
      if (Date.now() - lastRun >= wait) {
        fn.apply(this, callArgs);
        lastRun = Date.now();
      }
    }, remaining);
  };
}
/**
 * Common contract for clients that send events to an Axiom dataset.
 * Implemented by ImmediateAxiomClient (send-as-you-go) and
 * BatchedAxiomClient (buffer and flush).
 */
export interface AxiomClient {
/** Sends a single event or an array of events to the configured dataset. */
ingestEvents(events: Array<object> | object): Promise<void>
/** Ensures any buffered events have been handed to Axiom. */
flush(): Promise<void>
}
/**
 * AxiomClient that sends each ingestEvents() call straight to Axiom;
 * nothing is buffered locally.
 */
export class ImmediateAxiomClient implements AxiomClient {
  private readonly client: Client;
  private readonly dataset: string;

  constructor(token: string | undefined, dataset: string) {
    this.dataset = dataset;
    this.client = new Client({ token });
  }

  /** Forward the events directly to the Axiom ingest API. */
  public async ingestEvents(events: Array<object> | object) {
    await this.client.ingestEvents(this.dataset, events);
  }

  /** Nothing is buffered, so there is nothing to flush. */
  public async flush() {
  }
}
// Throttle window: a batch is flushed at most once per this many millis.
const FLUSH_INTERVAL = 1000;
// Size trigger: flush as soon as this many events have accumulated.
const FLUSH_SIZE = 1000;
/**
 * AxiomClient that buffers events in memory and ships them in batches:
 * as soon as FLUSH_SIZE events have accumulated, or (via the throttled
 * flush) after FLUSH_INTERVAL millis.
 */
export class BatchedAxiomClient implements AxiomClient {
  private readonly client: Client;
  private readonly dataset: string;
  private batch: object[] = [];
  private throttledFlush = throttle(this.flush.bind(this), FLUSH_INTERVAL);
  private activeFlush: Promise<void> | null = null;

  constructor(token: string | undefined, dataset: string) {
    this.client = new Client({ token });
    this.dataset = dataset;
  }

  // Ingests events into Axiom asynchronously every FLUSH_SIZE events or
  // FLUSH_INTERVAL millis
  public async ingestEvents(events: Array<object> | object) {
    if (Array.isArray(events)) {
      this.batch.push(...events);
    } else {
      this.batch.push(events);
    }
    if (this.batch.length >= FLUSH_SIZE) {
      // Await so a failed ingest surfaces to the caller instead of
      // becoming an unhandled rejection.
      await this.flush();
    } else {
      this.throttledFlush();
    }
  }

  public async flush() {
    // Wait for an in-flight flush first so batches arrive in order.
    if (this.activeFlush) {
      await this.activeFlush;
    }
    if (this.batch.length === 0) {
      return;
    }
    // Snapshot and swap the batch *before* the network call; events pushed
    // while the request is in flight land in the fresh array instead of
    // being dropped by a post-await reset.
    const events = this.batch;
    this.batch = [];
    this.activeFlush = (async () => {
      try {
        await this.client.ingestEvents(this.dataset, events);
      } finally {
        this.activeFlush = null;
      }
    })();
    // Await the send so `await flush()` (e.g. in a beforeExit hook)
    // guarantees the events actually left the process.
    await this.activeFlush;
  }
}
+5
-1

@@ -7,3 +7,7 @@ import { OpenAIApi } from "openai";

excludeChoices?: boolean;
sendType?: "batch" | "immediate";
}
export declare function withAxiom(openai: OpenAIApi, opts?: WithAxiomOptions): OpenAIApi;
export declare function withAxiom(openai: OpenAIApi, opts?: WithAxiomOptions): {
openai: OpenAIApi;
flush: Function;
};
+13
-10

@@ -11,11 +11,14 @@ "use strict";

};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.withAxiom = void 0;
const axiom_node_1 = __importDefault(require("@axiomhq/axiom-node"));
const shared_1 = require("./shared");
function withAxiom(openai, opts) {
const axiom = new axiom_node_1.default({ token: opts === null || opts === void 0 ? void 0 : opts.token });
const dataset = (opts === null || opts === void 0 ? void 0 : opts.dataset) || process.env.AXIOM_DATASET;
let axiom;
if ((opts === null || opts === void 0 ? void 0 : opts.sendType) === "immediate") {
axiom = new shared_1.ImmediateAxiomClient(opts === null || opts === void 0 ? void 0 : opts.token, dataset);
}
else {
axiom = new shared_1.BatchedAxiomClient(opts === null || opts === void 0 ? void 0 : opts.token, dataset);
}
const createCompletion = openai.createCompletion;

@@ -35,3 +38,3 @@ openai.createCompletion = (request, options) => __awaiter(this, void 0, void 0, function* () {

catch (e) {
yield axiom.ingestEvents(dataset, {
yield axiom.ingestEvents({
_time: start.toISOString(),

@@ -50,3 +53,3 @@ type: "completion",

transformedResponse.created = new Date(transformedResponse.created * 1000).toISOString();
yield axiom.ingestEvents(dataset, {
yield axiom.ingestEvents({
_time: start.toISOString(),

@@ -74,3 +77,3 @@ type: "completion",

catch (e) {
yield axiom.ingestEvents(dataset, {
yield axiom.ingestEvents({
_time: start.toISOString(),

@@ -89,3 +92,3 @@ type: "chatCompletion",

transformedResponse.created = new Date(transformedResponse.created * 1000).toISOString();
yield axiom.ingestEvents(dataset, {
yield axiom.ingestEvents({
_time: start.toISOString(),

@@ -99,4 +102,4 @@ type: "chatCompletion",

});
return openai;
return { openai, flush: axiom.flush.bind(axiom) };
}
exports.withAxiom = withAxiom;

@@ -11,3 +11,7 @@ import * as dotenv from 'dotenv';

});
const openai = withAxiom(new OpenAIApi(configuration));
const { openai, flush } = withAxiom(new OpenAIApi(configuration));
process.on("beforeExit", async () => {
await flush()
process.exit(0);
});

@@ -14,0 +18,0 @@ const completion = await openai.createCompletion({

import { OpenAIApi, CreateCompletionRequest, CreateChatCompletionRequest } from "openai";
import { AxiosRequestConfig } from "axios";
import Client from "@axiomhq/axiom-node";
import { AxiomClient, BatchedAxiomClient, ImmediateAxiomClient } from './shared';

@@ -10,8 +10,15 @@ export interface WithAxiomOptions {

excludeChoices?: boolean;
sendType?: "batch"|"immediate";
}
export function withAxiom(openai: OpenAIApi, opts?: WithAxiomOptions): OpenAIApi {
const axiom = new Client({ token: opts?.token });
export function withAxiom(openai: OpenAIApi, opts?: WithAxiomOptions): { openai: OpenAIApi, flush: Function } {
const dataset = opts?.dataset || process.env.AXIOM_DATASET;
let axiom: AxiomClient;
if (opts?.sendType === "immediate") {
axiom = new ImmediateAxiomClient(opts?.token, dataset!);
} else {
axiom = new BatchedAxiomClient(opts?.token, dataset!);
}
const createCompletion = openai.createCompletion;

@@ -32,3 +39,3 @@ openai.createCompletion = async (request: CreateCompletionRequest, options?: AxiosRequestConfig<any>) => {

} catch (e: any) {
await axiom.ingestEvents(dataset!, {
await axiom.ingestEvents({
_time: start.toISOString(),

@@ -49,3 +56,3 @@ type: "completion",

await axiom.ingestEvents(dataset!, {
await axiom.ingestEvents({
_time: start.toISOString(),

@@ -76,3 +83,3 @@ type: "completion",

} catch (e: any) {
await axiom.ingestEvents(dataset!, {
await axiom.ingestEvents({
_time: start.toISOString(),

@@ -93,3 +100,3 @@ type: "chatCompletion",

await axiom.ingestEvents(dataset!, {
await axiom.ingestEvents({
_time: start.toISOString(),

@@ -105,3 +112,4 @@ type: "chatCompletion",

return openai;
return { openai, flush: axiom.flush.bind(axiom) };
}
{
"name": "axiom-ai",
"version": "1.1.0",
"version": "2.0.0",
"description": "The official package to send events from AI libraries to Axiom.",

@@ -45,2 +45,2 @@ "scripts": {

}
}
}

@@ -36,7 +36,16 @@ <picture>

});
const openai = withAxiom(new OpenAIApi(configuration), {
const { openai, flush } = withAxiom(new OpenAIApi(configuration), {
token: process.env.AXIOM_TOKEN,
dataset: process.env.AXIOM_DATASET,
// excludePromptOrMessages: false,
// excludeChoices: false,
// sendType: "batch", // or "immediate" for sending events synchronously
});
// We need to flush events before exit
process.on("beforeExit", async () => {
await flush()
process.exit(0);
});
const completion = await openai.createCompletion({

@@ -81,5 +90,1 @@ model: "text-davinci-003",

```
If you pass `excludePromptOrMessages: true` and/or `excludeChoices: true` in
the `withAxiom` options, the prompt/messages or choices, respectively, will
not be sent to Axiom.