@celljs/ai-core - npm Package Compare versions

Comparing version 3.2.0 to 3.3.0

lib/common/chat/message/tool-response-message.d.ts


lib/common/chat/message/assistant-message.d.ts
import { AbstractMessage } from './abstract-message';
import { Media } from './message-protocol';
import { Media, ToolCall } from './message-protocol';
export declare class AssistantMessage extends AbstractMessage {

@@ -7,4 +7,5 @@ readonly content: string;

readonly metadata: Record<string, any>;
constructor(content: string, media?: Media[], metadata?: Record<string, any>);
readonly toolCalls: ToolCall[];
constructor(content?: string, media?: Media[], metadata?: Record<string, any>, toolCalls?: ToolCall[]);
}
//# sourceMappingURL=assistant-message.d.ts.map


lib/common/chat/message/assistant-message.js

@@ -7,3 +7,3 @@ "use strict";

class AssistantMessage extends abstract_message_1.AbstractMessage {
constructor(content, media = [], metadata = {}) {
constructor(content = '', media = [], metadata = {}, toolCalls = []) {
super(message_protocol_1.MessageType.ASSISTANT, content, media, metadata);

@@ -13,2 +13,3 @@ this.content = content;

this.metadata = metadata;
this.toolCalls = toolCalls;
}

@@ -15,0 +16,0 @@ }

@@ -7,2 +7,3 @@ export * from './message-protocol';

export * from './function-message';
export * from './tool-response-message';
//# sourceMappingURL=index.d.ts.map

@@ -10,2 +10,3 @@ "use strict";

tslib_1.__exportStar(require("./function-message"), exports);
tslib_1.__exportStar(require("./tool-response-message"), exports);
//# sourceMappingURL=index.js.map

@@ -25,3 +25,4 @@ import { MediaType } from '@celljs/http';

SYSTEM = "system",
FUNCTION = "function"
FUNCTION = "function",
TOOL = "tool"
}

@@ -42,2 +43,23 @@ /**

}
/**
* Represents a tool call.
*/
export interface ToolCall {
/**
* The unique identifier for the tool call.
*/
id: string;
/**
* The type of the tool.
*/
type: string;
/**
* The name of the tool.
*/
name: string;
/**
* The arguments passed to the tool.
*/
arguments: string;
}
//# sourceMappingURL=message-protocol.d.ts.map
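
Taken together with the `assistant-message` changes above, the new `TOOL` message type and `ToolCall` interface let an assistant message carry tool-call requests. A minimal sketch of constructing such a message, assuming the types are re-exported from `@celljs/ai-core` as the index diffs suggest; the id and argument values are purely illustrative:

```typescript
import { AssistantMessage, ToolCall } from '@celljs/ai-core';

// Illustrative tool call, shaped the way a tool-calling model might return it.
const toolCall: ToolCall = {
    id: 'call_001',                               // unique identifier for the call
    type: 'function',                             // tool type
    name: 'get_weather',                          // tool name
    arguments: JSON.stringify({ city: 'Berlin' }) // JSON-encoded arguments
};

// All constructor arguments are now optional; toolCalls defaults to [].
const message = new AssistantMessage('', [], {}, [toolCall]);
console.log(message.toolCalls.map(call => call.name)); // ['get_weather']
```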

@@ -14,2 +14,3 @@ "use strict";

MessageType["FUNCTION"] = "function";
MessageType["TOOL"] = "tool";
})(MessageType = exports.MessageType || (exports.MessageType = {}));

@@ -16,0 +17,0 @@ var Message;

export * from './metadata-protocol';
export * from './chat-generation-metadata';
//# sourceMappingURL=index.d.ts.map

@@ -5,2 +5,3 @@ "use strict";

tslib_1.__exportStar(require("./metadata-protocol"), exports);
tslib_1.__exportStar(require("./chat-generation-metadata"), exports);
//# sourceMappingURL=index.js.map

@@ -93,2 +93,4 @@ import { ResponseMetadata } from '../../model/model-protocol';

export interface ChatResponseMetadata extends ResponseMetadata {
readonly id?: string;
readonly model?: string;
/**

@@ -95,0 +97,0 @@ * AI provider specific metadata on rate limits.

export * from './prompt-protocol';
export * from './prompt';
export * from './prompt-template';
//# sourceMappingURL=index.d.ts.map

@@ -6,2 +6,3 @@ "use strict";

tslib_1.__exportStar(require("./prompt"), exports);
tslib_1.__exportStar(require("./prompt-template"), exports);
//# sourceMappingURL=index.js.map
import { ModelOptions, ModelRequest } from '../../model/model-protocol';
import { Media, Message } from '../message';
import { Message, MessageType } from '../message/message-protocol';
export declare const PromptTemplate: unique symbol;
/**

@@ -7,5 +8,10 @@ * The ChatOptions represent the common options, portable across different chat models.

export interface ChatOptions extends ModelOptions {
readonly temperature?: number;
readonly topP?: number;
readonly topK?: number;
model?: string;
frequencyPenalty?: number;
maxTokens?: number;
presencePenalty?: number;
stopSequences?: string[];
temperature?: number;
topK?: number;
topP?: number;
}
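
The reworked `ChatOptions` replaces the three read-only fields with a broader set of mutable optional parameters. A short sketch of an options object in the new shape; the concrete values are only illustrative:

```typescript
import { ChatOptions } from '@celljs/ai-core';

const options: ChatOptions = {
    model: 'llama3.2',        // provider-specific model name (illustrative)
    temperature: 0.7,
    topP: 0.9,
    topK: 40,
    maxTokens: 512,
    frequencyPenalty: 0,
    presencePenalty: 0,
    stopSequences: ['\n\nUser:']
};
```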

@@ -16,16 +22,11 @@ export interface Prompt extends ModelRequest<Message[]> {

}
export interface PromptTemplateStringActions {
render(model?: Map<String, Object>): string;
export interface PromptTemplateContext {
variables?: Record<string, any>;
chatOptions?: ChatOptions;
messageType?: MessageType;
}
export interface PromptTemplateActions extends PromptTemplateStringActions {
create(model: Map<String, Object>): Prompt;
export interface PromptTemplate {
render(template: string, ctx?: PromptTemplateContext): Promise<string>;
create(template: string, ctx?: PromptTemplateContext): Promise<Prompt>;
}
export interface PromptTemplateChatActions {
createMessages(): Message[];
createMessages(model: Map<String, Object>): Message[];
}
export interface PromptTemplateMessageActions {
createMessage(mediaList: Media[]): Message;
createMessage(model: Map<String, Object>): Message;
}
//# sourceMappingURL=prompt-protocol.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PromptTemplate = void 0;
exports.PromptTemplate = Symbol('PromptTemplate');
//# sourceMappingURL=prompt-protocol.js.map
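
`PromptTemplate` is now both an injection token (the exported `Symbol`) and an interface whose `render` and `create` methods are asynchronous and take a `PromptTemplateContext`. A minimal sketch of consuming it through dependency injection, assuming the `@celljs/core` decorators used in the README below; class and method names are illustrative:

```typescript
import { Autowired, Component } from '@celljs/core';
import { Prompt, PromptTemplate } from '@celljs/ai-core';

@Component()
export class GreetingPrompts {
    @Autowired(PromptTemplate)
    protected readonly promptTemplate: PromptTemplate;

    // Render the template to a plain string.
    renderGreeting(name: string): Promise<string> {
        return this.promptTemplate.render('Hello {name}', { variables: { name } });
    }

    // Build a full Prompt, optionally attaching portable chat options.
    createGreeting(name: string): Promise<Prompt> {
        return this.promptTemplate.create('Hello {name}', {
            variables: { name },
            chatOptions: { model: 'llama3.2' } // illustrative model name
        });
    }
}
```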

@@ -6,2 +6,11 @@ import { MimeType } from '@celljs/core';

/**
* Enum representing the modality type of the source data.
*/
export declare enum ModalityType {
TEXT = "TEXT",
IMAGE = "IMAGE",
AUDIO = "AUDIO",
VIDEO = "VIDEO"
}
/**
* Represents the metadata for an embedding result.

@@ -27,10 +36,4 @@ */

}
/**
* Enum representing the modality type of the source data.
*/
export declare enum ModalityType {
TEXT = "TEXT",
IMAGE = "IMAGE",
AUDIO = "AUDIO",
VIDEO = "VIDEO"
export declare namespace EmbeddingResultMetadata {
const EMPTY: EmbeddingResultMetadata;
}

@@ -37,0 +40,0 @@ /**

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ModalityUtils = exports.ModalityType = exports.EmbeddingModel = void 0;
exports.ModalityUtils = exports.EmbeddingResultMetadata = exports.ModalityType = exports.EmbeddingModel = void 0;
const core_1 = require("@celljs/core");

@@ -16,2 +16,11 @@ exports.EmbeddingModel = Symbol('EmbeddingModel');

})(ModalityType = exports.ModalityType || (exports.ModalityType = {}));
var EmbeddingResultMetadata;
(function (EmbeddingResultMetadata) {
EmbeddingResultMetadata.EMPTY = {
modalityType: ModalityType.TEXT,
documentId: '',
mimeType: core_1.MimeTypeUtils.TEXT_PLAIN,
documentData: undefined
};
})(EmbeddingResultMetadata = exports.EmbeddingResultMetadata || (exports.EmbeddingResultMetadata = {}));
/**

@@ -18,0 +27,0 @@ * Utility class for modality-related operations.
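
`ModalityType` now precedes the metadata interface, and the new `EmbeddingResultMetadata` namespace exposes an `EMPTY` constant with text-plain defaults. A short sketch of using it as a fallback value, assuming both names are re-exported from `@celljs/ai-core`; the helper function is purely illustrative:

```typescript
import { EmbeddingResultMetadata, ModalityType } from '@celljs/ai-core';

// Illustrative helper: fall back to the shared EMPTY metadata when a result
// carries none, then describe its modality.
function describeMetadata(metadata?: EmbeddingResultMetadata): string {
    const meta = metadata ?? EmbeddingResultMetadata.EMPTY;
    return meta.modalityType === ModalityType.TEXT
        ? `text embedding (document: ${meta.documentId || 'n/a'})`
        : `${meta.modalityType.toLowerCase()} embedding`;
}

console.log(describeMetadata()); // "text embedding (document: n/a)"
```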

export * from './chat';
export * from './model';
export * from './embedding';
export * from './sse';
export * from './utils';
export * from './error';
//# sourceMappingURL=index.d.ts.map

@@ -7,3 +7,5 @@ "use strict";

tslib_1.__exportStar(require("./embedding"), exports);
tslib_1.__exportStar(require("./sse"), exports);
tslib_1.__exportStar(require("./utils"), exports);
tslib_1.__exportStar(require("./error"), exports);
//# sourceMappingURL=index.js.map

@@ -23,3 +23,3 @@ import { ChatOptions } from '../../chat/prompt';

* to pass the function arguments in the pre-configured JSON schema format.
* @param functionInput JSON string with the function arguments to be passed to the
* @param functionArguments JSON string with the function arguments to be passed to the
* function. The arguments are defined as JSON schema usually registered with the the

@@ -29,3 +29,3 @@ * model.

*/
call(functionInput: string): Promise<string>;
call(functionArguments: string): Promise<string>;
}

@@ -32,0 +32,0 @@ export interface FunctionCallingOptions {
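
The `functionInput` parameter is renamed to `functionArguments`; the contract is unchanged, a JSON string matching the pre-registered JSON schema. A minimal sketch of an object satisfying the `call` signature shown in this hunk (the enclosing interface name is not visible in the diff, so the object is typed structurally and the tool logic is hypothetical):

```typescript
// Structurally typed implementation of call(functionArguments: string): Promise<string>.
const weatherTool = {
    async call(functionArguments: string): Promise<string> {
        // functionArguments arrives as a JSON string in the registered schema format.
        const { city } = JSON.parse(functionArguments) as { city: string };
        // Hypothetical lookup; a real implementation would query a weather service.
        return JSON.stringify({ city, forecast: 'sunny' });
    }
};

// Usage: the caller passes JSON-encoded arguments and receives a JSON string back.
weatherTool.call(JSON.stringify({ city: 'Berlin' })).then(console.log);
```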

@@ -40,2 +40,3 @@ /**

export interface ResponseMetadata {
readonly extra: Record<string, any>;
}

@@ -161,4 +162,4 @@ /**

*/
stream(request: TReq): Observable<TResChunk>;
stream(request: TReq): Promise<Observable<TResChunk>>;
}
//# sourceMappingURL=model-protocol.d.ts.map
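
`stream` now resolves to an `Observable` rather than returning one directly, so callers await the promise before subscribing, as the README example below also does. A generic sketch of the new calling convention; the `consume` helper and its type parameters are illustrative:

```typescript
import { Observable } from 'rxjs';

// Illustrative helper: works with any model whose stream method follows the new
// stream(request): Promise<Observable<TChunk>> signature declared above.
async function consume<TReq, TChunk>(
    model: { stream(request: TReq): Promise<Observable<TChunk>> },
    request: TReq
): Promise<void> {
    const chunk$ = await model.stream(request); // resolve the Promise first
    chunk$.subscribe({
        next: chunk => console.log(chunk),
        complete: () => console.log('stream completed')
    });
}
```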
export * from './prompt-util';
export * from './sse-util';
//# sourceMappingURL=index.d.ts.map

@@ -5,2 +5,3 @@ "use strict";

tslib_1.__exportStar(require("./prompt-util"), exports);
tslib_1.__exportStar(require("./sse-util"), exports);
//# sourceMappingURL=index.js.map
{
"name": "@celljs/ai-core",
"version": "3.2.0",
"version": "3.3.0",
"description": "Core domain for AI programming",

@@ -8,4 +8,4 @@ "main": "lib/common/index.js",

"dependencies": {
"@celljs/core": "3.2.0",
"@celljs/http": "3.2.0",
"@celljs/core": "3.3.0",
"@celljs/http": "3.3.0",
"rxjs": "^6.6.0",

@@ -42,5 +42,5 @@ "tslib": "^2.8.0"

"devDependencies": {
"@celljs/component": "3.2.0"
"@celljs/component": "3.3.0"
},
"gitHead": "8547a0ea0468f20441c46736c544875f65d7dc30"
"gitHead": "6046657df0c1674e13e2e3911559643160f9d94c"
}
# Cell - AI Core Component
## Overview
The AI Core module is a library for interacting with AI model services. It provides chat response generation and embedding generation, and its easy-to-use API covers creating messages, sending requests, and handling responses. It is the foundation of all AI modules and defines the common API surface they share.
## Features
- Generate chat responses
- Generate embedding vectors
- Streaming responses
- Configurable model parameters
## Installation
Install the AI Core module with npm:
```bash
npm install @celljs/ai-core
```
Or with yarn:
```bash
yarn add @celljs/ai-core
```
AI Core is the foundation of all AI modules and provides their common APIs, including creating messages, sending requests, and handling responses. Before using it, you also need to install the adapter module for your model provider, for example `@celljs/ai-ollama`:
```bash
npm install @celljs/ai-ollama
```
Or with yarn:
```bash
yarn add @celljs/ai-ollama
```
## Quick Start
The following example shows how to use the AI Ollama module to generate chat responses and embedding vectors:
```typescript
import { ChatModel, EmbeddingModel, PromptTemplate } from '@celljs/ai-core';
import { Component, Autowired } from '@celljs/core';
import { OllamaChatModel } from '@celljs/ai-ollama';

@Component()
export class OllamaDemo {
    @Autowired(OllamaChatModel)
    private chatModel: ChatModel;

    @Autowired(EmbeddingModel)
    private embeddingModel: EmbeddingModel;

    @Autowired(PromptTemplate)
    private promptTemplate: PromptTemplate;

    /**
     * Chat with Ollama
     */
    async chat() {
        const prompt = await this.promptTemplate.create(
            'Hello {name}',
            {
                chatOptions: { model: 'llama3.2' },
                variables: { name: 'Ollama' }
            }
        );
        const response = await this.chatModel.call(prompt);
        console.log(response.result.output);
    }

    /**
     * Stream chat response
     */
    async stream() {
        const prompt = await this.promptTemplate.create(
            'Hello {name}',
            {
                chatOptions: { model: 'llama3.2' },
                variables: { name: 'Ollama' }
            }
        );
        const response$ = await this.chatModel.stream(prompt);
        response$.subscribe({
            next: response => console.log(response.result.output),
            complete: () => console.log('Chat completed!')
        });
    }

    /**
     * Embed text to vector
     */
    async embed() {
        const response = await this.embeddingModel.call({
            inputs: ['text to embed'],
            options: { model: 'llama3.2' }
        });
        console.log(response.result.embeddings);
    }
}
```
## License
This project is licensed under the MIT License.
import { AbstractMessage } from './abstract-message';
import { Media, MessageType } from './message-protocol';
import { Media, MessageType, ToolCall } from './message-protocol';

@@ -7,7 +7,9 @@ export class AssistantMessage extends AbstractMessage {

constructor(
override readonly content: string,
override readonly content: string = '',
override readonly media: Media[] = [],
override readonly metadata: Record<string, any> = {}) {
override readonly metadata: Record<string, any> = {},
readonly toolCalls: ToolCall[] = []
) {
super(MessageType.ASSISTANT, content, media, metadata);
}
}

@@ -7,1 +7,2 @@ export * from './message-protocol';

export * from './function-message';
export * from './tool-response-message';

@@ -30,3 +30,4 @@ import { MediaType } from '@celljs/http';

SYSTEM = 'system',
FUNCTION = 'function'
FUNCTION = 'function',
TOOL = 'tool'
}

@@ -51,1 +52,26 @@

}
/**
* Represents a tool call.
*/
export interface ToolCall {
/**
* The unique identifier for the tool call.
*/
id: string;
/**
* The type of the tool.
*/
type: string;
/**
* The name of the tool.
*/
name: string;
/**
* The arguments passed to the tool.
*/
arguments: string;
}
export * from './metadata-protocol';
export * from './chat-generation-metadata';

@@ -121,2 +121,4 @@ import { IllegalArgumentError } from '@celljs/core';

export interface ChatResponseMetadata extends ResponseMetadata {
readonly id?: string;
readonly model?: string;
/**

@@ -123,0 +125,0 @@ * AI provider specific metadata on rate limits.

export * from './prompt-protocol';
export * from './prompt';
export * from './prompt-template';
import { ModelOptions, ModelRequest } from '../../model/model-protocol';
import { Media, Message } from '../message';
import { Message, MessageType } from '../message/message-protocol';
export const PromptTemplate = Symbol('PromptTemplate');
/**

@@ -8,7 +10,10 @@ * The ChatOptions represent the common options, portable across different chat models.

export interface ChatOptions extends ModelOptions {
readonly temperature?: number;
readonly topP?: number;
readonly topK?: number;
model?: string;
frequencyPenalty?: number;
maxTokens?: number;
presencePenalty?: number;
stopSequences?: string[];
temperature?: number;
topK?: number;
topP?: number;
}

@@ -21,26 +26,11 @@

export interface PromptTemplateStringActions {
render(model?: Map<String, Object>): string;
export interface PromptTemplateContext {
variables?: Record<string, any>;
chatOptions?: ChatOptions;
messageType?: MessageType;
}
export interface PromptTemplateActions extends PromptTemplateStringActions {
create(model: Map<String, Object>): Prompt;
export interface PromptTemplate {
render(template: string, ctx?: PromptTemplateContext): Promise<string>;
create(template: string, ctx?: PromptTemplateContext): Promise<Prompt>;
}
export interface PromptTemplateChatActions {
createMessages(): Message[];
createMessages(model: Map<String, Object>): Message[];
}
export interface PromptTemplateMessageActions {
createMessage(mediaList: Media[]): Message;
createMessage(model: Map<String, Object>): Message;
}

@@ -8,2 +8,12 @@ import { MimeType, MimeTypeUtils } from '@celljs/core';

/**
* Enum representing the modality type of the source data.
*/
export enum ModalityType {
TEXT = 'TEXT',
IMAGE = 'IMAGE',
AUDIO = 'AUDIO',
VIDEO = 'VIDEO'
}
/**
* Represents the metadata for an embedding result.

@@ -33,10 +43,9 @@ */

/**
* Enum representing the modality type of the source data.
*/
export enum ModalityType {
TEXT = 'TEXT',
IMAGE = 'IMAGE',
AUDIO = 'AUDIO',
VIDEO = 'VIDEO'
export namespace EmbeddingResultMetadata {
export const EMPTY: EmbeddingResultMetadata = {
modalityType: ModalityType.TEXT,
documentId: '',
mimeType: MimeTypeUtils.TEXT_PLAIN,
documentData: undefined
};
}

@@ -43,0 +52,0 @@

export * from './chat';
export * from './model';
export * from './embedding';
export * from './sse';
export * from './utils';
export * from './error';

@@ -28,3 +28,3 @@ import { ChatOptions } from '../../chat/prompt';

* to pass the function arguments in the pre-configured JSON schema format.
* @param functionInput JSON string with the function arguments to be passed to the
* @param functionArguments JSON string with the function arguments to be passed to the
* function. The arguments are defined as JSON schema usually registered with the the

@@ -34,3 +34,3 @@ * model.

*/
call(functionInput: string): Promise<string>;
call(functionArguments: string): Promise<string>;

@@ -37,0 +37,0 @@ }

@@ -42,3 +42,5 @@ /**

*/
export interface ResponseMetadata {}
export interface ResponseMetadata {
readonly extra: Record<string, any>;
}

@@ -186,4 +188,4 @@ /**

*/
stream(request: TReq): Observable<TResChunk>;
stream(request: TReq): Promise<Observable<TResChunk>>;
}
export * from './prompt-util';
export * from './sse-util';
