Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@callstack/byorg-core

Package Overview
Dependencies
Maintainers
0
Versions
14
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@callstack/byorg-core - npm Package Compare versions

Comparing version 0.5.0 to 0.6.0

12

CHANGELOG.md
# @callstack/byorg-core
## 0.6.0
### Minor Changes
- 287c02b: core: make systemPrompt both optional and sync-only
- 02bd239: core: allow for dynamic model selection
- bf009e8: core: mock chat model for testing setup
### Patch Changes
- @callstack/byorg-utils@0.6.0
## 0.5.0

@@ -4,0 +16,0 @@

14

dist/esm/index.d.ts

@@ -9,4 +9,4 @@ import { LanguageModel } from 'ai';

export declare type ApplicationConfig = {
chatModel: ChatModel;
systemPrompt: (context: RequestContext) => Promise<string> | string;
chatModel: ChatModel | ((context: RequestContext) => ChatModel);
systemPrompt?: ((context: RequestContext) => string | null) | string;
plugins?: ApplicationPlugin[];

@@ -62,2 +62,4 @@ errorHandler?: ErrorHandler;

export declare function createMockChatModel(config?: MockModelConfig): ChatModel;
export declare type DocumentReference = {

@@ -99,2 +101,8 @@ title: string;

export declare type MockModelConfig = {
responses?: string[];
delay?: number;
seed?: number;
};
export declare type ModelUsage = {

@@ -179,3 +187,3 @@ model: string;

resolvedEntities: EntityInfo;
systemPrompt: () => Promise<string> | string;
systemPrompt: () => string | null;
onPartialResponse?: (text: string) => void;

@@ -182,0 +190,0 @@ extras: MessageRequestExtras;

@@ -207,3 +207,2 @@ import * as __WEBPACK_EXTERNAL_MODULE__callstack_byorg_utils__ from "@callstack/byorg-utils";

performance.markStart(PerformanceMarks.processMessages);
const onPartialResponse = options?.onPartialResponse;
const context = {

@@ -219,9 +218,9 @@ messages,

},
systemPrompt: ()=>typeof config.systemPrompt === 'function' ? config.systemPrompt(context) : config.systemPrompt ?? null,
onPartialResponse: options?.onPartialResponse,
tools,
references: getReferenceStorage(),
resolvedEntities: {},
onPartialResponse,
extras: options?.extras ?? {},
performance,
systemPrompt: ()=>config.systemPrompt(context)
performance
};

@@ -232,3 +231,4 @@ const handler = async ()=>{

performance.markStart(PerformanceMarks.chatModel);
const response = await chatModel.generateResponse(context);
const resolvedChatModel = typeof chatModel === 'function' ? chatModel(context) : chatModel;
const response = await resolvedChatModel.generateResponse(context);
performance.markEnd(PerformanceMarks.chatModel);

@@ -299,17 +299,17 @@ // Opens the 'middleware:afterHandler' mark that will be closed after middlewareExecutor has run

async generateResponse(context) {
const messages = context.messages;
const systemPrompt = await context.systemPrompt();
const entitiesPrompt = formatResolvedEntities(context.resolvedEntities);
const finalSystemPrompt = [
systemPrompt,
entitiesPrompt
].join('\n\n');
// TODO: Use userId in anonymous case
const resolvedMessages = [
{
let systemPrompt = context.systemPrompt();
if (systemPrompt) {
const entitiesPrompt = formatResolvedEntities(context.resolvedEntities);
if (entitiesPrompt) {
systemPrompt += '\n\n' + entitiesPrompt;
}
}
const messages = [];
if (systemPrompt) {
messages.push({
role: 'system',
content: finalSystemPrompt
},
...messages.map(toMessageParam)
];
content: systemPrompt
});
}
messages.push(...context.messages.map(toMessageParam));
const getRunToolFunction = (tool, context)=>(params)=>{

@@ -331,3 +331,3 @@ context.performance.markStart(PerformanceMarks.toolExecution);

tools,
messages: resolvedMessages
messages: messages
};

@@ -446,7 +446,7 @@ const executionResult = context.onPartialResponse ? await this.executeRequestWithStreaming(executionContext, context.onPartialResponse) : await this.executeRequest(executionContext);

if (Object.keys(entities).length === 0) {
return '';
return null;
}
return `ENTITY DICTIONARY: \n
${Object.entries(entities).map(([key, value])=>`'${key}' is '${JSON.stringify(value)}'`).join('\n')}
`;
return `### ENTITY DICTIONARY ###\n
${Object.entries(entities).map(([key, value])=>`'${key}' is '${JSON.stringify(value)}'`).join('\n')}
`;
}

@@ -526,2 +526,56 @@

;// CONCATENATED MODULE: ./src/mock/mock-model.ts
// Default canned replies used by createMockChatModel when the caller does not
// supply config.responses. Classic lorem-ipsum sentences of varying length so
// streamed token counts differ between responses.
const LOREM_IPSUM_RESPONSES = [
'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.',
'Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.',
'Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.',
'Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.',
'Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo.',
'Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt.',
'Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem.',
'Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur?',
'Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?'
];
// Builds a ChatModel stub for tests: it picks a canned response via a seedable
// PRNG, optionally streams it token by token through onPartialResponse, and
// reports synthetic usage numbers. Deterministic when config.seed is given.
function createMockChatModel(config) {
    const responses = config?.responses ?? LOREM_IPSUM_RESPONSES;
    const delay = config?.delay ?? 100;
    // PRNG state; seeded for reproducibility, otherwise wall-clock based.
    let rngState = config?.seed ?? Date.now();
    const generateResponse = async (context) => {
        rngState = random(rngState);
        const response = responses[rngState % responses.length];
        // Split into word-sized chunks (keeping trailing whitespace) to
        // simulate token-by-token streaming.
        const tokens = response.split(/(\S+\s*)/).filter(Boolean);
        if (context.onPartialResponse) {
            let streamed = '';
            for (const token of tokens) {
                await sleep(delay);
                streamed += token;
                context.onPartialResponse(streamed);
            }
        }
        await sleep(delay);
        // Rough token estimate: ~4 characters per input token.
        let inputChars = 0;
        for (const msg of context.messages) {
            inputChars += msg.content.length;
        }
        return {
            role: 'assistant',
            content: response,
            usage: {
                model: 'test',
                inputTokens: Math.round(inputChars / 4),
                outputTokens: tokens.length,
                requests: 1,
                responseTime: tokens.length * delay
            }
        };
    };
    return { generateResponse };
}
// Returns a promise that resolves after `ms` milliseconds.
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
// Park–Miller (Lehmer) "minimal standard" pseudo-random generator:
// next = state * 7^5 mod (2^31 - 1). Trivially predictable — for tests only,
// definitely not safe for production. Note: a state of 0 is a fixed point.
function random(state) {
    const MODULUS = 2147483647; // 2^31 - 1, a Mersenne prime
    const MULTIPLIER = 16807; // 7^5, the classic MINSTD multiplier
    // state < 2^31 keeps the product below 2^53, so no precision loss.
    return (state * MULTIPLIER) % MODULUS;
}
;// CONCATENATED MODULE: ./src/index.ts

@@ -538,2 +592,3 @@

export { PerformanceMarks, PerformanceTimeline, SUPPORTED_ATTACHMENT_TYPES, UnsupportedAttachmentsException, VercelChatModelAdapter, createApp, createCommandsPlugin, getReferenceStorage, isAttachmentTypeSupported, loggingPlugin };
export { PerformanceMarks, PerformanceTimeline, SUPPORTED_ATTACHMENT_TYPES, UnsupportedAttachmentsException, VercelChatModelAdapter, createApp, createCommandsPlugin, createMockChatModel, getReferenceStorage, isAttachmentTypeSupported, loggingPlugin };
{
"name": "@callstack/byorg-core",
"version": "0.5.0",
"version": "0.6.0",
"type": "module",

@@ -28,3 +28,3 @@ "license": "MIT",

"zod": "^3.23.8",
"@callstack/byorg-utils": "0.5.0"
"@callstack/byorg-utils": "0.6.0"
},

@@ -35,6 +35,6 @@ "peerDependencies": {

"devDependencies": {
"@microsoft/api-extractor": "^7.47.12",
"@rslib/core": "^0.1.0",
"ai": "^4.0.3",
"vitest": "^2.1.5"
"@microsoft/api-extractor": "^7.48.0",
"@rslib/core": "^0.1.3",
"ai": "^4.0.13",
"vitest": "^2.1.8"
},

@@ -41,0 +41,0 @@ "scripts": {

Sorry, the diff of this file is not supported yet

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc