New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@ai-sdk/anthropic

Package Overview
Dependencies
Maintainers
2
Versions
82
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@ai-sdk/anthropic - npm Package Compare versions

Comparing version 0.0.42 to 0.0.43

125

./dist/index.js

@@ -55,7 +55,19 @@ "use strict";

var import_provider_utils2 = require("@ai-sdk/provider-utils");
function convertToAnthropicMessagesPrompt(prompt) {
var _a;
function convertToAnthropicMessagesPrompt({
prompt,
cacheControl: isCacheControlEnabled
}) {
var _a, _b, _c;
const blocks = groupIntoBlocks(prompt);
let system = void 0;
const messages = [];
function getCacheControl(providerMetadata) {
var _a2;
if (isCacheControlEnabled === false) {
return void 0;
}
const anthropic2 = providerMetadata == null ? void 0 : providerMetadata.anthropic;
const cacheControlValue = (_a2 = anthropic2 == null ? void 0 : anthropic2.cacheControl) != null ? _a2 : anthropic2 == null ? void 0 : anthropic2.cache_control;
return cacheControlValue;
}
for (let i = 0; i < blocks.length; i++) {

@@ -71,3 +83,7 @@ const block = blocks[i];

}
system = block.messages.map(({ content }) => content).join("\n");
system = block.messages.map(({ content, providerMetadata }) => ({
type: "text",
text: content,
cache_control: getCacheControl(providerMetadata)
}));
break;

@@ -77,9 +93,17 @@ }

const anthropicContent = [];
for (const { role, content } of block.messages) {
for (const message of block.messages) {
const { role, content } = message;
switch (role) {
case "user": {
for (const part of content) {
for (let i2 = 0; i2 < content.length; i2++) {
const part = content[i2];
const isLastPart = i2 === content.length - 1;
const cacheControl = (_a = getCacheControl(part.providerMetadata)) != null ? _a : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
switch (part.type) {
case "text": {
anthropicContent.push({ type: "text", text: part.text });
anthropicContent.push({
type: "text",
text: part.text,
cache_control: cacheControl
});
break;

@@ -97,5 +121,6 @@ }

type: "base64",
media_type: (_a = part.mimeType) != null ? _a : "image/jpeg",
media_type: (_b = part.mimeType) != null ? _b : "image/jpeg",
data: (0, import_provider_utils2.convertUint8ArrayToBase64)(part.image)
}
},
cache_control: cacheControl
});

@@ -109,3 +134,6 @@ break;

case "tool": {
for (const part of content) {
for (let i2 = 0; i2 < content.length; i2++) {
const part = content[i2];
const isLastPart = i2 === content.length - 1;
const cacheControl = (_c = getCacheControl(part.providerMetadata)) != null ? _c : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
anthropicContent.push({

@@ -115,3 +143,4 @@ type: "tool_result",

content: JSON.stringify(part.result),
is_error: part.isError
is_error: part.isError,
cache_control: cacheControl
});

@@ -144,3 +173,5 @@ }

i === blocks.length - 1 && j === block.messages.length - 1 ? part.text.trim() : part.text
)
),
cache_control: void 0
// not used in assistant messages
});

@@ -178,3 +209,4 @@ break;

let currentBlock = void 0;
for (const { role, content } of prompt) {
for (const message of prompt) {
const { role } = message;
switch (role) {

@@ -186,3 +218,3 @@ case "system": {

}
currentBlock.messages.push({ role, content });
currentBlock.messages.push(message);
break;

@@ -195,3 +227,3 @@ }

}
currentBlock.messages.push({ role, content });
currentBlock.messages.push(message);
break;

@@ -204,3 +236,3 @@ }

}
currentBlock.messages.push({ role, content });
currentBlock.messages.push(message);
break;

@@ -213,3 +245,3 @@ }

}
currentBlock.messages.push({ role, content });
currentBlock.messages.push(message);
break;

@@ -267,2 +299,3 @@ }

}) {
var _a;
const type = mode.type;

@@ -295,3 +328,6 @@ const warnings = [];

}
const messagesPrompt = convertToAnthropicMessagesPrompt(prompt);
const messagesPrompt = convertToAnthropicMessagesPrompt({
prompt,
cacheControl: (_a = this.settings.cacheControl) != null ? _a : false
});
const baseArgs = {

@@ -341,7 +377,15 @@ // model id:

}
getHeaders(optionHeaders) {
return (0, import_provider_utils3.combineHeaders)(
this.config.headers(),
this.settings.cacheControl ? { "anthropic-beta": "prompt-caching-2024-07-31" } : {},
optionHeaders
);
}
async doGenerate(options) {
var _a, _b;
const { args, warnings } = await this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
url: `${this.config.baseURL}/messages`,
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
headers: this.getHeaders(options.headers),
body: args,

@@ -386,3 +430,9 @@ failedResponseHandler: anthropicFailedResponseHandler,

rawResponse: { headers: responseHeaders },
warnings
warnings,
providerMetadata: this.settings.cacheControl === true ? {
anthropic: {
cacheCreationInputTokens: (_a = response.usage.cache_creation_input_tokens) != null ? _a : null,
cacheReadInputTokens: (_b = response.usage.cache_read_input_tokens) != null ? _b : null
}
} : void 0
};

@@ -394,7 +444,4 @@ }

url: `${this.config.baseURL}/messages`,
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
body: {
...args,
stream: true
},
headers: this.getHeaders(options.headers),
body: { ...args, stream: true },
failedResponseHandler: anthropicFailedResponseHandler,

@@ -414,2 +461,4 @@ successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(

const toolCallContentBlocks = {};
let providerMetadata = void 0;
const self = this;
return {

@@ -419,2 +468,3 @@ stream: response.pipeThrough(

transform(chunk, controller) {
var _a, _b;
if (!chunk.success) {

@@ -498,2 +548,10 @@ controller.enqueue({ type: "error", error: chunk.error });

usage.completionTokens = value.message.usage.output_tokens;
if (self.settings.cacheControl === true) {
providerMetadata = {
anthropic: {
cacheCreationInputTokens: (_a = value.message.usage.cache_creation_input_tokens) != null ? _a : null,
cacheReadInputTokens: (_b = value.message.usage.cache_read_input_tokens) != null ? _b : null
}
};
}
return;

@@ -507,3 +565,8 @@ }

case "message_stop": {
controller.enqueue({ type: "finish", finishReason, usage });
controller.enqueue({
type: "finish",
finishReason,
usage,
providerMetadata
});
return;

@@ -545,6 +608,8 @@ }

),
stop_reason: import_zod2.z.string().optional().nullable(),
stop_reason: import_zod2.z.string().nullish(),
usage: import_zod2.z.object({
input_tokens: import_zod2.z.number(),
output_tokens: import_zod2.z.number()
output_tokens: import_zod2.z.number(),
cache_creation_input_tokens: import_zod2.z.number().nullish(),
cache_read_input_tokens: import_zod2.z.number().nullish()
})

@@ -558,3 +623,5 @@ });

input_tokens: import_zod2.z.number(),
output_tokens: import_zod2.z.number()
output_tokens: import_zod2.z.number(),
cache_creation_input_tokens: import_zod2.z.number().nullish(),
cache_read_input_tokens: import_zod2.z.number().nullish()
})

@@ -605,3 +672,3 @@ })

type: import_zod2.z.literal("message_delta"),
delta: import_zod2.z.object({ stop_reason: import_zod2.z.string().optional().nullable() }),
delta: import_zod2.z.object({ stop_reason: import_zod2.z.string().nullish() }),
usage: import_zod2.z.object({ output_tokens: import_zod2.z.number() })

@@ -608,0 +675,0 @@ }),

@@ -15,2 +15,7 @@ import { LanguageModelV1 } from '@ai-sdk/provider';

topK?: number;
/**
Enable Anthropic cache control (beta feature). This will add the beta header and
allow you to use provider-specific cacheControl metadata.
*/
cacheControl?: boolean;
}

@@ -34,2 +39,3 @@

private getArgs;
private getHeaders;
doGenerate(options: Parameters<LanguageModelV1['doGenerate']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doGenerate']>>>;

@@ -36,0 +42,0 @@ doStream(options: Parameters<LanguageModelV1['doStream']>[0]): Promise<Awaited<ReturnType<LanguageModelV1['doStream']>>>;

@@ -55,7 +55,19 @@ "use strict";

var import_provider_utils2 = require("@ai-sdk/provider-utils");
function convertToAnthropicMessagesPrompt(prompt) {
var _a;
function convertToAnthropicMessagesPrompt({
prompt,
cacheControl: isCacheControlEnabled
}) {
var _a, _b, _c;
const blocks = groupIntoBlocks(prompt);
let system = void 0;
const messages = [];
function getCacheControl(providerMetadata) {
var _a2;
if (isCacheControlEnabled === false) {
return void 0;
}
const anthropic2 = providerMetadata == null ? void 0 : providerMetadata.anthropic;
const cacheControlValue = (_a2 = anthropic2 == null ? void 0 : anthropic2.cacheControl) != null ? _a2 : anthropic2 == null ? void 0 : anthropic2.cache_control;
return cacheControlValue;
}
for (let i = 0; i < blocks.length; i++) {

@@ -71,3 +83,7 @@ const block = blocks[i];

}
system = block.messages.map(({ content }) => content).join("\n");
system = block.messages.map(({ content, providerMetadata }) => ({
type: "text",
text: content,
cache_control: getCacheControl(providerMetadata)
}));
break;

@@ -77,9 +93,17 @@ }

const anthropicContent = [];
for (const { role, content } of block.messages) {
for (const message of block.messages) {
const { role, content } = message;
switch (role) {
case "user": {
for (const part of content) {
for (let i2 = 0; i2 < content.length; i2++) {
const part = content[i2];
const isLastPart = i2 === content.length - 1;
const cacheControl = (_a = getCacheControl(part.providerMetadata)) != null ? _a : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
switch (part.type) {
case "text": {
anthropicContent.push({ type: "text", text: part.text });
anthropicContent.push({
type: "text",
text: part.text,
cache_control: cacheControl
});
break;

@@ -97,5 +121,6 @@ }

type: "base64",
media_type: (_a = part.mimeType) != null ? _a : "image/jpeg",
media_type: (_b = part.mimeType) != null ? _b : "image/jpeg",
data: (0, import_provider_utils2.convertUint8ArrayToBase64)(part.image)
}
},
cache_control: cacheControl
});

@@ -109,3 +134,6 @@ break;

case "tool": {
for (const part of content) {
for (let i2 = 0; i2 < content.length; i2++) {
const part = content[i2];
const isLastPart = i2 === content.length - 1;
const cacheControl = (_c = getCacheControl(part.providerMetadata)) != null ? _c : isLastPart ? getCacheControl(message.providerMetadata) : void 0;
anthropicContent.push({

@@ -115,3 +143,4 @@ type: "tool_result",

content: JSON.stringify(part.result),
is_error: part.isError
is_error: part.isError,
cache_control: cacheControl
});

@@ -144,3 +173,5 @@ }

i === blocks.length - 1 && j === block.messages.length - 1 ? part.text.trim() : part.text
)
),
cache_control: void 0
// not used in assistant messages
});

@@ -178,3 +209,4 @@ break;

let currentBlock = void 0;
for (const { role, content } of prompt) {
for (const message of prompt) {
const { role } = message;
switch (role) {

@@ -186,3 +218,3 @@ case "system": {

}
currentBlock.messages.push({ role, content });
currentBlock.messages.push(message);
break;

@@ -195,3 +227,3 @@ }

}
currentBlock.messages.push({ role, content });
currentBlock.messages.push(message);
break;

@@ -204,3 +236,3 @@ }

}
currentBlock.messages.push({ role, content });
currentBlock.messages.push(message);
break;

@@ -213,3 +245,3 @@ }

}
currentBlock.messages.push({ role, content });
currentBlock.messages.push(message);
break;

@@ -267,2 +299,3 @@ }

}) {
var _a;
const type = mode.type;

@@ -295,3 +328,6 @@ const warnings = [];

}
const messagesPrompt = convertToAnthropicMessagesPrompt(prompt);
const messagesPrompt = convertToAnthropicMessagesPrompt({
prompt,
cacheControl: (_a = this.settings.cacheControl) != null ? _a : false
});
const baseArgs = {

@@ -341,7 +377,15 @@ // model id:

}
getHeaders(optionHeaders) {
return (0, import_provider_utils3.combineHeaders)(
this.config.headers(),
this.settings.cacheControl ? { "anthropic-beta": "prompt-caching-2024-07-31" } : {},
optionHeaders
);
}
async doGenerate(options) {
var _a, _b;
const { args, warnings } = await this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
url: `${this.config.baseURL}/messages`,
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
headers: this.getHeaders(options.headers),
body: args,

@@ -386,3 +430,9 @@ failedResponseHandler: anthropicFailedResponseHandler,

rawResponse: { headers: responseHeaders },
warnings
warnings,
providerMetadata: this.settings.cacheControl === true ? {
anthropic: {
cacheCreationInputTokens: (_a = response.usage.cache_creation_input_tokens) != null ? _a : null,
cacheReadInputTokens: (_b = response.usage.cache_read_input_tokens) != null ? _b : null
}
} : void 0
};

@@ -394,7 +444,4 @@ }

url: `${this.config.baseURL}/messages`,
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
body: {
...args,
stream: true
},
headers: this.getHeaders(options.headers),
body: { ...args, stream: true },
failedResponseHandler: anthropicFailedResponseHandler,

@@ -414,2 +461,4 @@ successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(

const toolCallContentBlocks = {};
let providerMetadata = void 0;
const self = this;
return {

@@ -419,2 +468,3 @@ stream: response.pipeThrough(

transform(chunk, controller) {
var _a, _b;
if (!chunk.success) {

@@ -498,2 +548,10 @@ controller.enqueue({ type: "error", error: chunk.error });

usage.completionTokens = value.message.usage.output_tokens;
if (self.settings.cacheControl === true) {
providerMetadata = {
anthropic: {
cacheCreationInputTokens: (_a = value.message.usage.cache_creation_input_tokens) != null ? _a : null,
cacheReadInputTokens: (_b = value.message.usage.cache_read_input_tokens) != null ? _b : null
}
};
}
return;

@@ -507,3 +565,8 @@ }

case "message_stop": {
controller.enqueue({ type: "finish", finishReason, usage });
controller.enqueue({
type: "finish",
finishReason,
usage,
providerMetadata
});
return;

@@ -545,6 +608,8 @@ }

),
stop_reason: import_zod2.z.string().optional().nullable(),
stop_reason: import_zod2.z.string().nullish(),
usage: import_zod2.z.object({
input_tokens: import_zod2.z.number(),
output_tokens: import_zod2.z.number()
output_tokens: import_zod2.z.number(),
cache_creation_input_tokens: import_zod2.z.number().nullish(),
cache_read_input_tokens: import_zod2.z.number().nullish()
})

@@ -558,3 +623,5 @@ });

input_tokens: import_zod2.z.number(),
output_tokens: import_zod2.z.number()
output_tokens: import_zod2.z.number(),
cache_creation_input_tokens: import_zod2.z.number().nullish(),
cache_read_input_tokens: import_zod2.z.number().nullish()
})

@@ -605,3 +672,3 @@ })

type: import_zod2.z.literal("message_delta"),
delta: import_zod2.z.object({ stop_reason: import_zod2.z.string().optional().nullable() }),
delta: import_zod2.z.object({ stop_reason: import_zod2.z.string().nullish() }),
usage: import_zod2.z.object({ output_tokens: import_zod2.z.number() })

@@ -608,0 +675,0 @@ }),

{
"name": "@ai-sdk/anthropic",
"version": "0.0.42",
"version": "0.0.43",
"license": "Apache-2.0",

@@ -21,4 +21,4 @@ "sideEffects": false,

"dependencies": {
"@ai-sdk/provider": "0.0.19",
"@ai-sdk/provider-utils": "1.0.12"
"@ai-sdk/provider": "0.0.20",
"@ai-sdk/provider-utils": "1.0.13"
},

@@ -25,0 +25,0 @@ "devDependencies": {

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc