@google/generative-ai
Comparing version 0.19.0 to 0.20.0
@@ -529,2 +529,6 @@ /**
citationMetadata?: CitationMetadata;
/** Average log probability score of the candidate. */
avgLogprobs?: number;
/** Log-likelihood scores for the response tokens and top tokens. */
logprobsResult?: LogprobsResult;
}
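The two new candidate fields can be read straight off a `generateContent` response. A minimal sketch, assuming an API key in `GEMINI_API_KEY`, the `responseLogprobs` flag from the GenerationConfig hunk below, and a model/backend that actually returns logprob data (the model name is illustrative, not part of this diff):

```ts
import { GoogleGenerativeAI } from "@google/generative-ai";

async function main() {
  // Hypothetical key and model name; avgLogprobs and logprobsResult are
  // optional and only populated when the backend returns log-likelihood scores.
  const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY ?? "");
  const model = genAI.getGenerativeModel({
    model: "gemini-1.5-flash",
    generationConfig: { responseLogprobs: true },
  });

  const result = await model.generateContent("Say hello in one word.");
  for (const candidate of result.response.candidates ?? []) {
    console.log("avgLogprobs:", candidate.avgLogprobs);
    console.log("decoding steps:", candidate.logprobsResult?.chosenCandidates.length);
  }
}

main().catch(console.error);
```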
@@ -617,2 +621,21 @@
responseSchema?: ResponseSchema;
/**
* Presence penalty applied to the next token's logprobs if the token has
* already been seen in the response.
*/
presencePenalty?: number;
/**
* Frequency penalty applied to the next token's logprobs, multiplied by the
* number of times each token has been seen in the response so far.
*/
frequencyPenalty?: number;
/**
* If true, export the logprobs results in the response.
*/
responseLogprobs?: boolean;
/**
* Valid if responseLogprobs is set to true. This will set the number of top
* logprobs to return at each decoding step in the logprobsResult.
*/
logprobs?: number;
}
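For context, a minimal sketch of how these new GenerationConfig fields might be wired up; the model name and penalty values are illustrative assumptions, not part of this diff:

```ts
import { GoogleGenerativeAI } from "@google/generative-ai";
import type { GenerationConfig } from "@google/generative-ai";

// Illustrative values only; which fields a given model honors is backend-dependent.
const generationConfig: GenerationConfig = {
  presencePenalty: 0.5,   // penalize tokens already seen in the response
  frequencyPenalty: 0.3,  // scale the penalty by how often a token has appeared
  responseLogprobs: true, // ask for logprobsResult on each candidate
  logprobs: 3,            // top alternatives to return per decoding step
};

const genAI = new GoogleGenerativeAI(process.env.GEMINI_API_KEY ?? "");
const model = genAI.getGenerativeModel({
  model: "gemini-1.5-flash", // hypothetical model choice
  generationConfig,
});
```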
@@ -803,2 +826,29 @@
/**
* Candidate for the logprobs token and score.
* @public
*/
export declare interface LogprobsCandidate {
/** The candidate's token string value. */
token: string;
/** The candidate's token id value. */
tokenID: number;
/** The candidate's log probability. */
logProbability: number;
}
/**
* Logprobs Result
* @public
*/
export declare interface LogprobsResult {
/** Length = total number of decoding steps. */
topCandidates: TopCandidates[];
/**
* Length = total number of decoding steps.
* The chosen candidates may or may not be in topCandidates.
*/
chosenCandidates: LogprobsCandidate[];
}
/**
* Params passed to {@link GoogleGenerativeAI.getGenerativeModel}.
@@ -1042,2 +1092,10 @@ * @public
/**
* Candidates with top log probabilities at each decoding step
*/
export declare interface TopCandidates {
/** Sorted by log probability in descending order. */
candidates: LogprobsCandidate[];
}
/**
* Metadata on the generation request's token usage.
@@ -1044,0 +1102,0 @@ * @public
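Putting the three interfaces together: chosenCandidates[i] is the token actually emitted at decoding step i, and topCandidates[i].candidates are the highest-scoring alternatives at that step. A small sketch of walking a LogprobsResult (the helper name is mine, not part of the SDK):

```ts
import type { LogprobsResult } from "@google/generative-ai";

// Prints the chosen token and its top-scoring alternatives for each decoding step.
function printLogprobs(result: LogprobsResult): void {
  result.chosenCandidates.forEach((chosen, step) => {
    console.log(`step ${step}: "${chosen.token}" (${chosen.logProbability.toFixed(3)})`);
    for (const alt of result.topCandidates[step]?.candidates ?? []) {
      console.log(`  alt: "${alt.token}" (${alt.logProbability.toFixed(3)})`);
    }
  });
}
```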
@@ -286,3 +286,3 @@ 'use strict';
*/
const PACKAGE_VERSION = "0.19.0";
const PACKAGE_VERSION = "0.20.0";
const PACKAGE_LOG_HEADER = "genai-js";
@@ -289,0 +289,0 @@ var Task;
@@ -72,3 +72,3 @@ 'use strict';
*/
const PACKAGE_VERSION = "0.19.0";
const PACKAGE_VERSION = "0.20.0";
const PACKAGE_LOG_HEADER = "genai-js";
@@ -75,0 +75,0 @@ var Task;
@@ -93,2 +93,21 @@ /**
responseSchema?: ResponseSchema;
/**
* Presence penalty applied to the next token's logprobs if the token has
* already been seen in the response.
*/
presencePenalty?: number;
/**
* Frequency penalty applied to the next token's logprobs, multiplied by the
* number of times each token has been seen in the response so far.
*/
frequencyPenalty?: number;
/**
* If true, export the logprobs results in the response.
*/
responseLogprobs?: boolean;
/**
* Valid if responseLogprobs is set to true. This will set the number of top
* logprobs to return at each decoding step in the logprobsResult.
*/
logprobs?: number;
}
@@ -95,0 +114,0 @@ /**
@@ -79,2 +79,34 @@ /**
/**
* Logprobs Result
* @public
*/
export interface LogprobsResult {
/** Length = total number of decoding steps. */
topCandidates: TopCandidates[];
/**
* Length = total number of decoding steps.
* The chosen candidates may or may not be in topCandidates.
*/
chosenCandidates: LogprobsCandidate[];
}
/**
* Candidate for the logprobs token and score.
* @public
*/
export interface LogprobsCandidate {
/** The candidate's token string value. */
token: string;
/** The candidate's token id value. */
tokenID: number;
/** The candidate's log probability. */
logProbability: number;
}
/**
* Candidates with top log probabilities at each decoding step
*/
export interface TopCandidates {
/** Sorted by log probability in descending order. */
candidates: LogprobsCandidate[];
}
/**
* Metadata on the generation request's token usage.
@@ -114,2 +146,6 @@ * @public
citationMetadata?: CitationMetadata;
/** Average log probability score of the candidate. */
avgLogprobs?: number;
/** Log-likelihood scores for the response tokens and top tokens. */
logprobsResult?: LogprobsResult;
}
@@ -116,0 +152,0 @@ /**
@@ -93,2 +93,21 @@ /**
responseSchema?: ResponseSchema;
/**
* Presence penalty applied to the next token's logprobs if the token has
* already been seen in the response.
*/
presencePenalty?: number;
/**
* Frequency penalty applied to the next token's logprobs, multiplied by the
* number of times each token has been seen in the response so far.
*/
frequencyPenalty?: number;
/**
* If true, export the logprobs results in the response.
*/
responseLogprobs?: boolean;
/**
* Valid if responseLogprobs is set to true. This will set the number of top
* logprobs to return at each decoding step in the logprobsResult.
*/
logprobs?: number;
}
@@ -95,0 +114,0 @@ /**
@@ -79,2 +79,34 @@ /**
/**
* Logprobs Result
* @public
*/
export interface LogprobsResult {
/** Length = total number of decoding steps. */
topCandidates: TopCandidates[];
/**
* Length = total number of decoding steps.
* The chosen candidates may or may not be in topCandidates.
*/
chosenCandidates: LogprobsCandidate[];
}
/**
* Candidate for the logprobs token and score.
* @public
*/
export interface LogprobsCandidate {
/** The candidate's token string value. */
token: string;
/** The candidate's token id value. */
tokenID: number;
/** The candidate's log probability. */
logProbability: number;
}
/**
* Candidates with top log probabilities at each decoding step
*/
export interface TopCandidates {
/** Sorted by log probability in descending order. */
candidates: LogprobsCandidate[];
}
/**
* Metadata on the generation request's token usage.
@@ -114,2 +146,6 @@ * @public
citationMetadata?: CitationMetadata;
/** Average log probability score of the candidate. */
avgLogprobs?: number;
/** Log-likelihood scores for the response tokens and top tokens. */
logprobsResult?: LogprobsResult;
}
@@ -116,0 +152,0 @@ /**
{
"name": "@google/generative-ai",
"version": "0.19.0",
"version": "0.20.0",
"description": "Google AI JavaScript SDK",
@@ -5,0 +5,0 @@ "main": "dist/index.js",
License Policy Violation
This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package