@samchon/openapi
Comparing version 2.0.0-dev.20241125 to 2.0.0-dev.20241126
@@ -43,2 +43,3 @@ "use strict";
var GeminiConverter_1 = require("./GeminiConverter");
var LlamaConverter_1 = require("./LlamaConverter");
var LlmConverterV3_2 = require("./LlmConverterV3");
@@ -215,4 +216,5 @@ var LlmConverterV3_1_1 = require("./LlmConverterV3_1");
gemini: GeminiConverter_1.GeminiConverter.separate,
llama: LlamaConverter_1.LlamaConverter.separate,
"3.0": LlmConverterV3_2.LlmConverterV3.separate,
"3.1": LlmConverterV3_1_1.LlmConverterV3_1.separate,
};
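The hunk above registers the new `LlamaConverter.separate` entry in the model-keyed dispatch table used when parameters are split into an LLM-side and a human-side portion. A minimal sketch of that dispatch pattern follows; the simplified signatures and the `ISeparated` shape are assumptions for illustration, not the library's internal types.

```typescript
// Model-keyed dispatch table: one `separate` implementation per model.
type Model = "chatgpt" | "claude" | "gemini" | "llama" | "3.0" | "3.1";

interface ISeparated<Schema> {
  llm: Schema | null;   // portion the LLM should fill in
  human: Schema | null; // portion a human must supply
}

const separators: Record<Model, (schema: object) => ISeparated<object>> = {
  chatgpt: (schema) => ({ llm: schema, human: null }),
  claude: (schema) => ({ llm: schema, human: null }),
  gemini: (schema) => ({ llm: schema, human: null }),
  llama: (schema) => ({ llm: schema, human: null }), // the newly added entry
  "3.0": (schema) => ({ llm: schema, human: null }),
  "3.1": (schema) => ({ llm: schema, human: null }),
};

// Supporting another model is then a single new record entry, as in the diff.
const separated = separators["llama"]({ type: "object" });
console.log(separated.llm !== null);
```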
@@ -75,6 +75,8 @@ "use strict";
}
else if (LlmTypeCheckerV3_1.LlmTypeCheckerV3.isInteger(schema) ||
LlmTypeCheckerV3_1.LlmTypeCheckerV3.isNumber(schema))
else if (props.config.constraint === false &&
(LlmTypeCheckerV3_1.LlmTypeCheckerV3.isInteger(schema) ||
LlmTypeCheckerV3_1.LlmTypeCheckerV3.isNumber(schema)))
OpenApiContraintShifter_1.OpenApiContraintShifter.shiftNumeric(schema);
else if (LlmTypeCheckerV3_1.LlmTypeCheckerV3.isString(schema))
else if (props.config.constraint === false &&
LlmTypeCheckerV3_1.LlmTypeCheckerV3.isString(schema))
OpenApiContraintShifter_1.OpenApiContraintShifter.shiftString(schema);
@@ -81,0 +83,0 @@ });
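This change gates the constraint-shifting branches on `props.config.constraint === false`, so numeric and string constraint keywords are moved out of the schema only when the configuration says the target schema should not carry them natively. A hedged sketch of the idea, with simplified shapes and an assumed `@minimum`/`@maximum` note format that may differ from `OpenApiContraintShifter`'s actual output:

```typescript
interface INumericSchema {
  type: "integer" | "number";
  minimum?: number;
  maximum?: number;
  description?: string;
}

// When `constraint === false`, move numeric keywords into the description so
// models that ignore such keywords still see the rule as plain text.
function shiftNumeric(schema: INumericSchema, constraint: boolean): INumericSchema {
  if (constraint === true) return schema; // keep native keywords as-is
  const notes: string[] = [];
  if (schema.minimum !== undefined) notes.push(`@minimum ${schema.minimum}`);
  if (schema.maximum !== undefined) notes.push(`@maximum ${schema.maximum}`);
  const description: string = [schema.description, ...notes]
    .filter((s) => !!s)
    .join("\n");
  return { type: schema.type, description: description || undefined };
}

const shifted = shiftNumeric({ type: "integer", minimum: 1, maximum: 10 }, false);
// -> { type: "integer", description: "@minimum 1\n@maximum 10" }
console.log(shifted);
```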
@@ -5,2 +5,3 @@ import { OpenApi } from "../OpenApi";
import { IGeminiSchema } from "../structures/IGeminiSchema";
import { ILlamaSchema } from "../structures/ILlamaSchema";
import { ILlmApplication } from "../structures/ILlmApplication";
@@ -26,2 +27,7 @@ import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3";
}) => IGeminiSchema.IObject | null;
llama: (props: {
components: OpenApi.IComponents;
schema: OpenApi.IJsonSchema.IObject;
config: ILlamaSchema.IConfig;
}) => IClaudeSchema.IParameters | null;
"3.0": (props: {
@@ -56,2 +62,8 @@ components: OpenApi.IComponents;
}) => IGeminiSchema | null;
llama: (props: {
components: OpenApi.IComponents;
schema: OpenApi.IJsonSchema;
$defs: Record<string, ILlamaSchema>;
config: ILlamaSchema.IConfig;
}) => ILlamaSchema | null;
"3.0": (props: {
@@ -79,2 +91,5 @@ components: OpenApi.IComponents;
};
llama: {
reference: false;
};
"3.0": {
@@ -81,0 +96,0 @@ constraint: false;
@@ -7,2 +7,3 @@ "use strict";
var GeminiConverter_1 = require("./GeminiConverter");
var LlamaConverter_1 = require("./LlamaConverter");
var LlmConverterV3_2 = require("./LlmConverterV3");
@@ -22,2 +23,3 @@ var LlmConverterV3_1_1 = require("./LlmConverterV3_1");
gemini: function (props) { return GeminiConverter_1.GeminiConverter.parameters(props); },
llama: function (props) { return LlamaConverter_1.LlamaConverter.parameters(props); },
"3.0": function (props) { return LlmConverterV3_2.LlmConverterV3.parameters(props); },
@@ -50,2 +52,10 @@ "3.1": function (props) { return LlmConverterV3_1_1.LlmConverterV3_1.parameters(props); },
},
llama: function (props) {
return LlamaConverter_1.LlamaConverter.schema({
components: props.components,
schema: props.schema,
$defs: props.$defs,
config: props.config,
});
},
"3.0": function (props) {
@@ -77,2 +87,5 @@ return LlmConverterV3_2.LlmConverterV3.schema({
},
llama: {
reference: false,
},
"3.0": {
@@ -79,0 +92,0 @@ constraint: false,
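The last part of this hunk adds `llama: { reference: false }` to the per-model default configuration table. A small sketch of how such defaults are typically merged with caller overrides; only the two defaults visible in these hunks are reproduced, and the meaning of the flags is an assumption for illustration:

```typescript
interface IConfig {
  reference?: boolean;  // emit $ref/$defs instead of inlining (assumption)
  constraint?: boolean; // keep constraint keywords on the schema (assumption)
}

const defaultConfigs: Record<"llama" | "3.0", IConfig> = {
  llama: { reference: false },   // default added in this release
  "3.0": { constraint: false },
};

// A caller-supplied partial config would typically be merged over the default.
function resolveConfig(
  model: keyof typeof defaultConfigs,
  override?: Partial<IConfig>,
): IConfig {
  return { ...defaultConfigs[model], ...override };
}

console.log(resolveConfig("llama"));                     // { reference: false }
console.log(resolveConfig("3.0", { constraint: true })); // { constraint: true }
```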
@@ -64,3 +64,3 @@ import { OpenApi } from "./OpenApi";
document: OpenApi.IDocument<OpenApi.IJsonSchema, Operation> | IHttpMigrateApplication<OpenApi.IJsonSchema, Operation>;
options?: Partial<IHttpLlmApplication.IOptions<Model, Parameters["properties"][string] extends IHttpLlmApplication.ModelSchema[Model] ? Parameters["properties"][string] : IHttpLlmApplication.ModelSchema[Model]>>;
options?: Partial<IHttpLlmApplication.IOptions<Model, IHttpLlmApplication.ModelSchema[Model]>>;
}) => IHttpLlmApplication<Model, Parameters>;
@@ -67,0 +67,0 @@ /**
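With the `options` generic simplified, composing an HTTP function-calling application from an OpenAPI document stays a single factory call. A sketch under stated assumptions: `document` and `options` appear in the declaration above, while the `model` discriminator and `OpenApi.convert` follow the package's public documentation and may differ in detail.

```typescript
import {
  HttpLlm,
  OpenApi,
  OpenApiV3,
  OpenApiV3_1,
  SwaggerV2,
} from "@samchon/openapi";

// Any Swagger v2 / OpenAPI v3.x document; loading it is up to the caller.
declare const swagger:
  | SwaggerV2.IDocument
  | OpenApiV3.IDocument
  | OpenApiV3_1.IDocument;

// Normalize to the emended OpenApi.IDocument, then compose the application.
const document: OpenApi.IDocument = OpenApi.convert(swagger);
const application = HttpLlm.application({
  model: "llama", // model key assumed from the package docs; newly supported here
  document,
});

console.log(application.functions.map((f) => f.name));
```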
@@ -89,3 +89,3 @@ import { OpenApi } from "../OpenApi";
*/
options: IHttpLlmApplication.IOptions<Model, Parameters["properties"][string] extends IHttpLlmApplication.ModelSchema[Model] ? Parameters["properties"][string] : IHttpLlmApplication.ModelSchema[Model]>;
options: IHttpLlmApplication.IOptions<Model, IHttpLlmApplication.ModelSchema[Model]>;
}
@@ -92,0 +92,0 @@ export declare namespace IHttpLlmApplication {
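The conditional type removed here always collapsed to `ModelSchema[Model]` whenever `Parameters` was well formed for the chosen model, so keying the options directly by the model loses nothing. A toy type-level reproduction with hypothetical stand-in schema types, only to show the equivalence, not the real definitions:

```typescript
type ModelSchema = {
  chatgpt: { kind: "chatgpt" };
  llama: { kind: "llama" };
};
type Model = keyof ModelSchema;

// Before: conditional lookup through the parameters' property values.
type OldSchema<
  M extends Model,
  P extends { properties: Record<string, ModelSchema[M]> },
> = P["properties"][string] extends ModelSchema[M]
  ? P["properties"][string]
  : ModelSchema[M];

// After: the schema type is keyed directly by the model.
type NewSchema<M extends Model> = ModelSchema[M];

// Both resolve to the same type for a well-formed parameters object.
type Old = OldSchema<"llama", { properties: Record<string, { kind: "llama" }> }>;
type New = NewSchema<"llama">;
const same: Old extends New ? (New extends Old ? true : false) : false = true;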
import { IChatGptSchema } from "./IChatGptSchema";
import { IClaudeSchema } from "./IClaudeSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { ILlamaSchema } from "./ILlamaSchema";
import { ILlmFunction } from "./ILlmFunction";
@@ -44,6 +45,6 @@ import { ILlmSchemaV3 } from "./ILlmSchemaV3";
*/
options: ILlmApplication.IOptions<Model, Parameters["properties"][string] extends ILlmApplication.ModelSchema[Model] ? Parameters["properties"][string] : ILlmApplication.ModelSchema[Model]>;
options: ILlmApplication.IOptions<Model, ILlmApplication.ModelSchema[Model]>;
}
export declare namespace ILlmApplication {
type Model = "chatgpt" | "claude" | "gemini" | "3.0" | "3.1";
type Model = "chatgpt" | "claude" | "gemini" | "llama" | "3.0" | "3.1";
type ModelParameters = {
@@ -53,2 +54,3 @@ chatgpt: IChatGptSchema.IParameters;
gemini: IGeminiSchema.IParameters;
llama: ILlamaSchema.IParameters;
"3.0": ILlmSchemaV3.IParameters;
@@ -61,2 +63,3 @@ "3.1": ILlmSchemaV3_1.IParameters;
gemini: IGeminiSchema;
llama: ILlamaSchema;
"3.0": ILlmSchemaV3;
@@ -69,2 +72,3 @@ "3.1": ILlmSchemaV3_1;
gemini: IGeminiSchema.IConfig;
llama: ILlamaSchema.IConfig;
"3.0": ILlmSchemaV3.IConfig;
@@ -71,0 +75,0 @@ "3.1": ILlmSchemaV3_1.IConfig;
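With `"llama"` added to `ILlmApplication.Model` and to the three model maps, the Llama-specific types can be picked by indexed access. A brief sketch; it assumes `ILlmApplication` is re-exported from the package root as in other releases.

```typescript
import { ILlmApplication } from "@samchon/openapi";

// Indexed access over the maps declared above picks the Llama-specific types;
// each should resolve to ILlamaSchema, ILlamaSchema.IParameters and
// ILlamaSchema.IConfig respectively.
type LlamaSchema = ILlmApplication.ModelSchema["llama"];
type LlamaParameters = ILlmApplication.ModelParameters["llama"];
type LlamaConfig = ILlmApplication.ModelConfig["llama"];

// The same key drives the simplified options type shown in this hunk.
type LlamaOptions = ILlmApplication.IOptions<"llama", LlamaSchema>;
```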
@@ -1,10 +0,6 @@
import { IChatGptSchema } from "./IChatGptSchema";
import { IClaudeSchema } from "./IClaudeSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";
import { ILlmApplication } from "./ILlmApplication";
/**
* The schemas for the LLM function calling.
*
* `ILlmSchema` is an union type collecting all the schemas for the
* `ILlmSchema` is an union type collecting every the schemas for the
* LLM function calling.
@@ -14,8 +10,24 @@ *
*
* @template Model Name of the target LLM model
* @reference https://platform.openai.com/docs/guides/function-calling
* @reference https://platform.openai.com/docs/guides/structured-outputs
* @author Jeongho Nam - https://github.com/samchon
*/
export type ILlmSchema = IChatGptSchema | IClaudeSchema | IGeminiSchema | ILlmSchemaV3 | ILlmSchemaV3_1;
export type ILlmSchema<Model extends ILlmApplication.Model = ILlmApplication.Model> = ILlmApplication.ModelSchema[Model];
export declare namespace ILlmSchema {
type IParameters = IChatGptSchema.IParameters | IClaudeSchema.IParameters | IGeminiSchema.IParameters | ILlmSchemaV3.IParameters | ILlmSchemaV3_1.IParameters;
type IConfig = IChatGptSchema.IConfig | IClaudeSchema.IConfig | IGeminiSchema.IConfig | ILlmSchemaV3.IConfig | ILlmSchemaV3_1.IConfig;
/**
* Type of function parameters.
*
* `ILlmSchema.IParameters` is a type defining a function's pamameters
* as a keyworded object type.
*
* It also can be utilized for the structured output metadata.
*
* @reference https://platform.openai.com/docs/guides/structured-outputs
*/
type IParameters<Model extends ILlmApplication.Model = ILlmApplication.Model> = ILlmApplication.ModelParameters[Model];
/**
* Configuration for the LLM schema composition.
*/
type IConfig<Model extends ILlmApplication.Model = ILlmApplication.Model> = ILlmApplication.ModelConfig[Model];
}
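`ILlmSchema` changes here from a flat union into a generic lookup type keyed by the model, with the full union kept as the default. A toy reproduction of the pattern with hypothetical stand-in schema types, showing what callers gain from the key-driven form:

```typescript
interface ChatGptLike {
  kind: "chatgpt";
  strict?: boolean;
}
interface LlamaLike {
  kind: "llama";
}

type ModelSchema = { chatgpt: ChatGptLike; llama: LlamaLike };
type Model = keyof ModelSchema;

// Old style: one flat union; callers cannot ask for "the chatgpt schema".
type OldLlmSchema = ChatGptLike | LlamaLike;

// New style: generic lookup with the full union as the default.
type LlmSchema<M extends Model = Model> = ModelSchema[M];

const a: LlmSchema<"chatgpt"> = { kind: "chatgpt", strict: true }; // narrowed
const b: LlmSchema = { kind: "llama" };                            // still the union
```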
{
"name": "@samchon/openapi",
"version": "2.0.0-dev.20241125",
"version": "2.0.0-dev.20241126",
"description": "OpenAPI definitions and converters for 'typia' and 'nestia'.",
@@ -50,2 +50,3 @@ "main": "./lib/index.js",
"@types/uuid": "^10.0.0",
"axios": "^1.7.7",
"chalk": "^4.1.2",
@@ -52,0 +53,0 @@ "dotenv": "^16.4.5",
@@ -45,9 +45,9 @@ > ## Next version is coming.
- Supported schemas
- ✔️[`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts): OpenAI ChatGPT
- ✔️[`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts): Anthropic Claude (same with [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts))
- ✔️[`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts): Google Gemini
- ✍️`ILlamaSchema`: Meta (Facebook) Llama
- [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts): OpenAI ChatGPT
- [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts): Anthropic Claude (same with [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts))
- [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts): Google Gemini
- [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts): Meta (Facebook) Llama
- Midldle layer schemas
- ✔️[`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts): middle layer based on OpenAPI v3.0 specification
- ✔️[`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts): middle layer based on OpenAPI v3.1 specification
- [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts): middle layer based on OpenAPI v3.0 specification
- [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts): middle layer based on OpenAPI v3.1 specification
@@ -254,3 +254,13 @@ > [!TIP]
>
> Preparing playground website utilizing [`web-llm`](https://github.com/mlc-ai/web-llm).
> You also can compose `ILlmApplication` from a class type through `typia`.
>
> ```typescript
> import { ILlmApplication } from "@samchon/openapi";
> import typia from "typia";
>
> const app: ILlmApplication<"chatgpt"> =
> typia.llm.application<YourClassType, "chatgpt">();
> ```
>
> https://typia.io/docs/llm/application
@@ -257,0 +267,0 @@ > [!TIP]
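If the `typia.llm.application` integration shown in that README tip accepts the same model keys as this package, targeting the newly added Llama schema would presumably look like the following. The `"llama"` argument, the `ShoppingCartService` class, and its method are assumptions for illustration; the `"chatgpt"` form is the one the README actually documents.

```typescript
import typia from "typia";

// Hypothetical class; any class or interface with documented public methods
// works the same way per the linked typia docs.
class ShoppingCartService {
  public add(props: { productId: string; quantity: number }): void {
    void props;
  }
}

// "llama" as the model argument is an assumption based on the Model union
// introduced in this release.
const app = typia.llm.application<ShoppingCartService, "llama">();

console.log(app.functions.map((f) => f.name)); // e.g. ["add"]
```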
@@ -10,2 +10,3 @@ import { OpenApi } from "../OpenApi";
import { GeminiConverter } from "./GeminiConverter";
import { LlamaConverter } from "./LlamaConverter";
import { LlmConverterV3 } from "./LlmConverterV3";
@@ -242,4 +243,5 @@ import { LlmConverterV3_1 } from "./LlmConverterV3_1";
gemini: GeminiConverter.separate,
llama: LlamaConverter.separate,
"3.0": LlmConverterV3.separate,
"3.1": LlmConverterV3_1.separate,
};
@@ -44,4 +44,5 @@ import { OpenApi } from "../OpenApi";
} else if (
LlmTypeCheckerV3.isInteger(schema) ||
LlmTypeCheckerV3.isNumber(schema)
props.config.constraint === false &&
(LlmTypeCheckerV3.isInteger(schema) ||
LlmTypeCheckerV3.isNumber(schema))
)
@@ -51,3 +52,6 @@ OpenApiContraintShifter.shiftNumeric(
);
else if (LlmTypeCheckerV3.isString(schema))
else if (
props.config.constraint === false &&
LlmTypeCheckerV3.isString(schema)
)
OpenApiContraintShifter.shiftString(
@@ -54,0 +58,0 @@ schema as OpenApi.IJsonSchema.IString,
@@ -5,2 +5,3 @@ import { OpenApi } from "../OpenApi";
import { IGeminiSchema } from "../structures/IGeminiSchema";
import { ILlamaSchema } from "../structures/ILlamaSchema";
import { ILlmApplication } from "../structures/ILlmApplication";
@@ -12,2 +13,3 @@ import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3";
import { GeminiConverter } from "./GeminiConverter";
import { LlamaConverter } from "./LlamaConverter";
import { LlmConverterV3 } from "./LlmConverterV3";
@@ -45,2 +47,7 @@ import { LlmConverterV3_1 } from "./LlmConverterV3_1";
}) => GeminiConverter.parameters(props),
llama: (props: {
components: OpenApi.IComponents;
schema: OpenApi.IJsonSchema.IObject;
config: ILlamaSchema.IConfig;
}) => LlamaConverter.parameters(props),
"3.0": (props: {
@@ -93,2 +100,14 @@ components: OpenApi.IComponents;
}),
llama: (props: {
components: OpenApi.IComponents;
schema: OpenApi.IJsonSchema;
$defs: Record<string, ILlamaSchema>;
config: ILlamaSchema.IConfig;
}) =>
LlamaConverter.schema({
components: props.components,
schema: props.schema,
$defs: props.$defs,
config: props.config,
}),
"3.0": (props: {
@@ -128,2 +147,5 @@ components: OpenApi.IComponents;
} satisfies IGeminiSchema.IConfig,
llama: {
reference: false,
} satisfies ILlamaSchema.IConfig,
"3.0": {
@@ -130,0 +152,0 @@ constraint: false,
@@ -83,5 +83,3 @@ import { HttpMigration } from "./HttpMigration";
Model,
Parameters["properties"][string] extends IHttpLlmApplication.ModelSchema[Model]
? Parameters["properties"][string]
: IHttpLlmApplication.ModelSchema[Model]
IHttpLlmApplication.ModelSchema[Model]
>
@@ -88,0 +86,0 @@ >;
@@ -102,5 +102,3 @@ import { OpenApi } from "../OpenApi";
Model,
Parameters["properties"][string] extends IHttpLlmApplication.ModelSchema[Model]
? Parameters["properties"][string]
: IHttpLlmApplication.ModelSchema[Model]
IHttpLlmApplication.ModelSchema[Model]
>;
@@ -107,0 +105,0 @@ }
import { IChatGptSchema } from "./IChatGptSchema";
import { IClaudeSchema } from "./IClaudeSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { ILlamaSchema } from "./ILlamaSchema";
import { ILlmFunction } from "./ILlmFunction";
@@ -51,11 +52,6 @@ import { ILlmSchemaV3 } from "./ILlmSchemaV3";
*/
options: ILlmApplication.IOptions<
Model,
Parameters["properties"][string] extends ILlmApplication.ModelSchema[Model]
? Parameters["properties"][string]
: ILlmApplication.ModelSchema[Model]
>;
options: ILlmApplication.IOptions<Model, ILlmApplication.ModelSchema[Model]>;
}
export namespace ILlmApplication {
export type Model = "chatgpt" | "claude" | "gemini" | "3.0" | "3.1";
export type Model = "chatgpt" | "claude" | "gemini" | "llama" | "3.0" | "3.1";
export type ModelParameters = {
@@ -65,2 +61,3 @@ chatgpt: IChatGptSchema.IParameters;
gemini: IGeminiSchema.IParameters;
llama: ILlamaSchema.IParameters;
"3.0": ILlmSchemaV3.IParameters;
@@ -73,2 +70,3 @@ "3.1": ILlmSchemaV3_1.IParameters;
gemini: IGeminiSchema;
llama: ILlamaSchema;
"3.0": ILlmSchemaV3;
@@ -81,2 +79,3 @@ "3.1": ILlmSchemaV3_1;
gemini: IGeminiSchema.IConfig;
llama: ILlamaSchema.IConfig;
"3.0": ILlmSchemaV3.IConfig;
@@ -83,0 +82,0 @@ "3.1": ILlmSchemaV3_1.IConfig;
@@ -1,6 +0,2 @@
import { IChatGptSchema } from "./IChatGptSchema";
import { IClaudeSchema } from "./IClaudeSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";
import { ILlmApplication } from "./ILlmApplication";
@@ -10,3 +6,3 @@ /**
*
* `ILlmSchema` is an union type collecting all the schemas for the
* `ILlmSchema` is an union type collecting every the schemas for the
* LLM function calling.
@@ -16,25 +12,32 @@ *
*
* @template Model Name of the target LLM model
* @reference https://platform.openai.com/docs/guides/function-calling
* @reference https://platform.openai.com/docs/guides/structured-outputs
* @author Jeongho Nam - https://github.com/samchon
*/
export type ILlmSchema =
| IChatGptSchema
| IClaudeSchema
| IGeminiSchema
| ILlmSchemaV3
| ILlmSchemaV3_1;
export type ILlmSchema<
Model extends ILlmApplication.Model = ILlmApplication.Model,
> = ILlmApplication.ModelSchema[Model];
export namespace ILlmSchema {
export type IParameters =
| IChatGptSchema.IParameters
| IClaudeSchema.IParameters
| IGeminiSchema.IParameters
| ILlmSchemaV3.IParameters
| ILlmSchemaV3_1.IParameters;
/**
* Type of function parameters.
*
* `ILlmSchema.IParameters` is a type defining a function's pamameters
* as a keyworded object type.
*
* It also can be utilized for the structured output metadata.
*
* @reference https://platform.openai.com/docs/guides/structured-outputs
*/
export type IParameters<
Model extends ILlmApplication.Model = ILlmApplication.Model,
> = ILlmApplication.ModelParameters[Model];
export type IConfig =
| IChatGptSchema.IConfig
| IClaudeSchema.IConfig
| IGeminiSchema.IConfig
| ILlmSchemaV3.IConfig
| ILlmSchemaV3_1.IConfig;
/**
* Configuration for the LLM schema composition.
*/
export type IConfig<
Model extends ILlmApplication.Model = ILlmApplication.Model,
> = ILlmApplication.ModelConfig[Model];
}
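Beyond the `ILlmSchema` type itself, this source file also turns `IParameters` and `IConfig` into model-keyed generics with the old unions as defaults. A short sketch of how they can be consumed, assuming `ILlmSchema` is exported from the package root as in other releases; the `"chatgpt"` key is only an example.

```typescript
import { ILlmSchema } from "@samchon/openapi";

// Parameters for one model, usable as a structured-output schema as the
// doc comment above suggests.
type ChatGptParameters = ILlmSchema.IParameters<"chatgpt">;

// Per-model composition config, looked up the same way.
type ChatGptConfig = ILlmSchema.IConfig<"chatgpt">;

// With no argument, both aliases fall back to the union over every model.
type AnyParameters = ILlmSchema.IParameters;
type AnyConfig = ILlmSchema.IConfig;
```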
@@ -7,5 +7,9 @@ /**
* is not the final type for the LLM function calling, but the intermediate
* structure for the conversion to the final type like {@link IChatGptSchema}.
* structure for the conversion to the final type of below:
*
* However, the `IChatGptSchema` does not follow the entire specification of
* - {@link IChatGptSchema}
* - {@link IClaudeSchema}
* - {@link ILlamaSchema}
*
* However, the `ILlmSchemaV3_1` does not follow the entire specification of
* the OpenAPI v3.1. It has own specific restrictions and definitions. Here is the
@@ -12,0 +16,0 @@ * list of how `ILlmSchemaV3_1` is different with the OpenAPI v3.1 JSON schema.
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package