@samchon/openapi
Comparing version 2.0.0-dev.20241120-2 to 2.0.0-dev.20241120-3
@@ -19,8 +19,8 @@ /**
*
* Also, by the documents of Gemini, these additional properties are not
* supported, either. However, I can't sure that these additional properties
* Also, by the documents of Gemini, these constraint properties are not
* supported, either. However, I can't sure that these constraint properties
* are really not supported in the Geimni, because the Gemini seems like
* understanding them. Therefore, I've decided to keep them alive.
*
* - ex) additional properties
* - ex) constraint properties
* - {@link IGeminiSchema.IString.default}
@@ -27,0 +27,0 @@ * - {@link IGeminiSchema.__IAttribute.example}
package.json
{
"name": "@samchon/openapi",
"version": "2.0.0-dev.20241120-2",
"version": "2.0.0-dev.20241120-3",
"description": "OpenAPI definitions and converters for 'typia' and 'nestia'.",
@@ -5,0 +5,0 @@ "main": "./lib/index.js",
README.md
@@ -255,5 +255,5 @@ # `@samchon/openapi`
HttpLlm,
IChatGptSchema,
IHttpLlmApplication,
IHttpLlmFunction,
ILlmSchemaV3_1,
OpenApi,
@@ -281,4 +281,4 @@ OpenApiV3,
const document: OpenApi.IDocument = OpenApi.convert(swagger);
const application: IHttpLlmApplication<"3.1"> = HttpLlm.application({
model: "3.1",
const application: IHttpLlmApplication<"chatgpt"> = HttpLlm.application({
model: "chatgpt",
document,
@@ -288,6 +288,7 @@ });
// Let's imagine that LLM has selected a function to call
const func: IHttpLlmFunction<ILlmSchemaV3_1> | undefined = application.functions.find(
// (f) => f.name === "llm_selected_fuction_name"
(f) => f.path === "/bbs/{section}/articles/{id}" && f.method === "put",
);
const func: IHttpLlmFunction<IChatGptSchema.IParameters> | undefined =
application.functions.find(
// (f) => f.name === "llm_selected_fuction_name"
(f) => f.path === "/bbs/{section}/articles/{id}" && f.method === "put",
);
if (func === undefined) throw new Error("No matched function exists.");
@@ -302,6 +303,10 @@
function: func,
arguments: [
"general",
v4(),
{
input: {
section: "general",
id: v4(),
query: {
language: "en-US",
format: "markdown",
},
body: {
title: "Hello, world!",
@@ -311,83 +316,4 @@ body: "Let's imagine that this argument is composed by LLM.",
},
],
});
console.log("article", article);
};
main().catch(console.error);
```
### Keyword Parameter
Combine every parameter into a single object.
If you configure the `keyword` option when composing the LLM (Large Language Model) function calling application, every parameter of an OpenAPI operation is combined into a single object type in the LLM function calling schema. Many A.I. chatbot developers prefer this strategy, because LLMs tend to be a little more reliable when a function takes a single parameter.
Also, do not worry about the execution case. You don't need to unpack the keyworded parameter manually; the `HttpLlm.execute()` and `HttpLlm.propagate()` functions resolve it automatically by analyzing the `IHttpLlmApplication.options` property.
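To make the difference concrete, here is a minimal sketch (not taken from the README itself) of the two argument shapes. The parameter names `section`, `id`, `query`, and `body` and their sample values come from the example below, and the positional ordering simply mirrors that example.
```typescript
import { v4 } from "uuid";

// keyword: false — one array element per OpenAPI parameter,
// ordered as in the example below (path parameters, then query, then body)
const positionalArguments: unknown[] = [
  "general", // path parameter "section"
  v4(), // path parameter "id"
  { language: "en-US", format: "markdown" }, // query object
  { title: "Hello, world!" }, // request body (other fields omitted)
];

// keyword: true — the same values combined into a single object,
// keyed by parameter name
const keywordedArguments = {
  section: "general",
  id: v4(),
  query: { language: "en-US", format: "markdown" },
  body: { title: "Hello, world!" },
};
```
The complete keyworded example from the README follows.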
```typescript
import {
  HttpLlm,
  IHttpLlmApplication,
  IHttpLlmFunction,
  ILlmSchemaV3_1,
  OpenApi,
  OpenApiV3,
  OpenApiV3_1,
  SwaggerV2,
} from "@samchon/openapi";
import fs from "fs";
import typia from "typia";
import { v4 } from "uuid";

const main = async (): Promise<void> => {
  // read swagger document and validate it
  const swagger:
    | SwaggerV2.IDocument
    | OpenApiV3.IDocument
    | OpenApiV3_1.IDocument = JSON.parse(
    await fs.promises.readFile("swagger.json", "utf8"),
  );
  typia.assert(swagger); // recommended

  // convert to emended OpenAPI document,
  // and compose LLM function calling application
  const document: OpenApi.IDocument = OpenApi.convert(swagger);
  const application: IHttpLlmApplication<"3.1"> = HttpLlm.application({
    model: "3.1",
    document,
    options: {
      keyword: true,
    },
  });

  // Let's imagine that LLM has selected a function to call
  const func: IHttpLlmFunction<ILlmSchemaV3_1> | undefined = application.functions.find(
    // (f) => f.name === "llm_selected_fuction_name"
    (f) => f.path === "/bbs/{section}/articles/{id}" && f.method === "put",
  );
  if (func === undefined) throw new Error("No matched function exists.");

  // actual execution is by yourself
  const article = await HttpLlm.execute({
    connection: {
      host: "http://localhost:3000",
    },
    application,
    function: func,
    arguments: [
      // one single object with key-value paired
      {
        section: "general",
        id: v4(),
        query: {
          language: "en-US",
          format: "markdown",
        },
        body: {
          title: "Hello, world!",
          body: "Let's imagine that this argument is composed by LLM.",
          thumbnail: null,
        },
      },
    ],
  });
  console.log("article", article);
@@ -410,6 +336,6 @@ };
HttpLlm,
ChatGptTypeChecker,
IChatGptSchema,
IHttpLlmApplication,
IHttpLlmFunction,
ILlmSchemaV3_1,
LlmTypeCheckerV3_1,
OpenApi,
@@ -437,4 +363,4 @@ OpenApiV3,
const document: OpenApi.IDocument = OpenApi.convert(swagger);
const application: IHttpLlmApplication<"3.1"> = HttpLlm.application({
model: "3.1",
const application: IHttpLlmApplication<"chatgpt"> = HttpLlm.application({
model: "chatgpt",
document,
@@ -444,3 +370,3 @@ options: {
separate: (schema) =>
LlmTypeCheckerV3_1.isString(schema) && schema.contentMediaType !== undefined,
ChatGptTypeChecker.isString(schema) && schema.contentMediaType !== undefined,
},
@@ -450,6 +376,7 @@ });
// Let's imagine that LLM has selected a function to call
const func: IHttpLlmFunction<ILlmSchemaV3_1> | undefined = application.functions.find(
// (f) => f.name === "llm_selected_fuction_name"
(f) => f.path === "/bbs/articles/{id}" && f.method === "put",
);
const func: IHttpLlmFunction<IChatGptSchema.IParameters> | undefined =
application.functions.find(
// (f) => f.name === "llm_selected_fuction_name"
(f) => f.path === "/bbs/articles/{id}" && f.method === "put",
);
if (func === undefined) throw new Error("No matched function exists.");
@@ -466,19 +393,21 @@
function: func,
llm: [
llm: {
// LLM composed parameter values
"general",
v4(),
{
section: "general",
id: v4(),
query: {
language: "en-US",
format: "markdown",
},
{
body: {
title: "Hello, world!",
content: "Let's imagine that this argument is composed by LLM.",
},
],
human: [
},
human: {
// Human composed parameter values
{ thumbnail: "https://example.com/thumbnail.jpg" },
],
body: {
thumbnail: "https://example.com/thumbnail.jpg",
},
},
}),
@@ -485,0 +414,0 @@ });
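Taken together, the README hunks above show the migration this dev build is iterating on: the version-tagged `"3.1"` model becomes the vendor-specific `"chatgpt"` model, function schemas are typed with `IChatGptSchema.IParameters`, and positional `arguments` arrays become a single keyworded `input` object. The sketch below is assembled from those hunks only, not copied from the new README, so treat it as an approximation; the `swagger.json` path, the endpoint, and the sample values are the README's own examples, and the `typia.assert` validation step shown earlier is elided.
```typescript
import {
  HttpLlm,
  IChatGptSchema,
  IHttpLlmApplication,
  IHttpLlmFunction,
  OpenApi,
  OpenApiV3,
  OpenApiV3_1,
  SwaggerV2,
} from "@samchon/openapi";
import fs from "fs";
import { v4 } from "uuid";

const main = async (): Promise<void> => {
  // read the swagger document and convert it to the emended OpenAPI document
  const swagger:
    | SwaggerV2.IDocument
    | OpenApiV3.IDocument
    | OpenApiV3_1.IDocument = JSON.parse(
    await fs.promises.readFile("swagger.json", "utf8"),
  );
  const document: OpenApi.IDocument = OpenApi.convert(swagger);

  // compose the function calling application with the vendor-specific model
  const application: IHttpLlmApplication<"chatgpt"> = HttpLlm.application({
    model: "chatgpt", // previously: "3.1"
    document,
  });

  // function schemas are now typed with IChatGptSchema.IParameters
  const func: IHttpLlmFunction<IChatGptSchema.IParameters> | undefined =
    application.functions.find(
      (f) => f.path === "/bbs/{section}/articles/{id}" && f.method === "put",
    );
  if (func === undefined) throw new Error("No matched function exists.");

  // arguments are now passed as a single keyworded "input" object
  const article = await HttpLlm.execute({
    connection: { host: "http://localhost:3000" },
    application,
    function: func,
    input: {
      section: "general",
      id: v4(),
      query: { language: "en-US", format: "markdown" },
      body: {
        title: "Hello, world!",
        body: "Let's imagine that this argument is composed by LLM.",
        thumbnail: null,
      },
    },
  });
  console.log("article", article);
};
main().catch(console.error);
```
The separated-parameters example changes in the same way: the `llm` and `human` halves are now keyworded objects rather than positional arrays.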
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package