Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@samchon/openapi

Package Overview
Dependencies
Maintainers
0
Versions
146
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@samchon/openapi - npm Package Compare versions

Comparing version 2.0.0-dev.20241109 to 2.0.0-dev.20241110

4

lib/index.d.ts

@@ -15,8 +15,10 @@ export * from "./OpenApi";

export * from "./structures/ILlmApplication";
export * from "./structures/ILlmSchemaV3";
export * from "./structures/IChatGptSchema";
export * from "./structures/IGeminiSchema";
export * from "./structures/ILlmSchemaV3";
export * from "./structures/ILlmSchemaV3_1";
export * from "./HttpLlm";
export * from "./utils/LlmTypeCheckerV3";
export * from "./utils/LlmTypeCheckerV3_1";
export * from "./utils/ChatGptTypeChecker";
export * from "./utils/GeminiTypeChecker";

@@ -40,8 +40,10 @@ "use strict";

__exportStar(require("./structures/ILlmApplication"), exports);
__exportStar(require("./structures/ILlmSchemaV3"), exports);
__exportStar(require("./structures/IChatGptSchema"), exports);
__exportStar(require("./structures/IGeminiSchema"), exports);
__exportStar(require("./structures/ILlmSchemaV3"), exports);
__exportStar(require("./structures/ILlmSchemaV3_1"), exports);
__exportStar(require("./HttpLlm"), exports);
__exportStar(require("./utils/LlmTypeCheckerV3"), exports);
__exportStar(require("./utils/LlmTypeCheckerV3_1"), exports);
__exportStar(require("./utils/ChatGptTypeChecker"), exports);
__exportStar(require("./utils/GeminiTypeChecker"), exports);

@@ -63,3 +63,3 @@ import { IChatGptSchema } from "./IChatGptSchema";

*/
interface IOptions<Model extends ILlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema> {
interface IOptions<Model extends ILlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = ILlmApplication.ModelSchema[Model]> {
/**

@@ -66,0 +66,0 @@ * Whether to allow recursive types or not.

{
"name": "@samchon/openapi",
"version": "2.0.0-dev.20241109",
"version": "2.0.0-dev.20241110",
"description": "OpenAPI definitions and converters for 'typia' and 'nestia'.",

@@ -5,0 +5,0 @@ "main": "./lib/index.js",

@@ -35,5 +35,6 @@ # `@samchon/openapi`

- Supported schemes
- [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
- [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
- [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
- [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
- [`IOpenAiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IOpenAiSchema.ts)
- [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)

@@ -213,7 +214,16 @@ > [!TIP]

- [`HttpLlm.application()`](https://github.com/samchon/openapi/blob/master/src/HttpLlm.ts)
- [`IHttpLlmApplication`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmApplication.ts)
- [`IHttpLlmFunction`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmFunction.ts)
- [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
- [`LlmTypeChecker`](https://github.com/samchon/openapi/blob/master/src/utils/LlmTypeChecker.ts)
- Application
- [`HttpLlm.application()`](https://github.com/samchon/openapi/blob/master/src/HttpLlm.ts)
- [`IHttpLlmApplication`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmApplication.ts)
- [`IHttpLlmFunction`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmFunction.ts)
- Schemas
- [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
- [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
- [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
- [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
- Type Checkers
- [`ChatGptTypeChecker`](https://github.com/samchon/openapi/blob/master/src/utils/ChatGptTypeChecker.ts)
- [`GeminiTypeChecker`](https://github.com/samchon/openapi/blob/master/src/utils/GeminiTypeChecker.ts)
- [`LlmTypeCheckerV3`](https://github.com/samchon/openapi/blob/master/src/utils/LlmTypeCheckerV3.ts)
- [`LlmTypeCheckerV3_1`](https://github.com/samchon/openapi/blob/master/src/utils/LlmTypeCheckerV3_1.ts)

@@ -249,2 +259,3 @@ > [!NOTE]

IHttpLlmFunction,
ILlmSchemaV3_1,
OpenApi,

@@ -272,6 +283,9 @@ OpenApiV3,

const document: OpenApi.IDocument = OpenApi.convert(swagger);
const application: IHttpLlmApplication = HttpLlm.application(document);
const application: IHttpLlmApplication<"3.1"> = HttpLlm.application({
model: "3.1",
document,
});
// Let's imagine that LLM has selected a function to call
const func: IHttpLlmFunction | undefined = application.functions.find(
const func: IHttpLlmFunction<ILlmSchemaV3_1> | undefined = application.functions.find(
// (f) => f.name === "llm_selected_function_name"

@@ -316,2 +330,3 @@ (f) => f.path === "/bbs/{section}/articles/{id}" && f.method === "put",

IHttpLlmFunction,
ILlmSchemaV3_1,
OpenApi,

@@ -339,8 +354,12 @@ OpenApiV3,

const document: OpenApi.IDocument = OpenApi.convert(swagger);
const application: IHttpLlmApplication = HttpLlm.application(document, {
keyword: true,
const application: IHttpLlmApplication<"3.1"> = HttpLlm.application({
model: "3.1",
document,
options: {
keyword: true,
},
});
// Let's imagine that LLM has selected a function to call
const func: IHttpLlmFunction | undefined = application.functions.find(
const func: IHttpLlmFunction<ILlmSchemaV3_1> | undefined = application.functions.find(
// (f) => f.name === "llm_selected_function_name"

@@ -394,3 +413,4 @@ (f) => f.path === "/bbs/{section}/articles/{id}" && f.method === "put",

IHttpLlmFunction,
LlmTypeChecker,
ILlmSchemaV3_1,
LlmTypeCheckerV3_1,
OpenApi,

@@ -418,10 +438,14 @@ OpenApiV3,

const document: OpenApi.IDocument = OpenApi.convert(swagger);
const application: IHttpLlmApplication = HttpLlm.application(document, {
keyword: false,
separate: (schema) =>
LlmTypeChecker.isString(schema) && schema.contentMediaType !== undefined,
const application: IHttpLlmApplication<"3.1"> = HttpLlm.application({
model: "3.1",
document,
options: {
keyword: false,
separate: (schema) =>
LlmTypeCheckerV3_1.isString(schema) && schema.contentMediaType !== undefined,
},
});
// Let's imagine that LLM has selected a function to call
const func: IHttpLlmFunction | undefined = application.functions.find(
const func: IHttpLlmFunction<ILlmSchemaV3_1> | undefined = application.functions.find(
// (f) => f.name === "llm_selected_function_name"

@@ -428,0 +452,0 @@ (f) => f.path === "/bbs/articles/{id}" && f.method === "put",

@@ -30,9 +30,11 @@ //----

export * from "./structures/ILlmSchemaV3";
export * from "./structures/IChatGptSchema";
export * from "./structures/IGeminiSchema";
export * from "./structures/ILlmSchemaV3";
export * from "./structures/ILlmSchemaV3_1";
export * from "./HttpLlm";
export * from "./utils/LlmTypeCheckerV3";
export * from "./utils/LlmTypeCheckerV3_1";
export * from "./utils/ChatGptTypeChecker";
export * from "./utils/GeminiTypeChecker";

@@ -80,3 +80,3 @@ import { IChatGptSchema } from "./IChatGptSchema";

| IChatGptSchema
| IGeminiSchema,
| IGeminiSchema = ILlmApplication.ModelSchema[Model],
> {

@@ -83,0 +83,0 @@ /**

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc