Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

@samchon/openapi

Package Overview
Dependencies
Maintainers
0
Versions
101
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@samchon/openapi - npm Package Compare versions

Comparing version 0.5.0-dev.20240906-2 to 1.0.0-dev.20240908

26

lib/http/HttpLlmFunctionFetcher.d.ts

@@ -1,26 +0,6 @@

import { IHttpConnection } from "../structures/IHttpConnection";
import { IHttpLlmApplication } from "../structures/IHttpLlmApplication";
import { IHttpLlmFunction } from "../structures/IHttpLlmFunction";
import type { HttpLlm } from "../HttpLlm";
import { IHttpResponse } from "../structures/IHttpResponse";
export declare namespace HttpLlmFunctionFetcher {
interface IProps {
/**
* Application of the OpenAI function call schemas.
*/
application: IHttpLlmApplication;
/**
* Function schema to call.
*/
function: IHttpLlmFunction;
/**
* Connection info to the server.
*/
connection: IHttpConnection;
/**
* Arguments for the function call.
*/
arguments: any[];
}
const execute: (props: IProps) => Promise<unknown>;
const propagate: (props: IProps) => Promise<IHttpResponse>;
const execute: (props: HttpLlm.IFetchProps) => Promise<unknown>;
const propagate: (props: HttpLlm.IFetchProps) => Promise<IHttpResponse>;
}

2

lib/http/HttpLlmFunctionFetcher.js

@@ -45,3 +45,3 @@ "use strict";

HttpLlmFunctionFetcher.execute = function (props) { return __awaiter(_this, void 0, void 0, function () { return __generator(this, function (_a) {
return [2 /*return*/, HttpMigrateRouteFetcher_1.HttpMigrateRouteFetcher.request(getFetchArguments("execute", props))];
return [2 /*return*/, HttpMigrateRouteFetcher_1.HttpMigrateRouteFetcher.execute(getFetchArguments("execute", props))];
}); }); };

@@ -48,0 +48,0 @@ HttpLlmFunctionFetcher.propagate = function (props) { return __awaiter(_this, void 0, void 0, function () { return __generator(this, function (_a) {

@@ -1,14 +0,6 @@

import { IHttpConnection } from "../structures/IHttpConnection";
import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute";
import type { HttpMigration } from "../HttpMigration";
import { IHttpResponse } from "../structures/IHttpResponse";
export declare namespace HttpMigrateRouteFetcher {
interface IProps {
connection: IHttpConnection;
route: IHttpMigrateRoute;
parameters: Array<string | number | boolean | bigint | null> | Record<string, string | number | boolean | bigint | null>;
query?: object | undefined;
body?: object | undefined;
}
const request: (props: IProps) => Promise<unknown>;
const propagate: (props: IProps) => Promise<IHttpResponse>;
const execute: (props: HttpMigration.IFetchProps) => Promise<unknown>;
const propagate: (props: HttpMigration.IFetchProps) => Promise<IHttpResponse>;
}

@@ -91,3 +91,3 @@ "use strict";

var _this = this;
HttpMigrateRouteFetcher.request = function (props) { return __awaiter(_this, void 0, void 0, function () {
HttpMigrateRouteFetcher.execute = function (props) { return __awaiter(_this, void 0, void 0, function () {
var result;

@@ -107,5 +107,3 @@ var _a;

}); };
HttpMigrateRouteFetcher.propagate = function (props) {
return _Propagate("propagate", props);
};
HttpMigrateRouteFetcher.propagate = function (props) { return _Propagate("propagate", props); };
})(HttpMigrateRouteFetcher || (exports.HttpMigrateRouteFetcher = HttpMigrateRouteFetcher = {}));

@@ -112,0 +110,0 @@ var _Propagate = function (from, props) { return __awaiter(void 0, void 0, void 0, function () {

@@ -9,4 +9,64 @@ import { OpenApi } from "./OpenApi";

import { ILlmSchema } from "./structures/ILlmSchema";
/**
* LLM function calling application composer from OpenAPI document.
*
* `HttpLlm` is a module for composing LLM (Large Language Model) function calling
* application from the {@link OpenApi.IDocument OpenAPI document}, and also for
* LLM function call execution and parameter merging.
*
* At first, you can construct the LLM function calling application by the
* {@link HttpLlm.application HttpLlm.application()} function. And then the LLM
* has selected a {@link IHttpLlmFunction function} to call and composes its
* arguments, you can execute the function by
* {@link HttpLlm.execute HttpLlm.execute()} or
* {@link HttpLlm.propagate HttpLlm.propagate()}.
*
* By the way, if you have configured the {@link IHttpLlmApplication.IOptions.separate}
* option to separate the parameters into human and LLM sides, you can merge these
* human and LLM sides' parameters into one through
* {@link HttpLlm.mergeParameters HttpLlm.mergeParameters()} before the actual LLM
* function call execution.
*
* @author Jeongho Nam - https://github.com/samchon
*/
export declare namespace HttpLlm {
/**
* Convert OpenAPI document to LLM function calling application.
*
* Converts {@link OpenApi.IDocument OpenAPI document} or
* {@link IHttpMigrateApplication migrated application} to the
* {@link IHttpLlmApplication LLM function calling application}. Every
* {@link OpenApi.IOperation API operations} in the OpenAPI document are converted
* to the {@link IHttpLlmFunction LLM function} type, and they would be used for
* the LLM function calling.
*
* If you have configured the {@link IHttpLlmApplication.IOptions.separate} option,
* every parameters in the {@link IHttpLlmFunction} would be separated into both
* human and LLM sides. In that case, you can merge these human and LLM sides'
* parameters into one through {@link HttpLlm.mergeParameters} before the actual
* LLM function call execution.
*
* Additionally, if you have configured the {@link IHttpLlmApplication.IOptions.keyword}
* as `true`, the number of {@link IHttpLlmFunction.parameters} are always 1 and the
* first parameter type is always {@link ILlmSchema.IObject}. I recommend this option
* because LLM can understand the keyword arguments more easily.
*
* @param document Target OpenAPI document to convert (or migrate application)
* @param options Options for the LLM function calling application conversion
* @returns LLM function calling application
*/
const application: <Schema extends ILlmSchema, Operation extends OpenApi.IOperation>(document: OpenApi.IDocument<any, Operation> | IHttpMigrateApplication<any, Operation>, options?: Partial<IHttpLlmApplication.IOptions>) => IHttpLlmApplication<Schema>;
/**
* Convert JSON schema to LLM schema.
*
* Converts {@link OpenApi.IJsonSchema JSON schema} to {@link ILlmSchema LLM schema}.
*
* By the way, if the target JSON schema has some recursive references, the
* conversion would be failed and `null` value would be returned. It's because
* the LLM schema does not support the reference type embodied by the
* {@link OpenApi.IJsonSchema.IReference} type.
*
* @param props Schema to convert and components to refer
* @returns LLM schema or null value
*/
const schema: (props: {

@@ -16,2 +76,5 @@ components: OpenApi.IComponents;

}) => ILlmSchema | null;
/**
* Properties for the LLM function call.
*/
interface IFetchProps {

@@ -27,3 +90,3 @@ /**

/**
* Connection info to the server.
* Connection info to the HTTP server.
*/

@@ -36,11 +99,100 @@ connection: IHttpConnection;

}
/**
* Execute the LLM function call.
*
* `HttpLlm.execute()` is a function executing the target
* {@link OpenApi.IOperation API endpoint} with the connection information
* and arguments composed by Large Language Model like OpenAI (+human sometimes).
*
* By the way, if you've configured the {@link IHttpLlmApplication.IOptions.separate},
* so that the parameters are separated to human and LLM sides, you have to merge
* these human and LLM sides' parameters into one through
* {@link HttpLlm.mergeParameters} function.
*
* About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry anything.
* This `HttpLlm.execute()` function will automatically recognize the keyword arguments
* and convert them to the proper sequence.
*
* For reference, if the target API endpoint responds with a non-200/201 status,
* this would be considered as an error and the {@link HttpError} would be thrown.
* If you don't want such a rule, you can use the {@link HttpLlm.propagate}
* function instead.
*
* @param props Properties for the LLM function call
* @returns Return value (response body) from the API endpoint
* @throws HttpError when the API endpoint responds with a non-200/201 status
*/
const execute: (props: IFetchProps) => Promise<unknown>;
/**
* Propagate the LLM function call.
*
* `HttpLlm.propagate()` is a function propagating the target
* {@link OpenApi.IOperation API endpoint} with the connection information
* and arguments composed by Large Language Model like OpenAI (+human sometimes).
*
* By the way, if you've configured the {@link IHttpLlmApplication.IOptions.separate},
* so that the parameters are separated to human and LLM sides, you have to merge
* these human and LLM sides' parameters into one through
* {@link HttpLlm.mergeParameters} function.
*
* About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry anything.
* This `HttpLlm.propagate()` function will automatically recognize the keyword arguments
* and convert them to the proper sequence.
*
* For reference, the propagation means always returning the response from the API
* endpoint, even if the status is not 200/201. This is useful when you want to
* handle the response by yourself.
*
* @param props Properties for the LLM function call
* @returns Response from the API endpoint
* @throws Error only when the connection is failed
*/
const propagate: (props: IFetchProps) => Promise<IHttpResponse>;
/**
* Properties for the parameters' merging.
*/
interface IMergeProps {
/**
* Metadata of the target function.
*/
function: ILlmFunction;
/**
* Arguments composed by the LLM.
*/
llm: unknown[];
/**
* Arguments composed by the human.
*/
human: unknown[];
}
/**
* Merge the parameters.
*
* If you've configured the {@link IHttpLlmApplication.IOptions.separate} option,
* so that the parameters are separated to human and LLM sides, you can merge these
* human and LLM sides' parameters into one through this `HttpLlm.mergeParameters()`
* function before the actual LLM function call execution.
*
* On contrary, if you've not configured the
* {@link IHttpLlmApplication.IOptions.separate} option, this function would throw
* an error.
*
* @param props Properties for the parameters' merging
* @returns Merged parameter values
*/
const mergeParameters: (props: IMergeProps) => unknown[];
/**
* Merge two values.
*
* If both values are objects, then combines them in the properties level.
*
* Otherwise, returns the latter value if it's not null, otherwise the former value.
*
* - `return (y ?? x)`
*
* @param x Value X to merge
* @param y Value Y to merge
* @returns Merged value
*/
const mergeValue: (x: unknown, y: unknown) => unknown;
}

@@ -8,4 +8,54 @@ "use strict";

var LlmDataMerger_1 = require("./utils/LlmDataMerger");
/**
* LLM function calling application composer from OpenAPI document.
*
* `HttpLlm` is a module for composing LLM (Large Language Model) function calling
* application from the {@link OpenApi.IDocument OpenAPI document}, and also for
* LLM function call execution and parameter merging.
*
* At first, you can construct the LLM function calling application by the
* {@link HttpLlm.application HttpLlm.application()} function. And then the LLM
* has selected a {@link IHttpLlmFunction function} to call and composes its
* arguments, you can execute the function by
* {@link HttpLlm.execute HttpLlm.execute()} or
* {@link HttpLlm.propagate HttpLlm.propagate()}.
*
* By the way, if you have configured the {@link IHttpLlmApplication.IOptions.separate}
* option to separate the parameters into human and LLM sides, you can merge these
* human and LLM sides' parameters into one through
* {@link HttpLlm.mergeParameters HttpLlm.mergeParameters()} before the actual LLM
* function call execution.
*
* @author Jeongho Nam - https://github.com/samchon
*/
var HttpLlm;
(function (HttpLlm) {
/* -----------------------------------------------------------
COMPOSERS
----------------------------------------------------------- */
/**
* Convert OpenAPI document to LLM function calling application.
*
* Converts {@link OpenApi.IDocument OpenAPI document} or
* {@link IHttpMigrateApplication migrated application} to the
* {@link IHttpLlmApplication LLM function calling application}. Every
* {@link OpenApi.IOperation API operations} in the OpenAPI document are converted
* to the {@link IHttpLlmFunction LLM function} type, and they would be used for
* the LLM function calling.
*
* If you have configured the {@link IHttpLlmApplication.IOptions.separate} option,
* every parameters in the {@link IHttpLlmFunction} would be separated into both
* human and LLM sides. In that case, you can merge these human and LLM sides'
* parameters into one through {@link HttpLlm.mergeParameters} before the actual
* LLM function call execution.
*
* Additionally, if you have configured the {@link IHttpLlmApplication.IOptions.keyword}
* as `true`, the number of {@link IHttpLlmFunction.parameters} are always 1 and the
* first parameter type is always {@link ILlmSchema.IObject}. I recommend this option
* because LLM can understand the keyword arguments more easily.
*
* @param document Target OpenAPI document to convert (or migrate application)
* @param options Options for the LLM function calling application conversion
* @returns LLM function calling application
*/
HttpLlm.application = function (document, options) {

@@ -21,12 +71,102 @@ var _a, _b;

};
/**
* Convert JSON schema to LLM schema.
*
* Converts {@link OpenApi.IJsonSchema JSON schema} to {@link ILlmSchema LLM schema}.
*
* By the way, if the target JSON schema has some recursive references, the
* conversion would be failed and `null` value would be returned. It's because
* the LLM schema does not support the reference type embodied by the
* {@link OpenApi.IJsonSchema.IReference} type.
*
* @param props Schema to convert and components to refer
* @returns LLM schema or null value
*/
HttpLlm.schema = function (props) { return HttpLlmConverter_1.HttpLlmConverter.schema(props); };
/**
* Execute the LLM function call.
*
* `HttpLlm.execute()` is a function executing the target
* {@link OpenApi.IOperation API endpoint} with the connection information
* and arguments composed by Large Language Model like OpenAI (+human sometimes).
*
* By the way, if you've configured the {@link IHttpLlmApplication.IOptions.separate},
* so that the parameters are separated to human and LLM sides, you have to merge
* these human and LLM sides' parameters into one through
* {@link HttpLlm.mergeParameters} function.
*
* About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry anything.
* This `HttpLlm.execute()` function will automatically recognize the keyword arguments
* and convert them to the proper sequence.
*
* For reference, if the target API endpoint responds with a non-200/201 status,
* this would be considered as an error and the {@link HttpError} would be thrown.
* If you don't want such a rule, you can use the {@link HttpLlm.propagate}
* function instead.
*
* @param props Properties for the LLM function call
* @returns Return value (response body) from the API endpoint
* @throws HttpError when the API endpoint responds with a non-200/201 status
*/
HttpLlm.execute = function (props) {
return HttpLlmFunctionFetcher_1.HttpLlmFunctionFetcher.execute(props);
};
/**
* Propagate the LLM function call.
*
* `HttpLlm.propagate()` is a function propagating the target
* {@link OpenApi.IOperation API endpoint} with the connection information
* and arguments composed by Large Language Model like OpenAI (+human sometimes).
*
* By the way, if you've configured the {@link IHttpLlmApplication.IOptions.separate},
* so that the parameters are separated to human and LLM sides, you have to merge
* these human and LLM sides' parameters into one through
* {@link HttpLlm.mergeParameters} function.
*
* About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry anything.
* This `HttpLlm.propagate()` function will automatically recognize the keyword arguments
* and convert them to the proper sequence.
*
* For reference, the propagation means always returning the response from the API
* endpoint, even if the status is not 200/201. This is useful when you want to
* handle the response by yourself.
*
* @param props Properties for the LLM function call
* @returns Response from the API endpoint
* @throws Error only when the connection is failed
*/
HttpLlm.propagate = function (props) {
return HttpLlmFunctionFetcher_1.HttpLlmFunctionFetcher.propagate(props);
};
/**
* Merge the parameters.
*
* If you've configured the {@link IHttpLlmApplication.IOptions.separate} option,
* so that the parameters are separated to human and LLM sides, you can merge these
* human and LLM sides' parameters into one through this `HttpLlm.mergeParameters()`
* function before the actual LLM function call execution.
*
* On contrary, if you've not configured the
* {@link IHttpLlmApplication.IOptions.separate} option, this function would throw
* an error.
*
* @param props Properties for the parameters' merging
* @returns Merged parameter values
*/
HttpLlm.mergeParameters = function (props) {
return LlmDataMerger_1.LlmDataMerger.parameters(props);
};
/**
* Merge two values.
*
* If both values are objects, then combines them in the properties level.
*
* Otherwise, returns the latter value if it's not null, otherwise the former value.
*
* - `return (y ?? x)`
*
* @param x Value X to merge
* @param y Value Y to merge
* @returns Merged value
*/
HttpLlm.mergeValue = function (x, y) {

@@ -33,0 +173,0 @@ return LlmDataMerger_1.LlmDataMerger.value(x, y);

@@ -6,13 +6,128 @@ import { OpenApi } from "./OpenApi";

import { IHttpResponse } from "./structures/IHttpResponse";
/**
* HTTP migration application composer from OpenAPI document.
*
* `HttpMigration` is a module for composing HTTP migration application from the
* {@link OpenApi.IDocument OpenAPI document}. It is designed for helping the OpenAPI
* generator libraries, which converts {@link OpenApi.IOperation OpenAPI operations} to
* an RPC (Remote Procedure Call) function.
*
* The key feature of the `HttpModule` is the {@link HttpMigration.application} function.
* It converts the {@link OpenApi.IOperation OpenAPI operations} to the
* {@link IHttpMigrateRoute HTTP migration route}, and it normalizes the OpenAPI operations
* to the RPC function calling suitable route structure.
*
* The other functions, {@link HttpMigration.execute} and {@link HttpMigration.propagate},
* are for executing the HTTP request to the HTTP server. The {@link HttpMigration.execute}
* function returns the response body from the API endpoint when the status code is `200`
* or `201`. Otherwise, it throws an {@link HttpError} when the status code is not `200`
* or `201`. The {@link HttpMigration.propagate} function returns the response information
* from the API endpoint, including the status code, headers, and response body.
*
* The {@link HttpLlm} module is a good example utilizing this `HttpMigration` module
* for composing RPC function calling application. The {@link HttpLlm} module composes
* LLM (Large Language Model) function calling application from the OpenAPI document
* bypassing through the {@link IHttpLlmApplication} type.
*
* @author Jeongho Nam - https://github.com/samchon
*/
export declare namespace HttpMigration {
/**
* Convert HTTP migration application from OpenAPI document.
*
* `HttpMigration.application()` is a function converting the
* {@link OpenApi.IDocument OpenAPI document} and its {@link OpenApi.IOperation operations}
* to the {@link IHttpMigrateApplication HTTP migration application}.
*
* The HTTP migration application is designed for helping the OpenAPI generator
* libraries, which converts OpenAPI operations to an RPC (Remote Procedure Call)
* function. To support the OpenAPI generator libraries, {@link IHttpMigrateRoute}
* takes below normalization rules:
*
* - Path parameters are separated to atomic level.
* - Query parameters are bound into one object.
* - Header parameters are bound into one object.
* - Allow only below HTTP methods
* - `head`
* - `get`
* - `post`
* - `put`
* - `patch`
* - `delete`
* - Allow only below content media types
* - `application/json`
* - `application/x-www-form-urlencoded`
* - `multipart/form-data`
* - `text/plain`
*
* If there are some {@link OpenApi.IOperation API operations} which cannot adjust to
* the above rules, or are logically insensible, these operations would fail
* to migrate and be registered into the {@link IHttpMigrateApplication.errors}.
*
* @param document OpenAPI document to migrate.
* @returns Migrated application.
*/
const application: <Schema extends OpenApi.IJsonSchema = OpenApi.IJsonSchema, Operation extends OpenApi.IOperation<Schema> = OpenApi.IOperation<Schema>>(document: OpenApi.IDocument<Schema, Operation>) => IHttpMigrateApplication<Schema, Operation>;
/**
* Properties for the request to the HTTP server.
*/
interface IFetchProps {
/**
* Connection info to the HTTP server.
*/
connection: IHttpConnection;
/**
* Route information for the migration.
*/
route: IHttpMigrateRoute;
/**
* Path parameters.
*
* Path parameters with sequenced array or key-value paired object.
*/
parameters: Array<string | number | boolean | bigint | null> | Record<string, string | number | boolean | bigint | null>;
/**
* Query parameters as a key-value paired object.
*/
query?: object | undefined;
/**
* Request body data.
*/
body?: object | undefined;
}
const request: (props: IFetchProps) => Promise<unknown>;
/**
* Execute the HTTP request.
*
* `HttpMigration.execute()` is a function executing the HTTP request to the HTTP server.
*
* It returns the response body from the API endpoint when the status code is `200`
* or `201`. Otherwise, it throws an {@link HttpError} when the status code is not
* `200` or `201`.
*
* If you want to get more information than the response body, or get the detailed
* response information even when the status code is `200` or `201`, use the
* {@link HttpMigration.propagate} function instead.
*
* @param props Properties for the request.
* @returns Return value (response body) from the API endpoint.
* @throws HttpError when the API endpoint responds none 200/201 status.
*/
const execute: (props: IFetchProps) => Promise<unknown>;
/**
* Propagate the HTTP request.
*
* `HttpMigration.propagate()` is a function propagating the request to the HTTP server.
*
* It returns the response information from the API endpoint, including the status code,
* headers, and response body.
*
* Even if the status code is not `200` or `201`, this function
* would return the response information. By the way, if the connection to the HTTP server
* is failed, this function would throw an {@link Error}.
*
* @param props Properties for the request.
* @returns Response from the API endpoint.
* @throws Error when the connection is failed.
*/
const propagate: (props: IFetchProps) => Promise<IHttpResponse>;
}

@@ -6,10 +6,111 @@ "use strict";

var HttpMigrateRouteFetcher_1 = require("./http/HttpMigrateRouteFetcher");
/**
* HTTP migration application composer from OpenAPI document.
*
* `HttpMigration` is a module for composing HTTP migration application from the
* {@link OpenApi.IDocument OpenAPI document}. It is designed for helping the OpenAPI
* generator libraries, which converts {@link OpenApi.IOperation OpenAPI operations} to
* an RPC (Remote Procedure Call) function.
*
* The key feature of the `HttpModule` is the {@link HttpMigration.application} function.
* It converts the {@link OpenApi.IOperation OpenAPI operations} to the
* {@link IHttpMigrateRoute HTTP migration route}, and it normalizes the OpenAPI operations
* to the RPC function calling suitable route structure.
*
* The other functions, {@link HttpMigration.execute} and {@link HttpMigration.propagate},
* are for executing the HTTP request to the HTTP server. The {@link HttpMigration.execute}
* function returns the response body from the API endpoint when the status code is `200`
* or `201`. Otherwise, it throws an {@link HttpError} when the status code is not `200`
* or `201`. The {@link HttpMigration.propagate} function returns the response information
* from the API endpoint, including the status code, headers, and response body.
*
* The {@link HttpLlm} module is a good example utilizing this `HttpMigration` module
* for composing RPC function calling application. The {@link HttpLlm} module composes
* LLM (Large Language Model) function calling application from the OpenAPI document
* bypassing through the {@link IHttpLlmApplication} type.
*
* @author Jeongho Nam - https://github.com/samchon
*/
var HttpMigration;
(function (HttpMigration) {
/* -----------------------------------------------------------
COMPOSER
----------------------------------------------------------- */
/**
* Convert HTTP migration application from OpenAPI document.
*
* `HttpMigration.application()` is a function converting the
* {@link OpenApi.IDocument OpenAPI document} and its {@link OpenApi.IOperation operations}
* to the {@link IHttpMigrateApplication HTTP migration application}.
*
* The HTTP migration application is designed for helping the OpenAPI generator
* libraries, which converts OpenAPI operations to an RPC (Remote Procedure Call)
* function. To support the OpenAPI generator libraries, {@link IHttpMigrateRoute}
* takes below normalization rules:
*
* - Path parameters are separated to atomic level.
* - Query parameters are bound into one object.
* - Header parameters are bound into one object.
* - Allow only below HTTP methods
* - `head`
* - `get`
* - `post`
* - `put`
* - `patch`
* - `delete`
* - Allow only below content media types
* - `application/json`
* - `application/x-www-form-urlencoded`
* - `multipart/form-data`
* - `text/plain`
*
* If there are some {@link OpenApi.IOperation API operations} which cannot adjust to
* the above rules, or are logically insensible, these operations would fail
* to migrate and be registered into the {@link IHttpMigrateApplication.errors}.
*
* @param document OpenAPI document to migrate.
* @returns Migrated application.
*/
HttpMigration.application = function (document) {
return MigrateConverter_1.MigrateConverter.convert(document);
};
HttpMigration.request = function (props) {
return HttpMigrateRouteFetcher_1.HttpMigrateRouteFetcher.request(props);
/* -----------------------------------------------------------
FETCHERS
----------------------------------------------------------- */
/**
* Execute the HTTP request.
*
* `HttpMigration.execute()` is a function executing the HTTP request to the HTTP server.
*
* It returns the response body from the API endpoint when the status code is `200`
* or `201`. Otherwise, it throws an {@link HttpError} when the status code is not
* `200` or `201`.
*
* If you want to get more information than the response body, or get the detailed
* response information even when the status code is `200` or `201`, use the
* {@link HttpMigration.propagate} function instead.
*
* @param props Properties for the request.
* @returns Return value (response body) from the API endpoint.
* @throws HttpError when the API endpoint responds none 200/201 status.
*/
HttpMigration.execute = function (props) {
return HttpMigrateRouteFetcher_1.HttpMigrateRouteFetcher.execute(props);
};
/**
* Propagate the HTTP request.
*
* `HttpMigration.propagate()` is a function propagating the request to the HTTP server.
*
* It returns the response information from the API endpoint, including the status code,
* headers, and response body.
*
* Even if the status code is not `200` or `201`, this function
* would return the response information. By the way, if the connection to the HTTP server
* is failed, this function would throw an {@link Error}.
*
* @param props Properties for the request.
* @returns Response from the API endpoint.
* @throws Error when the connection is failed.
*/
HttpMigration.propagate = function (props) {

@@ -16,0 +117,0 @@ return HttpMigrateRouteFetcher_1.HttpMigrateRouteFetcher.propagate(props);

@@ -5,2 +5,65 @@ import { OpenApi } from "../OpenApi";

import { ILlmSchema } from "./ILlmSchema";
/**
* Application of LLM function call from OpenAPI document.
*
* `IHttpLlmApplication` is a data structure representing collection of
* {@link IHttpLlmFunction LLM function calling schemas} composed from the
* {@link OpenApi.IDocument OpenAPI document} and its {@link OpenApi.IOperation operation}
* metadata. It also contains {@link IHttpLlmApplication.errors failed operations}, and
* adjusted {@link IHttpLlmApplication.options options} during the `IHttpLlmApplication`
* construction.
*
* About the {@link OpenApi.IOperation API operations}, they are converted to
* {@link IHttpLlmFunction} type which represents LLM function calling schema.
* By the way, if there are some recursive types which can't escape the
* {@link OpenApi.IJsonSchema.IReference} type, the operation would be failed and
* pushed into the {@link IHttpLlmApplication.errors}. Otherwise not, the operation
* would be successfully converted to {@link IHttpLlmFunction} and its type schemas
* are downgraded to {@link OpenApiV3.IJsonSchema} and converted to {@link ILlmSchema}.
*
* About the options, if you've configured {@link IHttpLlmApplication.options.keyword}
* (as `true`), number of {@link IHttpLlmFunction.parameters} are always 1 and the first
* parameter type is always {@link ILlmSchema.IObject}. Otherwise, the parameters would
* be multiple, and the sequence of the parameters are following below rules.
*
* - `pathParameters`: Path parameters of {@link IHttpMigrateRoute.parameters}
* - `query`: Query parameter of {@link IHttpMigrateRoute.query}
* - `body`: Body parameter of {@link IHttpMigrateRoute.body}
*
* ```typescript
* // KEYWORD TRUE
* {
* ...pathParameters,
* query,
* body,
* }
*
* // KEYWORD FALSE
* [
* ...pathParameters,
* ...(query ? [query] : []),
* ...(body ? [body] : []),
* ]
* ```
*
* By the way, there can be some parameters (or their nested properties) which must be
* composed by human, not by LLM. File uploading feature or some sensitive information
* like a secret key (password) are the examples. In that case, you can separate the
* function parameters to both LLM and human sides by configuring the
* {@link IHttpLlmApplication.IOptions.separate} property. The separated parameters are
* assigned to the {@link IHttpLlmFunction.separated} property.
*
* For reference, the actual function call execution is not by LLM, but by you.
* When the LLM selects the proper function and fills the arguments, you just call
* the function by {@link HttpLlm.execute} with the LLM prepared arguments. And then
* informs the return value to the LLM by system prompt. The LLM will continue the next
* conversation based on the return value.
*
* Additionally, if you've configured {@link IHttpLlmApplication.IOptions.separate},
* so that the parameters are separated to human and LLM sides, you can merge these
* human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
* before the actual LLM function call execution.
*
* @author Jeongho Nam - https://github.com/samchon
*/
export interface IHttpLlmApplication<Schema extends ILlmSchema = ILlmSchema, Operation extends OpenApi.IOperation = OpenApi.IOperation, Route extends IHttpMigrateRoute = IHttpMigrateRoute> {

@@ -79,9 +142,8 @@ /**

* the parameter is always {@link ILlmSchema.IObject} type.
* Also, its properties are following below rules:
*
* - `pathParameters`: Path parameters of {@link IHttpMigrateRoute.parameters}
* - `query`: Query parameter of {@link IHttpMigrateRoute.query}
* - `body`: Body parameter of {@link IHttpMigrateRoute.body}
* Otherwise, the parameters would be multiple, and the sequence of the parameters
* are following below rules.
*
* ```typescript
* // KEYWORD TRUE
* {

@@ -92,9 +154,4 @@ * ...pathParameters,

* }
* ```
*
* Otherwise (this property value is `false`), length of the
* {@link IHttpLlmFunction.parameters} is variable, and sequence of the
* parameters are following below rules.
*
* ```typescript
* // KEYWORD FALSE
* [

@@ -101,0 +158,0 @@ * ...pathParameters,

@@ -5,23 +5,21 @@ import { OpenApi } from "../OpenApi";

/**
* LLM function metadata from HTTP (OpenAPI) operation.
* LLM function calling schema from HTTP (OpenAPI) operation.
*
* `IHttpLlmFunction` is a data structure representing a procedure converted
* from the OpenAPI operation, used for the LLM (Large Language Model)
* function calling. It's a typical RPC (Remote Procedure Call) structure
* containing the procedure {@link name}, {@link parameters}, and
* `IHttpLlmFunction` is a data structure representing a function converted
* from the {@link OpenApi.IOperation OpenAPI operation}, used for the LLM
* (Large Language Model) function calling. It's a typical RPC (Remote Procedure Call)
* structure containing the function {@link name}, {@link parameters}, and
* {@link output return type}.
*
* If you provide this `IHttpLlmFunction` data to the LLM like "OpenAI",
* the "OpenAI" will compose a function arguments by analyzing
* conversations with the user. With the LLM composed arguments, you can
* execute the procedure through {@link LlmFetcher.execute} and get the
* result.
* If you provide this `IHttpLlmFunction` data to the LLM provider like "OpenAI",
* the "OpenAI" will compose a function arguments by analyzing conversations with
* the user. With the LLM composed arguments, you can execute the function through
* {@link LlmFetcher.execute} and get the result.
*
 * For reference, the difference between `IHttpLlmFunction` and its origin source
* {@link OpenApi.IOperation} is, `IHttpLlmFunction` has converted every type
* schema informations from {@link OpenApi.IJsonSchema} to {@link ILlmSchema}
* to escape {@link OpenApi.IJsonSchema.IReference reference types}, and
* downgrade the version of the JSON schema to OpenAPI 3.0. It's because
* LLM function call feature cannot understand both reference types and
* OpenAPI 3.1 specification.
* {@link OpenApi.IOperation} is, `IHttpLlmFunction` has converted every type schema
* informations from {@link OpenApi.IJsonSchema} to {@link ILlmSchema} to escape
* {@link OpenApi.IJsonSchema.IReference reference types}, and downgrade the version
* of the JSON schema to OpenAPI 3.0. It's because LLM function call feature cannot
* understand both reference types and OpenAPI 3.1 specification.
*

@@ -155,3 +153,3 @@ * Additionally, if you've composed `IHttpLlmFunction` with

/**
* Description of the procedure.
* Description of the function.
*

@@ -217,2 +215,4 @@ * `IHttpLlmFunction.description` is composed by below rule:

* Index of the parameter.
*
* @type uint
*/

@@ -219,0 +219,0 @@ index: number;

@@ -7,4 +7,5 @@ import { OpenApi } from "../OpenApi";

* The `IHttpMigrateApplication` interface is an application migrated from
* {@link OpenAPI.IDocument OpenAPI document} to RPC (Remote Procedure Call)
* functions; {@link IHttpMigrateRoute}.
* {@link OpenAPI.IDocument OpenAPI document} for supporting the OpenAPI generator
* libraries which compose RPC (Remote Procedure Call) functions from the
* {@link OpenAPI.IOperation OpenAPI operations}.
*

@@ -11,0 +12,0 @@ * As the `IHttpMigrateApplication` and {@link IHttpMigrateRoute} have a lot of special

@@ -6,4 +6,4 @@ import { OpenApi } from "../OpenApi";

* The `IHttpMigrateRoute` is a structure representing a route information for
* OpenAPI generated RPC (Remote Procedure Call) function composed from the
* {@link OpenApi.IOperation OpenAPI operation}.
* OpenAPI generator libraries, which composes an RPC (Remote Procedure Call) function
* from the {@link OpenApi.IOperation OpenAPI operation}.
*

@@ -10,0 +10,0 @@ * As the `IHttpMigrateRoute` has a lot of special stories, when you're developing

@@ -0,5 +1,23 @@

/**
 * Structured result of an HTTP request.
 *
 * The `IHttpResponse` interface collects everything a caller may need from
 * an HTTP exchange: the {@link status} code, the {@link headers}, and the
 * response {@link body}.
 *
 * @author Jeongho Nam - https://github.com/samchon
 */
export interface IHttpResponse {
  /**
   * Status code of the response.
   *
   * @type uint
   */
  status: number;
  /**
   * Headers of the response.
   *
   * A header with multiple values is represented as a string array.
   */
  headers: Record<string, string | string[]>;
  /**
   * Body of the response.
   *
   * Typed `unknown` because the payload depends on the endpoint; narrow
   * (or validate) it before use.
   */
  body: unknown;
}

@@ -10,8 +10,8 @@ import { ILlmSchema } from "./ILlmSchema";

*
* If you provide this `ILlmFunction` data to the LLM like "OpenAI",
* the "OpenAI" will compose a function arguments by analyzing
* conversations with the user. With the LLM composed arguments, you can
* execute the function and get the result.
* If you provide this `ILlmFunction` data to the LLM provider like "OpenAI",
* the "OpenAI" will compose a function arguments by analyzing conversations
* with the user. With the LLM composed arguments, you can execute the function
* and get the result.
*
* By the way, do not sure that LLM will always provide the correct
 * By the way, do not assume that LLM will always provide the correct
* arguments. The LLM of present age is not perfect, so that you would

@@ -78,2 +78,4 @@ * better to validate the arguments before executing the function.

* Index of the parameter.
*
* @type uint
*/

@@ -80,0 +82,0 @@ index: number;

{
"name": "@samchon/openapi",
"version": "0.5.0-dev.20240906-2",
"version": "1.0.0-dev.20240908",
"description": "OpenAPI definitions and converters for 'typia' and 'nestia'.",

@@ -5,0 +5,0 @@ "main": "./lib/index.js",

@@ -1,4 +0,3 @@

import { IHttpConnection } from "../structures/IHttpConnection";
import { IHttpLlmApplication } from "../structures/IHttpLlmApplication";
import { IHttpLlmFunction } from "../structures/IHttpLlmFunction";
import type { HttpLlm } from "../HttpLlm";
import type { HttpMigration } from "../HttpMigration";
import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute";

@@ -9,28 +8,8 @@ import { IHttpResponse } from "../structures/IHttpResponse";

export namespace HttpLlmFunctionFetcher {
export interface IProps {
/**
* Application of the OpenAI function call schemas.
*/
application: IHttpLlmApplication;
export const execute = async (props: HttpLlm.IFetchProps): Promise<unknown> =>
HttpMigrateRouteFetcher.execute(getFetchArguments("execute", props));
/**
* Function schema to call.
*/
function: IHttpLlmFunction;
/**
* Connection info to the server.
*/
connection: IHttpConnection;
/**
* Arguments for the function call.
*/
arguments: any[];
}
export const execute = async (props: IProps): Promise<unknown> =>
HttpMigrateRouteFetcher.request(getFetchArguments("execute", props));
export const propagate = async (props: IProps): Promise<IHttpResponse> =>
export const propagate = async (
props: HttpLlm.IFetchProps,
): Promise<IHttpResponse> =>
HttpMigrateRouteFetcher.propagate(getFetchArguments("propagate", props));

@@ -40,4 +19,4 @@

from: string,
props: IProps,
): HttpMigrateRouteFetcher.IProps => {
props: HttpLlm.IFetchProps,
): HttpMigration.IFetchProps => {
const route: IHttpMigrateRoute = props.function.route();

@@ -44,0 +23,0 @@ if (props.application.options.keyword === true) {

@@ -0,3 +1,3 @@

import type { HttpMigration } from "../HttpMigration";
import { IHttpConnection } from "../structures/IHttpConnection";
import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute";
import { IHttpResponse } from "../structures/IHttpResponse";

@@ -7,13 +7,5 @@ import { HttpError } from "./HttpError";

export namespace HttpMigrateRouteFetcher {
export interface IProps {
connection: IHttpConnection;
route: IHttpMigrateRoute;
parameters:
| Array<string | number | boolean | bigint | null>
| Record<string, string | number | boolean | bigint | null>;
query?: object | undefined;
body?: object | undefined;
}
export const request = async (props: IProps): Promise<unknown> => {
export const execute = async (
props: HttpMigration.IFetchProps,
): Promise<unknown> => {
const result: IHttpResponse = await _Propagate("request", props);

@@ -32,4 +24,5 @@ props.route.success?.media;

export const propagate = (props: IProps): Promise<IHttpResponse> =>
_Propagate("propagate", props);
export const propagate = (
props: HttpMigration.IFetchProps,
): Promise<IHttpResponse> => _Propagate("propagate", props);
}

@@ -39,3 +32,3 @@

from: string,
props: HttpMigrateRouteFetcher.IProps,
props: HttpMigration.IFetchProps,
): Promise<IHttpResponse> => {

@@ -148,3 +141,3 @@ // VALIDATE PARAMETERS

const getPath = (
props: Pick<HttpMigrateRouteFetcher.IProps, "route" | "parameters" | "query">,
props: Pick<HttpMigration.IFetchProps, "route" | "parameters" | "query">,
): string => {

@@ -151,0 +144,0 @@ let path: string = props.route.emendedPath;

@@ -14,3 +14,53 @@ import { HttpMigration } from "./HttpMigration";

/**
* LLM function calling application composer from OpenAPI document.
*
* `HttpLlm` is a module for composing LLM (Large Language Model) function calling
* application from the {@link OpenApi.IDocument OpenAPI document}, and also for
* LLM function call execution and parameter merging.
*
* At first, you can construct the LLM function calling application by the
* {@link HttpLlm.application HttpLlm.application()} function. And then the LLM
* has selected a {@link IHttpLlmFunction function} to call and composes its
* arguments, you can execute the function by
* {@link HttpLlm.execute HttpLlm.execute()} or
* {@link HttpLlm.propagate HttpLlm.propagate()}.
*
* By the way, if you have configured the {@link IHttpLlmApplication.IOptions.separate}
* option to separate the parameters into human and LLM sides, you can merge these
* human and LLM sides' parameters into one through
* {@link HttpLlm.mergeParameters HttpLlm.mergeParameters()} before the actual LLM
* function call execution.
*
* @author Jeongho Nam - https://github.com/samchon
*/
export namespace HttpLlm {
/* -----------------------------------------------------------
COMPOSERS
----------------------------------------------------------- */
/**
* Convert OpenAPI document to LLM function calling application.
*
* Converts {@link OpenApi.IDocument OpenAPI document} or
* {@link IHttpMigrateApplication migrated application} to the
* {@link IHttpLlmApplication LLM function calling application}. Every
* {@link OpenApi.IOperation API operations} in the OpenAPI document are converted
* to the {@link IHttpLlmFunction LLM function} type, and they would be used for
* the LLM function calling.
*
* If you have configured the {@link IHttpLlmApplication.IOptions.separate} option,
* every parameters in the {@link IHttpLlmFunction} would be separated into both
* human and LLM sides. In that case, you can merge these human and LLM sides'
* parameters into one through {@link HttpLlm.mergeParameters} before the actual
* LLM function call execution.
*
* Additionally, if you have configured the {@link IHttpLlmApplication.IOptions.keyword}
* as `true`, the number of {@link IHttpLlmFunction.parameters} are always 1 and the
* first parameter type is always {@link ILlmSchema.IObject}. I recommend this option
* because LLM can understand the keyword arguments more easily.
*
* @param document Target OpenAPI document to convert (or migrate application)
* @param options Options for the LLM function calling application conversion
* @returns LLM function calling application
*/
export const application = <

@@ -39,2 +89,15 @@ Schema extends ILlmSchema,

/**
* Convert JSON schema to LLM schema.
*
* Converts {@link OpenApi.IJsonSchema JSON schema} to {@link ILlmSchema LLM schema}.
*
* By the way, if the target JSON schema has some recursive references, the
* conversion would be failed and `null` value would be returned. It's because
* the LLM schema does not support the reference type embodied by the
* {@link OpenApi.IJsonSchema.IReference} type.
*
* @param props Schema to convert and components to refer
* @returns LLM schema or null value
*/
export const schema = (props: {

@@ -45,2 +108,8 @@ components: OpenApi.IComponents;

/* -----------------------------------------------------------
FETCHERS
----------------------------------------------------------- */
/**
* Properties for the LLM function call.
*/
export interface IFetchProps {

@@ -58,3 +127,3 @@ /**

/**
* Connection info to the server.
* Connection info to the HTTP server.
*/

@@ -68,16 +137,114 @@ connection: IHttpConnection;

}
/**
 * Execute the LLM function call.
 *
 * `HttpLlm.execute()` is a function executing the target
 * {@link OpenApi.IOperation API endpoint} with the connection information
 * and arguments composed by a Large Language Model like OpenAI (+ human,
 * sometimes).
 *
 * By the way, if you've configured the {@link IHttpLlmApplication.IOptions.separate},
 * so that the parameters are separated to human and LLM sides, you have to merge
 * these human and LLM sides' parameters into one through the
 * {@link HttpLlm.mergeParameters} function before calling.
 *
 * About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry
 * about anything. This `HttpLlm.execute()` function will automatically recognize
 * the keyword arguments and convert them to the proper sequence.
 *
 * For reference, if the target API endpoint responds with a status other than
 * 200/201, this is considered an error and an {@link HttpError} is thrown.
 * If you don't want such a rule, use the {@link HttpLlm.propagate} function
 * instead.
 *
 * @param props Properties for the LLM function call
 * @returns Return value (response body) from the API endpoint
 * @throws HttpError when the API endpoint responds with a non-200/201 status
 */
export const execute = (props: IFetchProps): Promise<unknown> =>
  HttpLlmFunctionFetcher.execute(props);
/**
 * Propagate the LLM function call.
 *
 * `HttpLlm.propagate()` is a function propagating the target
 * {@link OpenApi.IOperation API endpoint} with the connection information
 * and arguments composed by a Large Language Model like OpenAI (+ human,
 * sometimes).
 *
 * By the way, if you've configured the {@link IHttpLlmApplication.IOptions.separate},
 * so that the parameters are separated to human and LLM sides, you have to merge
 * these human and LLM sides' parameters into one through the
 * {@link HttpLlm.mergeParameters} function before calling.
 *
 * About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry
 * about anything. This `HttpLlm.propagate()` function will automatically
 * recognize the keyword arguments and convert them to the proper sequence.
 *
 * For reference, "propagation" means always returning the response from the
 * API endpoint, even if the status is not 200/201. This is useful when you
 * want to handle the response by yourself.
 *
 * @param props Properties for the LLM function call
 * @returns Response from the API endpoint
 * @throws Error only when the connection itself has failed
 */
export const propagate = (props: IFetchProps): Promise<IHttpResponse> =>
  HttpLlmFunctionFetcher.propagate(props);
/* -----------------------------------------------------------
MERGERS
----------------------------------------------------------- */
/**
 * Properties for merging the separated (human + LLM composed) parameters.
 */
export interface IMergeProps {
  /**
   * Metadata of the target function.
   */
  function: ILlmFunction;
  /**
   * Arguments composed by the LLM.
   */
  llm: unknown[];
  /**
   * Arguments composed by the human.
   */
  human: unknown[];
}
/**
 * Merge the parameters.
 *
 * If you've configured the {@link IHttpLlmApplication.IOptions.separate} option,
 * so that the parameters are separated to human and LLM sides, you can merge
 * these human and LLM sides' parameters into one through this
 * `HttpLlm.mergeParameters()` function before the actual LLM function call
 * execution.
 *
 * On the contrary, if you've not configured the
 * {@link IHttpLlmApplication.IOptions.separate} option, this function would
 * throw an error.
 *
 * @param props Properties for the parameters' merging
 * @returns Merged parameter values
 */
export const mergeParameters = (props: IMergeProps): unknown[] =>
  LlmDataMerger.parameters(props);
/**
 * Merge two values.
 *
 * If both values are objects, they are combined at the property level.
 *
 * Otherwise, returns the latter value when it is neither `null` nor
 * `undefined`, and the former value otherwise — i.e. nullish coalescing:
 *
 * - `return (y ?? x)`
 *
 * @param x Value X to merge
 * @param y Value Y to merge
 * @returns Merged value
 */
export const mergeValue = (x: unknown, y: unknown): unknown =>
  LlmDataMerger.value(x, y);
}

@@ -9,3 +9,68 @@ import { OpenApi } from "./OpenApi";

/**
* HTTP migration application composer from OpenAPI document.
*
* `HttpMigration` is a module for composing HTTP migration application from the
* {@link OpenApi.IDocument OpenAPI document}. It is designed for helping the OpenAPI
* generator libraries, which converts {@link OpenApi.IOperation OpenAPI operations} to
* an RPC (Remote Procedure Call) function.
*
 * The key feature of the `HttpMigration` module is the {@link HttpMigration.application} function.
* It converts the {@link OpenApi.IOperation OpenAPI operations} to the
* {@link IHttpMigrateRoute HTTP migration route}, and it normalizes the OpenAPI operations
* to the RPC function calling suitable route structure.
*
* The other functions, {@link HttpMigration.execute} and {@link HttpMigration.propagate},
* are for executing the HTTP request to the HTTP server. The {@link HttpMigration.execute}
* function returns the response body from the API endpoint when the status code is `200`
* or `201`. Otherwise, it throws an {@link HttpError} when the status code is not `200`
* or `201`. The {@link HttpMigration.propagate} function returns the response information
* from the API endpoint, including the status code, headers, and response body.
*
* The {@link HttpLlm} module is a good example utilizing this `HttpMigration` module
* for composing RPC function calling application. The {@link HttpLlm} module composes
* LLM (Large Language Model) function calling application from the OpenAPI document
* bypassing through the {@link IHttpLlmApplication} type.
*
* @author Jeongho Nam - https://github.com/samchon
*/
export namespace HttpMigration {
/* -----------------------------------------------------------
COMPOSER
----------------------------------------------------------- */
/**
* Convert HTTP migration application from OpenAPI document.
*
* `HttpMigration.application()` is a function converting the
* {@link OpenApi.IDocument OpenAPI document} and its {@link OpenApi.IOperation operations}
* to the {@link IHttpMigrateApplication HTTP migration application}.
*
* The HTTP migration application is designed for helping the OpenAPI generator
* libraries, which converts OpenAPI operations to an RPC (Remote Procedure Call)
* function. To support the OpenAPI generator libraries, {@link IHttpMigrateRoute}
* takes below normalization rules:
*
* - Path parameters are separated to atomic level.
* - Query parameters are binded into one object.
* - Header parameters are binded into one object.
* - Allow only below HTTP methods
* - `head`
* - `get`
* - `post`
* - `put`
* - `patch`
* - `delete`
* - Allow only below content media types
* - `application/json`
* - `application/x-www-form-urlencoded`
* - `multipart/form-data`
* - `text/plain`
*
 * If there're some {@link OpenApi.IOperation API operations} which cannot adjust
 * the above rules or are logically insensible, these operations would be
* failed to migrate and registered into the {@link IHttpMigrateApplication.errors}.
*
* @param document OpenAPI document to migrate.
* @returns Migrated application.
*/
export const application = <

@@ -19,15 +84,77 @@ Schema extends OpenApi.IJsonSchema = OpenApi.IJsonSchema,

/**
* Properties for the request to the HTTP server.
*/
export interface IFetchProps {
/**
* Connection info to the HTTP server.
*/
connection: IHttpConnection;
/**
* Route information for the migration.
*/
route: IHttpMigrateRoute;
/**
* Path parameters.
*
* Path parameters with sequenced array or key-value paired object.
*/
parameters:
| Array<string | number | boolean | bigint | null>
| Record<string, string | number | boolean | bigint | null>;
/**
* Query parameters as a key-value paired object.
*/
query?: object | undefined;
/**
* Request body data.
*/
body?: object | undefined;
}
export const request = (props: IFetchProps): Promise<unknown> =>
HttpMigrateRouteFetcher.request(props);
/* -----------------------------------------------------------
FETCHERS
----------------------------------------------------------- */
/**
* Execute the HTTP request.
*
* `HttpMigration.execute()` is a function executing the HTTP request to the HTTP server.
*
* It returns the response body from the API endpoint when the status code is `200`
* or `201`. Otherwise, it throws an {@link HttpError} when the status code is not
* `200` or `201`.
*
* If you want to get more information than the response body, or get the detailed
* response information even when the status code is `200` or `201`, use the
* {@link HttpMigration.propagate} function instead.
*
* @param props Properties for the request.
* @returns Return value (response body) from the API endpoint.
* @throws HttpError when the API endpoint responds none 200/201 status.
*/
export const execute = (props: IFetchProps): Promise<unknown> =>
HttpMigrateRouteFetcher.execute(props);
/**
* Propagate the HTTP request.
*
* `HttpMigration.propagate()` is a function propagating the request to the HTTP server.
*
* It returns the response information from the API endpoint, including the status code,
* headers, and response body.
*
* Even if the status code is not `200` or `201`, this function
* would return the response information. By the way, if the connection to the HTTP server
* is failed, this function would throw an {@link Error}.
*
* @param props Properties for the request.
* @returns Response from the API endpoint.
* @throws Error when the connection is failed.
*/
export const propagate = (props: IFetchProps): Promise<IHttpResponse> =>
HttpMigrateRouteFetcher.propagate(props);
}

@@ -6,2 +6,65 @@ import { OpenApi } from "../OpenApi";

/**
* Application of LLM function call from OpenAPI document.
*
* `IHttpLlmApplication` is a data structure representing collection of
* {@link IHttpLlmFunction LLM function calling schemas} composed from the
* {@link OpenApi.IDocument OpenAPI document} and its {@link OpenApi.IOperation operation}
* metadata. It also contains {@link IHttpLlmApplication.errors failed operations}, and
* adjusted {@link IHttpLlmApplication.options options} during the `IHttpLlmApplication`
* construction.
*
* About the {@link OpenApi.IOperation API operations}, they are converted to
* {@link IHttpLlmFunction} type which represents LLM function calling schema.
 * By the way, if there are some recursive types which can't escape the
* {@link OpenApi.IJsonSchema.IReference} type, the operation would be failed and
* pushed into the {@link IHttpLlmApplication.errors}. Otherwise not, the operation
* would be successfully converted to {@link IHttpLlmFunction} and its type schemas
* are downgraded to {@link OpenApiV3.IJsonSchema} and converted to {@link ILlmSchema}.
*
* About the options, if you've configured {@link IHttpLlmApplication.options.keyword}
* (as `true`), number of {@link IHttpLlmFunction.parameters} are always 1 and the first
* parameter type is always {@link ILlmSchema.IObject}. Otherwise, the parameters would
* be multiple, and the sequence of the parameters are following below rules.
*
* - `pathParameters`: Path parameters of {@link IHttpMigrateRoute.parameters}
* - `query`: Query parameter of {@link IHttpMigrateRoute.query}
* - `body`: Body parameter of {@link IHttpMigrateRoute.body}
*
* ```typescript
* // KEYWORD TRUE
* {
* ...pathParameters,
* query,
* body,
* }
*
* // KEYWORD FALSE
* [
* ...pathParameters,
* ...(query ? [query] : []),
* ...(body ? [body] : []),
* ]
* ```
*
* By the way, there can be some parameters (or their nested properties) which must be
* composed by human, not by LLM. File uploading feature or some sensitive information
 * like a secret key (password) are the examples. In that case, you can separate the
* function parameters to both LLM and human sides by configuring the
* {@link IHttpLlmApplication.IOptions.separate} property. The separated parameters are
* assigned to the {@link IHttpLlmFunction.separated} property.
*
* For reference, the actual function call execution is not by LLM, but by you.
* When the LLM selects the proper function and fills the arguments, you just call
* the function by {@link HttpLlm.execute} with the LLM prepared arguments. And then
* informs the return value to the LLM by system prompt. The LLM will continue the next
* conversation based on the return value.
*
* Additionally, if you've configured {@link IHttpLlmApplication.IOptions.separate},
* so that the parameters are separated to human and LLM sides, you can merge these
 * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
* before the actual LLM function call execution.
*
* @author Jeongho Nam - https://github.com/samchon
*/
export interface IHttpLlmApplication<

@@ -95,9 +158,8 @@ Schema extends ILlmSchema = ILlmSchema,

 * the parameter is always {@link ILlmSchema.IObject} type.
* Also, its properties are following below rules:
*
* - `pathParameters`: Path parameters of {@link IHttpMigrateRoute.parameters}
* - `query`: Query parameter of {@link IHttpMigrateRoute.query}
* - `body`: Body parameter of {@link IHttpMigrateRoute.body}
* Otherwise, the parameters would be multiple, and the sequence of the parameters
* are following below rules.
*
* ```typescript
* // KEYWORD TRUE
* {

@@ -108,9 +170,4 @@ * ...pathParameters,

* }
* ```
*
* Otherwise (this property value is `false`), length of the
* {@link IHttpLlmFunction.parameters} is variable, and sequence of the
* parameters are following below rules.
*
* ```typescript
* // KEYWORD FALSE
* [

@@ -117,0 +174,0 @@ * ...pathParameters,

@@ -6,23 +6,21 @@ import { OpenApi } from "../OpenApi";

/**
* LLM function metadata from HTTP (OpenAPI) operation.
* LLM function calling schema from HTTP (OpenAPI) operation.
*
* `IHttpLlmFunction` is a data structure representing a procedure converted
* from the OpenAPI operation, used for the LLM (Large Language Model)
* function calling. It's a typical RPC (Remote Procedure Call) structure
* containing the procedure {@link name}, {@link parameters}, and
* `IHttpLlmFunction` is a data structure representing a function converted
* from the {@link OpenApi.IOperation OpenAPI operation}, used for the LLM
* (Large Language Model) function calling. It's a typical RPC (Remote Procedure Call)
* structure containing the function {@link name}, {@link parameters}, and
* {@link output return type}.
*
* If you provide this `IHttpLlmFunction` data to the LLM like "OpenAI",
* the "OpenAI" will compose a function arguments by analyzing
* conversations with the user. With the LLM composed arguments, you can
* execute the procedure through {@link LlmFetcher.execute} and get the
* result.
* If you provide this `IHttpLlmFunction` data to the LLM provider like "OpenAI",
* the "OpenAI" will compose a function arguments by analyzing conversations with
* the user. With the LLM composed arguments, you can execute the function through
* {@link LlmFetcher.execute} and get the result.
*
 * For reference, the difference between `IHttpLlmFunction` and its origin source
* {@link OpenApi.IOperation} is, `IHttpLlmFunction` has converted every type
* schema informations from {@link OpenApi.IJsonSchema} to {@link ILlmSchema}
* to escape {@link OpenApi.IJsonSchema.IReference reference types}, and
* downgrade the version of the JSON schema to OpenAPI 3.0. It's because
* LLM function call feature cannot understand both reference types and
* OpenAPI 3.1 specification.
* {@link OpenApi.IOperation} is, `IHttpLlmFunction` has converted every type schema
* informations from {@link OpenApi.IJsonSchema} to {@link ILlmSchema} to escape
* {@link OpenApi.IJsonSchema.IReference reference types}, and downgrade the version
* of the JSON schema to OpenAPI 3.0. It's because LLM function call feature cannot
* understand both reference types and OpenAPI 3.1 specification.
*

@@ -167,3 +165,3 @@ * Additionally, if you've composed `IHttpLlmFunction` with

/**
* Description of the procedure.
* Description of the function.
*

@@ -233,2 +231,4 @@ * `IHttpLlmFunction.description` is composed by below rule:

* Index of the parameter.
*
* @type uint
*/

@@ -235,0 +235,0 @@ index: number;

@@ -8,4 +8,5 @@ import { OpenApi } from "../OpenApi";

* The `IHttpMigrateApplication` interface is an application migrated from
* {@link OpenAPI.IDocument OpenAPI document} to RPC (Remote Procedure Call)
* functions; {@link IHttpMigrateRoute}.
* {@link OpenAPI.IDocument OpenAPI document} for supporting the OpenAPI generator
* libraries which compose RPC (Remote Procedure Call) functions from the
* {@link OpenAPI.IOperation OpenAPI operations}.
*

@@ -12,0 +13,0 @@ * As the `IHttpMigrateApplication` and {@link IHttpMigrateRoute} have a lot of special

@@ -7,4 +7,4 @@ import { OpenApi } from "../OpenApi";

* The `IHttpMigrateRoute` is a structure representing a route information for
* OpenAPI generated RPC (Remote Procedure Call) function composed from the
* {@link OpenApi.IOperation OpenAPI operation}.
* OpenAPI generator libraries, which composes an RPC (Remote Procedure Call) function
* from the {@link OpenApi.IOperation OpenAPI operation}.
*

@@ -11,0 +11,0 @@ * As the `IHttpMigrateRoute` has a lot of special stories, when you're developing

@@ -0,5 +1,25 @@

/**
 * Structured result of an HTTP request.
 *
 * The `IHttpResponse` interface collects everything a caller may need from
 * an HTTP exchange: the {@link status} code, the {@link headers}, and the
 * response {@link body}.
 *
 * @author Jeongho Nam - https://github.com/samchon
 */
export interface IHttpResponse {
  /**
   * Status code of the response.
   *
   * @type uint
   */
  status: number;
  /**
   * Headers of the response.
   *
   * A header with multiple values is represented as a string array.
   */
  headers: Record<string, string | string[]>;
  /**
   * Body of the response.
   *
   * Typed `unknown` because the payload depends on the endpoint; narrow
   * (or validate) it before use.
   */
  body: unknown;
}

@@ -11,8 +11,8 @@ import { ILlmSchema } from "./ILlmSchema";

*
* If you provide this `ILlmFunction` data to the LLM like "OpenAI",
* the "OpenAI" will compose a function arguments by analyzing
* conversations with the user. With the LLM composed arguments, you can
* execute the function and get the result.
* If you provide this `ILlmFunction` data to the LLM provider like "OpenAI",
* the "OpenAI" will compose a function arguments by analyzing conversations
* with the user. With the LLM composed arguments, you can execute the function
* and get the result.
*
* By the way, do not sure that LLM will always provide the correct
 * By the way, do not assume that LLM will always provide the correct
* arguments. The LLM of present age is not perfect, so that you would

@@ -85,2 +85,4 @@ * better to validate the arguments before executing the function.

* Index of the parameter.
*
* @type uint
*/

@@ -87,0 +89,0 @@ index: number;

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc