New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

@samchon/openapi

Package Overview
Dependencies
Maintainers
0
Versions
182
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@samchon/openapi - npm Package Compare versions

Comparing version 2.0.0-dev.20241108 to 2.0.0-dev.20241109

lib/converters/ChatGptConverter.mjs

5

lib/converters/ChatGptConverter.d.ts

@@ -8,2 +8,7 @@ import { OpenApi } from "../OpenApi";

}) => IChatGptSchema.ITop | null;
const separate: (props: {
top: IChatGptSchema.ITop;
predicate: (schema: IChatGptSchema) => boolean;
schema: IChatGptSchema;
}) => [IChatGptSchema | null, IChatGptSchema | null];
}

@@ -29,4 +29,16 @@ "use strict";

};
// tslib downlevel helper: returns an iterator over `o`.
// Prefers the native Symbol.iterator when available; otherwise synthesizes
// an index-based iterator for array-like values (objects with a numeric
// `length`). Throws TypeError for non-iterable inputs.
var __values = (this && this.__values) || function(o) {
var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
// Native iterable: delegate to its own iterator.
if (m) return m.call(o);
// Array-like fallback: iterate indices 0..length-1.
if (o && typeof o.length === "number") return {
next: function () {
// Once exhausted, clear `o` so `done` flips to true and value is undefined.
if (o && i >= o.length) o = void 0;
return { value: o && o[i++], done: !o };
}
};
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.ChatGptConverter = void 0;
var ChatGptTypeChecker_1 = require("../utils/ChatGptTypeChecker");
var OpenApiTypeChecker_1 = require("../utils/OpenApiTypeChecker");

@@ -143,2 +155,136 @@ var ChatGptConverter;

};
// Split a ChatGPT JSON schema into a [llm, human] pair according to
// `props.predicate`. The first element is the portion the LLM may compose,
// the second the portion a human must supply; either side is null when empty.
// `props.top` is the root schema carrying the shared $defs dictionary, needed
// when recursing through $ref schemas.
ChatGptConverter.separate = function (props) {
// Whole schema matches the predicate -> everything goes to the human side.
if (props.predicate(props.schema) === true)
return [null, props.schema];
// unknown / oneOf schemas are not decomposed -> kept whole on the LLM side.
else if (ChatGptTypeChecker_1.ChatGptTypeChecker.isUnknown(props.schema) ||
ChatGptTypeChecker_1.ChatGptTypeChecker.isOneOf(props.schema))
return [props.schema, null];
// Composite types recurse into their members.
else if (ChatGptTypeChecker_1.ChatGptTypeChecker.isObject(props.schema))
return separateObject({
top: props.top,
predicate: props.predicate,
schema: props.schema,
});
else if (ChatGptTypeChecker_1.ChatGptTypeChecker.isArray(props.schema))
return separateArray({
top: props.top,
predicate: props.predicate,
schema: props.schema,
});
else if (ChatGptTypeChecker_1.ChatGptTypeChecker.isReference(props.schema))
return separateReference({
top: props.top,
predicate: props.predicate,
schema: props.schema,
});
// Atomic schema not matching the predicate -> LLM side.
return [props.schema, null];
};
// Separate an array schema by separating its `items` schema; each non-null
// half clones the array schema with its corresponding items half.
var separateArray = function (props) {
var _a = __read(ChatGptConverter.separate({
top: props.top,
predicate: props.predicate,
schema: props.schema.items,
}), 2), x = _a[0], y = _a[1];
return [
x !== null
? __assign(__assign({}, props.schema), { items: x }) : null,
y !== null
? __assign(__assign({}, props.schema), { items: y }) : null,
];
};
// Separate an object schema property-by-property (and through
// additionalProperties) into [llm, human] halves. A half collapses to null
// when it ends up with no properties and additionalProperties === false.
var separateObject = function (props) {
var e_1, _a;
var _b;
// Start both halves as clones of the source schema with empty properties.
var llm = __assign(__assign({}, props.schema), { properties: {} });
var human = __assign(__assign({}, props.schema), { properties: {} });
// Downleveled for-of over Object.entries(properties ?? {}); the
// try/catch/finally is the standard tslib iterator-cleanup pattern.
try {
for (var _c = __values(Object.entries((_b = props.schema.properties) !== null && _b !== void 0 ? _b : {})), _d = _c.next(); !_d.done; _d = _c.next()) {
var _e = __read(_d.value, 2), key = _e[0], value = _e[1];
var _f = __read(ChatGptConverter.separate({
top: props.top,
predicate: props.predicate,
schema: value,
}), 2), x = _f[0], y = _f[1];
// Route each separated property half to its owning side.
if (x !== null)
llm.properties[key] = x;
if (y !== null)
human.properties[key] = y;
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) _a.call(_c);
}
finally { if (e_1) throw e_1.error; }
}
// additionalProperties: separate it when it is itself a schema object;
// otherwise both halves explicitly disallow extra properties.
if (typeof props.schema.additionalProperties === "object" &&
props.schema.additionalProperties !== null) {
var _g = __read(ChatGptConverter.separate({
top: props.top,
predicate: props.predicate,
schema: props.schema.additionalProperties,
}), 2), x = _g[0], y = _g[1];
if (x !== null)
llm.additionalProperties = x;
if (y !== null)
human.additionalProperties = y;
}
else {
llm.additionalProperties = false;
human.additionalProperties = false;
}
// Null out empty halves; shrinkRequired drops `required` keys whose
// property moved entirely to the other half.
return [
Object.keys(llm.properties).length === 0 &&
llm.additionalProperties === false
? null
: shrinkRequired(llm),
Object.keys(human.properties).length === 0 &&
human.additionalProperties === false
? null
: shrinkRequired(human),
];
};
// Separate a $ref schema into [llm, human] reference halves, materializing
// the separated definitions in props.top.$defs under "<key>.Llm" and
// "<key>.Human". Returns a reference schema per side, or null for an empty side.
var separateReference = function (props) {
    var key = props.schema.$ref.split("#/$defs/")[1];
    // BUGFIX: the original read props.top.$defs with optional chaining but then
    // assigned into it unconditionally below (pre-assignment step), throwing a
    // TypeError whenever $defs was absent. Ensure the container exists first;
    // all other paths behave exactly as before.
    if (props.top.$defs === undefined)
        props.top.$defs = {};
    var defs = props.top.$defs;
    // FIND EXISTING: if this reference was already separated (or is currently
    // being separated higher up the recursion), reuse the pre-assigned entries.
    if (defs["".concat(key, ".Human")] || defs["".concat(key, ".Llm")])
        return [
            defs["".concat(key, ".Llm")]
                ? __assign(__assign({}, props.schema), { $ref: "#/$defs/".concat(key, ".Llm") })
                : null,
            defs["".concat(key, ".Human")]
                ? __assign(__assign({}, props.schema), { $ref: "#/$defs/".concat(key, ".Human") })
                : null,
        ];
    // PRE-ASSIGNMENT: placeholders stop infinite recursion on self-referential types.
    defs["".concat(key, ".Llm")] = {};
    defs["".concat(key, ".Human")] = {};
    // DO COMPOSE: separate the referenced definition itself.
    var schema = defs[key];
    var _f = __read(ChatGptConverter.separate({
        top: props.top,
        predicate: props.predicate,
        schema: schema,
    }), 2), llm = _f[0], human = _f[1];
    // Keep only the halves that actually materialized; drop the placeholders
    // of empty halves so they do not pollute $defs.
    if (llm === null)
        delete defs["".concat(key, ".Llm")];
    else
        defs["".concat(key, ".Llm")] = llm;
    if (human === null)
        delete defs["".concat(key, ".Human")];
    else
        defs["".concat(key, ".Human")] = human;
    // FINALIZE: return reference schemas pointing at the separated definitions.
    return [
        llm !== null
            ? __assign(__assign({}, props.schema), { $ref: "#/$defs/".concat(key, ".Llm") })
            : null,
        human !== null
            ? __assign(__assign({}, props.schema), { $ref: "#/$defs/".concat(key, ".Human") })
            : null,
    ];
};
// Drop entries from `s.required` that no longer exist in `s.properties`
// (properties may have been moved to the other half during separation).
// Mutates and returns the same schema object.
var shrinkRequired = function (s) {
    if (s.required === undefined)
        return s;
    s.required = s.required.filter(function (key) {
        var properties = s.properties;
        return properties != null && properties[key] !== undefined;
    });
    return s;
};
})(ChatGptConverter || (exports.ChatGptConverter = ChatGptConverter = {}));

4

lib/converters/GeminiConverter.d.ts

@@ -9,2 +9,6 @@ import { OpenApi } from "../OpenApi";

}) => IGeminiSchema | null;
const separate: (props: {
predicate: (schema: IGeminiSchema) => boolean;
schema: IGeminiSchema;
}) => [IGeminiSchema | null, IGeminiSchema | null];
}

13

lib/converters/GeminiConverter.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GeminiConverter = void 0;
var LlmTypeChecker_1 = require("../utils/LlmTypeChecker");
var HttpLlmConverter_1 = require("./HttpLlmConverter");
var LlmTypeCheckerV3_1 = require("../utils/LlmTypeCheckerV3");
var LlmConverterV3_1 = require("./LlmConverterV3");
var GeminiConverter;
(function (GeminiConverter) {
GeminiConverter.schema = function (props) {
var schema = HttpLlmConverter_1.HttpLlmConverter.schema(props);
var schema = LlmConverterV3_1.LlmConverterV3.schema(props);
if (schema === null)
return null;
var union = false;
LlmTypeChecker_1.LlmTypeChecker.visit(schema, function (v) {
if (LlmTypeChecker_1.LlmTypeChecker.isOneOf(v))
LlmTypeCheckerV3_1.LlmTypeCheckerV3.visit(schema, function (v) {
if (LlmTypeCheckerV3_1.LlmTypeCheckerV3.isOneOf(v))
union = true;

@@ -19,2 +19,5 @@ });

};
// Delegate LLM/human parameter separation to the v3 converter
// (LlmConverterV3.separate); Gemini reuses the v3 separation logic.
GeminiConverter.separate = function (props) {
return LlmConverterV3_1.LlmConverterV3.separate(props);
};
})(GeminiConverter || (exports.GeminiConverter = GeminiConverter = {}));
import { OpenApi } from "../OpenApi";
import { IChatGptSchema } from "../structures/IChatGptSchema";
import { IGeminiSchema } from "../structures/IGeminiSchema";
import { IHttpLlmApplication } from "../structures/IHttpLlmApplication";
import { IHttpMigrateApplication } from "../structures/IHttpMigrateApplication";
import { ILlmSchema } from "../structures/ILlmSchema";
import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute";
import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "../structures/ILlmSchemaV3_1";
export declare namespace HttpLlmConverter {
const compose: (migrate: IHttpMigrateApplication, options: IHttpLlmApplication.IOptions) => IHttpLlmApplication;
const schema: (props: {
const compose: <Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model], Operation extends OpenApi.IOperation = OpenApi.IOperation<OpenApi.IJsonSchema>, Route extends IHttpMigrateRoute = IHttpMigrateRoute<OpenApi.IJsonSchema, Operation>>(props: {
model: Model;
migrate: IHttpMigrateApplication<OpenApi.IJsonSchema, Operation>;
options: IHttpLlmApplication.IOptions<Model, Schema>;
}) => IHttpLlmApplication<Model, Schema, Operation, Route>;
const schema: <Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model]>(props: {
model: Model;
components: OpenApi.IComponents;
schema: OpenApi.IJsonSchema;
recursive: false | number;
}) => ILlmSchema | null;
}) => Schema | null;
}

@@ -40,11 +40,11 @@ "use strict";

exports.HttpLlmConverter = void 0;
var LlmSchemaSeparator_1 = require("../utils/LlmSchemaSeparator");
var LlmTypeChecker_1 = require("../utils/LlmTypeChecker");
var OpenApiTypeChecker_1 = require("../utils/OpenApiTypeChecker");
var OpenApiV3Downgrader_1 = require("./OpenApiV3Downgrader");
var ChatGptConverter_1 = require("./ChatGptConverter");
var GeminiConverter_1 = require("./GeminiConverter");
var LlmConverterV3_2 = require("./LlmConverterV3");
var LlmConverterV3_1_1 = require("./LlmConverterV3_1");
var HttpLlmConverter;
(function (HttpLlmConverter) {
HttpLlmConverter.compose = function (migrate, options) {
HttpLlmConverter.compose = function (props) {
// COMPOSE FUNCTIONS
var errors = migrate.errors.map(function (e) { return ({
var errors = props.migrate.errors.map(function (e) { return ({
method: e.method,

@@ -56,7 +56,12 @@ path: e.path,

}); });
var functions = migrate.routes
var functions = props.migrate.routes
.map(function (route) {
if (route.method === "head")
return null;
var func = composeFunction(options)(migrate.document().components)(route);
var func = composeFunction({
model: props.model,
options: props.options,
components: props.migrate.document().components,
route: route,
});
if (func === null)

@@ -74,122 +79,151 @@ errors.push({

return {
openapi: "3.0.3",
model: props.model,
options: props.options,
functions: functions,
errors: errors,
options: options,
};
};
HttpLlmConverter.schema = function (props) {
var resolved = OpenApiTypeChecker_1.OpenApiTypeChecker.escape({
return CASTERS[props.model]({
components: props.components,
recursive: props.recursive,
schema: props.schema,
recursive: props.recursive,
});
if (resolved === null)
return null;
var downgraded = OpenApiV3Downgrader_1.OpenApiV3Downgrader.downgradeSchema({
original: {},
downgraded: {},
})(resolved);
LlmTypeChecker_1.LlmTypeChecker.visit(downgraded, function (schema) {
if (LlmTypeChecker_1.LlmTypeChecker.isOneOf(schema) &&
schema.discriminator !== undefined)
delete schema.discriminator;
};
})(HttpLlmConverter || (exports.HttpLlmConverter = HttpLlmConverter = {}));
var composeFunction = function (props) {
var _a, _b, _c;
var cast = function (s) {
return CASTERS[props.model]({
components: props.components,
recursive: props.options.recursive,
schema: s,
});
return downgraded;
};
})(HttpLlmConverter || (exports.HttpLlmConverter = HttpLlmConverter = {}));
var composeFunction = function (options) {
return function (components) {
return function (route) {
var _a, _b, _c;
// CAST SCHEMA TYPES
var cast = function (s) {
return HttpLlmConverter.schema({
components: components,
schema: s,
recursive: options.recursive,
});
};
var output = route.success && route.success ? cast(route.success.schema) : undefined;
if (output === null)
return null;
var properties = __spreadArray(__spreadArray(__spreadArray([], __read(route.parameters.map(function (p) {
var _a, _b;
return ({
key: p.key,
schema: __assign(__assign({}, p.schema), { title: (_a = p.parameter().title) !== null && _a !== void 0 ? _a : p.schema.title, description: (_b = p.parameter().description) !== null && _b !== void 0 ? _b : p.schema.description }),
});
})), false), __read((route.query
? [
{
key: route.query.key,
schema: __assign(__assign({}, route.query.schema), { title: (_a = route.query.title()) !== null && _a !== void 0 ? _a : route.query.schema.title, description: (_b = route.query.description()) !== null && _b !== void 0 ? _b : route.query.schema.description }),
},
]
: [])), false), __read((route.body
? [
{
key: route.body.key,
schema: __assign(__assign({}, route.body.schema), { description: (_c = route.body.description()) !== null && _c !== void 0 ? _c : route.body.schema.description }),
},
]
: [])), false).map(function (o) { return [o.key, cast(o.schema)]; });
if (properties.some(function (_a) {
var _b = __read(_a, 2), _k = _b[0], v = _b[1];
return v === null;
}))
return null;
// COMPOSE PARAMETERS
var parameters = options.keyword
? [
{
type: "object",
properties: Object.fromEntries(properties),
},
]
: properties.map(function (_a) {
var _b = __read(_a, 2), _k = _b[0], v = _b[1];
return v;
});
var operation = route.operation();
// FINALIZATION
return {
method: route.method,
path: route.path,
name: route.accessor.join("_"),
strict: true,
var output = props.route.success && props.route.success
? cast(props.route.success.schema)
: undefined;
if (output === null)
return null;
var properties = __spreadArray(__spreadArray(__spreadArray([], __read(props.route.parameters.map(function (p) {
var _a, _b;
return ({
key: p.key,
schema: __assign(__assign({}, p.schema), { title: (_a = p.parameter().title) !== null && _a !== void 0 ? _a : p.schema.title, description: (_b = p.parameter().description) !== null && _b !== void 0 ? _b : p.schema.description }),
});
})), false), __read((props.route.query
? [
{
key: props.route.query.key,
schema: __assign(__assign({}, props.route.query.schema), { title: (_a = props.route.query.title()) !== null && _a !== void 0 ? _a : props.route.query.schema.title, description: (_b = props.route.query.description()) !== null && _b !== void 0 ? _b : props.route.query.schema.description }),
},
]
: [])), false), __read((props.route.body
? [
{
key: props.route.body.key,
schema: __assign(__assign({}, props.route.body.schema), { description: (_c = props.route.body.description()) !== null && _c !== void 0 ? _c : props.route.body.schema.description }),
},
]
: [])), false).map(function (o) { return [o.key, cast(o.schema)]; });
if (properties.some(function (_a) {
var _b = __read(_a, 2), _k = _b[0], v = _b[1];
return v === null;
}))
return null;
// COMPOSE PARAMETERS
var parameters = props.options.keyword
? [
{
type: "object",
properties: Object.fromEntries(properties),
additionalProperties: false,
},
]
: properties.map(function (_a) {
var _b = __read(_a, 2), _k = _b[0], v = _b[1];
return v;
});
var operation = props.route.operation();
// FINALIZATION
return {
method: props.route.method,
path: props.route.path,
name: props.route.accessor.join("_"),
strict: true,
parameters: parameters,
separated: props.options.separate
? separateParameters({
model: props.model,
predicate: props.options.separate,
parameters: parameters,
separated: options.separate
? LlmSchemaSeparator_1.LlmSchemaSeparator.parameters({
parameters: parameters,
predicator: options.separate,
})
: undefined,
output: output
? OpenApiV3Downgrader_1.OpenApiV3Downgrader.downgradeSchema({
original: {},
downgraded: {},
})(output)
: undefined,
description: (function () {
var _a;
if (operation.summary && operation.description) {
return operation.description.startsWith(operation.summary)
? operation.description
: [
operation.summary,
operation.summary.endsWith(".") ? "" : ".",
"\n\n",
operation.description,
].join("");
}
return (_a = operation.description) !== null && _a !== void 0 ? _a : operation.summary;
})(),
deprecated: operation.deprecated,
tags: operation.tags,
route: function () { return route; },
operation: function () { return operation; },
};
};
})
: undefined,
output: output,
description: (function () {
var _a;
if (operation.summary && operation.description) {
return operation.description.startsWith(operation.summary)
? operation.description
: [
operation.summary,
operation.summary.endsWith(".") ? "" : ".",
"\n\n",
operation.description,
].join("");
}
return (_a = operation.description) !== null && _a !== void 0 ? _a : operation.summary;
})(),
deprecated: operation.deprecated,
tags: operation.tags,
route: function () { return props.route; },
operation: function () { return props.route.operation(); },
};
};
// Separate every top-level parameter schema into [llm, human] halves using
// the model-specific separator, keeping each schema's original positional
// index so the two argument lists can later be merged back together.
// NOTE(review): the separator is invoked with only { predicate, schema },
// while ChatGptConverter.separate also reads props.top — confirm the chatgpt
// separator tolerates a missing `top` here.
var separateParameters = function (props) {
var separator = SEPARATORS[props.model];
var indexes = props.parameters.map(function (schema) {
return separator({
predicate: props.predicate,
schema: schema,
});
});
return {
// LLM side: first element of each [llm, human] pair, minus null (empty) halves.
llm: indexes
.map(function (_a, index) {
var _b = __read(_a, 1), llm = _b[0];
return ({
index: index,
schema: llm,
});
})
.filter(function (_a) {
var schema = _a.schema;
return schema !== null;
}),
// Human side: second element of each pair, minus null (empty) halves.
human: indexes
.map(function (_a, index) {
var _b = __read(_a, 2), human = _b[1];
return ({
index: index,
schema: human,
});
})
.filter(function (_a) {
var schema = _a.schema;
return schema !== null;
}),
};
};
// Model name -> schema conversion function (OpenAPI JSON schema to the
// model-specific LLM schema); dispatched by HttpLlmConverter.schema.
var CASTERS = {
"3.0": function (props) { return LlmConverterV3_2.LlmConverterV3.schema(props); },
"3.1": function (props) { return LlmConverterV3_1_1.LlmConverterV3_1.schema(props); },
chatgpt: function (props) { return ChatGptConverter_1.ChatGptConverter.schema(props); },
gemini: function (props) { return GeminiConverter_1.GeminiConverter.schema(props); },
};
// Model name -> separator splitting a schema into [llm, human] halves;
// dispatched by separateParameters.
var SEPARATORS = {
"3.0": LlmConverterV3_2.LlmConverterV3.separate,
"3.1": LlmConverterV3_1_1.LlmConverterV3_1.separate,
chatgpt: ChatGptConverter_1.ChatGptConverter.separate,
gemini: GeminiConverter_1.GeminiConverter.separate,
};
import type { HttpLlm } from "../HttpLlm";
import { OpenApi } from "../OpenApi";
import { IChatGptSchema } from "../structures/IChatGptSchema";
import { IGeminiSchema } from "../structures/IGeminiSchema";
import { IHttpLlmApplication } from "../structures/IHttpLlmApplication";
import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute";
import { IHttpResponse } from "../structures/IHttpResponse";
import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "../structures/ILlmSchemaV3_1";
export declare namespace HttpLlmFunctionFetcher {
const execute: (props: HttpLlm.IFetchProps) => Promise<unknown>;
const propagate: (props: HttpLlm.IFetchProps) => Promise<IHttpResponse>;
const execute: <Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model], Operation extends OpenApi.IOperation = OpenApi.IOperation<OpenApi.IJsonSchema>, Route extends IHttpMigrateRoute = IHttpMigrateRoute<OpenApi.IJsonSchema, Operation>>(props: HttpLlm.IFetchProps<Model, Schema, Operation, Route>) => Promise<unknown>;
const propagate: <Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model], Operation extends OpenApi.IOperation = OpenApi.IOperation<OpenApi.IJsonSchema>, Route extends IHttpMigrateRoute = IHttpMigrateRoute<OpenApi.IJsonSchema, Operation>>(props: HttpLlm.IFetchProps<Model, Schema, Operation, Route>) => Promise<IHttpResponse>;
}
import { OpenApi } from "./OpenApi";
import { IChatGptSchema } from "./structures/IChatGptSchema";
import { IGeminiSchema } from "./structures/IGeminiSchema";
import { IHttpConnection } from "./structures/IHttpConnection";

@@ -6,5 +8,7 @@ import { IHttpLlmApplication } from "./structures/IHttpLlmApplication";

import { IHttpMigrateApplication } from "./structures/IHttpMigrateApplication";
import { IHttpMigrateRoute } from "./structures/IHttpMigrateRoute";
import { IHttpResponse } from "./structures/IHttpResponse";
import { ILlmFunction } from "./structures/ILlmFunction";
import { ILlmSchema } from "./structures/ILlmSchema";
import { ILlmSchemaV3 } from "./structures/ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./structures/ILlmSchemaV3_1";
/**

@@ -51,3 +55,3 @@ * LLM function calling application composer from OpenAPI document.

* as `true`, the number of {@link IHttpLlmFunction.parameters} are always 1 and the
* first parameter type is always {@link ILlmSchema.IObject}. I recommend this option
* first parameter type is always {@link ILlmSchemaV3.IObject}. I recommend this option
* because LLM can understand the keyword arguments more easily.

@@ -59,33 +63,25 @@ *

*/
const application: <Schema extends ILlmSchema, Operation extends OpenApi.IOperation>(document: OpenApi.IDocument<any, Operation> | IHttpMigrateApplication<any, Operation>, options?: Partial<IHttpLlmApplication.IOptions>) => IHttpLlmApplication<Schema>;
/**
* Convert JSON schema to LLM schema.
*
* Converts {@link OpenApi.IJsonSchema JSON schema} to {@link ILlmSchema LLM schema}.
*
* By the way, if the target JSON schema has some recursive references, the
* conversion would be failed and `null` value would be returned. It's because
* the LLM schema does not support the reference type embodied by the
* {@link OpenApi.IJsonSchema.IReference} type.
*
* @param props Schema to convert and components to refer
* @returns LLM schema or null value
*/
const schema: (props: {
const application: <Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model], Operation extends OpenApi.IOperation = OpenApi.IOperation<OpenApi.IJsonSchema>>(props: {
model: Model;
document: OpenApi.IDocument<OpenApi.IJsonSchema, Operation> | IHttpMigrateApplication<OpenApi.IJsonSchema, Operation>;
options?: Partial<IHttpLlmApplication.IOptions<Model, Schema>>;
}) => IHttpLlmApplication<Model, Schema>;
const schema: <Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model]>(props: {
model: Model;
components: OpenApi.IComponents;
schema: OpenApi.IJsonSchema;
recursive: false | number;
}) => ILlmSchema | null;
}) => Schema | null;
/**
* Properties for the LLM function call.
*/
interface IFetchProps {
interface IFetchProps<Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model], Operation extends OpenApi.IOperation = OpenApi.IOperation, Route extends IHttpMigrateRoute = IHttpMigrateRoute<OpenApi.IJsonSchema, Operation>> {
/**
* Application of the LLM function calling.
*/
application: IHttpLlmApplication;
application: IHttpLlmApplication<Model, Schema, Operation>;
/**
* LLM function schema to call.
*/
function: IHttpLlmFunction;
function: IHttpLlmFunction<Schema, Operation, Route>;
/**

@@ -125,3 +121,3 @@ * Connection info to the HTTP server.

*/
const execute: (props: IFetchProps) => Promise<unknown>;
const execute: <Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model], Operation extends OpenApi.IOperation = OpenApi.IOperation<OpenApi.IJsonSchema>>(props: IFetchProps<Model, Schema, Operation>) => Promise<unknown>;
/**

@@ -151,11 +147,11 @@ * Propagate the LLM function call.

*/
const propagate: (props: IFetchProps) => Promise<IHttpResponse>;
const propagate: <Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model], Operation extends OpenApi.IOperation = OpenApi.IOperation<OpenApi.IJsonSchema>>(props: IFetchProps<Model, Schema, Operation>) => Promise<IHttpResponse>;
/**
* Properties for the parameters' merging.
*/
interface IMergeProps {
interface IMergeProps<Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema> {
/**
* Metadata of the target function.
*/
function: ILlmFunction;
function: ILlmFunction<Schema>;
/**

@@ -185,3 +181,3 @@ * Arguments composed by the LLM.

*/
const mergeParameters: (props: IMergeProps) => unknown[];
const mergeParameters: <Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema>(props: IMergeProps<Schema>) => unknown[];
/**

@@ -188,0 +184,0 @@ * Merge two values.

@@ -53,3 +53,3 @@ "use strict";

* as `true`, the number of {@link IHttpLlmFunction.parameters} are always 1 and the
* first parameter type is always {@link ILlmSchema.IObject}. I recommend this option
* first parameter type is always {@link ILlmSchemaV3.IObject}. I recommend this option
* because LLM can understand the keyword arguments more easily.

@@ -61,26 +61,20 @@ *

*/
HttpLlm.application = function (document, options) {
var _a, _b, _c;
HttpLlm.application = function (props) {
var _a, _b, _c, _d, _e, _f;
// MIGRATE
if (document["x-samchon-emended"] === true)
document = HttpMigration_1.HttpMigration.application(document);
return HttpLlmConverter_1.HttpLlmConverter.compose(document, {
keyword: (_a = options === null || options === void 0 ? void 0 : options.keyword) !== null && _a !== void 0 ? _a : false,
separate: (_b = options === null || options === void 0 ? void 0 : options.separate) !== null && _b !== void 0 ? _b : null,
recursive: (_c = options === null || options === void 0 ? void 0 : options.recursive) !== null && _c !== void 0 ? _c : 3,
var migrate = props.document["x-samchon-emended"] === true
? HttpMigration_1.HttpMigration.application(props.document)
: props.document;
return HttpLlmConverter_1.HttpLlmConverter.compose({
migrate: migrate,
model: props.model,
options: {
keyword: (_b = (_a = props.options) === null || _a === void 0 ? void 0 : _a.keyword) !== null && _b !== void 0 ? _b : false,
separate: (_d = (_c = props.options) === null || _c === void 0 ? void 0 : _c.separate) !== null && _d !== void 0 ? _d : null,
recursive: (props.model === "chatgpt"
? undefined
: ((_f = (_e = props.options) === null || _e === void 0 ? void 0 : _e.recursive) !== null && _f !== void 0 ? _f : 3)),
},
});
};
/**
* Convert JSON schema to LLM schema.
*
* Converts {@link OpenApi.IJsonSchema JSON schema} to {@link ILlmSchema LLM schema}.
*
* By the way, if the target JSON schema has some recursive references, the
* conversion would be failed and `null` value would be returned. It's because
* the LLM schema does not support the reference type embodied by the
* {@link OpenApi.IJsonSchema.IReference} type.
*
* @param props Schema to convert and components to refer
* @returns LLM schema or null value
*/
HttpLlm.schema = function (props) { return HttpLlmConverter_1.HttpLlmConverter.schema(props); };

@@ -112,5 +106,3 @@ /**

*/
HttpLlm.execute = function (props) {
return HttpLlmFunctionFetcher_1.HttpLlmFunctionFetcher.execute(props);
};
HttpLlm.execute = function (props) { return HttpLlmFunctionFetcher_1.HttpLlmFunctionFetcher.execute(props); };
/**

@@ -140,5 +132,3 @@ * Propagate the LLM function call.

*/
HttpLlm.propagate = function (props) {
return HttpLlmFunctionFetcher_1.HttpLlmFunctionFetcher.propagate(props);
};
HttpLlm.propagate = function (props) { return HttpLlmFunctionFetcher_1.HttpLlmFunctionFetcher.propagate(props); };
/**

@@ -159,5 +149,3 @@ * Merge the parameters.

*/
HttpLlm.mergeParameters = function (props) {
return LlmDataMerger_1.LlmDataMerger.parameters(props);
};
HttpLlm.mergeParameters = function (props) { return LlmDataMerger_1.LlmDataMerger.parameters(props); };
/**

@@ -164,0 +152,0 @@ * Merge two values.

@@ -15,8 +15,8 @@ export * from "./OpenApi";

export * from "./structures/ILlmApplication";
export * from "./structures/ILlmSchema";
export * from "./structures/ILlmSchemaV3";
export * from "./structures/IChatGptSchema";
export * from "./structures/IGeminiSchema";
export * from "./HttpLlm";
export * from "./utils/LlmTypeChecker";
export * from "./utils/LlmTypeCheckerV3";
export * from "./utils/ChatGptTypeChecker";
export * from "./utils/GeminiTypeChecker";

@@ -40,8 +40,8 @@ "use strict";

__exportStar(require("./structures/ILlmApplication"), exports);
__exportStar(require("./structures/ILlmSchema"), exports);
__exportStar(require("./structures/ILlmSchemaV3"), exports);
__exportStar(require("./structures/IChatGptSchema"), exports);
__exportStar(require("./structures/IGeminiSchema"), exports);
__exportStar(require("./HttpLlm"), exports);
__exportStar(require("./utils/LlmTypeChecker"), exports);
__exportStar(require("./utils/LlmTypeCheckerV3"), exports);
__exportStar(require("./utils/ChatGptTypeChecker"), exports);
__exportStar(require("./utils/GeminiTypeChecker"), exports);
import { OpenApi } from "../OpenApi";
import { IChatGptSchema } from "./IChatGptSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { IHttpLlmFunction } from "./IHttpLlmFunction";
import { IHttpMigrateRoute } from "./IHttpMigrateRoute";
import { ILlmSchema } from "./ILlmSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";
/**

@@ -21,7 +24,7 @@ * Application of LLM function call from OpenAPI document.

* would be successfully converted to {@link IHttpLlmFunction} and its type schemas
* are downgraded to {@link OpenApiV3.IJsonSchema} and converted to {@link ILlmSchema}.
* are downgraded to {@link OpenApiV3.IJsonSchema} and converted to {@link ILlmSchemaV3}.
*
* About the options, if you've configured {@link IHttpLlmApplication.options.keyword}
* (as `true`), number of {@link IHttpLlmFunction.parameters} are always 1 and the first
* parameter type is always {@link ILlmSchema.IObject}. Otherwise, the parameters would
* parameter type is always {@link ILlmSchemaV3.IObject}. Otherwise, the parameters would
* be multiple, and the sequence of the parameters are following below rules.

@@ -69,9 +72,7 @@ *

*/
export interface IHttpLlmApplication<Schema extends ILlmSchema = ILlmSchema, Operation extends OpenApi.IOperation = OpenApi.IOperation, Route extends IHttpMigrateRoute = IHttpMigrateRoute> {
export interface IHttpLlmApplication<Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model], Operation extends OpenApi.IOperation = OpenApi.IOperation, Route extends IHttpMigrateRoute = IHttpMigrateRoute> {
/**
* Version of OpenAPI.
*
* LLM function call schema is based on OpenAPI 3.0.3 specification.
* Model of the target LLM.
*/
openapi: "3.0.3";
model: Model;
/**

@@ -85,3 +86,3 @@ * List of function metadata.

*/
functions: IHttpLlmFunction[];
functions: IHttpLlmFunction<Schema, Operation, Route>[];
/**

@@ -97,5 +98,12 @@ * List of errors occurred during the composition.

*/
options: IHttpLlmApplication.IOptions<Schema>;
options: IHttpLlmApplication.IOptions<Model, Schema>;
}
export declare namespace IHttpLlmApplication {
type Model = "3.0" | "3.1" | "chatgpt" | "gemini";
type ModelSchema = {
"3.0": ILlmSchemaV3;
"3.1": ILlmSchemaV3_1;
chatgpt: IChatGptSchema;
gemini: IGeminiSchema;
};
/**

@@ -138,3 +146,3 @@ * Error occurred in the composition.

*/
interface IOptions<Schema extends ILlmSchema = ILlmSchema> {
interface IOptions<Model extends IHttpLlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = IHttpLlmApplication.ModelSchema[Model]> {
/**

@@ -145,3 +153,3 @@ * Whether the parameters are keyworded or not.

* {@link IHttpLlmApplication.IFunction.parameters} is always 1, and type of
* the parameter is always {@link ILlmSchema.IObject} type.
* the parameter is always {@link ILlmSchemaV3.IObject} type.
*

@@ -175,5 +183,8 @@ * Otherwise, the parameters would be multiple, and the sequence of the parameters

*
* By the way, if the model is "chatgpt", the recursive types are always
* allowed without any limitation, due to it supports the reference type.
*
* @default 3
*/
recursive: false | number;
recursive: Model extends "chatgpt" ? never : false | number;
/**

@@ -186,3 +197,3 @@ * Separator function for the parameters.

* parameter type has configured
* {@link ILlmSchema.IString.contentMediaType} which indicates file
* {@link ILlmSchemaV3.IString.contentMediaType} which indicates file
* uploading, it must be composed by Human, not by LLM

@@ -189,0 +200,0 @@ * (Large Language Model).

import { OpenApi } from "../OpenApi";
import { IChatGptSchema } from "./IChatGptSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { IHttpMigrateRoute } from "./IHttpMigrateRoute";
import { ILlmSchema } from "./ILlmSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";
/**

@@ -20,3 +23,3 @@ * LLM function calling schema from HTTP (OpenAPI) operation.

* {@link OpenApi.IOperation} is, `IHttpLlmFunction` has converted every type schema
* informations from {@link OpenApi.IJsonSchema} to {@link ILlmSchema} to escape
* informations from {@link OpenApi.IJsonSchema} to {@link ILlmSchemaV3} to escape
* {@link OpenApi.IJsonSchema.IReference reference types}, and downgrade the version

@@ -29,3 +32,3 @@ * of the JSON schema to OpenAPI 3.0. It's because LLM function call feature cannot

* {@link IHttpLlmFunction.parameters} are always 1 and the first parameter's
* type is always {@link ILlmSchema.IObject}. The properties' rule is:
* type is always {@link ILlmSchemaV3.IObject}. The properties' rule is:
*

@@ -58,3 +61,3 @@ * - `pathParameters`: Path parameters of {@link OpenApi.IOperation.parameters}

*/
export interface IHttpLlmFunction<Schema extends ILlmSchema = ILlmSchema, Operation extends OpenApi.IOperation = OpenApi.IOperation, Route extends IHttpMigrateRoute = IHttpMigrateRoute> {
export interface IHttpLlmFunction<Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema, Operation extends OpenApi.IOperation = OpenApi.IOperation, Route extends IHttpMigrateRoute = IHttpMigrateRoute> {
/**

@@ -114,3 +117,3 @@ * HTTP method of the endpoint.

* number of {@link IHttpLlmFunction.parameters} are always 1 and the first
* parameter's type is always {@link ILlmSchema.IObject}. The
* parameter's type is always {@link ILlmSchemaV3.IObject}. The
* properties' rule is:

@@ -215,3 +218,3 @@ *

*/
interface ISeparated<Schema extends ILlmSchema = ILlmSchema> {
interface ISeparated<Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema> {
/**

@@ -229,3 +232,3 @@ * Parameters that would be composed by the LLM.

*/
interface ISeparatedParameter<Schema extends ILlmSchema = ILlmSchema> {
interface ISeparatedParameter<Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema> {
/**

@@ -232,0 +235,0 @@ * Index of the parameter.

@@ -0,3 +1,6 @@

import { IChatGptSchema } from "./IChatGptSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { ILlmFunction } from "./ILlmFunction";
import { ILlmSchema } from "./ILlmSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";
/**

@@ -33,4 +36,8 @@ * Application of LLM function calling.

*/
export interface ILlmApplication<Schema extends ILlmSchema = ILlmSchema> {
export interface ILlmApplication<Model extends ILlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema = ILlmApplication.ModelSchema[Model]> {
/**
* Model of the LLM.
*/
model: Model;
/**
* List of function metadata.

@@ -44,9 +51,16 @@ *

*/
options: ILlmApplication.IOptions<Schema>;
options: ILlmApplication.IOptions<Model, Schema>;
}
export declare namespace ILlmApplication {
type Model = "3.0" | "3.1" | "chatgpt" | "gemini";
type ModelSchema = {
"3.0": ILlmSchemaV3;
"3.1": ILlmSchemaV3_1;
chatgpt: IChatGptSchema;
gemini: IGeminiSchema;
};
/**
* Options for composing the LLM application.
*/
interface IOptions<Schema extends ILlmSchema = ILlmSchema> {
interface IOptions<Model extends ILlmApplication.Model, Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema> {
/**

@@ -57,5 +71,8 @@ * Whether to allow recursive types or not.

*
* By the way, if the model is "chatgpt", the recursive types are always
* allowed without any limitation, due to it supports the reference type.
*
* @default 3
*/
recursive: false | number;
recursive: Model extends "chatgpt" ? never : false | number;
/**

@@ -68,3 +85,3 @@ * Separator function for the parameters.

* parameter type has configured
* {@link ILlmSchema.IString.contentMediaType} which indicates file
* {@link ILlmSchemaV3.IString.contentMediaType} which indicates file
* uploading, it must be composed by human, not by LLM

@@ -71,0 +88,0 @@ * (Large Language Model).

@@ -1,2 +0,5 @@

import { ILlmSchema } from "./ILlmSchema";
import { IChatGptSchema } from "./IChatGptSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";
/**

@@ -24,3 +27,3 @@ * LLM function metadata.

*/
export interface ILlmFunction<Schema extends ILlmSchema = ILlmSchema> {
export interface ILlmFunction<Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema> {
/**

@@ -76,3 +79,3 @@ * Representative name of the function.

*/
interface ISeparated<Schema extends ILlmSchema = ILlmSchema> {
interface ISeparated<Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema> {
/**

@@ -90,3 +93,3 @@ * Parameters that would be composed by the LLM.

*/
interface ISeparatedParameter<Schema extends ILlmSchema = ILlmSchema> {
interface ISeparatedParameter<Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema> {
/**

@@ -93,0 +96,0 @@ * Index of the parameter.

@@ -0,2 +1,6 @@

import { IChatGptSchema } from "../structures/IChatGptSchema";
import { IGeminiSchema } from "../structures/IGeminiSchema";
import { ILlmFunction } from "../structures/ILlmFunction";
import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "../structures/ILlmSchemaV3_1";
/**

@@ -11,7 +15,7 @@ * Data combiner for LLM function call.

*/
interface IProps {
interface IProps<Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema> {
/**
* Target function to call.
*/
function: ILlmFunction;
function: ILlmFunction<Schema>;
/**

@@ -41,3 +45,3 @@ * Arguments composed by LLM (Large Language Model).

*/
const parameters: (props: IProps) => unknown[];
const parameters: <Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema>(props: IProps<Schema>) => unknown[];
/**

@@ -44,0 +48,0 @@ * Combine two values into one.

@@ -96,3 +96,3 @@ "use strict";

.map(function (_, i) { return LlmDataMerger.value(x[i], y[i]); })
: y !== null && y !== void 0 ? y : x;
: (y !== null && y !== void 0 ? y : x);
};

@@ -99,0 +99,0 @@ var combineObject = function (x, y) {

{
"name": "@samchon/openapi",
"version": "2.0.0-dev.20241108",
"version": "2.0.0-dev.20241109",
"description": "OpenAPI definitions and converters for 'typia' and 'nestia'.",

@@ -5,0 +5,0 @@ "main": "./lib/index.js",

@@ -35,3 +35,3 @@ # `@samchon/openapi`

- Supported schemes
- [`ILlmSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchema.ts)
- [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
- [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)

@@ -216,3 +216,3 @@ - [`IOpenAiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IOpenAiSchema.ts)

- [`IHttpLlmFunction`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmFunction.ts)
- [`ILlmSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchema.ts)
- [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
- [`LlmTypeChecker`](https://github.com/samchon/openapi/blob/master/src/utils/LlmTypeChecker.ts)

@@ -219,0 +219,0 @@

import { OpenApi } from "../OpenApi";
import { IChatGptSchema } from "../structures/IChatGptSchema";
import { ChatGptTypeChecker } from "../utils/ChatGptTypeChecker";
import { OpenApiTypeChecker } from "../utils/OpenApiTypeChecker";

@@ -134,2 +135,176 @@

};
/**
 * Split a ChatGPT schema into an LLM-composable part and a human-composable
 * part, as a `[llm, human]` tuple (either side may be `null` when the whole
 * schema belongs to the other side).
 *
 * Dispatch order matters: the predicate wins outright, `unknown`/`oneOf`
 * schemas are never split (assigned wholly to the LLM side), and composite
 * kinds (object / array / `$ref`) recurse through their dedicated helpers.
 */
export const separate = (props: {
  top: IChatGptSchema.ITop;
  predicate: (schema: IChatGptSchema) => boolean;
  schema: IChatGptSchema;
}): [IChatGptSchema | null, IChatGptSchema | null] => {
  const { top, predicate, schema } = props;
  // Predicate match: the entire schema must be composed by a human.
  if (predicate(schema) === true) return [null, schema];
  // Unknown and union schemas cannot be meaningfully split.
  if (
    ChatGptTypeChecker.isUnknown(schema) ||
    ChatGptTypeChecker.isOneOf(schema)
  )
    return [schema, null];
  if (ChatGptTypeChecker.isObject(schema))
    return separateObject({ top, predicate, schema });
  if (ChatGptTypeChecker.isArray(schema))
    return separateArray({ top, predicate, schema });
  if (ChatGptTypeChecker.isReference(schema))
    return separateReference({ top, predicate, schema });
  // Plain scalar (string / number / boolean / ...): LLM side only.
  return [schema, null];
};
/**
 * Split an array schema by separating its element (`items`) schema, then
 * rebuilding a full array schema around whichever halves survived.
 *
 * A side is `null` exactly when the element schema has no content for that
 * side; otherwise the original array's other attributes are carried over.
 */
const separateArray = (props: {
  top: IChatGptSchema.ITop;
  predicate: (schema: IChatGptSchema) => boolean;
  schema: IChatGptSchema.IArray;
}): [IChatGptSchema.IArray | null, IChatGptSchema.IArray | null] => {
  const [llmItems, humanItems] = separate({
    top: props.top,
    predicate: props.predicate,
    schema: props.schema.items,
  });
  // Wrap a surviving element schema back into an array schema (or null).
  const rebuild = (
    items: IChatGptSchema | null,
  ): IChatGptSchema.IArray | null =>
    items === null ? null : { ...props.schema, items };
  return [rebuild(llmItems), rebuild(humanItems)];
};
/**
 * Split an object schema property-by-property into LLM and human halves.
 *
 * Both halves start as shallow copies of the original schema with empty
 * `properties`; each property is separated recursively and lands on the
 * side(s) that received content. A half collapses to `null` when it ends up
 * with no properties and `additionalProperties === false`; otherwise its
 * `required` list is pruned to the keys it actually kept.
 */
const separateObject = (props: {
  top: IChatGptSchema.ITop;
  predicate: (schema: IChatGptSchema) => boolean;
  schema: IChatGptSchema.IObject;
}): [IChatGptSchema.IObject | null, IChatGptSchema.IObject | null] => {
  // Shallow copies keep title/description/etc.; properties are rebuilt below.
  const llm = {
    ...props.schema,
    properties: {} as Record<string, IChatGptSchema>,
  } satisfies IChatGptSchema.IObject;
  const human = {
    ...props.schema,
    properties: {} as Record<string, IChatGptSchema>,
  } satisfies IChatGptSchema.IObject;
  for (const [key, value] of Object.entries(props.schema.properties ?? {})) {
    const [x, y] = separate({
      top: props.top,
      predicate: props.predicate,
      schema: value,
    });
    if (x !== null) llm.properties[key] = x;
    if (y !== null) human.properties[key] = y;
  }
  if (
    typeof props.schema.additionalProperties === "object" &&
    props.schema.additionalProperties !== null
  ) {
    // additionalProperties is itself a schema: separate it like a property.
    const [x, y] = separate({
      top: props.top,
      predicate: props.predicate,
      schema: props.schema.additionalProperties,
    });
    if (x !== null) llm.additionalProperties = x;
    if (y !== null) human.additionalProperties = y;
  } else {
    // Boolean/absent additionalProperties: forced to false on both sides.
    // NOTE(review): presumably to satisfy ChatGPT strict mode, which
    // requires `additionalProperties: false` — confirm.
    llm.additionalProperties = false;
    human.additionalProperties = false;
  }
  return [
    // An empty, closed object contributes nothing to its side.
    Object.keys(llm.properties).length === 0 &&
    llm.additionalProperties === false
      ? null
      : shrinkRequired(llm),
    Object.keys(human.properties).length === 0 &&
    human.additionalProperties === false
      ? null
      : shrinkRequired(human),
  ];
};
/**
 * Split a `$ref` schema by materializing `.Llm` / `.Human` variants of the
 * referenced definition inside `top.$defs`, and returning references that
 * point at whichever variants exist.
 *
 * Placeholder entries are registered in `$defs` *before* recursing so that
 * self-referencing (recursive) definitions terminate: the nested call hits
 * the "FIND EXISTING" branch instead of recursing forever.
 */
const separateReference = (props: {
  top: IChatGptSchema.ITop;
  predicate: (schema: IChatGptSchema) => boolean;
  schema: IChatGptSchema.IReference;
}): [IChatGptSchema.IReference | null, IChatGptSchema.IReference | null] => {
  // Definition key, e.g. "#/$defs/Foo" -> "Foo".
  const key: string = props.schema.$ref.split("#/$defs/")[1];
  // FIND EXISTING: variants already composed (or in progress) — reuse them.
  if (props.top.$defs?.[`${key}.Human`] || props.top.$defs?.[`${key}.Llm`])
    return [
      props.top.$defs?.[`${key}.Llm`]
        ? {
            ...props.schema,
            $ref: `#/$defs/${key}.Llm`,
          }
        : null,
      props.top.$defs?.[`${key}.Human`]
        ? {
            ...props.schema,
            $ref: `#/$defs/${key}.Human`,
          }
        : null,
    ];
  // PRE-ASSIGNMENT: empty placeholders break recursion cycles (see above).
  props.top.$defs![`${key}.Llm`] = {};
  props.top.$defs![`${key}.Human`] = {};
  // DO COMPOSE: separate the referenced definition itself.
  // NOTE(review): the `!` assumes `$defs[key]` exists; a dangling $ref would
  // pass `undefined` into `separate()` — confirm callers guarantee the key.
  const schema: IChatGptSchema = props.top.$defs?.[key]!;
  const [llm, human] = separate({
    top: props.top,
    predicate: props.predicate,
    schema,
  });
  // Replace each placeholder with the real variant, or remove it entirely.
  if (llm === null) delete props.top.$defs![`${key}.Llm`];
  else props.top.$defs![`${key}.Llm`] = llm;
  if (human === null) delete props.top.$defs![`${key}.Human`];
  else props.top.$defs![`${key}.Human`] = human;
  // FINALIZE: return references only for the variants that survived.
  return [
    llm !== null
      ? {
          ...props.schema,
          $ref: `#/$defs/${key}.Llm`,
        }
      : null,
    human !== null
      ? {
          ...props.schema,
          $ref: `#/$defs/${key}.Human`,
        }
      : null,
  ];
};
/**
 * Prune an object schema's `required` list in place, keeping only keys that
 * still exist in `properties` (properties may have been separated away to
 * the other side). Returns the same (mutated) schema for chaining.
 */
const shrinkRequired = (
  s: IChatGptSchema.IObject,
): IChatGptSchema.IObject => {
  if (s.required === undefined) return s;
  s.required = s.required.filter((key) => s.properties?.[key] !== undefined);
  return s;
};
}
import { OpenApi } from "../OpenApi";
import { IGeminiSchema } from "../structures/IGeminiSchema";
import { ILlmSchema } from "../structures/ILlmSchema";
import { LlmTypeChecker } from "../utils/LlmTypeChecker";
import { HttpLlmConverter } from "./HttpLlmConverter";
import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3";
import { LlmTypeCheckerV3 } from "../utils/LlmTypeCheckerV3";
import { LlmConverterV3 } from "./LlmConverterV3";

@@ -13,11 +13,17 @@ export namespace GeminiConverter {

}): IGeminiSchema | null => {
const schema: ILlmSchema | null = HttpLlmConverter.schema(props);
const schema: ILlmSchemaV3 | null = LlmConverterV3.schema(props);
if (schema === null) return null;
let union: boolean = false;
LlmTypeChecker.visit(schema, (v) => {
if (LlmTypeChecker.isOneOf(v)) union = true;
LlmTypeCheckerV3.visit(schema, (v) => {
if (LlmTypeCheckerV3.isOneOf(v)) union = true;
});
return union ? null : schema;
};
/**
 * Split a Gemini schema into `[llm, human]` composable halves (either side
 * may be `null` when the whole schema belongs to the other side).
 *
 * NOTE(review): delegates directly to LlmConverterV3.separate — assumes the
 * Gemini schema is runtime-compatible with the v3.0 LLM schema. The sibling
 * `schema()` in this namespace also builds Gemini schemas via
 * `LlmConverterV3.schema`, which supports that assumption; confirm.
 */
export const separate = (props: {
  predicate: (schema: IGeminiSchema) => boolean;
  schema: IGeminiSchema;
}): [IGeminiSchema | null, IGeminiSchema | null] =>
  LlmConverterV3.separate(props);
}
import { OpenApi } from "../OpenApi";
import { IChatGptSchema } from "../structures/IChatGptSchema";
import { IGeminiSchema } from "../structures/IGeminiSchema";
import { IHttpLlmApplication } from "../structures/IHttpLlmApplication";

@@ -6,173 +8,267 @@ import { IHttpLlmFunction } from "../structures/IHttpLlmFunction";

import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute";
import { ILlmSchema } from "../structures/ILlmSchema";
import { LlmSchemaSeparator } from "../utils/LlmSchemaSeparator";
import { LlmTypeChecker } from "../utils/LlmTypeChecker";
import { OpenApiTypeChecker } from "../utils/OpenApiTypeChecker";
import { OpenApiV3Downgrader } from "./OpenApiV3Downgrader";
import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "../structures/ILlmSchemaV3_1";
import { ChatGptConverter } from "./ChatGptConverter";
import { GeminiConverter } from "./GeminiConverter";
import { LlmConverterV3 } from "./LlmConverterV3";
import { LlmConverterV3_1 } from "./LlmConverterV3_1";
export namespace HttpLlmConverter {
export const compose = (
migrate: IHttpMigrateApplication,
options: IHttpLlmApplication.IOptions,
): IHttpLlmApplication => {
export const compose = <
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,
Route extends IHttpMigrateRoute = IHttpMigrateRoute<
OpenApi.IJsonSchema,
Operation
>,
>(props: {
model: Model;
migrate: IHttpMigrateApplication<OpenApi.IJsonSchema, Operation>;
options: IHttpLlmApplication.IOptions<Model, Schema>;
}): IHttpLlmApplication<Model, Schema, Operation, Route> => {
// COMPOSE FUNCTIONS
const errors: IHttpLlmApplication.IError[] = migrate.errors.map((e) => ({
method: e.method,
path: e.path,
messages: e.messages,
operation: () => e.operation(),
route: () => undefined,
}));
const functions: IHttpLlmFunction[] = migrate.routes
.map((route) => {
if (route.method === "head") return null;
const func: IHttpLlmFunction | null = composeFunction(options)(
migrate.document().components,
)(route);
if (func === null)
errors.push({
method: route.method,
path: route.path,
messages: ["Failed to escape $ref"],
operation: () => route.operation(),
route: () => route,
const errors: IHttpLlmApplication.IError<Operation, Route>[] =
props.migrate.errors.map((e) => ({
method: e.method,
path: e.path,
messages: e.messages,
operation: () => e.operation(),
route: () => undefined,
}));
const functions: IHttpLlmFunction<Schema, Operation, Route>[] =
props.migrate.routes
.map((route) => {
if (route.method === "head") return null;
const func: IHttpLlmFunction<Schema> | null = composeFunction({
model: props.model,
options: props.options,
components: props.migrate.document().components,
route,
});
return func;
})
.filter((v): v is IHttpLlmFunction => v !== null);
if (func === null)
errors.push({
method: route.method,
path: route.path,
messages: ["Failed to escape $ref"],
operation: () => route.operation(),
route: () => route as any as Route,
});
return func;
})
.filter(
(v): v is IHttpLlmFunction<Schema, Operation, Route> => v !== null,
);
return {
openapi: "3.0.3",
model: props.model,
options: props.options,
functions,
errors,
options,
};
};
export const schema = (props: {
export const schema = <
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
>(props: {
model: Model;
components: OpenApi.IComponents;
schema: OpenApi.IJsonSchema;
recursive: false | number;
}): ILlmSchema | null => {
const resolved: OpenApi.IJsonSchema | null = OpenApiTypeChecker.escape({
}): Schema | null => {
return CASTERS[props.model]({
components: props.components,
recursive: props.recursive,
schema: props.schema,
recursive: props.recursive,
});
if (resolved === null) return null;
const downgraded: ILlmSchema = OpenApiV3Downgrader.downgradeSchema({
original: {},
downgraded: {},
})(resolved) as ILlmSchema;
LlmTypeChecker.visit(downgraded, (schema) => {
if (
LlmTypeChecker.isOneOf(schema) &&
(schema as any).discriminator !== undefined
)
delete (schema as any).discriminator;
});
return downgraded;
}) as Schema | null;
};
}
const composeFunction =
(options: IHttpLlmApplication.IOptions) =>
(components: OpenApi.IComponents) =>
(route: IHttpMigrateRoute): IHttpLlmFunction | null => {
// CAST SCHEMA TYPES
const cast = (s: OpenApi.IJsonSchema) =>
HttpLlmConverter.schema({
components,
schema: s,
recursive: options.recursive,
});
const output: ILlmSchema | null | undefined =
route.success && route.success ? cast(route.success.schema) : undefined;
if (output === null) return null;
const properties: [string, ILlmSchema | null][] = [
...route.parameters.map((p) => ({
key: p.key,
schema: {
...p.schema,
title: p.parameter().title ?? p.schema.title,
description: p.parameter().description ?? p.schema.description,
},
})),
...(route.query
? [
{
key: route.query.key,
schema: {
...route.query.schema,
title: route.query.title() ?? route.query.schema.title,
description:
route.query.description() ?? route.query.schema.description,
},
const composeFunction = <
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,
Route extends IHttpMigrateRoute = IHttpMigrateRoute<
OpenApi.IJsonSchema,
Operation
>,
>(props: {
model: Model;
components: OpenApi.IComponents;
route: IHttpMigrateRoute<OpenApi.IJsonSchema, Operation>;
options: IHttpLlmApplication.IOptions<Model, Schema>;
}): IHttpLlmFunction<Schema, Operation, Route> | null => {
const cast = (s: OpenApi.IJsonSchema): Schema | null =>
CASTERS[props.model]({
components: props.components,
recursive: props.options.recursive,
schema: s,
}) as Schema | null;
const output: Schema | null | undefined =
props.route.success && props.route.success
? cast(props.route.success.schema)
: undefined;
if (output === null) return null;
const properties: [string, Schema | null][] = [
...props.route.parameters.map((p) => ({
key: p.key,
schema: {
...p.schema,
title: p.parameter().title ?? p.schema.title,
description: p.parameter().description ?? p.schema.description,
},
})),
...(props.route.query
? [
{
key: props.route.query.key,
schema: {
...props.route.query.schema,
title:
props.route.query.title() ?? props.route.query.schema.title,
description:
props.route.query.description() ??
props.route.query.schema.description,
},
]
: []),
...(route.body
? [
{
key: route.body.key,
schema: {
...route.body.schema,
description:
route.body.description() ?? route.body.schema.description,
},
},
]
: []),
].map((o) => [o.key, cast(o.schema)]);
if (properties.some(([_k, v]) => v === null)) return null;
// COMPOSE PARAMETERS
const parameters: ILlmSchema[] = options.keyword
},
]
: []),
...(props.route.body
? [
{
type: "object",
properties: Object.fromEntries(
properties as [string, ILlmSchema][],
),
key: props.route.body.key,
schema: {
...props.route.body.schema,
description:
props.route.body.description() ??
props.route.body.schema.description,
},
},
]
: properties.map(([_k, v]) => v!);
const operation: OpenApi.IOperation = route.operation();
: []),
].map((o) => [o.key, cast(o.schema)]);
if (properties.some(([_k, v]) => v === null)) return null;
// FINALIZATION
return {
method: route.method as "get",
path: route.path,
name: route.accessor.join("_"),
strict: true,
parameters,
separated: options.separate
? LlmSchemaSeparator.parameters({
parameters,
predicator: options.separate,
})
: undefined,
output: output
? (OpenApiV3Downgrader.downgradeSchema({
original: {},
downgraded: {},
})(output as any) as ILlmSchema)
: undefined,
description: (() => {
if (operation.summary && operation.description) {
return operation.description.startsWith(operation.summary)
? operation.description
: [
operation.summary,
operation.summary.endsWith(".") ? "" : ".",
"\n\n",
operation.description,
].join("");
}
return operation.description ?? operation.summary;
})(),
deprecated: operation.deprecated,
tags: operation.tags,
route: () => route,
operation: () => operation,
};
// COMPOSE PARAMETERS
const parameters: Schema[] = props.options.keyword
? [
{
type: "object",
properties: Object.fromEntries(properties as [string, Schema][]),
additionalProperties: false,
} as any as Schema,
]
: properties.map(([_k, v]) => v!);
const operation: OpenApi.IOperation = props.route.operation();
// FINALIZATION
return {
method: props.route.method as "get",
path: props.route.path,
name: props.route.accessor.join("_"),
strict: true,
parameters,
separated: props.options.separate
? separateParameters({
model: props.model,
predicate: props.options.separate,
parameters,
})
: undefined,
output,
description: (() => {
if (operation.summary && operation.description) {
return operation.description.startsWith(operation.summary)
? operation.description
: [
operation.summary,
operation.summary.endsWith(".") ? "" : ".",
"\n\n",
operation.description,
].join("");
}
return operation.description ?? operation.summary;
})(),
deprecated: operation.deprecated,
tags: operation.tags,
route: () => props.route as any,
operation: () => props.route.operation(),
};
};
/**
 * Separate every parameter schema of an LLM function into LLM-composable
 * and human-composable sides, preserving each parameter's original index.
 *
 * The per-model separator is looked up in the SEPARATORS dispatch table.
 *
 * NOTE(review): `SEPARATORS[props.model] as any` erases type checking here.
 * For the "chatgpt" model the underlying `ChatGptConverter.separate` also
 * expects a `top` property (root schema holding `$defs`), which this call
 * site does not supply — confirm whether chatgpt `$ref` separation works
 * through this path.
 */
const separateParameters = <
  Model extends IHttpLlmApplication.Model,
  Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema,
>(props: {
  model: Model;
  parameters: Schema[];
  predicate: (schema: Schema) => boolean;
}): IHttpLlmFunction.ISeparated<Schema> => {
  const separator: (props: {
    predicate: (schema: Schema) => boolean;
    schema: Schema;
  }) => [Schema | null, Schema | null] = SEPARATORS[props.model] as any;
  // indexes[i] = [llm half, human half] of parameters[i].
  const indexes: Array<[Schema | null, Schema | null]> = props.parameters.map(
    (schema) =>
      separator({
        predicate: props.predicate,
        schema,
      }),
  );
  // Keep the original parameter index alongside each surviving half; the
  // `!` is safe only because null schemas are filtered out right after.
  return {
    llm: indexes
      .map(([llm], index) => ({
        index,
        schema: llm!,
      }))
      .filter(({ schema }) => schema !== null),
    human: indexes
      .map(([, human], index) => ({
        index,
        schema: human!,
      }))
      .filter(({ schema }) => schema !== null),
  };
};
/**
 * Model-keyed dispatch table converting an OpenAPI JSON schema into each
 * supported LLM model's schema dialect.
 *
 * Keys mirror IHttpLlmApplication.Model ("3.0" | "3.1" | "chatgpt" |
 * "gemini"); each entry forwards to the matching converter's `schema()`.
 */
const CASTERS = {
  "3.0": (props: {
    components: OpenApi.IComponents;
    schema: OpenApi.IJsonSchema;
    recursive: false | number;
  }) => LlmConverterV3.schema(props),
  "3.1": (props: {
    components: OpenApi.IComponents;
    schema: OpenApi.IJsonSchema;
    recursive: false | number;
  }) => LlmConverterV3_1.schema(props),
  chatgpt: (props: {
    components: OpenApi.IComponents;
    schema: OpenApi.IJsonSchema;
    recursive: false | number;
  }) => ChatGptConverter.schema(props),
  gemini: (props: {
    components: OpenApi.IComponents;
    schema: OpenApi.IJsonSchema;
    recursive: false | number;
  }) => GeminiConverter.schema(props),
};
/**
 * Model-keyed dispatch table of schema separators (LLM side vs. human side),
 * consumed by `separateParameters`.
 *
 * NOTE(review): ChatGptConverter.separate additionally requires a `top`
 * property (root schema with `$defs`) that the shared separator signature
 * does not carry — confirm chatgpt reference separation via this table.
 */
const SEPARATORS = {
  "3.0": LlmConverterV3.separate,
  "3.1": LlmConverterV3_1.separate,
  chatgpt: ChatGptConverter.separate,
  gemini: GeminiConverter.separate,
};
import type { HttpLlm } from "../HttpLlm";
import type { HttpMigration } from "../HttpMigration";
import { OpenApi } from "../OpenApi";
import { IChatGptSchema } from "../structures/IChatGptSchema";
import { IGeminiSchema } from "../structures/IGeminiSchema";
import { IHttpLlmApplication } from "../structures/IHttpLlmApplication";
import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute";
import { IHttpResponse } from "../structures/IHttpResponse";
import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "../structures/ILlmSchemaV3_1";
import { HttpMigrateRouteFetcher } from "./HttpMigrateRouteFetcher";
export namespace HttpLlmFunctionFetcher {
export const execute = async (props: HttpLlm.IFetchProps): Promise<unknown> =>
export const execute = async <
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,
Route extends IHttpMigrateRoute = IHttpMigrateRoute<
OpenApi.IJsonSchema,
Operation
>,
>(
props: HttpLlm.IFetchProps<Model, Schema, Operation, Route>,
): Promise<unknown> =>
HttpMigrateRouteFetcher.execute(getFetchArguments("execute", props));
export const propagate = async (
props: HttpLlm.IFetchProps,
export const propagate = async <
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,
Route extends IHttpMigrateRoute = IHttpMigrateRoute<
OpenApi.IJsonSchema,
Operation
>,
>(
props: HttpLlm.IFetchProps<Model, Schema, Operation, Route>,
): Promise<IHttpResponse> =>
HttpMigrateRouteFetcher.propagate(getFetchArguments("propagate", props));
const getFetchArguments = (
const getFetchArguments = <
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,
Route extends IHttpMigrateRoute = IHttpMigrateRoute<
OpenApi.IJsonSchema,
Operation
>,
>(
from: string,
props: HttpLlm.IFetchProps,
props: HttpLlm.IFetchProps<Model, Schema, Operation, Route>,
): HttpMigration.IFetchProps => {
const route: IHttpMigrateRoute = props.function.route();
const route: Route = props.function.route();
if (props.application.options.keyword === true) {

@@ -55,4 +99,4 @@ const input: Record<string, any> = props.arguments[0];

body,
};
} satisfies HttpMigration.IFetchProps;
};
}

@@ -5,2 +5,4 @@ import { HttpMigration } from "./HttpMigration";

import { HttpLlmFunctionFetcher } from "./http/HttpLlmFunctionFetcher";
import { IChatGptSchema } from "./structures/IChatGptSchema";
import { IGeminiSchema } from "./structures/IGeminiSchema";
import { IHttpConnection } from "./structures/IHttpConnection";

@@ -10,5 +12,7 @@ import { IHttpLlmApplication } from "./structures/IHttpLlmApplication";

import { IHttpMigrateApplication } from "./structures/IHttpMigrateApplication";
import { IHttpMigrateRoute } from "./structures/IHttpMigrateRoute";
import { IHttpResponse } from "./structures/IHttpResponse";
import { ILlmFunction } from "./structures/ILlmFunction";
import { ILlmSchema } from "./structures/ILlmSchema";
import { ILlmSchemaV3 } from "./structures/ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./structures/ILlmSchemaV3_1";
import { LlmDataMerger } from "./utils/LlmDataMerger";

@@ -60,3 +64,3 @@

* as `true`, the number of {@link IHttpLlmFunction.parameters} are always 1 and the
* first parameter type is always {@link ILlmSchema.IObject}. I recommend this option
* first parameter type is always {@link ILlmSchemaV3.IObject}. I recommend this option
* because LLM can understand the keyword arguments more easily.

@@ -69,43 +73,50 @@ *

export const application = <
Schema extends ILlmSchema,
Operation extends OpenApi.IOperation,
>(
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,
>(props: {
model: Model;
document:
| OpenApi.IDocument<any, Operation>
| IHttpMigrateApplication<any, Operation>,
options?: Partial<IHttpLlmApplication.IOptions>,
): IHttpLlmApplication<Schema> => {
| OpenApi.IDocument<OpenApi.IJsonSchema, Operation>
| IHttpMigrateApplication<OpenApi.IJsonSchema, Operation>;
options?: Partial<IHttpLlmApplication.IOptions<Model, Schema>>;
}): IHttpLlmApplication<Model, Schema> => {
// MIGRATE
if ((document as OpenApi.IDocument)["x-samchon-emended"] === true)
document = HttpMigration.application(
document as OpenApi.IDocument<any, Operation>,
);
return HttpLlmConverter.compose(
document as IHttpMigrateApplication<any, Operation>,
{
keyword: options?.keyword ?? false,
separate: options?.separate ?? null,
recursive: options?.recursive ?? 3,
const migrate: IHttpMigrateApplication =
(props.document as OpenApi.IDocument)["x-samchon-emended"] === true
? HttpMigration.application(props.document as OpenApi.IDocument)
: (props.document as IHttpMigrateApplication);
return HttpLlmConverter.compose<Model, Schema>({
migrate,
model: props.model,
options: {
keyword: props.options?.keyword ?? false,
separate: props.options?.separate ?? null,
recursive: (props.model === "chatgpt"
? undefined
: (props.options?.recursive ?? 3)) as IHttpLlmApplication.IOptions<
Model,
Schema
>["recursive"],
},
);
});
};
/**
* Convert JSON schema to LLM schema.
*
* Converts {@link OpenApi.IJsonSchema JSON schema} to {@link ILlmSchema LLM schema}.
*
* By the way, if the target JSON schema has some recursive references, the
* conversion would be failed and `null` value would be returned. It's because
* the LLM schema does not support the reference type embodied by the
* {@link OpenApi.IJsonSchema.IReference} type.
*
* @param props Schema to convert and components to refer
* @returns LLM schema or null value
*/
export const schema = (props: {
export const schema = <
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
>(props: {
model: Model;
components: OpenApi.IComponents;
schema: OpenApi.IJsonSchema;
recursive: false | number;
}): ILlmSchema | null => HttpLlmConverter.schema(props);
}): Schema | null => HttpLlmConverter.schema(props);

@@ -118,7 +129,19 @@ /* -----------------------------------------------------------

*/
export interface IFetchProps {
export interface IFetchProps<
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,
Route extends IHttpMigrateRoute = IHttpMigrateRoute<
OpenApi.IJsonSchema,
Operation
>,
> {
/**
* Application of the LLM function calling.
*/
application: IHttpLlmApplication;
application: IHttpLlmApplication<Model, Schema, Operation>;

@@ -128,3 +151,3 @@ /**

*/
function: IHttpLlmFunction;
function: IHttpLlmFunction<Schema, Operation, Route>;

@@ -167,4 +190,13 @@ /**

*/
export const execute = (props: IFetchProps): Promise<unknown> =>
HttpLlmFunctionFetcher.execute(props);
export const execute = <
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,
>(
props: IFetchProps<Model, Schema, Operation>,
): Promise<unknown> => HttpLlmFunctionFetcher.execute(props);

@@ -195,4 +227,13 @@ /**

*/
export const propagate = (props: IFetchProps): Promise<IHttpResponse> =>
HttpLlmFunctionFetcher.propagate(props);
export const propagate = <
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,
>(
props: IFetchProps<Model, Schema, Operation>,
): Promise<IHttpResponse> => HttpLlmFunctionFetcher.propagate(props);

@@ -205,7 +246,13 @@ /* -----------------------------------------------------------

*/
export interface IMergeProps {
export interface IMergeProps<
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema,
> {
/**
* Metadata of the target function.
*/
function: ILlmFunction;
function: ILlmFunction<Schema>;

@@ -238,4 +285,11 @@ /**

*/
export const mergeParameters = (props: IMergeProps): unknown[] =>
LlmDataMerger.parameters(props);
export const mergeParameters = <
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema,
>(
props: IMergeProps<Schema>,
): unknown[] => LlmDataMerger.parameters(props);

@@ -242,0 +296,0 @@ /**

@@ -30,3 +30,3 @@ //----

export * from "./structures/ILlmSchema";
export * from "./structures/ILlmSchemaV3";
export * from "./structures/IChatGptSchema";

@@ -36,4 +36,4 @@ export * from "./structures/IGeminiSchema";

export * from "./HttpLlm";
export * from "./utils/LlmTypeChecker";
export * from "./utils/LlmTypeCheckerV3";
export * from "./utils/ChatGptTypeChecker";
export * from "./utils/GeminiTypeChecker";

@@ -20,2 +20,3 @@ export type IChatGptSchema =

};
/**

@@ -22,0 +23,0 @@ * Constant value type.

import { OpenApi } from "../OpenApi";
import { IChatGptSchema } from "./IChatGptSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { IHttpLlmFunction } from "./IHttpLlmFunction";
import { IHttpMigrateRoute } from "./IHttpMigrateRoute";
import { ILlmSchema } from "./ILlmSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";

@@ -22,7 +25,7 @@ /**

* would be successfully converted to {@link IHttpLlmFunction} and its type schemas
* are downgraded to {@link OpenApiV3.IJsonSchema} and converted to {@link ILlmSchema}.
* are downgraded to {@link OpenApiV3.IJsonSchema} and converted to {@link ILlmSchemaV3}.
*
* About the options, if you've configured {@link IHttpLlmApplication.options.keyword}
* (as `true`), number of {@link IHttpLlmFunction.parameters} are always 1 and the first
* parameter type is always {@link ILlmSchema.IObject}. Otherwise, the parameters would
* parameter type is always {@link ILlmSchemaV3.IObject}. Otherwise, the parameters would
* be multiple, and the sequence of the parameters are following below rules.

@@ -71,3 +74,8 @@ *

export interface IHttpLlmApplication<
Schema extends ILlmSchema = ILlmSchema,
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
Operation extends OpenApi.IOperation = OpenApi.IOperation,

@@ -77,7 +85,5 @@ Route extends IHttpMigrateRoute = IHttpMigrateRoute,

/**
* Version of OpenAPI.
*
* LLM function call schema is based on OpenAPI 3.0.3 specification.
* Model of the target LLM.
*/
openapi: "3.0.3";
model: Model;

@@ -92,3 +98,3 @@ /**

*/
functions: IHttpLlmFunction[];
functions: IHttpLlmFunction<Schema, Operation, Route>[];

@@ -106,5 +112,13 @@ /**

*/
options: IHttpLlmApplication.IOptions<Schema>;
options: IHttpLlmApplication.IOptions<Model, Schema>;
}
export namespace IHttpLlmApplication {
export type Model = "3.0" | "3.1" | "chatgpt" | "gemini";
export type ModelSchema = {
"3.0": ILlmSchemaV3;
"3.1": ILlmSchemaV3_1;
chatgpt: IChatGptSchema;
gemini: IGeminiSchema;
};
/**

@@ -155,3 +169,10 @@ * Error occurred in the composition.

*/
export interface IOptions<Schema extends ILlmSchema = ILlmSchema> {
export interface IOptions<
Model extends IHttpLlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = IHttpLlmApplication.ModelSchema[Model],
> {
/**

@@ -162,3 +183,3 @@ * Whether the parameters are keyworded or not.

* {@link IHttpLlmApplication.IFunction.parameters} is always 1, and type of
* the parameter is always {@link ILlmSchema.IObject} type.
* the parameter is always {@link ILlmSchemaV3.IObject} type.
*

@@ -193,5 +214,8 @@ * Otherwise, the parameters would be multiple, and the sequence of the parameters

*
* By the way, if the model is "chatgpt", the recursive types are always
* allowed without any limitation, due to it supports the reference type.
*
* @default 3
*/
recursive: false | number;
recursive: Model extends "chatgpt" ? never : false | number;

@@ -205,3 +229,3 @@ /**

* parameter type has configured
* {@link ILlmSchema.IString.contentMediaType} which indicates file
* {@link ILlmSchemaV3.IString.contentMediaType} which indicates file
* uploading, it must be composed by Human, not by LLM

@@ -208,0 +232,0 @@ * (Large Language Model).

import { OpenApi } from "../OpenApi";
import { IChatGptSchema } from "./IChatGptSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { IHttpMigrateRoute } from "./IHttpMigrateRoute";
import { ILlmSchema } from "./ILlmSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";

@@ -21,3 +24,3 @@ /**

* {@link OpenApi.IOperation} is, `IHttpLlmFunction` has converted every type schema
* information from {@link OpenApi.IJsonSchema} to {@link ILlmSchema} to escape
* information from {@link OpenApi.IJsonSchema} to {@link ILlmSchemaV3} to escape
* {@link OpenApi.IJsonSchema.IReference reference types}, and downgrade the version

@@ -30,3 +33,3 @@ * of the JSON schema to OpenAPI 3.0. It's because LLM function call feature cannot

* {@link IHttpLlmFunction.parameters} are always 1 and the first parameter's
* type is always {@link ILlmSchema.IObject}. The properties' rule is:
* type is always {@link ILlmSchemaV3.IObject}. The properties' rule is:
*

@@ -60,3 +63,3 @@ * - `pathParameters`: Path parameters of {@link OpenApi.IOperation.parameters}

export interface IHttpLlmFunction<
Schema extends ILlmSchema = ILlmSchema,
Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema,
Operation extends OpenApi.IOperation = OpenApi.IOperation,

@@ -123,3 +126,3 @@ Route extends IHttpMigrateRoute = IHttpMigrateRoute,

* number of {@link IHttpLlmFunction.parameters} are always 1 and the first
* parameter's type is always {@link ILlmSchema.IObject}. The
* parameter's type is always {@link ILlmSchemaV3.IObject}. The
* properties' rule is:

@@ -231,3 +234,9 @@ *

*/
export interface ISeparated<Schema extends ILlmSchema = ILlmSchema> {
export interface ISeparated<
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema,
> {
/**

@@ -247,3 +256,9 @@ * Parameters that would be composed by the LLM.

*/
export interface ISeparatedParameter<Schema extends ILlmSchema = ILlmSchema> {
export interface ISeparatedParameter<
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema,
> {
/**

@@ -250,0 +265,0 @@ * Index of the parameter.

@@ -0,3 +1,6 @@

import { IChatGptSchema } from "./IChatGptSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { ILlmFunction } from "./ILlmFunction";
import { ILlmSchema } from "./ILlmSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";

@@ -34,4 +37,16 @@ /**

*/
export interface ILlmApplication<Schema extends ILlmSchema = ILlmSchema> {
export interface ILlmApplication<
Model extends ILlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema = ILlmApplication.ModelSchema[Model],
> {
/**
* Model of the LLM.
*/
model: Model;
/**
* List of function metadata.

@@ -46,9 +61,24 @@ *

*/
options: ILlmApplication.IOptions<Schema>;
options: ILlmApplication.IOptions<Model, Schema>;
}
export namespace ILlmApplication {
export type Model = "3.0" | "3.1" | "chatgpt" | "gemini";
export type ModelSchema = {
"3.0": ILlmSchemaV3;
"3.1": ILlmSchemaV3_1;
chatgpt: IChatGptSchema;
gemini: IGeminiSchema;
};
/**
* Options for composing the LLM application.
*/
export interface IOptions<Schema extends ILlmSchema = ILlmSchema> {
export interface IOptions<
Model extends ILlmApplication.Model,
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema,
> {
/**

@@ -59,5 +89,8 @@ * Whether to allow recursive types or not.

*
* By the way, if the model is "chatgpt", the recursive types are always
* allowed without any limitation, due to it supports the reference type.
*
* @default 3
*/
recursive: false | number;
recursive: Model extends "chatgpt" ? never : false | number;

@@ -71,3 +104,3 @@ /**

* parameter type has configured
* {@link ILlmSchema.IString.contentMediaType} which indicates file
* {@link ILlmSchemaV3.IString.contentMediaType} which indicates file
* uploading, it must be composed by human, not by LLM

@@ -74,0 +107,0 @@ * (Large Language Model).

@@ -1,2 +0,5 @@

import { ILlmSchema } from "./ILlmSchema";
import { IChatGptSchema } from "./IChatGptSchema";
import { IGeminiSchema } from "./IGeminiSchema";
import { ILlmSchemaV3 } from "./ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";

@@ -25,3 +28,5 @@ /**

*/
export interface ILlmFunction<Schema extends ILlmSchema = ILlmSchema> {
export interface ILlmFunction<
Schema extends ILlmSchemaV3 | ILlmSchemaV3_1 | IChatGptSchema | IGeminiSchema,
> {
/**

@@ -83,3 +88,9 @@ * Representative name of the function.

*/
export interface ISeparated<Schema extends ILlmSchema = ILlmSchema> {
export interface ISeparated<
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema,
> {
/**

@@ -99,3 +110,9 @@ * Parameters that would be composed by the LLM.

*/
export interface ISeparatedParameter<Schema extends ILlmSchema = ILlmSchema> {
export interface ISeparatedParameter<
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema,
> {
/**

@@ -102,0 +119,0 @@ * Index of the parameter.

@@ -0,3 +1,7 @@

import { IChatGptSchema } from "../structures/IChatGptSchema";
import { IGeminiSchema } from "../structures/IGeminiSchema";
import { IHttpLlmFunction } from "../structures/IHttpLlmFunction";
import { ILlmFunction } from "../structures/ILlmFunction";
import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3";
import { ILlmSchemaV3_1 } from "../structures/ILlmSchemaV3_1";

@@ -13,7 +17,13 @@ /**

*/
export interface IProps {
export interface IProps<
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema,
> {
/**
* Target function to call.
*/
function: ILlmFunction;
function: ILlmFunction<Schema>;

@@ -46,4 +56,12 @@ /**

*/
export const parameters = (props: IProps): unknown[] => {
const separated: IHttpLlmFunction.ISeparated | undefined =
export const parameters = <
Schema extends
| ILlmSchemaV3
| ILlmSchemaV3_1
| IChatGptSchema
| IGeminiSchema,
>(
props: IProps<Schema>,
): unknown[] => {
const separated: IHttpLlmFunction.ISeparated<Schema> | undefined =
props.function.separated;

@@ -85,3 +103,3 @@ if (separated === undefined)

.map((_, i) => value(x[i], y[i]))
: y ?? x;
: (y ?? x);

@@ -88,0 +106,0 @@ const combineObject = (x: any, y: any): any => {

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc