@samchon/openapi - npm Package Compare versions

Comparing version 2.0.0-dev.20241129-8 to 2.0.0-dev.20241130-2


lib/converters/HttpLlmConverter.d.ts

@@ -15,3 +15,3 @@ import { IHttpLlmApplication } from "../structures/IHttpLlmApplication";
 predicate: (schema: ILlmSchema.ModelSchema[Model]) => boolean;
-}) => IHttpLlmFunction.ISeparated<ILlmSchema.ModelParameters[Model]>;
+}) => IHttpLlmFunction.ISeparated<Model>;
 }

@@ -45,7 +45,7 @@ import { IChatGptSchema } from "../structures/IChatGptSchema";
 "3.0": {
-constraint: false;
+constraint: true;
 recursive: number;
 };
 "3.1": {
-constraint: false;
+constraint: true;
 reference: false;

@@ -52,0 +52,0 @@ };

@@ -76,9 +76,9 @@ "use strict";
 "3.0": {
-constraint: false,
+constraint: true,
 recursive: 3,
 },
 "3.1": {
-constraint: false,
+constraint: true,
 reference: false,
 },
 };
 import { OpenApi } from "../OpenApi";
-import { IChatGptSchema } from "./IChatGptSchema";
-import { IGeminiSchema } from "./IGeminiSchema";
 import { IHttpMigrateRoute } from "./IHttpMigrateRoute";
 import { ILlmSchema } from "./ILlmSchema";
-import { ILlmSchemaV3 } from "./ILlmSchemaV3";
-import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";
 /**

@@ -147,3 +143,3 @@ * LLM function calling schema from HTTP (OpenAPI) operation.
 */
-separated?: IHttpLlmFunction.ISeparated<ILlmSchema.ModelParameters[Model]>;
+separated?: IHttpLlmFunction.ISeparated<Model>;
 /**

@@ -216,12 +212,12 @@ * Expected return type.
 */
-interface ISeparated<Parameters extends ILlmSchemaV3.IParameters | ILlmSchemaV3_1.IParameters | IChatGptSchema.IParameters | IGeminiSchema.IParameters> {
+interface ISeparated<Model extends ILlmSchema.Model> {
 /**
 * Parameters that would be composed by the LLM.
 */
-llm: Parameters | null;
+llm: ILlmSchema.ModelParameters[Model] | null;
 /**
 * Parameters that would be composed by the human.
 */
-human: Parameters | null;
+human: ILlmSchema.ModelParameters[Model] | null;
 }
 }

@@ -1,7 +0,2 @@
-import { IChatGptSchema } from "./IChatGptSchema";
-import { IClaudeSchema } from "./IClaudeSchema";
-import { IGeminiSchema } from "./IGeminiSchema";
 import { ILlmSchema } from "./ILlmSchema";
-import { ILlmSchemaV3 } from "./ILlmSchemaV3";
-import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";
 /**

@@ -41,3 +36,3 @@ * LLM function metadata.
 */
-separated?: ILlmFunction.ISeparated<ILlmSchema.ModelParameters[Model]>;
+separated?: ILlmFunction.ISeparated<Model>;
 /**

@@ -89,12 +84,12 @@ * Expected return type.
 */
-interface ISeparated<Parameters extends IChatGptSchema.IParameters | IClaudeSchema.IParameters | IGeminiSchema.IParameters | ILlmSchemaV3.IParameters | ILlmSchemaV3_1.IParameters> {
+interface ISeparated<Model extends ILlmSchema.Model> {
 /**
 * Parameters that would be composed by the LLM.
 */
-llm: Parameters | null;
+llm: ILlmSchema.ModelParameters[Model] | null;
 /**
 * Parameters that would be composed by the human.
 */
-human: Parameters | null;
+human: ILlmSchema.ModelParameters[Model] | null;
 }
 }
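
Both IHttpLlmFunction.ISeparated and ILlmFunction.ISeparated are now generic over the model key (ILlmSchema.Model) instead of a union of concrete parameter types, with the llm and human members resolved through ILlmSchema.ModelParameters[Model]. A minimal sketch of how consuming code reads after this change, assuming both types are importable from the package root; the helper name and the "3.1" model literal are illustrative, not part of the package:

import { ILlmFunction, ILlmSchema } from "@samchon/openapi";

// The separated structure is parameterized by the model key; both sides are
// typed as ILlmSchema.ModelParameters[Model] | null after this refactor.
const needsHumanInput = <Model extends ILlmSchema.Model>(
  separated: ILlmFunction.ISeparated<Model>,
): boolean => separated.human !== null;

// Hypothetical usage with a concrete model key assumed to be a member of ILlmSchema.Model:
declare const separated: ILlmFunction.ISeparated<"3.1">;
needsHumanInput(separated);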

@@ -435,8 +435,9 @@ /**
 *
-* This is because the some LLM model's function calling understands the constraint
-* properties when the function parameter types are simple, however it occurs
-* some errors when the parameter types are complex.
+* This is because some LLM schema model like {@link IChatGptSchema}
+* has banned such constraint, because their LLM cannot understand the
+* constraint properties and occur the hallucination.
 *
-* Therefore, considering the complexity of your parameter types, determine
-* which is better, to allow the constraint properties or not.
+* Therefore, considering your LLM model's performance, capability,
+* and the complexity of your parameter types, determine which is better,
+* to allow the constraint properties or not.
 *

@@ -456,3 +457,3 @@ * - {@link ILlmSchemaV3_1.INumber.minimum}
 *
-* @default false
+* @default true
 */

@@ -459,0 +460,0 @@ constraint: boolean;

@@ -392,8 +392,9 @@ /**
 *
-* This is because the some LLM model's function calling understands the constraint
-* properties when the function parameter types are simple, however it occurs
-* some errors when the parameter types are complex.
+* This is because some LLM schema model like {@link IGeminiSchema}
+* has banned such constraint, because their LLM cannot understand the
+* constraint properties and occur the hallucination.
 *
-* Therefore, considering the complexity of your parameter types, determine
-* which is better, to allow the constraint properties or not.
+* Therefore, considering your LLM model's performance, capability,
+* and the complexity of your parameter types, determine which is better,
+* to allow the constraint properties or not.
 *

@@ -413,3 +414,3 @@ * - {@link ILlmSchemaV3.INumber.minimum}
 *
-* @default false
+* @default true
 */

@@ -416,0 +417,0 @@ constraint: boolean;
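
Both @default tags flip from false to true here, so constraint keywords such as ILlmSchemaV3_1.INumber.minimum are now kept by default for the "3.0" and "3.1" schema models. A minimal sketch of opting back out, assuming only the IConfig fields visible in this diff and that the object is passed wherever an ILlmSchemaV3_1.IConfig is accepted:

import { ILlmSchemaV3_1 } from "@samchon/openapi";

// Restore the previous behavior: with constraint set back to false, constraint
// properties are not allowed in the composed schema (per the doc comment above).
const v31Config: ILlmSchemaV3_1.IConfig = {
  constraint: false, // default becomes true as of 2.0.0-dev.20241130-2
  reference: false,  // unchanged default shown in this diff
};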

 {
   "name": "@samchon/openapi",
-  "version": "2.0.0-dev.20241129-8",
+  "version": "2.0.0-dev.20241130-2",
   "description": "OpenAPI definitions and converters for 'typia' and 'nestia'.",

@@ -5,0 +5,0 @@ "main": "./lib/index.js",

@@ -90,3 +90,3 @@ import { OpenApi } from "../OpenApi";
 predicate: (schema: ILlmSchema.ModelSchema[Model]) => boolean;
-}): IHttpLlmFunction.ISeparated<ILlmSchema.ModelParameters[Model]> => {
+}): IHttpLlmFunction.ISeparated<Model> => {
 const separator: (props: {

@@ -106,5 +106,3 @@ predicate: (schema: ILlmSchema.ModelSchema[Model]) => boolean;
 human,
-} satisfies IHttpLlmFunction.ISeparated<
-ILlmSchema.ModelParameters[Model]
-> as IHttpLlmFunction.ISeparated<ILlmSchema.ModelParameters[Model]>;
+} satisfies IHttpLlmFunction.ISeparated<Model>;
 };

@@ -111,0 +109,0 @@

@@ -70,9 +70,9 @@ import { IChatGptSchema } from "../structures/IChatGptSchema";
 "3.0": {
-constraint: false,
+constraint: true,
 recursive: 3,
 } satisfies ILlmSchemaV3.IConfig,
 "3.1": {
-constraint: false,
+constraint: true,
 reference: false,
 } satisfies ILlmSchemaV3_1.IConfig,
 };
 import { OpenApi } from "../OpenApi";
-import { IChatGptSchema } from "./IChatGptSchema";
-import { IGeminiSchema } from "./IGeminiSchema";
 import { IHttpMigrateRoute } from "./IHttpMigrateRoute";
 import { ILlmSchema } from "./ILlmSchema";
-import { ILlmSchemaV3 } from "./ILlmSchemaV3";
-import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";

@@ -153,3 +149,3 @@ /**
 */
-separated?: IHttpLlmFunction.ISeparated<ILlmSchema.ModelParameters[Model]>;
+separated?: IHttpLlmFunction.ISeparated<Model>;

@@ -228,13 +224,7 @@ /**
 */
-export interface ISeparated<
-Parameters extends
-| ILlmSchemaV3.IParameters
-| ILlmSchemaV3_1.IParameters
-| IChatGptSchema.IParameters
-| IGeminiSchema.IParameters,
-> {
+export interface ISeparated<Model extends ILlmSchema.Model> {
 /**
 * Parameters that would be composed by the LLM.
 */
-llm: Parameters | null;
+llm: ILlmSchema.ModelParameters[Model] | null;

@@ -244,4 +234,4 @@ /**
 */
-human: Parameters | null;
+human: ILlmSchema.ModelParameters[Model] | null;
 }
 }

@@ -1,7 +0,2 @@
-import { IChatGptSchema } from "./IChatGptSchema";
-import { IClaudeSchema } from "./IClaudeSchema";
-import { IGeminiSchema } from "./IGeminiSchema";
 import { ILlmSchema } from "./ILlmSchema";
-import { ILlmSchemaV3 } from "./ILlmSchemaV3";
-import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1";

@@ -44,3 +39,3 @@ /**
 */
-separated?: ILlmFunction.ISeparated<ILlmSchema.ModelParameters[Model]>;
+separated?: ILlmFunction.ISeparated<Model>;

@@ -97,14 +92,7 @@ /**
 */
-export interface ISeparated<
-Parameters extends
-| IChatGptSchema.IParameters
-| IClaudeSchema.IParameters
-| IGeminiSchema.IParameters
-| ILlmSchemaV3.IParameters
-| ILlmSchemaV3_1.IParameters,
-> {
+export interface ISeparated<Model extends ILlmSchema.Model> {
 /**
 * Parameters that would be composed by the LLM.
 */
-llm: Parameters | null;
+llm: ILlmSchema.ModelParameters[Model] | null;

@@ -114,4 +102,4 @@ /**
 */
-human: Parameters | null;
+human: ILlmSchema.ModelParameters[Model] | null;
 }
 }

@@ -510,8 +510,9 @@ /**
 *
-* This is because the some LLM model's function calling understands the constraint
-* properties when the function parameter types are simple, however it occurs
-* some errors when the parameter types are complex.
+* This is because some LLM schema model like {@link IChatGptSchema}
+* has banned such constraint, because their LLM cannot understand the
+* constraint properties and occur the hallucination.
 *
-* Therefore, considering the complexity of your parameter types, determine
-* which is better, to allow the constraint properties or not.
+* Therefore, considering your LLM model's performance, capability,
+* and the complexity of your parameter types, determine which is better,
+* to allow the constraint properties or not.
 *

@@ -531,3 +532,3 @@ * - {@link ILlmSchemaV3_1.INumber.minimum}
 *
-* @default false
+* @default true
 */

@@ -534,0 +535,0 @@ constraint: boolean;

@@ -467,8 +467,9 @@ /**
 *
-* This is because the some LLM model's function calling understands the constraint
-* properties when the function parameter types are simple, however it occurs
-* some errors when the parameter types are complex.
+* This is because some LLM schema model like {@link IGeminiSchema}
+* has banned such constraint, because their LLM cannot understand the
+* constraint properties and occur the hallucination.
 *
-* Therefore, considering the complexity of your parameter types, determine
-* which is better, to allow the constraint properties or not.
+* Therefore, considering your LLM model's performance, capability,
+* and the complexity of your parameter types, determine which is better,
+* to allow the constraint properties or not.
 *

@@ -488,3 +489,3 @@ * - {@link ILlmSchemaV3.INumber.minimum}
 *
-* @default false
+* @default true
 */

@@ -491,0 +492,0 @@ constraint: boolean;

