@kubb/core - npm package version comparison

Comparing version 0.0.0-canary-20240806053140 to 0.0.0-canary-20241104172400

dist/chunk-2EP2SOAU.cjs

dist/index.d.ts

@@ -1,465 +0,9 @@

import PQueue from 'p-queue';
import { write, read } from '@kubb/fs';
import * as KubbFile from '@kubb/fs/types';
import { BaseName, File, UUID } from '@kubb/fs/src/types.ts';
import { PossiblePromise, TupleToUnion, ObjValueTuple, GreaterThan } from '@kubb/types';
import { DirectoryTreeOptions } from 'directory-tree';
import { E as EventEmitter, L as Logger } from './logger-DChjnJMn.js';
import { P as PluginContext, F as FileManager, a as PluginManager, U as UserConfig, C as Config, I as InputPath, b as PluginFactoryOptions, c as UserPluginWithLifeCycle } from './FileManager-Cn8nSj1H.js';
export { B as BarrelType, d as FileMetaBase, G as GetPluginFactoryOptions, o as Group, e as InputData, O as Output, i as Plugin, f as PluginKey, k as PluginLifecycle, l as PluginLifecycleHooks, m as PluginParameter, j as PluginWithLifeCycle, n as ResolveNameParams, R as ResolvePathParams, h as UserPlugin, g as getSource } from './FileManager-Cn8nSj1H.js';
import { L as Logger } from './logger-DvbHXjIO.js';
import { PossiblePromise } from '@kubb/types';
import '@kubb/fs';
import '@kubb/fs/types';
import 'consola';
import 'ora';
type BarrelManagerOptions = {
treeNode?: DirectoryTreeOptions;
isTypeOnly?: boolean;
/**
* Add .ts or .js
*/
extName?: KubbFile.Extname;
};
type RequiredPluginLifecycle = Required<PluginLifecycle>;
/**
* Get the type of the first argument in a function.
* @example Arg0<(a: string, b: number) => void> -> string
*/
type Argument0<H extends keyof PluginLifecycle> = Parameters<RequiredPluginLifecycle[H]>[0];
type Strategy$1 = 'hookFirst' | 'hookForPlugin' | 'hookParallel' | 'hookReduceArg0' | 'hookSeq';
type Executer<H extends PluginLifecycleHooks = PluginLifecycleHooks> = {
strategy: Strategy$1;
hookName: H;
plugin: Plugin;
parameters?: unknown[] | undefined;
output?: unknown;
};
type ParseResult<H extends PluginLifecycleHooks> = RequiredPluginLifecycle[H];
type SafeParseResult<H extends PluginLifecycleHooks, Result = ReturnType<ParseResult<H>>> = {
result: Result;
plugin: Plugin;
};
type Options$2 = {
logger: Logger;
/**
* Task for the FileManager
*/
task: (file: ResolvedFile) => Promise<ResolvedFile>;
};
type Events = {
execute: [executer: Executer];
executed: [executer: Executer];
error: [error: Error];
};
type GetFileProps<TOptions = object> = {
name: string;
mode?: KubbFile.Mode;
extName: KubbFile.Extname;
pluginKey: Plugin['key'];
options?: TOptions;
};
declare class PluginManager {
#private;
readonly plugins: PluginWithLifeCycle[];
readonly fileManager: FileManager;
readonly events: EventEmitter<Events>;
readonly config: Config;
readonly executed: Array<Executer>;
readonly logger: Logger;
readonly queue: PQueue;
constructor(config: Config, options: Options$2);
getFile<TOptions = object>({ name, mode, extName, pluginKey, options }: GetFileProps<TOptions>): KubbFile.File<{
pluginKey: Plugin['key'];
}>;
resolvePath: <TOptions = object>(params: ResolvePathParams<TOptions>) => KubbFile.OptionalPath;
resolveName: (params: ResolveNameParams) => string;
/**
* Instead of calling `pluginManager.events.on` you can use `pluginManager.on`. This one also has better types.
*/
on<TEventName extends keyof Events & string>(eventName: TEventName, handler: (...eventArg: Events[TEventName]) => void): void;
/**
* Run a specific hookName for plugin x.
*/
hookForPlugin<H extends PluginLifecycleHooks>({ pluginKey, hookName, parameters, }: {
pluginKey: Plugin['key'];
hookName: H;
parameters: PluginParameter<H>;
}): Promise<Array<ReturnType<ParseResult<H>> | null>> | null;
/**
* Run a specific hookName for plugin x.
*/
hookForPluginSync<H extends PluginLifecycleHooks>({ pluginKey, hookName, parameters, }: {
pluginKey: Plugin['key'];
hookName: H;
parameters: PluginParameter<H>;
}): Array<ReturnType<ParseResult<H>>> | null;
/**
* First non-null result stops and will return its value.
*/
hookFirst<H extends PluginLifecycleHooks>({ hookName, parameters, skipped, }: {
hookName: H;
parameters: PluginParameter<H>;
skipped?: ReadonlySet<Plugin> | null;
}): Promise<SafeParseResult<H>>;
/**
* First non-null result stops and will return its value.
*/
hookFirstSync<H extends PluginLifecycleHooks>({ hookName, parameters, skipped, }: {
hookName: H;
parameters: PluginParameter<H>;
skipped?: ReadonlySet<Plugin> | null;
}): SafeParseResult<H>;
/**
* Run all plugins in parallel (order will be based on `this.plugin` and whether `pre` or `post` is set).
*/
hookParallel<H extends PluginLifecycleHooks, TOuput = void>({ hookName, parameters, }: {
hookName: H;
parameters?: Parameters<RequiredPluginLifecycle[H]> | undefined;
}): Promise<Awaited<TOuput>[]>;
/**
* Chain all plugins; `reduce` can be passed through to handle every returned value. The return value of the first plugin will be used as the first parameter for the next plugin.
*/
hookReduceArg0<H extends PluginLifecycleHooks>({ hookName, parameters, reduce, }: {
hookName: H;
parameters: PluginParameter<H>;
reduce: (reduction: Argument0<H>, result: ReturnType<ParseResult<H>>, plugin: Plugin) => PossiblePromise<Argument0<H> | null>;
}): Promise<Argument0<H>>;
/**
* Chains plugins
*/
hookSeq<H extends PluginLifecycleHooks>({ hookName, parameters }: {
hookName: H;
parameters?: PluginParameter<H>;
}): Promise<void>;
getPluginsByKey(hookName: keyof PluginLifecycle, pluginKey: Plugin['key']): Plugin[];
static getDependedPlugins<T1 extends PluginFactoryOptions, T2 extends PluginFactoryOptions = never, T3 extends PluginFactoryOptions = never, TOutput = T3 extends never ? (T2 extends never ? [T1: Plugin<T1>] : [T1: Plugin<T1>, T2: Plugin<T2>]) : [T1: Plugin<T1>, T2: Plugin<T2>, T3: Plugin<T3>]>(plugins: Array<Plugin>, dependedPluginNames: string | string[]): TOutput;
static get hooks(): readonly ["buildStart", "resolvePath", "resolveName", "load", "transform", "writeFile", "buildEnd"];
}
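
The typed `on` helper declared above wires handlers to the `Events` map; the following is a minimal, hypothetical sketch (assuming a `pluginManager` instance already constructed with a valid `Config` and options), not part of either compared version:

// Illustrative only: `pluginManager` is assumed to exist.
pluginManager.on('error', (error) => {
  console.error('plugin hook failed:', error.message)
})
pluginManager.on('executed', (executer) => {
  console.log(`executed ${executer.hookName} for plugin ${executer.plugin.name}`)
})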
type Plugins = _Register;
type OptionsPlugins = {
[K in keyof Plugins]: Plugins[K]['options'];
};
type PluginUnion = TupleToUnion<ObjValueTuple<OptionsPlugins>>;
interface Cache<TStore extends object = object> {
delete(id: keyof TStore): boolean;
get(id: keyof TStore): TStore[keyof TStore] | null;
has(id: keyof TStore): boolean;
set(id: keyof TStore, value: unknown): void;
}
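
A short hedged sketch of the `Cache` interface above, keyed by a store type's property names (the `Store` shape here is an assumption for illustration):

// Hypothetical store; kubb's own PluginCache maps string keys to [number, unknown] tuples.
type Store = { count: number; label: string }
declare const cache: Cache<Store>
cache.set('count', 1)
const hit = cache.has('count') // boolean
const value = cache.get('count') // Store[keyof Store] | null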
/**
* Config used in `kubb.config.js`
*
* @example import { defineConfig } from '@kubb/core'
* export default defineConfig({
* ...
* })
*/
type UserConfig = Omit<Config, 'root' | 'plugins'> & {
/**
* Project root directory. Can be an absolute path, or a path relative from
* the location of the config file itself.
* @default process.cwd()
*/
root?: string;
/**
* Plugin type can be KubbJSONPlugin or Plugin
* Example: ['@kubb/plugin-oas', { output: false }]
* Or: pluginOas({ output: false })
*/
plugins?: Array<Omit<UnknownUserPlugin, 'api'> | UnionPlugins | [name: string, options: object]>;
};
type InputPath = {
/**
* Path to be used as the input. This can be an absolute path or a path relative to the `root`.
*/
path: string;
};
type InputData = {
/**
* `string` or `object` containing the data.
*/
data: string | unknown;
};
type Input = InputPath | InputData;
/**
* @private
*/
type Config<TInput = Input> = {
/**
* Optional config name to show in CLI output
*/
name?: string;
/**
* Project root directory. Can be an absolute path, or a path relative from
* the location of the config file itself.
* @default process.cwd()
*/
root: string;
input: TInput;
output: {
/**
* Path to be used to export all generated files.
* This can be an absolute path, or a path relative to the defined `root` option.
*/
path: string;
/**
* Clean output directory before each build.
*/
clean?: boolean;
/**
* Write files to the file system.
* This is used for the playground.
* @default true
*/
write?: boolean;
};
/**
* Array of Kubb plugins to use.
* The plugin/package can foresee some options that you need to pass through.
* Sometimes a plugin depends on another plugin; if that's the case you will get an error back from the plugin you installed.
*/
plugins?: Array<Plugin>;
/**
* Hooks that will be called when a specific action is triggered in Kubb.
*/
hooks?: {
/**
* Hook that will be triggered at the end of all executions.
* Useful for running Prettier or ESLint to format/lint your code.
*/
done?: string | Array<string>;
};
};
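
A hedged sketch of how the `UserConfig`/`Config` shape above is typically consumed through `defineConfig` (as in the JSDoc example earlier); the input and output paths and the `done` command below are illustrative assumptions, not taken from this diff:

// Hypothetical kubb.config.js; paths and the hook command are assumptions.
import { defineConfig } from '@kubb/core'

export default defineConfig({
  input: { path: './petstore.yaml' },
  output: { path: './src/gen', clean: true },
  hooks: { done: ['prettier --write ./src/gen'] },
  plugins: [],
})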
type UnionPlugins = PluginUnion;
type ObjectPlugin = keyof OptionsPlugins;
type PluginFactoryOptions<
/**
* Name to be used for the plugin; this will also be used for the key.
*/
TName extends string = string,
/**
* Options of the plugin.
*/
TOptions extends object = object,
/**
* Options of the plugin that can be used later on; see `options` inside your plugin config.
*/
TResolvedOptions extends object = TOptions,
/**
* API that you want to expose to other plugins.
*/
TAPI = any,
/**
* When calling `resolvePath` you can specify better types.
*/
TResolvePathOptions extends object = object> = {
name: TName;
/**
* Same behaviour as `QueryKey` in `@tanstack/react-query`
*/
key: PluginKey<TName | string>;
options: TOptions;
resolvedOptions: TResolvedOptions;
api: TAPI;
resolvePathOptions: TResolvePathOptions;
};
type PluginKey<TName> = [name: TName, identifier?: string | number];
type GetPluginFactoryOptions<TPlugin extends UserPlugin> = TPlugin extends UserPlugin<infer X> ? X : never;
type UserPlugin<TOptions extends PluginFactoryOptions = PluginFactoryOptions> = {
/**
* Unique name used for the plugin
* The name of the plugin follows the format scope:foo-bar or foo-bar; adding scope: avoids naming conflicts with other plugins.
* @example @kubb/typescript
*/
name: TOptions['name'];
/**
* Options set for a specific plugin (see kubb.config.js), a passthrough of options.
*/
options: TOptions['resolvedOptions'];
/**
* Specifies the preceding plugins for the current plugin. You can pass an array of preceding plugin names, and the current plugin will be executed after these plugins.
* Can be used to validate depended plugins.
*/
pre?: Array<string>;
/**
* Specifies the succeeding plugins for the current plugin. You can pass an array of succeeding plugin names, and the current plugin will be executed before these plugins.
*/
post?: Array<string>;
} & (TOptions['api'] extends never ? {
api?: never;
} : {
api: (this: TOptions['name'] extends 'core' ? null : Omit<PluginContext<TOptions>, 'addFile'>) => TOptions['api'];
});
type UserPluginWithLifeCycle<TOptions extends PluginFactoryOptions = PluginFactoryOptions> = UserPlugin<TOptions> & PluginLifecycle<TOptions>;
type UnknownUserPlugin = UserPlugin<PluginFactoryOptions<any, any, any, any, any>>;
type Plugin<TOptions extends PluginFactoryOptions = PluginFactoryOptions> = {
/**
* Unique name used for the plugin
* @example @kubb/typescript
*/
name: TOptions['name'];
/**
* Internal key used when a developer uses more than one of the same plugin
* @private
*/
key: TOptions['key'];
/**
* Specifies the preceding plugins for the current plugin. You can pass an array of preceding plugin names, and the current plugin will be executed after these plugins.
* Can be used to validate depended plugins.
*/
pre?: Array<string>;
/**
* Specifies the succeeding plugins for the current plugin. You can pass an array of succeeding plugin names, and the current plugin will be executed before these plugins.
*/
post?: Array<string>;
/**
* Options set for a specific plugin (see kubb.config.js), a passthrough of options.
*/
options: TOptions['resolvedOptions'];
} & (TOptions['api'] extends never ? {
api?: never;
} : {
api: TOptions['api'];
});
type PluginWithLifeCycle<TOptions extends PluginFactoryOptions = PluginFactoryOptions> = Plugin<TOptions> & PluginLifecycle<TOptions>;
type PluginLifecycle<TOptions extends PluginFactoryOptions = PluginFactoryOptions> = {
/**
* Start of the lifecycle of a plugin.
* @type hookParallel
*/
buildStart?: (this: PluginContext<TOptions>, Config: Config) => PossiblePromise<void>;
/**
* Resolve to a path based on a baseName (example: `./Pet.ts`) and a directory (example: `./models`).
* Options can also be included.
* @type hookFirst
* @example ('./Pet.ts', './src/gen/') => '/src/gen/Pet.ts'
*/
resolvePath?: (this: PluginContext<TOptions>, baseName: string, mode?: KubbFile.Mode, options?: TOptions['resolvePathOptions']) => KubbFile.OptionalPath;
/**
* Resolve to a name based on a string.
* Useful when converting to PascalCase or camelCase.
* @type hookFirst
* @example ('pet') => 'Pet'
*/
resolveName?: (this: PluginContext<TOptions>, name: ResolveNameParams['name'], type?: ResolveNameParams['type']) => string;
/**
* Makes it possible to run async logic to override the path defined previously by `resolvePath`.
* @type hookFirst
*/
load?: (this: Omit<PluginContext<TOptions>, 'addFile'>, path: KubbFile.Path) => PossiblePromise<TransformResult | null>;
/**
* Transform the source-code.
* @type hookReduceArg0
*/
transform?: (this: Omit<PluginContext<TOptions>, 'addFile'>, source: string, path: KubbFile.Path) => PossiblePromise<TransformResult>;
/**
* Write the result to the file-system based on the id(defined by `resolvePath` or changed by `load`).
* @type hookParallel
*/
writeFile?: (this: Omit<PluginContext<TOptions>, 'addFile'>, path: KubbFile.Path, source: string | undefined) => PossiblePromise<string | void>;
/**
* End of the plugin lifecycle.
* @type hookParallel
*/
buildEnd?: (this: PluginContext<TOptions>) => PossiblePromise<void>;
};
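
A minimal, hypothetical plugin object matching the `UserPluginWithLifeCycle` shape and the lifecycle hooks declared above; the plugin name, options, and naming logic are assumptions for illustration, not part of either compared version:

// Illustrative plugin sketch; 'my-plugin' and its options are made up.
import type { PluginFactoryOptions, UserPluginWithLifeCycle } from '@kubb/core'

type MyPluginOptions = PluginFactoryOptions<'my-plugin', { suffix?: string }, { suffix?: string }, never>

const myPlugin: UserPluginWithLifeCycle<MyPluginOptions> = {
  name: 'my-plugin',
  options: { suffix: 'Api' },
  // resolveName hook: upper-case the first character of the given name
  resolveName(name) {
    return name.charAt(0).toUpperCase() + name.slice(1)
  },
  // transform hook: pass the source through unchanged
  transform(source) {
    return source
  },
}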
type PluginLifecycleHooks = keyof PluginLifecycle;
type PluginParameter<H extends PluginLifecycleHooks> = Parameters<Required<PluginLifecycle>[H]>;
type PluginCache = Record<string, [number, unknown]>;
type ResolvePathParams<TOptions = object> = {
pluginKey?: Plugin['key'];
baseName: string;
mode?: KubbFile.Mode;
/**
* Options to be passed as the third parameter of 'resolvePath'
*/
options?: TOptions;
};
type ResolveNameParams = {
name: string;
pluginKey?: Plugin['key'];
/**
* `file` will be used to customize the name of the created file (uses camelCase)
* `function` can be used to customize the exported functions (uses camelCase)
* `type` is a special type for TypeScript (uses PascalCase)
*/
type?: 'file' | 'function' | 'type';
};
type PluginContext<TOptions extends PluginFactoryOptions = PluginFactoryOptions> = {
config: Config;
cache: Cache<PluginCache>;
fileManager: FileManager;
pluginManager: PluginManager;
addFile: (...file: Array<KubbFile.File>) => Promise<Array<KubbFile.File>>;
resolvePath: (params: ResolvePathParams<TOptions['resolvePathOptions']>) => KubbFile.OptionalPath;
resolveName: (params: ResolveNameParams) => string;
logger: Logger;
/**
* All plugins
*/
plugins: Plugin[];
/**
* Current plugin
*/
plugin: Plugin<TOptions>;
};
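
Inside a lifecycle hook, `this` is the `PluginContext` declared above; a hedged sketch of a `buildStart` hook resolving a path and a name (the 'Pet.ts' baseName and the logged message are assumptions):

// Hypothetical hook body; values are illustrative.
import type { PluginLifecycle } from '@kubb/core'

const buildStart: PluginLifecycle['buildStart'] = async function (config) {
  const path = this.resolvePath({ baseName: 'Pet.ts', pluginKey: this.plugin.key })
  const name = this.resolveName({ name: 'pet', type: 'type' })
  console.log(`resolved ${name} at ${path ?? config.output.path}`)
}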
type TransformResult = string | null;
type ResolvedFile<TMeta extends FileMetaBase = FileMetaBase, TBaseName extends BaseName = BaseName> = File<TMeta, TBaseName> & {
/**
* @default crypto.randomUUID()
*/
id: UUID;
/**
* Contains the first part of the baseName, generated based on baseName
* @link https://nodejs.org/api/path.html#pathformatpathobject
*/
name: string;
};
type FileMetaBase = {
pluginKey?: Plugin['key'];
};
type FileWithMeta<TMeta extends FileMetaBase = FileMetaBase> = KubbFile.File<TMeta>;
type AddResult<T extends Array<FileWithMeta>> = Promise<Awaited<GreaterThan<T['length'], 1> extends true ? Promise<ResolvedFile[]> : Promise<ResolvedFile>>>;
type AddIndexesProps = {
/**
* Root based on root and output.path specified in the config
*/
root: string;
/**
* Output for plugin
*/
output: {
path: string;
exportAs?: string;
extName?: KubbFile.Extname;
exportType?: 'barrel' | 'barrelNamed' | false;
};
logger: Logger;
options?: BarrelManagerOptions;
meta?: FileWithMeta['meta'];
};
type Options$1 = {
queue?: PQueue;
task?: (file: ResolvedFile) => Promise<ResolvedFile>;
};
declare class FileManager {
#private;
constructor({ task, queue }?: Options$1);
get files(): Array<FileWithMeta>;
get isExecuting(): boolean;
add<T extends Array<FileWithMeta> = Array<FileWithMeta>>(...files: T): AddResult<T>;
addIndexes({ root, output, meta, logger, options }: AddIndexesProps): Promise<void>;
getCacheByUUID(UUID: KubbFile.UUID): FileWithMeta | undefined;
get(path: KubbFile.Path): Array<FileWithMeta> | undefined;
remove(path: KubbFile.Path): void;
write(...params: Parameters<typeof write>): Promise<string | undefined>;
read(...params: Parameters<typeof read>): Promise<string>;
static getSource<TMeta extends FileMetaBase = FileMetaBase>(file: FileWithMeta<TMeta>): Promise<string>;
static combineFiles<TMeta extends FileMetaBase = FileMetaBase>(files: Array<FileWithMeta<TMeta> | null>): Array<FileWithMeta<TMeta>>;
static getMode(path: string | undefined | null): KubbFile.Mode;
static get extensions(): Array<KubbFile.Extname>;
static isJavascript(baseName: string): boolean;
}
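
A small hedged sketch limited to the static helpers on `FileManager` declared above (the example paths are assumptions; the return values depend on the package's own `KubbFile` types):

// Illustrative calls to the static helpers; no FileManager instance is required.
import { FileManager } from '@kubb/core'

const mode = FileManager.getMode('./src/gen/models/Pet.ts') // KubbFile.Mode
const extensions = FileManager.extensions // Array<KubbFile.Extname>
const isJs = FileManager.isJavascript('Pet.js')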
type BuildOptions = {

@@ -509,5 +53,3 @@ config: PluginContext['config'];

/**
* Type helper to make it easier to use kubb.config.js
* accepts a direct {@link Config} object, or a function that returns it.
* The function receives a {@link ConfigEnv} object that exposes two properties:
* Type helper to make it easier to use vite.config.ts accepts a direct UserConfig object, or a function that returns it. The function receives a ConfigEnv object.
*/

@@ -520,15 +62,6 @@ declare function defineConfig(options: PossiblePromise<UserConfig | Array<UserConfig>> | ((

/**
* Behaves as an Error to log a warning in the console (still stops the execution)
*/
declare class Warning extends Error {
constructor(message?: string, options?: {
cause: Error;
});
}
/**
* Abstract class that contains the building blocks for plugins to create their own Generator
* @link idea based on https://github.com/colinhacks/zod/blob/master/src/types.ts#L137
*/
declare abstract class Generator<TOptions = unknown, TContext = unknown> {
declare abstract class BaseGenerator<TOptions = unknown, TContext = unknown> {
#private;

@@ -571,3 +104,3 @@ constructor(options?: TOptions, context?: TContext);

type ValueOfPromiseFuncArray<TInput extends Array<unknown>> = TInput extends Array<PromiseFunc$1<infer X, infer Y>> ? X | Y : never;
type SeqOutput<TInput extends Array<PromiseFunc$1<TValue, null>>, TValue> = Array<Awaited<ValueOfPromiseFuncArray<TInput>>>;
type SeqOutput<TInput extends Array<PromiseFunc$1<TValue, null>>, TValue> = Promise<Array<Awaited<ValueOfPromiseFuncArray<TInput>>>>;
type HookFirstOutput<TInput extends Array<PromiseFunc$1<TValue, null>>, TValue = unknown> = ValueOfPromiseFuncArray<TInput>;

@@ -588,5 +121,2 @@ type HookParallelOutput<TInput extends Array<PromiseFunc$1<TValue, null>>, TValue> = Promise<PromiseSettledResult<Awaited<ValueOfPromiseFuncArray<TInput>>>[]>;

interface _Register {
}
export { type Config, FileManager, type FileMetaBase, Generator, type GetPluginFactoryOptions, type InputData, type InputPath, type ObjectPlugin, PackageManager, type Plugin, type PluginCache, type PluginContext, type PluginFactoryOptions, type PluginKey, type PluginLifecycle, type PluginLifecycleHooks, PluginManager, type PluginParameter, type PluginWithLifeCycle, PromiseManager, type ResolveNameParams, type ResolvePathParams, type TransformResult, type UnionPlugins, type UserConfig, type UserPlugin, type UserPluginWithLifeCycle, Warning, type _Register, build, createPlugin, build as default, defineConfig, isInputPath, safeBuild };
export { BaseGenerator, Config, FileManager, InputPath, PackageManager, PluginContext, PluginFactoryOptions, PluginManager, PromiseManager, UserConfig, UserPluginWithLifeCycle, build, createPlugin, build as default, defineConfig, isInputPath, safeBuild };

@@ -1,33 +0,15 @@

import {
EventEmitter,
LogLevel,
createLogger,
p,
randomCliColour
} from "./chunk-3OXCZ5DJ.js";
import {
FileManager
} from "./chunk-SA2GZKXS.js";
import {
transformReservedWord
} from "./chunk-JKZG2IJR.js";
import {
URLPath,
setUniqueName
} from "./chunk-5JZNFPUP.js";
import "./chunk-4X5FFJPJ.js";
import {
__privateAdd,
__privateGet,
__privateMethod,
__privateSet
} from "./chunk-HMLY7DHA.js";
import { transformReservedWord } from './chunk-2EU7DMPM.js';
import { EventEmitter, createLogger } from './chunk-HTOO3HNK.js';
import { FileManager, setUniqueName, processFiles, URLPath } from './chunk-DXGMSPTW.js';
export { FileManager, getSource } from './chunk-DXGMSPTW.js';
import './chunk-BQXM32UO.js';
import './chunk-HBQM723K.js';
import { getRelativePath, read, readSync, clean } from '@kubb/fs';
import path, { resolve, join } from 'node:path';
import mod from 'node:module';
import os from 'node:os';
import { pathToFileURL } from 'node:url';
import { findUp, findUpSync } from 'find-up';
import { coerce, satisfies } from 'semver';
// src/build.ts
import { clean, read } from "@kubb/fs";
// src/PluginManager.ts
import PQueue from "p-queue";
import { readSync } from "@kubb/fs";
// src/utils/executeStrategies.ts

@@ -68,7 +50,6 @@ function hookSeq(promises) {

// src/PromiseManager.ts
var _options;
var PromiseManager = class {
#options = {};
constructor(options = {}) {
__privateAdd(this, _options, {});
__privateSet(this, _options, options);
this.#options = options;
return this;

@@ -81,3 +62,3 @@ }

if (strategy === "first") {
return hookFirst(promises, __privateGet(this, _options).nullCheck);
return hookFirst(promises, this.#options.nullCheck);
}

@@ -90,3 +71,2 @@ if (strategy === "parallel") {

};
_options = new WeakMap();
function isPromise(result) {

@@ -100,43 +80,4 @@ return !!result && typeof result?.then === "function";

// src/errors.ts
var Warning = class extends Error {
constructor(message, options) {
super(message, { cause: options?.cause });
this.name = "Warning";
}
};
var ValidationPluginError = class extends Error {
};
// src/plugin.ts
import path from "path";
// src/utils/cache.ts
function createPluginCache(Store = /* @__PURE__ */ Object.create(null)) {
return {
set(id, value) {
Store[id] = [0, value];
},
get(id) {
const item = Store[id];
if (!item) {
return null;
}
item[0] = 0;
return item[1];
},
has(id) {
const item = Store[id];
if (!item) {
return false;
}
item[0] = 0;
return true;
},
delete(id) {
return delete Store[id];
}
};
}
// src/plugin.ts
function createPlugin(factory) {

@@ -153,3 +94,3 @@ return (options = {}) => {

key: ["core"],
api() {
context() {
return {

@@ -176,8 +117,7 @@ get config() {

resolvePath,
resolveName,
cache: createPluginCache()
resolveName
};
},
resolvePath(baseName) {
const root = path.resolve(this.config.root, this.config.output.path);
const root = path.resolve(options.config.root, options.config.output.path);
return path.resolve(root, baseName);

@@ -192,71 +132,21 @@ },

// src/PluginManager.ts
var _core, _usedPluginNames, _promiseManager, _PluginManager_instances, getSortedPlugins_fn, addExecutedToCallStack_fn, execute_fn, executeSync_fn, catcher_fn, parse_fn;
var PluginManager = class {
plugins = /* @__PURE__ */ new Set();
fileManager;
events = new EventEmitter();
config;
executed = [];
logger;
options;
#core;
#usedPluginNames = {};
#promiseManager;
constructor(config, options) {
__privateAdd(this, _PluginManager_instances);
this.events = new EventEmitter();
this.executed = [];
__privateAdd(this, _core);
__privateAdd(this, _usedPluginNames, {});
__privateAdd(this, _promiseManager);
this.resolvePath = (params) => {
if (params.pluginKey) {
const paths = this.hookForPluginSync({
pluginKey: params.pluginKey,
hookName: "resolvePath",
parameters: [params.baseName, params.mode, params.options]
});
if (paths && paths?.length > 1) {
this.logger.emit("debug", [
`Cannot return a path where the 'pluginKey' ${params.pluginKey ? JSON.stringify(params.pluginKey) : '"'} is not unique enough
Paths: ${JSON.stringify(paths, void 0, 2)}
Falling back on the first item.
`
]);
}
return paths?.at(0);
}
return this.hookFirstSync({
hookName: "resolvePath",
parameters: [params.baseName, params.mode, params.options]
}).result;
};
this.resolveName = (params) => {
if (params.pluginKey) {
const names = this.hookForPluginSync({
pluginKey: params.pluginKey,
hookName: "resolveName",
parameters: [params.name, params.type]
});
if (names && names?.length > 1) {
this.logger.emit("debug", [
`Cannot return a name where the 'pluginKey' ${params.pluginKey ? JSON.stringify(params.pluginKey) : '"'} is not unique enough
Names: ${JSON.stringify(names, void 0, 2)}
Falling back on the first item.
`
]);
}
return transformReservedWord(names?.at(0) || params.name);
}
const name = this.hookFirstSync({
hookName: "resolveName",
parameters: [params.name, params.type]
}).result;
return transformReservedWord(name);
};
this.config = config;
this.options = options;
this.logger = options.logger;
this.queue = new PQueue({ concurrency: 1 });
this.fileManager = new FileManager({
task: options.task,
queue: this.queue
this.fileManager = new FileManager();
this.#promiseManager = new PromiseManager({
nullCheck: (state) => !!state?.result
});
__privateSet(this, _promiseManager, new PromiseManager({
nullCheck: (state) => !!state?.result
}));
const plugins = config.plugins || [];
const core = pluginCore({

@@ -269,13 +159,13 @@ config,

resolveName: this.resolveName.bind(this),
getPlugins: __privateMethod(this, _PluginManager_instances, getSortedPlugins_fn).bind(this)
getPlugins: this.#getSortedPlugins.bind(this)
});
__privateSet(this, _core, __privateMethod(this, _PluginManager_instances, parse_fn).call(this, core, this, core.api.call(null)));
this.plugins = [__privateGet(this, _core), ...plugins].map((plugin) => {
return __privateMethod(this, _PluginManager_instances, parse_fn).call(this, plugin, this, __privateGet(this, _core).api);
this.#core = this.#parse(core, this, core.context.call(null));
[this.#core, ...config.plugins || []].forEach((plugin) => {
const parsedPlugin = this.#parse(plugin, this, this.#core.context);
this.plugins.add(parsedPlugin);
});
return this;
}
getFile({ name, mode, extName, pluginKey, options }) {
let source = "";
const baseName = `${name}${extName}`;
getFile({ name, mode, extname, pluginKey, options }) {
const baseName = `${name}${extname}`;
const path2 = this.resolvePath({ baseName, mode, pluginKey, options });

@@ -285,6 +175,2 @@ if (!path2) {

}
try {
source = readSync(path2);
} catch (_e) {
}
return {

@@ -296,5 +182,65 @@ path: path2,

},
source
sources: []
};
}
resolvePath = (params) => {
if (params.pluginKey) {
const paths = this.hookForPluginSync({
pluginKey: params.pluginKey,
hookName: "resolvePath",
parameters: [params.baseName, params.mode, params.options],
message: `Resolving path '${params.baseName}'`
});
if (paths && paths?.length > 1) {
this.logger.emit("debug", {
date: /* @__PURE__ */ new Date(),
logs: [
`Cannot return a path where the 'pluginKey' ${params.pluginKey ? JSON.stringify(params.pluginKey) : '"'} is not unique enough
Paths: ${JSON.stringify(paths, void 0, 2)}
Falling back on the first item.
`
]
});
}
return paths?.at(0);
}
return this.hookFirstSync({
hookName: "resolvePath",
parameters: [params.baseName, params.mode, params.options],
message: `Resolving path '${params.baseName}'`
}).result;
};
//TODO refactor by using the order of plugins and the cache of the fileManager instead of guessing and recreating the name/path
resolveName = (params) => {
if (params.pluginKey) {
const names = this.hookForPluginSync({
pluginKey: params.pluginKey,
hookName: "resolveName",
parameters: [params.name, params.type],
message: `Resolving name '${params.name}' and type '${params.type}'`
});
if (names && names?.length > 1) {
this.logger.emit("debug", {
date: /* @__PURE__ */ new Date(),
logs: [
`Cannot return a name where the 'pluginKey' ${params.pluginKey ? JSON.stringify(params.pluginKey) : '"'} is not unique enough
Names: ${JSON.stringify(names, void 0, 2)}
Falling back on the first item.
`
]
});
}
return transformReservedWord(names?.at(0) || params.name);
}
const name = this.hookFirstSync({
hookName: "resolveName",
parameters: [params.name, params.type],
message: `Resolving name '${params.name}' and type '${params.type}'`
}).result;
return transformReservedWord(name);
};
/**

@@ -309,17 +255,22 @@ * Instead of calling `pluginManager.events.on` you can use `pluginManager.on`. This one also has better types.

*/
hookForPlugin({
async hookForPlugin({
pluginKey,
hookName,
parameters
parameters,
message
}) {
const plugins = this.getPluginsByKey(hookName, pluginKey);
this.logger.emit("progress_start", { id: hookName, size: plugins.length, message: "Running plugins..." });
const promises = plugins.map((plugin) => {
return __privateMethod(this, _PluginManager_instances, execute_fn).call(this, {
return this.#execute({
strategy: "hookFirst",
hookName,
parameters,
plugin
plugin,
message
});
}).filter(Boolean);
return Promise.all(promises);
const items = await Promise.all(promises);
this.logger.emit("progress_stop", { id: hookName });
return items;
}

@@ -332,13 +283,16 @@ /**

hookName,
parameters
parameters,
message
}) {
const plugins = this.getPluginsByKey(hookName, pluginKey);
return plugins.map((plugin) => {
return __privateMethod(this, _PluginManager_instances, executeSync_fn).call(this, {
const result = plugins.map((plugin) => {
return this.#executeSync({
strategy: "hookFirst",
hookName,
parameters,
plugin
plugin,
message
});
}).filter(Boolean);
return result;
}

@@ -351,13 +305,17 @@ /**

parameters,
skipped
skipped,
message
}) {
const promises = __privateMethod(this, _PluginManager_instances, getSortedPlugins_fn).call(this).filter((plugin) => {
const plugins = this.#getSortedPlugins(hookName).filter((plugin) => {
return skipped ? skipped.has(plugin) : true;
}).map((plugin) => {
});
this.logger.emit("progress_start", { id: hookName, size: plugins.length });
const promises = plugins.map((plugin) => {
return async () => {
const value = await __privateMethod(this, _PluginManager_instances, execute_fn).call(this, {
const value = await this.#execute({
strategy: "hookFirst",
hookName,
parameters,
plugin
plugin,
message
});

@@ -370,3 +328,5 @@ return Promise.resolve({

});
return __privateGet(this, _promiseManager).run("first", promises);
const result = await this.#promiseManager.run("first", promises);
this.logger.emit("progress_stop", { id: hookName });
return result;
}

@@ -379,15 +339,17 @@ /**

parameters,
skipped
skipped,
message
}) {
let parseResult = null;
for (const plugin of __privateMethod(this, _PluginManager_instances, getSortedPlugins_fn).call(this)) {
if (skipped?.has(plugin)) {
continue;
}
const plugins = this.#getSortedPlugins(hookName).filter((plugin) => {
return skipped ? skipped.has(plugin) : true;
});
for (const plugin of plugins) {
parseResult = {
result: __privateMethod(this, _PluginManager_instances, executeSync_fn).call(this, {
result: this.#executeSync({
strategy: "hookFirst",
hookName,
parameters,
plugin
plugin,
message
}),

@@ -407,62 +369,75 @@ plugin

hookName,
parameters
parameters,
message
}) {
const promises = __privateMethod(this, _PluginManager_instances, getSortedPlugins_fn).call(this).map((plugin) => {
return () => __privateMethod(this, _PluginManager_instances, execute_fn).call(this, {
const plugins = this.#getSortedPlugins(hookName);
this.logger.emit("progress_start", { id: hookName, size: plugins.length });
const promises = plugins.map((plugin) => {
return () => this.#execute({
strategy: "hookParallel",
hookName,
parameters,
plugin
plugin,
message
});
});
const results = await __privateGet(this, _promiseManager).run("parallel", promises);
const results = await this.#promiseManager.run("parallel", promises);
results.forEach((result, index) => {
if (isPromiseRejectedResult(result)) {
const plugin = __privateMethod(this, _PluginManager_instances, getSortedPlugins_fn).call(this)[index];
__privateMethod(this, _PluginManager_instances, catcher_fn).call(this, result.reason, plugin, hookName);
const plugin = this.#getSortedPlugins(hookName)[index];
this.#catcher(result.reason, plugin, hookName);
}
});
this.logger.emit("progress_stop", { id: hookName });
return results.filter((result) => result.status === "fulfilled").map((result) => result.value);
}
/**
* Chain all plugins, `reduce` can be passed through to handle every returned value. The return value of the first plugin will be used as the first parameter for the plugin after that.
* Chains plugins
*/
hookReduceArg0({
async hookSeq({
hookName,
parameters,
reduce
message
}) {
const [argument0, ...rest] = parameters;
let promise = Promise.resolve(argument0);
for (const plugin of __privateMethod(this, _PluginManager_instances, getSortedPlugins_fn).call(this)) {
promise = promise.then((arg0) => {
const value = __privateMethod(this, _PluginManager_instances, execute_fn).call(this, {
strategy: "hookReduceArg0",
hookName,
parameters: [arg0, ...rest],
plugin
});
return value;
}).then((result) => reduce.call(__privateGet(this, _core).api, argument0, result, plugin));
}
return promise;
}
/**
* Chains plugins
*/
async hookSeq({ hookName, parameters }) {
const promises = __privateMethod(this, _PluginManager_instances, getSortedPlugins_fn).call(this).map((plugin) => {
return () => __privateMethod(this, _PluginManager_instances, execute_fn).call(this, {
const plugins = this.#getSortedPlugins(hookName);
this.logger.emit("progress_start", { id: hookName, size: plugins.length });
const promises = plugins.map((plugin) => {
return () => this.#execute({
strategy: "hookSeq",
hookName,
parameters,
plugin
plugin,
message
});
});
return __privateGet(this, _promiseManager).run("seq", promises);
await this.#promiseManager.run("seq", promises);
this.logger.emit("progress_stop", { id: hookName });
}
#getSortedPlugins(hookName) {
const plugins = [...this.plugins].filter((plugin) => plugin.name !== "core");
if (hookName) {
return plugins.filter((plugin) => hookName in plugin);
}
return plugins.map((plugin) => {
if (plugin.pre) {
const isValid = plugin.pre.every((pluginName) => plugins.find((pluginToFind) => pluginToFind.name === pluginName));
if (!isValid) {
throw new ValidationPluginError(`This plugin has a pre set that is not valid(${JSON.stringify(plugin.pre, void 0, 2)})`);
}
}
return plugin;
}).sort((a, b) => {
if (b.pre?.includes(a.name)) {
return 1;
}
if (b.post?.includes(a.name)) {
return -1;
}
return 0;
});
}
getPluginsByKey(hookName, pluginKey) {
const plugins = [...this.plugins];
const [searchPluginName, searchIdentifier] = pluginKey;
const pluginByPluginName = plugins.filter((plugin) => plugin[hookName]).filter((item) => {
const pluginByPluginName = plugins.filter((plugin) => hookName in plugin).filter((item) => {
const [name, identifier] = item.key;

@@ -477,7 +452,13 @@ const identifierCheck = identifier?.toString() === searchIdentifier?.toString();

if (!pluginByPluginName?.length) {
const corePlugin = plugins.find((plugin) => plugin.name === "core" && plugin[hookName]);
const corePlugin = plugins.find((plugin) => plugin.name === "core" && hookName in plugin);
if (corePlugin) {
this.logger.emit("debug", [`No hook '${hookName}' for pluginKey '${JSON.stringify(pluginKey)}' found, falling back on the '@kubb/core' plugin`]);
this.logger.emit("debug", {
date: /* @__PURE__ */ new Date(),
logs: [`No hook '${hookName}' for pluginKey '${JSON.stringify(pluginKey)}' found, falling back on the '@kubb/core' plugin`]
});
} else {
this.logger.emit("debug", [`No hook '${hookName}' for pluginKey '${JSON.stringify(pluginKey)}' found, no fallback found in the '@kubb/core' plugin`]);
this.logger.emit("debug", {
date: /* @__PURE__ */ new Date(),
logs: [`No hook '${hookName}' for pluginKey '${JSON.stringify(pluginKey)}' found, no fallback found in the '@kubb/core' plugin`]
});
}

@@ -488,2 +469,128 @@ return corePlugin ? [corePlugin] : [];

}
#addExecutedToCallStack(executer) {
if (executer) {
this.events.emit("executed", executer);
this.executed.push(executer);
this.logger.emit("progressed", { id: executer.hookName, message: `${executer.plugin.name}: ${executer.message}` });
}
}
/**
* Run an async plugin hook and return the result.
* @param hookName Name of the plugin hook. Must be either in `PluginHooks` or `OutputPluginValueHooks`.
* @param args Arguments passed to the plugin hook.
* @param plugin The actual pluginObject to run.
*/
// Implementation signature
#execute({
strategy,
hookName,
parameters,
plugin,
message
}) {
const hook = plugin[hookName];
let output;
if (!hook) {
return null;
}
this.events.emit("executing", { strategy, hookName, parameters, plugin, message });
const promise = new Promise((resolve2) => {
resolve2(void 0);
});
const task = promise.then(() => {
if (typeof hook === "function") {
const possiblePromiseResult = hook.apply({ ...this.#core.context, plugin }, parameters);
if (isPromise(possiblePromiseResult)) {
return Promise.resolve(possiblePromiseResult);
}
return possiblePromiseResult;
}
return hook;
}).then((result) => {
output = result;
this.#addExecutedToCallStack({
parameters,
output,
strategy,
hookName,
plugin,
message
});
return result;
}).catch((e) => {
this.#catcher(e, plugin, hookName);
return null;
});
return task;
}
/**
* Run a sync plugin hook and return the result.
* @param hookName Name of the plugin hook. Must be in `PluginHooks`.
* @param args Arguments passed to the plugin hook.
* @param plugin The actual plugin
* @param replaceContext When passed, the plugin context can be overridden.
*/
#executeSync({
strategy,
hookName,
parameters,
plugin,
message
}) {
const hook = plugin[hookName];
let output;
if (!hook) {
return null;
}
this.events.emit("executing", { strategy, hookName, parameters, plugin, message });
try {
if (typeof hook === "function") {
const fn = hook.apply({ ...this.#core.context, plugin }, parameters);
output = fn;
this.#addExecutedToCallStack({
parameters,
output,
strategy,
hookName,
plugin,
message
});
return fn;
}
output = hook;
this.#addExecutedToCallStack({
parameters,
output,
strategy,
hookName,
plugin,
message
});
return hook;
} catch (e) {
this.#catcher(e, plugin, hookName);
return null;
}
}
#catcher(cause, plugin, hookName) {
const text = `${cause.message} (plugin: ${plugin?.name || "unknown"}, hook: ${hookName || "unknown"})`;
this.logger.emit("error", text, cause);
this.events.emit("error", cause);
}
#parse(plugin, pluginManager, context) {
const usedPluginNames = pluginManager.#usedPluginNames;
setUniqueName(plugin.name, usedPluginNames);
const key = [plugin.name, usedPluginNames[plugin.name]].filter(Boolean);
if (plugin.context && typeof plugin.context === "function") {
return {
...plugin,
key,
context: plugin.context.call(context)
};
}
return {
...plugin,
key
};
}
static getDependedPlugins(plugins, dependedPluginNames) {

@@ -505,154 +612,5 @@ let pluginNames = [];

static get hooks() {
return ["buildStart", "resolvePath", "resolveName", "load", "transform", "writeFile", "buildEnd"];
return ["buildStart", "resolvePath", "resolveName", "buildEnd"];
}
};
_core = new WeakMap();
_usedPluginNames = new WeakMap();
_promiseManager = new WeakMap();
_PluginManager_instances = new WeakSet();
getSortedPlugins_fn = function(hookName) {
const plugins = [...this.plugins].filter((plugin) => plugin.name !== "core");
if (hookName) {
if (this.logger.logLevel === LogLevel.info) {
const containsHookName = plugins.some((item) => item[hookName]);
if (!containsHookName) {
this.logger.emit("warning", `No hook ${hookName} found`);
}
}
return plugins.filter((item) => item[hookName]);
}
return plugins.map((plugin) => {
if (plugin.pre) {
const isValid = plugin.pre.every((pluginName) => plugins.find((pluginToFind) => pluginToFind.name === pluginName));
if (!isValid) {
throw new ValidationPluginError(`This plugin has a pre set that is not valid(${JSON.stringify(plugin.pre, void 0, 2)})`);
}
}
return plugin;
}).sort((a, b) => {
if (b.pre?.includes(a.name)) {
return 1;
}
if (b.post?.includes(a.name)) {
return -1;
}
return 0;
});
};
addExecutedToCallStack_fn = function(executer) {
if (executer) {
this.events.emit("executed", executer);
this.executed.push(executer);
}
};
/**
* Run an async plugin hook and return the result.
* @param hookName Name of the plugin hook. Must be either in `PluginHooks` or `OutputPluginValueHooks`.
* @param args Arguments passed to the plugin hook.
* @param plugin The actual pluginObject to run.
*/
// Implementation signature
execute_fn = function({
strategy,
hookName,
parameters,
plugin
}) {
const hook = plugin[hookName];
let output;
if (!hook) {
return null;
}
this.events.emit("execute", { strategy, hookName, parameters, plugin });
const task = Promise.resolve().then(() => {
if (typeof hook === "function") {
const possiblePromiseResult = hook.apply({ ...__privateGet(this, _core).api, plugin }, parameters);
if (isPromise(possiblePromiseResult)) {
return Promise.resolve(possiblePromiseResult);
}
return possiblePromiseResult;
}
return hook;
}).then((result) => {
output = result;
__privateMethod(this, _PluginManager_instances, addExecutedToCallStack_fn).call(this, {
parameters,
output,
strategy,
hookName,
plugin
});
return result;
}).catch((e) => {
__privateMethod(this, _PluginManager_instances, catcher_fn).call(this, e, plugin, hookName);
return null;
});
return task;
};
/**
* Run a sync plugin hook and return the result.
* @param hookName Name of the plugin hook. Must be in `PluginHooks`.
* @param args Arguments passed to the plugin hook.
* @param plugin The actual plugin
* @param replaceContext When passed, the plugin context can be overridden.
*/
executeSync_fn = function({
strategy,
hookName,
parameters,
plugin
}) {
const hook = plugin[hookName];
let output;
if (!hook) {
return null;
}
this.events.emit("execute", { strategy, hookName, parameters, plugin });
try {
if (typeof hook === "function") {
const fn = hook.apply({ ...__privateGet(this, _core).api, plugin }, parameters);
output = fn;
return fn;
}
output = hook;
__privateMethod(this, _PluginManager_instances, addExecutedToCallStack_fn).call(this, {
parameters,
output,
strategy,
hookName,
plugin
});
return hook;
} catch (e) {
__privateMethod(this, _PluginManager_instances, catcher_fn).call(this, e, plugin, hookName);
return null;
}
};
catcher_fn = function(cause, plugin, hookName) {
const text = `${cause.message} (plugin: ${plugin?.name || "unknown"}, hook: ${hookName || "unknown"})`;
this.logger.emit("error", text, cause);
this.events.emit("error", cause);
};
parse_fn = function(plugin, pluginManager, context) {
const usedPluginNames = __privateGet(pluginManager, _usedPluginNames);
setUniqueName(plugin.name, usedPluginNames);
const key = [plugin.name, usedPluginNames[plugin.name]].filter(Boolean);
if (!plugin.transform) {
plugin.transform = function transform(_path, code) {
return code;
};
}
if (plugin.api && typeof plugin.api === "function") {
const api = plugin.api.call(context);
return {
...plugin,
key,
api
};
}
return {
...plugin,
key
};
};

@@ -664,12 +622,6 @@ // src/config.ts

function isInputPath(result) {
return !!result && "path" in result;
return !!result && "path" in result?.input;
}
// src/build.ts
async function transformReducer(_previousCode, result, _plugin) {
return result;
}
async function setup(options) {
const { config, logger = createLogger({ logLevel: LogLevel.silent }) } = options;
let count = 0;
const { config, logger = createLogger() } = options;
try {

@@ -681,8 +633,5 @@ if (isInputPath(config) && !new URLPath(config.input.path).isURL) {

if (isInputPath(config)) {
throw new Error(
`Cannot read file/URL defined in \`input.path\` or set with \`kubb generate PATH\` in the CLI of your Kubb config ${p.dim(config.input.path)}`,
{
cause: e
}
);
throw new Error(`Cannot read file/URL defined in \`input.path\` or set with \`kubb generate PATH\` in the CLI of your Kubb config ${config.input.path}`, {
cause: e
});
}

@@ -692,135 +641,81 @@ }

await clean(config.output.path);
await clean(join(config.root, ".kubb"));
}
const task = async (file) => {
const { path: path2 } = file;
let source = await FileManager.getSource(file);
const { result: loadedResult } = await pluginManager.hookFirst({
hookName: "load",
parameters: [path2]
});
if (loadedResult && isPromise(loadedResult)) {
source = await loadedResult;
}
if (loadedResult && !isPromise(loadedResult)) {
source = loadedResult;
}
if (source) {
source = await pluginManager.hookReduceArg0({
hookName: "transform",
parameters: [path2, source],
reduce: transformReducer
});
if (config.output.write || config.output.write === void 0) {
if (file.meta?.pluginKey) {
await pluginManager.hookForPlugin({
pluginKey: file.meta?.pluginKey,
hookName: "writeFile",
parameters: [path2, source]
});
}
await pluginManager.hookFirst({
hookName: "writeFile",
parameters: [path2, source]
});
}
}
return {
...file,
source: source || ""
};
};
const pluginManager = new PluginManager(config, { logger, task });
pluginManager.on("execute", (executer) => {
const { hookName, parameters, plugin } = executer;
if (hookName === "writeFile") {
const [code] = parameters;
logger.emit("debug", [`PluginKey ${p.dim(JSON.stringify(plugin.key))}
with source
${code}`]);
}
});
pluginManager.queue.on("add", () => {
if (logger.logLevel !== LogLevel.info) {
return;
}
if (count === 0) {
logger.emit("start", "\u{1F4BE} Writing");
}
});
pluginManager.queue.on("active", () => {
if (logger.logLevel !== LogLevel.info) {
return;
}
if (logger.spinner && pluginManager.queue.size > 0) {
const text = `Item: ${count} Size: ${pluginManager.queue.size} Pending: ${pluginManager.queue.pending}`;
logger.spinner.suffixText = p.dim(text);
}
++count;
});
pluginManager.queue.on("completed", () => {
if (logger.logLevel !== LogLevel.info) {
return;
}
if (logger.spinner) {
const text = `Item: ${count} Size: ${pluginManager.queue.size} Pending: ${pluginManager.queue.pending}`;
logger.spinner.suffixText = p.dim(text);
}
});
pluginManager.on("executed", (executer) => {
const { hookName, plugin, output, parameters } = executer;
const logs = [
`${randomCliColour(plugin.name)} Executing ${hookName}`,
parameters && `${p.bgWhite("Parameters")} ${randomCliColour(plugin.name)} ${hookName}`,
JSON.stringify(parameters, void 0, 2),
output && `${p.bgWhite("Output")} ${randomCliColour(plugin.name)} ${hookName}`,
output
].filter(Boolean);
logger.emit("debug", logs);
});
return pluginManager;
return new PluginManager(config, { logger });
}
async function build(options) {
const pluginManager = await setup(options);
const { fileManager, logger } = pluginManager;
await pluginManager.hookParallel({
hookName: "buildStart",
parameters: [options.config]
});
await pluginManager.hookParallel({ hookName: "buildEnd" });
if (logger.logLevel === LogLevel.info) {
logger.emit("end", "\u{1F4BE} Writing completed");
}
const files = await Promise.all(
fileManager.files.map(async (file) => ({
...file,
source: await FileManager.getSource(file)
}))
);
const { files, pluginManager, error } = await safeBuild(options);
if (error) throw error;
return {
files,
pluginManager
pluginManager,
error
};
}
async function safeBuild(options) {
let files = [];
const pluginManager = await setup(options);
const { fileManager, logger } = pluginManager;
try {
pluginManager.events.on("executing", ({ plugin, message }) => {
pluginManager.logger.emit("debug", { date: /* @__PURE__ */ new Date(), logs: [`Executing pluginKey ${plugin.key?.join(".")} | ${message}`] });
});
pluginManager.events.on("executed", ({ plugin, message, output }) => {
pluginManager.logger.emit("debug", {
date: /* @__PURE__ */ new Date(),
logs: [`Executed pluginKey ${plugin.key?.join(".")} | ${message} | ${JSON.stringify(output, void 0, 2)}`]
});
});
await pluginManager.hookParallel({
hookName: "buildStart",
parameters: [options.config]
parameters: [options.config],
message: "buildStart"
});
await pluginManager.hookParallel({ hookName: "buildEnd" });
if (logger.logLevel === LogLevel.info) {
logger.emit("end", "\u{1F4BE} Writing completed");
const root = resolve(options.config.root);
const rootPath = resolve(root, options.config.output.path, "index.ts");
const barrelFiles = pluginManager.fileManager.files.filter((file) => {
return file.sources.some((source) => source.isIndexable);
});
const rootFile = {
path: rootPath,
baseName: "index.ts",
exports: barrelFiles.flatMap((file) => {
return file.sources?.map((source) => {
if (!file.path || !source.isIndexable) {
return void 0;
}
const plugin = [...pluginManager.plugins].find((item) => {
const meta = file.meta;
return item.key === meta?.pluginKey;
});
const pluginOptions = plugin?.options ?? {};
if (pluginOptions.output?.barrelType === false) {
return void 0;
}
if (FileManager.getMode(pluginOptions.output?.path) === "single") {
return void 0;
}
return {
name: pluginOptions.output?.barrelType === "all" ? void 0 : [source.name],
path: getRelativePath(rootPath, file.path),
isTypeOnly: source.isTypeOnly
};
}).filter(Boolean);
}).filter(Boolean),
sources: [],
meta: {}
};
if (options.config.output.barrelType) {
await pluginManager.fileManager.add(rootFile);
}
files = await processFiles({
config: options.config,
dryRun: !options.config.output.write,
files: pluginManager.fileManager.files,
logger: pluginManager.logger
});
await pluginManager.hookParallel({ hookName: "buildEnd", message: `Build stopped for ${options.config.name}` });
pluginManager.fileManager.clear();
} catch (e) {
const files2 = await Promise.all(
fileManager.files.map(async (file) => ({
...file,
source: await FileManager.getSource(file)
}))
);
return {
files: files2,
files: [],
pluginManager,

@@ -830,8 +725,2 @@ error: e

}
const files = await Promise.all(
fileManager.files.map(async (file) => ({
...file,
source: await FileManager.getSource(file)
}))
);
return {

@@ -843,13 +732,12 @@ files,

// src/Generator.ts
var _options2, _context;
var Generator = class {
// src/BaseGenerator.ts
var BaseGenerator = class {
#options = {};
#context = {};
constructor(options, context) {
__privateAdd(this, _options2, {});
__privateAdd(this, _context, {});
if (context) {
__privateSet(this, _context, context);
this.#context = context;
}
if (options) {
__privateSet(this, _options2, options);
this.#options = options;
}

@@ -859,29 +747,18 @@ return this;

get options() {
return __privateGet(this, _options2);
return this.#options;
}
get context() {
return __privateGet(this, _context);
return this.#context;
}
set options(options) {
__privateSet(this, _options2, { ...__privateGet(this, _options2), ...options });
this.#options = { ...this.#options, ...options };
}
};
_options2 = new WeakMap();
_context = new WeakMap();
// src/PackageManager.ts
import mod from "module";
import os from "os";
import { pathToFileURL } from "url";
import { findUp, findUpSync } from "find-up";
import { coerce, satisfies } from "semver";
import { read as read2, readSync as readSync2 } from "@kubb/fs";
var _cache, _cwd, _SLASHES, _PackageManager_instances, match_fn;
var _PackageManager = class _PackageManager {
var PackageManager = class _PackageManager {
static #cache = {};
#cwd;
#SLASHES = /* @__PURE__ */ new Set(["/", "\\"]);
constructor(workspace) {
__privateAdd(this, _PackageManager_instances);
__privateAdd(this, _cwd);
__privateAdd(this, _SLASHES, /* @__PURE__ */ new Set(["/", "\\"]));
if (workspace) {
__privateSet(this, _cwd, workspace);
this.#cwd = workspace;
}

@@ -891,9 +768,9 @@ return this;

set workspace(workspace) {
__privateSet(this, _cwd, workspace);
this.#cwd = workspace;
}
get workspace() {
return __privateGet(this, _cwd);
return this.#cwd;
}
normalizeDirectory(directory) {
if (!__privateGet(this, _SLASHES).has(directory[directory.length - 1])) {
if (!this.#SLASHES.has(directory[directory.length - 1])) {
return `${directory}/`;

@@ -905,4 +782,4 @@ }

let location = path2;
if (__privateGet(this, _cwd)) {
const require2 = mod.createRequire(this.normalizeDirectory(__privateGet(this, _cwd)));
if (this.#cwd) {
const require2 = mod.createRequire(this.normalizeDirectory(this.#cwd));
location = require2.resolve(path2);

@@ -921,3 +798,3 @@ }

} catch (e) {
console.log(e);
console.error(e);
return void 0;

@@ -928,3 +805,3 @@ }

const pkgPath = await findUp(["package.json"], {
cwd: __privateGet(this, _cwd)
cwd: this.#cwd
});

@@ -934,3 +811,3 @@ if (!pkgPath) {

}
const json = await read2(pkgPath);
const json = await read(pkgPath);
return JSON.parse(json);

@@ -940,3 +817,3 @@ }

const pkgPath = findUpSync(["package.json"], {
cwd: __privateGet(this, _cwd)
cwd: this.#cwd
});

@@ -946,11 +823,22 @@ if (!pkgPath) {

}
const json = readSync2(pkgPath);
const json = readSync(pkgPath);
return JSON.parse(json);
}
static setVersion(dependency, version) {
__privateGet(_PackageManager, _cache)[dependency] = version;
_PackageManager.#cache[dependency] = version;
}
#match(packageJSON, dependency) {
const dependencies = {
...packageJSON["dependencies"] || {},
...packageJSON["devDependencies"] || {}
};
if (typeof dependency === "string" && dependencies[dependency]) {
return dependencies[dependency];
}
const matchedDependency = Object.keys(dependencies).find((dep) => dep.match(dependency));
return matchedDependency ? dependencies[matchedDependency] : void 0;
}
async getVersion(dependency) {
if (typeof dependency === "string" && __privateGet(_PackageManager, _cache)[dependency]) {
return __privateGet(_PackageManager, _cache)[dependency];
if (typeof dependency === "string" && _PackageManager.#cache[dependency]) {
return _PackageManager.#cache[dependency];
}

@@ -961,7 +849,7 @@ const packageJSON = await this.getPackageJSON();

}
return __privateMethod(this, _PackageManager_instances, match_fn).call(this, packageJSON, dependency);
return this.#match(packageJSON, dependency);
}
getVersionSync(dependency) {
if (typeof dependency === "string" && __privateGet(_PackageManager, _cache)[dependency]) {
return __privateGet(_PackageManager, _cache)[dependency];
if (typeof dependency === "string" && _PackageManager.#cache[dependency]) {
return _PackageManager.#cache[dependency];
}

@@ -972,3 +860,3 @@ const packageJSON = this.getPackageJSONSync();

}
return __privateMethod(this, _PackageManager_instances, match_fn).call(this, packageJSON, dependency);
return this.#match(packageJSON, dependency);
}

@@ -1001,33 +889,5 @@ async isValid(dependency, version) {

};
_cache = new WeakMap();
_cwd = new WeakMap();
_SLASHES = new WeakMap();
_PackageManager_instances = new WeakSet();
match_fn = function(packageJSON, dependency) {
const dependencies = {
...packageJSON["dependencies"] || {},
...packageJSON["devDependencies"] || {}
};
if (typeof dependency === "string" && dependencies[dependency]) {
return dependencies[dependency];
}
const matchedDependency = Object.keys(dependencies).find((dep) => dep.match(dependency));
return matchedDependency ? dependencies[matchedDependency] : void 0;
};
__privateAdd(_PackageManager, _cache, {});
var PackageManager = _PackageManager;
export {
FileManager,
Generator,
PackageManager,
PluginManager,
PromiseManager,
Warning,
build,
createPlugin,
build as default,
defineConfig,
isInputPath,
safeBuild
};
export { BaseGenerator, PackageManager, PluginManager, PromiseManager, build, createPlugin, build as default, defineConfig, isInputPath, safeBuild };
//# sourceMappingURL=index.js.map
//# sourceMappingURL=index.js.map

@@ -1,3 +0,2 @@

export { a as LogLevel, b as LogMapper, L as Logger, c as createLogger, d as randomCliColour, r as randomColour } from './logger-DChjnJMn.js';
export { a as LogMapper, L as Logger, c as createLogger, b as randomCliColour, r as randomColour } from './logger-DvbHXjIO.js';
import 'consola';
import 'ora';

@@ -1,16 +0,4 @@

import {
LogLevel,
LogMapper,
createLogger,
randomCliColour,
randomColour
} from "./chunk-3OXCZ5DJ.js";
import "./chunk-HMLY7DHA.js";
export {
LogLevel,
LogMapper,
createLogger,
randomCliColour,
randomColour
};
export { LogMapper, createLogger, randomCliColour, randomColour } from './chunk-HTOO3HNK.js';
import './chunk-HBQM723K.js';
//# sourceMappingURL=logger.js.map
//# sourceMappingURL=logger.js.map

@@ -1,15 +0,13 @@

import * as KubbFile from '@kubb/fs/types';
import { PluginManager } from './index.js';
import 'p-queue';
import { ResolvedFile, File } from '@kubb/fs/types';
import { a as PluginManager } from './FileManager-Cn8nSj1H.js';
import { L as Logger } from './logger-DvbHXjIO.js';
import '@kubb/fs';
import '@kubb/fs/src/types.ts';
import '@kubb/types';
import 'directory-tree';
import './logger-DChjnJMn.js';
import 'consola';
import 'ora';
declare const mockedLogger: Logger;
declare const createMockedPluginManager: (name?: string) => PluginManager;
declare const mockedPluginManager: PluginManager;
declare function matchFiles(files: KubbFile.File[]): Promise<void>;
declare function matchFiles(files: Array<ResolvedFile | File> | undefined): Promise<undefined>;
export { matchFiles, mockedPluginManager };
export { createMockedPluginManager, matchFiles, mockedLogger, mockedPluginManager };

@@ -1,19 +0,24 @@

import {
FileManager
} from "./chunk-SA2GZKXS.js";
import "./chunk-JKZG2IJR.js";
import "./chunk-5JZNFPUP.js";
import {
pascalCase
} from "./chunk-4X5FFJPJ.js";
import "./chunk-HMLY7DHA.js";
import { getSource, createFile } from './chunk-DXGMSPTW.js';
import { camelCase, pascalCase } from './chunk-BQXM32UO.js';
import path from 'node:path';
// mocks/index.ts
import { readSync } from "@kubb/fs";
var mockedPluginManager = {
resolveName: ({ name, type }) => {
if (type === "type") {
return pascalCase(name);
var mockedLogger = {
emit(type, message) {
},
on(type, message) {
},
consola: {}
};
var createMockedPluginManager = (name) => ({
resolveName: (result) => {
if (result.type === "file") {
return camelCase(name || result.name);
}
return name;
if (result.type === "type") {
return pascalCase(result.name);
}
if (result.type === "function") {
return camelCase(result.name);
}
return camelCase(result.name);
},

@@ -32,11 +37,6 @@ config: {

},
logLevel: "info"
logLevel: 3
},
getFile: ({ name, extName, pluginKey }) => {
const baseName = `${name}${extName}`;
let source = "";
try {
source = readSync(baseName);
} catch (_e) {
}
getFile: ({ name: name2, extname, pluginKey }) => {
const baseName = `${name2}${extname}`;
return {

@@ -47,17 +47,19 @@ path: baseName,

pluginKey
},
source
}
};
}
};
});
var mockedPluginManager = createMockedPluginManager("");
async function matchFiles(files) {
if (!files) {
return void 0;
}
for (const file of files) {
const source = await FileManager.getSource(file);
expect(source).toMatchSnapshot();
const source = await getSource(createFile(file), { logger: mockedLogger });
expect(source).toMatchFileSnapshot(path.join("__snapshots__", file.path));
}
}
export {
matchFiles,
mockedPluginManager
};
export { createMockedPluginManager, matchFiles, mockedLogger, mockedPluginManager };
//# sourceMappingURL=mocks.js.map
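A hedged Vitest sketch of how the mocks above are typically consumed (an editor's addition); the '@kubb/core/mocks' import specifier and the inline file literal are assumptions for illustration, and matchFiles relies on Vitest globals because it calls expect internally.

import { describe, expect, it } from 'vitest'
import { createMockedPluginManager, matchFiles } from '@kubb/core/mocks'

describe('mocks', () => {
  it('resolves names and snapshots generated files', async () => {
    const pluginManager = createMockedPluginManager('pet')
    // type names are pascalCased, file and function names are camelCased
    expect(pluginManager.resolveName({ name: 'pet', type: 'type' })).toBe('Pet')

    // matchFiles prints each file and compares it against __snapshots__/<path>
    await matchFiles([
      {
        path: 'gen/pet.ts',
        baseName: 'pet.ts',
        sources: [{ name: 'pet', value: 'export const pet = {}', isExportable: true, isIndexable: true }],
      },
    ])
  })
})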

@@ -47,3 +47,3 @@ import { orderBy } from 'natural-orderby';

declare function stringify(value: string | number | undefined): string;
declare function stringify(value: string | number | boolean | undefined): string;
declare function stringifyObject(value: object): string;

@@ -61,3 +61,2 @@

declare function trimQuotes(text: string): string;
declare function trimExtName(text: string): string;

@@ -78,3 +77,2 @@ declare const _default: {

readonly trimQuotes: typeof trimQuotes;
readonly trimExtName: typeof trimExtName;
readonly JSDoc: {

@@ -90,2 +88,2 @@ readonly createJSDocBlockText: typeof createJSDocBlockText;

export { camelCase, combineCodes, createIndent, createJSDocBlockText, _default as default, escape, jsStringEscape, nameSorter, pascalCase, pathCase, searchAndReplace, stringify, stringifyObject, toRegExp, toRegExpString, transformReservedWord, trim, trimExtName, trimQuotes };
export { camelCase, combineCodes, createIndent, createJSDocBlockText, _default as default, escape, jsStringEscape, nameSorter, pascalCase, pathCase, searchAndReplace, stringify, stringifyObject, toRegExp, toRegExpString, transformReservedWord, trim, trimQuotes };

@@ -1,8 +0,144 @@

import {
import { transformReservedWord } from './chunk-2EU7DMPM.js';
export { transformReservedWord } from './chunk-2EU7DMPM.js';
import { orderBy, camelCase, pascalCase, pathCase } from './chunk-BQXM32UO.js';
export { camelCase, orderBy, pascalCase, pathCase } from './chunk-BQXM32UO.js';
import { merge } from 'remeda';
export { merge } from 'remeda';
// src/transformers/combineCodes.ts
function combineCodes(codes) {
return codes.join("\n");
}
// src/transformers/createJSDocBlockText.ts
function createJSDocBlockText({ comments }) {
const filteredComments = comments.filter(Boolean);
if (!filteredComments.length) {
return "";
}
return `/**
* ${filteredComments.join("\n * ")}
*/`;
}
// src/transformers/escape.ts
function escape(text) {
return text ? text.replaceAll("`", "\\`") : "";
}
function jsStringEscape(input) {
return `${input}`.replace(/["'\\\n\r\u2028\u2029]/g, (character) => {
switch (character) {
case '"':
case "'":
case "\\":
return `\\${character}`;
// Four possible LineTerminator characters need to be escaped:
case "\n":
return "\\n";
case "\r":
return "\\r";
case "\u2028":
return "\\u2028";
case "\u2029":
return "\\u2029";
default:
return "";
}
});
}
// src/transformers/indent.ts
function createIndent(size) {
return Array.from({ length: size + 1 }).join(" ");
}
// src/transformers/nameSorter.ts
function nameSorter(a, b) {
if (a.name < b.name) {
return -1;
}
if (a.name > b.name) {
return 1;
}
return 0;
}
// src/transformers/searchAndReplace.ts
function searchAndReplace(options) {
const { text, replaceBy, prefix = "", key } = options;
const searchValues = options.searchValues?.(prefix, key) || [
`${prefix}["${key}"]`,
`${prefix}['${key}']`,
`${prefix}[\`${key}\`]`,
`${prefix}"${key}"`,
`${prefix}'${key}'`,
`${prefix}\`${key}\``,
new RegExp(`${prefix}${key}`, "g")
];
return searchValues.reduce((prev, searchValue) => {
return prev.toString().replaceAll(searchValue, replaceBy);
}, text);
}
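// Editor's illustrative sketch (not part of the dist output above): by default
// searchAndReplace swaps every bracket/quote access of `key` on `prefix`
// (pet["name"], pet['name'], pet`name`, ...) for `replaceBy`; a custom
// `searchValues(prefix, key)` callback can override that list.
const replacedExample = searchAndReplace({
  text: 'const value = pet["name"];',
  replaceBy: "resolvedName",
  prefix: "pet",
  key: "name",
});
// replacedExample === 'const value = resolvedName;'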
// src/transformers/trim.ts
function trim(text) {
return text.replaceAll(/\n/g, "").trim();
}
function trimQuotes(text) {
if (text.match(/^"(.*)"$/)) {
return text.replace(/^"(.*)"$/, "$1");
}
if (text.match(/^'(.*)'$/)) {
return text.replace(/^'(.*)'$/, "$1");
}
if (text.match(/^`(.*)`$/)) {
return text.replace(/^`(.*)`$/, "$1");
}
return text;
}
// src/transformers/stringify.ts
function stringify(value) {
if (value === void 0 || value === null) {
return '""';
}
return JSON.stringify(trimQuotes(value.toString()));
}
function stringifyObject(value) {
const items = Object.entries(value).map(([key, value2]) => {
if (typeof value2 === "object") {
return `${key}: {
${stringifyObject(value2)}
}`;
}
return `${key}: ${value2}`;
}).filter(Boolean);
return items.join(",\n");
}
// src/transformers/toRegExp.ts
function stringToRegex(text) {
const isStartWithSlash = text.startsWith("/");
const isEndWithSlash = text.endsWith("/");
return new RegExp(text.slice(isStartWithSlash ? 1 : 0, isEndWithSlash ? -1 : void 0));
}
function toRegExp(text) {
if (typeof text === "string") {
const source = trimQuotes(text);
return stringToRegex(source);
}
return stringToRegex(text.toString());
}
function toRegExpString(text, func = "RegExp") {
const isStartWithSlash = text.startsWith("/");
const isEndWithSlash = text.endsWith("/");
const regexp = `new ${func}('${jsStringEscape(text.slice(isStartWithSlash ? 1 : 0, isEndWithSlash ? -1 : void 0))}')`;
return regexp;
}
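// Editor's illustrative sketch (not part of the dist output above): stringify
// always returns a JSON string literal, stringifyObject prints a plain
// key/value body, and toRegExpString emits source code that constructs a RegExp.
const quotedExample = stringify("pet"); // '"pet"'
const emptyExample = stringify(void 0); // '""'
const objectBodyExample = stringifyObject({ type: "object", id: 1 }); // 'type: object,\nid: 1'
const regexpCodeExample = toRegExpString("/pet/{petId}"); // "new RegExp('pet/{petId}')"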
var transformers_default = {
combineCodes,
createIndent,
createJSDocBlockText,
escape,
jsStringEscape,
merge,
createIndent,
transformReservedWord,
nameSorter,

@@ -14,38 +150,16 @@ searchAndReplace,

toRegExpString,
transformReservedWord,
transformers_default,
trim,
trimExtName,
trimQuotes
} from "./chunk-JKZG2IJR.js";
import {
trimQuotes,
JSDoc: {
createJSDocBlockText
},
orderBy,
merge,
camelCase,
orderBy,
pascalCase,
pathCase
} from "./chunk-4X5FFJPJ.js";
import "./chunk-HMLY7DHA.js";
export {
camelCase,
combineCodes,
createIndent,
createJSDocBlockText,
transformers_default as default,
escape,
jsStringEscape,
merge,
nameSorter,
orderBy,
pascalCase,
pathCase,
searchAndReplace,
stringify,
stringifyObject,
toRegExp,
toRegExpString,
transformReservedWord,
trim,
trimExtName,
trimQuotes
};
export { combineCodes, createIndent, createJSDocBlockText, transformers_default as default, escape, jsStringEscape, nameSorter, searchAndReplace, stringify, stringifyObject, toRegExp, toRegExpString, trim, trimQuotes };
//# sourceMappingURL=transformers.js.map
import { PossiblePromise } from '@kubb/types';
import * as _kubb_parser_ts from '@kubb/parser-ts';
import * as KubbFile from '@kubb/fs/types';
import { L as Logger } from './logger-DvbHXjIO.js';
import 'consola';

@@ -104,4 +106,28 @@ type FunctionParamsASTWithoutType = {

declare function getParser(language: string | undefined): Promise<typeof _kubb_parser_ts>;
/**
* Helper to create a file with name and id set
*/
declare function createFile<TMeta extends object = object>(file: KubbFile.File<TMeta>): KubbFile.ResolvedFile<TMeta>;
/**
* Helper to create a fileImport with extname set
*/
declare function createFileImport(imp: KubbFile.Import): KubbFile.ResolvedImport;
/**
* Helper to create a fileExport with extname set
*/
declare function createFileExport(exp: KubbFile.Export): KubbFile.ResolvedExport;
type ParserModule<TMeta extends object = object> = {
format: (source: string) => Promise<string>;
/**
* Convert a file to string
*/
print: (file: KubbFile.ResolvedFile<TMeta>, options: PrintOptions) => Promise<string>;
};
declare function createFileParser<TMeta extends object = object>(parser: ParserModule<TMeta>): ParserModule<TMeta>;
type PrintOptions = {
extname?: KubbFile.Extname;
logger?: Logger;
};
declare function getFileParser<TMeta extends object = object>(extname: KubbFile.Extname | undefined): Promise<ParserModule<TMeta>>;
export { FunctionParams, type FunctionParamsAST, type URLObject, URLPath, getParser, getUniqueName, isPromise, isPromiseFulfilledResult, isPromiseRejectedResult, renderTemplate, setUniqueName, timeout };
export { FunctionParams, type FunctionParamsAST, type ParserModule, type URLObject, URLPath, createFile, createFileExport, createFileImport, createFileParser, getFileParser, getUniqueName, isPromise, isPromiseFulfilledResult, isPromiseRejectedResult, renderTemplate, setUniqueName, timeout };
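A hedged sketch of the ParserModule contract declared above (an editor's addition, not part of this diff); the '@kubb/core/utils' import specifier and the join-the-sources print logic are assumptions for illustration, not Kubb's built-in parser.

import type * as KubbFile from '@kubb/fs/types'
import { createFileParser } from '@kubb/core/utils'

const plainTextParser = createFileParser({
  // a real parser would run a formatter here; this sketch returns the source untouched
  format: async (source) => source,
  // print a resolved file by concatenating the value of each of its sources
  print: async (file: KubbFile.ResolvedFile, _options) => {
    return (file.sources ?? []).map((source) => source.value).join('\n')
  },
})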

@@ -1,27 +0,4 @@

import {
FunctionParams,
URLPath,
getParser,
getUniqueName,
isPromise,
isPromiseFulfilledResult,
isPromiseRejectedResult,
renderTemplate,
setUniqueName,
timeout
} from "./chunk-5JZNFPUP.js";
import "./chunk-4X5FFJPJ.js";
import "./chunk-HMLY7DHA.js";
export {
FunctionParams,
URLPath,
getParser,
getUniqueName,
isPromise,
isPromiseFulfilledResult,
isPromiseRejectedResult,
renderTemplate,
setUniqueName,
timeout
};
export { FunctionParams, URLPath, createFile, createFileExport, createFileImport, createFileParser, getFileParser, getUniqueName, isPromise, isPromiseFulfilledResult, isPromiseRejectedResult, renderTemplate, setUniqueName, timeout } from './chunk-DXGMSPTW.js';
import './chunk-BQXM32UO.js';
//# sourceMappingURL=utils.js.map
{
"name": "@kubb/core",
"version": "0.0.0-canary-20240806053140",
"version": "0.0.0-canary-20241104172400",
"description": "Generator core",

@@ -47,5 +47,2 @@ "keywords": [

"./package.json": "./package.json",
"./globals": {
"types": "./globals.d.ts"
},
"./*": "./*"

@@ -83,30 +80,27 @@ },

"change-case": "^5.4.4",
"directory-tree": "^3.5.2",
"find-up": "^7.0.0",
"natural-orderby": "^3.0.2",
"natural-orderby": "^4.0.0",
"object-hash": "^3.0.0",
"p-queue": "^8.0.1",
"remeda": "^2.7.1",
"remeda": "^2.16.0",
"seedrandom": "^3.0.5",
"semver": "^7.6.3",
"unraw": "^3.0.0",
"@kubb/fs": "0.0.0-canary-20240806053140",
"@kubb/parser-ts": "0.0.0-canary-20240806053140",
"@kubb/types": "0.0.0-canary-20240806053140"
"@kubb/fs": "0.0.0-canary-20241104172400",
"@kubb/parser-ts": "0.0.0-canary-20241104172400",
"@kubb/types": "0.0.0-canary-20241104172400"
},
"devDependencies": {
"@types/react": "^18.3.3",
"@types/object-hash": "^3.0.6",
"@types/seedrandom": "^3.0.8",
"@types/semver": "^7.5.8",
"consola": "^3.2.3",
"ora": "^8.0.1",
"prettier": "^3.3.3",
"tinyrainbow": "^1.2.0",
"tsup": "^8.2.4",
"typescript": "^5.5.4",
"@kubb/config-biome": "0.0.0-canary-20240806053140",
"@kubb/config-ts": "0.0.0-canary-20240806053140",
"@kubb/config-tsup": "0.0.0-canary-20240806053140"
"tsup": "^8.3.5",
"typescript": "^5.6.3",
"@kubb/config-ts": "0.0.0-canary-20241104172400",
"@kubb/config-tsup": "0.0.0-canary-20241104172400"
},
"engines": {
"node": ">=18"
"node": ">=20"
},

@@ -113,0 +107,0 @@ "publishConfig": {

@@ -16,8 +16,4 @@ <div align="center">

<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
<!-- ALL-CONTRIBUTORS-BADGE:END -->
</p>
<h4>
<a href="https://codesandbox.io/s/github/kubb-labs/kubb/tree/alpha/examples/typescript" target="_blank">View Demo</a>
<a href="https://codesandbox.io/s/github/kubb-labs/kubb/tree/main//examples/typescript" target="_blank">View Demo</a>
<span> · </span>

@@ -32,2 +28,15 @@ <a href="https://kubb.dev/" target="_blank">Documentation</a>

## Supporting Kubb
Kubb is an MIT-licensed open source project with its ongoing development made possible entirely by the support of sponsors. If you would like to become a sponsor, please consider:
- [Become a Sponsor on GitHub](https://github.com/sponsors/stijnvanhulle)
<p align="center">
<a href="https://github.com/sponsors/stijnvanhulle">
<img src="https://raw.githubusercontent.com/stijnvanhulle/sponsors/main/sponsors.svg" alt="My sponsors" />
</a>
</p>
<!-- Badges -->

@@ -34,0 +43,0 @@

@@ -1,18 +0,12 @@

import { getExports } from '@kubb/parser-ts'
import { join } from 'node:path'
import path from 'node:path'
import { trimExtName } from './transformers/trim.ts'
import { TreeNode } from './utils/TreeNode.ts'
import { getRelativePath } from '@kubb/fs'
import type * as KubbFile from '@kubb/fs/types'
import type { DirectoryTreeOptions } from 'directory-tree'
import type { FileMetaBase } from './FileManager.ts'
import type { Logger } from './logger.ts'
export type BarrelManagerOptions = {
treeNode?: DirectoryTreeOptions
isTypeOnly?: boolean
/**
* Add .ts or .js
*/
extName?: KubbFile.Extname
type BarrelManagerOptions = {
logger?: Logger
}

@@ -29,114 +23,106 @@

getNamedExport(root: string, item: KubbFile.Export): KubbFile.Export[] {
const exportedNames = getExports(path.resolve(root, item.path))
getFiles({ files: generatedFiles, root, meta }: { files: KubbFile.File[]; root?: string; meta?: FileMetaBase | undefined }): Array<KubbFile.File> {
const { logger } = this.#options
if (!exportedNames) {
return [item]
}
const cachedFiles = new Map<KubbFile.Path, KubbFile.File>()
return exportedNames.reduce(
(prev, curr) => {
if (!prev[0]?.name || !prev[1]?.name) {
return prev
}
logger?.emit('debug', { date: new Date(), logs: [`Start barrel generation for pluginKey ${meta?.pluginKey?.join('.')} and root '${root}'`] })
if (curr.isTypeOnly) {
prev[1] = { ...prev[1], name: [...prev[1].name, curr.name] }
} else {
prev[0] = { ...prev[0], name: [...prev[0].name, curr.name] }
TreeNode.build(generatedFiles, root)?.forEach((treeNode) => {
if (!treeNode || !treeNode.children || !treeNode.parent?.data.path) {
return undefined
}
const barrelFile: KubbFile.File = {
path: join(treeNode.parent?.data.path, 'index.ts') as KubbFile.Path,
baseName: 'index.ts',
exports: [],
sources: [],
}
const previousBarrelFile = cachedFiles.get(barrelFile.path)
const leaves = treeNode.leaves
leaves.forEach((item) => {
if (!item.data.name) {
return undefined
}
return prev
},
[
{
...item,
name: [],
isTypeOnly: false,
},
{
...item,
name: [],
isTypeOnly: true,
},
] as KubbFile.Export[],
)
}
const sources = item.data.file?.sources || []
getNamedExports(root: string, exports: KubbFile.Export[]): KubbFile.Export[] {
return exports?.flatMap((item) => {
return this.getNamedExport(root, item)
})
}
if (!sources.some((source) => source.isIndexable)) {
logger?.emit(
'warning',
`No isIndexable source found (source should have a name and isIndexable):\nFile: ${JSON.stringify(item.data.file, undefined, 2)}`,
)
}
getIndexes(root: string): Array<KubbFile.File> | null {
const { treeNode = {}, isTypeOnly, extName } = this.#options
const tree = TreeNode.build(root, treeNode)
sources.forEach((source) => {
if (!item.data.file?.path || !source.isIndexable || !source.name) {
return undefined
}
const alreadyContainInPreviousBarrelFile = previousBarrelFile?.sources.some((item) => item.name === source.name)
if (!tree) {
return null
}
if (alreadyContainInPreviousBarrelFile) {
return undefined
}
const fileReducer = (files: Array<KubbFile.File>, treeNode: TreeNode) => {
if (!treeNode.children) {
return []
}
if (!barrelFile.exports) {
barrelFile.exports = []
}
if (treeNode.children.length > 1) {
const indexPath: KubbFile.Path = path.resolve(treeNode.data.path, 'index.ts')
// true when we have a subdirectory that also contains barrel files
const isSubExport = !!treeNode.parent?.data.path?.split?.('/')?.length
const exports: Array<KubbFile.Export> = treeNode.children
.filter(Boolean)
.map((file) => {
const importPath: string = file.data.type === 'split' ? `./${file.data.name}/index` : `./${trimExtName(file.data.name)}`
if (isSubExport) {
barrelFile.exports.push({
name: [source.name],
path: getRelativePath(treeNode.parent?.data.path, item.data.path),
isTypeOnly: source.isTypeOnly,
})
} else {
barrelFile.exports.push({
name: [source.name],
path: `./${item.data.file.baseName}`,
isTypeOnly: source.isTypeOnly,
})
}
if (importPath.endsWith('index') && file.data.type === 'single') {
return undefined
}
return {
path: extName ? `${importPath}${extName}` : importPath,
isTypeOnly,
} as KubbFile.Export
barrelFile.sources.push({
name: source.name,
isTypeOnly: source.isTypeOnly,
//TODO use parser to generate import
value: '',
isExportable: false,
isIndexable: false,
})
.filter(Boolean)
files.push({
path: indexPath,
baseName: 'index.ts',
source: '',
exports,
exportable: true,
})
} else if (treeNode.children.length === 1) {
const [treeNodeChild] = treeNode.children as [TreeNode]
})
const indexPath = path.resolve(treeNode.data.path, 'index.ts')
const importPath = treeNodeChild.data.type === 'split' ? `./${treeNodeChild.data.name}/index` : `./${trimExtName(treeNodeChild.data.name)}`
logger?.emit('debug', {
date: new Date(),
logs: [
`Generating barrelFile '${getRelativePath(root, barrelFile.path)}' for '${getRelativePath(root, treeNode.data?.path)}' with ${barrelFile.sources.length} indexable exports: '${barrelFile.sources?.map((source) => source.name).join(', ')}'`,
],
})
const exports = [
{
path: extName ? `${importPath}${extName}` : importPath,
isTypeOnly,
},
]
logger?.emit('debug', {
date: new Date(),
logs: [
`Generated barrelFile '${getRelativePath(root, barrelFile.path)}' for '${getRelativePath(root, treeNode.data?.path)}' with exports: '${cachedFiles
.get(barrelFile.path)
?.sources?.map((source) => source.name)
.join(', ')}'`,
],
})
files.push({
path: indexPath,
baseName: 'index.ts',
source: '',
exports,
exportable: true,
})
if (previousBarrelFile) {
previousBarrelFile.sources.push(...barrelFile.sources)
previousBarrelFile.exports?.push(...(barrelFile.exports || []))
} else {
cachedFiles.set(barrelFile.path, barrelFile)
}
})
treeNode.children.forEach((childItem) => {
fileReducer(files, childItem)
})
return files
}
return fileReducer([], tree).reverse()
return [...cachedFiles.values()]
}
}
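For reference, a sketch of the barrel file shape that getFiles produces for a folder containing petController.ts (an editor's addition based on the logic above; the concrete path and names are illustrative only).

import type * as KubbFile from '@kubb/fs/types'

const barrelFile: KubbFile.File = {
  path: '/project/src/gen/index.ts' as KubbFile.Path,
  baseName: 'index.ts',
  // one named re-export per indexable source found in the folder
  exports: [{ name: ['petController'], path: './petController.ts', isTypeOnly: false }],
  // re-exported names are tracked as non-indexable sources so that parent
  // barrels do not pick them up twice
  sources: [{ name: 'petController', isTypeOnly: false, value: '', isExportable: false, isIndexable: false }],
}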

@@ -1,13 +0,13 @@

import c from 'tinyrainbow'
import { clean, read } from '@kubb/fs'
import { FileManager, type ResolvedFile } from './FileManager.ts'
import type * as KubbFile from '@kubb/fs/types'
import { FileManager, processFiles } from './FileManager.ts'
import { PluginManager } from './PluginManager.ts'
import { isPromise } from './PromiseManager.ts'
import { isInputPath } from './config.ts'
import { LogLevel, createLogger, randomCliColour } from './logger.ts'
import { createLogger } from './logger.ts'
import { URLPath } from './utils/URLPath.ts'
import { join, resolve } from 'node:path'
import { getRelativePath } from '@kubb/fs'
import type { Logger } from './logger.ts'
import type { Plugin, PluginContext, PluginParameter, TransformResult } from './types.ts'
import type { Output, PluginContext } from './types.ts'

@@ -31,14 +31,4 @@ type BuildOptions = {

async function transformReducer(
this: PluginContext,
_previousCode: string,
result: TransformResult | Promise<TransformResult>,
_plugin: Plugin,
): Promise<string | null> {
return result
}
async function setup(options: BuildOptions): Promise<PluginManager> {
const { config, logger = createLogger({ logLevel: LogLevel.silent }) } = options
let count = 0
const { config, logger = createLogger() } = options

@@ -51,8 +41,5 @@ try {

if (isInputPath(config)) {
throw new Error(
`Cannot read file/URL defined in \`input.path\` or set with \`kubb generate PATH\` in the CLI of your Kubb config ${c.dim(config.input.path)}`,
{
cause: e,
},
)
throw new Error(`Cannot read file/URL defined in \`input.path\` or set with \`kubb generate PATH\` in the CLI of your Kubb config ${config.input.path}`, {
cause: e,
})
}

@@ -63,140 +50,17 @@ }

await clean(config.output.path)
await clean(join(config.root, '.kubb'))
}
const task = async (file: ResolvedFile): Promise<ResolvedFile> => {
const { path } = file
let source: string | null = await FileManager.getSource(file)
const { result: loadedResult } = await pluginManager.hookFirst({
hookName: 'load',
parameters: [path],
})
if (loadedResult && isPromise(loadedResult)) {
source = await loadedResult
}
if (loadedResult && !isPromise(loadedResult)) {
source = loadedResult
}
if (source) {
source = await pluginManager.hookReduceArg0({
hookName: 'transform',
parameters: [path, source],
reduce: transformReducer,
})
if (config.output.write || config.output.write === undefined) {
if (file.meta?.pluginKey) {
// run only for pluginKey defined in the meta of the file
await pluginManager.hookForPlugin({
pluginKey: file.meta?.pluginKey,
hookName: 'writeFile',
parameters: [path, source],
})
}
await pluginManager.hookFirst({
hookName: 'writeFile',
parameters: [path, source],
})
}
}
return {
...file,
source: source || '',
}
}
const pluginManager = new PluginManager(config, { logger, task })
pluginManager.on('execute', (executer) => {
const { hookName, parameters, plugin } = executer
if (hookName === 'writeFile') {
const [code] = parameters as PluginParameter<'writeFile'>
logger.emit('debug', [`PluginKey ${c.dim(JSON.stringify(plugin.key))} \nwith source\n\n${code}`])
}
})
pluginManager.queue.on('add', () => {
if (logger.logLevel !== LogLevel.info) {
return
}
if (count === 0) {
logger.emit('start', '💾 Writing')
}
})
pluginManager.queue.on('active', () => {
if (logger.logLevel !== LogLevel.info) {
return
}
if (logger.spinner && pluginManager.queue.size > 0) {
const text = `Item: ${count} Size: ${pluginManager.queue.size} Pending: ${pluginManager.queue.pending}`
logger.spinner.suffixText = c.dim(text)
}
++count
})
pluginManager.queue.on('completed', () => {
if (logger.logLevel !== LogLevel.info) {
return
}
if (logger.spinner) {
const text = `Item: ${count} Size: ${pluginManager.queue.size} Pending: ${pluginManager.queue.pending}`
logger.spinner.suffixText = c.dim(text)
}
})
pluginManager.on('executed', (executer) => {
const { hookName, plugin, output, parameters } = executer
const logs = [
`${randomCliColour(plugin.name)} Executing ${hookName}`,
parameters && `${c.bgWhite('Parameters')} ${randomCliColour(plugin.name)} ${hookName}`,
JSON.stringify(parameters, undefined, 2),
output && `${c.bgWhite('Output')} ${randomCliColour(plugin.name)} ${hookName}`,
output,
].filter(Boolean)
logger.emit('debug', logs as string[])
})
return pluginManager
return new PluginManager(config, { logger })
}
export async function build(options: BuildOptions): Promise<BuildOutput> {
const pluginManager = await setup(options)
const { files, pluginManager, error } = await safeBuild(options)
const { fileManager, logger } = pluginManager
if (error) throw error
await pluginManager.hookParallel({
hookName: 'buildStart',
parameters: [options.config],
})
await pluginManager.hookParallel({ hookName: 'buildEnd' })
if (logger.logLevel === LogLevel.info) {
logger.emit('end', '💾 Writing completed')
}
const files = await Promise.all(
fileManager.files.map(async (file) => ({
...file,
source: await FileManager.getSource(file),
})),
)
return {
files,
pluginManager,
error,
}

@@ -206,27 +70,85 @@ }

export async function safeBuild(options: BuildOptions): Promise<BuildOutput> {
let files = []
const pluginManager = await setup(options)
const { fileManager, logger } = pluginManager
try {
pluginManager.events.on('executing', ({ plugin, message }) => {
pluginManager.logger.emit('debug', { date: new Date(), logs: [`Executing pluginKey ${plugin.key?.join('.')} | ${message}`] })
})
try {
pluginManager.events.on('executed', ({ plugin, message, output }) => {
pluginManager.logger.emit('debug', {
date: new Date(),
logs: [`Executed pluginKey ${plugin.key?.join('.')} | ${message} | ${JSON.stringify(output, undefined, 2)}`],
})
})
await pluginManager.hookParallel({
hookName: 'buildStart',
parameters: [options.config],
message: 'buildStart',
})
await pluginManager.hookParallel({ hookName: 'buildEnd' })
// create root barrel file
const root = resolve(options.config.root)
const rootPath = resolve(root, options.config.output.path, 'index.ts')
const barrelFiles = pluginManager.fileManager.files.filter((file) => {
return file.sources.some((source) => source.isIndexable)
})
if (logger.logLevel === LogLevel.info) {
logger.emit('end', '💾 Writing completed')
const rootFile: KubbFile.File = {
path: rootPath,
baseName: 'index.ts',
exports: barrelFiles
.flatMap((file) => {
return file.sources
?.map((source) => {
if (!file.path || !source.isIndexable) {
return undefined
}
// validate whether the file comes from plugin x; needs pluginKey on every file (TODO: update typing)
const plugin = [...pluginManager.plugins].find((item) => {
const meta = file.meta as any
return item.key === meta?.pluginKey
})
const pluginOptions = (plugin?.options as { output?: Output }) ?? {}
if (pluginOptions.output?.barrelType === false) {
return undefined
}
if (FileManager.getMode(pluginOptions.output?.path) === 'single') {
return undefined
}
return {
name: pluginOptions.output?.barrelType === 'all' ? undefined : [source.name],
path: getRelativePath(rootPath, file.path),
isTypeOnly: source.isTypeOnly,
} as KubbFile.Export
})
.filter(Boolean)
})
.filter(Boolean),
sources: [],
meta: {},
}
if (options.config.output.barrelType) {
await pluginManager.fileManager.add(rootFile)
}
files = await processFiles({
config: options.config,
dryRun: !options.config.output.write,
files: pluginManager.fileManager.files,
logger: pluginManager.logger,
})
await pluginManager.hookParallel({ hookName: 'buildEnd', message: `Build stopped for ${options.config.name}` })
pluginManager.fileManager.clear()
} catch (e) {
const files = await Promise.all(
fileManager.files.map(async (file) => ({
...file,
source: await FileManager.getSource(file),
})),
)
return {
files,
files: [],
pluginManager,

@@ -237,9 +159,2 @@ error: e as Error,

const files = await Promise.all(
fileManager.files.map(async (file) => ({
...file,
source: await FileManager.getSource(file),
})),
)
return {

@@ -246,0 +161,0 @@ files,

@@ -32,5 +32,3 @@ import type { PossiblePromise } from '@kubb/types'

/**
* Type helper to make it easier to use kubb.config.js
* accepts a direct {@link Config} object, or a function that returns it.
* The function receives a {@link ConfigEnv} object that exposes two properties:
 * Type helper to make it easier to use kubb.config.ts; accepts a direct UserConfig object, or a function that returns it. The function receives a ConfigEnv object.
*/

@@ -49,3 +47,3 @@ export function defineConfig(

export function isInputPath(result: Config | undefined): result is Config<InputPath> {
return !!result && 'path' in (result as any)
return !!result && 'path' in (result?.input as any)
}

@@ -1,12 +0,1 @@

/**
 * Behaves as an Error to log a warning in the console (still stops the execution)
*/
export class Warning extends Error {
constructor(message?: string, options?: { cause: Error }) {
super(message, { cause: options?.cause })
this.name = 'Warning'
}
}
export class ValidationPluginError extends Error {}

@@ -1,36 +0,20 @@

import crypto from 'node:crypto'
import { extname, resolve } from 'node:path'
import { extname, join, relative } from 'node:path'
import { orderBy } from 'natural-orderby'
import PQueue from 'p-queue'
import { isDeepEqual } from 'remeda'
import { getRelativePath, read, write } from '@kubb/fs'
import { read, write } from '@kubb/fs'
import { BarrelManager } from './BarrelManager.ts'
import { searchAndReplace } from './transformers/searchAndReplace.ts'
import { trimExtName } from './transformers/trim.ts'
import type * as KubbFile from '@kubb/fs/types'
import type { BaseName, File, UUID } from '@kubb/fs/src/types.ts'
import { trimExtName } from '@kubb/fs'
import type { ResolvedFile } from '@kubb/fs/types'
import type { GreaterThan } from '@kubb/types'
import type { BarrelManagerOptions } from './BarrelManager.ts'
import PQueue from 'p-queue'
import type { Logger } from './logger.ts'
import transformers from './transformers/index.ts'
import type { Plugin } from './types.ts'
import { getParser } from './utils'
import type { BarrelType, Config, Plugin } from './types.ts'
import { createFile, getFileParser } from './utils'
import { type DirectoryTree, TreeNode, buildDirectoryTree } from './utils/TreeNode.ts'
export type ResolvedFile<TMeta extends FileMetaBase = FileMetaBase, TBaseName extends BaseName = BaseName> = File<TMeta, TBaseName> & {
/**
* @default crypto.randomUUID()
*/
id: UUID
/**
* Contains the first part of the baseName, generated based on baseName
* @link https://nodejs.org/api/path.html#pathformatpathobject
*/
name: string
}
export type FileMetaBase = {

@@ -40,11 +24,6 @@ pluginKey?: Plugin['key']

type FileWithMeta<TMeta extends FileMetaBase = FileMetaBase> = KubbFile.File<TMeta>
type AddResult<T extends Array<KubbFile.File>> = Promise<Awaited<GreaterThan<T['length'], 1> extends true ? Promise<ResolvedFile[]> : Promise<ResolvedFile>>>
type CacheItem = ResolvedFile & {
cancel?: () => void
}
type AddResult<T extends Array<FileWithMeta>> = Promise<Awaited<GreaterThan<T['length'], 1> extends true ? Promise<ResolvedFile[]> : Promise<ResolvedFile>>>
type AddIndexesProps = {
type: BarrelType | false | undefined
/**

@@ -54,2 +33,3 @@ * Root based on root and output.path specified in the config

root: string
files: KubbFile.File[]
/**

@@ -60,43 +40,45 @@ * Output for plugin

path: string
exportAs?: string
extName?: KubbFile.Extname
exportType?: 'barrel' | 'barrelNamed' | false
}
logger: Logger
options?: BarrelManagerOptions
meta?: FileWithMeta['meta']
}
group?: {
output: string
exportAs: string
}
logger?: Logger
type Options = {
queue?: PQueue
task?: (file: ResolvedFile) => Promise<ResolvedFile>
meta?: FileMetaBase
}
export class FileManager {
#cache: Map<KubbFile.Path, CacheItem[]> = new Map()
#task: Options['task']
#queue: PQueue
constructor({ task = async (file) => file, queue = new PQueue() }: Options = {}) {
this.#task = task
this.#queue = queue
#filesByPath: Map<KubbFile.Path, KubbFile.ResolvedFile> = new Map()
constructor() {
return this
}
get files(): Array<FileWithMeta> {
const files: Array<FileWithMeta> = []
this.#cache.forEach((item) => {
files.push(...item.flat(1))
})
get files(): Array<KubbFile.ResolvedFile> {
return [...this.#filesByPath.values()]
}
return files
get orderedFiles(): Array<KubbFile.ResolvedFile> {
return orderBy(
[...this.#filesByPath.values()],
[
(v) => v?.meta && 'pluginKey' in v.meta && !v.meta.pluginKey,
(v) => v.path.length,
(v) => trimExtName(v.path).endsWith('index'),
(v) => trimExtName(v.baseName),
(v) => v.path.split('.').pop(),
],
)
}
get isExecuting(): boolean {
return this.#queue.size !== 0 && this.#queue.pending !== 0
get groupedFiles(): DirectoryTree | null {
return buildDirectoryTree([...this.#filesByPath.values()])
}
async add<T extends Array<FileWithMeta> = Array<FileWithMeta>>(...files: T): AddResult<T> {
const promises = combineFiles(files).map((file) => {
get treeNode(): TreeNode | null {
return TreeNode.build([...this.#filesByPath.values()])
}
async add<T extends Array<KubbFile.File> = Array<KubbFile.File>>(...files: T): AddResult<T> {
const promises = files.map((file) => {
if (file.override) {

@@ -118,48 +100,21 @@ return this.#add(file)

async #add(file: FileWithMeta): Promise<ResolvedFile> {
const controller = new AbortController()
const resolvedFile: ResolvedFile = {
id: crypto.randomUUID(),
name: trimExtName(file.baseName),
...file,
}
async #add(file: KubbFile.File): Promise<ResolvedFile> {
const resolvedFile = createFile(file)
if (resolvedFile.exports?.length) {
const folder = resolvedFile.path.replace(resolvedFile.baseName, '')
this.#filesByPath.set(resolvedFile.path, resolvedFile)
resolvedFile.exports = resolvedFile.exports.filter((exportItem) => {
const exportedFile = this.files.find((file) => file.path.includes(resolve(folder, exportItem.path)))
return resolvedFile
}
if (exportedFile) {
return exportedFile.exportable
}
return true
})
}
this.#cache.set(resolvedFile.path, [{ cancel: () => controller.abort(), ...resolvedFile }])
return this.#queue.add(
async () => {
return this.#task?.(resolvedFile)
},
{ signal: controller.signal },
) as Promise<ResolvedFile>
clear() {
this.#filesByPath.clear()
}
async #addOrAppend(file: FileWithMeta): Promise<ResolvedFile> {
const previousCaches = this.#cache.get(file.path)
const previousCache = previousCaches ? previousCaches.at(previousCaches.length - 1) : undefined
async #addOrAppend(file: KubbFile.File): Promise<ResolvedFile> {
const previousFile = this.#filesByPath.get(file.path)
if (previousCache) {
this.#cache.delete(previousCache.path)
if (previousFile) {
this.#filesByPath.delete(previousFile.path)
return this.#add({
...file,
source: previousCache.source && file.source ? `${previousCache.source}\n${file.source}` : '',
imports: [...(previousCache.imports || []), ...(file.imports || [])],
exports: [...(previousCache.exports || []), ...(file.exports || [])],
env: { ...(previousCache.env || {}), ...(file.env || {}) },
})
return this.#add(mergeFile(previousFile, file))
}

@@ -169,107 +124,63 @@ return this.#add(file)

async addIndexes({ root, output, meta, logger, options = {} }: AddIndexesProps): Promise<void> {
const { exportType = 'barrel' } = output
// ^?
if (exportType === false) {
return undefined
}
getCacheById(id: string): KubbFile.File | undefined {
return [...this.#filesByPath.values()].find((file) => file.id === id)
}
const pathToBuildFrom = resolve(root, output.path)
getByPath(path: KubbFile.Path): KubbFile.ResolvedFile | undefined {
return this.#filesByPath.get(path)
}
if (transformers.trimExtName(pathToBuildFrom).endsWith('index')) {
logger.emit('warning', 'Output has the same fileName as the barrelFiles, please disable barrel generation')
deleteByPath(path: KubbFile.Path): void {
const cacheItem = this.getByPath(path)
if (!cacheItem) {
return
}
const exportPath = output.path.startsWith('./') ? trimExtName(output.path) : `./${trimExtName(output.path)}`
const mode = FileManager.getMode(output.path)
const barrelManager = new BarrelManager({
extName: output.extName,
...options,
})
let files = barrelManager.getIndexes(pathToBuildFrom)
this.#filesByPath.delete(path)
}
if (!files) {
return undefined
async getBarrelFiles({ type, files, meta = {}, root, output, logger }: AddIndexesProps): Promise<KubbFile.File[]> {
if (!type) {
return []
}
if (exportType === 'barrelNamed') {
files = files.map((file) => {
if (file.exports) {
return {
...file,
exports: barrelManager.getNamedExports(pathToBuildFrom, file.exports),
}
}
return file
})
}
const barrelManager = new BarrelManager({ logger })
await Promise.all(
files.map((file) => {
return this.#addOrAppend({
...file,
meta: meta ? meta : file.meta,
})
}),
)
const pathToBuildFrom = join(root, output.path)
const rootPath = mode === 'split' ? `${exportPath}/index${output.extName || ''}` : `${exportPath}${output.extName || ''}`
const rootFile: FileWithMeta = {
path: resolve(root, 'index.ts'),
baseName: 'index.ts',
source: '',
exports: [
output.exportAs
? {
name: output.exportAs,
asAlias: true,
path: rootPath,
isTypeOnly: options.isTypeOnly,
}
: {
path: rootPath,
isTypeOnly: options.isTypeOnly,
},
],
exportable: true,
}
if (trimExtName(pathToBuildFrom).endsWith('index')) {
logger?.emit('warning', 'Output has the same fileName as the barrelFiles, please disable barrel generation')
if (exportType === 'barrelNamed' && !output.exportAs && rootFile.exports?.[0]) {
rootFile.exports = barrelManager.getNamedExport(root, rootFile.exports[0])
return []
}
await this.#addOrAppend({
...rootFile,
meta: meta ? meta : rootFile.meta,
})
}
const barrelFiles = barrelManager.getFiles({ files, root: pathToBuildFrom, meta })
getCacheByUUID(UUID: KubbFile.UUID): FileWithMeta | undefined {
let cache: FileWithMeta | undefined
if (type === 'all') {
return barrelFiles.map((file) => {
return {
...file,
exports: file.exports?.map((exportItem) => {
return {
...exportItem,
name: undefined,
}
}),
}
})
}
this.#cache.forEach((files) => {
cache = files.find((item) => item.id === UUID)
return barrelFiles.map((indexFile) => {
return {
...indexFile,
meta,
}
})
return cache
}
get(path: KubbFile.Path): Array<FileWithMeta> | undefined {
return this.#cache.get(path)
}
remove(path: KubbFile.Path): void {
const cacheItem = this.get(path)
if (!cacheItem) {
return
}
this.#cache.delete(path)
}
async write(...params: Parameters<typeof write>): Promise<string | undefined> {
async write(...params: Parameters<typeof write>): ReturnType<typeof write> {
return write(...params)
}
async read(...params: Parameters<typeof read>): Promise<string> {
async read(...params: Parameters<typeof read>): ReturnType<typeof read> {
return read(...params)

@@ -279,10 +190,2 @@ }

// statics
static async getSource<TMeta extends FileMetaBase = FileMetaBase>(file: FileWithMeta<TMeta>): Promise<string> {
return getSource<TMeta>(file)
}
static combineFiles<TMeta extends FileMetaBase = FileMetaBase>(files: Array<FileWithMeta<TMeta> | null>): Array<FileWithMeta<TMeta>> {
return combineFiles<TMeta>(files)
}
static getMode(path: string | undefined | null): KubbFile.Mode {

@@ -294,93 +197,62 @@ if (!path) {

}
}
static get extensions(): Array<KubbFile.Extname> {
return ['.js', '.ts', '.tsx']
}
type GetSourceOptions = {
extname?: KubbFile.Extname
logger?: Logger
}
static isJavascript(baseName: string): boolean {
return FileManager.extensions.some((extension) => baseName.endsWith(extension))
export async function getSource<TMeta extends FileMetaBase = FileMetaBase>(
file: ResolvedFile<TMeta>,
{ logger, extname }: GetSourceOptions = {},
): Promise<string> {
const parser = await getFileParser(file.extname)
const source = await parser.print(file, { logger, extname })
return parser.format(source)
}
function mergeFile<TMeta extends FileMetaBase = FileMetaBase>(a: KubbFile.File<TMeta>, b: KubbFile.File<TMeta>): KubbFile.File<TMeta> {
return {
...a,
sources: [...(a.sources || []), ...(b.sources || [])],
imports: [...(a.imports || []), ...(b.imports || [])],
exports: [...(a.exports || []), ...(b.exports || [])],
}
}
function combineFiles<TMeta extends FileMetaBase = FileMetaBase>(files: Array<FileWithMeta<TMeta> | null>): Array<FileWithMeta<TMeta>> {
return files.filter(Boolean).reduce(
(acc, file: FileWithMeta<TMeta>) => {
const prevIndex = acc.findIndex((item) => item.path === file.path)
export function combineSources(sources: Array<KubbFile.Source>): Array<KubbFile.Source> {
return sources.reduce(
(prev, curr) => {
const prevByName = prev.findLast((imp) => imp.name && imp.name === curr.name)
const prevByPathAndIsExportable = prev.findLast((imp) => imp.name === curr.name && imp.isExportable)
if (prevIndex === -1) {
return [...acc, file]
if (prevByPathAndIsExportable) {
// we already have an export that has the same name but uses `isExportable` (export type ...)
return [...prev, curr]
}
const prev = acc[prevIndex]
if (prevByName) {
prevByName.value = curr.value
prevByName.isExportable = curr.isExportable
prevByName.isTypeOnly = curr.isTypeOnly
prevByName.isIndexable = curr.isIndexable
if (prev && file.override) {
acc[prevIndex] = {
imports: [],
exports: [],
...file,
}
return acc
return prev
}
if (prev) {
acc[prevIndex] = {
...file,
source: prev.source && file.source ? `${prev.source}\n${file.source}` : '',
imports: [...(prev.imports || []), ...(file.imports || [])],
exports: [...(prev.exports || []), ...(file.exports || [])],
env: { ...(prev.env || {}), ...(file.env || {}) },
}
}
return acc
return [...prev, curr]
},
[] as Array<FileWithMeta<TMeta>>,
[] as Array<KubbFile.Source>,
)
}
export async function getSource<TMeta extends FileMetaBase = FileMetaBase>(file: FileWithMeta<TMeta>): Promise<string> {
// only use .js, .ts or .tsx files for ESM imports
if (file.language ? !['typescript', 'javascript'].includes(file.language) : !FileManager.isJavascript(file.baseName)) {
return file.source
}
const parser = await getParser(file.language)
const exports = file.exports ? combineExports(file.exports) : []
// imports should be defined and the source should contain code, otherwise we would emit imports that are never used
const imports = file.imports && file.source ? combineImports(file.imports, exports, file.source) : []
const importNodes = imports
.filter((item) => {
const path = item.root ? getRelativePath(item.root, item.path) : item.path
// trim extName
return path !== trimExtName(file.path)
})
.map((item) => {
const path = item.root ? getRelativePath(item.root, item.path) : item.path
return parser.factory.createImportDeclaration({
name: item.name,
path: item.extName ? `${path}${item.extName}` : path,
isTypeOnly: item.isTypeOnly,
})
})
const exportNodes = exports.map((item) =>
parser.factory.createExportDeclaration({
name: item.name,
path: item.extName ? `${item.path}${item.extName}` : item.path,
isTypeOnly: item.isTypeOnly,
asAlias: item.asAlias,
}),
)
const source = [parser.print([...importNodes, ...exportNodes]), getEnvSource(file.source, file.env)].join('\n')
// do some basic linting with the ts compiler
return parser.print([], { source, noEmitHelpers: false })
}
export function combineExports(exports: Array<KubbFile.Export>): Array<KubbFile.Export> {
const combinedExports = orderBy(exports, [(v) => !v.isTypeOnly], ['asc']).reduce(
return orderBy(exports, [
(v) => !!Array.isArray(v.name),
(v) => !v.isTypeOnly,
(v) => v.path,
(v) => !!v.name,
(v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
]).reduce(
(prev, curr) => {

@@ -400,2 +272,3 @@ const name = curr.name

// we already have an item that was unique enough, the name field is empty, or the previous asAlias is set while the current one is not
if (uniquePrev || (Array.isArray(name) && !name.length) || (prevByPath?.asAlias && !curr.asAlias)) {

@@ -415,2 +288,3 @@ return prev

// merge all names when prev and current both have the same isTypeOnly set
if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(curr.name) && prevByPath.isTypeOnly === curr.isTypeOnly) {

@@ -426,8 +300,12 @@ prevByPath.name = [...new Set([...prevByPath.name, ...curr.name])]

)
return orderBy(combinedExports, [(v) => !v.isTypeOnly, (v) => v.asAlias], ['desc', 'desc'])
}
export function combineImports(imports: Array<KubbFile.Import>, exports: Array<KubbFile.Export>, source?: string): Array<KubbFile.Import> {
const combinedImports = orderBy(imports, [(v) => !v.isTypeOnly], ['asc']).reduce(
return orderBy(imports, [
(v) => !!Array.isArray(v.name),
(v) => !v.isTypeOnly,
(v) => v.path,
(v) => !!v.name,
(v) => (Array.isArray(v.name) ? orderBy(v.name) : v.name),
]).reduce(
(prev, curr) => {

@@ -441,3 +319,5 @@ let name = Array.isArray(curr.name) ? [...new Set(curr.name)] : curr.name

const checker = (name?: string) => name && !!source.includes(name)
const checker = (name?: string) => {
return name && !!source.includes(name)
}

@@ -452,2 +332,3 @@ return checker(importName) || exports.some(({ name }) => (Array.isArray(name) ? name.some(checker) : checker(name)))

// merge all names, check whether each importName is used in the generated source, and filter out the ones that are not
if (Array.isArray(name)) {

@@ -466,2 +347,3 @@ name = name.filter((item) => (typeof item === 'string' ? hasImportInSource(item) : hasImportInSource(item.propertyName)))

// already unique enough or name is empty
if (uniquePrev || (Array.isArray(name) && !name.length)) {

@@ -471,2 +353,3 @@ return prev

// new item, append name
if (!prevByPath) {

@@ -482,2 +365,3 @@ return [

// merge all names when prev and current both have the same isTypeOnly set
if (prevByPath && Array.isArray(prevByPath.name) && Array.isArray(name) && prevByPath.isTypeOnly === curr.isTypeOnly) {

@@ -489,2 +373,3 @@ prevByPath.name = [...new Set([...prevByPath.name, ...name])]

// no import was found in the source, ignore import
if (!Array.isArray(name) && name && !hasImportInSource(name)) {

@@ -498,42 +383,51 @@ return prev

)
}
return orderBy(combinedImports, [(v) => !v.isTypeOnly], ['desc'])
type WriteFilesProps = {
config: Config
files: Array<KubbFile.ResolvedFile>
logger: Logger
dryRun?: boolean
}
/**
* Global queue
*/
const queue = new PQueue({ concurrency: 100 })
function getEnvSource(source: string, env: NodeJS.ProcessEnv | undefined): string {
if (!env) {
return source
}
export async function processFiles({ dryRun, config, logger, files }: WriteFilesProps) {
const orderedFiles = orderBy(files, [
(v) => v?.meta && 'pluginKey' in v.meta && !v.meta.pluginKey,
(v) => v.path.length,
(v) => trimExtName(v.path).endsWith('index'),
])
const keys = Object.keys(env)
logger.emit('debug', {
date: new Date(),
logs: [JSON.stringify({ files: orderedFiles }, null, 2)],
fileName: 'kubb-files.log',
})
if (!keys.length) {
return source
}
if (!dryRun) {
const size = orderedFiles.length
return keys.reduce((prev, key: string) => {
const environmentValue = env[key]
const replaceBy = environmentValue ? `'${environmentValue.replaceAll('"', '')?.replaceAll("'", '')}'` : 'undefined'
logger.emit('progress_start', { id: 'files', size, message: 'Writing files ...' })
const promises = orderedFiles.map(async (file) => {
await queue.add(async () => {
const message = file ? `Writing ${relative(config.root, file.path)}` : ''
const extname = config.output.extension?.[file.extname]
if (key.toUpperCase() !== key) {
throw new TypeError(`Environment should be in upperCase for ${key}`)
}
const source = await getSource(file, { logger, extname })
if (typeof replaceBy === 'string') {
prev = searchAndReplace({
text: prev.replaceAll(`process.env.${key}`, replaceBy),
replaceBy,
prefix: 'process.env',
key,
await write(file.path, source, { sanity: false })
logger.emit('progressed', { id: 'files', message })
})
// removes `declare const ...`
prev = searchAndReplace({
text: prev.replaceAll(/(declare const).*\n/gi, ''),
replaceBy,
key,
})
}
})
return prev
}, source)
await Promise.all(promises)
logger.emit('progress_stop', { id: 'files' })
}
return files
}
export { build, build as default, safeBuild } from './build.ts'
export { defineConfig, isInputPath } from './config.ts'
export { Warning } from './errors.ts'
export { FileManager } from './FileManager.ts'
export { FileManager, getSource } from './FileManager.ts'
export type { FileMetaBase } from './FileManager.ts'
export { Generator } from './Generator.ts'
export { BaseGenerator } from './BaseGenerator.ts'
export { PackageManager } from './PackageManager.ts'

@@ -12,4 +11,1 @@ export { createPlugin } from './plugin.ts'

export type * from './types.ts'
// biome-ignore lint/suspicious/noEmptyInterface: <explanation>
export interface _Register {}

@@ -6,13 +6,21 @@ import seedrandom from 'seedrandom'

import type { ConsolaInstance } from 'consola'
import type { Ora } from 'ora'
import { resolve } from 'node:path'
import { write } from '@kubb/fs'
import { type ConsolaInstance, type LogLevel, createConsola } from 'consola'
import type { Formatter } from 'tinyrainbow'
// TODO: replace with verbose and debug flags
export const LogLevel = {
silent: 'silent',
info: 'info',
debug: 'debug',
} as const
type DebugEvent = { date: Date; logs: string[]; fileName?: string }
type Events = {
start: [message: string]
success: [message: string]
error: [message: string, cause: Error]
warning: [message: string]
debug: [DebugEvent]
info: [message: string]
progress_start: [{ id: string; size: number; message?: string }]
progressed: [{ id: string; message?: string }]
progress_stop: [{ id: string }]
}
export const LogMapper = {

@@ -24,11 +32,2 @@ silent: Number.NEGATIVE_INFINITY,

export type LogLevel = keyof typeof LogLevel
type Events = {
start: [message: string]
end: [message: string]
error: [message: string, cause: Error]
warning: [message: string]
debug: [logs: string[]]
}
export type Logger = {

@@ -40,6 +39,6 @@ /**

logLevel: LogLevel
spinner?: Ora
consola?: ConsolaInstance
on: EventEmitter<Events>['on']
emit: EventEmitter<Events>['emit']
writeLogs: () => Promise<string[]>
}

@@ -49,27 +48,47 @@

name?: string
logLevel: LogLevel
spinner?: Ora
logLevel?: LogLevel
consola?: ConsolaInstance
}
export function createLogger({ logLevel, name, spinner, consola }: Props): Logger {
export function createLogger({ logLevel = 3, name, consola: _consola }: Props = {}): Logger {
const events = new EventEmitter<Events>()
const startDate = Date.now()
const cachedLogs = new Set<DebugEvent>()
const consola =
_consola ||
createConsola({
level: logLevel,
formatOptions: {
colors: true,
date: true,
columns: 80,
compact: logLevel !== LogMapper.debug,
},
}).withTag(name ? randomCliColour(name) : '')
consola?.wrapConsole()
events.on('start', (message) => {
if (spinner) {
spinner.start(message)
}
consola.start(message)
})
events.on('end', (message) => {
if (spinner) {
spinner.suffixText = ''
spinner.succeed(message)
}
events.on('success', (message) => {
consola.success(message)
})
events.on('warning', (message) => {
if (spinner) {
spinner.warn(c.yellow(message))
consola.warn(c.yellow(message))
})
events.on('info', (message) => {
consola.info(c.yellow(message))
})
events.on('debug', (message) => {
if (message.logs.join('\n\n').length <= 100 && logLevel === LogMapper.debug) {
console.log(message.logs.join('\n\n'))
}
cachedLogs.add(message)
})

@@ -84,13 +103,36 @@

if (consola) {
consola.level = logLevel
}
const logger: Logger = {
name,
logLevel,
spinner,
consola,
on: (...args) => {
on(...args) {
return events.on(...args)
},
emit: (...args) => {
emit(...args) {
return events.emit(...args)
},
async writeLogs() {
const files: Record<string, string[]> = {}
cachedLogs.forEach((log) => {
const fileName = resolve(process.cwd(), '.kubb', log.fileName || `kubb-${startDate}.log`)
if (!files[fileName]) {
files[fileName] = []
}
files[fileName] = [...files[fileName], `[${log.date.toLocaleString()}]: ${log.logs.join('\n\n')}`]
})
await Promise.all(
Object.entries(files).map(async ([fileName, logs]) => {
return write(fileName, logs.join('\n'))
}),
)
return Object.keys(files)
},
}

@@ -97,0 +139,0 @@
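A hedged usage sketch of the consola-backed logger above (an editor's addition, not part of this diff); the '@kubb/core/logger' import specifier is an assumption, and logLevel 3 corresponds to consola's info level.

import { createLogger } from '@kubb/core/logger'

const logger = createLogger({ logLevel: 3, name: 'core' })

logger.emit('start', 'Generating files')
logger.emit('debug', { date: new Date(), logs: ['resolved 12 files'] })
logger.emit('success', 'Generation finished')

// debug events are cached in memory and only flushed to .kubb/*.log on demand
const writtenLogFiles = await logger.writeLogs()
console.log(writtenLogFiles)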

@@ -71,3 +71,3 @@ import mod from 'node:module'

} catch (e) {
console.log(e)
console.error(e)
return undefined

@@ -74,0 +74,0 @@ }

import path from 'node:path'
import { createPluginCache } from './utils/cache.ts'
import type { FileManager } from './FileManager.ts'

@@ -40,3 +38,3 @@ import type { PluginManager } from './PluginManager.ts'

key: ['core'],
api() {
context() {
return {

@@ -50,3 +48,3 @@ get config() {

get plugin() {
// see pluginManager.#execute where we use `.call` to override `this` with the correct plugin
// see pluginManager.#execute where we use `.call` to override the context with the correct plugin
return options.plugin as NonNullable<Options['plugin']>

@@ -68,7 +66,6 @@ },

resolveName,
cache: createPluginCache(),
}
},
resolvePath(baseName) {
const root = path.resolve(this.config.root, this.config.output.path)
const root = path.resolve(options.config.root, options.config.output.path)

@@ -75,0 +72,0 @@ return path.resolve(root, baseName)

@@ -1,9 +0,5 @@

import PQueue from 'p-queue'
import { readSync } from '@kubb/fs'
import { FileManager, type ResolvedFile } from './FileManager.ts'
import { FileManager } from './FileManager.ts'
import { isPromise, isPromiseRejectedResult } from './PromiseManager.ts'
import { PromiseManager } from './PromiseManager.ts'
import { ValidationPluginError } from './errors.ts'
import { LogLevel } from './logger.ts'
import { pluginCore } from './plugin.ts'

@@ -35,11 +31,6 @@ import { transformReservedWord } from './transformers/transformReservedWord.ts'

/**
* Get the type of the first argument in a function.
* @example Arg0<(a: string, b: number) => void> -> string
*/
type Argument0<H extends keyof PluginLifecycle> = Parameters<RequiredPluginLifecycle[H]>[0]
type Strategy = 'hookFirst' | 'hookForPlugin' | 'hookParallel' | 'hookSeq'
type Strategy = 'hookFirst' | 'hookForPlugin' | 'hookParallel' | 'hookReduceArg0' | 'hookSeq'
type Executer<H extends PluginLifecycleHooks = PluginLifecycleHooks> = {
message: string
strategy: Strategy

@@ -63,11 +54,6 @@ hookName: H

logger: Logger
/**
* Task for the FileManager
*/
task: (file: ResolvedFile) => Promise<ResolvedFile>
}
type Events = {
execute: [executer: Executer]
executing: [executer: Executer]
executed: [executer: Executer]

@@ -80,3 +66,3 @@ error: [error: Error]

mode?: KubbFile.Mode
extName: KubbFile.Extname
extname: KubbFile.Extname
pluginKey: Plugin['key']

@@ -87,3 +73,3 @@ options?: TOptions

export class PluginManager {
readonly plugins: PluginWithLifeCycle[]
readonly plugins = new Set<Plugin<GetPluginFactoryOptions<any>>>()
readonly fileManager: FileManager

@@ -96,2 +82,3 @@ readonly events: EventEmitter<Events> = new EventEmitter()

readonly logger: Logger
readonly options: Options
readonly #core: Plugin<PluginCore>

@@ -102,12 +89,7 @@

readonly queue: PQueue
constructor(config: Config, options: Options) {
this.config = config
this.options = options
this.logger = options.logger
this.queue = new PQueue({ concurrency: 1 })
this.fileManager = new FileManager({
task: options.task,
queue: this.queue,
})
this.fileManager = new FileManager()
this.#promiseManager = new PromiseManager({

@@ -117,4 +99,2 @@ nullCheck: (state: SafeParseResult<'resolveName'> | null) => !!state?.result,

const plugins = config.plugins || []
const core = pluginCore({

@@ -130,7 +110,8 @@ config,

// call core.api.call with empty context so we can transform `api()` to `api: {}`
this.#core = this.#parse(core as unknown as UserPlugin, this as any, core.api.call(null as any)) as Plugin<PluginCore>
// call core.context.call with empty context so we can transform `context()` to `context: {}`
this.#core = this.#parse(core as unknown as UserPlugin, this as any, core.context.call(null as any)) as Plugin<PluginCore>
;[this.#core, ...(config.plugins || [])].forEach((plugin) => {
const parsedPlugin = this.#parse(plugin as UserPlugin, this, this.#core.context)
this.plugins = [this.#core, ...plugins].map((plugin) => {
return this.#parse(plugin as UserPlugin, this, this.#core.api)
this.plugins.add(parsedPlugin)
})

@@ -141,5 +122,4 @@

getFile<TOptions = object>({ name, mode, extName, pluginKey, options }: GetFileProps<TOptions>): KubbFile.File<{ pluginKey: Plugin['key'] }> {
let source = ''
const baseName = `${name}${extName}` as const
getFile<TOptions = object>({ name, mode, extname, pluginKey, options }: GetFileProps<TOptions>): KubbFile.File<{ pluginKey: Plugin['key'] }> {
const baseName = `${name}${extname}` as const
const path = this.resolvePath({ baseName, mode, pluginKey, options })

@@ -151,8 +131,2 @@

try {
source = readSync(path)
} catch (_e) {
//
}
return {

@@ -164,3 +138,3 @@ path,

},
source,
sources: [],
}

@@ -175,10 +149,14 @@ }

parameters: [params.baseName, params.mode, params.options as object],
message: `Resolving path '${params.baseName}'`,
})
if (paths && paths?.length > 1) {
this.logger.emit('debug', [
`Cannot return a path where the 'pluginKey' ${
params.pluginKey ? JSON.stringify(params.pluginKey) : '"'
} is not unique enough\n\nPaths: ${JSON.stringify(paths, undefined, 2)}\n\nFalling back on the first item.\n`,
])
this.logger.emit('debug', {
date: new Date(),
logs: [
`Cannot return a path where the 'pluginKey' ${
params.pluginKey ? JSON.stringify(params.pluginKey) : '"'
} is not unique enough\n\nPaths: ${JSON.stringify(paths, undefined, 2)}\n\nFalling back on the first item.\n`,
],
})
}

@@ -191,4 +169,6 @@

parameters: [params.baseName, params.mode, params.options as object],
message: `Resolving path '${params.baseName}'`,
}).result
}
//TODO refactor by using the order of plugins and the cache of the fileManager instead of guessing and recreating the name/path
resolveName = (params: ResolveNameParams): string => {

@@ -200,10 +180,14 @@ if (params.pluginKey) {

parameters: [params.name, params.type],
message: `Resolving name '${params.name}' and type '${params.type}'`,
})
if (names && names?.length > 1) {
this.logger.emit('debug', [
`Cannot return a name where the 'pluginKey' ${
params.pluginKey ? JSON.stringify(params.pluginKey) : '"'
} is not unique enough\n\nNames: ${JSON.stringify(names, undefined, 2)}\n\nFalling back on the first item.\n`,
])
this.logger.emit('debug', {
date: new Date(),
logs: [
`Cannot return a name where the 'pluginKey' ${
params.pluginKey ? JSON.stringify(params.pluginKey) : '"'
} is not unique enough\n\nNames: ${JSON.stringify(names, undefined, 2)}\n\nFalling back on the first item.\n`,
],
})
}

@@ -217,2 +201,3 @@

parameters: [params.name, params.type],
message: `Resolving name '${params.name}' and type '${params.type}'`,
}).result

@@ -233,6 +218,7 @@

*/
hookForPlugin<H extends PluginLifecycleHooks>({
async hookForPlugin<H extends PluginLifecycleHooks>({
pluginKey,
hookName,
parameters,
message,
}: {

@@ -242,5 +228,8 @@ pluginKey: Plugin['key']

parameters: PluginParameter<H>
}): Promise<Array<ReturnType<ParseResult<H>> | null>> | null {
message: string
}): Promise<Array<ReturnType<ParseResult<H>> | null>> {
const plugins = this.getPluginsByKey(hookName, pluginKey)
this.logger.emit('progress_start', { id: hookName, size: plugins.length, message: 'Running plugins...' })
const promises = plugins

@@ -253,2 +242,3 @@ .map((plugin) => {

plugin,
message,
})

@@ -258,3 +248,7 @@ })

return Promise.all(promises)
const items = await Promise.all(promises)
this.logger.emit('progress_stop', { id: hookName })
return items
}
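`hookForPlugin` is the lower-level entry point behind `resolvePath`/`resolveName`: it is now async, wraps the run in `progress_start`/`progress_stop` logger events, and returns one result per matching plugin. A hedged call sketch, assuming a hypothetical plugin key and the `[baseName, mode, options]` parameter tuple used for `resolvePath` above:

const paths = await pluginManager.hookForPlugin({
  pluginKey: ['plugin-ts'],                 // hypothetical key
  hookName: 'resolvePath',
  parameters: ['pet.ts', undefined, undefined],
  message: `Resolving path 'pet.ts'`,
})
// paths holds one entry (possibly null) per plugin that implements the 'resolvePath' hook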

@@ -269,2 +263,3 @@ /**

parameters,
message,
}: {

@@ -274,6 +269,7 @@ pluginKey: Plugin['key']

parameters: PluginParameter<H>
message: string
}): Array<ReturnType<ParseResult<H>>> | null {
const plugins = this.getPluginsByKey(hookName, pluginKey)
return plugins
const result = plugins
.map((plugin) => {

@@ -285,5 +281,8 @@ return this.#executeSync<H>({

plugin,
message,
})
})
.filter(Boolean)
return result
}

@@ -298,2 +297,3 @@

skipped,
message,
}: {

@@ -303,24 +303,32 @@ hookName: H

skipped?: ReadonlySet<Plugin> | null
message: string
}): Promise<SafeParseResult<H>> {
const promises = this.#getSortedPlugins()
.filter((plugin) => {
return skipped ? skipped.has(plugin) : true
})
.map((plugin) => {
return async () => {
const value = await this.#execute<H>({
strategy: 'hookFirst',
hookName,
parameters,
plugin,
})
const plugins = this.#getSortedPlugins(hookName).filter((plugin) => {
return skipped ? skipped.has(plugin) : true
})
return Promise.resolve({
plugin,
result: value,
} as SafeParseResult<H>)
}
})
this.logger.emit('progress_start', { id: hookName, size: plugins.length })
return this.#promiseManager.run('first', promises)
const promises = plugins.map((plugin) => {
return async () => {
const value = await this.#execute<H>({
strategy: 'hookFirst',
hookName,
parameters,
plugin,
message,
})
return Promise.resolve({
plugin,
result: value,
} as SafeParseResult<H>)
}
})
const result = await this.#promiseManager.run('first', promises)
this.logger.emit('progress_stop', { id: hookName })
return result
}
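`hookFirst` now builds the filtered plugin list up front and hands the promise factories to the `PromiseManager` with the `'first'` strategy. Conceptually, that strategy runs the factories in plugin order and stops at the first acceptable result (compare the `nullCheck` predicate mentioned near the top of this file). A minimal sketch of the idea, not the actual `PromiseManager` implementation:

// Conceptual sketch only: run factories sequentially and return the first acceptable result.
async function runFirst<T>(
  factories: Array<() => Promise<T | null>>,
  isAcceptable: (value: T | null) => boolean = (value) => value != null,
): Promise<T | null> {
  for (const factory of factories) {
    const value = await factory()
    if (isAcceptable(value)) {
      return value
    }
  }
  return null
}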

@@ -335,2 +343,3 @@

skipped,
message,
}: {

@@ -340,10 +349,10 @@ hookName: H

skipped?: ReadonlySet<Plugin> | null
message: string
}): SafeParseResult<H> {
let parseResult: SafeParseResult<H> = null as unknown as SafeParseResult<H>
const plugins = this.#getSortedPlugins(hookName).filter((plugin) => {
return skipped ? skipped.has(plugin) : true
})
for (const plugin of this.#getSortedPlugins()) {
if (skipped?.has(plugin)) {
continue
}
for (const plugin of plugins) {
parseResult = {

@@ -355,2 +364,3 @@ result: this.#executeSync<H>({

plugin,
message,
}),

@@ -364,2 +374,3 @@ plugin,

}
return parseResult

@@ -374,7 +385,12 @@ }

parameters,
message,
}: {
hookName: H
parameters?: Parameters<RequiredPluginLifecycle[H]> | undefined
message: string
}): Promise<Awaited<TOuput>[]> {
const promises = this.#getSortedPlugins().map((plugin) => {
const plugins = this.#getSortedPlugins(hookName)
this.logger.emit('progress_start', { id: hookName, size: plugins.length })
const promises = plugins.map((plugin) => {
return () =>

@@ -386,2 +402,3 @@ this.#execute({

plugin,
message,
}) as Promise<TOuput>

@@ -394,3 +411,3 @@ })

if (isPromiseRejectedResult<Error>(result)) {
const plugin = this.#getSortedPlugins()[index]
const plugin = this.#getSortedPlugins(hookName)[index]

@@ -401,2 +418,4 @@ this.#catcher<H>(result.reason, plugin, hookName)

this.logger.emit('progress_stop', { id: hookName })
return results.filter((result) => result.status === 'fulfilled').map((result) => (result as PromiseFulfilledResult<Awaited<TOuput>>).value)

@@ -406,38 +425,13 @@ }

/**
* Chain all plugins, `reduce` can be passed through to handle every returned value. The return value of the first plugin will be used as the first parameter for the plugin after that.
* Chains plugins
*/
hookReduceArg0<H extends PluginLifecycleHooks>({
async hookSeq<H extends PluginLifecycleHooks>({
hookName,
parameters,
reduce,
}: {
hookName: H
parameters: PluginParameter<H>
reduce: (reduction: Argument0<H>, result: ReturnType<ParseResult<H>>, plugin: Plugin) => PossiblePromise<Argument0<H> | null>
}): Promise<Argument0<H>> {
const [argument0, ...rest] = parameters
message,
}: { hookName: H; parameters?: PluginParameter<H>; message: string }): Promise<void> {
const plugins = this.#getSortedPlugins(hookName)
this.logger.emit('progress_start', { id: hookName, size: plugins.length })
let promise: Promise<Argument0<H>> = Promise.resolve(argument0)
for (const plugin of this.#getSortedPlugins()) {
promise = promise
.then((arg0) => {
const value = this.#execute({
strategy: 'hookReduceArg0',
hookName,
parameters: [arg0, ...rest] as PluginParameter<H>,
plugin,
})
return value
})
.then((result) => reduce.call(this.#core.api, argument0, result as ReturnType<ParseResult<H>>, plugin)) as Promise<Argument0<H>>
}
return promise
}
/**
* Chains plugins
*/
async hookSeq<H extends PluginLifecycleHooks>({ hookName, parameters }: { hookName: H; parameters?: PluginParameter<H> }): Promise<void> {
const promises = this.#getSortedPlugins().map((plugin) => {
const promises = plugins.map((plugin) => {
return () =>

@@ -449,6 +443,9 @@ this.#execute({

plugin,
message,
})
})
return this.#promiseManager.run('seq', promises)
await this.#promiseManager.run('seq', promises)
this.logger.emit('progress_stop', { id: hookName })
}
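The removed `hookReduceArg0` threaded the first argument through every plugin: each plugin received the previous result as its first parameter, with `reduce` deciding what to carry forward, while the new `hookSeq` simply awaits each hook in plugin order. A conceptual sketch of the removed reduce behaviour (not the original implementation):

// Conceptual sketch: thread arg0 through each plugin's hook, letting `reduce` merge results.
async function reduceArg0<TArg>(
  initial: TArg,
  hooks: Array<(arg0: TArg) => Promise<TArg | null>>,
  reduce: (previous: TArg, result: TArg | null) => TArg,
): Promise<TArg> {
  let arg0 = initial
  for (const hook of hooks) {
    const result = await hook(arg0)
    arg0 = reduce(arg0, result)
  }
  return arg0
}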

@@ -460,10 +457,3 @@

if (hookName) {
if (this.logger.logLevel === LogLevel.info) {
const containsHookName = plugins.some((item) => item[hookName])
if (!containsHookName) {
this.logger.emit('warning', `No hook ${hookName} found`)
}
}
return plugins.filter((item) => item[hookName])
return plugins.filter((plugin) => hookName in plugin)
}

@@ -500,3 +490,3 @@ // TODO add test case for sorting with pre/post

const pluginByPluginName = plugins
.filter((plugin) => plugin[hookName])
.filter((plugin) => hookName in plugin)
.filter((item) => {

@@ -518,8 +508,14 @@ const [name, identifier] = item.key

const corePlugin = plugins.find((plugin) => plugin.name === 'core' && plugin[hookName])
const corePlugin = plugins.find((plugin) => plugin.name === 'core' && hookName in plugin)
if (corePlugin) {
this.logger.emit('debug', [`No hook '${hookName}' for pluginKey '${JSON.stringify(pluginKey)}' found, falling back on the '@kubb/core' plugin`])
this.logger.emit('debug', {
date: new Date(),
logs: [`No hook '${hookName}' for pluginKey '${JSON.stringify(pluginKey)}' found, falling back on the '@kubb/core' plugin`],
})
} else {
this.logger.emit('debug', [`No hook '${hookName}' for pluginKey '${JSON.stringify(pluginKey)}' found, no fallback found in the '@kubb/core' plugin`])
this.logger.emit('debug', {
date: new Date(),
logs: [`No hook '${hookName}' for pluginKey '${JSON.stringify(pluginKey)}' found, no fallback found in the '@kubb/core' plugin`],
})
}

@@ -536,2 +532,4 @@ return corePlugin ? [corePlugin] : []

this.executed.push(executer)
this.logger.emit('progressed', { id: executer.hookName, message: `${executer.plugin.name}: ${executer.message}` })
}

@@ -552,2 +550,3 @@ }

plugin,
message,
}: {

@@ -558,2 +557,3 @@ strategy: Strategy

plugin: PluginWithLifeCycle
message: string
}): Promise<ReturnType<ParseResult<H>> | null> | null {

@@ -567,8 +567,11 @@ const hook = plugin[hookName]

this.events.emit('execute', { strategy, hookName, parameters, plugin })
this.events.emit('executing', { strategy, hookName, parameters, plugin, message })
const promise = new Promise((resolve) => {
resolve(undefined)
})
const task = Promise.resolve()
const task = promise
.then(() => {
if (typeof hook === 'function') {
const possiblePromiseResult = (hook as Function).apply({ ...this.#core.api, plugin }, parameters) as Promise<ReturnType<ParseResult<H>>>
const possiblePromiseResult = (hook as Function).apply({ ...this.#core.context, plugin }, parameters) as Promise<ReturnType<ParseResult<H>>>

@@ -592,2 +595,3 @@ if (isPromise(possiblePromiseResult)) {

plugin,
message,
})

@@ -618,2 +622,3 @@

plugin,
message,
}: {

@@ -624,2 +629,3 @@ strategy: Strategy

plugin: PluginWithLifeCycle
message: string
}): ReturnType<ParseResult<H>> | null {

@@ -633,9 +639,19 @@ const hook = plugin[hookName]

this.events.emit('execute', { strategy, hookName, parameters, plugin })
this.events.emit('executing', { strategy, hookName, parameters, plugin, message })
try {
if (typeof hook === 'function') {
const fn = (hook as Function).apply({ ...this.#core.api, plugin }, parameters) as ReturnType<ParseResult<H>>
const fn = (hook as Function).apply({ ...this.#core.context, plugin }, parameters) as ReturnType<ParseResult<H>>
output = fn
this.#addExecutedToCallStack({
parameters,
output,
strategy,
hookName,
plugin,
message,
})
return fn

@@ -652,2 +668,3 @@ }

plugin,
message,
})

@@ -673,3 +690,3 @@

pluginManager: PluginManager,
context: PluginCore['api'] | undefined,
context: PluginCore['context'] | undefined,
): Plugin<GetPluginFactoryOptions<TPlugin>> {

@@ -682,16 +699,7 @@ const usedPluginNames = pluginManager.#usedPluginNames

// default transform
if (!plugin.transform) {
plugin.transform = function transform(_path, code) {
return code
}
}
if (plugin.api && typeof plugin.api === 'function') {
const api = (plugin.api as Function).call(context) as typeof plugin.api
if (plugin.context && typeof plugin.context === 'function') {
return {
...plugin,
key,
api,
context: (plugin.context as Function).call(context) as typeof plugin.context,
} as unknown as Plugin<GetPluginFactoryOptions<TPlugin>>

@@ -729,4 +737,4 @@ }

static get hooks() {
return ['buildStart', 'resolvePath', 'resolveName', 'load', 'transform', 'writeFile', 'buildEnd'] as const
return ['buildStart', 'resolvePath', 'resolveName', 'buildEnd'] as const
}
}

@@ -45,3 +45,3 @@ import { hookFirst, hookParallel, hookSeq } from './utils/executeStrategies.ts'

export function isPromiseFulfilledResult<T = unknown>(result: PromiseSettledResult<unknown>): result is PromiseFulfilledResult<T> {
function isPromiseFulfilledResult<T = unknown>(result: PromiseSettledResult<unknown>): result is PromiseFulfilledResult<T> {
return result.status === 'fulfilled'

@@ -48,0 +48,0 @@ }

@@ -32,11 +32,1 @@ export function escape(text?: string): string {

}
export function escapeStringRegexp(string: string) {
if (typeof string !== 'string') {
throw new TypeError('Expected a string')
}
// Escape characters with special meaning either inside or outside character sets.
// Use a simple backslash escape when it’s always valid, and a `\xnn` escape when the simpler form would be disallowed by Unicode patterns’ stricter grammar.
return string.replace(/[|\\{}()[\]^$+*?.]/g, '\\$&').replace(/-/g, '\\x2d')
}
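`escapeStringRegexp` is newly inlined here; it backslash-escapes every regex metacharacter and turns `-` into `\x2d` so the value stays valid inside character classes. For example:

// The escaped value can be embedded safely inside a dynamic RegExp.
const pattern = new RegExp(`^${escapeStringRegexp('pet.get')}$`)
pattern.test('pet.get') // true
pattern.test('petXget') // false — the '.' is matched literally, not as a wildcard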

@@ -14,3 +14,3 @@ import { orderBy } from 'natural-orderby'

import { transformReservedWord } from './transformReservedWord.ts'
import { trim, trimExtName, trimQuotes } from './trim.ts'
import { trim, trimQuotes } from './trim.ts'

@@ -27,3 +27,3 @@ export { camelCase, pascalCase, pathCase } from './casing.ts'

export { transformReservedWord } from './transformReservedWord.ts'
export { trim, trimExtName, trimQuotes } from './trim.ts'
export { trim, trimQuotes } from './trim.ts'
export { merge } from 'remeda'

@@ -46,3 +46,2 @@ export { orderBy } from 'natural-orderby'

trimQuotes,
trimExtName,
JSDoc: {

@@ -49,0 +48,0 @@ createJSDocBlockText,

import { trimQuotes } from './trim'
export function stringify(value: string | number | undefined): string {
export function stringify(value: string | number | boolean | undefined): string {
if (value === undefined || value === null) {

@@ -5,0 +5,0 @@ return '""'

@@ -19,5 +19,1 @@ export function trim(text: string): string {

}
export function trimExtName(text: string): string {
return text.replace(/\.[^/.]+$/, '')
}
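`trimExtName`, removed above (and dropped from the `trim.ts` exports earlier in this diff), stripped the last extension from a path-like string. For reference, its behaviour was:

trimExtName('pet.ts')          // 'pet'
trimExtName('models/pet.json') // 'models/pet'
trimExtName('models/pet')      // 'models/pet' (no extension, unchanged)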

@@ -5,12 +5,9 @@ import type * as KubbFile from '@kubb/fs/types'

import type { PluginManager } from './PluginManager.ts'
import type { OptionsPlugins, PluginUnion } from './kubb.ts'
import type { Logger } from './logger.ts'
import type { Cache } from './utils/cache.ts'
// config
/**
* Config used in `kubb.config.js`
* Config used in `kubb.config.ts`
*
* @example import { defineConfig } from '@kubb/core'
* @example
* import { defineConfig } from '@kubb/core'
* export default defineConfig({

@@ -22,4 +19,3 @@ * ...

/**
* Project root directory. Can be an absolute path, or a path relative from
* the location of the config file itself.
* The project root directory, which can be either an absolute path or a path relative to the location of your `kubb.config.ts` file.
* @default process.cwd()

@@ -29,7 +25,5 @@ */

/**
* Plugin type can be KubbJSONPlugin or Plugin
* Example: ['@kubb/plugin-oas', { output: false }]
* Or: pluginOas({ output: false })
* An array of Kubb plugins used for generation. Each plugin may have additional configurable options (defined within the plugin itself). If a plugin relies on another plugin, an error will occur if the required dependency is missing. Refer to “pre” for more details.
*/
plugins?: Array<Omit<UnknownUserPlugin, 'api'> | UnionPlugins | [name: string, options: object]>
plugins?: Array<Omit<UnknownUserPlugin, 'context'>>
}

@@ -39,3 +33,3 @@

/**
* Path to be used as the input. This can be an absolute path or a path relative to the `root`.
* Specify your Swagger/OpenAPI file, either as an absolute path or a path relative to the root.
*/

@@ -47,3 +41,3 @@ path: string

/**
* `string` or `object` containing the data.
* A `string` or `object` that contains your Swagger/OpenAPI data.
*/

@@ -55,2 +49,4 @@ data: string | unknown

export type BarrelType = 'all' | 'named'
/**

@@ -61,33 +57,45 @@ * @private

/**
* Optional config name to show in CLI output
* The name to display in the CLI output.
*/
name?: string
/**
* Project root directory. Can be an absolute path, or a path relative from
* the location of the config file itself.
* The project root directory, which can be either an absolute path or a path relative to the location of your `kubb.config.ts` file.
* @default process.cwd()
*/
root: string
/**
* You can use either `input.path` or `input.data`, depending on your specific needs.
*/
input: TInput
output: {
/**
* Path to be used to export all generated files.
* This can be an absolute path, or a path relative based of the defined `root` option.
* The path where all generated files will be exported.
* This can be an absolute path or a path relative to the specified root option.
*/
path: string
/**
* Clean output directory before each build.
* Clean the output directory before each build.
*/
clean?: boolean
/**
* Write files to the fileSystem
* This is being used for the playground.
* Save files to the file system.
* @default true
*/
write?: boolean
/**
* Override the extension used for generated imports and exports; by default each plugin adds its own extension.
* @default { '.ts': '.ts'}
*/
extension?: Record<KubbFile.Extname, KubbFile.Extname>
/**
* Specify how `index.ts` files should be created. You can also disable the generation of barrel files here. While each plugin has its own `barrelType` option, this setting controls the creation of the root barrel file, such as `src/gen/index.ts`.
* @default 'named'
*/
barrelType?: BarrelType | false
}
/**
* Array of Kubb plugins to use.
* The plugin/package can forsee some options that you need to pass through.
* Sometimes a plugin is depended on another plugin, if that's the case you will get an error back from the plugin you installed.
* An array of Kubb plugins that will be used in the generation.
* Each plugin may include additional configurable options (defined in the plugin itself).
* If a plugin depends on another plugin, an error will be returned if the required dependency is missing. See `pre` for more details.
*/
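Pulling the documented options together, a minimal `kubb.config.ts` exercising the fields described above might look like this (the input file name is illustrative):

import { defineConfig } from '@kubb/core'

export default defineConfig({
  root: '.',
  input: {
    path: './petStore.yaml', // hypothetical OpenAPI file, relative to `root`
  },
  output: {
    path: './src/gen',
    clean: true,
    barrelType: 'named', // controls the root barrel file, e.g. src/gen/index.ts
  },
  plugins: [],
})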

@@ -109,6 +117,2 @@ plugins?: Array<Plugin>

export type UnionPlugins = PluginUnion
export type ObjectPlugin = keyof OptionsPlugins
export type PluginFactoryOptions<

@@ -128,5 +132,5 @@ /**

/**
* API that you want to expose to other plugins.
* Context that you want to expose to other plugins.
*/
TAPI = any,
TContext = any,
/**

@@ -144,3 +148,3 @@ * When calling `resolvePath` you can specify better types.

resolvedOptions: TResolvedOptions
api: TAPI
context: TContext
resolvePathOptions: TResolvePathOptions

@@ -166,3 +170,3 @@ }

* Specifies the preceding plugins for the current plugin. You can pass an array of preceding plugin names, and the current plugin will be executed after these plugins.
* Can be used to validate depended plugins.
* Can be used to validate dependent plugins.
*/

@@ -174,8 +178,8 @@ pre?: Array<string>

post?: Array<string>
} & (TOptions['api'] extends never
} & (TOptions['context'] extends never
? {
api?: never
context?: never
}
: {
api: (this: TOptions['name'] extends 'core' ? null : Omit<PluginContext<TOptions>, 'addFile'>) => TOptions['api']
context: (this: TOptions['name'] extends 'core' ? null : Omit<PluginContext<TOptions>, 'addFile'>) => TOptions['context']
})

@@ -200,3 +204,3 @@

* Specifies the preceding plugins for the current plugin. You can pass an array of preceding plugin names, and the current plugin will be executed after these plugins.
* Can be used to validate depended plugins.
* Can be used to validate dependent plugins.
*/

@@ -213,10 +217,10 @@ pre?: Array<string>

/**
* Define an api that can be used by other plugins, see `PluginManager' where we convert from `UserPlugin` to `Plugin`(used when calling `createPlugin`).
* Define a context that can be used by other plugins, see `PluginManager` where we convert from `UserPlugin` to `Plugin` (used when calling `createPlugin`).
*/
} & (TOptions['api'] extends never
} & (TOptions['context'] extends never
? {
api?: never
context?: never
}
: {
api: TOptions['api']
context: TOptions['context']
})
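With the rename from `api` to `context`, a plugin now exposes its shared surface through a `context()` factory; `PluginManager` calls that factory (with the plugin context as `this`, except for the `core` plugin) and stores the returned object on the parsed plugin. A hedged sketch of the shape, with illustrative names and without the full generics:

// Illustrative only: names and the exposed helper are hypothetical.
const examplePlugin = {
  name: 'plugin-example',
  options: {},
  context(this: any) {
    // `this` is Omit<PluginContext, 'addFile'>, so helpers exposed here can lean on it.
    return {
      resolveTypeName: (name: string) => this.resolveName({ name, type: 'type' }),
    }
  },
}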

@@ -238,3 +242,8 @@

*/
resolvePath?: (this: PluginContext<TOptions>, baseName: string, mode?: KubbFile.Mode, options?: TOptions['resolvePathOptions']) => KubbFile.OptionalPath
resolvePath?: (
this: PluginContext<TOptions>,
baseName: KubbFile.BaseName,
mode?: KubbFile.Mode,
options?: TOptions['resolvePathOptions'],
) => KubbFile.OptionalPath
/**

@@ -248,17 +257,2 @@ * Resolve to a name based on a string.

/**
* Makes it possible to run async logic to override the path defined previously by `resolvePath`.
* @type hookFirst
*/
load?: (this: Omit<PluginContext<TOptions>, 'addFile'>, path: KubbFile.Path) => PossiblePromise<TransformResult | null>
/**
* Transform the source-code.
* @type hookReduceArg0
*/
transform?: (this: Omit<PluginContext<TOptions>, 'addFile'>, source: string, path: KubbFile.Path) => PossiblePromise<TransformResult>
/**
* Write the result to the file-system based on the id(defined by `resolvePath` or changed by `load`).
* @type hookParallel
*/
writeFile?: (this: Omit<PluginContext<TOptions>, 'addFile'>, path: KubbFile.Path, source: string | undefined) => PossiblePromise<string | void>
/**
* End of the plugin lifecycle.

@@ -274,7 +268,5 @@ * @type hookParallel

export type PluginCache = Record<string, [number, unknown]>
export type ResolvePathParams<TOptions = object> = {
pluginKey?: Plugin['key']
baseName: string
baseName: KubbFile.BaseName
mode?: KubbFile.Mode

@@ -292,6 +284,7 @@ /**

* `file` will be used to customize the name of the created file(use of camelCase)
* `function` can be used used to customize the exported functions(use of camelCase)
* `function` can be used to customize the exported functions(use of camelCase)
* `type` is a special type for TypeScript(use of PascalCase)
* `const` can be used for variables(use of camelCase)
*/
type?: 'file' | 'function' | 'type'
type?: 'file' | 'function' | 'type' | 'const'
}

@@ -301,6 +294,5 @@

config: Config
cache: Cache<PluginCache>
fileManager: FileManager
pluginManager: PluginManager
addFile: (...file: Array<KubbFile.File>) => Promise<Array<KubbFile.File>>
addFile: (...file: Array<KubbFile.File>) => Promise<Array<KubbFile.ResolvedFile>>
resolvePath: (params: ResolvePathParams<TOptions['resolvePathOptions']>) => KubbFile.OptionalPath

@@ -318,4 +310,38 @@ resolveName: (params: ResolveNameParams) => string

}
/**
* Specify the export location for the files and define the behavior of the output
*/
export type Output = {
/**
* Path to the output folder or file that will contain the generated code
*/
path: string
/**
* Define what needs to be exported, here you can also disable the export of barrel files
* @default 'named'
*/
barrelType?: BarrelType | false
/**
* Add a banner text in the beginning of every file
*/
banner?: string
/**
* Add a footer text at the end of every file
*/
footer?: string
}
// null will mean clear the watcher for this key
export type TransformResult = string | null
type GroupContext = {
group: string
}
export type Group = {
/**
* Define the type on which to group the files
*/
type: 'tag'
/**
* Return the name of a group based on the group name; this will be used for file and name generation
*/
name?: (context: GroupContext) => string
}
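The `Output` and `Group` types above are newly exported from the core types; a small hedged example of how they could be filled in (values are illustrative):

const output: Output = {
  path: './src/gen',
  barrelType: 'named',
  banner: '/* eslint-disable */',      // prepended to every generated file
  footer: '/* end of generated code */', // appended to every generated file
}

const group: Group = {
  type: 'tag',
  // Derive the group name used for file and name generation from the OpenAPI tag.
  name: ({ group }) => `${group}Service`,
}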

@@ -10,3 +10,3 @@ import { EventEmitter as NodeEventEmitter } from 'node:events'

emit<TEventName extends keyof TEvents & string>(eventName: TEventName, ...eventArg: TEvents[TEventName]): void {
this.#emitter.emit(eventName, ...(eventArg as []))
this.#emitter.emit(eventName, ...(eventArg as any))
}

@@ -13,0 +13,0 @@

type PromiseFunc<T = unknown, T2 = never> = (state?: T) => T2 extends never ? Promise<T> : Promise<T> | T2
export type ValueOfPromiseFuncArray<TInput extends Array<unknown>> = TInput extends Array<PromiseFunc<infer X, infer Y>> ? X | Y : never
type ValueOfPromiseFuncArray<TInput extends Array<unknown>> = TInput extends Array<PromiseFunc<infer X, infer Y>> ? X | Y : never
export function noReturn(): void {}
function noReturn(): void {}
type SeqOutput<TInput extends Array<PromiseFunc<TValue, null>>, TValue> = Array<Awaited<ValueOfPromiseFuncArray<TInput>>>
type SeqOutput<TInput extends Array<PromiseFunc<TValue, null>>, TValue> = Promise<Array<Awaited<ValueOfPromiseFuncArray<TInput>>>>

@@ -9,0 +9,0 @@ /**

@@ -13,2 +13,3 @@ export type { FunctionParamsAST } from './FunctionParams.ts'

export { URLPath } from './URLPath.ts'
export { getParser } from './getParser.ts'
export { getFileParser, createFileImport, createFileExport, createFile, createFileParser } from './parser.ts'
export type { ParserModule } from './parser.ts'

@@ -1,20 +0,21 @@

import dirTree from 'directory-tree'
import type * as KubbFile from '@kubb/fs/types'
import { FileManager } from '../FileManager.ts'
import type * as KubbFile from '@kubb/fs/types'
import type { DirectoryTree, DirectoryTreeOptions } from 'directory-tree'
type BarrelData = {
file?: KubbFile.File
/**
* @deprecated use file instead
*/
type: KubbFile.Mode
path: string
name: string
}
export type TreeNodeOptions = DirectoryTreeOptions
export class TreeNode {
data: BarrelData
parent?: TreeNode
children: Array<TreeNode> = []
#cachedLeaves?: Array<TreeNode> = undefined
type BarrelData = { type: KubbFile.Mode; path: KubbFile.Path; name: string }
export class TreeNode<T = BarrelData> {
public data: T
public parent?: TreeNode<T>
public children: Array<TreeNode<T>> = []
constructor(data: T, parent?: TreeNode<T>) {
constructor(data: BarrelData, parent?: TreeNode) {
this.data = data

@@ -25,3 +26,3 @@ this.parent = parent

addChild(data: T): TreeNode<T> {
addChild(data: BarrelData): TreeNode {
const child = new TreeNode(data, this)

@@ -35,24 +36,10 @@ if (!this.children) {

find(data?: T): TreeNode<T> | null {
if (!data) {
return null
}
if (data === this.data) {
get root(): TreeNode {
if (!this.parent) {
return this
}
if (this.children?.length) {
for (let i = 0, { length } = this.children, target: TreeNode<T> | null = null; i < length; i++) {
target = this.children[i]!.find(data)
if (target) {
return target
}
}
}
return null
return this.parent.root
}
get leaves(): TreeNode<T>[] {
get leaves(): Array<TreeNode> {
if (!this.children || this.children.length === 0) {

@@ -63,4 +50,8 @@ // this is a leaf

if (this.#cachedLeaves) {
return this.#cachedLeaves
}
// if not a leaf, return all children's leaves recursively
const leaves: TreeNode<T>[] = []
const leaves: TreeNode[] = []
if (this.children) {

@@ -71,13 +62,9 @@ for (let i = 0, { length } = this.children; i < length; i++) {

}
this.#cachedLeaves = leaves
return leaves
}
get root(): TreeNode<T> {
if (!this.parent) {
return this
}
return this.parent.root
}
forEach(callback: (treeNode: TreeNode<T>) => void): this {
forEach(callback: (treeNode: TreeNode) => void): this {
if (typeof callback !== 'function') {

@@ -100,9 +87,37 @@ throw new TypeError('forEach() callback must be a function')

public static build(path: string, options: TreeNodeOptions = {}): TreeNode | null {
findDeep(predicate?: (value: TreeNode, index: number, obj: TreeNode[]) => boolean): TreeNode | undefined {
if (typeof predicate !== 'function') {
throw new TypeError('find() predicate must be a function')
}
return this.leaves.find(predicate)
}
forEachDeep(callback: (treeNode: TreeNode) => void): void {
if (typeof callback !== 'function') {
throw new TypeError('forEach() callback must be a function')
}
this.leaves.forEach(callback)
}
filterDeep(callback: (treeNode: TreeNode) => boolean): Array<TreeNode> {
if (typeof callback !== 'function') {
throw new TypeError('filter() callback must be a function')
}
return this.leaves.filter(callback)
}
mapDeep<T>(callback: (treeNode: TreeNode) => T): Array<T> {
if (typeof callback !== 'function') {
throw new TypeError('map() callback must be a function')
}
return this.leaves.map(callback)
}
public static build(files: KubbFile.File[], root?: string): TreeNode | null {
try {
const exclude = Array.isArray(options.exclude) ? options.exclude : [options.exclude].filter(Boolean)
const filteredTree = dirTree(path, {
extensions: options.extensions,
exclude: [/node_modules/, ...exclude],
})
const filteredTree = buildDirectoryTree(files, root)

@@ -116,2 +131,3 @@ if (!filteredTree) {

path: filteredTree.path,
file: filteredTree.file,
type: FileManager.getMode(filteredTree.path),

@@ -124,2 +140,3 @@ })

path: item.path,
file: item.file,
type: FileManager.getMode(item.path),

@@ -139,5 +156,70 @@ })

} catch (e) {
throw new Error('Something went wrong with creating index files with the TreehNode class', { cause: e })
throw new Error('Something went wrong with creating barrel files with the TreeNode class', { cause: e })
}
}
}
export type DirectoryTree = {
name: string
path: string
file?: KubbFile.File
children: Array<DirectoryTree>
}
export function buildDirectoryTree(files: Array<KubbFile.File>, rootFolder = ''): DirectoryTree | null {
const rootPrefix = rootFolder.endsWith('/') ? rootFolder : `${rootFolder}/`
const filteredFiles = files.filter((file) => (rootFolder ? file.path.startsWith(rootPrefix) && !file.path.endsWith('.json') : !file.path.endsWith('.json')))
if (filteredFiles.length === 0) {
return null // No files match the root folder
}
const root: DirectoryTree = {
name: rootFolder || '',
path: rootFolder || '',
children: [],
}
filteredFiles.forEach((file) => {
const path = file.path.slice(rootFolder.length)
const parts = path.split('/')
let currentLevel: DirectoryTree[] = root.children
let currentPath = rootFolder
parts.forEach((part, index) => {
if (index !== 0) {
currentPath += `/${part}`
} else {
currentPath += `${part}`
}
let existingNode = currentLevel.find((node) => node.name === part)
if (!existingNode) {
if (index === parts.length - 1) {
// If it's the last part, it's a file
existingNode = {
name: part,
file,
path: currentPath,
} as DirectoryTree
} else {
// Otherwise, it's a folder
existingNode = {
name: part,
path: currentPath,
children: [],
} as DirectoryTree
}
currentLevel.push(existingNode)
}
// Move to the next level if it's a folder
if (!existingNode.file) {
currentLevel = existingNode.children
}
})
})
return root
}
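`TreeNode.build` now consumes the in-memory list of generated files through `buildDirectoryTree` instead of walking the file system with `directory-tree`. A hedged usage sketch (file objects are trimmed down to the fields these helpers actually read):

// Illustrative files: only `path` (plus `baseName`/`sources` to satisfy the KubbFile.File shape) are set.
const files = [
  { path: 'src/gen/models/pet.ts', baseName: 'pet.ts', sources: [] },
  { path: 'src/gen/models/order.ts', baseName: 'order.ts', sources: [] },
  { path: 'src/gen/index.ts', baseName: 'index.ts', sources: [] },
] as unknown as KubbFile.File[]

const tree = buildDirectoryTree(files, 'src/gen/')
// tree?.children -> a 'models' folder node (two file leaves) plus an 'index.ts' file node

const root = TreeNode.build(files, 'src/gen/')
root?.forEachDeep((leaf) => console.log(leaf.data.path))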

