@effect/io: npm package version comparison

Comparing version 0.31.4 to 0.32.0

Cause.d.ts

@@ -114,3 +114,3 @@ /**

*/
export type Cause<E> = (Empty | Fail<E> | Die | Interrupt | Annotated<E> | Sequential<E> | Parallel<E>) & Pipeable<Cause<E>>;
export type Cause<E> = Empty | Fail<E> | Die | Interrupt | Annotated<E> | Sequential<E> | Parallel<E>;
/**

@@ -215,3 +215,3 @@ * @since 1.0.0

*/
export interface Empty extends Cause.Variance<never>, Equal.Equal, Pipeable<Empty> {
export interface Empty extends Cause.Variance<never>, Equal.Equal, Pipeable {
readonly _tag: "Empty";

@@ -226,3 +226,3 @@ }

*/
export interface Fail<E> extends Cause.Variance<E>, Equal.Equal, Pipeable<Fail<E>> {
export interface Fail<E> extends Cause.Variance<E>, Equal.Equal, Pipeable {
readonly _tag: "Fail";

@@ -239,3 +239,3 @@ readonly error: E;

*/
export interface Die extends Cause.Variance<never>, Equal.Equal, Pipeable<Die> {
export interface Die extends Cause.Variance<never>, Equal.Equal, Pipeable {
readonly _tag: "Die";

@@ -251,3 +251,3 @@ readonly defect: unknown;

*/
export interface Interrupt extends Cause.Variance<never>, Equal.Equal, Pipeable<Interrupt> {
export interface Interrupt extends Cause.Variance<never>, Equal.Equal, Pipeable {
readonly _tag: "Interrupt";

@@ -265,3 +265,3 @@ readonly fiberId: FiberId.FiberId;

*/
export interface Annotated<E> extends Cause.Variance<E>, Equal.Equal, Pipeable<Annotated<E>> {
export interface Annotated<E> extends Cause.Variance<E>, Equal.Equal, Pipeable {
readonly _tag: "Annotated";

@@ -284,3 +284,3 @@ readonly cause: Cause<E>;

*/
export interface Parallel<E> extends Cause.Variance<E>, Equal.Equal, Pipeable<Parallel<E>> {
export interface Parallel<E> extends Cause.Variance<E>, Equal.Equal, Pipeable {
readonly _tag: "Parallel";

@@ -302,3 +302,3 @@ readonly left: Cause<E>;

*/
export interface Sequential<E> extends Cause.Variance<E>, Equal.Equal, Pipeable<Sequential<E>> {
export interface Sequential<E> extends Cause.Variance<E>, Equal.Equal, Pipeable {
readonly _tag: "Sequential";

@@ -305,0 +305,0 @@ readonly left: Cause<E>;
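Throughout these Cause.d.ts hunks (and the similar interface hunks below), 0.32.0 drops the type parameter from Pipeable: variants now extend the plain Pipeable from @effect/data/Pipeable, and the Cause<E> union is no longer intersected with Pipeable<Cause<E>>. Call sites that use the .pipe(...) method are unaffected; only the declared supertype changes. A minimal, hypothetical sketch of the shape difference (simplified single-function pipe, not the actual @effect/data definitions):

```typescript
// Hypothetical, simplified interfaces to illustrate the change in the hunks above.

// 0.31.x style: the data type is threaded through a type parameter.
interface PipeableOld<Self> {
  pipe<B>(f: (self: Self) => B): B
}

// 0.32.0 style: no type parameter; the polymorphic `this` type supplies the input.
interface PipeableNew {
  pipe<B>(f: (self: this) => B): B
}

// Usage is identical either way:
declare const cause: PipeableNew
const rendered = cause.pipe((self) => String(self))
```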

@@ -41,3 +41,3 @@ /**

*/
export interface Config<A> extends Config.Variance<A>, Pipeable<Config<A>> {
export interface Config<A> extends Config.Variance<A>, Pipeable {
}

@@ -44,0 +44,0 @@ /**

@@ -39,3 +39,3 @@ /**

*/
export interface ConfigProvider extends ConfigProvider.Proto, Pipeable<ConfigProvider> {
export interface ConfigProvider extends ConfigProvider.Proto, Pipeable {
/**

@@ -164,3 +164,3 @@ * Loads the specified configuration, or fails with a config error.

*/
export declare const contramapPath: {
export declare const mapInputPath: {
(f: (path: string) => string): (self: ConfigProvider) => ConfigProvider;

@@ -167,0 +167,0 @@ (self: ConfigProvider, f: (path: string) => string): ConfigProvider;
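ConfigProvider.contramapPath is renamed to mapInputPath with an unchanged signature. An illustrative migration, assuming fromEnv() can be called without options; the prefixing function is hypothetical:

```typescript
import * as ConfigProvider from "@effect/io/ConfigProvider"

// 0.31.x:
// const provider = ConfigProvider.contramapPath(ConfigProvider.fromEnv(), (path) => `APP_${path}`)

// 0.32.0: same behavior, new name
const provider = ConfigProvider.mapInputPath(
  ConfigProvider.fromEnv(),
  (path) => `APP_${path}`
)
```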

@@ -6,3 +6,3 @@ "use strict";

});
exports.within = exports.upperCase = exports.unnested = exports.snakeCase = exports.orElse = exports.nested = exports.makeFlat = exports.make = exports.lowerCase = exports.kebabCase = exports.fromMap = exports.fromFlat = exports.fromEnv = exports.contramapPath = exports.constantCase = exports.FlatConfigProviderTypeId = exports.ConfigProviderTypeId = exports.ConfigProvider = void 0;
exports.within = exports.upperCase = exports.unnested = exports.snakeCase = exports.orElse = exports.nested = exports.mapInputPath = exports.makeFlat = exports.make = exports.lowerCase = exports.kebabCase = exports.fromMap = exports.fromFlat = exports.fromEnv = exports.constantCase = exports.FlatConfigProviderTypeId = exports.ConfigProviderTypeId = exports.ConfigProvider = void 0;
var internal = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/io/internal/configProvider"));

@@ -94,3 +94,3 @@ function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }

exports.constantCase = constantCase;
const contramapPath = internal.contramapPath;
const mapInputPath = internal.mapInputPath;
/**

@@ -105,3 +105,3 @@ * Returns a new config provider that will automatically convert all property

*/
exports.contramapPath = contramapPath;
exports.mapInputPath = mapInputPath;
const kebabCase = internal.kebabCase;

@@ -108,0 +108,0 @@ /**

@@ -33,3 +33,3 @@ /**

*/
export interface Deferred<E, A> extends Deferred.Variance<E, A>, Pipeable<Deferred<E, A>> {
export interface Deferred<E, A> extends Deferred.Variance<E, A>, Pipeable {
}

@@ -36,0 +36,0 @@ /**

@@ -6,3 +6,3 @@ /**

import type * as Option from "@effect/data/Option";
import type { Pipeable, PipeableOverride } from "@effect/data/Pipeable";
import type { Pipeable } from "@effect/data/Pipeable";
import type { Predicate } from "@effect/data/Predicate";

@@ -23,3 +23,3 @@ import type * as Unify from "@effect/data/Unify";

*/
export type Exit<E, A> = (Failure<E, A> | Success<E, A>) & Pipeable<Exit<E, A>>;
export type Exit<E, A> = Failure<E, A> | Success<E, A>;
/**

@@ -32,3 +32,3 @@ * Represents a failed `Effect` workflow containing the `Cause` of the failure

*/
export interface Failure<E, A> extends PipeableOverride<Effect.Effect<never, E, A>, Failure<E, A>> {
export interface Failure<E, A> extends Effect.Effect<never, E, A>, Pipeable {
readonly _tag: "Failure";

@@ -63,3 +63,3 @@ readonly cause: Cause.Cause<E>;

*/
export interface Success<E, A> extends PipeableOverride<Effect.Effect<never, E, A>, Success<E, A>> {
export interface Success<E, A> extends Effect.Effect<never, E, A>, Pipeable {
readonly _tag: "Success";

@@ -66,0 +66,0 @@ readonly value: A;

@@ -49,3 +49,3 @@ /**

*/
export interface Fiber<E, A> extends Fiber.Variance<E, A>, Pipeable<Fiber<E, A>> {
export interface Fiber<E, A> extends Fiber.Variance<E, A>, Pipeable {
/**

@@ -52,0 +52,0 @@ * The identity of the fiber.

@@ -38,3 +38,3 @@ /**

*/
export interface FiberRef<A> extends Variance<A>, Pipeable<FiberRef<A>> {
export interface FiberRef<A> extends Variance<A>, Pipeable {
}

@@ -41,0 +41,0 @@ /**

@@ -156,3 +156,3 @@ "use strict";

exports.updateSomeAndGet = updateSomeAndGet;
const currentRequestBatchingEnabled = core.currentRequestBatchingEnabled;
const currentRequestBatchingEnabled = core.currentRequestBatching;
/**

@@ -159,0 +159,0 @@ * @since 1.0.0

@@ -30,3 +30,3 @@ /**

*/
export interface FiberRefs extends Pipeable<FiberRefs> {
export interface FiberRefs extends Pipeable {
readonly [FiberRefsSym]: FiberRefsSym;

@@ -33,0 +33,0 @@ readonly locals: Map<FiberRef.FiberRef<any>, Arr.NonEmptyReadonlyArray<readonly [FiberId.Runtime, any]>>;

/**
* @since 1.0.0
*/
import type { PipeableOverride } from "@effect/data/Pipeable";
import type { Pipeable } from "@effect/data/Pipeable";
import type * as Effect from "@effect/io/Effect";

@@ -16,3 +16,3 @@ import type * as Queue from "@effect/io/Queue";

*/
export interface Hub<A> extends PipeableOverride<Queue.Enqueue<A>, Hub<A>> {
export interface Hub<A> extends Queue.Enqueue<A>, Pipeable {
/**

@@ -19,0 +19,0 @@ * Publishes a message to the hub, returning whether the message was published

@@ -477,3 +477,3 @@ "use strict";

exports.make = make;
const makeWith = options => core.map(fiberRuntime.all(core.context(), core.fiberId), ([context, fiberId]) => new CacheImpl(options.capacity, context, fiberId, options.lookup, exit => Duration.decode(options.timeToLive(exit))));
const makeWith = options => core.map(fiberRuntime.all([core.context(), core.fiberId]), ([context, fiberId]) => new CacheImpl(options.capacity, context, fiberId, options.lookup, exit => Duration.decode(options.timeToLive(exit))));
/** @internal */

@@ -480,0 +480,0 @@ exports.makeWith = makeWith;
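The internal all combinator now takes its effects as a single array instead of variadic arguments (the same change shows up in the hub, keyed pool, and pool hunks below). Assuming the public Effect.all follows the same convention, a call would look like:

```typescript
import * as Effect from "@effect/io/Effect"

// 0.31.x (variadic): Effect.all(Effect.succeed(1), Effect.succeed("a"))
// 0.32.0: effects are passed as one collection
const pair = Effect.all([Effect.succeed(1), Effect.succeed("a")])
```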

@@ -11,3 +11,3 @@ "use strict";

/** @internal */
const match = (options, sequential, unbounded, withLimit) => {
const match = (options, sequential, unbounded, bounded) => {
let effect;

@@ -27,3 +27,3 @@ switch (options?.concurrency) {

{
effect = core.fiberRefGetWith(core.currentConcurrency, concurrency => concurrency._tag === "None" ? unbounded() : concurrency.value > 1 ? withLimit(concurrency.value) : sequential());
effect = core.fiberRefGetWith(core.currentConcurrency, concurrency => concurrency === "unbounded" ? unbounded() : concurrency > 1 ? bounded(concurrency) : sequential());
break;

@@ -33,11 +33,11 @@ }

{
effect = options.concurrency > 1 ? withLimit(options.concurrency) : sequential();
effect = options.concurrency > 1 ? bounded(options.concurrency) : sequential();
break;
}
}
return options?.batchRequests !== undefined && options.batchRequests !== "inherit" ? core.fiberRefLocally(effect, core.currentRequestBatchingEnabled, options.batchRequests) : effect;
return effect;
};
/** @internal */
exports.match = match;
const matchSimple = (options, sequential, parallel) => {
const matchSimple = (options, sequential, concurrent) => {
let effect;

@@ -52,3 +52,3 @@ switch (options?.concurrency) {

{
effect = parallel();
effect = concurrent();
break;

@@ -58,3 +58,3 @@ }

{
effect = core.fiberRefGetWith(core.currentConcurrency, concurrency => concurrency._tag === "None" ? parallel() : concurrency.value > 1 ? parallel() : sequential());
effect = core.fiberRefGetWith(core.currentConcurrency, concurrency => concurrency === "unbounded" ? concurrent() : concurrency > 1 ? concurrent() : sequential());
break;

@@ -64,9 +64,9 @@ }

{
effect = options.concurrency > 1 ? parallel() : sequential();
effect = options.concurrency > 1 ? concurrent() : sequential();
break;
}
}
return options?.batchRequests !== undefined && options.batchRequests !== "inherit" ? core.fiberRefLocally(effect, core.currentRequestBatchingEnabled, options.batchRequests) : effect;
return effect;
};
exports.matchSimple = matchSimple;
//# sourceMappingURL=concurrency.js.map

@@ -6,3 +6,3 @@ "use strict";

});
exports.within = exports.upperCase = exports.unnested = exports.snakeCase = exports.orElse = exports.nested = exports.makeFlat = exports.make = exports.lowerCase = exports.kebabCase = exports.fromMap = exports.fromFlat = exports.fromEnv = exports.contramapPath = exports.constantCase = exports.configProviderTag = exports.FlatConfigProviderTypeId = exports.ConfigProviderTypeId = void 0;
exports.within = exports.upperCase = exports.unnested = exports.snakeCase = exports.orElse = exports.nested = exports.mapInputPath = exports.makeFlat = exports.make = exports.lowerCase = exports.kebabCase = exports.fromMap = exports.fromFlat = exports.fromEnv = exports.constantCase = exports.configProviderTag = exports.FlatConfigProviderTypeId = exports.ConfigProviderTypeId = void 0;
var Context = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/data/Context"));

@@ -181,3 +181,3 @@ var Either = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/data/Either"));

{
return core.suspend(() => core.flatMap(core.forEach(a => core.mapError(configError.prefixed(prefix))(op.mapOrFail(a))))(fromFlatLoop(flat, prefix, op.original, split)));
return core.suspend(() => core.flatMap(core.forEachSequential(a => core.mapError(configError.prefixed(prefix))(op.mapOrFail(a))))(fromFlatLoop(flat, prefix, op.original, split)));
}

@@ -210,3 +210,3 @@ case OpCodes.OP_NESTED:

return RA.of(flattened);
})(core.forEach(indices, index => fromFlatLoop(flat, RA.append(prefix, `[${index}]`), op.config, true)));
})(core.forEachSequential(indices, index => fromFlatLoop(flat, RA.append(prefix, `[${index}]`), op.config, true)));
})(core.flatMap(indicesFrom)(flat.enumerateChildren(patchedPrefix))))(pathPatch.patch(prefix, flat.patch));

@@ -223,3 +223,3 @@ }

return RA.map(values => HashMap.fromIterable(RA.zip(RA.fromIterable(keys), values)))(transpose(matrix));
})(core.forEach(key => fromFlatLoop(flat, concat(prefix, RA.of(key)), op.valueConfig, split))(keys));
})(core.forEachSequential(key => fromFlatLoop(flat, concat(prefix, RA.of(key)), op.valueConfig, split))(keys));
})(flat.enumerateChildren(prefix)))(pathPatch.patch(prefix, flat.patch)));

@@ -243,3 +243,3 @@ }

const [lefts, rights] = extend(fail, fail, RA.map(Either.right)(left.right), RA.map(Either.right)(right.right));
return core.forEach(([left, right]) => core.map(([left, right]) => op.zip(left, right))(core.zip(left, right)))(RA.zip(rights)(lefts));
return core.forEachSequential(([left, right]) => core.map(([left, right]) => op.zip(left, right))(core.zip(left, right)))(RA.zip(rights)(lefts));
}

@@ -253,5 +253,5 @@ throw new Error("BUG: ConfigProvider.fromFlatLoop - please report an issue at https://github.com/Effect-TS/io/issues");

/** @internal */
const contramapPath = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => fromFlat(contramapPathFlat(self.flattened, f)));
exports.contramapPath = contramapPath;
const contramapPathFlat = (self, f) => makeFlat({
const mapInputPath = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => fromFlat(mapInputPathFlat(self.flattened, f)));
exports.mapInputPath = mapInputPath;
const mapInputPathFlat = (self, f) => makeFlat({
load: (path, config, split = true) => self.load(path, config, split),

@@ -298,15 +298,15 @@ enumerateChildren: path => self.enumerateChildren(path),

/** @internal */
const constantCase = self => contramapPath(self, StringUtils.constantCase);
const constantCase = self => mapInputPath(self, StringUtils.constantCase);
/** @internal */
exports.constantCase = constantCase;
const kebabCase = self => contramapPath(self, StringUtils.kebabCase);
const kebabCase = self => mapInputPath(self, StringUtils.kebabCase);
/** @internal */
exports.kebabCase = kebabCase;
const lowerCase = self => contramapPath(self, StringUtils.lowerCase);
const lowerCase = self => mapInputPath(self, StringUtils.lowerCase);
/** @internal */
exports.lowerCase = lowerCase;
const snakeCase = self => contramapPath(self, StringUtils.snakeCase);
const snakeCase = self => mapInputPath(self, StringUtils.snakeCase);
/** @internal */
exports.snakeCase = snakeCase;
const upperCase = self => contramapPath(self, StringUtils.upperCase);
const upperCase = self => mapInputPath(self, StringUtils.upperCase);
/** @internal */

@@ -328,3 +328,3 @@ exports.upperCase = upperCase;

}
return core.mapError(configError.prefixed(path))(core.forEach(char => primitive.parse(char.trim()))(splitPathString(text, delimiter)));
return core.mapError(configError.prefixed(path))(core.forEachSequential(char => primitive.parse(char.trim()))(splitPathString(text, delimiter)));
};

@@ -340,3 +340,3 @@ const transpose = array => {

onSuccess: RA.sort(number.Order)
})(core.forEach(quotedIndices, parseQuotedIndex))));
})(core.forEachSequential(quotedIndices, parseQuotedIndex))));
const STR_INDEX_REGEX = /(^.+)(\[(\d+)\])$/;

@@ -343,0 +343,0 @@ const QUOTED_INDEX_REGEX = /^(\[(\d+)\])$/;

@@ -6,5 +6,5 @@ "use strict";

});
exports.exitZipWith = exports.exitZipRight = exports.exitZipParRight = exports.exitZipParLeft = exports.exitZipPar = exports.exitZipLeft = exports.exitZip = exports.exitUnit = exports.exitUnannotate = exports.exitSucceed = exports.exitMatchEffect = exports.exitMatch = exports.exitMapErrorCause = exports.exitMapError = exports.exitMapBoth = exports.exitMap = exports.exitIsSuccess = exports.exitIsInterrupted = exports.exitIsFailure = exports.exitIsExit = exports.exitInterrupt = exports.exitGetOrElse = exports.exitFromOption = exports.exitFromEither = exports.exitForEachEffect = exports.exitFlatten = exports.exitFlatMapEffect = exports.exitFlatMap = exports.exitFailCause = exports.exitFail = exports.exitExists = exports.exitDie = exports.exitCollectAll = exports.exitCauseOption = exports.exitAsUnit = exports.exitAs = exports.exit = exports.either = exports.dieSync = exports.dieMessage = exports.die = exports.deferredUnsafeMake = exports.deferredUnsafeDone = exports.deferredSync = exports.deferredSucceed = exports.deferredPoll = exports.deferredMakeAs = exports.deferredMake = exports.deferredIsDone = exports.deferredInterruptWith = exports.deferredInterrupt = exports.deferredFailSync = exports.deferredFailCauseSync = exports.deferredFailCause = exports.deferredFail = exports.deferredDone = exports.deferredDieSync = exports.deferredDie = exports.deferredCompleteWith = exports.deferredComplete = exports.deferredAwait = exports.currentUnhandledErrorLogLevel = exports.currentTracerSpanAnnotations = exports.currentTracerSpan = exports.currentSchedulingPriority = exports.currentScheduler = exports.currentRequestBatchingEnabled = exports.currentMetricLabels = exports.currentMaxFiberOps = exports.currentLogSpan = exports.currentLogLevel = exports.currentLogAnnotations = exports.currentInterruptedCause = exports.currentForkScopeOverride = exports.currentContext = exports.currentConcurrency = exports.contramapContext = exports.contextWithEffect = exports.context = exports.checkInterruptible = exports.catchSome = exports.catchAllCause = exports.catchAll = exports.blocked = exports.attemptOrElse = exports.asyncInterruptEither = exports.asyncInterrupt = exports.async = exports.asUnit = exports.as = exports.allLogLevels = exports.acquireUseRelease = exports.ScopeTypeId = exports.RevertFlags = exports.RequestResolverTypeId = exports.RequestResolverImpl = exports.FiberRefTypeId = exports.EffectTypeId = exports.EffectErrorTypeId = exports.CloseableScopeTypeId = void 0;
exports.withInheritedConcurrency = exports.withFiberRuntime = exports.whileLoop = exports.whenEffect = exports.updateRuntimeFlags = exports.unit = exports.uninterruptibleMask = exports.uninterruptible = exports.unified = exports.transplant = exports.tap = exports.sync = exports.suspend = exports.succeed = exports.step = exports.scopeFork = exports.scopeClose = exports.scopeAddFinalizerExit = exports.scopeAddFinalizer = exports.runtimeFlags = exports.runRequestBlock = exports.resolverLocally = exports.requestBlockLocally = exports.releaseMapReplace = exports.releaseMapRemove = exports.releaseMapRelease = exports.releaseMapMake = exports.releaseMapGet = exports.releaseMapAddIfOpen = exports.releaseMapAdd = exports.provideSomeContext = exports.provideContext = exports.partitionMap = exports.orElse = exports.orDieWith = exports.orDie = exports.onInterrupt = exports.onExit = exports.onError = exports.never = exports.metricLabels = exports.matchEffect = exports.matchCauseEffect = exports.matchCause = exports.mapError = exports.mapBoth = exports.map = exports.makeEffectError = exports.logLevelWarning = exports.logLevelTrace = exports.logLevelNone = exports.logLevelInfo = exports.logLevelFatal = exports.logLevelError = exports.logLevelDebug = exports.logLevelAll = exports.isRequestResolver = exports.isEffectError = exports.isEffect = exports.intoDeferred = exports.interruptibleMask = exports.interruptible = exports.interruptWith = exports.interruptFiber = exports.interruptAsFiber = exports.interrupt = exports.if_ = exports.forEachDiscard = exports.forEach = exports.flip = exports.flatten = exports.flatMapStep = exports.flatMap = exports.fiberRefUpdateSomeAndGet = exports.fiberRefUpdateSome = exports.fiberRefUpdateAndGet = exports.fiberRefUpdate = exports.fiberRefUnsafeMakeRuntimeFlags = exports.fiberRefUnsafeMakePatch = exports.fiberRefUnsafeMakeHashSet = exports.fiberRefUnsafeMakeContext = exports.fiberRefUnsafeMake = exports.fiberRefSet = exports.fiberRefReset = exports.fiberRefModifySome = exports.fiberRefModify = exports.fiberRefLocallyWith = exports.fiberRefLocally = exports.fiberRefGetWith = exports.fiberRefGetAndUpdateSome = exports.fiberRefGetAndUpdate = exports.fiberRefGetAndSet = exports.fiberRefGet = exports.fiberRefDelete = exports.fiberIdWith = exports.fiberId = exports.failSync = exports.failCauseSync = exports.failCause = exports.fail = void 0;
exports.zipWith = exports.zipRight = exports.zipLeft = exports.zipFlatten = exports.zip = exports.yieldNow = exports.withUnhandledErrorLogLevel = exports.withSchedulingPriority = exports.withScheduler = exports.withRuntimeFlags = exports.withMaxFiberOps = void 0;
exports.fail = exports.exitZipWith = exports.exitZipRight = exports.exitZipParRight = exports.exitZipParLeft = exports.exitZipPar = exports.exitZipLeft = exports.exitZip = exports.exitUnit = exports.exitUnannotate = exports.exitSucceed = exports.exitMatchEffect = exports.exitMatch = exports.exitMapErrorCause = exports.exitMapError = exports.exitMapBoth = exports.exitMap = exports.exitIsSuccess = exports.exitIsInterrupted = exports.exitIsFailure = exports.exitIsExit = exports.exitInterrupt = exports.exitGetOrElse = exports.exitFromOption = exports.exitFromEither = exports.exitForEachEffect = exports.exitFlatten = exports.exitFlatMapEffect = exports.exitFlatMap = exports.exitFailCause = exports.exitFail = exports.exitExists = exports.exitDie = exports.exitCollectAll = exports.exitCauseOption = exports.exitAsUnit = exports.exitAs = exports.exit = exports.either = exports.dieSync = exports.dieMessage = exports.die = exports.deferredUnsafeMake = exports.deferredUnsafeDone = exports.deferredSync = exports.deferredSucceed = exports.deferredPoll = exports.deferredMakeAs = exports.deferredMake = exports.deferredIsDone = exports.deferredInterruptWith = exports.deferredInterrupt = exports.deferredFailSync = exports.deferredFailCauseSync = exports.deferredFailCause = exports.deferredFail = exports.deferredDone = exports.deferredDieSync = exports.deferredDie = exports.deferredCompleteWith = exports.deferredComplete = exports.deferredAwait = exports.currentUnhandledErrorLogLevel = exports.currentTracerSpanAnnotations = exports.currentTracerSpan = exports.currentSchedulingPriority = exports.currentScheduler = exports.currentRequestBatching = exports.currentMetricLabels = exports.currentMaxFiberOps = exports.currentLogSpan = exports.currentLogLevel = exports.currentLogAnnotations = exports.currentInterruptedCause = exports.currentForkScopeOverride = exports.currentContext = exports.currentConcurrency = exports.contextWithEffect = exports.context = exports.checkInterruptible = exports.catchSome = exports.catchAllCause = exports.catchAll = exports.blocked = exports.attemptOrElse = exports.asyncInterruptEither = exports.asyncInterrupt = exports.async = exports.asUnit = exports.as = exports.allLogLevels = exports.acquireUseRelease = exports.ScopeTypeId = exports.RevertFlags = exports.RequestResolverTypeId = exports.RequestResolverImpl = exports.FiberRefTypeId = exports.EffectTypeId = exports.EffectErrorTypeId = exports.CloseableScopeTypeId = void 0;
exports.withFiberRuntime = exports.withConcurrency = exports.whileLoop = exports.whenEffect = exports.updateRuntimeFlags = exports.unit = exports.uninterruptibleMask = exports.uninterruptible = exports.unified = exports.transplant = exports.tap = exports.sync = exports.suspend = exports.succeed = exports.step = exports.scopeFork = exports.scopeClose = exports.scopeAddFinalizerExit = exports.scopeAddFinalizer = exports.runtimeFlags = exports.runRequestBlock = exports.resolverLocally = exports.requestBlockLocally = exports.releaseMapReplace = exports.releaseMapRemove = exports.releaseMapRelease = exports.releaseMapMake = exports.releaseMapGet = exports.releaseMapAddIfOpen = exports.releaseMapAdd = exports.provideSomeContext = exports.provideContext = exports.partitionMap = exports.orElse = exports.orDieWith = exports.orDie = exports.onInterrupt = exports.onExit = exports.onError = exports.never = exports.metricLabels = exports.matchEffect = exports.matchCauseEffect = exports.matchCause = exports.mapInputContext = exports.mapError = exports.mapBoth = exports.map = exports.makeEffectError = exports.logLevelWarning = exports.logLevelTrace = exports.logLevelNone = exports.logLevelInfo = exports.logLevelFatal = exports.logLevelError = exports.logLevelDebug = exports.logLevelAll = exports.isRequestResolver = exports.isEffectError = exports.isEffect = exports.intoDeferred = exports.interruptibleMask = exports.interruptible = exports.interruptWith = exports.interruptFiber = exports.interruptAsFiber = exports.interrupt = exports.if_ = exports.forEachSequentialDiscard = exports.forEachSequential = exports.flip = exports.flatten = exports.flatMapStep = exports.flatMap = exports.fiberRefUpdateSomeAndGet = exports.fiberRefUpdateSome = exports.fiberRefUpdateAndGet = exports.fiberRefUpdate = exports.fiberRefUnsafeMakeRuntimeFlags = exports.fiberRefUnsafeMakePatch = exports.fiberRefUnsafeMakeHashSet = exports.fiberRefUnsafeMakeContext = exports.fiberRefUnsafeMake = exports.fiberRefSet = exports.fiberRefReset = exports.fiberRefModifySome = exports.fiberRefModify = exports.fiberRefLocallyWith = exports.fiberRefLocally = exports.fiberRefGetWith = exports.fiberRefGetAndUpdateSome = exports.fiberRefGetAndUpdate = exports.fiberRefGetAndSet = exports.fiberRefGet = exports.fiberRefDelete = exports.fiberIdWith = exports.fiberId = exports.failSync = exports.failCauseSync = exports.failCause = void 0;
exports.zipWith = exports.zipRight = exports.zipLeft = exports.zipFlatten = exports.zip = exports.yieldNow = exports.withUnhandledErrorLogLevel = exports.withSchedulingPriority = exports.withScheduler = exports.withRuntimeFlags = exports.withRequestBatching = exports.withMaxFiberOps = void 0;
var Chunk = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/data/Chunk"));

@@ -386,3 +386,3 @@ var Context = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/data/Context"));

exports.matchEffect = matchEffect;
const forEach = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => suspend(() => {
const forEachSequential = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => suspend(() => {
const arr = ReadonlyArray.fromIterable(self);

@@ -400,4 +400,4 @@ const ret = new Array(arr.length);

/* @internal */
exports.forEach = forEach;
const forEachDiscard = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => suspend(() => {
exports.forEachSequential = forEachSequential;
const forEachSequentialDiscard = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => suspend(() => {
const arr = ReadonlyArray.fromIterable(self);

@@ -414,3 +414,3 @@ let i = 0;

/* @internal */
exports.forEachDiscard = forEachDiscard;
exports.forEachSequentialDiscard = forEachSequentialDiscard;
const if_ = /*#__PURE__*/(0, _Function.dual)(args => typeof args[0] === "boolean" || isEffect(args[0]), (self, {

@@ -547,5 +547,5 @@ onFalse,

exports.provideSomeContext = provideSomeContext;
const contramapContext = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => contextWithEffect(context => provideContext(self, f(context))));
const mapInputContext = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => contextWithEffect(context => provideContext(self, f(context))));
/* @internal */
exports.contramapContext = contramapContext;
exports.mapInputContext = mapInputContext;
const runtimeFlags = /*#__PURE__*/withFiberRuntime((_, status) => succeed(status.runtimeFlags));

@@ -650,5 +650,8 @@ /* @internal */

exports.whileLoop = whileLoop;
const withInheritedConcurrency = /*#__PURE__*/(0, _Function.dual)(2, (self, concurrency) => fiberRefLocally(self, currentConcurrency, concurrency === "unbounded" ? Option.none() : Option.some(concurrency)));
const withConcurrency = /*#__PURE__*/(0, _Function.dual)(2, (self, concurrency) => fiberRefLocally(self, currentConcurrency, concurrency));
/* @internal */
exports.withInheritedConcurrency = withInheritedConcurrency;
exports.withConcurrency = withConcurrency;
const withRequestBatching = /*#__PURE__*/(0, _Function.dual)(2, (self, requestBatching) => fiberRefLocally(self, currentRequestBatching, requestBatching));
/* @internal */
exports.withRequestBatching = withRequestBatching;
const withRuntimeFlags = /*#__PURE__*/(0, _Function.dual)(2, (self, update) => {

@@ -989,3 +992,3 @@ const effect = new EffectPrimitive(OpCodes.OP_UPDATE_RUNTIME_FLAGS);

exports.withMaxFiberOps = withMaxFiberOps;
const currentConcurrency = /*#__PURE__*/(0, _Global.globalValue)( /*#__PURE__*/Symbol.for("@effect/io/FiberRef/currentConcurrency"), () => fiberRefUnsafeMake(Option.none()));
const currentConcurrency = /*#__PURE__*/(0, _Global.globalValue)( /*#__PURE__*/Symbol.for("@effect/io/FiberRef/currentConcurrency"), () => fiberRefUnsafeMake("unbounded"));
/**

@@ -995,5 +998,5 @@ * @internal

exports.currentConcurrency = currentConcurrency;
const currentRequestBatchingEnabled = /*#__PURE__*/(0, _Global.globalValue)( /*#__PURE__*/Symbol.for("@effect/io/FiberRef/currentRequestBatchingEnabled"), () => fiberRefUnsafeMake(true));
const currentRequestBatching = /*#__PURE__*/(0, _Global.globalValue)( /*#__PURE__*/Symbol.for("@effect/io/FiberRef/currentRequestBatching"), () => fiberRefUnsafeMake(true));
/** @internal */
exports.currentRequestBatchingEnabled = currentRequestBatchingEnabled;
exports.currentRequestBatching = currentRequestBatching;
const currentUnhandledErrorLogLevel = /*#__PURE__*/(0, _Global.globalValue)( /*#__PURE__*/Symbol.for("@effect/io/FiberRef/currentUnhandledErrorLogLevel"), () => fiberRefUnsafeMake(Option.some(logLevelDebug)));

@@ -1000,0 +1003,0 @@ /** @internal */
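In core, withInheritedConcurrency becomes withConcurrency and the currentConcurrency FiberRef now stores the raw value ("unbounded" or a number) rather than an Option, while currentRequestBatchingEnabled is renamed currentRequestBatching and gains a withRequestBatching combinator that takes a plain boolean. A hedged migration sketch, assuming the public Effect module re-exports these combinators under the same names:

```typescript
import * as Effect from "@effect/io/Effect"

declare const program: Effect.Effect<never, never, void>

// 0.31.x internal: withInheritedConcurrency(program, 4) stored Option.some(4)
// 0.32.0: the value is stored directly in the FiberRef
const bounded = Effect.withConcurrency(program, 4)
const unbounded = Effect.withConcurrency(program, "unbounded")

// 0.31.x: request batching was toggled with an "on"/"off" strategy string
// 0.32.0: a plain boolean written into currentRequestBatching
const noBatching = Effect.withRequestBatching(program, false)
```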

@@ -6,3 +6,3 @@ "use strict";

});
exports.race = exports.provideContext = exports.never = exports.makeWithEntry = exports.makeBatched = exports.make = exports.fromFunctionEffect = exports.fromFunctionBatched = exports.fromFunction = exports.eitherWith = exports.contramapContext = exports.batchN = exports.around = void 0;
exports.race = exports.provideContext = exports.never = exports.mapInputContext = exports.makeWithEntry = exports.makeBatched = exports.make = exports.fromFunctionEffect = exports.fromFunctionBatched = exports.fromFunction = exports.eitherWith = exports.batchN = exports.around = void 0;
var Chunk = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/data/Chunk"));

@@ -25,3 +25,3 @@ var _Function = /*#__PURE__*/require("@effect/data/Function");

exports.makeWithEntry = makeWithEntry;
const makeBatched = run => new core.RequestResolverImpl(requests => requests.length > 1 ? core.forEachDiscard(requests, block => (0, _fiberRuntime.invokeWithInterrupt)(run(block.filter(_ => !_.state.completed).map(_ => _.request)), block)) : requests.length === 1 ? run(requests[0].filter(_ => !_.state.completed).map(_ => _.request)) : core.unit);
const makeBatched = run => new core.RequestResolverImpl(requests => requests.length > 1 ? core.forEachSequentialDiscard(requests, block => (0, _fiberRuntime.invokeWithInterrupt)(run(block.filter(_ => !_.state.completed).map(_ => _.request)), block)) : requests.length === 1 ? run(requests[0].filter(_ => !_.state.completed).map(_ => _.request)) : core.unit);
/** @internal */

@@ -37,9 +37,9 @@ exports.makeBatched = makeBatched;

exports.batchN = batchN;
const contramapContext = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => new core.RequestResolverImpl(requests => core.contramapContext(self.runAll(requests), context => f(context)), Chunk.make("ContramapContext", self, f)));
const mapInputContext = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => new core.RequestResolverImpl(requests => core.mapInputContext(self.runAll(requests), context => f(context)), Chunk.make("MapInputContext", self, f)));
/** @internal */
exports.contramapContext = contramapContext;
const eitherWith = /*#__PURE__*/(0, _Function.dual)(3, (self, that, f) => new core.RequestResolverImpl(batch => core.forEach(batch, requests => {
exports.mapInputContext = mapInputContext;
const eitherWith = /*#__PURE__*/(0, _Function.dual)(3, (self, that, f) => new core.RequestResolverImpl(batch => core.forEachSequential(batch, requests => {
const [as, bs] = RA.partitionMap(f)(requests);
return (0, _fiberRuntime.zipWithOptions)(self.runAll(Array.of(as)), that.runAll(Array.of(bs)), () => void 0, {
parallel: true
concurrent: true
});

@@ -49,3 +49,3 @@ }), Chunk.make("EitherWith", self, that, f)));

exports.eitherWith = eitherWith;
const fromFunction = f => makeBatched(requests => core.forEachDiscard(requests, request => (0, _request.complete)(request, core.exitSucceed(f(request))))).identified("FromFunction", f);
const fromFunction = f => makeBatched(requests => core.forEachSequentialDiscard(requests, request => (0, _request.complete)(request, core.exitSucceed(f(request))))).identified("FromFunction", f);
/** @internal */

@@ -67,3 +67,3 @@ exports.fromFunction = fromFunction;

exports.never = never;
const provideContext = /*#__PURE__*/(0, _Function.dual)(2, (self, context) => contramapContext(self, _ => context).identified("ProvideContext", self, context));
const provideContext = /*#__PURE__*/(0, _Function.dual)(2, (self, context) => mapInputContext(self, _ => context).identified("ProvideContext", self, context));
/** @internal */

@@ -70,0 +70,0 @@ exports.provideContext = provideContext;
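RequestResolver gets the same contramapContext to mapInputContext rename, and its internal zip switches from parallel: true to concurrent: true. A sketch of the renamed combinator with a hypothetical resolver and context transform (Db, provideDb, and resolver are made up for illustration):

```typescript
import type * as Context from "@effect/data/Context"
import * as RequestResolver from "@effect/io/RequestResolver"

// Hypothetical service, resolver, and context transform.
interface Db {
  readonly query: (id: number) => string
}
declare const resolver: RequestResolver.RequestResolver<never, Db>
declare const provideDb: (ctx: Context.Context<never>) => Context.Context<Db>

// 0.31.x: RequestResolver.contramapContext(resolver, provideDb)
// 0.32.0: renamed, same signature
const provided = RequestResolver.mapInputContext(resolver, provideDb)
```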

@@ -257,3 +257,3 @@ "use strict";

exports.eventually = eventually;
const filterMap = /*#__PURE__*/(0, _Function.dual)(2, (elements, pf) => core.map(core.forEach(elements, _Function.identity), ReadonlyArray.filterMap(pf)));
const filterMap = /*#__PURE__*/(0, _Function.dual)(2, (elements, pf) => core.map(core.forEachSequential(elements, _Function.identity), ReadonlyArray.filterMap(pf)));
/* @internal */

@@ -763,3 +763,3 @@ exports.filterMap = filterMap;

exports.updateFiberRefs = updateFiberRefs;
const updateService = /*#__PURE__*/(0, _Function.dual)(3, (self, tag, f) => core.contramapContext(self, context => Context.add(context, tag, f(Context.unsafeGet(context, tag)))));
const updateService = /*#__PURE__*/(0, _Function.dual)(3, (self, tag, f) => core.mapInputContext(self, context => Context.add(context, tag, f(Context.unsafeGet(context, tag)))));
/** @internal */

@@ -766,0 +766,0 @@ exports.updateService = updateService;

@@ -121,3 +121,3 @@ "use strict";

exports.ensuringChildren = ensuringChildren;
const forkAll = /*#__PURE__*/(0, _Function.dual)(args => Predicate.isIterable(args[0]), (effects, options) => options?.discard ? core.forEachDiscard(effects, fiberRuntime.fork) : core.map(core.forEach(effects, fiberRuntime.fork), fiberRuntime.fiberAll));
const forkAll = /*#__PURE__*/(0, _Function.dual)(args => Predicate.isIterable(args[0]), (effects, options) => options?.discard ? core.forEachSequentialDiscard(effects, fiberRuntime.fork) : core.map(core.forEachSequential(effects, fiberRuntime.fork), fiberRuntime.fiberAll));
/** @internal */

@@ -284,3 +284,3 @@ exports.forkAll = forkAll;

await: () => core.exit(fiberRuntime.zipWithOptions(core.flatten(that.await()), f, {
parallel: true
concurrent: true
})(core.flatten(self.await()))),

@@ -287,0 +287,0 @@ children: () => self.children(),

@@ -79,3 +79,3 @@ "use strict";

exports.dump = dump;
const dumpAll = fibers => core.forEach(fibers, dump);
const dumpAll = fibers => core.forEachSequential(fibers, dump);
/** @internal */

@@ -104,3 +104,3 @@ exports.dumpAll = dumpAll;

exports.interruptAll = interruptAll;
const interruptAllAs = /*#__PURE__*/(0, _Function.dual)(2, (fibers, fiberId) => core.zipRight(core.forEachDiscard(_await)(fibers))(core.forEachDiscard(fibers, interruptAsFork(fiberId))));
const interruptAllAs = /*#__PURE__*/(0, _Function.dual)(2, (fibers, fiberId) => core.zipRight(core.forEachSequentialDiscard(_await)(fibers))(core.forEachSequentialDiscard(fibers, interruptAsFork(fiberId))));
/** @internal */

@@ -107,0 +107,0 @@ exports.interruptAllAs = interruptAllAs;

@@ -127,3 +127,3 @@ "use strict";

exports.fiberRefs = fiberRefs;
const setAll = self => core.forEachDiscard(fiberRefs(self), fiberRef => core.fiberRefSet(fiberRef, getOrDefault(self, fiberRef)));
const setAll = self => core.forEachSequentialDiscard(fiberRefs(self), fiberRef => core.fiberRefSet(fiberRef, getOrDefault(self, fiberRef)));
/** @internal */

@@ -130,0 +130,0 @@ exports.setAll = setAll;

@@ -7,3 +7,3 @@ import type { Concurrency } from "@effect/io/Concurrency";

readonly concurrency?: Concurrency;
readonly batchRequests?: boolean | "inherit";
readonly batching?: boolean | "inherit";
readonly discard?: false;

@@ -13,3 +13,3 @@ }): (self: Iterable<A>) => Effect.Effect<R, E, B[]>;

readonly concurrency?: Concurrency;
readonly batchRequests?: boolean | "inherit";
readonly batching?: boolean | "inherit";
readonly discard: true;

@@ -20,3 +20,3 @@ }): (self: Iterable<A_1>) => Effect.Effect<R_1, E_1, void>;

readonly concurrency?: Concurrency;
readonly batchRequests?: boolean | "inherit";
readonly batching?: boolean | "inherit";
readonly discard?: false;

@@ -26,3 +26,3 @@ }): Effect.Effect<R_2, E_2, B_2[]>;

readonly concurrency?: Concurrency;
readonly batchRequests?: boolean | "inherit";
readonly batching?: boolean | "inherit";
readonly discard: true;

@@ -29,0 +29,0 @@ }): Effect.Effect<R_3, E_3, void>;
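In these option bags, batchRequests becomes batching (still boolean | "inherit") next to the concurrency option. Assuming these are the Effect.forEach-style overloads, a 0.32.0 call might look like the following; the option names come from the hunk above, the concrete values are illustrative:

```typescript
import * as Effect from "@effect/io/Effect"

const doubled = Effect.forEach(
  [1, 2, 3],
  (n) => Effect.succeed(n * 2),
  {
    concurrency: "unbounded", // a number, "unbounded", or "inherit"
    batching: true            // was `batchRequests` in 0.31.x
  }
)
```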

@@ -678,3 +678,3 @@ "use strict";

this.strategy.unsafeOnHubEmptySpace(this.hub, this.subscribers);
}))(fiberRuntime.forEachParUnbounded(unsafePollAllQueue(this.pollers), d => core.deferredInterruptWith(d, state.id())))));
}))(fiberRuntime.forEachParUnbounded(unsafePollAllQueue(this.pollers), d => core.deferredInterruptWith(d, state.id()), false))));
}));

@@ -823,3 +823,3 @@ }

subscribe() {
const acquire = core.tap(fiberRuntime.all(this.scope.fork(executionStrategy.sequential), makeSubscription(this.hub, this.subscribers, this.strategy)), tuple => tuple[0].addFinalizer(() => tuple[1].shutdown()));
const acquire = core.tap(fiberRuntime.all([this.scope.fork(executionStrategy.sequential), makeSubscription(this.hub, this.subscribers, this.strategy)]), tuple => tuple[0].addFinalizer(() => tuple[1].shutdown()));
return core.map(fiberRuntime.acquireRelease(acquire, (tuple, exit) => tuple[0].close(exit)), tuple => tuple[1]);

@@ -899,3 +899,3 @@ }

shutdown() {
return core.flatMap(core.fiberId, fiberId => core.flatMap(core.sync(() => unsafePollAllQueue(this.publishers)), publishers => fiberRuntime.forEachParUnboundedDiscard(publishers, ([_, deferred, last]) => last ? core.asUnit(core.deferredInterruptWith(deferred, fiberId)) : core.unit)));
return core.flatMap(core.fiberId, fiberId => core.flatMap(core.sync(() => unsafePollAllQueue(this.publishers)), publishers => fiberRuntime.forEachParUnboundedDiscard(publishers, ([_, deferred, last]) => last ? core.asUnit(core.deferredInterruptWith(deferred, fiberId)) : core.unit, false)));
}

@@ -902,0 +902,0 @@ handleSurplus(hub, subscribers, elements, isShutdown) {

@@ -42,3 +42,3 @@ "use strict";

invalidate(item) {
return core.flatMap(this.activePools(), core.forEachDiscard(pool => pool.invalidate(item)));
return core.flatMap(this.activePools(), core.forEachSequentialDiscard(pool => pool.invalidate(item)));
}

@@ -134,3 +134,3 @@ pipe() {

});
const activePools = () => core.suspend(() => core.forEach(Array.from(HashMap.values(MutableRef.get(map))), value => {
const activePools = () => core.suspend(() => core.forEachSequential(Array.from(HashMap.values(MutableRef.get(map))), value => {
switch (value._tag) {

@@ -148,3 +148,3 @@ case "Complete":

return new KeyedPoolImpl(getOrCreatePool, activePools);
})(fiberRuntime.all(core.context(), core.fiberId, core.sync(() => MutableRef.make(HashMap.empty())), fiberRuntime.scopeMake()));
})(fiberRuntime.all([core.context(), core.fiberId, core.sync(() => MutableRef.make(HashMap.empty())), fiberRuntime.scopeMake()]));
/** @internal */

@@ -151,0 +151,0 @@ const make = options => makeImpl(options.acquire, () => options.size, () => options.size, () => Option.none());

@@ -155,3 +155,3 @@ "use strict";

return core.sync(() => memoMap => fiberRuntime.zipWithOptions(memoMap.getOrElseMemoize(op.second, scope), op.zipK, {
parallel: true
concurrent: true
})(memoMap.getOrElseMemoize(op.first, scope)));

@@ -158,0 +158,0 @@ }

@@ -6,3 +6,3 @@ "use strict";

});
exports.zipRight = exports.zipLeft = exports.zip = exports.sync = exports.succeed = exports.stringLogger = exports.simple = exports.none = exports.map = exports.makeLogger = exports.logfmtLogger = exports.filterLogLevel = exports.contramap = exports.LoggerTypeId = void 0;
exports.zipRight = exports.zipLeft = exports.zip = exports.sync = exports.succeed = exports.stringLogger = exports.simple = exports.none = exports.mapInput = exports.map = exports.makeLogger = exports.logfmtLogger = exports.filterLogLevel = exports.LoggerTypeId = void 0;
var _Function = /*#__PURE__*/require("@effect/data/Function");

@@ -39,3 +39,3 @@ var HashMap = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/data/HashMap"));

exports.makeLogger = makeLogger;
const contramap = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => makeLogger(options => self.log({
const mapInput = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => makeLogger(options => self.log({
...options,

@@ -45,3 +45,3 @@ message: f(options.message)

/** @internal */
exports.contramap = contramap;
exports.mapInput = mapInput;
const filterLogLevel = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => makeLogger(options => f(options.logLevel) ? Option.some(self.log(options)) : Option.none()));

@@ -48,0 +48,0 @@ /** @internal */
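Logger.contramap becomes Logger.mapInput; as the implementation shows, the supplied function transforms the incoming message before it reaches the wrapped logger. A usage sketch, assuming the public Logger module mirrors the internal rename:

```typescript
import * as Logger from "@effect/io/Logger"

// 0.31.x: Logger.contramap(Logger.stringLogger, JSON.stringify)
// 0.32.0: same behavior, new name; the resulting logger accepts structured messages
const jsonLogger = Logger.mapInput(
  Logger.stringLogger,
  (message: object) => JSON.stringify(message)
)
```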

@@ -6,3 +6,3 @@ "use strict";

});
exports.zip = exports.withNow = exports.withConstantInput = exports.value = exports.update = exports.unsafeSnapshot = exports.trackSuccessWith = exports.trackSuccess = exports.trackErrorWith = exports.trackError = exports.trackDurationWith = exports.trackDuration = exports.trackDefectWith = exports.trackDefect = exports.trackAll = exports.timerWithBoundaries = exports.timer = exports.taggedWithLabelsInput = exports.taggedWithLabels = exports.tagged = exports.sync = exports.summaryTimestamp = exports.summary = exports.succeed = exports.snapshot = exports.set = exports.mapType = exports.map = exports.make = exports.incrementBy = exports.increment = exports.histogram = exports.globalMetricRegistry = exports.gauge = exports.fromMetricKey = exports.frequency = exports.counter = exports.contramap = exports.MetricTypeId = void 0;
exports.zip = exports.withNow = exports.withConstantInput = exports.value = exports.update = exports.unsafeSnapshot = exports.trackSuccessWith = exports.trackSuccess = exports.trackErrorWith = exports.trackError = exports.trackDurationWith = exports.trackDuration = exports.trackDefectWith = exports.trackDefect = exports.trackAll = exports.timerWithBoundaries = exports.timer = exports.taggedWithLabelsInput = exports.taggedWithLabels = exports.tagged = exports.sync = exports.summaryTimestamp = exports.summary = exports.succeed = exports.snapshot = exports.set = exports.mapType = exports.mapInput = exports.map = exports.make = exports.incrementBy = exports.increment = exports.histogram = exports.globalMetricRegistry = exports.gauge = exports.fromMetricKey = exports.frequency = exports.counter = exports.MetricTypeId = void 0;
var Duration = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/data/Duration"));

@@ -53,5 +53,5 @@ var _Function = /*#__PURE__*/require("@effect/data/Function");

exports.make = make;
const contramap = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => make(self.keyType, (input, extraTags) => self.unsafeUpdate(f(input), extraTags), self.unsafeValue));
const mapInput = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => make(self.keyType, (input, extraTags) => self.unsafeUpdate(f(input), extraTags), self.unsafeValue));
/** @internal */
exports.contramap = contramap;
exports.mapInput = mapInput;
const counter = (name, description) => fromMetricKey(metricKey.counter(name, description));

@@ -63,3 +63,3 @@ /** @internal */

exports.frequency = frequency;
const withConstantInput = /*#__PURE__*/(0, _Function.dual)(2, (self, input) => contramap(self, () => input));
const withConstantInput = /*#__PURE__*/(0, _Function.dual)(2, (self, input) => mapInput(self, () => input));
/** @internal */

@@ -128,3 +128,3 @@ exports.withConstantInput = withConstantInput;

const base = tagged("time_unit", "milliseconds")(histogram(name, boundaries));
return contramap(base, Duration.toMillis);
return mapInput(base, Duration.toMillis);
};

@@ -135,3 +135,3 @@ /** @internal */

const base = tagged("time_unit", "milliseconds")(histogram(name, metricBoundaries.fromChunk(boundaries)));
return contramap(base, Duration.toMillis);
return mapInput(base, Duration.toMillis);
};

@@ -199,3 +199,3 @@ /* @internal */

exports.value = value;
const withNow = self => contramap(self, input => [input, Date.now()]);
const withNow = self => mapInput(self, input => [input, Date.now()]);
/** @internal */

@@ -202,0 +202,0 @@ exports.withNow = withNow;
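Metric.contramap is renamed to mapInput as well; the timer helpers above already use it to feed Duration values into a millisecond histogram. An illustrative user-level equivalent, assuming the public Metric module re-exports the renamed combinator:

```typescript
import * as Duration from "@effect/data/Duration"
import * as Metric from "@effect/io/Metric"

// 0.31.x: Metric.contramap(Metric.counter("elapsed_millis"), Duration.toMillis)
// 0.32.0: the counter now accepts Duration values, converted on update
const elapsedMillis = Metric.mapInput(Metric.counter("elapsed_millis"), Duration.toMillis)
```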

@@ -47,3 +47,3 @@ "use strict";

}, extraTags => Array.from(metrics.map(pollingMetric => pollingMetric.metric.unsafeValue(extraTags)))),
poll: core.forEach(metrics, metric => metric.poll)
poll: core.forEachSequential(metrics, metric => metric.poll)
};

@@ -50,0 +50,0 @@ };

@@ -235,8 +235,8 @@ "use strict";

const makeWith = options => core.uninterruptibleMask(restore => core.flatMap(([context, down, state, items, inv, initial]) => {
const pool = new PoolImpl(core.contramapContext(options.acquire, old => Context.merge(old)(context)), options.min, options.max, down, state, items, inv, exit => options.strategy.track(initial, exit));
const pool = new PoolImpl(core.mapInputContext(options.acquire, old => Context.merge(old)(context)), options.min, options.max, down, state, items, inv, exit => options.strategy.track(initial, exit));
return core.as(pool)(core.flatMap(fiber => core.flatMap(fiberRuntime.forkDaemon(restore(options.strategy.run(initial, excess(pool), shrink(pool)))), shrink => fiberRuntime.addFinalizer(() => core.zipRight(core.interruptFiber(shrink))(core.zipRight(core.interruptFiber(fiber))(shutdown(pool))))))(fiberRuntime.forkDaemon(restore(initialize(pool)))));
})(fiberRuntime.all(core.context(), ref.make(false), ref.make({
})(fiberRuntime.all([core.context(), ref.make(false), ref.make({
size: 0,
free: 0
}), queue.bounded(options.max), ref.make(HashSet.empty()), options.strategy.initial())));
}), queue.bounded(options.max), ref.make(HashSet.empty()), options.strategy.initial()])));
/** @internal */

@@ -243,0 +243,0 @@ const isPool = u => typeof u === "object" && u != null && PoolTypeId in u;

@@ -6,3 +6,3 @@ "use strict";

});
exports.withRequestCaching = exports.withRequestCache = exports.withRequestBatching = exports.fromRequest = exports.currentCacheEnabled = exports.currentCache = exports.cacheRequest = void 0;
exports.withRequestCaching = exports.withRequestCache = exports.fromRequest = exports.currentCacheEnabled = exports.currentCache = exports.cacheRequest = void 0;
var _Duration = /*#__PURE__*/require("@effect/data/Duration");

@@ -101,21 +101,4 @@ var _Function = /*#__PURE__*/require("@effect/data/Function");

exports.cacheRequest = cacheRequest;
const withRequestBatching = /*#__PURE__*/(0, _Function.dual)(2, (self, strategy) => core.fiberRefGetWith(core.currentRequestBatchingEnabled, enabled => {
switch (strategy) {
case "off":
return enabled ? core.fiberRefLocally(self, core.currentRequestBatchingEnabled, false) : self;
case "on":
return enabled ? self : core.fiberRefLocally(self, core.currentRequestBatchingEnabled, true);
}
}));
const withRequestCaching = /*#__PURE__*/(0, _Function.dual)(2, (self, strategy) => core.fiberRefLocally(self, currentCacheEnabled, strategy));
/** @internal */
exports.withRequestBatching = withRequestBatching;
const withRequestCaching = /*#__PURE__*/(0, _Function.dual)(2, (self, strategy) => core.fiberRefGetWith(currentCacheEnabled, enabled => {
switch (strategy) {
case "off":
return enabled ? core.fiberRefLocally(self, currentCacheEnabled, false) : self;
case "on":
return enabled ? self : core.fiberRefLocally(self, currentCacheEnabled, true);
}
}));
/** @internal */
exports.withRequestCaching = withRequestCaching;

@@ -122,0 +105,0 @@ const withRequestCache = /*#__PURE__*/(0, _Function.dual)(2,
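withRequestCaching no longer interprets an "on"/"off" strategy string; it now writes a boolean directly into the currentCacheEnabled FiberRef, and the old request-level withRequestBatching is removed in favor of the core version shown earlier. A migration sketch, assuming a public Effect.withRequestCaching re-export (lookupUser is hypothetical):

```typescript
import * as Effect from "@effect/io/Effect"

declare const lookupUser: Effect.Effect<never, Error, string>

// 0.31.x: Effect.withRequestCaching(lookupUser, "on")
// 0.32.0: a plain boolean
const cached = Effect.withRequestCaching(lookupUser, true)
```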

@@ -81,3 +81,3 @@ "use strict";

MutableRef.set(true)(this.shutdownFlag);
return core.asUnit(core.whenEffect(core.deferredSucceed(this.shutdownHook, void 0))(core.zipRight(this.strategy.shutdown())(fiberRuntime.forEachParUnboundedDiscard(unsafePollAll(this.takers), d => core.deferredInterruptWith(d, state.id())))));
return core.asUnit(core.whenEffect(core.deferredSucceed(this.shutdownHook, void 0))(core.zipRight(this.strategy.shutdown())(fiberRuntime.forEachParUnboundedDiscard(unsafePollAll(this.takers), d => core.deferredInterruptWith(d, state.id()), false))));
}));

@@ -325,3 +325,3 @@ }

shutdown() {
return core.flatMap(fiberId => core.flatMap(putters => fiberRuntime.forEachParUnboundedDiscard(putters, ([_, deferred, isLastItem]) => isLastItem ? core.asUnit(core.deferredInterruptWith(deferred, fiberId)) : core.unit))(core.sync(() => unsafePollAll(this.putters))))(core.fiberId);
return core.flatMap(fiberId => core.flatMap(putters => fiberRuntime.forEachParUnboundedDiscard(putters, ([_, deferred, isLastItem]) => isLastItem ? core.asUnit(core.deferredInterruptWith(deferred, fiberId)) : core.unit, false))(core.sync(() => unsafePollAll(this.putters))))(core.fiberId);
}

@@ -328,0 +328,0 @@ handleSurplus(iterable, queue, takers, isShutdown) {

@@ -55,3 +55,3 @@ "use strict";

}
return core.as(Chunk.fromIterable(buffer))(core.forEachDiscard(n => core.map(k => swap(buffer, n - 1, k))(nextIntBounded(n)))(numbers));
return core.as(Chunk.fromIterable(buffer))(core.forEachSequentialDiscard(n => core.map(k => swap(buffer, n - 1, k))(nextIntBounded(n)))(numbers));
})(core.sync(() => Array.from(elements))));

@@ -58,0 +58,0 @@ };

@@ -225,3 +225,3 @@ "use strict";

exports.runtime = runtime;
const defaultRuntimeFlags = /*#__PURE__*/runtimeFlags.make(runtimeFlags.Interruption, runtimeFlags.CooperativeYielding);
const defaultRuntimeFlags = /*#__PURE__*/runtimeFlags.make(runtimeFlags.Interruption, runtimeFlags.CooperativeYielding, runtimeFlags.RuntimeMetrics);
/** @internal */

@@ -228,0 +228,0 @@ exports.defaultRuntimeFlags = defaultRuntimeFlags;
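The default runtime flags now include RuntimeMetrics, so fiber metrics are enabled out of the box. A sketch of the equivalent flag set, assuming the public RuntimeFlags module exposes the same make constructor and flag constants as the internal runtimeFlags used above:

```typescript
import * as RuntimeFlags from "@effect/io/RuntimeFlags"

// Mirrors the new default in the hunk above: RuntimeMetrics joins
// Interruption and CooperativeYielding.
const defaults = RuntimeFlags.make(
  RuntimeFlags.Interruption,
  RuntimeFlags.CooperativeYielding,
  RuntimeFlags.RuntimeMetrics
)
```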

@@ -6,3 +6,3 @@ "use strict";

});
exports.retryWhile_Effect = exports.retryWhileEffect_Effect = exports.retryUntil_Effect = exports.retryUntilEffect_Effect = exports.retryOrElse_Effect = exports.retryN_Effect = exports.resetWhen = exports.resetAfter = exports.repetitions = exports.repeat_Effect = exports.repeatWhile_Effect = exports.repeatWhileEffect_Effect = exports.repeatUntil_Effect = exports.repeatUntilEffect_Effect = exports.repeatOrElse_Effect = exports.repeatForever = exports.reduceEffect = exports.reduce = exports.recurs = exports.recurWhileEffect = exports.recurWhile = exports.recurUpTo = exports.recurUntilOption = exports.recurUntilEffect = exports.recurUntil = exports.provideService = exports.provideContext = exports.passthrough = exports.once = exports.onDecision = exports.nextSecond = exports.nextMinute = exports.nextHour = exports.nextDayOfMonth = exports.nextDay = exports.modifyDelayEffect = exports.modifyDelay = exports.minuteOfHour = exports.mapEffect = exports.map = exports.makeWithState = exports.linear = exports.jitteredWith = exports.jittered = exports.intersectWith = exports.intersect = exports.identity = exports.hourOfDay = exports.fromFunction = exports.fromDelays = exports.fromDelay = exports.forever = exports.fixed = exports.findNextMonth = exports.fibonacci = exports.exponential = exports.ensuring = exports.endOfSecond = exports.endOfMinute = exports.endOfHour = exports.endOfDay = exports.elapsed = exports.eitherWith = exports.either = exports.duration = exports.driver = exports.dimapEffect = exports.dimap = exports.delays = exports.delayedSchedule = exports.delayedEffect = exports.delayed = exports.dayOfWeek = exports.dayOfMonth = exports.count = exports.contramapEffect = exports.contramapContext = exports.contramap = exports.compose = exports.collectWhileEffect = exports.collectWhile = exports.collectUntilEffect = exports.collectUntil = exports.collectAllOutputs = exports.collectAllInputs = exports.checkEffect = exports.check = exports.bothInOut = exports.beginningOfSecond = exports.beginningOfMinute = exports.beginningOfHour = exports.beginningOfDay = exports.asUnit = exports.as = exports.andThenEither = exports.andThen = exports.addDelayEffect = exports.addDelay = exports.ScheduleTypeId = exports.ScheduleDriverTypeId = void 0;
exports.retryWhile_Effect = exports.retryWhileEffect_Effect = exports.retryUntil_Effect = exports.retryUntilEffect_Effect = exports.retryOrElse_Effect = exports.retryN_Effect = exports.resetWhen = exports.resetAfter = exports.repetitions = exports.repeat_Effect = exports.repeatWhile_Effect = exports.repeatWhileEffect_Effect = exports.repeatUntil_Effect = exports.repeatUntilEffect_Effect = exports.repeatOrElse_Effect = exports.repeatForever = exports.reduceEffect = exports.reduce = exports.recurs = exports.recurWhileEffect = exports.recurWhile = exports.recurUpTo = exports.recurUntilOption = exports.recurUntilEffect = exports.recurUntil = exports.provideService = exports.provideContext = exports.passthrough = exports.once = exports.onDecision = exports.nextSecond = exports.nextMinute = exports.nextHour = exports.nextDayOfMonth = exports.nextDay = exports.modifyDelayEffect = exports.modifyDelay = exports.minuteOfHour = exports.mapInputEffect = exports.mapInputContext = exports.mapInput = exports.mapEffect = exports.mapBothEffect = exports.mapBoth = exports.map = exports.makeWithState = exports.linear = exports.jitteredWith = exports.jittered = exports.intersectWith = exports.intersect = exports.identity = exports.hourOfDay = exports.fromFunction = exports.fromDelays = exports.fromDelay = exports.forever = exports.fixed = exports.findNextMonth = exports.fibonacci = exports.exponential = exports.ensuring = exports.endOfSecond = exports.endOfMinute = exports.endOfHour = exports.endOfDay = exports.elapsed = exports.eitherWith = exports.either = exports.duration = exports.driver = exports.delays = exports.delayedSchedule = exports.delayedEffect = exports.delayed = exports.dayOfWeek = exports.dayOfMonth = exports.count = exports.compose = exports.collectWhileEffect = exports.collectWhile = exports.collectUntilEffect = exports.collectUntil = exports.collectAllOutputs = exports.collectAllInputs = exports.checkEffect = exports.check = exports.bothInOut = exports.beginningOfSecond = exports.beginningOfMinute = exports.beginningOfHour = exports.beginningOfDay = exports.asUnit = exports.as = exports.andThenEither = exports.andThen = exports.addDelayEffect = exports.addDelay = exports.ScheduleTypeId = exports.ScheduleDriverTypeId = void 0;
exports.zipWith = exports.zipRight = exports.zipLeft = exports.windowed = exports.whileOutputEffect = exports.whileOutput = exports.whileInputEffect = exports.whileInput = exports.upTo = exports.untilOutputEffect = exports.untilOutput = exports.untilInputEffect = exports.untilInput = exports.unionWith = exports.union = exports.unfold = exports.tapOutput = exports.tapInput = exports.sync = exports.succeed = exports.stop = exports.spaced = exports.secondOfMinute = exports.schedule_Effect = exports.scheduleFrom_Effect = exports.run = exports.retry_Effect = void 0;

@@ -162,11 +162,11 @@ var Chunk = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/data/Chunk"));

exports.compose = compose;
const contramap = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => contramapEffect(self, input2 => core.sync(() => f(input2))));
const mapInput = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => mapInputEffect(self, input2 => core.sync(() => f(input2))));
/** @internal */
exports.contramap = contramap;
const contramapContext = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => makeWithState(self.initial, (now, input, state) => core.contramapContext(self.step(now, input, state), f)));
exports.mapInput = mapInput;
const mapInputContext = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => makeWithState(self.initial, (now, input, state) => core.mapInputContext(self.step(now, input, state), f)));
/** @internal */
exports.contramapContext = contramapContext;
const contramapEffect = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => makeWithState(self.initial, (now, input2, state) => core.flatMap(f(input2), input => self.step(now, input, state))));
exports.mapInputContext = mapInputContext;
const mapInputEffect = /*#__PURE__*/(0, _Function.dual)(2, (self, f) => makeWithState(self.initial, (now, input2, state) => core.flatMap(f(input2), input => self.step(now, input, state))));
/** @internal */
exports.contramapEffect = contramapEffect;
exports.mapInputEffect = mapInputEffect;
const dayOfMonth = day => {

@@ -221,14 +221,14 @@ return makeWithState([Number.NEGATIVE_INFINITY, 0], (now, _, state) => {

exports.delays = delays;
const dimap = /*#__PURE__*/(0, _Function.dual)(2, (self, {
const mapBoth = /*#__PURE__*/(0, _Function.dual)(2, (self, {
onInput,
onOutput
}) => map(contramap(self, onInput), onOutput));
}) => map(mapInput(self, onInput), onOutput));
/** @internal */
exports.dimap = dimap;
const dimapEffect = /*#__PURE__*/(0, _Function.dual)(2, (self, {
exports.mapBoth = mapBoth;
const mapBothEffect = /*#__PURE__*/(0, _Function.dual)(2, (self, {
onInput,
onOutput
}) => mapEffect(contramapEffect(self, onInput), onOutput));
}) => mapEffect(mapInputEffect(self, onInput), onOutput));
/** @internal */
exports.dimapEffect = dimapEffect;
exports.mapBothEffect = mapBothEffect;
const driver = self => core.map(ref => new ScheduleDriverImpl(self, ref))(ref.make([Option.none(), self.initial]));

@@ -235,0 +235,0 @@ /** @internal */

@@ -199,3 +199,3 @@ "use strict";

invalidateAll() {
return fiberRuntime.forEachParUnboundedDiscard(HashSet.fromIterable(Array.from(this.cacheState.map).map(([key]) => key)), key => this.invalidate(key));
return fiberRuntime.forEachParUnboundedDiscard(HashSet.fromIterable(Array.from(this.cacheState.map).map(([key]) => key)), key => this.invalidate(key), false);
}

@@ -375,3 +375,3 @@ refresh(key) {

ensureMapSizeNotExceeded(key) {
return fiberRuntime.forEachParUnboundedDiscard(this.trackAccess(key), cleanedMapValue => this.cleanMapValue(cleanedMapValue));
return fiberRuntime.forEachParUnboundedDiscard(this.trackAccess(key), cleanedMapValue => this.cleanMapValue(cleanedMapValue), false);
}

@@ -378,0 +378,0 @@ }

@@ -39,3 +39,3 @@ "use strict";

return core.as(scopedRef)(fiberRuntime.addFinalizer(() => close(scopedRef)));
})(circular.makeSynchronized([newScope, value])))(core.onError(cause => newScope.close(core.exitFail(cause)))(restore(core.contramapContext(Context.add(fiberRuntime.scopeTag, newScope))(acquire)))))(fiberRuntime.scopeMake()));
})(circular.makeSynchronized([newScope, value])))(core.onError(cause => newScope.close(core.exitFail(cause)))(restore(core.mapInputContext(Context.add(fiberRuntime.scopeTag, newScope))(acquire)))))(fiberRuntime.scopeMake()));
/** @internal */

@@ -52,4 +52,4 @@ exports.fromAcquire = fromAcquire;

onSuccess: value => core.as([core.unit, [newScope, value]])(effect.ignore(oldScope.close(core.exitUnit)))
}))(core.exit(restore(core.contramapContext(Context.add(fiberRuntime.scopeTag, newScope))(acquire)))))(fiberRuntime.scopeMake())))));
}))(core.exit(restore(core.mapInputContext(Context.add(fiberRuntime.scopeTag, newScope))(acquire)))))(fiberRuntime.scopeMake())))));
exports.set = set;
//# sourceMappingURL=scopedRef.js.map

@@ -45,3 +45,3 @@ "use strict";

{
return core.map(SortedSet.filter(fiber => !Equal.equals(fiber.id(), descriptor.id)))(core.map(RA.reduce(SortedSet.empty(fiber.Order), (a, b) => SortedSet.union(a, b)))(core.forEach(ref => core.sync(() => MutableRef.get(ref)))(either.right)));
return core.map(SortedSet.filter(fiber => !Equal.equals(fiber.id(), descriptor.id)))(core.map(RA.reduce(SortedSet.empty(fiber.Order), (a, b) => SortedSet.union(a, b)))(core.forEachSequential(ref => core.sync(() => MutableRef.get(ref)))(either.right)));
}

@@ -48,0 +48,0 @@ }

@@ -134,3 +134,3 @@ "use strict";

return effect => fiberRuntime.zipLeftOptions(effect, this.adjust(duration), {
parallel: true
concurrent: true
});

@@ -137,0 +137,0 @@ }

@@ -25,3 +25,3 @@ /**

*/
export interface KeyedPool<K, E, A> extends KeyedPool.Variance<K, E, A>, Pipeable<KeyedPool<K, E, A>> {
export interface KeyedPool<K, E, A> extends KeyedPool.Variance<K, E, A>, Pipeable {
/**

@@ -28,0 +28,0 @@ * Retrieves an item from the pool belonging to the given key in a scoped

@@ -43,3 +43,3 @@ /**

*/
export interface Layer<RIn, E, ROut> extends Layer.Variance<RIn, E, ROut>, Pipeable<Layer<RIn, E, ROut>> {
export interface Layer<RIn, E, ROut> extends Layer.Variance<RIn, E, ROut>, Pipeable {
}

@@ -46,0 +46,0 @@ /**

@@ -31,3 +31,3 @@ /**

*/
export interface Logger<Message, Output> extends Logger.Variance<Message, Output>, Pipeable<Logger<Message, Output>> {
export interface Logger<Message, Output> extends Logger.Variance<Message, Output>, Pipeable {
readonly log: (options: {

@@ -92,3 +92,3 @@ readonly fiberId: FiberId.FiberId;

*/
export declare const contramap: {
export declare const mapInput: {
<Message, Message2>(f: (message: Message2) => Message): <Output>(self: Logger<Message, Output>) => Logger<Message2, Output>;

@@ -95,0 +95,0 @@ <Output, Message, Message2>(self: Logger<Message, Output>, f: (message: Message2) => Message): Logger<Message2, Output>;

@@ -6,3 +6,3 @@ "use strict";

});
exports.zipRight = exports.zipLeft = exports.zip = exports.withMinimumLogLevel = exports.tracerLogger = exports.test = exports.sync = exports.succeed = exports.stringLogger = exports.simple = exports.replaceScoped = exports.replaceEffect = exports.replace = exports.remove = exports.none = exports.minimumLogLevel = exports.map = exports.make = exports.logfmtLogger = exports.logFmt = exports.filterLogLevel = exports.defaultLogger = exports.contramap = exports.addScoped = exports.addEffect = exports.add = exports.LoggerTypeId = void 0;
exports.zipRight = exports.zipLeft = exports.zip = exports.withMinimumLogLevel = exports.tracerLogger = exports.test = exports.sync = exports.succeed = exports.stringLogger = exports.simple = exports.replaceScoped = exports.replaceEffect = exports.replace = exports.remove = exports.none = exports.minimumLogLevel = exports.mapInput = exports.map = exports.make = exports.logfmtLogger = exports.logFmt = exports.filterLogLevel = exports.defaultLogger = exports.addScoped = exports.addEffect = exports.add = exports.LoggerTypeId = void 0;
var fiberRuntime = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/io/internal/fiberRuntime"));

@@ -48,3 +48,3 @@ var circular = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/io/internal/layer/circular"));

exports.addScoped = addScoped;
const contramap = internal.contramap;
const mapInput = internal.mapInput;
/**

@@ -57,3 +57,3 @@ * Returns a version of this logger that only logs messages when the log level

*/
exports.contramap = contramap;
exports.mapInput = mapInput;
const filterLogLevel = internal.filterLogLevel;

@@ -60,0 +60,0 @@ /**

@@ -16,3 +16,3 @@ import * as order from "@effect/data/Order";

*/
export type LogLevel = (All | Fatal | Error | Warning | Info | Debug | Trace | None) & Pipeable<LogLevel>;
export type LogLevel = All | Fatal | Error | Warning | Info | Debug | Trace | None;
/**

@@ -27,3 +27,3 @@ * @since 1.0.0

*/
export interface All extends Pipeable<All> {
export interface All extends Pipeable {
readonly _tag: "All";

@@ -38,3 +38,3 @@ readonly label: "ALL";

*/
export interface Fatal extends Pipeable<Fatal> {
export interface Fatal extends Pipeable {
readonly _tag: "Fatal";

@@ -49,3 +49,3 @@ readonly label: "FATAL";

*/
export interface Error extends Pipeable<Error> {
export interface Error extends Pipeable {
readonly _tag: "Error";

@@ -60,3 +60,3 @@ readonly label: "ERROR";

*/
export interface Warning extends Pipeable<Warning> {
export interface Warning extends Pipeable {
readonly _tag: "Warning";

@@ -71,3 +71,3 @@ readonly label: "WARN";

*/
export interface Info extends Pipeable<Info> {
export interface Info extends Pipeable {
readonly _tag: "Info";

@@ -82,3 +82,3 @@ readonly label: "INFO";

*/
export interface Debug extends Pipeable<Debug> {
export interface Debug extends Pipeable {
readonly _tag: "Debug";

@@ -93,3 +93,3 @@ readonly label: "DEBUG";

*/
export interface Trace extends Pipeable<Trace> {
export interface Trace extends Pipeable {
readonly _tag: "Trace";

@@ -104,3 +104,3 @@ readonly label: "TRACE";

*/
export interface None extends Pipeable<None> {
export interface None extends Pipeable {
readonly _tag: "None";

@@ -107,0 +107,0 @@ readonly label: "OFF";

@@ -47,3 +47,3 @@ /**

*/
export interface Metric<Type, In, Out> extends Metric.Variance<Type, In, Out>, Pipeable<Metric<Type, In, Out>> {
export interface Metric<Type, In, Out> extends Metric.Variance<Type, In, Out>, Pipeable {
/**

@@ -130,3 +130,3 @@ * The type of the underlying primitive metric. For example, this could be

*/
export declare const contramap: {
export declare const mapInput: {
<In, In2>(f: (input: In2) => In): <Type, Out>(self: Metric<Type, In, Out>) => Metric<Type, In2, Out>;

@@ -133,0 +133,0 @@ <Type, In, Out, In2>(self: Metric<Type, In, Out>, f: (input: In2) => In): Metric<Type, In2, Out>;
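
A minimal sketch of the renamed combinator at a call site (formerly Metric.contramap); the Response shape and the metric name are hypothetical, and the public module path is assumed to be "@effect/io/Metric":

import * as Metric from "@effect/io/Metric"

// `Response` is a hypothetical shape used only for illustration.
interface Response {
  readonly status: number
}

const errorCounter = Metric.counter("http_errors")

// Formerly Metric.contramap: feed the numeric counter from Response values,
// counting 1 for server errors and 0 otherwise.
const errorsFromResponses = Metric.mapInput(
  errorCounter,
  (res: Response) => (res.status >= 500 ? 1 : 0)
)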

@@ -6,3 +6,3 @@ "use strict";

});
exports.zip = exports.withNow = exports.withConstantInput = exports.value = exports.update = exports.unsafeSnapshot = exports.trackSuccessWith = exports.trackSuccess = exports.trackErrorWith = exports.trackError = exports.trackDurationWith = exports.trackDuration = exports.trackDefectWith = exports.trackDefect = exports.trackAll = exports.timerWithBoundaries = exports.timer = exports.taggedWithLabelsInput = exports.taggedWithLabels = exports.tagged = exports.sync = exports.summaryTimestamp = exports.summary = exports.succeed = exports.snapshot = exports.set = exports.mapType = exports.map = exports.make = exports.incrementBy = exports.increment = exports.histogram = exports.globalMetricRegistry = exports.gauge = exports.fromMetricKey = exports.frequency = exports.fiberSuccesses = exports.fiberStarted = exports.fiberLifetimes = exports.fiberFailures = exports.fiberActive = exports.counter = exports.contramap = exports.MetricTypeId = void 0;
exports.zip = exports.withNow = exports.withConstantInput = exports.value = exports.update = exports.unsafeSnapshot = exports.trackSuccessWith = exports.trackSuccess = exports.trackErrorWith = exports.trackError = exports.trackDurationWith = exports.trackDuration = exports.trackDefectWith = exports.trackDefect = exports.trackAll = exports.timerWithBoundaries = exports.timer = exports.taggedWithLabelsInput = exports.taggedWithLabels = exports.tagged = exports.sync = exports.summaryTimestamp = exports.summary = exports.succeed = exports.snapshot = exports.set = exports.mapType = exports.mapInput = exports.map = exports.make = exports.incrementBy = exports.increment = exports.histogram = exports.globalMetricRegistry = exports.gauge = exports.fromMetricKey = exports.frequency = exports.fiberSuccesses = exports.fiberStarted = exports.fiberLifetimes = exports.fiberFailures = exports.fiberActive = exports.counter = exports.MetricTypeId = void 0;
var fiberRuntime = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/io/internal/fiberRuntime"));

@@ -38,3 +38,3 @@ var internal = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/io/internal/metric"));

exports.make = make;
const contramap = internal.contramap;
const mapInput = internal.mapInput;
/**

@@ -46,3 +46,3 @@ * A counter, which can be incremented by numbers.

*/
exports.contramap = contramap;
exports.mapInput = mapInput;
const counter = internal.counter;

@@ -49,0 +49,0 @@ /**

@@ -21,3 +21,3 @@ /**

*/
export interface MetricBoundaries extends Equal.Equal, Pipeable<MetricBoundaries> {
export interface MetricBoundaries extends Equal.Equal, Pipeable {
readonly [MetricBoundariesTypeId]: MetricBoundariesTypeId;

@@ -24,0 +24,0 @@ readonly values: Chunk.Chunk<number>;

@@ -22,3 +22,3 @@ /**

*/
export interface MetricHook<In, Out> extends MetricHook.Variance<In, Out>, Pipeable<MetricHook<In, Out>> {
export interface MetricHook<In, Out> extends MetricHook.Variance<In, Out>, Pipeable {
readonly get: () => Out;

@@ -25,0 +25,0 @@ readonly update: (input: In) => void;

@@ -33,3 +33,3 @@ /**

*/
export interface MetricKey<Type extends MetricKeyType.MetricKeyType<any, any>> extends MetricKey.Variance<Type>, Equal.Equal, Pipeable<MetricKey<Type>> {
export interface MetricKey<Type extends MetricKeyType.MetricKeyType<any, any>> extends MetricKey.Variance<Type>, Equal.Equal, Pipeable {
readonly name: string;

@@ -36,0 +36,0 @@ readonly keyType: Type;

@@ -74,3 +74,3 @@ /**

*/
export interface MetricKeyType<In, Out> extends MetricKeyType.Variance<In, Out>, Equal.Equal, Pipeable<MetricKeyType<In, Out>> {
export interface MetricKeyType<In, Out> extends MetricKeyType.Variance<In, Out>, Equal.Equal, Pipeable {
}

@@ -77,0 +77,0 @@ /**

@@ -27,3 +27,3 @@ /**

*/
export interface MetricLabel extends Equal.Equal, Pipeable<MetricLabel> {
export interface MetricLabel extends Equal.Equal, Pipeable {
readonly [MetricLabelTypeId]: MetricLabelTypeId;

@@ -30,0 +30,0 @@ readonly key: string;

@@ -22,3 +22,3 @@ /**

*/
export interface MetricPair<Type extends MetricKeyType.MetricKeyType<any, any>> extends MetricPair.Variance<Type>, Pipeable<MetricPair<Type>> {
export interface MetricPair<Type extends MetricKeyType.MetricKeyType<any, any>> extends MetricPair.Variance<Type>, Pipeable {
readonly metricKey: MetricKey.MetricKey<Type>;

@@ -25,0 +25,0 @@ readonly metricState: MetricState.MetricState<MetricKeyType.MetricKeyType.OutType<Type>>;

@@ -27,3 +27,3 @@ /**

*/
export interface PollingMetric<Type, In, R, E, Out> extends Pipeable<PollingMetric<Type, In, R, E, Out>> {
export interface PollingMetric<Type, In, R, E, Out> extends Pipeable {
readonly [PollingMetricTypeId]: PollingMetricTypeId;

@@ -30,0 +30,0 @@ /**

@@ -78,3 +78,3 @@ /**

*/
export interface MetricState<A> extends MetricState.Variance<A>, Equal.Equal, Pipeable<MetricState<A>> {
export interface MetricState<A> extends MetricState.Variance<A>, Equal.Equal, Pipeable {
}

@@ -81,0 +81,0 @@ /**

{
"name": "@effect/io",
"version": "0.31.4",
"version": "0.32.0",
"license": "MIT",

@@ -10,3 +10,3 @@ "repository": {

"dependencies": {
"@effect/data": "^0.13.5"
"@effect/data": "^0.14.1"
},

@@ -13,0 +13,0 @@ "publishConfig": {

@@ -27,3 +27,3 @@ /**

*/
export interface Pool<E, A> extends Data.Case, Pool.Variance<E, A>, Pipeable<Pool<E, A>> {
export interface Pool<E, A> extends Data.Case, Pool.Variance<E, A>, Pipeable {
/**

@@ -30,0 +30,0 @@ * Retrieves an item from the pool in a scoped effect. Note that if

@@ -8,3 +8,3 @@ /**

import type * as Option from "@effect/data/Option";
import type { Pipeable, PipeableOverride } from "@effect/data/Pipeable";
import type { Pipeable } from "@effect/data/Pipeable";
import type * as Deferred from "@effect/io/Deferred";

@@ -46,3 +46,3 @@ import type * as Effect from "@effect/io/Effect";

*/
export interface Queue<A> extends PipeableOverride<Enqueue<A>, Queue<A>>, PipeableOverride<Dequeue<A>, Queue<A>> {
export interface Queue<A> extends Enqueue<A>, Dequeue<A>, Pipeable {
}

@@ -53,3 +53,3 @@ /**

*/
export interface Enqueue<A> extends Queue.EnqueueVariance<A>, BaseQueue, Pipeable<Enqueue<A>> {
export interface Enqueue<A> extends Queue.EnqueueVariance<A>, BaseQueue, Pipeable {
/**

@@ -84,3 +84,3 @@ * Places one value in the queue.

*/
export interface Dequeue<A> extends Queue.DequeueVariance<A>, BaseQueue, Pipeable<Dequeue<A>> {
export interface Dequeue<A> extends Queue.DequeueVariance<A>, BaseQueue, Pipeable {
/**

@@ -87,0 +87,0 @@ * Takes the oldest value in the queue. If the queue is empty, this will return

@@ -21,3 +21,3 @@ /**

*/
export interface Ref<A> extends Ref.Variance<A>, Pipeable<Ref<A>> {
export interface Ref<A> extends Ref.Variance<A>, Pipeable {
modify<B>(f: (a: A) => readonly [B, A]): Effect.Effect<never, never, B>;

@@ -24,0 +24,0 @@ }

@@ -105,3 +105,3 @@ /**

*/
export declare const contramapContext: <R0, R>(self: RequestBlock<R>, f: (context: Context.Context<R0>) => Context.Context<R>) => RequestBlock<R0>;
export declare const mapInputContext: <R0, R>(self: RequestBlock<R>, f: (context: Context.Context<R0>) => Context.Context<R>) => RequestBlock<R0>;
/**

@@ -108,0 +108,0 @@ * Provides each data source with a fiber ref value.

@@ -6,3 +6,3 @@ "use strict";

});
exports.single = exports.sequential = exports.reduce = exports.parallel = exports.mapRequestResolvers = exports.locally = exports.empty = exports.contramapContext = void 0;
exports.single = exports.sequential = exports.reduce = exports.parallel = exports.mapRequestResolvers = exports.mapInputContext = exports.locally = exports.empty = void 0;
var _RequestBlock = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/io/internal/blockedRequests"));

@@ -55,9 +55,9 @@ var core = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/io/internal/core"));

exports.sequential = sequential;
const contramapContext = (self, f) => reduce(self, ContramapContextReducer(f));
exports.contramapContext = contramapContext;
const ContramapContextReducer = f => ({
const mapInputContext = (self, f) => reduce(self, MapInputContextReducer(f));
exports.mapInputContext = mapInputContext;
const MapInputContextReducer = f => ({
emptyCase: () => empty,
parCase: (left, right) => parallel(left, right),
seqCase: (left, right) => sequential(left, right),
singleCase: (dataSource, blockedRequest) => single(_dataSource.contramapContext(dataSource, f), blockedRequest)
singleCase: (dataSource, blockedRequest) => single(_dataSource.mapInputContext(dataSource, f), blockedRequest)
});

@@ -64,0 +64,0 @@ /**

@@ -45,3 +45,3 @@ /**

*/
export interface RequestResolver<A, R = never> extends Equal.Equal, Pipeable<RequestResolver<A, R>> {
export interface RequestResolver<A, R = never> extends Equal.Equal, Pipeable {
/**

@@ -141,3 +141,3 @@ * Execute a collection of requests. The outer `Chunk` represents batches

*/
export declare const contramapContext: {
export declare const mapInputContext: {
<R0, R>(f: (context: Context.Context<R0>) => Context.Context<R>): <A extends Request.Request<any, any>>(self: RequestResolver<A, R>) => RequestResolver<A, R0>;

@@ -144,0 +144,0 @@ <R, A extends Request.Request<any, any>, R0>(self: RequestResolver<A, R>, f: (context: Context.Context<R0>) => Context.Context<R>): RequestResolver<A, R0>;

@@ -6,3 +6,3 @@ "use strict";

});
exports.race = exports.provideContext = exports.never = exports.makeWithEntry = exports.makeBatched = exports.make = exports.locally = exports.isRequestResolver = exports.fromFunctionEffect = exports.fromFunctionBatched = exports.fromFunction = exports.eitherWith = exports.contramapContext = exports.contextFromServices = exports.contextFromEffect = exports.batchN = exports.around = exports.RequestResolverTypeId = void 0;
exports.race = exports.provideContext = exports.never = exports.mapInputContext = exports.makeWithEntry = exports.makeBatched = exports.make = exports.locally = exports.isRequestResolver = exports.fromFunctionEffect = exports.fromFunctionBatched = exports.fromFunction = exports.eitherWith = exports.contextFromServices = exports.contextFromEffect = exports.batchN = exports.around = exports.RequestResolverTypeId = void 0;
var Context = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/data/Context"));

@@ -94,3 +94,3 @@ var Effect = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/io/Effect"));

exports.batchN = batchN;
const contramapContext = internal.contramapContext;
const mapInputContext = internal.mapInputContext;
/**

@@ -104,3 +104,3 @@ * Returns a new data source that executes requests of type `C` using the

*/
exports.contramapContext = contramapContext;
exports.mapInputContext = mapInputContext;
const eitherWith = internal.eitherWith;

@@ -107,0 +107,0 @@ /**

@@ -33,3 +33,3 @@ /**

*/
export interface Runtime<R> extends Pipeable<Runtime<R>> {
export interface Runtime<R> extends Pipeable {
/**

@@ -159,11 +159,2 @@ * The context used as initial for forks

* @since 1.0.0
* @category models
*/
export interface FiberFailure extends Error {
readonly [FiberFailureId]: FiberFailureId;
readonly [FiberFailureCauseId]: Cause<unknown>;
readonly [NodePrint]: () => string;
}
/**
* @since 1.0.0
* @category symbols

@@ -179,2 +170,11 @@ */

* @since 1.0.0
* @category models
*/
export interface FiberFailure extends Error {
readonly [FiberFailureId]: FiberFailureId;
readonly [FiberFailureCauseId]: Cause<unknown>;
readonly [NodePrint]: () => string;
}
/**
* @since 1.0.0
* @category guards

@@ -181,0 +181,0 @@ */

@@ -64,3 +64,3 @@ /**

*/
export interface Schedule<Env, In, Out> extends Schedule.Variance<Env, In, Out>, Pipeable<Schedule<Env, In, Out>> {
export interface Schedule<Env, In, Out> extends Schedule.Variance<Env, In, Out>, Pipeable {
/**

@@ -277,3 +277,3 @@ * Initial State

*/
export declare const contramap: {
export declare const mapInput: {
<In, In2>(f: (in2: In2) => In): <Env, Out>(self: Schedule<Env, In, Out>) => Schedule<Env, In2, Out>;

@@ -289,3 +289,3 @@ <Env, In, Out, In2>(self: Schedule<Env, In, Out>, f: (in2: In2) => In): Schedule<Env, In2, Out>;

*/
export declare const contramapContext: {
export declare const mapInputContext: {
<Env0, Env>(f: (env0: Context.Context<Env0>) => Context.Context<Env>): <In, Out>(self: Schedule<Env, In, Out>) => Schedule<Env0, In, Out>;

@@ -301,3 +301,3 @@ <Env0, Env, In, Out>(self: Schedule<Env, In, Out>, f: (env0: Context.Context<Env0>) => Context.Context<Env>): Schedule<Env0, In, Out>;

*/
export declare const contramapEffect: {
export declare const mapInputEffect: {
<In, Env2, In2>(f: (in2: In2) => Effect.Effect<Env2, never, In>): <Env, Out>(self: Schedule<Env, In, Out>) => Schedule<Env2 | Env, In2, Out>;

@@ -374,3 +374,3 @@ <Env, In, Out, Env2, In2>(self: Schedule<Env, In, Out>, f: (in2: In2) => Effect.Effect<Env2, never, In>): Schedule<Env | Env2, In2, Out>;

/**
* Returns a new schedule that contramaps the input and maps the output.
* Returns a new schedule that maps both the input and output.
*

@@ -380,3 +380,3 @@ * @since 1.0.0

*/
export declare const dimap: {
export declare const mapBoth: {
<In, Out, In2, Out2>(options: {

@@ -392,3 +392,3 @@ readonly onInput: (in2: In2) => In;

/**
* Returns a new schedule that contramaps the input and maps the output.
* Returns a new schedule that maps both the input and output.
*

@@ -398,3 +398,3 @@ * @since 1.0.0

*/
export declare const dimapEffect: {
export declare const mapBothEffect: {
<In2, Env2, In, Out, Env3, Out2>(options: {

@@ -401,0 +401,0 @@ readonly onInput: (input: In2) => Effect.Effect<Env2, never, In>;
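
A minimal sketch of the renamed Schedule combinators (formerly contramap and dimap); the RetryInfo type is hypothetical:

import * as Duration from "@effect/data/Duration"
import * as Schedule from "@effect/io/Schedule"

// `RetryInfo` is a hypothetical input type used only for illustration.
interface RetryInfo {
  readonly attempt: number
}

const everySecond = Schedule.spaced(Duration.seconds(1))

// Formerly Schedule.contramap: adapt the schedule to a narrower input.
const fromRetryInfo = Schedule.mapInput(everySecond, (info: RetryInfo) => info.attempt)

// Formerly Schedule.dimap: transform the input and the output together.
const labelled = Schedule.mapBoth(everySecond, {
  onInput: (info: RetryInfo) => info.attempt,
  onOutput: (n) => `repetition #${n}`
})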

@@ -6,3 +6,3 @@ "use strict";

});
exports.zipWith = exports.zipRight = exports.zipLeft = exports.windowed = exports.whileOutputEffect = exports.whileOutput = exports.whileInputEffect = exports.whileInput = exports.upTo = exports.untilOutputEffect = exports.untilOutput = exports.untilInputEffect = exports.untilInput = exports.unionWith = exports.union = exports.unfold = exports.tapOutput = exports.tapInput = exports.sync = exports.succeed = exports.stop = exports.spaced = exports.secondOfMinute = exports.run = exports.resetWhen = exports.resetAfter = exports.repetitions = exports.repeatForever = exports.reduceEffect = exports.reduce = exports.recurs = exports.recurWhileEffect = exports.recurWhile = exports.recurUpTo = exports.recurUntilOption = exports.recurUntilEffect = exports.recurUntil = exports.provideService = exports.provideContext = exports.passthrough = exports.once = exports.onDecision = exports.modifyDelayEffect = exports.modifyDelay = exports.minuteOfHour = exports.mapEffect = exports.map = exports.makeWithState = exports.linear = exports.jitteredWith = exports.jittered = exports.intersectWith = exports.intersect = exports.identity = exports.hourOfDay = exports.fromFunction = exports.fromDelays = exports.fromDelay = exports.forever = exports.fixed = exports.fibonacci = exports.exponential = exports.ensuring = exports.elapsed = exports.eitherWith = exports.either = exports.duration = exports.driver = exports.dimapEffect = exports.dimap = exports.delays = exports.delayedSchedule = exports.delayedEffect = exports.delayed = exports.dayOfWeek = exports.dayOfMonth = exports.count = exports.contramapEffect = exports.contramapContext = exports.contramap = exports.compose = exports.collectWhileEffect = exports.collectWhile = exports.collectUntilEffect = exports.collectUntil = exports.collectAllOutputs = exports.collectAllInputs = exports.checkEffect = exports.check = exports.bothInOut = exports.asUnit = exports.as = exports.andThenEither = exports.andThen = exports.addDelayEffect = exports.addDelay = exports.ScheduleTypeId = exports.ScheduleDriverTypeId = void 0;
exports.zipWith = exports.zipRight = exports.zipLeft = exports.windowed = exports.whileOutputEffect = exports.whileOutput = exports.whileInputEffect = exports.whileInput = exports.upTo = exports.untilOutputEffect = exports.untilOutput = exports.untilInputEffect = exports.untilInput = exports.unionWith = exports.union = exports.unfold = exports.tapOutput = exports.tapInput = exports.sync = exports.succeed = exports.stop = exports.spaced = exports.secondOfMinute = exports.run = exports.resetWhen = exports.resetAfter = exports.repetitions = exports.repeatForever = exports.reduceEffect = exports.reduce = exports.recurs = exports.recurWhileEffect = exports.recurWhile = exports.recurUpTo = exports.recurUntilOption = exports.recurUntilEffect = exports.recurUntil = exports.provideService = exports.provideContext = exports.passthrough = exports.once = exports.onDecision = exports.modifyDelayEffect = exports.modifyDelay = exports.minuteOfHour = exports.mapInputEffect = exports.mapInputContext = exports.mapInput = exports.mapEffect = exports.mapBothEffect = exports.mapBoth = exports.map = exports.makeWithState = exports.linear = exports.jitteredWith = exports.jittered = exports.intersectWith = exports.intersect = exports.identity = exports.hourOfDay = exports.fromFunction = exports.fromDelays = exports.fromDelay = exports.forever = exports.fixed = exports.fibonacci = exports.exponential = exports.ensuring = exports.elapsed = exports.eitherWith = exports.either = exports.duration = exports.driver = exports.delays = exports.delayedSchedule = exports.delayedEffect = exports.delayed = exports.dayOfWeek = exports.dayOfMonth = exports.count = exports.compose = exports.collectWhileEffect = exports.collectWhile = exports.collectUntilEffect = exports.collectUntil = exports.collectAllOutputs = exports.collectAllInputs = exports.checkEffect = exports.check = exports.bothInOut = exports.asUnit = exports.as = exports.andThenEither = exports.andThen = exports.addDelayEffect = exports.addDelay = exports.ScheduleTypeId = exports.ScheduleDriverTypeId = void 0;
var internal = /*#__PURE__*/_interopRequireWildcard( /*#__PURE__*/require("@effect/io/internal/schedule"));

@@ -182,3 +182,3 @@ function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }

exports.compose = compose;
const contramap = internal.contramap;
const mapInput = internal.mapInput;
/**

@@ -191,4 +191,4 @@ * Transforms the context being provided to this schedule with the

*/
exports.contramap = contramap;
const contramapContext = internal.contramapContext;
exports.mapInput = mapInput;
const mapInputContext = internal.mapInputContext;
/**

@@ -201,4 +201,4 @@ * Returns a new schedule that deals with a narrower class of inputs than this

*/
exports.contramapContext = contramapContext;
const contramapEffect = internal.contramapEffect;
exports.mapInputContext = mapInputContext;
const mapInputEffect = internal.mapInputEffect;
/**

@@ -210,3 +210,3 @@ * A schedule that always recurs, which counts the number of recurrences.

*/
exports.contramapEffect = contramapEffect;
exports.mapInputEffect = mapInputEffect;
const count = internal.count;

@@ -274,3 +274,3 @@ /**

/**
* Returns a new schedule that contramaps the input and maps the output.
* Returns a new schedule that maps both the input and output.
*

@@ -281,5 +281,5 @@ * @since 1.0.0

exports.delays = delays;
const dimap = internal.dimap;
const mapBoth = internal.mapBoth;
/**
* Returns a new schedule that contramaps the input and maps the output.
* Returns a new schedule that maps both the input and output.
*

@@ -289,4 +289,4 @@ * @since 1.0.0

*/
exports.dimap = dimap;
const dimapEffect = internal.dimapEffect;
exports.mapBoth = mapBoth;
const mapBothEffect = internal.mapBothEffect;
/**

@@ -299,3 +299,3 @@ * Returns a driver that can be used to step the schedule, appropriately

*/
exports.dimapEffect = dimapEffect;
exports.mapBothEffect = mapBothEffect;
const driver = internal.driver;

@@ -302,0 +302,0 @@ /**

@@ -5,3 +5,3 @@ /**

import type * as Context from "@effect/data/Context";
import type { Pipeable, PipeableOverride } from "@effect/data/Pipeable";
import type { Pipeable } from "@effect/data/Pipeable";
import type * as Effect from "@effect/io/Effect";

@@ -34,3 +34,3 @@ import type * as ExecutionStrategy from "@effect/io/ExecutionStrategy";

*/
export interface Scope extends Pipeable<Scope> {
export interface Scope extends Pipeable {
readonly [ScopeTypeId]: ScopeTypeId;

@@ -42,3 +42,3 @@ }

*/
export interface CloseableScope extends PipeableOverride<Scope, CloseableScope> {
export interface CloseableScope extends Scope, Pipeable {
readonly [CloseableScopeTypeId]: CloseableScopeTypeId;

@@ -45,0 +45,0 @@ }

@@ -25,3 +25,3 @@ /**

*/
export interface ScopedCache<Key, Error, Value> extends ScopedCache.Variance<Key, Error, Value>, Pipeable<ScopedCache<Key, Error, Value>> {
export interface ScopedCache<Key, Error, Value> extends ScopedCache.Variance<Key, Error, Value>, Pipeable {
/**

@@ -28,0 +28,0 @@ * Retrieves the value associated with the specified key if it exists.

@@ -28,3 +28,3 @@ /**

*/
export interface ScopedRef<A> extends ScopedRef.Variance<A>, Pipeable<ScopedRef<A>> {
export interface ScopedRef<A> extends ScopedRef.Variance<A>, Pipeable {
}

@@ -31,0 +31,0 @@ /**

@@ -132,12 +132,9 @@ /**

export type Cause<E> =
& (
| Empty
| Fail<E>
| Die
| Interrupt
| Annotated<E>
| Sequential<E>
| Parallel<E>
)
& Pipeable<Cause<E>>
| Empty
| Fail<E>
| Die
| Interrupt
| Annotated<E>
| Sequential<E>
| Parallel<E>

@@ -251,3 +248,3 @@ /**

*/
export interface Empty extends Cause.Variance<never>, Equal.Equal, Pipeable<Empty> {
export interface Empty extends Cause.Variance<never>, Equal.Equal, Pipeable {
readonly _tag: "Empty"

@@ -263,3 +260,3 @@ }

*/
export interface Fail<E> extends Cause.Variance<E>, Equal.Equal, Pipeable<Fail<E>> {
export interface Fail<E> extends Cause.Variance<E>, Equal.Equal, Pipeable {
readonly _tag: "Fail"

@@ -277,3 +274,3 @@ readonly error: E

*/
export interface Die extends Cause.Variance<never>, Equal.Equal, Pipeable<Die> {
export interface Die extends Cause.Variance<never>, Equal.Equal, Pipeable {
readonly _tag: "Die"

@@ -290,3 +287,3 @@ readonly defect: unknown

*/
export interface Interrupt extends Cause.Variance<never>, Equal.Equal, Pipeable<Interrupt> {
export interface Interrupt extends Cause.Variance<never>, Equal.Equal, Pipeable {
readonly _tag: "Interrupt"

@@ -305,3 +302,3 @@ readonly fiberId: FiberId.FiberId

*/
export interface Annotated<E> extends Cause.Variance<E>, Equal.Equal, Pipeable<Annotated<E>> {
export interface Annotated<E> extends Cause.Variance<E>, Equal.Equal, Pipeable {
readonly _tag: "Annotated"

@@ -325,3 +322,3 @@ readonly cause: Cause<E>

*/
export interface Parallel<E> extends Cause.Variance<E>, Equal.Equal, Pipeable<Parallel<E>> {
export interface Parallel<E> extends Cause.Variance<E>, Equal.Equal, Pipeable {
readonly _tag: "Parallel"

@@ -344,3 +341,3 @@ readonly left: Cause<E>

*/
export interface Sequential<E> extends Cause.Variance<E>, Equal.Equal, Pipeable<Sequential<E>> {
export interface Sequential<E> extends Cause.Variance<E>, Equal.Equal, Pipeable {
readonly _tag: "Sequential"

@@ -347,0 +344,0 @@ readonly left: Cause<E>
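
Dropping the Pipeable intersection does not change call sites: each variant interface now extends Pipeable directly, so .pipe is still available on every Cause value. A minimal sketch:

import * as Cause from "@effect/io/Cause"

// Pipeable is now inherited per variant instead of being intersected onto the union.
const rendered: string = Cause.fail("boom").pipe(Cause.pretty)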

@@ -47,3 +47,3 @@ /**

*/
export interface Config<A> extends Config.Variance<A>, Pipeable<Config<A>> {}
export interface Config<A> extends Config.Variance<A>, Pipeable {}

@@ -50,0 +50,0 @@ /**

@@ -45,3 +45,3 @@ /**

*/
export interface ConfigProvider extends ConfigProvider.Proto, Pipeable<ConfigProvider> {
export interface ConfigProvider extends ConfigProvider.Proto, Pipeable {
/**

@@ -197,6 +197,6 @@ * Loads the specified configuration, or fails with a config error.

*/
export const contramapPath: {
export const mapInputPath: {
(f: (path: string) => string): (self: ConfigProvider) => ConfigProvider
(self: ConfigProvider, f: (path: string) => string): ConfigProvider
} = internal.contramapPath
} = internal.mapInputPath

@@ -203,0 +203,0 @@ /**
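
A minimal sketch of the renamed path combinator (formerly contramapPath); the import path and the "MYAPP_" prefix are assumptions for illustration:

// Import path assumed; use whichever module exposes ConfigProvider in your setup.
import * as ConfigProvider from "@effect/io/Config/Provider"

// Formerly ConfigProvider.contramapPath: rewrite configuration paths before lookup,
// here prefixing environment lookups with a hypothetical "MYAPP_" namespace.
const provider = ConfigProvider.mapInputPath(
  ConfigProvider.fromEnv(),
  (path) => `MYAPP_${path}`
)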

@@ -39,3 +39,3 @@ /**

*/
export interface Deferred<E, A> extends Deferred.Variance<E, A>, Pipeable<Deferred<E, A>> {
export interface Deferred<E, A> extends Deferred.Variance<E, A>, Pipeable {
/** @internal */

@@ -42,0 +42,0 @@ readonly state: MutableRef.MutableRef<internal.State<E, A>>

@@ -6,3 +6,3 @@ /**

import type * as Option from "@effect/data/Option"
import type { Pipeable, PipeableOverride } from "@effect/data/Pipeable"
import type { Pipeable } from "@effect/data/Pipeable"
import type { Predicate } from "@effect/data/Predicate"

@@ -25,3 +25,3 @@ import type * as Unify from "@effect/data/Unify"

*/
export type Exit<E, A> = (Failure<E, A> | Success<E, A>) & Pipeable<Exit<E, A>>
export type Exit<E, A> = Failure<E, A> | Success<E, A>

@@ -35,3 +35,3 @@ /**

*/
export interface Failure<E, A> extends PipeableOverride<Effect.Effect<never, E, A>, Failure<E, A>> {
export interface Failure<E, A> extends Effect.Effect<never, E, A>, Pipeable {
readonly _tag: "Failure"

@@ -69,3 +69,3 @@ readonly cause: Cause.Cause<E>

*/
export interface Success<E, A> extends PipeableOverride<Effect.Effect<never, E, A>, Success<E, A>> {
export interface Success<E, A> extends Effect.Effect<never, E, A>, Pipeable {
readonly _tag: "Success"

@@ -72,0 +72,0 @@ readonly value: A

@@ -58,3 +58,3 @@ /**

*/
export interface Fiber<E, A> extends Fiber.Variance<E, A>, Pipeable<Fiber<E, A>> {
export interface Fiber<E, A> extends Fiber.Variance<E, A>, Pipeable {
/**

@@ -61,0 +61,0 @@ * The identity of the fiber.

@@ -44,3 +44,3 @@ /**

*/
export interface FiberRef<A> extends Variance<A>, Pipeable<FiberRef<A>> {
export interface FiberRef<A> extends Variance<A>, Pipeable {
/** @internal */

@@ -277,3 +277,3 @@ readonly initial: A

*/
export const currentRequestBatchingEnabled: FiberRef<boolean> = core.currentRequestBatchingEnabled
export const currentRequestBatchingEnabled: FiberRef<boolean> = core.currentRequestBatching

@@ -280,0 +280,0 @@ /**

@@ -34,3 +34,3 @@ /**

*/
export interface FiberRefs extends Pipeable<FiberRefs> {
export interface FiberRefs extends Pipeable {
readonly [FiberRefsSym]: FiberRefsSym

@@ -37,0 +37,0 @@ readonly locals: Map<FiberRef.FiberRef<any>, Arr.NonEmptyReadonlyArray<readonly [FiberId.Runtime, any]>>

/**
* @since 1.0.0
*/
import type { PipeableOverride } from "@effect/data/Pipeable"
import type { Pipeable } from "@effect/data/Pipeable"
import type * as Effect from "@effect/io/Effect"

@@ -18,3 +18,3 @@ import * as internal from "@effect/io/internal/hub"

*/
export interface Hub<A> extends PipeableOverride<Queue.Enqueue<A>, Hub<A>> {
export interface Hub<A> extends Queue.Enqueue<A>, Pipeable {
/**

@@ -21,0 +21,0 @@ * Publishes a message to the hub, returning whether the message was published

@@ -104,6 +104,6 @@ import * as Either from "@effect/data/Either"

): Z => {
let input = List.of(self)
let input: List.List<RequestBlock.RequestBlock<R>> = List.of(self)
let output = List.empty<Either.Either<BlockedRequestsCase, Z>>()
while (List.isCons(input)) {
const current = input.head
const current: RequestBlock.RequestBlock<R> = input.head
switch (current._tag) {

@@ -110,0 +110,0 @@ case "Empty": {

@@ -691,3 +691,3 @@ import * as Context from "@effect/data/Context"

core.map(
fiberRuntime.all(core.context<Environment>(), core.fiberId),
fiberRuntime.all([core.context<Environment>(), core.fiberId]),
([context, fiberId]) =>

@@ -694,0 +694,0 @@ new CacheImpl(

@@ -6,10 +6,16 @@ import type { Concurrency } from "@effect/io/Concurrency"

/** @internal */
export const match = <R, E, A>(
export const match: <R, E, A>(
options: {
readonly concurrency?: Concurrency
readonly batchRequests?: boolean | "inherit"
} | undefined,
sequential: () => Effect<R, E, A>,
unbounded: () => Effect<R, E, A>,
withLimit: (limit: number) => Effect<R, E, A>
bounded: (limit: number) => Effect<R, E, A>
) => Effect<R, E, A> = <R, E, A>(
options: {
readonly concurrency?: Concurrency
} | undefined,
sequential: () => Effect<R, E, A>,
unbounded: () => Effect<R, E, A>,
bounded: (limit: number) => Effect<R, E, A>
) => {

@@ -30,6 +36,6 @@ let effect: Effect<R, E, A>

(concurrency) =>
concurrency._tag === "None" ?
concurrency === "unbounded" ?
unbounded() :
concurrency.value > 1 ?
withLimit(concurrency.value) :
concurrency > 1 ?
bounded(concurrency) :
sequential()

@@ -41,3 +47,3 @@ )

effect = options!.concurrency > 1 ?
withLimit(options!.concurrency) :
bounded(options!.concurrency) :
sequential()

@@ -47,15 +53,18 @@ break

}
return options?.batchRequests !== undefined && options.batchRequests !== "inherit" ?
core.fiberRefLocally(effect, core.currentRequestBatchingEnabled, options.batchRequests) :
effect
return effect
}
/** @internal */
export const matchSimple = <R, E, A>(
export const matchSimple: <R, E, A>(
options: {
readonly concurrency?: Concurrency
readonly batchRequests?: boolean | "inherit"
} | undefined,
sequential: () => Effect<R, E, A>,
parallel: () => Effect<R, E, A>
concurrent: () => Effect<R, E, A>
) => Effect<R, E, A> = <R, E, A>(
options: {
readonly concurrency?: Concurrency
} | undefined,
sequential: () => Effect<R, E, A>,
concurrent: () => Effect<R, E, A>
) => {

@@ -69,3 +78,3 @@ let effect: Effect<R, E, A>

case "unbounded": {
effect = parallel()
effect = concurrent()
break

@@ -77,6 +86,6 @@ }

(concurrency) =>
concurrency._tag === "None" ?
parallel() :
concurrency.value > 1 ?
parallel() :
concurrency === "unbounded" ?
concurrent() :
concurrency > 1 ?
concurrent() :
sequential()

@@ -87,3 +96,3 @@ )

default: {
effect = options!.concurrency > 1 ? parallel() : sequential()
effect = options!.concurrency > 1 ? concurrent() : sequential()
break

@@ -93,5 +102,3 @@ }

return options?.batchRequests !== undefined && options.batchRequests !== "inherit" ?
core.fiberRefLocally(effect, core.currentRequestBatchingEnabled, options.batchRequests) :
effect
return effect
}
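
As the hunk above shows, the Concurrency option is now a plain union (a number, "unbounded", or "inherit") rather than an Option-wrapped value. A minimal sketch against a public collection combinator, assuming it forwards the option in this shape:

import * as Effect from "@effect/io/Effect"

const tasks = [1, 2, 3]

// At most four effects run at the same time.
const bounded = Effect.forEach(tasks, (n) => Effect.succeed(n * 2), { concurrency: 4 })

// No upper bound on concurrently running effects.
const unbounded = Effect.forEach(tasks, (n) => Effect.succeed(n * 2), {
  concurrency: "unbounded"
})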

@@ -274,3 +274,3 @@ import * as Context from "@effect/data/Context"

core.flatMap(
core.forEach((a) =>
core.forEachSequential((a) =>
pipe(

@@ -326,3 +326,3 @@ op.mapOrFail(a),

return pipe(
core.forEach(
core.forEachSequential(
indices,

@@ -354,3 +354,3 @@ (index) => fromFlatLoop(flat, RA.append(prefix, `[${index}]`), op.config, true)

keys,
core.forEach((key) =>
core.forEachSequential((key) =>
fromFlatLoop(

@@ -411,3 +411,3 @@ flat,

RA.zip(rights),
core.forEach(([left, right]) =>
core.forEachSequential(([left, right]) =>
pipe(

@@ -442,8 +442,8 @@ core.zip(left, right),

/** @internal */
export const contramapPath = dual<
export const mapInputPath = dual<
(f: (path: string) => string) => (self: ConfigProvider.ConfigProvider) => ConfigProvider.ConfigProvider,
(self: ConfigProvider.ConfigProvider, f: (path: string) => string) => ConfigProvider.ConfigProvider
>(2, (self, f) => fromFlat(contramapPathFlat(self.flattened, f)))
>(2, (self, f) => fromFlat(mapInputPathFlat(self.flattened, f)))
const contramapPathFlat = (
const mapInputPathFlat = (
self: ConfigProvider.ConfigProvider.Flat,

@@ -555,19 +555,19 @@ f: (path: string) => string

export const constantCase = (self: ConfigProvider.ConfigProvider): ConfigProvider.ConfigProvider =>
contramapPath(self, StringUtils.constantCase)
mapInputPath(self, StringUtils.constantCase)
/** @internal */
export const kebabCase = (self: ConfigProvider.ConfigProvider): ConfigProvider.ConfigProvider =>
contramapPath(self, StringUtils.kebabCase)
mapInputPath(self, StringUtils.kebabCase)
/** @internal */
export const lowerCase = (self: ConfigProvider.ConfigProvider): ConfigProvider.ConfigProvider =>
contramapPath(self, StringUtils.lowerCase)
mapInputPath(self, StringUtils.lowerCase)
/** @internal */
export const snakeCase = (self: ConfigProvider.ConfigProvider): ConfigProvider.ConfigProvider =>
contramapPath(self, StringUtils.snakeCase)
mapInputPath(self, StringUtils.snakeCase)
/** @internal */
export const upperCase = (self: ConfigProvider.ConfigProvider): ConfigProvider.ConfigProvider =>
contramapPath(self, StringUtils.upperCase)
mapInputPath(self, StringUtils.upperCase)

@@ -612,3 +612,3 @@ /** @internal */

splitPathString(text, delimiter),
core.forEach((char) => primitive.parse(char.trim())),
core.forEachSequential((char) => primitive.parse(char.trim())),
core.mapError(configError.prefixed(path))

@@ -628,3 +628,3 @@ )

pipe(
core.forEach(quotedIndices, parseQuotedIndex),
core.forEachSequential(quotedIndices, parseQuotedIndex),
core.mapBoth({

@@ -631,0 +631,0 @@ onFailure: () => RA.empty<number>(),

@@ -56,6 +56,6 @@ import * as Either from "@effect/data/Either"

>(2, (path, patch) => {
let input = List.of(patch)
let input: List.List<PathPatch.PathPatch> = List.of(patch)
let output: ReadonlyArray<string> = path
while (List.isCons(input)) {
const patch = input.head
const patch: PathPatch.PathPatch = input.head
switch (patch._tag) {

@@ -62,0 +62,0 @@ case "Empty": {

@@ -32,3 +32,3 @@ import * as Chunk from "@effect/data/Chunk"

requests.length > 1 ?
core.forEachDiscard(requests, (block) =>
core.forEachSequentialDiscard(requests, (block) =>
invokeWithInterrupt(

@@ -107,3 +107,3 @@ run(

/** @internal */
export const contramapContext = dual<
export const mapInputContext = dual<
<R0, R>(

@@ -124,7 +124,7 @@ f: (context: Context.Context<R0>) => Context.Context<R>

(requests) =>
core.contramapContext(
core.mapInputContext(
self.runAll(requests),
(context: Context.Context<R0>) => f(context)
),
Chunk.make("ContramapContext", self, f)
Chunk.make("MapInputContext", self, f)
))

@@ -170,3 +170,3 @@

pipe(
core.forEach(batch, (requests) => {
core.forEachSequential(batch, (requests) => {
const [as, bs] = pipe(

@@ -180,3 +180,3 @@ requests,

() => void 0,
{ parallel: true }
{ concurrent: true }
)

@@ -193,3 +193,3 @@ })

makeBatched((requests: Array<A>) =>
core.forEachDiscard(
core.forEachSequentialDiscard(
requests,

@@ -239,3 +239,3 @@ (request) => complete(request, core.exitSucceed(f(request)) as any)

>(2, (self, context) =>
contramapContext(
mapInputContext(
self,

@@ -242,0 +242,0 @@ (_: Context.Context<never>) => context

@@ -250,4 +250,4 @@ import * as Duration from "@effect/data/Duration"

options?.discard ?
core.forEachDiscard(effects, fiberRuntime.fork) :
core.map(core.forEach(effects, fiberRuntime.fork), fiberRuntime.fiberAll))
core.forEachSequentialDiscard(effects, fiberRuntime.fork) :
core.map(core.forEachSequential(effects, fiberRuntime.fork), fiberRuntime.fiberAll))

@@ -585,3 +585,3 @@ /** @internal */

core.flatten,
fiberRuntime.zipWithOptions(core.flatten(that.await()), f, { parallel: true }),
fiberRuntime.zipWithOptions(core.flatten(that.await()), f, { concurrent: true }),
core.exit

@@ -588,0 +588,0 @@ ),

@@ -94,3 +94,3 @@ import * as Either from "@effect/data/Either"

fibers: Iterable<Fiber.RuntimeFiber<unknown, unknown>>
): Effect.Effect<never, never, Array<Fiber.Fiber.Dump>> => core.forEach(fibers, dump)
): Effect.Effect<never, never, Array<Fiber.Fiber.Dump>> => core.forEachSequential(fibers, dump)

@@ -126,4 +126,4 @@ /** @internal */

pipe(
core.forEachDiscard(fibers, interruptAsFork(fiberId)),
core.zipRight(pipe(fibers, core.forEachDiscard(_await)))
core.forEachSequentialDiscard(fibers, interruptAsFork(fiberId)),
core.zipRight(pipe(fibers, core.forEachSequentialDiscard(_await)))
))

@@ -130,0 +130,0 @@

@@ -151,3 +151,3 @@ import * as Equal from "@effect/data/Equal"

export const setAll = (self: FiberRefs.FiberRefs): Effect.Effect<never, never, void> =>
core.forEachDiscard(
core.forEachSequentialDiscard(
fiberRefs(self),

@@ -154,0 +154,0 @@ (fiberRef) => core.fiberRefSet(fiberRef, getOrDefault(self, fiberRef))

@@ -866,3 +866,4 @@ import * as Chunk from "@effect/data/Chunk"

unsafePollAllQueue(this.pollers),
(d) => core.deferredInterruptWith(d, state.id())
(d) => core.deferredInterruptWith(d, state.id()),
false
),

@@ -1098,6 +1099,6 @@ core.zipRight(core.sync(() => {

const acquire = core.tap(
fiberRuntime.all(
fiberRuntime.all([
this.scope.fork(executionStrategy.sequential),
makeSubscription(this.hub, this.subscribers, this.strategy)
),
]),
(tuple) => tuple[0].addFinalizer(() => tuple[1].shutdown())

@@ -1287,6 +1288,10 @@ )

(publishers) =>
fiberRuntime.forEachParUnboundedDiscard(publishers, ([_, deferred, last]) =>
last ?
pipe(core.deferredInterruptWith(deferred, fiberId), core.asUnit) :
core.unit)
fiberRuntime.forEachParUnboundedDiscard(
publishers,
([_, deferred, last]) =>
last ?
pipe(core.deferredInterruptWith(deferred, fiberId), core.asUnit) :
core.unit,
false
)
))

@@ -1293,0 +1298,0 @@ }

@@ -45,3 +45,3 @@ import * as Duration from "@effect/data/Duration"

invalidate(item: A): Effect.Effect<never, never, void> {
return core.flatMap(this.activePools(), core.forEachDiscard((pool) => pool.invalidate(item)))
return core.flatMap(this.activePools(), core.forEachSequentialDiscard((pool) => pool.invalidate(item)))
}

@@ -98,3 +98,3 @@ pipe() {

pipe(
fiberRuntime.all(
fiberRuntime.all([
core.context<R>(),

@@ -104,3 +104,3 @@ core.fiberId,

fiberRuntime.scopeMake()
),
]),
core.map(([context, fiberId, map, scope]) => {

@@ -175,3 +175,3 @@ const getOrCreatePool = (key: K): Effect.Effect<never, never, Pool.Pool<E, A>> =>

core.suspend(() =>
core.forEach(Array.from(HashMap.values(MutableRef.get(map))), (value) => {
core.forEachSequential(Array.from(HashMap.values(MutableRef.get(map))), (value) => {
switch (value._tag) {

@@ -178,0 +178,0 @@ case "Complete": {

@@ -412,3 +412,3 @@ import * as Context from "@effect/data/Context"

op.zipK,
{ parallel: true }
{ concurrent: true }
)

@@ -415,0 +415,0 @@ )

@@ -54,3 +54,3 @@ import type { LazyArg } from "@effect/data/Function"

/** @internal */
export const contramap = dual<
export const mapInput = dual<
<Message, Message2>(

@@ -57,0 +57,0 @@ f: (message: Message2) => Message

@@ -75,3 +75,3 @@ import type * as Chunk from "@effect/data/Chunk"

/** @internal */
export const contramap = dual<
export const mapInput = dual<
<In, In2>(f: (input: In2) => In) => <Type, Out>(self: Metric.Metric<Type, In, Out>) => Metric.Metric<Type, In2, Out>,

@@ -98,3 +98,3 @@ <Type, In, Out, In2>(self: Metric.Metric<Type, In, Out>, f: (input: In2) => In) => Metric.Metric<Type, In2, Out>

<Type, In, Out>(self: Metric.Metric<Type, In, Out>, input: In) => Metric.Metric<Type, unknown, Out>
>(2, (self, input) => contramap(self, () => input))
>(2, (self, input) => mapInput(self, () => input))

@@ -260,3 +260,3 @@ /** @internal */

const base = pipe(histogram(name, boundaries), tagged("time_unit", "milliseconds"))
return contramap(base, Duration.toMillis)
return mapInput(base, Duration.toMillis)
}

@@ -277,3 +277,3 @@

)
return contramap(base, Duration.toMillis)
return mapInput(base, Duration.toMillis)
}

@@ -461,3 +461,3 @@

self: Metric.Metric<Type, readonly [In, number], Out>
): Metric.Metric<Type, In, Out> => contramap(self, (input: In) => [input, Date.now()] as const)
): Metric.Metric<Type, In, Out> => mapInput(self, (input: In) => [input, Date.now()] as const)

@@ -464,0 +464,0 @@ /** @internal */

@@ -61,3 +61,3 @@ import { dual, pipe } from "@effect/data/Function"

),
poll: core.forEach(metrics, (metric) => metric.poll)
poll: core.forEachSequential(metrics, (metric) => metric.poll)
}

@@ -64,0 +64,0 @@ }

@@ -418,3 +418,3 @@ import * as Context from "@effect/data/Context"

pipe(
fiberRuntime.all(
fiberRuntime.all([
core.context<R>(),

@@ -426,6 +426,6 @@ ref.make(false),

options.strategy.initial()
),
]),
core.flatMap(([context, down, state, items, inv, initial]) => {
const pool = new PoolImpl<E, A>(
core.contramapContext(options.acquire, (old) => Context.merge(old)(context)),
core.mapInputContext(options.acquire, (old) => Context.merge(old)(context)),
options.min,

@@ -432,0 +432,0 @@ options.max,

@@ -52,3 +52,7 @@ import { seconds } from "@effect/data/Duration"

core.flatMap(
core.isEffect(dataSource) ? dataSource : core.succeed(dataSource),
(core.isEffect(dataSource) ? dataSource : core.succeed(dataSource)) as Effect.Effect<
never,
never,
RequestResolver.RequestResolver<A, never>
>,
(ds) =>

@@ -88,3 +92,3 @@ core.fiberIdWith((id) => {

BlockedRequests.single(
ds as any,
ds as RequestResolver.RequestResolver<A, never>,
BlockedRequests.makeEntry({

@@ -119,3 +123,3 @@ request: proxy,

BlockedRequests.single(
ds as any,
ds as RequestResolver.RequestResolver<A, never>,
BlockedRequests.makeEntry({

@@ -163,50 +167,17 @@ request: proxy,

/** @internal */
export const withRequestBatching: {
(strategy: "on" | "off"): <R, E, A>(self: Effect.Effect<R, E, A>) => Effect.Effect<R, E, A>
<R, E, A>(
self: Effect.Effect<R, E, A>,
strategy: "on" | "off"
): Effect.Effect<R, E, A>
} = dual<
(
strategy: "on" | "off"
) => <R, E, A>(self: Effect.Effect<R, E, A>) => Effect.Effect<R, E, A>,
<R, E, A>(
self: Effect.Effect<R, E, A>,
strategy: "on" | "off"
) => Effect.Effect<R, E, A>
>(2, (self, strategy) =>
core.fiberRefGetWith(core.currentRequestBatchingEnabled, (enabled) => {
switch (strategy) {
case "off":
return enabled ? core.fiberRefLocally(self, core.currentRequestBatchingEnabled, false) : self
case "on":
return enabled ? self : core.fiberRefLocally(self, core.currentRequestBatchingEnabled, true)
}
}))
/** @internal */
export const withRequestCaching: {
(strategy: "on" | "off"): <R, E, A>(self: Effect.Effect<R, E, A>) => Effect.Effect<R, E, A>
(strategy: boolean): <R, E, A>(self: Effect.Effect<R, E, A>) => Effect.Effect<R, E, A>
<R, E, A>(
self: Effect.Effect<R, E, A>,
strategy: "on" | "off"
strategy: boolean
): Effect.Effect<R, E, A>
} = dual<
(
strategy: "on" | "off"
strategy: boolean
) => <R, E, A>(self: Effect.Effect<R, E, A>) => Effect.Effect<R, E, A>,
<R, E, A>(
self: Effect.Effect<R, E, A>,
strategy: "on" | "off"
strategy: boolean
) => Effect.Effect<R, E, A>
>(2, (self, strategy) =>
core.fiberRefGetWith(currentCacheEnabled, (enabled) => {
switch (strategy) {
case "off":
return enabled ? core.fiberRefLocally(self, currentCacheEnabled, false) : self
case "on":
return enabled ? self : core.fiberRefLocally(self, currentCacheEnabled, true)
}
}))
>(2, (self, strategy) => core.fiberRefLocally(self, currentCacheEnabled, strategy))

@@ -213,0 +184,0 @@ /** @internal */
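
withRequestCaching now takes a plain boolean instead of "on" | "off". A minimal sketch, assuming the combinator is re-exported from the public Effect module; the loadUsers program is a stand-in:

import * as Effect from "@effect/io/Effect"

// Hypothetical program stub, for illustration only.
const loadUsers = Effect.succeed(["alice", "bob"])

// Previously written as withRequestCaching(loadUsers, "on"); the flag is now a boolean.
const cached = Effect.withRequestCaching(loadUsers, true)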

@@ -103,3 +103,4 @@ import * as Chunk from "@effect/data/Chunk"

unsafePollAll(this.takers),
(d) => core.deferredInterruptWith(d, state.id())
(d) => core.deferredInterruptWith(d, state.id()),
false
),

@@ -494,3 +495,8 @@ core.zipRight(this.strategy.shutdown()),

fiberRuntime.forEachParUnboundedDiscard(putters, ([_, deferred, isLastItem]) =>
isLastItem ? pipe(core.deferredInterruptWith(deferred, fiberId), core.asUnit) : core.unit)
isLastItem ?
pipe(
core.deferredInterruptWith(deferred, fiberId),
core.asUnit
) :
core.unit, false)
)

@@ -497,0 +503,0 @@ )

@@ -68,3 +68,3 @@ import * as Chunk from "@effect/data/Chunk"

numbers,
core.forEachDiscard((n) =>
core.forEachSequentialDiscard((n) =>
pipe(

@@ -71,0 +71,0 @@ nextIntBounded(n),

@@ -279,3 +279,4 @@ import * as Context from "@effect/data/Context"

runtimeFlags.Interruption,
runtimeFlags.CooperativeYielding
runtimeFlags.CooperativeYielding,
runtimeFlags.RuntimeMetrics
)

@@ -282,0 +283,0 @@

@@ -327,3 +327,4 @@ import * as Context from "@effect/data/Context"

HashSet.fromIterable(Array.from(this.cacheState.map).map(([key]) => key)),
(key) => this.invalidate(key)
(key) => this.invalidate(key),
false
)

@@ -572,3 +573,4 @@ }

this.trackAccess(key),
(cleanedMapValue) => this.cleanMapValue(cleanedMapValue)
(cleanedMapValue) => this.cleanMapValue(cleanedMapValue),
false
)

@@ -575,0 +577,0 @@ }

@@ -44,3 +44,3 @@ import * as Context from "@effect/data/Context"

acquire,
core.contramapContext<R, Scope.Scope | R>(Context.add(fiberRuntime.scopeTag, newScope))
core.mapInputContext<R, Scope.Scope | R>(Context.add(fiberRuntime.scopeTag, newScope))
)

@@ -103,3 +103,3 @@ ),

acquire,
core.contramapContext<Exclude<R, Scope.Scope>, R>(
core.mapInputContext<Exclude<R, Scope.Scope>, R>(
Context.add(fiberRuntime.scopeTag, newScope) as any

@@ -106,0 +106,0 @@ )

@@ -77,3 +77,3 @@ import * as Context from "@effect/data/Context"

either.right,
core.forEach((ref) => core.sync(() => MutableRef.get(ref))),
core.forEachSequential((ref) => core.sync(() => MutableRef.get(ref))),
core.map(RA.reduce(SortedSet.empty(fiber.Order), (a, b) => SortedSet.union(a, b))),

@@ -80,0 +80,0 @@ core.map(SortedSet.filter((fiber) => !Equal.equals(fiber.id(), descriptor.id)))

@@ -217,3 +217,3 @@ import * as Chunk from "@effect/data/Chunk"

return <R, E, A>(effect: Effect.Effect<R, E, A>): Effect.Effect<R, E, A> =>
fiberRuntime.zipLeftOptions(effect, this.adjust(duration), { parallel: true })
fiberRuntime.zipLeftOptions(effect, this.adjust(duration), { concurrent: true })
}

@@ -220,0 +220,0 @@ /**

@@ -29,3 +29,3 @@ /**

*/
export interface KeyedPool<K, E, A> extends KeyedPool.Variance<K, E, A>, Pipeable<KeyedPool<K, E, A>> {
export interface KeyedPool<K, E, A> extends KeyedPool.Variance<K, E, A>, Pipeable {
/**

@@ -32,0 +32,0 @@ * Retrieves an item from the pool belonging to the given key in a scoped

@@ -47,3 +47,3 @@ /**

*/
export interface Layer<RIn, E, ROut> extends Layer.Variance<RIn, E, ROut>, Pipeable<Layer<RIn, E, ROut>> {}
export interface Layer<RIn, E, ROut> extends Layer.Variance<RIn, E, ROut>, Pipeable {}

@@ -50,0 +50,0 @@ /**

@@ -38,3 +38,3 @@ /**

*/
export interface Logger<Message, Output> extends Logger.Variance<Message, Output>, Pipeable<Logger<Message, Output>> {
export interface Logger<Message, Output> extends Logger.Variance<Message, Output>, Pipeable {
readonly log: (

@@ -114,3 +114,3 @@ options: {

*/
export const contramap: {
export const mapInput: {
<Message, Message2>(

@@ -123,3 +123,3 @@ f: (message: Message2) => Message

): Logger<Message2, Output>
} = internal.contramap
} = internal.mapInput

@@ -126,0 +126,0 @@ /**

@@ -23,3 +23,3 @@ /**

*/
export type LogLevel = (All | Fatal | Error | Warning | Info | Debug | Trace | None) & Pipeable<LogLevel>
export type LogLevel = All | Fatal | Error | Warning | Info | Debug | Trace | None

@@ -36,3 +36,3 @@ /**

*/
export interface All extends Pipeable<All> {
export interface All extends Pipeable {
readonly _tag: "All"

@@ -48,3 +48,3 @@ readonly label: "ALL"

*/
export interface Fatal extends Pipeable<Fatal> {
export interface Fatal extends Pipeable {
readonly _tag: "Fatal"

@@ -60,3 +60,3 @@ readonly label: "FATAL"

*/
export interface Error extends Pipeable<Error> {
export interface Error extends Pipeable {
readonly _tag: "Error"

@@ -72,3 +72,3 @@ readonly label: "ERROR"

*/
export interface Warning extends Pipeable<Warning> {
export interface Warning extends Pipeable {
readonly _tag: "Warning"

@@ -84,3 +84,3 @@ readonly label: "WARN"

*/
export interface Info extends Pipeable<Info> {
export interface Info extends Pipeable {
readonly _tag: "Info"

@@ -96,3 +96,3 @@ readonly label: "INFO"

*/
export interface Debug extends Pipeable<Debug> {
export interface Debug extends Pipeable {
readonly _tag: "Debug"

@@ -108,3 +108,3 @@ readonly label: "DEBUG"

*/
export interface Trace extends Pipeable<Trace> {
export interface Trace extends Pipeable {
readonly _tag: "Trace"

@@ -120,3 +120,3 @@ readonly label: "TRACE"

*/
export interface None extends Pipeable<None> {
export interface None extends Pipeable {
readonly _tag: "None"

@@ -123,0 +123,0 @@ readonly label: "OFF"

@@ -52,3 +52,3 @@ /**

*/
export interface Metric<Type, In, Out> extends Metric.Variance<Type, In, Out>, Pipeable<Metric<Type, In, Out>> {
export interface Metric<Type, In, Out> extends Metric.Variance<Type, In, Out>, Pipeable {
/**

@@ -152,6 +152,6 @@ * The type of the underlying primitive metric. For example, this could be

*/
export const contramap: {
export const mapInput: {
<In, In2>(f: (input: In2) => In): <Type, Out>(self: Metric<Type, In, Out>) => Metric<Type, In2, Out>
<Type, In, Out, In2>(self: Metric<Type, In, Out>, f: (input: In2) => In): Metric<Type, In2, Out>
} = internal.contramap
} = internal.mapInput

@@ -158,0 +158,0 @@ /**

@@ -25,3 +25,3 @@ /**

*/
export interface MetricBoundaries extends Equal.Equal, Pipeable<MetricBoundaries> {
export interface MetricBoundaries extends Equal.Equal, Pipeable {
readonly [MetricBoundariesTypeId]: MetricBoundariesTypeId

@@ -28,0 +28,0 @@ readonly values: Chunk.Chunk<number>

@@ -26,3 +26,3 @@ /**

*/
export interface MetricHook<In, Out> extends MetricHook.Variance<In, Out>, Pipeable<MetricHook<In, Out>> {
export interface MetricHook<In, Out> extends MetricHook.Variance<In, Out>, Pipeable {
readonly get: () => Out

@@ -29,0 +29,0 @@ readonly update: (input: In) => void

@@ -38,3 +38,3 @@ /**

export interface MetricKey<Type extends MetricKeyType.MetricKeyType<any, any>>
extends MetricKey.Variance<Type>, Equal.Equal, Pipeable<MetricKey<Type>>
extends MetricKey.Variance<Type>, Equal.Equal, Pipeable
{

@@ -41,0 +41,0 @@ readonly name: string

@@ -88,5 +88,3 @@ /**

*/
export interface MetricKeyType<In, Out>
extends MetricKeyType.Variance<In, Out>, Equal.Equal, Pipeable<MetricKeyType<In, Out>>
{}
export interface MetricKeyType<In, Out> extends MetricKeyType.Variance<In, Out>, Equal.Equal, Pipeable {}

@@ -93,0 +91,0 @@ /**

@@ -31,3 +31,3 @@ /**

*/
export interface MetricLabel extends Equal.Equal, Pipeable<MetricLabel> {
export interface MetricLabel extends Equal.Equal, Pipeable {
readonly [MetricLabelTypeId]: MetricLabelTypeId

@@ -34,0 +34,0 @@ readonly key: string

@@ -27,3 +27,3 @@ /**

export interface MetricPair<Type extends MetricKeyType.MetricKeyType<any, any>>
extends MetricPair.Variance<Type>, Pipeable<MetricPair<Type>>
extends MetricPair.Variance<Type>, Pipeable
{

@@ -30,0 +30,0 @@ readonly metricKey: MetricKey.MetricKey<Type>

@@ -31,3 +31,3 @@ /**

*/
export interface PollingMetric<Type, In, R, E, Out> extends Pipeable<PollingMetric<Type, In, R, E, Out>> {
export interface PollingMetric<Type, In, R, E, Out> extends Pipeable {
readonly [PollingMetricTypeId]: PollingMetricTypeId

@@ -34,0 +34,0 @@ /**

@@ -92,3 +92,3 @@ /**

*/
export interface MetricState<A> extends MetricState.Variance<A>, Equal.Equal, Pipeable<MetricState<A>> {}
export interface MetricState<A> extends MetricState.Variance<A>, Equal.Equal, Pipeable {}

@@ -95,0 +95,0 @@ /**

@@ -31,3 +31,3 @@ /**

*/
export interface Pool<E, A> extends Data.Case, Pool.Variance<E, A>, Pipeable<Pool<E, A>> {
export interface Pool<E, A> extends Data.Case, Pool.Variance<E, A>, Pipeable {
/**

@@ -34,0 +34,0 @@ * Retrieves an item from the pool in a scoped effect. Note that if

@@ -8,3 +8,3 @@ /**

import type * as Option from "@effect/data/Option"
import type { Pipeable, PipeableOverride } from "@effect/data/Pipeable"
import type { Pipeable } from "@effect/data/Pipeable"
import type * as Deferred from "@effect/io/Deferred"

@@ -54,3 +54,3 @@ import type * as Effect from "@effect/io/Effect"

*/
export interface Queue<A> extends PipeableOverride<Enqueue<A>, Queue<A>>, PipeableOverride<Dequeue<A>, Queue<A>> {
export interface Queue<A> extends Enqueue<A>, Dequeue<A>, Pipeable {
/** @internal */

@@ -72,3 +72,3 @@ readonly queue: MutableQueue.MutableQueue<A>

*/
export interface Enqueue<A> extends Queue.EnqueueVariance<A>, BaseQueue, Pipeable<Enqueue<A>> {
export interface Enqueue<A> extends Queue.EnqueueVariance<A>, BaseQueue, Pipeable {
/**

@@ -106,3 +106,3 @@ * Places one value in the queue.

*/
export interface Dequeue<A> extends Queue.DequeueVariance<A>, BaseQueue, Pipeable<Dequeue<A>> {
export interface Dequeue<A> extends Queue.DequeueVariance<A>, BaseQueue, Pipeable {
/**

@@ -109,0 +109,0 @@ * Takes the oldest value in the queue. If the queue is empty, this will return
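Since Queue<A> now extends Enqueue<A>, Dequeue<A>, and Pipeable directly instead of going through PipeableOverride, one value still serves both roles. A minimal sketch using the existing Queue.unbounded, Queue.offer, and Queue.take:

import * as Effect from "@effect/io/Effect"
import * as Queue from "@effect/io/Queue"

// A single Queue<number> used both as an Enqueue (offer) and a Dequeue (take).
const program = Effect.gen(function* (_) {
  const queue = yield* _(Queue.unbounded<number>())
  yield* _(Queue.offer(queue, 1))
  return yield* _(Queue.take(queue)) // succeeds with 1
})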

@@ -25,3 +25,3 @@ /**

*/
export interface Ref<A> extends Ref.Variance<A>, Pipeable<Ref<A>> {
export interface Ref<A> extends Ref.Variance<A>, Pipeable {
modify<B>(f: (a: A) => readonly [B, A]): Effect.Effect<never, never, B>

@@ -28,0 +28,0 @@ }

@@ -132,8 +132,8 @@ /**

*/
export const contramapContext = <R0, R>(
export const mapInputContext = <R0, R>(
self: RequestBlock<R>,
f: (context: Context.Context<R0>) => Context.Context<R>
): RequestBlock<R0> => reduce(self, ContramapContextReducer(f))
): RequestBlock<R0> => reduce(self, MapInputContextReducer(f))
const ContramapContextReducer = <R0, R>(
const MapInputContextReducer = <R0, R>(
f: (context: Context.Context<R0>) => Context.Context<R>

@@ -146,3 +146,3 @@ ): RequestBlock.Reducer<R, RequestBlock<R0>> => ({

single(
_dataSource.contramapContext(dataSource, f),
_dataSource.mapInputContext(dataSource, f),
blockedRequest

@@ -149,0 +149,0 @@ )

@@ -51,3 +51,3 @@ /**

*/
export interface RequestResolver<A, R = never> extends Equal.Equal, Pipeable<RequestResolver<A, R>> {
export interface RequestResolver<A, R = never> extends Equal.Equal, Pipeable {
/**

@@ -179,3 +179,3 @@ * Execute a collection of requests. The outer `Chunk` represents batches

*/
export const contramapContext: {
export const mapInputContext: {
<R0, R>(

@@ -188,3 +188,3 @@ f: (context: Context.Context<R0>) => Context.Context<R>

): RequestResolver<A, R0>
} = internal.contramapContext
} = internal.mapInputContext

@@ -191,0 +191,0 @@ /**

@@ -37,3 +37,3 @@ /**

*/
export interface Runtime<R> extends Pipeable<Runtime<R>> {
export interface Runtime<R> extends Pipeable {
/**

@@ -192,9 +192,5 @@ * The context used as initial for forks

* @since 1.0.0
* @category models
* @category symbols
*/
export interface FiberFailure extends Error {
readonly [FiberFailureId]: FiberFailureId
readonly [FiberFailureCauseId]: Cause<unknown>
readonly [NodePrint]: () => string
}
export const NodePrint: unique symbol = internal.NodePrint

@@ -205,9 +201,13 @@ /**

*/
export const NodePrint: unique symbol = internal.NodePrint
export type NodePrint = typeof NodePrint
/**
* @since 1.0.0
* @category symbols
* @category models
*/
export type NodePrint = typeof NodePrint
export interface FiberFailure extends Error {
readonly [FiberFailureId]: FiberFailureId
readonly [FiberFailureCauseId]: Cause<unknown>
readonly [NodePrint]: () => string
}

@@ -214,0 +214,0 @@ /**

@@ -70,3 +70,3 @@ /**

*/
export interface Schedule<Env, In, Out> extends Schedule.Variance<Env, In, Out>, Pipeable<Schedule<Env, In, Out>> {
export interface Schedule<Env, In, Out> extends Schedule.Variance<Env, In, Out>, Pipeable {
/**

@@ -350,6 +350,6 @@ * Initial State

*/
export const contramap: {
export const mapInput: {
<In, In2>(f: (in2: In2) => In): <Env, Out>(self: Schedule<Env, In, Out>) => Schedule<Env, In2, Out>
<Env, In, Out, In2>(self: Schedule<Env, In, Out>, f: (in2: In2) => In): Schedule<Env, In2, Out>
} = internal.contramap
} = internal.mapInput
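A minimal sketch of the renamed combinator; the Job type is hypothetical, and Schedule.recurWhile is an existing constructor whose input is a number:

import * as Schedule from "@effect/io/Schedule"

// Hypothetical input type, used only for this example.
interface Job {
  readonly attempts: number
}

// Schedule.recurWhile consumes numbers; adapt it to consume Job values.
// In 0.31.x this combinator was named Schedule.contramap.
const whileFewAttempts = Schedule.mapInput(
  Schedule.recurWhile((n: number) => n < 3),
  (job: Job) => job.attempts
)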

@@ -363,3 +363,3 @@ /**

*/
export const contramapContext: {
export const mapInputContext: {
<Env0, Env>(

@@ -372,3 +372,3 @@ f: (env0: Context.Context<Env0>) => Context.Context<Env>

): Schedule<Env0, In, Out>
} = internal.contramapContext
} = internal.mapInputContext

@@ -382,3 +382,3 @@ /**

*/
export const contramapEffect: {
export const mapInputEffect: {
<In, Env2, In2>(

@@ -391,3 +391,3 @@ f: (in2: In2) => Effect.Effect<Env2, never, In>

): Schedule<Env | Env2, In2, Out>
} = internal.contramapEffect
} = internal.mapInputEffect

@@ -482,3 +482,3 @@ /**

/**
* Returns a new schedule that contramaps the input and maps the output.
* Returns a new schedule that maps both the input and output.
*

@@ -488,3 +488,3 @@ * @since 1.0.0

*/
export const dimap: {
export const mapBoth: {
<In, Out, In2, Out2>(

@@ -503,6 +503,6 @@ options: {

): Schedule<Env, In2, Out2>
} = internal.dimap
} = internal.mapBoth
/**
* Returns a new schedule that contramaps the input and maps the output.
* Returns a new schedule that maps both the input and output.
*

@@ -512,3 +512,3 @@ * @since 1.0.0

*/
export const dimapEffect: {
export const mapBothEffect: {
<In2, Env2, In, Out, Env3, Out2>(

@@ -527,3 +527,3 @@ options: {

): Schedule<Env | Env2 | Env3, In2, Out2>
} = internal.dimapEffect
} = internal.mapBothEffect

@@ -530,0 +530,0 @@ /**

@@ -6,3 +6,3 @@ /**

import type * as Context from "@effect/data/Context"
import type { Pipeable, PipeableOverride } from "@effect/data/Pipeable"
import type { Pipeable } from "@effect/data/Pipeable"
import type * as Effect from "@effect/io/Effect"

@@ -42,3 +42,3 @@ import type * as ExecutionStrategy from "@effect/io/ExecutionStrategy"

*/
export interface Scope extends Pipeable<Scope> {
export interface Scope extends Pipeable {
readonly [ScopeTypeId]: ScopeTypeId

@@ -60,3 +60,3 @@

*/
export interface CloseableScope extends PipeableOverride<Scope, CloseableScope> {
export interface CloseableScope extends Scope, Pipeable {
readonly [CloseableScopeTypeId]: CloseableScopeTypeId

@@ -63,0 +63,0 @@

@@ -30,3 +30,3 @@ /**

export interface ScopedCache<Key, Error, Value>
extends ScopedCache.Variance<Key, Error, Value>, Pipeable<ScopedCache<Key, Error, Value>>
extends ScopedCache.Variance<Key, Error, Value>, Pipeable
{

@@ -33,0 +33,0 @@ /**

@@ -33,3 +33,3 @@ /**

*/
export interface ScopedRef<A> extends ScopedRef.Variance<A>, Pipeable<ScopedRef<A>> {
export interface ScopedRef<A> extends ScopedRef.Variance<A>, Pipeable {
/** @internal */

@@ -36,0 +36,0 @@ readonly ref: Synchronized.Synchronized<readonly [Scope.Scope.Closeable, A]>
