@effect/platform
Comparing version 0.84.2 to 0.84.3
@@ -194,33 +194,64 @@ "use strict";
const schemaToResponse = ast => {
  const schema = Schema.make(ast);
  const encoding = HttpApiSchema.getEncoding(ast);
  const decode = Schema.decodeUnknown(schema);
  const decode = Schema.decode(schemaFromArrayBuffer(ast, encoding));
  return response => Effect.flatMap(response.arrayBuffer, decode);
};
const Uint8ArrayFromArrayBuffer = /*#__PURE__*/Schema.transform(Schema.Unknown, Schema.Uint8ArrayFromSelf, {
  decode(fromA) {
    return new Uint8Array(fromA);
  },
  encode(arr) {
    return arr.byteLength === arr.buffer.byteLength ? arr.buffer : arr.buffer.slice(arr.byteOffset, arr.byteOffset + arr.byteLength);
  }
});
const StringFromArrayBuffer = /*#__PURE__*/Schema.transform(Schema.Unknown, Schema.String, {
  decode(fromA) {
    return new TextDecoder().decode(fromA);
  },
  encode(toI) {
    const arr = new TextEncoder().encode(toI);
    return arr.byteLength === arr.buffer.byteLength ? arr.buffer : arr.buffer.slice(arr.byteOffset, arr.byteOffset + arr.byteLength);
  }
});
const parseJsonOrVoid = /*#__PURE__*/Schema.transformOrFail(Schema.String, Schema.Unknown, {
  strict: true,
  decode: (i, _, ast) => {
    if (i === "") return ParseResult.succeed(void 0);
    return ParseResult.try({
      try: () => JSON.parse(i),
      catch: () => new ParseResult.Type(ast, i, "Could not parse JSON")
    });
  },
  encode: (a, _, ast) => {
    if (a === undefined) return ParseResult.succeed("");
    return ParseResult.try({
      try: () => JSON.stringify(a),
      catch: () => new ParseResult.Type(ast, a, "Could not encode as JSON")
    });
  }
});
const parseJsonArrayBuffer = /*#__PURE__*/Schema.compose(StringFromArrayBuffer, parseJsonOrVoid);
const schemaFromArrayBuffer = (ast, encoding) => {
  if (ast._tag === "Union") {
    return Schema.Union(...ast.types.map(ast => schemaFromArrayBuffer(ast, HttpApiSchema.getEncoding(ast, encoding))));
  }
  switch (encoding.kind) {
    case "Json":
      {
        return response => Effect.flatMap(responseJson(response), decode);
        return Schema.compose(parseJsonArrayBuffer, Schema.make(ast));
      }
    case "UrlParams":
      {
        return HttpClientResponse.schemaBodyUrlParams(schema);
        return Schema.compose(StringFromArrayBuffer, UrlParams.schemaParse(Schema.make(ast)));
      }
    case "Uint8Array":
      {
        return response => response.arrayBuffer.pipe(Effect.map(buffer => new Uint8Array(buffer)), Effect.flatMap(decode));
        return Uint8ArrayFromArrayBuffer;
      }
    case "Text":
      {
        return response => Effect.flatMap(response.text, decode);
        return StringFromArrayBuffer;
      }
  }
};
const responseJson = response => Effect.flatMap(response.text, text => text === "" ? Effect.void : Effect.try({
  try: () => JSON.parse(text),
  catch: cause => new HttpClientError.ResponseError({
    reason: "Decode",
    request: response.request,
    response,
    cause
  })
}));
const statusOrElse = response => Effect.fail(new HttpClientError.ResponseError({
@@ -227,0 +258,0 @@ reason: "Decode",
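For context, the hunks above replace the per-encoding response callbacks with a single Schema pipeline that decodes the raw ArrayBuffer, and the client now resolves an encoding per union member via the new fallback argument to getEncoding. Below is a minimal, hypothetical sketch of the kind of success schema this enables, assuming HttpApiSchema.withEncoding attaches the encoding annotation that getEncoding reads; the schema name is illustrative and not part of this diff.

import { HttpApiSchema } from "@effect/platform"
import * as Schema from "effect/Schema"

// A union success schema: the first member has no annotation and inherits the
// union's encoding (Json by default), the second is annotated as plain text.
// schemaFromArrayBuffer recurses into the union, so each member gets its own decoder.
const FindResult = Schema.Union(
  Schema.Struct({ ok: Schema.Boolean }),
  Schema.String.pipe(HttpApiSchema.withEncoding({ kind: "Text" }))
)

Such a schema would be passed to an endpoint's addSuccess as usual; before this change the client resolved a single encoding for the whole success schema.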
@@ -96,3 +96,3 @@ "use strict";
 */
const getEncoding = ast => getAnnotation(ast, AnnotationEncoding) ?? encodingJson;
const getEncoding = (ast, fallback = encodingJson) => getAnnotation(ast, AnnotationEncoding) ?? fallback;
/**
@@ -99,0 +99,0 @@ * @since 1.0.0
@@ -6,3 +6,3 @@ "use strict";
});
exports.toString = exports.toRecord = exports.setAll = exports.set = exports.schemaStruct = exports.schemaJson = exports.schema = exports.remove = exports.makeUrl = exports.getLast = exports.getFirst = exports.getAll = exports.fromInput = exports.empty = exports.appendAll = exports.append = void 0;
exports.toString = exports.toRecord = exports.setAll = exports.set = exports.schemaStruct = exports.schemaRecord = exports.schemaParse = exports.schemaJson = exports.schemaFromString = exports.schemaFromSelf = exports.remove = exports.makeUrl = exports.getLast = exports.getFirst = exports.getAll = exports.fromInput = exports.empty = exports.appendAll = exports.append = void 0;
var Arr = _interopRequireWildcard(require("effect/Array"));
@@ -61,3 +61,3 @@ var Either = _interopRequireWildcard(require("effect/Either"));
 */
const schema = exports.schema = /*#__PURE__*/Schema.Array(Schema.Tuple(Schema.String, Schema.String)).annotations({
const schemaFromSelf = exports.schemaFromSelf = /*#__PURE__*/Schema.Array(Schema.Tuple(Schema.String, Schema.String)).annotations({
  identifier: "UrlParams"
@@ -231,3 +231,34 @@ });
};
/**
 * @since 1.0.0
 * @category schema
 */
exports.schemaStruct = schemaStruct;
const schemaFromString = exports.schemaFromString = /*#__PURE__*/Schema.transform(Schema.String, schemaFromSelf, {
  decode(fromA) {
    return fromInput(new URLSearchParams(fromA));
  },
  encode(toI) {
    return toString(toI);
  }
});
/**
 * @since 1.0.0
 * @category schema
 */
const schemaRecord = schema => Schema.transform(schemaFromSelf, schema, {
  decode(fromA) {
    return toRecord(fromA);
  },
  encode(toI) {
    return fromInput(toI);
  }
});
/**
 * @since 1.0.0
 * @category schema
 */
exports.schemaRecord = schemaRecord;
const schemaParse = schema => Schema.compose(schemaFromString, schemaRecord(schema));
exports.schemaParse = schemaParse;
//# sourceMappingURL=UrlParams.js.map
@@ -7,6 +7,6 @@ import * as Effect from "effect/Effect";
import type { HttpApiGroup } from "./HttpApiGroup.js";
import type * as HttpApiMiddleware from "./HttpApiMiddleware.js";
import * as HttpClient from "./HttpClient.js";
import * as HttpClientError from "./HttpClientError.js";
import * as HttpClientResponse from "./HttpClientResponse.js";
import type { HttpApiMiddleware } from "./index.js";
/**
@@ -13,0 +13,0 @@ * @since 1.0.0
@@ -58,3 +58,3 @@ /**
 */
export declare const getEncoding: (ast: AST.AST) => Encoding;
export declare const getEncoding: (ast: AST.AST, fallback?: Encoding) => Encoding;
/**
@@ -61,0 +61,0 @@ * @since 1.0.0
@@ -43,3 +43,3 @@ /**
 */
export declare const schema: Schema.Schema<UrlParams, ReadonlyArray<readonly [string, string]>>;
export declare const schemaFromSelf: Schema.Schema<UrlParams>;
/**
@@ -248,2 +248,17 @@ * @since 1.0.0
export declare const schemaStruct: <A, I extends Record<string, string | ReadonlyArray<string> | undefined>, R>(schema: Schema.Schema<A, I, R>, options?: ParseOptions | undefined) => (self: UrlParams) => Effect.Effect<A, ParseResult.ParseError, R>;
/**
 * @since 1.0.0
 * @category schema
 */
export declare const schemaFromString: Schema.Schema<UrlParams, string>;
/**
 * @since 1.0.0
 * @category schema
 */
export declare const schemaRecord: <A, I extends Record<string, string | ReadonlyArray<string> | undefined>, R>(schema: Schema.Schema<A, I, R>) => Schema.Schema<A, UrlParams, R>;
/**
 * @since 1.0.0
 * @category schema
 */
export declare const schemaParse: <A, I extends Record<string, string | ReadonlyArray<string> | undefined>, R>(schema: Schema.Schema<A, I, R>) => Schema.Schema<A, string, R>;
//# sourceMappingURL=UrlParams.d.ts.map
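A brief usage sketch of the new UrlParams combinators declared above, assuming they behave as their signatures suggest; the struct and variable names are illustrative and not part of the package.

import * as UrlParams from "@effect/platform/UrlParams"
import * as Effect from "effect/Effect"
import * as Schema from "effect/Schema"

// schemaParse composes schemaFromString (string <-> UrlParams) with
// schemaRecord (UrlParams <-> your struct), so it decodes a raw query
// string straight into a typed value and encodes it back again.
const SearchParams = Schema.Struct({
  q: Schema.String,
  page: Schema.NumberFromString
})

const program = Effect.gen(function*() {
  const decoded = yield* Schema.decode(UrlParams.schemaParse(SearchParams))("q=effect&page=2")
  // decoded: { q: "effect", page: 2 }
  const encoded = yield* Schema.encode(UrlParams.schemaParse(SearchParams))(decoded)
  // encoded: roughly "q=effect&page=2" (exact ordering/escaping comes from toString)
  return { decoded, encoded }
})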
@@ -183,33 +183,64 @@ /**
const schemaToResponse = ast => {
  const schema = Schema.make(ast);
  const encoding = HttpApiSchema.getEncoding(ast);
  const decode = Schema.decodeUnknown(schema);
  const decode = Schema.decode(schemaFromArrayBuffer(ast, encoding));
  return response => Effect.flatMap(response.arrayBuffer, decode);
};
const Uint8ArrayFromArrayBuffer = /*#__PURE__*/Schema.transform(Schema.Unknown, Schema.Uint8ArrayFromSelf, {
  decode(fromA) {
    return new Uint8Array(fromA);
  },
  encode(arr) {
    return arr.byteLength === arr.buffer.byteLength ? arr.buffer : arr.buffer.slice(arr.byteOffset, arr.byteOffset + arr.byteLength);
  }
});
const StringFromArrayBuffer = /*#__PURE__*/Schema.transform(Schema.Unknown, Schema.String, {
  decode(fromA) {
    return new TextDecoder().decode(fromA);
  },
  encode(toI) {
    const arr = new TextEncoder().encode(toI);
    return arr.byteLength === arr.buffer.byteLength ? arr.buffer : arr.buffer.slice(arr.byteOffset, arr.byteOffset + arr.byteLength);
  }
});
const parseJsonOrVoid = /*#__PURE__*/Schema.transformOrFail(Schema.String, Schema.Unknown, {
  strict: true,
  decode: (i, _, ast) => {
    if (i === "") return ParseResult.succeed(void 0);
    return ParseResult.try({
      try: () => JSON.parse(i),
      catch: () => new ParseResult.Type(ast, i, "Could not parse JSON")
    });
  },
  encode: (a, _, ast) => {
    if (a === undefined) return ParseResult.succeed("");
    return ParseResult.try({
      try: () => JSON.stringify(a),
      catch: () => new ParseResult.Type(ast, a, "Could not encode as JSON")
    });
  }
});
const parseJsonArrayBuffer = /*#__PURE__*/Schema.compose(StringFromArrayBuffer, parseJsonOrVoid);
const schemaFromArrayBuffer = (ast, encoding) => {
  if (ast._tag === "Union") {
    return Schema.Union(...ast.types.map(ast => schemaFromArrayBuffer(ast, HttpApiSchema.getEncoding(ast, encoding))));
  }
  switch (encoding.kind) {
    case "Json":
      {
        return response => Effect.flatMap(responseJson(response), decode);
        return Schema.compose(parseJsonArrayBuffer, Schema.make(ast));
      }
    case "UrlParams":
      {
        return HttpClientResponse.schemaBodyUrlParams(schema);
        return Schema.compose(StringFromArrayBuffer, UrlParams.schemaParse(Schema.make(ast)));
      }
    case "Uint8Array":
      {
        return response => response.arrayBuffer.pipe(Effect.map(buffer => new Uint8Array(buffer)), Effect.flatMap(decode));
        return Uint8ArrayFromArrayBuffer;
      }
    case "Text":
      {
        return response => Effect.flatMap(response.text, decode);
        return StringFromArrayBuffer;
      }
  }
};
const responseJson = response => Effect.flatMap(response.text, text => text === "" ? Effect.void : Effect.try({
  try: () => JSON.parse(text),
  catch: cause => new HttpClientError.ResponseError({
    reason: "Decode",
    request: response.request,
    response,
    cause
  })
}));
const statusOrElse = response => Effect.fail(new HttpClientError.ResponseError({
@@ -216,0 +247,0 @@ reason: "Decode",
@@ -85,3 +85,3 @@ import * as Effect from "effect/Effect";
 */
export const getEncoding = ast => getAnnotation(ast, AnnotationEncoding) ?? encodingJson;
export const getEncoding = (ast, fallback = encodingJson) => getAnnotation(ast, AnnotationEncoding) ?? fallback;
/**
@@ -88,0 +88,0 @@ * @since 1.0.0
@@ -51,3 +51,3 @@ /**
 */
export const schema = /*#__PURE__*/Schema.Array(Schema.Tuple(Schema.String, Schema.String)).annotations({
export const schemaFromSelf = /*#__PURE__*/Schema.Array(Schema.Tuple(Schema.String, Schema.String)).annotations({
  identifier: "UrlParams"
@@ -217,2 +217,31 @@ });
};
/**
 * @since 1.0.0
 * @category schema
 */
export const schemaFromString = /*#__PURE__*/Schema.transform(Schema.String, schemaFromSelf, {
  decode(fromA) {
    return fromInput(new URLSearchParams(fromA));
  },
  encode(toI) {
    return toString(toI);
  }
});
/**
 * @since 1.0.0
 * @category schema
 */
export const schemaRecord = schema => Schema.transform(schemaFromSelf, schema, {
  decode(fromA) {
    return toRecord(fromA);
  },
  encode(toI) {
    return fromInput(toI);
  }
});
/**
 * @since 1.0.0
 * @category schema
 */
export const schemaParse = schema => Schema.compose(schemaFromString, schemaRecord(schema));
//# sourceMappingURL=UrlParams.js.map
{
  "name": "@effect/platform",
  "version": "0.84.2",
  "version": "0.84.3",
  "description": "Unified interfaces for common platform-specific services",
@@ -5,0 +5,0 @@ "license": "MIT",
@@ -17,2 +17,3 @@ /**
import type { HttpApiGroup } from "./HttpApiGroup.js"
import type * as HttpApiMiddleware from "./HttpApiMiddleware.js"
import * as HttpApiSchema from "./HttpApiSchema.js"
@@ -25,3 +26,2 @@ import * as HttpBody from "./HttpBody.js"
import * as HttpMethod from "./HttpMethod.js"
import type { HttpApiMiddleware } from "./index.js"
import * as UrlParams from "./UrlParams.js"
@@ -390,21 +390,81 @@
): (response: HttpClientResponse.HttpClientResponse) => Effect.Effect<any, any> => {
  const schema = Schema.make(ast)
  const encoding = HttpApiSchema.getEncoding(ast)
  const decode = Schema.decodeUnknown(schema)
  const decode = Schema.decode(schemaFromArrayBuffer(ast, encoding))
  return (response) => Effect.flatMap(response.arrayBuffer, decode)
}
const Uint8ArrayFromArrayBuffer = Schema.transform(
  Schema.Unknown as Schema.Schema<ArrayBuffer>,
  Schema.Uint8ArrayFromSelf,
  {
    decode(fromA) {
      return new Uint8Array(fromA)
    },
    encode(arr) {
      return arr.byteLength === arr.buffer.byteLength ?
        arr.buffer :
        arr.buffer.slice(arr.byteOffset, arr.byteOffset + arr.byteLength)
    }
  }
)
const StringFromArrayBuffer = Schema.transform(
  Schema.Unknown as Schema.Schema<ArrayBuffer>,
  Schema.String,
  {
    decode(fromA) {
      return new TextDecoder().decode(fromA)
    },
    encode(toI) {
      const arr = new TextEncoder().encode(toI)
      return arr.byteLength === arr.buffer.byteLength ?
        arr.buffer :
        arr.buffer.slice(arr.byteOffset, arr.byteOffset + arr.byteLength)
    }
  }
)
const parseJsonOrVoid = Schema.transformOrFail(
  Schema.String,
  Schema.Unknown,
  {
    strict: true,
    decode: (i, _, ast) => {
      if (i === "") return ParseResult.succeed(void 0)
      return ParseResult.try({
        try: () => JSON.parse(i),
        catch: () => new ParseResult.Type(ast, i, "Could not parse JSON")
      })
    },
    encode: (a, _, ast) => {
      if (a === undefined) return ParseResult.succeed("")
      return ParseResult.try({
        try: () => JSON.stringify(a),
        catch: () => new ParseResult.Type(ast, a, "Could not encode as JSON")
      })
    }
  }
)
const parseJsonArrayBuffer = Schema.compose(StringFromArrayBuffer, parseJsonOrVoid)
const schemaFromArrayBuffer = (
  ast: AST.AST,
  encoding: HttpApiSchema.Encoding
): Schema.Schema<any, ArrayBuffer> => {
  if (ast._tag === "Union") {
    return Schema.Union(...ast.types.map((ast) => schemaFromArrayBuffer(ast, HttpApiSchema.getEncoding(ast, encoding))))
  }
  switch (encoding.kind) {
    case "Json": {
      return (response) => Effect.flatMap(responseJson(response), decode)
      return Schema.compose(parseJsonArrayBuffer, Schema.make(ast))
    }
    case "UrlParams": {
      return HttpClientResponse.schemaBodyUrlParams(schema as any)
      return Schema.compose(StringFromArrayBuffer, UrlParams.schemaParse(Schema.make(ast) as any)) as any
    }
    case "Uint8Array": {
      return (response: HttpClientResponse.HttpClientResponse) =>
        response.arrayBuffer.pipe(
          Effect.map((buffer) => new Uint8Array(buffer)),
          Effect.flatMap(decode)
        )
      return Uint8ArrayFromArrayBuffer
    }
    case "Text": {
      return (response) => Effect.flatMap(response.text, decode)
      return StringFromArrayBuffer
    }
@@ -414,15 +474,2 @@ }
const responseJson = (response: HttpClientResponse.HttpClientResponse) =>
  Effect.flatMap(response.text, (text) =>
    text === "" ? Effect.void : Effect.try({
      try: () => JSON.parse(text),
      catch: (cause) =>
        new HttpClientError.ResponseError({
          reason: "Decode",
          request: response.request,
          response,
          cause
        })
    }))
const statusOrElse = (response: HttpClientResponse.HttpClientResponse) =>
@@ -429,0 +476,0 @@ Effect.fail(
@@ -114,3 +114,4 @@ /**
 */
export const getEncoding = (ast: AST.AST): Encoding => getAnnotation<Encoding>(ast, AnnotationEncoding) ?? encodingJson
export const getEncoding = (ast: AST.AST, fallback = encodingJson): Encoding =>
  getAnnotation<Encoding>(ast, AnnotationEncoding) ?? fallback
@@ -117,0 +118,0 @@ /**
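The optional fallback added above is what lets the client thread a parent encoding into union members. A minimal sketch of the intended semantics, with unionAst and memberAst as illustrative placeholders:

import * as HttpApiSchema from "@effect/platform/HttpApiSchema"
import type * as AST from "effect/SchemaAST"

declare const unionAst: AST.AST   // AST of a union success schema
declare const memberAst: AST.AST  // AST of one of its members

// Before: a member without its own annotation always fell back to Json.
// After: it inherits the encoding resolved for the enclosing union.
const parentEncoding = HttpApiSchema.getEncoding(unionAst)                  // annotation ?? Json
const memberEncoding = HttpApiSchema.getEncoding(memberAst, parentEncoding) // annotation ?? parent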
@@ -86,3 +86,3 @@ /**
 */
export const schema: Schema.Schema<UrlParams, ReadonlyArray<readonly [string, string]>> = Schema.Array(
export const schemaFromSelf: Schema.Schema<UrlParams> = Schema.Array(
  Schema.Tuple(Schema.String, Schema.String)
@@ -398,1 +398,50 @@ ).annotations({ identifier: "UrlParams" })
}
/**
 * @since 1.0.0
 * @category schema
 */
export const schemaFromString: Schema.Schema<UrlParams, string> = Schema.transform(
  Schema.String,
  schemaFromSelf,
  {
    decode(fromA) {
      return fromInput(new URLSearchParams(fromA))
    },
    encode(toI) {
      return toString(toI)
    }
  }
)
/**
 * @since 1.0.0
 * @category schema
 */
export const schemaRecord = <A, I extends Record<string, string | ReadonlyArray<string> | undefined>, R>(
  schema: Schema.Schema<A, I, R>
): Schema.Schema<A, UrlParams, R> =>
  Schema.transform(
    schemaFromSelf,
    schema,
    {
      decode(fromA) {
        return toRecord(fromA) as I
      },
      encode(toI) {
        return fromInput(toI as Input) as UrlParams
      }
    }
  )
/**
 * @since 1.0.0
 * @category schema
 */
export const schemaParse = <A, I extends Record<string, string | ReadonlyArray<string> | undefined>, R>(
  schema: Schema.Schema<A, I, R>
): Schema.Schema<A, string, R> =>
  Schema.compose(
    schemaFromString,
    schemaRecord(schema)
  )
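schemaRecord can also be applied directly to a UrlParams value, without the string round trip that schemaParse adds. A small, hypothetical sketch; the field names are illustrative, and the multi-value case assumes toRecord collects repeated keys into an array, as the existing schemaStruct signature already implies.

import * as UrlParams from "@effect/platform/UrlParams"
import * as Schema from "effect/Schema"

const Filters = Schema.Struct({
  tag: Schema.Array(Schema.String),               // repeated ?tag=... values arrive as an array
  limit: Schema.optional(Schema.NumberFromString)
})

// toRecord is applied under the hood, then the struct schema decodes the record.
const params = UrlParams.fromInput({ tag: ["a", "b"], limit: "10" })
const filters = Schema.decodeSync(UrlParams.schemaRecord(Filters))(params)
// filters: { tag: ["a", "b"], limit: 10 }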
Sorry, the diff of this file is not supported yet