
Security News
Attackers Are Hunting High-Impact Node.js Maintainers in a Coordinated Social Engineering Campaign
Multiple high-impact npm maintainers confirm they have been targeted in the same social engineering campaign that compromised Axios.
distilled-aws
Advanced tools
A fully typed AWS SDK for Effect, generated from Smithy specifications.
TaggedError classes for pattern matching. Pagination via .pages() and .items().
npm install distilled-aws effect
# or
bun add distilled-aws effect
import { Effect } from "effect";
import * as FetchHttpClient from "effect/unstable/http/FetchHttpClient";
import * as s3 from "distilled-aws/s3";
import { Credentials, Region } from "distilled-aws";
const program = Effect.gen(function* () {
// Upload a file
yield* s3.putObject({
Bucket: "my-bucket",
Key: "hello.txt",
Body: "Hello, World!",
ContentType: "text/plain",
});
// Download a file
const result = yield* s3.getObject({
Bucket: "my-bucket",
Key: "hello.txt",
});
return result.ContentType; // "text/plain"
});
// Run with required services
program.pipe(
Effect.provide(FetchHttpClient.layer),
Effect.provideService(Region, "us-east-1"),
Effect.provide(Credentials.fromChain()),
Effect.runPromise,
);
Import service modules as namespaces:
import * as s3 from "distilled-aws/s3";
import * as dynamodb from "distilled-aws/dynamodb";
import * as lambda from "distilled-aws/lambda";
import * as kms from "distilled-aws/kms";
import * as sfn from "distilled-aws/sfn";
// Then use operations via the namespace
s3.getObject({ Bucket: "my-bucket", Key: "file.txt" });
dynamodb.getItem({ TableName: "users", Key: { ... } });
lambda.invoke({ FunctionName: "my-function" });
All operations require three context services: Region, Credentials, and HttpClient.
The AWS region to use for API calls:
import { Region } from "distilled-aws";
Effect.provideService(Region, "us-east-1")
AWS credentials for signing requests. Multiple providers are available:
import { Credentials } from "distilled-aws";
// AWS credential provider chain (recommended for production)
// Checks: env vars → SSO → shared credentials → EC2/ECS metadata
Effect.provide(Credentials.fromChain())
// Environment variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
Effect.provide(Credentials.fromEnv())
// SSO profile from ~/.aws/config
Effect.provide(Credentials.fromSSO("my-profile"))
// Shared credentials file (~/.aws/credentials)
Effect.provide(Credentials.fromIni())
// EC2 instance metadata
Effect.provide(Credentials.fromInstanceMetadata())
// ECS container credentials
Effect.provide(Credentials.fromContainerMetadata())
// Web identity token (for EKS)
Effect.provide(Credentials.fromWebToken({ roleArn: "...", webIdentityToken: "..." }))
// Mock credentials (for testing with LocalStack)
Effect.provide(Credentials.mock)
Requires an HTTP client:
import * as FetchHttpClient from "effect/unstable/http/FetchHttpClient";
// or for Node.js
import { NodeHttpClient } from "@effect/platform-node";
Effect.provide(FetchHttpClient.layer)
// or
Effect.provide(NodeHttpClient.layer)
For LocalStack or other custom endpoints:
import { Endpoint } from "distilled-aws";
Effect.provideService(Endpoint, "http://localhost:4566")
import { Console, Effect, Stream } from "effect";
import * as FetchHttpClient from "effect/unstable/http/FetchHttpClient";
import * as s3 from "distilled-aws/s3";
import { Credentials, Region } from "distilled-aws";
const program = Effect.gen(function* () {
const bucket = "my-test-bucket";
// Create bucket
yield* s3.createBucket({ Bucket: bucket });
// Upload with string body
yield* s3.putObject({
Bucket: bucket,
Key: "message.txt",
Body: "Hello from distilled-aws!",
ContentType: "text/plain",
});
// Download and stream response
const result = yield* s3.getObject({ Bucket: bucket, Key: "message.txt" });
// Body is a Stream<Uint8Array> - collect and decode
const content = yield* result.Body!.pipe(
Stream.decodeText(),
Stream.mkString,
);
yield* Console.log(content); // "Hello from distilled-aws!"
// Cleanup
yield* s3.deleteBucket({ Bucket: bucket });
});
program.pipe(
Effect.provide(FetchHttpClient.layer),
Effect.provideService(Region, "us-east-1"),
Effect.provide(Credentials.fromChain()),
Effect.runPromise,
);
DynamoDB uses AttributeValue tagged unions for item data:
import { Console, Effect } from "effect";
import * as FetchHttpClient from "effect/unstable/http/FetchHttpClient";
import * as dynamodb from "distilled-aws/dynamodb";
import { Credentials, Region } from "distilled-aws";
const program = Effect.gen(function* () {
// Put item - values use AttributeValue format: { S: string }, { N: string }, { BOOL: boolean }
yield* dynamodb.putItem({
TableName: "users",
Item: {
pk: { S: "user#123" },
sk: { S: "profile" },
name: { S: "John Doe" },
age: { N: "30" },
active: { BOOL: true },
},
});
// Get item
const result = yield* dynamodb.getItem({
TableName: "users",
Key: {
pk: { S: "user#123" },
sk: { S: "profile" },
},
});
const name = (result.Item?.name as { S: string })?.S;
yield* Console.log(name); // "John Doe"
// Query by partition key
const queryResult = yield* dynamodb.query({
TableName: "users",
KeyConditionExpression: "pk = :pk",
ExpressionAttributeValues: {
":pk": { S: "user#123" },
},
});
yield* Console.log(queryResult.Count); // 1
});
program.pipe(
Effect.provide(FetchHttpClient.layer),
Effect.provideService(Region, "us-east-1"),
Effect.provide(Credentials.fromChain()),
Effect.runPromise,
);
import { Console, Effect, Stream } from "effect";
import * as FetchHttpClient from "effect/unstable/http/FetchHttpClient";
import * as lambda from "distilled-aws/lambda";
import { Credentials, Region } from "distilled-aws";
const program = Effect.gen(function* () {
const functionName = "my-hello-function";
// Create a Lambda function (requires a ZIP file with your handler code)
yield* lambda.createFunction({
FunctionName: functionName,
Runtime: "nodejs20.x",
Role: "arn:aws:iam::123456789012:role/lambda-execution-role",
Handler: "index.handler",
Code: { ZipFile: myZipFileAsUint8Array },
});
// Invoke the function
const response = yield* lambda.invoke({
FunctionName: functionName,
InvocationType: "RequestResponse",
Payload: new TextEncoder().encode(JSON.stringify({ name: "World" })),
});
yield* Console.log(`Status: ${response.StatusCode}`); // 200
// Read the response payload (it's a stream)
const payload = yield* response.Payload!.pipe(
Stream.decodeText(),
Stream.mkString,
);
yield* Console.log(JSON.parse(payload)); // { statusCode: 200, body: "..." }
// Update function configuration
yield* lambda.updateFunctionConfiguration({
FunctionName: functionName,
MemorySize: 256,
Timeout: 60,
});
// Tag the function
const funcInfo = yield* lambda.getFunction({ FunctionName: functionName });
yield* lambda.tagResource({
Resource: funcInfo.Configuration!.FunctionArn!,
Tags: { Environment: "production", Team: "platform" },
});
// List all functions
const functions = yield* lambda.listFunctions({});
yield* Console.log(`Total functions: ${functions.Functions?.length}`);
// Cleanup
yield* lambda.deleteFunction({ FunctionName: functionName });
});
program.pipe(
Effect.provide(FetchHttpClient.layer),
Effect.provideService(Region, "us-east-1"),
Effect.provide(Credentials.fromChain()),
Effect.runPromise,
);
import { Effect } from "effect";
import * as FetchHttpClient from "effect/unstable/http/FetchHttpClient";
import * as s3 from "distilled-aws/s3";
import { Credentials, Endpoint, Region } from "distilled-aws";
const program = s3.listBuckets({});
program.pipe(
Effect.provide(FetchHttpClient.layer),
Effect.provideService(Region, "us-east-1"),
Effect.provideService(Endpoint, "http://localhost:4566"),
Effect.provide(Credentials.mock),
Effect.runPromise,
);
By default, distilled-aws automatically retries transient errors, throttling errors, and errors with the @retryable trait using exponential backoff with jitter (up to 5 attempts).
You can customize or disable this behavior using the Retry module:
import { Retry } from "distilled-aws";
// or
import * as Retry from "distilled-aws/Retry";
import { Retry } from "distilled-aws";
myEffect.pipe(Retry.none)
import { Retry } from "distilled-aws";
// Retries all throttling errors with exponential backoff (capped at 5s)
myEffect.pipe(Retry.throttling)
import { Retry } from "distilled-aws";
// Retries throttling, server errors, and @retryable errors indefinitely
myEffect.pipe(Retry.transient)
import { Retry } from "distilled-aws";
import * as Schedule from "effect/Schedule";
myEffect.pipe(
Retry.policy({
while: (error) => isThrottlingError(error),
schedule: Schedule.exponential(1000),
})
)
For advanced use cases like respecting Retry-After headers, you can access the last error via a Ref:
import { Retry } from "distilled-aws";
import * as Duration from "effect/Duration";
import * as Effect from "effect/Effect";
import * as Schedule from "effect/Schedule";
myEffect.pipe(
Retry.policy((lastError) => ({
while: (error) => isThrottlingError(error),
schedule: Schedule.exponential(1000).pipe(
Schedule.modifyDelayEffect(
Effect.gen(function* (duration) {
const error = yield* lastError;
// Respect retry-after header if present
if (error?.retryAfterSeconds) {
return Duration.seconds(error.retryAfterSeconds);
}
return duration;
})
)
),
}))
)
All operations return typed errors that can be pattern-matched:
import { Effect } from "effect";
import * as s3 from "distilled-aws/s3";
const program = s3.getObject({
Bucket: "my-bucket",
Key: "missing-file.txt",
}).pipe(
Effect.catchTags({
NoSuchKey: (error) =>
Effect.succeed({ found: false, message: "File not found" }),
InvalidObjectState: (error) =>
Effect.fail(new Error(`Object in ${error.StorageClass} storage`)),
}),
);
Lambda error handling:
import { Effect } from "effect";
import * as lambda from "distilled-aws/lambda";
const program = lambda.invoke({
FunctionName: "my-function",
Payload: new TextEncoder().encode("{}"),
}).pipe(
Effect.catchTags({
ResourceNotFoundException: () =>
Effect.succeed({ error: "Function not found" }),
InvalidRequestContentException: (error) =>
Effect.fail(new Error(`Bad request: ${error.message}`)),
ServiceException: () =>
Effect.succeed({ error: "Lambda service unavailable" }),
}),
);
AWS errors are classified into categories for easier handling. You can catch errors by category instead of individual error types:
import { Category } from "distilled-aws";
// or
import * as Category from "distilled-aws/Category";
| Category | Description | HTTP Codes |
|---|---|---|
| AuthError | Authentication/authorization failures | 401, 403 |
| BadRequestError | Invalid request parameters | 400, 404, 405, 406, 410, 413, 415, 422 |
| ConflictError | Resource state conflicts | 409 |
| QuotaError | Service quota exceeded | 402 |
| ThrottlingError | Rate limiting | 429 |
| TimeoutError | Request timeouts | 408, 504 |
| ServerError | AWS service errors | 5xx |
| RetryableError | Errors with Smithy @retryable trait | - |
| NetworkError | Network/transport failures | - |
| AbortedError | Aborted operations | - |
Use catchErrors to catch multiple categories at once:
import { Effect } from "effect";
import { Category } from "distilled-aws";
import * as s3 from "distilled-aws/s3";
const program = s3.getObject({ Bucket: "my-bucket", Key: "file.txt" }).pipe(
// Catch auth and quota errors together
Category.catchErrors(
"AuthError",
"QuotaError",
(error) => Effect.succeed({ fallback: true }),
),
);
Or use the convenience helpers for single categories:
const program = s3.getObject({ Bucket: "my-bucket", Key: "file.txt" }).pipe(
Category.catchAuthError((error) => Effect.succeed({ unauthorized: true })),
Category.catchThrottlingError((error) => Effect.succeed({ retry: true })),
);
Category predicates work with Effect.retry:
import { Effect } from "effect";
import { Category } from "distilled-aws";
import * as s3 from "distilled-aws/s3";
const program = s3.putObject({
Bucket: "my-bucket",
Key: "file.txt",
Body: "content",
}).pipe(
Effect.retry({
times: 3,
while: Category.isThrottlingError,
}),
);
Available predicates: isAuthError, isBadRequestError, isConflictError, isQuotaError, isThrottlingError, isTimeoutError, isServerError, isRetryableError, isNetworkError, isAbortedError.
AWS APIs often include sensitive data like passwords, secret keys, and encryption keys. Fields marked with the Smithy @sensitive trait are automatically wrapped in Effect's Redacted type to prevent accidental exposure in logs.
Output values — When you receive sensitive data from AWS, it's wrapped in Redacted to prevent accidental logging. Input values — You can pass plain strings or Redacted values for convenience.
import { Effect } from "effect";
import * as Redacted from "effect/Redacted";
import * as iam from "distilled-aws/iam";
const program = Effect.gen(function* () {
// Create an access key
const result = yield* iam.createAccessKey({ UserName: "my-user" });
// SecretAccessKey is Redacted<string> - safe to log the whole object
console.log(result); // { AccessKeyId: "AKIA...", SecretAccessKey: <redacted>, ... }
// Extract the actual value when needed
const secret = Redacted.value(result.AccessKey!.SecretAccessKey);
// Use 'secret' to configure credentials, etc.
});
| Service | Field | Type |
|---|---|---|
| IAM | SecretAccessKey | Redacted<string> |
| IAM | Password | Redacted<string> |
| KMS | Plaintext | Redacted<Uint8Array> |
| Lambda | Environment.Variables (values) | Redacted<string> |
For operations that accept sensitive input, you can pass either raw values or Redacted:
import * as Redacted from "effect/Redacted";
import * as iam from "distilled-aws/iam";
// Raw value (convenient)
yield* iam.changePassword({
OldPassword: "old-password",
NewPassword: "new-password",
});
// Redacted value (explicit)
yield* iam.changePassword({
OldPassword: Redacted.make("old-password"),
NewPassword: Redacted.make("new-password"),
});
import { Effect, Stream } from "effect";
import * as s3 from "distilled-aws/s3";
// Create a stream from chunks
const chunks = ["Hello, ", "streaming ", "world!"];
const encoder = new TextEncoder();
const stream = Stream.fromIterable(chunks.map((s) => encoder.encode(s)));
const upload = s3.putObject({
Bucket: "my-bucket",
Key: "streamed.txt",
Body: stream,
ContentLength: chunks.reduce((acc, s) => acc + encoder.encode(s).length, 0),
ContentType: "text/plain",
});
import { Effect, Stream, Console } from "effect";
import * as s3 from "distilled-aws/s3";
const download = Effect.gen(function* () {
const result = yield* s3.getObject({
Bucket: "my-bucket",
Key: "document.txt",
});
// Body is Stream<Uint8Array> - decode and collect as string
const content = yield* result.Body!.pipe(
Stream.decodeText(),
Stream.mkString,
);
yield* Console.log(content);
});
Paginated operations expose .pages() and .items() methods that return Effect Streams for automatic pagination.
.pages() — Use .pages() to stream complete response objects. Each emission is a full API response:
import { Effect, Stream } from "effect";
import * as s3 from "distilled-aws/s3";
const program = Effect.gen(function* () {
// Stream all pages of objects
const allKeys = yield* s3.listObjectsV2
.pages({ Bucket: "my-bucket", MaxKeys: 100 })
.pipe(
Stream.flatMap((page) => Stream.fromIterable(page.Contents ?? [])),
Stream.map((obj) => obj.Key!),
Stream.runCollect,
);
console.log(`Found ${allKeys.length} objects`);
});
.items() — Use .items() to stream individual items directly. Only available for operations with an items field in their Smithy pagination trait:
import { Effect, Stream } from "effect";
import * as dynamodb from "distilled-aws/dynamodb";
const program = Effect.gen(function* () {
// Stream individual DynamoDB items across all pages
const allItems = yield* dynamodb.query
.items({
TableName: "users",
KeyConditionExpression: "pk = :pk",
ExpressionAttributeValues: { ":pk": { S: "user#123" } },
Limit: 25, // Items per page
})
.pipe(Stream.runCollect);
console.log(`Found ${allItems.length} items`);
});
Most AWS list operations support pagination and expose both .pages() and .items() methods. The .items() method automatically extracts items from each page based on the operation's Smithy pagination trait.
Note: Some operations don't specify an items field in their pagination trait, or use nested paths (e.g., DistributionList.Items). In these cases, .items() returns an empty stream — use .pages() and extract items manually instead.
Services are generated by scripts/generate-clients.ts from the Smithy JSON AST models. The generator uses S.suspend() for recursive schemas and writes the output to src/services/.
# Generate a single service
bun generate --sdk s3
# Generate all services
bun generate
Smithy traits are modeled 1:1 with Effect Schema annotations in src/traits.ts. This allows protocol implementations to introspect schemas and serialize/deserialize correctly.
| Smithy Trait | Effect Annotation | Purpose |
|---|---|---|
@httpLabel | T.HttpLabel() | Bind member to URI path parameter |
@httpHeader | T.HttpHeader("X-Custom") | Bind member to HTTP header |
@httpQuery | T.HttpQuery("param") | Bind member to query string |
@httpPayload | T.HttpPayload() | Bind member to request/response body |
@httpPrefixHeaders | T.HttpPrefixHeaders("x-amz-meta-") | Bind map to prefixed headers |
@xmlName | T.XmlName("CustomName") | Custom XML element name |
@xmlFlattened | T.XmlFlattened() | Flatten list/map (no wrapper) |
@xmlAttribute | T.XmlAttribute() | Serialize as XML attribute |
@xmlNamespace | T.XmlNamespace("http://...") | XML namespace URI |
@jsonName | T.JsonName("custom_name") | Custom JSON key (uses S.fromKey) |
@timestampFormat | T.TimestampFormat("http-date") | Timestamp wire format |
@streaming | T.Streaming() | Streaming blob type |
@contextParam | T.ContextParam("Bucket") | Endpoint resolution parameter |
@sensitive | SensitiveString / SensitiveBlob | Wrap in Redacted to prevent logging |
Request schemas are Effect S.Class definitions with HTTP binding annotations:
export class GetObjectRequest extends S.Class<GetObjectRequest>(
"GetObjectRequest",
)(
{
// Path parameters
Bucket: S.String.pipe(T.HttpLabel(), T.ContextParam("Bucket")),
Key: S.String.pipe(T.HttpLabel(), T.ContextParam("Key")),
// HTTP headers with timestamp formatting
IfModifiedSince: S.optional(
S.Date.pipe(T.TimestampFormat("http-date")),
).pipe(T.HttpHeader("If-Modified-Since")),
IfMatch: S.optional(S.String).pipe(T.HttpHeader("If-Match")),
Range: S.optional(S.String).pipe(T.HttpHeader("Range")),
// Query parameters
VersionId: S.optional(S.String).pipe(T.HttpQuery("versionId")),
PartNumber: S.optional(S.Number).pipe(T.HttpQuery("partNumber")),
},
T.all(
T.XmlNamespace("http://s3.amazonaws.com/doc/2006-03-01/"),
T.Http({ method: "GET", uri: "/{Bucket}/{Key+}?x-id=GetObject" }),
T.AwsApiService({ sdkId: "S3" }),
T.AwsAuthSigv4({ name: "s3" }),
T.AwsProtocolsRestXml(),
T.ServiceVersion("2006-03-01"),
T.EndpointRuleSet({ /* ... */ }),
),
) {}
Response schemas extract data from headers and body:
export class GetObjectOutput extends S.Class<GetObjectOutput>(
"GetObjectOutput",
)(
{
// Streaming body payload
Body: S.optional(T.StreamingOutput).pipe(T.HttpPayload()),
// Response headers
ContentLength: S.optional(S.Number).pipe(T.HttpHeader("Content-Length")),
ContentType: S.optional(S.String).pipe(T.HttpHeader("Content-Type")),
ETag: S.optional(S.String).pipe(T.HttpHeader("ETag")),
LastModified: S.optional(
S.Date.pipe(T.TimestampFormat("http-date"))
).pipe(T.HttpHeader("Last-Modified")),
VersionId: S.optional(S.String).pipe(T.HttpHeader("x-amz-version-id")),
DeleteMarker: S.optional(S.Boolean).pipe(T.HttpHeader("x-amz-delete-marker")),
},
) {}
Errors are S.TaggedError classes for typed error handling:
export class NoSuchKey extends S.TaggedErrorClass<NoSuchKey>()("NoSuchKey", {}) {}
export class InvalidObjectState extends S.TaggedErrorClass<InvalidObjectState>()(
"InvalidObjectState",
{
StorageClass: S.optional(S.String),
AccessTier: S.optional(S.String),
},
) {}
Operations tie input, output, and errors together:
export const getObject = /*@__PURE__*/ API.make(() => ({
input: GetObjectRequest,
output: GetObjectOutput,
errors: [InvalidObjectState, NoSuchKey],
}));
The API.make() function (src/api.ts) creates an Effect-returning function that:
All AWS protocols are implemented in src/protocols/:
REST XML (aws.protocols#restXml) — Used by: S3, CloudFront, Route 53
Operations are bound via the @http trait; XML shaping uses the @xmlName, @xmlFlattened, @xmlAttribute, and @xmlNamespace traits.
REST JSON (aws.protocols#restJson1) — Used by: Lambda, API Gateway, DynamoDB Streams, Glacier
Operations are bound via the @http trait; the @jsonName trait supplies custom property keys; timestamps default to epoch-seconds.
AWS JSON (aws.protocols#awsJson1_0, aws.protocols#awsJson1_1) — Used by: DynamoDB (1.0), SQS (1.0), SNS (1.0), STS (1.1), KMS (1.1)
All requests are POST / with the operation named in the X-Amz-Target header; the Content-Type is application/x-amz-json-1.0 or application/x-amz-json-1.1.
AWS Query (aws.protocols#awsQuery) — Used by: IAM, STS, SES, CloudWatch, Auto Scaling, Elastic Load Balancing
All requests are POST / with an application/x-www-form-urlencoded body; the Action and Version parameters identify the operation.
EC2 Query (aws.protocols#ec2Query) — Used by: EC2
Uses the @ec2QueryName trait for custom query key names.
# Run protocol tests
bun test:protocols
# Run a single protocol test suite
bun vitest run ./test/protocols/rest-xml.test.ts
# Run S3 service tests (requires LocalStack)
bun test:local ./test/services/s3.test.ts
# Run all service tests
bun test:local ./test/services/
MIT
FAQs
Unknown package
We found that distilled-aws demonstrated a healthy version release cadence and project activity because the last version was released less than a year ago. It has 2 open source maintainers collaborating on the project.
Did you know?

Socket for GitHub automatically highlights issues in each pull request and monitors the health of all your open source dependencies. Discover the contents of your packages and block harmful activity before you install or update your dependencies.

Security News
Multiple high-impact npm maintainers confirm they have been targeted in the same social engineering campaign that compromised Axios.

Security News
Axios compromise traced to social engineering, showing how attacks on maintainers can bypass controls and expose the broader software supply chain.

Security News
Node.js has paused its bug bounty program after funding ended, removing payouts for vulnerability reports but keeping its security process unchanged.