
Product
Introducing Webhook Events for Alert Changes
Add real-time Socket webhook events to your workflows to automatically receive software supply chain alert changes in real time.
Complete Node.js logging solution with 6 integration methods, S3 bidirectional operations, advanced analytics, and multi-cloud storage support for enterprise-scale applications.
Complete Node.js logging solution with S3 integration, automatic processing, and enterprise features.
npm install log-archiver
const { init, saveApiLog } = require("log-archiver");
// Initialize with S3
await init({
dbUri: "mongodb://localhost:27017/myapp",
uploadProvider: "s3",
// S3 Configuration
s3: {
bucketName: "my-app-logs",
region: "us-east-1",
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
keyPrefix: "app_logs", // Optional: Organize files in a main folder
},
// File processing
fileFormat: "json",
compression: {
enabled: true,
format: "gzip",
},
// Automatic processing
cron: {
dailyCron: "0 0 * * *", // Daily at midnight
hourlyCron: "0 * * * *", // Every hour
},
});
# Database
MONGODB_URI=mongodb://localhost:27017/myapp
# AWS S3 Configuration
AWS_ACCESS_KEY_ID=your_access_key_here
AWS_SECRET_ACCESS_KEY=your_secret_key_here
AWS_REGION=us-east-1
S3_BUCKET=your-app-logs-bucket
# Optional: Custom settings
NODE_ENV=production
LOG_LEVEL=info
const express = require("express");
const {
init,
saveApiLog,
createDailyJobs,
runHourlyJob,
getApiLogs,
getLogs,
} = require("log-archiver");
const app = express();
app.use(express.json());
// Initialize LogStack
async function initializeLogStack() {
await init({
dbUri: process.env.MONGODB_URI,
uploadProvider: "s3",
s3: {
bucketName: process.env.S3_BUCKET,
region: process.env.AWS_REGION,
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
keyPrefix: "server_logs", // Optional: Organize files in a main folder
},
compression: {
enabled: true,
format: "gzip",
},
// Data retention
retention: {
enabled: true,
days: 90, // Keep in DB for 90 days
s3Retention: {
enabled: true,
days: 365, // Keep in S3 for 1 year
},
},
// Environment-specific collections
collections: {
apiLogsCollectionName: `${process.env.NODE_ENV}_api_logs`,
logsCollectionName: `${process.env.NODE_ENV}_app_logs`,
jobsCollectionName: `${process.env.NODE_ENV}_jobs`,
},
cron: {
dailyCron: "0 0 * * *", // Midnight daily
hourlyCron: "0 * * * *", // Every hour
timezone: "UTC",
},
});
console.log("β
LogStack initialized with S3 integration");
}
// Auto-logging middleware for API requests
app.use(async (req, res, next) => {
const startTime = new Date();
res.on("finish", async () => {
try {
await saveApiLog({
method: req.method,
path: req.path,
status: res.statusCode,
request_time: startTime,
response_time: new Date(),
headers: req.headers,
request_body: req.body,
client_ip: req.ip,
user_agent: req.get("User-Agent"),
response_time_ms: Date.now() - startTime.getTime(),
});
} catch (error) {
console.error("β Failed to log API request:", error);
}
});
next();
});
// Start server
async function startServer() {
await initializeLogStack();
const port = process.env.PORT || 3000;
app.listen(port, () => {
console.log(`π Server running on port ${port}`);
console.log(`π Logs will be processed hourly and uploaded to S3`);
});
}
startServer().catch(console.error);
## βοΈ Advanced S3 Configuration
### Custom S3 Settings
```javascript
await init({
uploadProvider: "s3",
s3: {
bucketName: "my-logs-bucket",
region: "us-west-2",
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
// Optional: Custom S3 settings
endpoint: "https://s3.amazonaws.com", // Custom endpoint
forcePathStyle: false, // Use virtual hosted-style URLs
signatureVersion: "v4", // Signature version
maxRetries: 3, // Upload retry attempts
// File organization
keyPrefix: "logs/", // Creates organized structure: logs/2025-01-20/hour-files
dateFolder: true, // Organize by date folders
// Security
serverSideEncryption: "AES256", // Server-side encryption
storageClass: "STANDARD_IA", // Storage class
},
// File processing options
compression: {
enabled: true,
format: "gzip", // 'gzip', 'zip', 'brotli'
level: 6, // Compression level (1-9)
},
// Automatic cleanup
retention: {
enabled: true,
days: 90, // Database retention
cleanupSchedule: "0 2 * * *", // Daily at 2 AM
s3Retention: {
enabled: true,
days: 365, // S3 retention (1 year)
cleanupSchedule: "0 3 * * 0", // Weekly on Sunday at 3 AM
},
},
});
# Required AWS Credentials
AWS_ACCESS_KEY_ID=AKIA...
AWS_SECRET_ACCESS_KEY=abcd...
AWS_REGION=us-east-1
# S3 Configuration
S3_BUCKET=my-app-logs-production
S3_KEY_PREFIX=logs/
S3_STORAGE_CLASS=STANDARD_IA
S3_SERVER_SIDE_ENCRYPTION=AES256
# Database
MONGODB_URI=mongodb://localhost:27017/myapp
# Application
NODE_ENV=production
API_KEY=your-secure-api-key-here
PORT=4000
LogStack provides multiple ways to download and retrieve logs stored in S3:
const {
downloadFromS3,
listS3Files,
getFileDetails,
searchFiles,
} = require("log-archiver");
// Download specific file from S3
const fileContent = await downloadFromS3("logs/2025-09-05/14-15.json.gz");
console.log("Downloaded logs:", JSON.parse(fileContent));
// List all files in S3 bucket
const files = await listS3Files({
prefix: "logs/2025-09-05/", // Optional: filter by date
maxKeys: 100, // Optional: limit results
});
console.log("Available files:", files);
// Get file details and metadata
const fileDetails = await getFileDetails("logs/2025-09-05/14-15.json.gz");
console.log("File info:", fileDetails);
const { searchFiles, initializeFileSearch } = require("log-archiver");
// Initialize file search (call once)
await initializeFileSearch();
// Search files by date range
const dateRangeFiles = await searchFiles({
startDate: "2025-09-01",
endDate: "2025-09-05",
limit: 50,
});
// Search files by service
const serviceFiles = await searchFiles({
service: "user-service",
limit: 20,
});
// Search by file size
const largeFiles = await searchFiles({
minSize: 1024 * 1024, // Files larger than 1MB
maxSize: 10 * 1024 * 1024, // Files smaller than 10MB
});
console.log("Search results:", dateRangeFiles);
const { downloadFromS3 } = require("log-archiver");
// Download and automatically decompress gzipped files
const compressedFile = await downloadFromS3("logs/2025-09-05/14-15.json.gz", {
decompress: true, // Automatically decompress
format: "json", // Parse as JSON
});
console.log("Decompressed logs:", compressedFile);
// Download multiple files
const files = ["14-15.json.gz", "15-16.json.gz", "16-17.json.gz"];
const downloadedFiles = await Promise.all(
files.map((file) =>
downloadFromS3(`logs/2025-09-05/${file}`, { decompress: true })
)
);
const { downloadFromS3, listS3Files } = require("log-archiver");
async function downloadDateRange(startDate, endDate) {
console.log(`π₯ Downloading logs from ${startDate} to ${endDate}`);
// Get all files in date range
const files = await listS3Files({
prefix: "logs/",
startAfter: `logs/${startDate}/`,
endBefore: `logs/${endDate}/`,
});
console.log(`Found ${files.length} files to download`);
const downloadedLogs = [];
for (let i = 0; i < files.length; i++) {
const file = files[i];
const progress = (((i + 1) / files.length) * 100).toFixed(1);
console.log(`β³ Downloading ${file.Key} (${progress}%)`);
try {
const content = await downloadFromS3(file.Key, { decompress: true });
downloadedLogs.push({
file: file.Key,
size: file.Size,
data: content,
downloadedAt: new Date(),
});
console.log(`β
Downloaded ${file.Key}`);
} catch (error) {
console.error(`β Failed to download ${file.Key}:`, error.message);
}
}
console.log(`π Downloaded ${downloadedLogs.length}/${files.length} files`);
return downloadedLogs;
}
// Usage
downloadDateRange("2025-09-01", "2025-09-05").then((logs) => {
console.log("All logs downloaded:", logs.length);
});
const { downloadFromS3 } = require("log-archiver");
// Stream logs in real-time as they're created
async function streamRecentLogs() {
const today = new Date().toISOString().split("T")[0];
const currentHour = new Date().getHours();
// Download logs from last few hours
for (let h = Math.max(0, currentHour - 3); h <= currentHour; h++) {
const hourRange = `${h.toString().padStart(2, "0")}-${(h + 1)
.toString()
.padStart(2, "0")}`;
const fileName = `logs/${today}/${hourRange}.json.gz`;
try {
const logs = await downloadFromS3(fileName, { decompress: true });
console.log(`π Logs for ${hourRange}:`, logs);
// Process logs in real-time
if (logs && logs.length > 0) {
logs.forEach((log) => {
if (log.level === "error") {
console.error(`π¨ Error detected: ${log.message}`);
}
});
}
} catch (error) {
console.log(`β³ No logs yet for ${hourRange}`);
}
}
}
// Run every 5 minutes
setInterval(streamRecentLogs, 5 * 60 * 1000);
// Check your AWS credentials
console.log(
"AWS_ACCESS_KEY_ID:",
process.env.AWS_ACCESS_KEY_ID?.substring(0, 4) + "***"
);
console.log("AWS_REGION:", process.env.AWS_REGION);
console.log("S3_BUCKET:", process.env.S3_BUCKET);
// Test S3 connection
const { testS3Connection } = require("log-archiver");
await testS3Connection();
// Check MongoDB connection
const mongoose = require("mongoose");
mongoose.connection.on("connected", () => {
console.log("β
MongoDB connected");
});
mongoose.connection.on("error", (err) => {
console.error("β MongoDB error:", err);
});
// Check cron job status
const { getCronStatus } = require("log-archiver");
const status = await getCronStatus();
console.log("Cron jobs status:", status);
| Code | Message | Solution |
|---|---|---|
LOGSTACK_001 | S3 bucket not found | Check bucket name and permissions |
LOGSTACK_002 | Database connection failed | Verify MongoDB URI |
LOGSTACK_003 | Invalid configuration | Check config object |
LOGSTACK_004 | Cron job failed | Check logs for specific error |
init(config)Initialize LogStack with configuration
saveApiLog(logData)createDailyJobs(date?, config?)Create daily job slots for processing
runHourlyJob(config?)Process logs for previous hour
git checkout -b feature/amazing-feature)git commit -m 'Add amazing feature')git push origin feature/amazing-feature)This project is licensed under the MIT License - see the LICENSE file for details.
Made with β€οΈ
const { LogStackAdvanced } = require("logstack/lib/advancedFeatures");
const advanced = new LogStackAdvanced(config);
// Download logs from S3
const files = await advanced.downloadDateRange("2025-01-01", "2025-01-31");
// Search archived logs
const errors = await advanced.searchS3({ responseStatus: /^5\d{2}$/ });
// Generate analytics
const analytics = await advanced.generateAnalytics({
start: "2025-01-01T00:00:00Z",
end: "2025-01-31T23:59:59Z",
});
# Start specific services
npm run service:api # REST API server
### Basic Configuration
```javascript
await logstack.init({
dbUri: "mongodb://localhost:27017/myapp",
uploadProvider: "s3",
outputDirectory: "production-logs",
// S3 Configuration
s3: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
region: "us-east-1",
bucket: "my-logs-bucket",
},
// Security & Performance
compression: { enabled: true },
dataMasking: { enabled: true, fields: ["password", "token"] },
retention: { enabled: true, days: 90 },
});
const express = require("express");
const logstack = require("logstack");
const app = express();
await logstack.init(config);
// Automatic API logging middleware
app.use(async (req, res, next) => {
const start = new Date();
res.on("finish", async () => {
await logstack.saveApiLog({
method: req.method,
path: req.path,
responseStatus: res.statusCode,
request_time: start,
response_time: new Date(),
});
});
next();
});
| Provider | Best For | Setup |
|---|---|---|
| π Local | Development | uploadProvider: 'local' |
| βοΈ AWS S3 | Production | uploadProvider: 's3' |
| π Google Cloud | GCP environments | uploadProvider: 'gcs' |
| π Azure | Microsoft ecosystem | uploadProvider: 'azure' |
// Simple setup
await logstack.init();
await logstack.saveLog({ level: "info", message: "App started" });
| Metric | Performance |
|---|---|
| Throughput | 10,000+ logs/second |
| Storage | 60-80% compression |
| Reliability | 99.9% uptime |
| Scalability | Unlimited (cloud) |
MIT License
π LogArchiver - Making logging simple, scalable, and secure for every Node.js application!
FAQs
Complete Node.js logging solution with 6 integration methods, S3 bidirectional operations, advanced analytics, and multi-cloud storage support for enterprise-scale applications.
The npm package log-zee receives a total of 0 weekly downloads. As such, log-zee popularity was classified as not popular.
We found that log-zee demonstrated a healthy version release cadence and project activity because the last version was released less than a year ago.Β It has 1 open source maintainer collaborating on the project.
Did you know?

Socket for GitHub automatically highlights issues in each pull request and monitors the health of all your open source dependencies. Discover the contents of your packages and block harmful activity before you install or update your dependencies.

Product
Add real-time Socket webhook events to your workflows to automatically receive software supply chain alert changes in real time.

Security News
ENISA has become a CVE Program Root, giving the EU a central authority for coordinating vulnerability reporting, disclosure, and cross-border response.

Product
Socket now scans OpenVSX extensions, giving teams early detection of risky behaviors, hidden capabilities, and supply chain threats in developer tools.