// mongo-to-s3 (WIP) — usage examples
var AWS = require("aws-sdk");
var MongoToS3 = require("mongo-to-s3");
var through = require("through");
// Configure the AWS S3 client that MongoToS3 will upload through.
// NOTE(review): credentials are hard-coded placeholders here; real code
// should load them from the environment or an AWS credential provider.
var s3 = new AWS.S3({
  accessKeyId: "myAccessKey",
  secretAccessKey: "mySecretAccessKey",
  region: "us-east-1"
});
// BUG FIX: `var` was missing, so `mongoToS3` leaked as an implicit global
// (and would throw a ReferenceError under strict mode).
var mongoToS3 = new MongoToS3(s3);
// Example: export a single collection, transform each chunk, and pipe the
// result into an S3 multipart upload sink.
mongoToS3.createS3Sink({
  s3: {
    Bucket: "myBucket",
    Key: "myKey",
    ACL: "public-read" // uploaded object will be world-readable
  },
  chunkUploadSize: 5242880, // 5 MiB — S3's minimum multipart part size
  workingDirectory: "/tmp"
}, function(err, myS3Sink) {
  // BUG FIX: the original silently ignored `err` in both callbacks.
  if (err) throw err;
  mongoToS3.fromMongo([{
    exportOptions: "-h localhost:27017 -d database -c collection",
    workingDirectory: "/tmp"
  }],
  function(err, exports) {
    if (err) throw err;
    exports
      .streams
      // NOTE(review): the (chunk, enc, cb) / this.push signature is the
      // through2 style; the classic "through" module required above uses
      // function(data) / this.queue(data). Confirm which package the
      // project actually depends on — as written this mixes the two.
      .pipe(through(function(chunk, enc, cb) {
        console.log("Processing:", chunk);
        this.push(chunk);
        cb();
      }))
      .pipe(myS3Sink);
    // The export streams start paused; resume() begins the flow.
    exports.resume();
  });
});
// Example: export two collections at once; their streams are exposed as a
// single combined `exports.streams` that is piped through one transform.
mongoToS3.fromMongo([
  {
    exportOptions: "-h localhost:27017 -d database -c collection1",
    workingDirectory: "/tmp"
  },
  {
    exportOptions: "-h localhost:27017 -d database -c collection2",
    workingDirectory: "/tmp"
  }
],
function(err, exports) {
  // BUG FIX: the original silently ignored `err`.
  if (err) throw err;
  exports
    .streams
    // NOTE(review): same through/through2 signature mismatch as the
    // single-collection example — confirm the intended module.
    .pipe(through(function(chunk, enc, cb) {
      this.push(chunk);
      cb();
    }))
    // `someWritableStream` is a placeholder destination the caller supplies;
    // it is intentionally undefined in this example.
    .pipe(someWritableStream);
  // Streams start paused; resume() begins the flow.
  exports.resume();
});
// Example: export two collections and run them through an external pipeline
// script instead of an inline transform callback.
mongoToS3.fromMongo([
{
exportOptions: "-h localhost:27017 -d database -c collection1",
workingDirectory: "/tmp"
},
{
exportOptions: "-h localhost:27017 -d database -c collection2",
workingDirectory: "/tmp"
}
])
// NOTE(review): "thoughPipeline" looks like a typo for "throughPipeline" —
// confirm against the mongo-to-s3 API before renaming; it may be the
// library's actual (misspelled) method name.
.thoughPipeline(__dirname + "/somePipeline.js")
// `someWritableStream` is a placeholder destination supplied by the caller.
.pipe(someWritableStream);