s3 - npm Package Compare versions

Comparing version 1.2.1 to 1.3.0

index.js

@@ -145,2 +145,3 @@ var AWS = require('aws-sdk');

     });
+    uploader.emit('stream', inStream);
     var hash = crypto.createHash('md5');

@@ -227,3 +228,3 @@ hash.on('data', function(digest) {

-  doWithRetry(doTheDownload, self.s3RetryCount, self.s3RetryDelay, function(err) {
+  doWithRetry(doDownloadWithPend, self.s3RetryCount, self.s3RetryDelay, function(err) {
     if (err) {

@@ -239,2 +240,11 @@ downloader.emit('error', err);

+  function doDownloadWithPend(cb) {
+    self.s3Pend.go(function(pendCb) {
+      doTheDownload(function(err) {
+        pendCb();
+        cb(err);
+      });
+    });
+  }
+
   function doTheDownload(cb) {
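The new `doDownloadWithPend` wrapper routes the actual download through `self.s3Pend`, presumably an instance of the `pend` dependency, so that `downloadFile` honors the client's `maxAsyncS3` concurrency limit (the 1.3.0 changelog below says as much). For readers unfamiliar with `pend`, a minimal standalone sketch of that pattern; the `max` value and the fake task are illustrative:

```js
// Minimal sketch of the pend pattern: at most `max` tasks run concurrently,
// the rest queue until a running task calls its callback. Values illustrative.
var Pend = require('pend');

var pend = new Pend();
pend.max = 10;

for (var i = 0; i < 100; i += 1) {
  pend.go(function(cb) {
    // stand-in for an S3 request; call cb() when the work finishes
    setTimeout(cb, 50);
  });
}

pend.wait(function(err) {
  // fires once every queued task has completed
  console.log('all tasks finished');
});
```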

@@ -455,2 +465,69 @@ var request = self.s3.getObject(s3Params);

+Client.prototype.copyObject = function(_s3Params) {
+  var self = this;
+  var ee = new EventEmitter();
+  var s3Params = extend({}, _s3Params);
+  delete s3Params.MFA;
+  doWithRetry(doCopyWithPend, self.s3RetryCount, self.s3RetryDelay, function(err, data) {
+    if (err) {
+      ee.emit('error', err);
+    } else {
+      ee.emit('end', data);
+    }
+  });
+  function doCopyWithPend(cb) {
+    self.s3Pend.go(function(pendCb) {
+      doTheCopy(function(err, data) {
+        pendCb();
+        cb(err, data);
+      });
+    });
+  }
+  function doTheCopy(cb) {
+    self.s3.copyObject(s3Params, cb);
+  }
+  return ee;
+};
+
+Client.prototype.moveObject = function(s3Params) {
+  var self = this;
+  var ee = new EventEmitter();
+  var copier = self.copyObject(s3Params);
+  var copySource = s3Params.CopySource;
+  var mfa = s3Params.MFA;
+  copier.on('error', function(err) {
+    ee.emit('error', err);
+  });
+  copier.on('end', function(data) {
+    ee.emit('copySuccess', data);
+    var slashIndex = copySource.indexOf('/');
+    var sourceBucket = copySource.substring(0, slashIndex);
+    var sourceKey = copySource.substring(slashIndex + 1);
+    var deleteS3Params = {
+      Bucket: sourceBucket,
+      Delete: {
+        Objects: [
+          {
+            Key: sourceKey,
+          },
+        ],
+        Quiet: true,
+      },
+      MFA: mfa,
+    };
+    var deleter = self.deleteObjects(deleteS3Params);
+    deleter.on('error', function(err) {
+      ee.emit('error', err);
+    });
+    var deleteData;
+    deleter.on('data', function(data) {
+      deleteData = data;
+    });
+    deleter.on('end', function() {
+      ee.emit('end', deleteData);
+    });
+  });
+  return ee;
+};
+
 function syncDir(self, params, directionIsToS3) {

@@ -797,7 +874,11 @@ var ee = new EventEmitter();

       if (err) {
-        tryIndex += 1;
-        if (tryIndex >= tryCount) {
+        if (err.retryable === false) {
           cb(err);
         } else {
-          setTimeout(tryOnce, delay);
+          tryIndex += 1;
+          if (tryIndex >= tryCount) {
+            cb(err);
+          } else {
+            setTimeout(tryOnce, delay);
+          }
         }
       }

@@ -804,0 +885,0 @@ } else {
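Read together with the hunk above, the retry helper now gives up immediately on errors that the AWS SDK flags with `retryable: false` instead of burning through the remaining attempts. A standalone sketch of that control flow; the wrapper's signature is inferred from the `doWithRetry(fn, self.s3RetryCount, self.s3RetryDelay, cb)` call sites earlier in this diff, so treat it as an approximation rather than the package's exact source:

```js
// Approximate reconstruction of the retry helper's control flow after this change.
// `fn(cb)` is tried up to `tryCount` times with `delay` ms between attempts,
// except when an error carries `retryable === false` (as the AWS SDK sets on
// non-retryable failures), in which case it is surfaced immediately.
function doWithRetry(fn, tryCount, delay, cb) {
  var tryIndex = 0;

  tryOnce();

  function tryOnce() {
    fn(function(err, result) {
      if (err) {
        if (err.retryable === false) {
          // non-retryable error: give up right away
          cb(err);
        } else {
          tryIndex += 1;
          if (tryIndex >= tryCount) {
            cb(err);
          } else {
            setTimeout(tryOnce, delay);
          }
        }
      } else {
        cb(null, result);
      }
    });
  }
}
```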

package.json
 {
   "name": "s3",
-  "version": "1.2.1",
+  "version": "1.3.0",
   "description": "high level amazon s3 client. upload and download files and directories",

@@ -31,3 +31,3 @@ "main": "index.js",

"dependencies": {
"aws-sdk": "^2.0.0-rc.16",
"aws-sdk": "^2.0.0-rc.18",
"findit": "^1.2.0",

@@ -34,0 +34,0 @@ "pend": "^1.1.1",

README.md

@@ -125,2 +125,13 @@ # High Level Amazon S3 Client

## Tips
* Consider adding [graceful-fs](https://github.com/isaacs/node-graceful-fs) to
your application. This will improve performance when using the `uploadDir`
and `downloadDir` functions.
* Consider increasing the ulimit for the number of open files. This will also
improve performance when using the `uploadDir` and `downloadDir` functions.
* Consider increasing the socket pool size in the `http` and `https` global
agents (see the sketch after this list). This will improve bandwidth when using
the `uploadDir` and `downloadDir` functions.
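For the socket pool tip, a minimal sketch of what that adjustment typically looked like on Node versions of this era; the limit of 20 is illustrative, not a value recommended by this package:

```js
// Raise the per-host socket cap on the global agents so uploadDir/downloadDir
// can keep more S3 requests in flight. The number is illustrative.
var http = require('http');
var https = require('https');

http.globalAgent.maxSockets = 20;
https.globalAgent.maxSockets = 20;
```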
## API Documentation

@@ -185,2 +196,5 @@

`progressTotal` properties change.
* `'stream' (stream)` - emitted when a `ReadableStream` for `localFile` has
been opened. Be aware that this might fire multiple times if a request to S3
must be retried.
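A short usage sketch for the new event, assuming a `client` built with `s3.createClient()` as shown elsewhere in this README; the file path, bucket, and key are placeholders:

```js
var s3 = require('s3');

// client options (credentials, maxAsyncS3, retry settings) as documented
// elsewhere in this README
var client = s3.createClient({ s3Options: { /* credentials */ } });

var uploader = client.uploadFile({
  localFile: 'path/to/local/file.txt',                            // placeholder
  s3Params: { Bucket: 'example-bucket', Key: 'remote/file.txt' }, // placeholders
});
uploader.on('stream', function(inStream) {
  // may fire more than once if the request to S3 has to be retried
  console.log('read stream for localFile opened');
});
uploader.on('error', function(err) {
  console.error('unable to upload:', err.stack);
});
uploader.on('end', function() {
  console.log('done uploading');
});
```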

@@ -384,2 +398,34 @@ ### client.downloadFile(params)

### client.copyObject(s3Params)
See http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#copyObject-property
`s3Params` are the same. Don't forget that `CopySource` must contain the
source bucket name as well as the source key name.
The difference between using the AWS SDK `copyObject` and this one:
* Retries based on the client's retry settings.
Returns an `EventEmitter` with these events:
* `'error' (err)`
* `'end' (data)`
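A usage sketch, reusing the `client` from the example above; bucket and key names are placeholders:

```js
var copier = client.copyObject({
  Bucket: 'destination-bucket',
  Key: 'destination/key.txt',
  // CopySource is "<source bucket name>/<source key name>"
  CopySource: 'source-bucket/source/key.txt',
});
copier.on('error', function(err) {
  console.error('unable to copy:', err.stack);
});
copier.on('end', function(data) {
  console.log('done copying');
});
```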
### client.moveObject(s3Params)
See http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#copyObject-property
`s3Params` are the same. Don't forget that `CopySource` must contain the
source bucket name as well as the source key name.
Under the hood, this uses `copyObject` and then `deleteObjects` only if the
copy succeeded.
Returns an `EventEmitter` with these events:
* `'error' (err)`
* `'copySuccess' (data)`
* `'end' (data)`
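A usage sketch along the same lines; per the code in this release, `copySuccess` fires after the copy and `end` after the source object has been deleted. Names are placeholders:

```js
var mover = client.moveObject({
  Bucket: 'destination-bucket',
  Key: 'destination/key.txt',
  CopySource: 'source-bucket/source/key.txt',
});
mover.on('copySuccess', function(data) {
  console.log('copy finished; deleting the source object');
});
mover.on('end', function(data) {
  console.log('move complete');
});
mover.on('error', function(err) {
  console.error('unable to move:', err.stack);
});
```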
## Testing

@@ -391,2 +437,11 @@

### 1.3.0
* `downloadFile` respects `maxAsyncS3`
* Add `copyObject` API
* AWS JS SDK updated to 2.0.0-rc.18
* errors with `retryable` set to `false` are not retried
* Add `moveObject` API
* `uploadFile` emits a `stream` event.
### 1.2.1

@@ -393,0 +448,0 @@
