Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

ali-oss

Package Overview
Dependencies
Maintainers
7
Versions
127
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

ali-oss - npm Package Compare versions

Comparing version 6.13.2 to 6.14.0

12

CHANGELOG.md

@@ -5,2 +5,14 @@ # Changelog

## [6.14.0](https://github.com/aliyun/oss-nodejs-sdk/compare/v6.13.2...v6.14.0) (2021-03-19)
### Features
* add refreshSTSToken example ([#924](https://github.com/aliyun/oss-nodejs-sdk/issues/924)) ([f69d8ed](https://github.com/aliyun/oss-nodejs-sdk/commit/f69d8ed8dcebdefaee5acf540d1baef9866ffb26))
### Bug Fixes
* link in README.md ([#934](https://github.com/aliyun/oss-nodejs-sdk/issues/934)) ([1c676da](https://github.com/aliyun/oss-nodejs-sdk/commit/1c676da5a101dba16b5f355854c4fe38e818a87e))
### [6.13.2](https://github.com/aliyun/oss-nodejs-sdk/compare/v6.13.1...v6.13.2) (2021-01-21)

@@ -7,0 +19,0 @@

12

lib/browser/client.js

@@ -198,7 +198,7 @@ const debug = require('debug')('ali-oss');

proto.request = async function (params) {
const isAvailableStream = params.stream ? params.stream.readable : true;
if (this.options.retryMax && isAvailableStream) {
this.request = retry(request.bind(this), this.options.retryMax, {
if (this.options.retryMax) {
return await retry(request.bind(this), this.options.retryMax, {
errorHandler: (err) => {
const _errHandle = (_err) => {
if (params.stream) return false;
const statusErr = [-1, -2].includes(_err.status);

@@ -211,8 +211,6 @@ const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true);

}
});
})(params);
} else {
this.request = request.bind(this);
return request.call(this, params);
}
return await this.request(params);
};

@@ -219,0 +217,0 @@

@@ -1,3 +0,1 @@

// var debug = require('debug')('ali-oss:multipart');

@@ -12,2 +10,3 @@ const util = require('util');

const { isBuffer } = require('../common/utils/isBuffer');
const { retry } = require('../common/utils/retry');

@@ -107,3 +106,2 @@ const proto = exports;

return await this._resumeMultipart(checkpoint, options);

@@ -123,5 +121,3 @@ };

}
const {
file, fileSize, partSize, uploadId, doneParts, name
} = checkpoint;
const { file, fileSize, partSize, uploadId, doneParts, name } = checkpoint;

@@ -138,3 +134,3 @@ const internalDoneParts = [];

let uploadPartJob = function uploadPartJob(self, partNo) {
let uploadPartJob = (self, partNo) => {
// eslint-disable-next-line no-async-promise-executor

@@ -145,33 +141,14 @@ return new Promise(async (resolve, reject) => {

const pi = partOffs[partNo - 1];
const stream = self._createStream(file, pi.start, pi.end);
const content = await self._createBuffer(file, pi.start, pi.end);
const data = {
stream,
content,
size: pi.end - pi.start
};
if (isArray(self.multipartUploadStreams)) {
self.multipartUploadStreams.push(stream);
} else {
self.multipartUploadStreams = [stream];
}
const removeStreamFromMultipartUploadStreams = function () {
if (!stream.destroyed) {
stream.destroy();
}
const index = self.multipartUploadStreams.indexOf(stream);
if (index !== -1) {
self.multipartUploadStreams.splice(index, 1);
}
};
stream.on('close', removeStreamFromMultipartUploadStreams);
stream.on('end', removeStreamFromMultipartUploadStreams);
stream.on('error', removeStreamFromMultipartUploadStreams);
let result;
try {
result = await self._uploadPart(name, uploadId, partNo, data);
result = await self._uploadPart(name, uploadId, partNo, data, {
timeout: options.timeout
});
} catch (error) {
removeStreamFromMultipartUploadStreams();
if (error.status === 404) {

@@ -221,12 +198,18 @@ throw self._makeAbortEvent();

// upload in parallel
const jobErr = await this._parallel(todo, parallel, value => new Promise((resolve, reject) => {
uploadPartJob(that, value).then((result) => {
if (result) {
internalDoneParts.push(result);
}
resolve();
}).catch((err) => {
reject(err);
});
}));
const jobErr = await this._parallel(
todo,
parallel,
value => new Promise((resolve, reject) => {
uploadPartJob(that, value)
.then(result => {
if (result) {
internalDoneParts.push(result);
}
resolve();
})
.catch(err => {
reject(err);
});
})
);
multipartFinish = true;

@@ -243,3 +226,5 @@

if (jobErr && jobErr.length > 0) {
jobErr[0].message = `Failed to upload some parts with error: ${jobErr[0].toString()} part_num: ${jobErr[0].partNum}`;
jobErr[0].message = `Failed to upload some parts with error: ${jobErr[0].toString()} part_num: ${
jobErr[0].partNum
}`;
throw jobErr[0];

@@ -297,5 +282,8 @@ }

WebFileReadStream.prototype._read = function _read(size) {
if ((this.file && this.start >= this.file.size) ||
(this.fileBuffer && this.start >= this.fileBuffer.length) ||
(this.finish) || (this.start === 0 && !this.file)) {
if (
(this.file && this.start >= this.file.size) ||
(this.fileBuffer && this.start >= this.fileBuffer.length) ||
this.finish ||
(this.start === 0 && !this.file)
) {
if (!this.finish) {

@@ -326,17 +314,12 @@ this.fileBuffer = null;

proto._createStream = function _createStream(file, start, end) {
proto._createBuffer = async function _createBuffer(file, start, end) {
if (isBlob(file) || isFile(file)) {
return new WebFileReadStream(file.slice(start, end));
const _file = file.slice(start, end);
const fileContent = await _file.arrayBuffer();
return Buffer.from(fileContent);
} else if (isBuffer(file)) {
// we can't use Readable.from() since it is only support in Node v10
const iterable = file.subarray(start, end);
return new Readable({
read() {
this.push(iterable);
this.push(null);
}
});
return file.subarray(start, end);
} else {
throw new Error('_createBuffer requires File/Blob/Buffer.');
}
throw new Error('_createStream requires Buffer/File/Blob.');
};

@@ -353,3 +336,5 @@

partSize = safeSize;
console.warn(`partSize has been set to ${partSize}, because the partSize you provided causes partNumber to be greater than 10,000`);
console.warn(
`partSize has been set to ${partSize}, because the partSize you provided causes partNumber to be greater than 10,000`
);
}

@@ -356,0 +341,0 @@ return partSize;

@@ -61,2 +61,3 @@ // const debug = require('debug')('ali-oss:object');

options = options || {};
options.headers = options.headers || {};
name = this._objectName(name);

@@ -74,15 +75,4 @@ if (isBuffer(file)) {

const stream = this._createStream(file, 0, file.size);
content = await this._createBuffer(file, 0, file.size);
options.contentLength = await this._getFileSize(file);
try {
const result = await this.putStream(name, stream, options);
return result;
} catch (err) {
if (err.code === 'RequestTimeTooSkewed') {
this.options.amendTimeSkewed = +new Date(err.serverTime) - new Date();
return await this.put(name, file, options);
} else {
throw err;
}
}
} else {

@@ -92,3 +82,2 @@ throw new TypeError('Must provide Buffer/Blob/File for put.');

options.headers = options.headers || {};
this._convertMetaToHeaders(options.meta, options.headers);

@@ -95,0 +84,0 @@

@@ -1,1 +0,1 @@

exports.version="6.13.2"
exports.version="6.14.0"

@@ -56,2 +56,3 @@

creationDate: item.CreationDate,
storageClass: item.StorageClass,
StorageClass: item.StorageClass,

@@ -58,0 +59,0 @@ tag: formatTag(item)

@@ -170,7 +170,7 @@

proto.request = async function (params) {
const isAvailableStream = params.stream ? params.stream.readable : true;
if (this.options.retryMax && isAvailableStream) {
this.request = retry(request.bind(this), this.options.retryMax, {
errorHandler: (err) => {
const _errHandle = (_err) => {
if (this.options.retryMax) {
return await retry(request.bind(this), this.options.retryMax, {
errorHandler: err => {
const _errHandle = _err => {
if (params.stream) return false;
const statusErr = [-1, -2].includes(_err.status);

@@ -183,8 +183,6 @@ const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true);

}
});
})(params);
} else {
this.request = request.bind(this);
return await request.call(this, params);
}
return await this.request(params);
};

@@ -223,3 +221,5 @@

await setSTSToken.call(this);
return this.request(params);
if (!params.stream) {
return this.request(params);
}
}

@@ -226,0 +226,0 @@ }

@@ -23,23 +23,30 @@ const ms = require('humanize-ms');

module.exports = function (options) {
if (!options
|| !options.accessKeyId
|| !options.accessKeySecret) {
if (!options || !options.accessKeyId || !options.accessKeySecret) {
throw new Error('require accessKeyId, accessKeySecret');
}
if (options.stsToken && !options.refreshSTSToken) {
console.warn(
"It's recommended to set `refreshSTSToken` to refresh stsToken、accessKeyId、accessKeySecret automatically when sts info expires"
);
}
if (options.bucket) {
_checkBucketName(options.bucket);
}
const opts = Object.assign({
region: 'oss-cn-hangzhou',
internal: false,
secure: false,
timeout: 60000,
bucket: null,
endpoint: null,
cname: false,
isRequestPay: false,
sldEnable: false,
headerEncoding: 'utf-8',
refreshSTSToken: null
}, options);
const opts = Object.assign(
{
region: 'oss-cn-hangzhou',
internal: false,
secure: false,
timeout: 60000,
bucket: null,
endpoint: null,
cname: false,
isRequestPay: false,
sldEnable: false,
headerEncoding: 'utf-8',
refreshSTSToken: null,
retryMax: 0
},
options
);

@@ -46,0 +53,0 @@ opts.accessKeyId = opts.accessKeyId.trim();

@@ -145,5 +145,8 @@ const copy = require('copy-to');

const data = {
stream: this._createStream(file, start, end),
size: end - start
};
const isBrowserEnv = process && process.browser;
isBrowserEnv
? (data.content = await this._createBuffer(file, start, end))
: (data.stream = await this._createStream(file, start, end));
return await this._uploadPart(name, uploadId, partNo, data, options);

@@ -172,3 +175,5 @@ };

proto.completeMultipartUpload = async function completeMultipartUpload(name, uploadId, parts, options) {
const completeParts = parts.concat().sort((a, b) => a.number - b.number)
const completeParts = parts
.concat()
.sort((a, b) => a.number - b.number)
.filter((item, index, arr) => !index || item.number !== arr[index - 1].number);

@@ -187,3 +192,3 @@ let xml = '<?xml version="1.0" encoding="UTF-8"?>\n<CompleteMultipartUpload>\n';

let opt = {};
opt = deepCopyWith(options, (_) => {
opt = deepCopyWith(options, _ => {
if (isBuffer(_)) return null;

@@ -241,3 +246,4 @@ });

params.mime = opt.mime;
params.stream = data.stream;
const isBrowserEnv = process && process.browser;
isBrowserEnv ? (params.content = data.content) : (params.stream = data.stream);
params.successStatuses = [200];

@@ -252,5 +258,6 @@

}
data.stream = null;
params.stream = null;
if (data.stream) {
data.stream = null;
params.stream = null;
}
return {

@@ -257,0 +264,0 @@ name,

@@ -1,2 +0,1 @@

const fs = require('fs');

@@ -10,2 +9,3 @@ const is = require('is-type-of');

const { isBuffer } = require('./common/utils/isBuffer');
const { retry } = require('./common/utils/retry');

@@ -56,6 +56,4 @@ const proto = exports;

if (fileSize < minPartSize) {
const stream = this._createStream(file, 0, fileSize);
options.contentLength = fileSize;
const result = await this.putStream(name, stream, options);
const result = await this.put(name, file, options);
if (options && options.progress) {

@@ -117,69 +115,81 @@ await options.progress(1);

}
const {
file, fileSize, partSize, uploadId, doneParts, name
} = checkpoint;
const { file, fileSize, partSize, uploadId, doneParts, name } = checkpoint;
const partOffs = this._divideParts(fileSize, partSize);
const numParts = partOffs.length;
let uploadPartJob = retry(
(self, partNo) => {
// eslint-disable-next-line no-async-promise-executor
return new Promise(async (resolve, reject) => {
try {
if (!self.isCancel()) {
const pi = partOffs[partNo - 1];
const stream = await self._createStream(file, pi.start, pi.end);
const data = {
stream,
size: pi.end - pi.start
};
let uploadPartJob = function uploadPartJob(self, partNo) {
// eslint-disable-next-line no-async-promise-executor
return new Promise(async (resolve, reject) => {
try {
if (!self.isCancel()) {
const pi = partOffs[partNo - 1];
const stream = self._createStream(file, pi.start, pi.end);
const data = {
stream,
size: pi.end - pi.start
};
if (isArray(self.multipartUploadStreams)) {
self.multipartUploadStreams.push(data.stream);
} else {
self.multipartUploadStreams = [data.stream];
}
if (isArray(self.multipartUploadStreams)) {
self.multipartUploadStreams.push(data.stream);
} else {
self.multipartUploadStreams = [data.stream];
}
const removeStreamFromMultipartUploadStreams = function () {
if (!stream.destroyed) {
stream.destroy();
}
const index = self.multipartUploadStreams.indexOf(stream);
if (index !== -1) {
self.multipartUploadStreams.splice(index, 1);
}
};
const removeStreamFromMultipartUploadStreams = function () {
if (!stream.destroyed) {
stream.destroy();
}
const index = self.multipartUploadStreams.indexOf(stream);
if (index !== -1) {
self.multipartUploadStreams.splice(index, 1);
}
};
stream.on('close', removeStreamFromMultipartUploadStreams);
stream.on('error', removeStreamFromMultipartUploadStreams);
stream.on('close', removeStreamFromMultipartUploadStreams);
stream.on('error', removeStreamFromMultipartUploadStreams);
let result;
try {
result = await self._uploadPart(name, uploadId, partNo, data);
} catch (error) {
removeStreamFromMultipartUploadStreams();
if (error.status === 404) {
throw self._makeAbortEvent();
let result;
try {
result = await self._uploadPart(name, uploadId, partNo, data, {
timeout: options.timeout
});
} catch (error) {
removeStreamFromMultipartUploadStreams();
if (error.status === 404) {
throw self._makeAbortEvent();
}
throw error;
}
throw error;
}
if (!self.isCancel()) {
doneParts.push({
number: partNo,
etag: result.res.headers.etag
});
checkpoint.doneParts = doneParts;
if (!self.isCancel()) {
doneParts.push({
number: partNo,
etag: result.res.headers.etag
});
checkpoint.doneParts = doneParts;
if (options.progress) {
await options.progress(doneParts.length / numParts, checkpoint, result.res);
if (options.progress) {
await options.progress(doneParts.length / numParts, checkpoint, result.res);
}
}
}
resolve();
} catch (err) {
err.partNum = partNo;
reject(err);
}
resolve();
} catch (err) {
err.partNum = partNo;
reject(err);
});
},
this.options.retryMax,
{
errorHandler: err => {
const _errHandle = _err => {
const statusErr = [-1, -2].includes(_err.status);
const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true);
return statusErr && requestErrorRetryHandle(_err);
};
return !!_errHandle(err);
}
});
};
}
);

@@ -214,3 +224,5 @@ const all = Array.from(new Array(numParts), (x, i) => i + 1);

if (jobErr && jobErr.length > 0) {
jobErr[0].message = `Failed to upload some parts with error: ${jobErr[0].toString()} part_num: ${jobErr[0].partNum}`;
jobErr[0].message = `Failed to upload some parts with error: ${jobErr[0].toString()} part_num: ${
jobErr[0].partNum
}`;
throw jobErr[0];

@@ -231,3 +243,3 @@ }

return file.size;
} if (is.string(file)) {
} else if (is.string(file)) {
const stat = await this._statFile(file);

@@ -274,5 +286,8 @@ return stat.size;

WebFileReadStream.prototype._read = function _read(size) {
if ((this.file && this.start >= this.file.size) ||
(this.fileBuffer && this.start >= this.fileBuffer.length) ||
(this.finish) || (this.start === 0 && !this.file)) {
if (
(this.file && this.start >= this.file.size) ||
(this.fileBuffer && this.start >= this.fileBuffer.length) ||
this.finish ||
(this.start === 0 && !this.file)
) {
if (!this.finish) {

@@ -342,3 +357,5 @@ this.fileBuffer = null;

partSize = safeSize;
console.warn(`partSize has been set to ${partSize}, because the partSize you provided causes partNumber to be greater than 10,000`);
console.warn(
`partSize has been set to ${partSize}, because the partSize you provided causes partNumber to be greater than 10,000`
);
}

@@ -345,0 +362,0 @@ return partSize;

@@ -11,2 +11,3 @@ const debug = require('debug')('ali-oss:object');

const { isBuffer } = require('./common/utils/isBuffer');
const { retry } = require('./common/utils/retry');

@@ -70,5 +71,18 @@ const proto = exports;

options.mime = options.mime || mime.getType(path.extname(file));
const stream = fs.createReadStream(file);
options.contentLength = await this._getFileSize(file);
return await this.putStream(name, stream, options);
const getStream = () => fs.createReadStream(file);
const putStreamStb = (objectName, makeStream, configOption) => {
return this.putStream(objectName, makeStream(), configOption);
};
return await retry(putStreamStb, this.options.retryMax, {
errorHandler: err => {
const _errHandle = _err => {
const statusErr = [-1, -2].includes(_err.status);
const requestErrorRetryHandle = this.options.requestErrorRetryHandle || (() => true);
return statusErr && requestErrorRetryHandle(_err);
};
if (_errHandle(err)) return true;
return false;
}
})(name, getStream, options);
} else if (is.readableStream(file)) {

@@ -75,0 +89,0 @@ return await this.putStream(name, file, options);

{
"name": "ali-oss",
"version": "6.13.2",
"version": "6.14.0",
"description": "aliyun oss(object storage service) node client",

@@ -114,3 +114,3 @@ "main": "lib/client.js",

"sinon": "^1.17.7",
"snyk": "^1.231.0",
"snyk": "1.454.0",
"standard-version": "^8.0.1",

@@ -117,0 +117,0 @@ "stream-equal": "^1.1.0",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc