@google-cloud/storage - npm Package Compare versions

Comparing version 6.11.0 to 6.12.0


build/src/nodejs-common/service.js

@@ -22,2 +22,3 @@ "use strict";

const util_1 = require("./util");
const util_2 = require("../util");
exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}';

@@ -147,3 +148,3 @@ class Service {

'User-Agent': userAgent,
'x-goog-api-client': `gl-node/${process.versions.node} gccl/${pkg.version} gccl-invocation-id/${uuid.v4()}`,
'x-goog-api-client': `${(0, util_2.getRuntimeTrackingString)()} gccl/${pkg.version} gccl-invocation-id/${uuid.v4()}`,
});

@@ -150,0 +151,0 @@ if (reqOpts.shouldReturnStream) {
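The change in this file (repeated in nodejs-common/index.js and resumable-upload.js below) swaps the hard-coded `gl-node/${process.versions.node}` prefix of the `x-goog-api-client` header for `getRuntimeTrackingString()`, so the header identifies the actual runtime. A minimal sketch of how the new header value is assembled, using a standalone copy of the helper added in util.js (version numbers and the `pkg` stand-in are illustrative):

```js
const uuid = require('uuid');

// Mirrors the getRuntimeTrackingString() implementation added in build/src/util.js (see below).
function getRuntimeTrackingString() {
  const deno = globalThis.Deno;
  if (deno && deno.version && deno.version.deno) {
    return `gl-deno/${deno.version.deno}`; // running under Deno
  }
  return `gl-node/${process.versions.node}`; // running under Node.js
}

const pkg = {version: '6.12.0'}; // illustrative stand-in for package.json
const header = `${getRuntimeTrackingString()} gccl/${pkg.version} gccl-invocation-id/${uuid.v4()}`;
console.log(header); // e.g. "gl-node/18.16.0 gccl/6.12.0 gccl-invocation-id/6fa459ea-..."
```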

build/src/nodejs-common/index.js

@@ -31,2 +31,3 @@ "use strict";

const service_1 = require("./service");
const util_1 = require("../util");
const packageJson = require('../../../package.json');

@@ -641,3 +642,3 @@ // eslint-disable-next-line @typescript-eslint/no-var-requires

'User-Agent': util.getUserAgentFromPackageJson(packageJson),
'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${uuid.v4()}`,
'x-goog-api-client': `${(0, util_1.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-invocation-id/${uuid.v4()}`,
};

@@ -644,0 +645,0 @@ }

build/src/resumable-upload.js

@@ -31,2 +31,3 @@ "use strict";

const uuid = require("uuid");
const util_1 = require("./util");
const NOT_FOUND_STATUS_CODE = 404;

@@ -318,3 +319,3 @@ const RESUMABLE_INCOMPLETE_STATUS_CODE = 308;

headers: {
'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.uri}`,
'x-goog-api-client': `${(0, util_1.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.uri}`,
...headers,

@@ -458,3 +459,3 @@ },

const headers = {
'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.chunk}`,
'x-goog-api-client': `${(0, util_1.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.chunk}`,
};

@@ -580,3 +581,3 @@ // If using multiple chunk upload, set appropriate header

'Content-Range': 'bytes */*',
'x-goog-api-client': `gl-node/${process.versions.node} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.offset}`,
'x-goog-api-client': `${(0, util_1.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-invocation-id/${this.currentInvocationId.offset}`,
},

@@ -583,0 +584,0 @@ };

build/src/transfer-manager.d.ts

@@ -16,4 +16,6 @@ /*!

*/
/// <reference types="node" />
import { Bucket, UploadOptions, UploadResponse } from './bucket';
import { DownloadOptions, DownloadResponse, File } from './file';
import { GaxiosResponse } from 'gaxios';
export interface UploadManyFilesOptions {

@@ -37,2 +39,26 @@ concurrencyLimit?: number;

}
export interface UploadFileInChunksOptions {
concurrencyLimit?: number;
chunkSizeBytes?: number;
uploadName?: string;
maxQueueSize?: number;
uploadId?: string;
partsMap?: Map<number, string>;
validation?: 'md5' | false;
}
export interface MultiPartUploadHelper {
bucket: Bucket;
fileName: string;
uploadId?: string;
partsMap?: Map<number, string>;
initiateUpload(): Promise<void>;
uploadPart(partNumber: number, chunk: Buffer, validation?: 'md5' | false): Promise<void>;
completeUpload(): Promise<GaxiosResponse | undefined>;
}
export type MultiPartHelperGenerator = (bucket: Bucket, fileName: string, uploadId?: string, partsMap?: Map<number, string>) => MultiPartUploadHelper;
export declare class MultiPartUploadError extends Error {
private uploadId;
private partsMap;
constructor(message: string, uploadId: string, partsMap: Map<number, string>);
}
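`MultiPartHelperGenerator` lets callers substitute their own `MultiPartUploadHelper` implementation for the default XML one. A hedged sketch of the shape a custom generator must return; this is a non-functional stub that only logs, purely to illustrate the interface:

```js
// Stub generator matching the MultiPartHelperGenerator signature above.
// It performs no real uploads; it only shows the required shape.
const loggingGenerator = (bucket, fileName, uploadId, partsMap) => ({
  bucket,
  fileName,
  uploadId,
  partsMap: partsMap || new Map(),
  async initiateUpload() {
    console.log(`initiate MPU for ${fileName}`);
  },
  async uploadPart(partNumber, chunk, validation) {
    console.log(`part ${partNumber}: ${chunk.length} bytes (validation: ${validation})`);
    this.partsMap.set(partNumber, `etag-${partNumber}`); // fake ETag for illustration
  },
  async completeUpload() {
    console.log(`complete MPU with ${this.partsMap.size} parts`);
    return undefined;
  },
});
```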
/**

@@ -160,3 +186,3 @@ * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket.

* //-
* const response = await transferManager.downloadLargeFile(bucket.file('large-file.txt'));
* const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'));
* // Your local directory now contains:

@@ -168,3 +194,44 @@ * // - "large-file.txt" (with the contents from my-bucket.large-file.txt)

downloadFileInChunks(fileOrName: File | string, options?: DownloadFileInChunksOptions): Promise<void | DownloadResponse>;
/**
* @typedef {object} UploadFileInChunksOptions
* @property {number} [concurrencyLimit] The number of concurrently executing promises
* to use when uploading the file.
* @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded.
* @property {string} [uploadName] Name of the file when saving to GCS. If omitted, the name is taken from the file path.
* @property {number} [maxQueueSize] The maximum number of chunks to hold in memory concurrently while uploading. If not specified,
* defaults to the specified concurrency limit.
* @property {string} [uploadId] If specified, attempts to resume a previous upload.
* @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk
* specified in partsMap.
* @experimental
*/
/**
* Upload a large file in chunks utilizing parallel upload operations. If the upload fails, an uploadId and a
* map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to
* resume the upload.
*
* @param {string} [filePath] The path of the file to be uploaded
* @param {UploadFileInChunksOptions} [options] Configuration options.
* @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this.
* @returns {Promise<GaxiosResponse | undefined>} If successful, a promise resolving when the upload completes; otherwise a MultiPartUploadError containing the message, uploadId, and parts map is thrown.
*
* @example
* ```
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('my-bucket');
* const transferManager = new TransferManager(bucket);
*
* //-
* // Upload a large file in chunks utilizing parallel operations.
* //-
* const response = await transferManager.uploadFileInChunks('large-file.txt');
* // Your bucket now contains:
* // - "large-file.txt"
* ```
*
* @experimental
*/
uploadFileInChunks(filePath: string, options?: UploadFileInChunksOptions, generator?: MultiPartHelperGenerator): Promise<GaxiosResponse | undefined>;
private getPathsFromDirectory;
}
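When a chunked upload fails, `uploadFileInChunks` throws a `MultiPartUploadError` carrying the `uploadId` and the map of already-uploaded parts, which can be fed back in to resume. A hedged sketch of that flow (reading `uploadId` and `partsMap` off the error is an assumption, since the declaration above marks them private; the bucket name is hypothetical and `TransferManager` is assumed to be exported from the package root, as the examples in this diff suggest):

```js
const {Storage, TransferManager} = require('@google-cloud/storage');

async function uploadWithOneResume(filePath) {
  const storage = new Storage();
  const bucket = storage.bucket('my-bucket'); // hypothetical bucket name
  const transferManager = new TransferManager(bucket);
  try {
    return await transferManager.uploadFileInChunks(filePath);
  } catch (e) {
    // e should be a MultiPartUploadError; retry once from its resume state.
    return await transferManager.uploadFileInChunks(filePath, {
      uploadId: e.uploadId,
      partsMap: e.partsMap,
    });
  }
}
```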

build/src/transfer-manager.js

@@ -17,4 +17,10 @@ "use strict";

*/
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_handleErrorResponse;
Object.defineProperty(exports, "__esModule", { value: true });
exports.TransferManager = void 0;
exports.TransferManager = exports.MultiPartUploadError = void 0;
const pLimit = require("p-limit");

@@ -25,2 +31,6 @@ const path = require("path");

const crc32c_1 = require("./crc32c");
const google_auth_library_1 = require("google-auth-library");
const fast_xml_parser_1 = require("fast-xml-parser");
const retry = require("async-retry");
const crypto_1 = require("crypto");
/**

@@ -51,4 +61,151 @@ * Default number of concurrently executing promises to use when calling uploadManyFiles.

const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 10 * 1024 * 1024;
/**
* The chunk size in bytes to use when calling uploadFileInChunks.
* @experimental
*/
const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024;
/**
* Default number of concurrently executing promises to use when calling uploadFileInChunks.
* @experimental
*/
const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 2;
const EMPTY_REGEX = '(?:)';
const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => {
return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap);
};
class MultiPartUploadError extends Error {
constructor(message, uploadId, partsMap) {
super(message);
this.uploadId = uploadId;
this.partsMap = partsMap;
}
}
exports.MultiPartUploadError = MultiPartUploadError;
/**
* Class representing an implementation of MPU in the XML API. This class is not meant for public usage.
*
* @private
* @experimental
*/
class XMLMultiPartUploadHelper {
constructor(bucket, fileName, uploadId, partsMap) {
_XMLMultiPartUploadHelper_instances.add(this);
this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth();
this.uploadId = uploadId || '';
this.bucket = bucket;
this.fileName = fileName;
// eslint-disable-next-line prettier/prettier
this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`;
this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' });
this.xmlParser = new fast_xml_parser_1.XMLParser();
this.partsMap = partsMap || new Map();
this.retryOptions = {
retries: this.bucket.storage.retryOptions.maxRetries,
factor: this.bucket.storage.retryOptions.retryDelayMultiplier,
maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000,
maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000,
};
}
/**
* Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id.
*
* @returns {Promise<void>}
*/
async initiateUpload() {
const url = `${this.baseUrl}?uploads`;
return retry(async (bail) => {
try {
const res = await this.authClient.request({
method: 'POST',
url,
});
if (res.data && res.data.error) {
throw res.data.error;
}
const parsedXML = this.xmlParser.parse(res.data);
this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId;
}
catch (e) {
__classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);
}
}, this.retryOptions);
}
/**
* Uploads the provided chunk of data to the XML API using the previously created upload id.
*
* @param {number} partNumber the sequence number of this chunk.
* @param {Buffer} chunk the chunk of data to be uploaded.
* @param {'md5' | false} validation whether to include an MD5 hash in the headers so the server
* can verify the chunk was not corrupted in transit.
* @returns {Promise<void>}
*/
async uploadPart(partNumber, chunk, validation) {
const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`;
let headers = {};
if (validation === 'md5') {
const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64');
headers = {
'Content-MD5': hash,
};
}
return retry(async (bail) => {
try {
const res = await this.authClient.request({
url,
method: 'PUT',
body: chunk,
headers,
});
if (res.data && res.data.error) {
throw res.data.error;
}
this.partsMap.set(partNumber, res.headers['etag']);
}
catch (e) {
__classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);
}
}, this.retryOptions);
}
/**
* Sends the final request of the MPU to tell GCS the upload is now complete.
*
* @returns {Promise<GaxiosResponse | undefined>}
*/
async completeUpload() {
const url = `${this.baseUrl}?uploadId=${this.uploadId}`;
const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0]));
const parts = [];
for (const entry of sortedMap.entries()) {
parts.push({ PartNumber: entry[0], ETag: entry[1] });
}
const body = `<CompleteMultipartUpload>${this.xmlBuilder.build(parts)}</CompleteMultipartUpload>`;
return retry(async (bail) => {
try {
const res = await this.authClient.request({
url,
method: 'POST',
body,
});
if (res.data && res.data.error) {
throw res.data.error;
}
return res;
}
catch (e) {
__classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail);
return;
}
}, this.retryOptions);
}
}
_XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) {
if (this.bucket.storage.retryOptions.autoRetry &&
this.bucket.storage.retryOptions.retryableErrorFn(err)) {
throw err;
}
else {
bail(err);
}
};
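The helper above follows the XML API's three-step flow: `POST ?uploads` to obtain an UploadId, `PUT ?partNumber=N&uploadId=...` for each chunk while collecting ETags, and a final `POST` whose body lists every part. A hand-rolled sketch of the body that `completeUpload()` assembles via `XMLBuilder` (ETag values are illustrative):

```js
// Equivalent of the XMLBuilder output for a two-part upload (illustrative ETags).
const partsMap = new Map([[2, '"etag-2"'], [1, '"etag-1"']]);
const parts = [...partsMap.entries()]
  .sort((a, b) => a[0] - b[0]) // parts must appear in ascending PartNumber order
  .map(([n, etag]) => `<Part><PartNumber>${n}</PartNumber><ETag>${etag}</ETag></Part>`)
  .join('');
const body = `<CompleteMultipartUpload>${parts}</CompleteMultipartUpload>`;
// <CompleteMultipartUpload><Part><PartNumber>1</PartNumber><ETag>"etag-1"</ETag></Part>...</CompleteMultipartUpload>
```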
/**
* Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket.

@@ -246,3 +403,3 @@ *

* //-
* const response = await transferManager.downloadLargeFile(bucket.file('large-file.txt'));
* const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt'));
* // Your local directory now contains:

@@ -300,2 +457,77 @@ * // - "large-file.txt" (with the contents from my-bucket.large-file.txt)

}
/**
* @typedef {object} UploadFileInChunksOptions
* @property {number} [concurrencyLimit] The number of concurrently executing promises
* to use when uploading the file.
* @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded.
* @property {string} [uploadName] Name of the file when saving to GCS. If omitted, the name is taken from the file path.
* @property {number} [maxQueueSize] The maximum number of chunks to hold in memory concurrently while uploading. If not specified,
* defaults to the specified concurrency limit.
* @property {string} [uploadId] If specified, attempts to resume a previous upload.
* @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk
* specified in partsMap.
* @experimental
*/
/**
* Upload a large file in chunks utilizing parallel upload operations. If the upload fails, an uploadId and a
* map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to
* resume the upload.
*
* @param {string} [filePath] The path of the file to be uploaded
* @param {UploadFileInChunksOptions} [options] Configuration options.
* @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this.
* @returns {Promise<GaxiosResponse | undefined>} If successful, a promise resolving when the upload completes; otherwise a MultiPartUploadError containing the message, uploadId, and parts map is thrown.
*
* @example
* ```
* const {Storage} = require('@google-cloud/storage');
* const storage = new Storage();
* const bucket = storage.bucket('my-bucket');
* const transferManager = new TransferManager(bucket);
*
* //-
* // Upload a large file in chunks utilizing parallel operations.
* //-
* const response = await transferManager.uploadFileInChunks('large-file.txt');
* // Your bucket now contains:
* // - "large-file.txt"
* ```
*
* @experimental
*/
async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) {
const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE;
const limit = pLimit(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT);
const maxQueueSize = options.maxQueueSize ||
options.concurrencyLimit ||
DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT;
const fileName = options.uploadName || path.basename(filePath);
const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap);
let partNumber = 1;
let promises = [];
try {
if (options.uploadId === undefined) {
await mpuHelper.initiateUpload();
}
const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize;
const readStream = (0, fs_1.createReadStream)(filePath, {
highWaterMark: chunkSize,
start: startOrResumptionByte,
});
// p-limit only limits the number of running promises. We do not want to hold an entire
// large file in memory at once, so the promises array acts as a queue that holds at most maxQueueSize chunks in memory.
for await (const curChunk of readStream) {
if (promises.length >= maxQueueSize) {
await Promise.all(promises);
promises = [];
}
promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation)));
}
await Promise.all(promises);
return await mpuHelper.completeUpload();
}
catch (e) {
throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap);
}
}
async *getPathsFromDirectory(directory) {

@@ -302,0 +534,0 @@ const filesAndSubdirectories = await fs_1.promises.readdir(directory, {
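The queueing logic in `uploadFileInChunks` bounds memory use: at most `maxQueueSize` chunks are buffered while `concurrencyLimit` of them upload. A quick worked example using the default constants declared in this diff:

```js
// Defaults from this diff: 2 concurrent uploads, 32 MiB chunks,
// and maxQueueSize falling back to the concurrency limit.
const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 2;
const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024;
const maxBufferedBytes =
  DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT * UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE;
console.log(`${maxBufferedBytes / (1024 * 1024)} MiB`); // "64 MiB" held in memory at most
```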

build/src/util.d.ts

@@ -70,2 +70,7 @@ /// <reference types="node" />

export declare function formatAsUTCISO(dateTimeToFormat: Date, includeTime?: boolean, dateDelimiter?: string, timeDelimiter?: string): string;
/**
* Examines the runtime environment and returns the appropriate tracking string.
* @returns {string} metrics tracking string based on the current runtime environment.
*/
export declare function getRuntimeTrackingString(): string;
export declare class PassThroughShim extends PassThrough {

@@ -72,0 +77,0 @@ private shouldEmitReading;

build/src/util.js

@@ -16,3 +16,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.PassThroughShim = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0;
exports.PassThroughShim = exports.getRuntimeTrackingString = exports.formatAsUTCISO = exports.convertObjKeysToSnakeCase = exports.unicodeJSONStringify = exports.objectKeyToLowercase = exports.qsStringify = exports.encodeURI = exports.fixedEncodeURIComponent = exports.objectEntries = exports.normalize = void 0;
const querystring = require("querystring");

@@ -149,2 +149,26 @@ const stream_1 = require("stream");

exports.formatAsUTCISO = formatAsUTCISO;
/**
* Examines the runtime environment and returns the appropriate tracking string.
* @returns {string} metrics tracking string based on the current runtime environment.
*/
function getRuntimeTrackingString() {
if (
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
globalThis.Deno &&
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
globalThis.Deno.version &&
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
globalThis.Deno.version.deno) {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
return `gl-deno/${globalThis.Deno.version.deno}`;
}
else {
return `gl-node/${process.versions.node}`;
}
}
exports.getRuntimeTrackingString = getRuntimeTrackingString;
class PassThroughShim extends stream_1.PassThrough {

@@ -151,0 +175,0 @@ constructor() {
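`getRuntimeTrackingString` keys off `globalThis.Deno` to distinguish runtimes. A hedged usage sketch (the deep import path is inferred from the build layout shown in this diff and may not be a supported entry point; version numbers are illustrative):

```js
// Under Node.js this prints something like "gl-node/18.16.0";
// under Deno, something like "gl-deno/1.34.3".
const {getRuntimeTrackingString} = require('@google-cloud/storage/build/src/util');
console.log(getRuntimeTrackingString());
```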

package.json

{
"name": "@google-cloud/storage",
"description": "Cloud Storage Client Library for Node.js",
"version": "6.11.0",
"version": "6.12.0",
"license": "Apache-2.0",

@@ -62,2 +62,3 @@ "author": "Google Inc.",

"extend": "^3.0.2",
"fast-xml-parser": "^4.2.2",
"gaxios": "^5.0.0",

@@ -92,3 +93,3 @@ "google-auth-library": "^8.0.1",

"@types/yargs": "^17.0.10",
"c8": "^7.0.0",
"c8": "^8.0.0",
"form-data": "^4.0.0",

@@ -95,0 +96,0 @@ "gts": "^3.1.0",

Sorry, the diff of this file is too big to display
