Socket
Socket
Sign inDemoInstall

@google-cloud/storage

Package Overview
Dependencies
Maintainers
1
Versions
183
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@google-cloud/storage - npm Package Compare versions

Comparing version 7.5.0 to 7.6.0

37

build/cjs/src/file.d.ts

@@ -14,3 +14,3 @@ /// <reference types="node" />

import { Duplexify, GCCL_GCS_CMD_KEY } from './nodejs-common/util.js';
import { CRC32CValidatorGenerator } from './crc32c.js';
import { CRC32C, CRC32CValidatorGenerator } from './crc32c.js';
import { URL } from 'url';

@@ -108,4 +108,13 @@ import { BaseMetadata, DeleteCallback, DeleteOptions, RequestResponse, SetMetadataOptions } from './nodejs-common/service-object.js';

export type PredefinedAcl = 'authenticatedRead' | 'bucketOwnerFullControl' | 'bucketOwnerRead' | 'private' | 'projectPrivate' | 'publicRead';
type PublicResumableUploadOptions = 'chunkSize' | 'highWaterMark' | 'metadata' | 'origin' | 'offset' | 'predefinedAcl' | 'private' | 'public' | 'uri' | 'userProject';
type PublicResumableUploadOptions = 'chunkSize' | 'highWaterMark' | 'isPartialUpload' | 'metadata' | 'origin' | 'offset' | 'predefinedAcl' | 'private' | 'public' | 'uri' | 'userProject';
export interface CreateResumableUploadOptions extends Pick<resumableUpload.UploadConfig, PublicResumableUploadOptions> {
/**
* A CRC32C to resume from when continuing a previous upload. It is recommended
* to capture the `crc32c` event from previous upload sessions to provide in
* subsequent requests in order to accurately track the upload. This is **required**
* when validating a final portion of the uploaded object.
*
* @see {@link CRC32C.from} for possible values.
*/
resumeCRC32C?: Parameters<(typeof CRC32C)['from']>[0];
preconditionOpts?: PreconditionOptions;

@@ -280,3 +289,5 @@ [GCCL_GCS_CMD_KEY]?: resumableUpload.UploadConfig[typeof GCCL_GCS_CMD_KEY];

UPLOAD_MISMATCH_DELETE_FAIL = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n ",
UPLOAD_MISMATCH = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."
UPLOAD_MISMATCH = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again.",
MD5_RESUMED_UPLOAD = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value",
MISSING_RESUME_CRC32C_FINAL_UPLOAD = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."
}

@@ -660,4 +671,4 @@ /**

*
* See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload| Upload Options (Simple or Resumable)}
* See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation}
* See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)}
* See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation}
*

@@ -727,2 +738,18 @@ * @param {CreateWriteStreamOptions} [options] Configuration options.

* ```
*
* //-
* // <h4>Continuing a Resumable Upload</h4>
* //
* // One can capture a `uri` from a resumable upload to reuse later.
* // Additionally, for validation, one can also capture and pass `crc32c`.
* //-
* let uri: string | undefined = undefined;
* let resumeCRC32C: string | undefined = undefined;
*
* fs.createWriteStream()
* .on('uri', link => {uri = link})
* .on('crc32', crc32c => {resumeCRC32C = crc32c});
*
* // later...
* fs.createWriteStream({uri, resumeCRC32C});
*/

@@ -729,0 +756,0 @@ createWriteStream(options?: CreateWriteStreamOptions): Writable;

10

build/cjs/src/hash-stream-validator.d.ts
/// <reference types="node" />
/// <reference types="node" />
import { Transform } from 'stream';
import { CRC32CValidatorGenerator } from './crc32c.js';
import { CRC32CValidatorGenerator, CRC32CValidator } from './crc32c.js';
interface HashStreamValidatorOptions {

@@ -10,3 +10,5 @@ /** Enables CRC32C calculation. To validate a provided value use `crc32cExpected`. */

md5: boolean;
/** Set a custom CRC32C generator */
/** A CRC32C instance for validation. To validate a provided value use `crc32cExpected`. */
crc32cInstance: CRC32CValidator;
/** Set a custom CRC32C generator. Used if `crc32cInstance` has not been provided. */
crc32cGenerator: CRC32CValidatorGenerator;

@@ -28,2 +30,6 @@ /** Sets the expected CRC32C value to verify once all data has been consumed. Also sets the `crc32c` option to `true` */

constructor(options?: Partial<HashStreamValidatorOptions>);
/**
* Return the current CRC32C value, if available.
*/
get crc32c(): string | undefined;
_flush(callback: (error?: Error | null | undefined) => void): void;

@@ -30,0 +36,0 @@ _transform(chunk: Buffer, encoding: BufferEncoding, callback: (e?: Error) => void): void;

@@ -46,4 +46,9 @@ "use strict";

if (this.crc32cEnabled) {
const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;
__classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f");
if (options.crc32cInstance) {
__classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f");
}
else {
const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR;
__classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f");
}
}

@@ -54,2 +59,9 @@ if (this.md5Enabled) {

}
/**
* Return the current CRC32C value, if available.
*/
get crc32c() {
var _a;
return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString();
}
_flush(callback) {

@@ -56,0 +68,0 @@ if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) {

@@ -8,2 +8,3 @@ /// <reference types="node" />

import { GCCL_GCS_CMD_KEY } from './nodejs-common/util.js';
import { FileMetadata } from './file.js';
export declare const PROTOCOL_REGEX: RegExp;

@@ -78,2 +79,15 @@ export interface ErrorWithCode extends Error {

/**
* Set to `true` if the upload is only a subset of the overall object to upload.
* This can be used when planning to continue the upload an object in another
* session.
*
* **Must be used with {@link UploadConfig.chunkSize} != `0`**.
*
* If this is a continuation of a previous upload, {@link UploadConfig.offset}
* should be set.
*
* @see {@link checkUploadStatus} for checking the status of an existing upload.
*/
isPartialUpload?: boolean;
/**
* A customer-supplied encryption key. See

@@ -95,5 +109,15 @@ * https://cloud.google.com/storage/docs/encryption#customer-supplied.

/**
* The starting byte of the upload stream, for resuming an interrupted upload.
* See
* https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#resume-upload.
* The starting byte in relation to the final uploaded object.
* **Must be used with {@link UploadConfig.uri}**.
*
* If resuming an interrupted stream, do not supply this argument unless you
* know the exact number of bytes the service has AND the provided stream's
* first byte is a continuation from that provided offset. If resuming an
* interrupted stream and this option has not been provided, we will treat
* the provided upload stream as the object to upload - where the first byte
* of the upload stream is the first byte of the object to upload; skipping
* any bytes that are already present on the server.
*
* @see {@link checkUploadStatus} for checking the status of an existing upload.
* @see {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#resume-upload.}
*/

@@ -127,2 +151,10 @@ offset?: number;

* upload, just pass it in here and we'll use that.
*
* If resuming an interrupted stream and the {@link UploadConfig.offset}
* option has not been provided, we will treat the provided upload stream as
* the object to upload - where the first byte of the upload stream is the
* first byte of the object to upload; skipping any bytes that are already
* present on the server.
*
* @see {@link checkUploadStatus} for checking the status of an existing upload.
*/

@@ -144,3 +176,4 @@ uri?: string;

/**
* Set the length of the file being uploaded.
* Set the length of the object being uploaded. If uploading a partial
* object, this is the overall size of the finalized object.
*/

@@ -160,2 +193,10 @@ contentLength?: number;

}
export interface CheckUploadStatusConfig {
/**
* Set to `false` to disable retries within this method.
*
* @defaultValue `true`
*/
retry?: boolean;
}
export declare class Upload extends Writable {

@@ -195,2 +236,3 @@ #private;

timeOfFirstRequest: number;
isPartialUpload: boolean;
private currentInvocationId;

@@ -235,3 +277,2 @@ /**

* @param limit The maximum amount to return from the buffer.
* @returns The data requested.
*/

@@ -258,2 +299,9 @@ private pullFromChunkBuffer;

private responseHandler;
/**
* Check the status of an existing resumable upload.
*
* @param cfg A configuration to use. `uri` is required.
* @returns the current upload status
*/
checkUploadStatus(config?: CheckUploadStatusConfig): Promise<GaxiosResponse<FileMetadata | void>>;
private getAndSetOffset;

@@ -271,3 +319,6 @@ private makeRequest;

/**
* @returns {number} the amount of time to wait before retrying the request
* The amount of time to wait before retrying the request, in milliseconds.
* If negative, do not retry.
*
* @returns the amount of time to wait, in milliseconds.
*/

@@ -287,1 +338,8 @@ private getRetryDelay;

export declare function createURI(cfg: UploadConfig, callback: CreateUriCallback): void;
/**
* Check the status of an existing resumable upload.
*
* @param cfg A configuration to use. `uri` is required.
* @returns the current upload status
*/
export declare function checkUploadStatus(cfg: UploadConfig & Required<Pick<UploadConfig, 'uri'>>): Promise<GaxiosResponse<void | FileMetadata>>;

@@ -54,3 +54,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0;
exports.checkUploadStatus = exports.createURI = exports.upload = exports.Upload = exports.PROTOCOL_REGEX = void 0;
const abort_controller_1 = __importDefault(require("abort-controller"));

@@ -63,4 +63,4 @@ const crypto_1 = require("crypto");

const uuid = __importStar(require("uuid"));
const util_js_1 = require("./nodejs-common/util.js");
const util_js_2 = require("./util.js");
const util_js_1 = require("./util.js");
const util_js_2 = require("./nodejs-common/util.js");
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -76,2 +76,3 @@ // @ts-ignore

constructor(cfg) {
var _a;
super(cfg);

@@ -82,5 +83,5 @@ _Upload_instances.add(this);

this.currentInvocationId = {
checkUploadStatus: uuid.v4(),
chunk: uuid.v4(),
uri: uuid.v4(),
offset: uuid.v4(),
};

@@ -105,2 +106,8 @@ /**

}
if (cfg.offset && !cfg.uri) {
throw new RangeError('Cannot provide an `offset` without providing a `uri`');
}
if (cfg.isPartialUpload && !cfg.chunkSize) {
throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`');
}
cfg.authConfig = cfg.authConfig || {};

@@ -136,2 +143,3 @@ cfg.authConfig.scopes = [

this.retryOptions = cfg.retryOptions;
this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false;
if (cfg.key) {

@@ -152,3 +160,6 @@ const base64Key = Buffer.from(cfg.key).toString('base64');

this.uri = cfg.uri;
this.numBytesWritten = 0;
if (this.offset) {
// we're resuming an incomplete upload
this.numBytesWritten = this.offset;
}
this.numRetries = 0; // counter for number of retries currently executed

@@ -163,3 +174,3 @@ if (!autoRetry) {

this.contentLength = isNaN(contentLength) ? '*' : contentLength;
__classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_1.GCCL_GCS_CMD_KEY], "f");
__classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], "f");
this.once('writing', () => {

@@ -255,3 +266,2 @@ if (this.uri) {

* @param limit The maximum amount to return from the buffer.
* @returns The data requested.
*/

@@ -354,3 +364,3 @@ *pullFromChunkBuffer(limit) {

}
let googAPIClient = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`;
let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`;
if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) {

@@ -369,3 +379,3 @@ googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`;

headers: {
'User-Agent': (0, util_js_2.getUserAgentString)(),
'User-Agent': (0, util_js_1.getUserAgentString)(),
'x-goog-api-client': googAPIClient,

@@ -430,11 +440,10 @@ ...headers,

this.offset = 0;
// emit the newly generated URI for future reuse, if necessary.
this.emit('uri', uri);
return uri;
}
async continueUploading() {
if (typeof this.offset === 'number') {
this.startUploading();
return;
}
await this.getAndSetOffset();
this.startUploading();
var _a;
(_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset());
return this.startUploading();
}

@@ -510,3 +519,3 @@ async startUploading() {

});
let googAPIClient = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`;
let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`;
if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) {

@@ -516,3 +525,3 @@ googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`;

const headers = {
'User-Agent': (0, util_js_2.getUserAgentString)(),
'User-Agent': (0, util_js_1.getUserAgentString)(),
'x-goog-api-client': googAPIClient,

@@ -525,6 +534,7 @@ };

for await (const chunk of this.upstreamIterator(expectedUploadSize)) {
// This will conveniently track and keep the size of the buffers
// This will conveniently track and keep the size of the buffers.
// We will reach either the expected upload size or the remainder of the stream.
__classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk);
}
// We hit either the expected upload size or the remainder
// This is the sum from the `#addLocalBufferCache` calls
const bytesToUpload = this.localWriteCacheByteLength;

@@ -537,5 +547,6 @@ // Important: we want to know if the upstream has ended and the queue is empty before

let totalObjectSize = this.contentLength;
if (typeof this.contentLength !== 'number' && isLastChunkOfUpload) {
// Let's let the server know this is the last chunk since
// we didn't know the content-length beforehand.
if (typeof this.contentLength !== 'number' &&
isLastChunkOfUpload &&
!this.isPartialUpload) {
// Let's let the server know this is the last chunk of the object since we didn't set it before.
totalObjectSize = bytesToUpload + this.numBytesWritten;

@@ -563,3 +574,3 @@ }

responseReceived = true;
this.responseHandler(resp);
await this.responseHandler(resp);
}

@@ -581,3 +592,3 @@ }

// the response body.
responseHandler(resp) {
async responseHandler(resp) {
if (resp.data.error) {

@@ -589,5 +600,14 @@ this.destroy(resp.data.error);

this.currentInvocationId.chunk = uuid.v4();
const moreDataToUpload = await this.waitForNextChunk();
const shouldContinueWithNextMultiChunkRequest = this.chunkSize &&
resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&
resp.headers.range;
resp.headers.range &&
moreDataToUpload;
/**
* This is true when we're expecting to upload more data in a future request,
* yet the upstream for the upload session has been exhausted.
*/
const shouldContinueUploadInAnotherRequest = this.isPartialUpload &&
resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&
!moreDataToUpload;
if (shouldContinueWithNextMultiChunkRequest) {

@@ -616,3 +636,4 @@ // Use the upper value in this header to determine where to start the next chunk.

}
else if (!this.isSuccessfulResponse(resp.status)) {
else if (!this.isSuccessfulResponse(resp.status) &&
!shouldContinueUploadInAnotherRequest) {
const err = new Error('Upload failed');

@@ -638,4 +659,10 @@ err.code = resp.status;

}
async getAndSetOffset() {
let googAPIClient = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.offset}`;
/**
* Check the status of an existing resumable upload.
*
* @param cfg A configuration to use. `uri` is required.
* @returns the current upload status
*/
async checkUploadStatus(config = {}) {
let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`;
if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) {

@@ -650,3 +677,3 @@ googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`;

'Content-Range': 'bytes */*',
'User-Agent': (0, util_js_2.getUserAgentString)(),
'User-Agent': (0, util_js_1.getUserAgentString)(),
'x-goog-api-client': googAPIClient,

@@ -658,7 +685,26 @@ },

// Successfully got the offset we can now create a new offset invocation id
this.currentInvocationId.offset = uuid.v4();
this.currentInvocationId.checkUploadStatus = uuid.v4();
return resp;
}
catch (e) {
if (config.retry === false ||
!(e instanceof Error) ||
!this.retryOptions.retryableErrorFn(e)) {
throw e;
}
const retryDelay = this.getRetryDelay();
if (retryDelay <= 0) {
throw e;
}
await new Promise(res => setTimeout(res, retryDelay));
return this.checkUploadStatus(config);
}
}
async getAndSetOffset() {
try {
// we want to handle retries in this method.
const resp = await this.checkUploadStatus({ retry: false });
if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) {
if (resp.headers.range) {
const range = resp.headers.range;
this.offset = Number(range.split('-')[1]) + 1;
if (typeof resp.headers.range === 'string') {
this.offset = Number(resp.headers.range.split('-')[1]) + 1;
return;

@@ -785,3 +831,6 @@ }

/**
* @returns {number} the amount of time to wait before retrying the request
* The amount of time to wait before retrying the request, in milliseconds.
* If negative, do not retry.
*
* @returns the amount of time to wait, in milliseconds.
*/

@@ -837,1 +886,12 @@ getRetryDelay() {

exports.createURI = createURI;
/**
* Check the status of an existing resumable upload.
*
* @param cfg A configuration to use. `uri` is required.
* @returns the current upload status
*/
function checkUploadStatus(cfg) {
const up = new Upload(cfg);
return up.checkUploadStatus();
}
exports.checkUploadStatus = checkUploadStatus;

@@ -14,3 +14,3 @@ /// <reference types="node" />

import { Duplexify, GCCL_GCS_CMD_KEY } from './nodejs-common/util.js';
import { CRC32CValidatorGenerator } from './crc32c.js';
import { CRC32C, CRC32CValidatorGenerator } from './crc32c.js';
import { URL } from 'url';

@@ -108,4 +108,13 @@ import { BaseMetadata, DeleteCallback, DeleteOptions, RequestResponse, SetMetadataOptions } from './nodejs-common/service-object.js';

export type PredefinedAcl = 'authenticatedRead' | 'bucketOwnerFullControl' | 'bucketOwnerRead' | 'private' | 'projectPrivate' | 'publicRead';
type PublicResumableUploadOptions = 'chunkSize' | 'highWaterMark' | 'metadata' | 'origin' | 'offset' | 'predefinedAcl' | 'private' | 'public' | 'uri' | 'userProject';
type PublicResumableUploadOptions = 'chunkSize' | 'highWaterMark' | 'isPartialUpload' | 'metadata' | 'origin' | 'offset' | 'predefinedAcl' | 'private' | 'public' | 'uri' | 'userProject';
export interface CreateResumableUploadOptions extends Pick<resumableUpload.UploadConfig, PublicResumableUploadOptions> {
/**
* A CRC32C to resume from when continuing a previous upload. It is recommended
* to capture the `crc32c` event from previous upload sessions to provide in
* subsequent requests in order to accurately track the upload. This is **required**
* when validating a final portion of the uploaded object.
*
* @see {@link CRC32C.from} for possible values.
*/
resumeCRC32C?: Parameters<(typeof CRC32C)['from']>[0];
preconditionOpts?: PreconditionOptions;

@@ -280,3 +289,5 @@ [GCCL_GCS_CMD_KEY]?: resumableUpload.UploadConfig[typeof GCCL_GCS_CMD_KEY];

UPLOAD_MISMATCH_DELETE_FAIL = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n ",
UPLOAD_MISMATCH = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."
UPLOAD_MISMATCH = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again.",
MD5_RESUMED_UPLOAD = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value",
MISSING_RESUME_CRC32C_FINAL_UPLOAD = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."
}

@@ -660,4 +671,4 @@ /**

*
* See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload| Upload Options (Simple or Resumable)}
* See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation}
* See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)}
* See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation}
*

@@ -727,2 +738,18 @@ * @param {CreateWriteStreamOptions} [options] Configuration options.

* ```
*
* //-
* // <h4>Continuing a Resumable Upload</h4>
* //
* // One can capture a `uri` from a resumable upload to reuse later.
* // Additionally, for validation, one can also capture and pass `crc32c`.
* //-
* let uri: string | undefined = undefined;
* let resumeCRC32C: string | undefined = undefined;
*
* fs.createWriteStream()
* .on('uri', link => {uri = link})
* .on('crc32', crc32c => {resumeCRC32C = crc32c});
*
* // later...
* fs.createWriteStream({uri, resumeCRC32C});
*/

@@ -729,0 +756,0 @@ createWriteStream(options?: CreateWriteStreamOptions): Writable;

/// <reference types="node" />
/// <reference types="node" />
import { Transform } from 'stream';
import { CRC32CValidatorGenerator } from './crc32c.js';
import { CRC32CValidatorGenerator, CRC32CValidator } from './crc32c.js';
interface HashStreamValidatorOptions {

@@ -10,3 +10,5 @@ /** Enables CRC32C calculation. To validate a provided value use `crc32cExpected`. */

md5: boolean;
/** Set a custom CRC32C generator */
/** A CRC32C instance for validation. To validate a provided value use `crc32cExpected`. */
crc32cInstance: CRC32CValidator;
/** Set a custom CRC32C generator. Used if `crc32cInstance` has not been provided. */
crc32cGenerator: CRC32CValidatorGenerator;

@@ -28,2 +30,6 @@ /** Sets the expected CRC32C value to verify once all data has been consumed. Also sets the `crc32c` option to `true` */

constructor(options?: Partial<HashStreamValidatorOptions>);
/**
* Return the current CRC32C value, if available.
*/
get crc32c(): string | undefined;
_flush(callback: (error?: Error | null | undefined) => void): void;

@@ -30,0 +36,0 @@ _transform(chunk: Buffer, encoding: BufferEncoding, callback: (e?: Error) => void): void;

@@ -43,4 +43,9 @@ // Copyright 2022 Google LLC

if (this.crc32cEnabled) {
const crc32cGenerator = options.crc32cGenerator || CRC32C_DEFAULT_VALIDATOR_GENERATOR;
__classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f");
if (options.crc32cInstance) {
__classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f");
}
else {
const crc32cGenerator = options.crc32cGenerator || CRC32C_DEFAULT_VALIDATOR_GENERATOR;
__classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f");
}
}

@@ -51,2 +56,9 @@ if (this.md5Enabled) {

}
/**
* Return the current CRC32C value, if available.
*/
get crc32c() {
var _a;
return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString();
}
_flush(callback) {

@@ -53,0 +65,0 @@ if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) {

@@ -8,2 +8,3 @@ /// <reference types="node" />

import { GCCL_GCS_CMD_KEY } from './nodejs-common/util.js';
import { FileMetadata } from './file.js';
export declare const PROTOCOL_REGEX: RegExp;

@@ -78,2 +79,15 @@ export interface ErrorWithCode extends Error {

/**
* Set to `true` if the upload is only a subset of the overall object to upload.
* This can be used when planning to continue the upload an object in another
* session.
*
* **Must be used with {@link UploadConfig.chunkSize} != `0`**.
*
* If this is a continuation of a previous upload, {@link UploadConfig.offset}
* should be set.
*
* @see {@link checkUploadStatus} for checking the status of an existing upload.
*/
isPartialUpload?: boolean;
/**
* A customer-supplied encryption key. See

@@ -95,5 +109,15 @@ * https://cloud.google.com/storage/docs/encryption#customer-supplied.

/**
* The starting byte of the upload stream, for resuming an interrupted upload.
* See
* https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#resume-upload.
* The starting byte in relation to the final uploaded object.
* **Must be used with {@link UploadConfig.uri}**.
*
* If resuming an interrupted stream, do not supply this argument unless you
* know the exact number of bytes the service has AND the provided stream's
* first byte is a continuation from that provided offset. If resuming an
* interrupted stream and this option has not been provided, we will treat
* the provided upload stream as the object to upload - where the first byte
* of the upload stream is the first byte of the object to upload; skipping
* any bytes that are already present on the server.
*
* @see {@link checkUploadStatus} for checking the status of an existing upload.
* @see {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#resume-upload.}
*/

@@ -127,2 +151,10 @@ offset?: number;

* upload, just pass it in here and we'll use that.
*
* If resuming an interrupted stream and the {@link UploadConfig.offset}
* option has not been provided, we will treat the provided upload stream as
* the object to upload - where the first byte of the upload stream is the
* first byte of the object to upload; skipping any bytes that are already
* present on the server.
*
* @see {@link checkUploadStatus} for checking the status of an existing upload.
*/

@@ -144,3 +176,4 @@ uri?: string;

/**
* Set the length of the file being uploaded.
* Set the length of the object being uploaded. If uploading a partial
* object, this is the overall size of the finalized object.
*/

@@ -160,2 +193,10 @@ contentLength?: number;

}
export interface CheckUploadStatusConfig {
/**
* Set to `false` to disable retries within this method.
*
* @defaultValue `true`
*/
retry?: boolean;
}
export declare class Upload extends Writable {

@@ -195,2 +236,3 @@ #private;

timeOfFirstRequest: number;
isPartialUpload: boolean;
private currentInvocationId;

@@ -235,3 +277,2 @@ /**

* @param limit The maximum amount to return from the buffer.
* @returns The data requested.
*/

@@ -258,2 +299,9 @@ private pullFromChunkBuffer;

private responseHandler;
/**
* Check the status of an existing resumable upload.
*
* @param cfg A configuration to use. `uri` is required.
* @returns the current upload status
*/
checkUploadStatus(config?: CheckUploadStatusConfig): Promise<GaxiosResponse<FileMetadata | void>>;
private getAndSetOffset;

@@ -271,3 +319,6 @@ private makeRequest;

/**
* @returns {number} the amount of time to wait before retrying the request
* The amount of time to wait before retrying the request, in milliseconds.
* If negative, do not retry.
*
* @returns the amount of time to wait, in milliseconds.
*/

@@ -287,1 +338,8 @@ private getRetryDelay;

export declare function createURI(cfg: UploadConfig, callback: CreateUriCallback): void;
/**
* Check the status of an existing resumable upload.
*
* @param cfg A configuration to use. `uri` is required.
* @returns the current upload status
*/
export declare function checkUploadStatus(cfg: UploadConfig & Required<Pick<UploadConfig, 'uri'>>): Promise<GaxiosResponse<void | FileMetadata>>;

@@ -33,4 +33,4 @@ // Copyright 2022 Google LLC

import * as uuid from 'uuid';
import { getRuntimeTrackingString, getModuleFormat, getUserAgentString, } from './util.js';
import { GCCL_GCS_CMD_KEY } from './nodejs-common/util.js';
import { getRuntimeTrackingString, getModuleFormat, getUserAgentString, } from './util.js';
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -46,2 +46,3 @@ // @ts-ignore

constructor(cfg) {
var _a;
super(cfg);

@@ -52,5 +53,5 @@ _Upload_instances.add(this);

this.currentInvocationId = {
checkUploadStatus: uuid.v4(),
chunk: uuid.v4(),
uri: uuid.v4(),
offset: uuid.v4(),
};

@@ -75,2 +76,8 @@ /**

}
if (cfg.offset && !cfg.uri) {
throw new RangeError('Cannot provide an `offset` without providing a `uri`');
}
if (cfg.isPartialUpload && !cfg.chunkSize) {
throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`');
}
cfg.authConfig = cfg.authConfig || {};

@@ -106,2 +113,3 @@ cfg.authConfig.scopes = [

this.retryOptions = cfg.retryOptions;
this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false;
if (cfg.key) {

@@ -122,3 +130,6 @@ const base64Key = Buffer.from(cfg.key).toString('base64');

this.uri = cfg.uri;
this.numBytesWritten = 0;
if (this.offset) {
// we're resuming an incomplete upload
this.numBytesWritten = this.offset;
}
this.numRetries = 0; // counter for number of retries currently executed

@@ -224,3 +235,2 @@ if (!autoRetry) {

* @param limit The maximum amount to return from the buffer.
* @returns The data requested.
*/

@@ -397,11 +407,10 @@ *pullFromChunkBuffer(limit) {

this.offset = 0;
// emit the newly generated URI for future reuse, if necessary.
this.emit('uri', uri);
return uri;
}
async continueUploading() {
if (typeof this.offset === 'number') {
this.startUploading();
return;
}
await this.getAndSetOffset();
this.startUploading();
var _a;
(_a = this.offset) !== null && _a !== void 0 ? _a : (await this.getAndSetOffset());
return this.startUploading();
}

@@ -490,6 +499,7 @@ async startUploading() {

for await (const chunk of this.upstreamIterator(expectedUploadSize)) {
// This will conveniently track and keep the size of the buffers
// This will conveniently track and keep the size of the buffers.
// We will reach either the expected upload size or the remainder of the stream.
__classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk);
}
// We hit either the expected upload size or the remainder
// This is the sum from the `#addLocalBufferCache` calls
const bytesToUpload = this.localWriteCacheByteLength;

@@ -502,5 +512,6 @@ // Important: we want to know if the upstream has ended and the queue is empty before

let totalObjectSize = this.contentLength;
if (typeof this.contentLength !== 'number' && isLastChunkOfUpload) {
// Let's let the server know this is the last chunk since
// we didn't know the content-length beforehand.
if (typeof this.contentLength !== 'number' &&
isLastChunkOfUpload &&
!this.isPartialUpload) {
// Let's let the server know this is the last chunk of the object since we didn't set it before.
totalObjectSize = bytesToUpload + this.numBytesWritten;

@@ -528,3 +539,3 @@ }

responseReceived = true;
this.responseHandler(resp);
await this.responseHandler(resp);
}

@@ -546,3 +557,3 @@ }

// the response body.
responseHandler(resp) {
async responseHandler(resp) {
if (resp.data.error) {

@@ -554,5 +565,14 @@ this.destroy(resp.data.error);

this.currentInvocationId.chunk = uuid.v4();
const moreDataToUpload = await this.waitForNextChunk();
const shouldContinueWithNextMultiChunkRequest = this.chunkSize &&
resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&
resp.headers.range;
resp.headers.range &&
moreDataToUpload;
/**
* This is true when we're expecting to upload more data in a future request,
* yet the upstream for the upload session has been exhausted.
*/
const shouldContinueUploadInAnotherRequest = this.isPartialUpload &&
resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE &&
!moreDataToUpload;
if (shouldContinueWithNextMultiChunkRequest) {

@@ -581,3 +601,4 @@ // Use the upper value in this header to determine where to start the next chunk.

}
else if (!this.isSuccessfulResponse(resp.status)) {
else if (!this.isSuccessfulResponse(resp.status) &&
!shouldContinueUploadInAnotherRequest) {
const err = new Error('Upload failed');

@@ -603,4 +624,10 @@ err.code = resp.status;

}
async getAndSetOffset() {
let googAPIClient = `${getRuntimeTrackingString()} gccl/${packageJson.version}-${getModuleFormat()} gccl-invocation-id/${this.currentInvocationId.offset}`;
/**
* Check the status of an existing resumable upload.
*
* @param cfg A configuration to use. `uri` is required.
* @returns the current upload status
*/
async checkUploadStatus(config = {}) {
let googAPIClient = `${getRuntimeTrackingString()} gccl/${packageJson.version}-${getModuleFormat()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`;
if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) {

@@ -622,7 +649,26 @@ googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`;

// Successfully got the offset we can now create a new offset invocation id
this.currentInvocationId.offset = uuid.v4();
this.currentInvocationId.checkUploadStatus = uuid.v4();
return resp;
}
catch (e) {
if (config.retry === false ||
!(e instanceof Error) ||
!this.retryOptions.retryableErrorFn(e)) {
throw e;
}
const retryDelay = this.getRetryDelay();
if (retryDelay <= 0) {
throw e;
}
await new Promise(res => setTimeout(res, retryDelay));
return this.checkUploadStatus(config);
}
}
async getAndSetOffset() {
try {
// we want to handle retries in this method.
const resp = await this.checkUploadStatus({ retry: false });
if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) {
if (resp.headers.range) {
const range = resp.headers.range;
this.offset = Number(range.split('-')[1]) + 1;
if (typeof resp.headers.range === 'string') {
this.offset = Number(resp.headers.range.split('-')[1]) + 1;
return;

@@ -749,3 +795,6 @@ }

/**
* @returns {number} the amount of time to wait before retrying the request
* The amount of time to wait before retrying the request, in milliseconds.
* If negative, do not retry.
*
* @returns the amount of time to wait, in milliseconds.
*/

@@ -798,1 +847,11 @@ getRetryDelay() {

}
/**
 * Query the current status of an existing resumable upload session.
 *
 * Convenience wrapper that constructs an {@link Upload} instance from the
 * provided configuration and delegates to its `checkUploadStatus` method.
 *
 * @param cfg A configuration to use. `uri` is required.
 * @returns the current upload status
 */
export function checkUploadStatus(cfg) {
    return new Upload(cfg).checkUploadStatus();
}
{
"name": "@google-cloud/storage",
"description": "Cloud Storage Client Library for Node.js",
"version": "7.5.0",
"version": "7.6.0",
"license": "Apache-2.0",

@@ -110,4 +110,4 @@ "author": "Google Inc.",

"@types/request": "^2.48.4",
"@types/sinon": "^10.0.15",
"@types/tmp": "0.2.5",
"@types/sinon": "^17.0.0",
"@types/tmp": "0.2.6",
"@types/uuid": "^8.0.0",

@@ -114,0 +114,0 @@ "@types/yargs": "^17.0.10",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc