
google-gax

google-gax - npm Package Compare versions

Comparing version 2.28.2-alpha.1 to 2.28.3-alpha.1

build/protos/google/api/servicecontrol/v2/service_controller.proto

build/src/apitypes.d.ts

@@ -33,3 +33,7 @@ /**

export declare type RawResponseType = Operation | {} | null;
export declare type ResultTuple = [ResponseType | [ResponseType], NextPageRequestType | undefined, RawResponseType | undefined];
export declare type ResultTuple = [
ResponseType | [ResponseType],
NextPageRequestType | undefined,
RawResponseType | undefined
];
export interface SimpleCallbackFunction {

@@ -36,0 +40,0 @@ (request: RequestType, callback: APICallback): GRPCCallResult;

build/src/bundlingCalls/bundleDescriptor.js

@@ -61,3 +61,3 @@ "use strict";

this.requestDiscriminatorFields =
requestDiscriminatorFields.map(util_1.snakeToCamelCase);
requestDiscriminatorFields.map(util_1.toCamelCase);
this.subresponseField = subresponseField;

@@ -64,0 +64,0 @@ this.byteLengthFunction = byteLengthFunction;

@@ -58,6 +58,6 @@ "use strict";

schedule(apiCall, request, callback) {
const bundleId = bundlingUtils_1.computeBundleId(request, this._descriptor.requestDiscriminatorFields);
const bundleId = (0, bundlingUtils_1.computeBundleId)(request, this._descriptor.requestDiscriminatorFields);
callback = (callback || noop);
if (bundleId === undefined) {
warnings_1.warn('bundling_schedule_bundleid_undefined', 'The request does not have enough information for request bundling. ' +
(0, warnings_1.warn)('bundling_schedule_bundleid_undefined', 'The request does not have enough information for request bundling. ' +
`Invoking immediately. Request: ${JSON.stringify(request)} ` +

@@ -68,3 +68,3 @@ `discriminator fields: ${this._descriptor.requestDiscriminatorFields}`);

if (request[this._descriptor.bundledField] === undefined) {
warnings_1.warn('bundling_no_bundled_field', `Request does not contain field ${this._descriptor.bundledField} that must present for bundling. ` +
(0, warnings_1.warn)('bundling_no_bundled_field', `Request does not contain field ${this._descriptor.bundledField} that must present for bundling. ` +
`Invoking immediately. Request: ${JSON.stringify(request)}`);

@@ -185,3 +185,3 @@ return apiCall(request, callback);

if (!(bundleId in this._tasks)) {
warnings_1.warn('bundle_runnow_bundleid_unknown', `No such bundleid: ${bundleId}`);
(0, warnings_1.warn)('bundle_runnow_bundleid_unknown', `No such bundleid: ${bundleId}`);
return;

@@ -188,0 +188,0 @@ }
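
A note on a pattern that recurs throughout this release: calls such as warnings_1.warn(...) are re-emitted as (0, warnings_1.warn)(...). This is the indirect-call form produced by newer TypeScript compilers rather than a functional change in google-gax; the comma expression invokes the imported function without binding `this` to the module namespace object. A minimal illustration (not part of the diff):

const util = { who() { return this; } };
util.who();       // `this` is `util`
(0, util.who)();  // `this` is undefined in strict mode; the indirect call drops the receiver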

@@ -71,2 +71,7 @@ "use strict";

});
if (canceller instanceof Promise) {
canceller.catch(err => {
setImmediate(this.callback, new googleError_1.GoogleError(err), null, null, null);
});
}
this.cancelFunc = () => canceller.cancel();

@@ -73,0 +78,0 @@ }

@@ -32,3 +32,7 @@ import { GrpcClientOptions, ClientStubOptions } from './grpc';

export interface LROperation<ResultType, MetadataType> extends longrunning.Operation {
promise(): Promise<[ResultType, MetadataType, operationProtos.google.longrunning.Operation]>;
promise(): Promise<[
ResultType,
MetadataType,
operationProtos.google.longrunning.Operation
]>;
}

@@ -35,0 +39,0 @@ export interface PaginationCallback<RequestObject, ResponseObject, ResponseType> {

@@ -50,3 +50,3 @@ "use strict";

// the following apiCaller will be used for all calls of this function...
const apiCaller = apiCaller_1.createAPICaller(settings, descriptor);
const apiCaller = (0, apiCaller_1.createAPICaller)(settings, descriptor);
return (request, callOptions, callback) => {

@@ -58,3 +58,3 @@ const thisSettings = settings.merge(callOptions);

if (settings.isBundling && !thisSettings.isBundling) {
currentApiCaller = apiCaller_1.createAPICaller(settings, undefined);
currentApiCaller = (0, apiCaller_1.createAPICaller)(settings, undefined);
}

@@ -76,5 +76,5 @@ const ongoingCall = currentApiCaller.init(callback);

thisSettings.timeout;
return retries_1.retryable(func, thisSettings.retry, thisSettings.otherArgs, thisSettings.apiName);
return (0, retries_1.retryable)(func, thisSettings.retry, thisSettings.otherArgs, thisSettings.apiName);
}
return timeout_1.addTimeoutArg(func, thisSettings.timeout, thisSettings.otherArgs);
return (0, timeout_1.addTimeoutArg)(func, thisSettings.timeout, thisSettings.otherArgs);
})

@@ -81,0 +81,0 @@ .then((apiCall) => {

@@ -18,2 +18,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.BundleDescriptor = exports.StreamDescriptor = exports.PageDescriptor = exports.LongrunningDescriptor = void 0;
var longRunningDescriptor_1 = require("./longRunningCalls/longRunningDescriptor");

@@ -20,0 +21,0 @@ Object.defineProperty(exports, "LongrunningDescriptor", { enumerable: true, get: function () { return longRunningDescriptor_1.LongRunningDescriptor; } });

@@ -17,6 +17,6 @@ /**

/// <reference types="node" />
import { OutgoingHttpHeaders } from 'http';
import * as protobuf from 'protobufjs';
import * as gax from './gax';
import * as routingHeader from './routingHeader';
import { OutgoingHttpHeaders } from 'http';
import { GoogleAuth, OAuth2Client, Compute, JWT, UserRefreshClient, BaseExternalAccountClient } from 'google-auth-library';

@@ -28,2 +28,3 @@ import { OperationsClientBuilder } from './operationsClient';

import { FallbackServiceError } from './googleError';
import { google } from '../protos/http';
export { FallbackServiceError };

@@ -53,2 +54,3 @@ export { PathTemplate } from './pathTemplate';

private static protoCache;
httpRules?: Array<google.api.IHttpRule>;
/**

@@ -55,0 +57,0 @@ * In rare cases users might need to deallocate all memory consumed by loaded protos.

@@ -18,3 +18,4 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.fallback = exports.protobuf = exports.createApiCall = exports.lro = exports.GrpcClient = exports.defaultToObjectOptions = exports.version = exports.routingHeader = void 0;
exports.fallback = exports.protobufMinimal = exports.protobuf = exports.createApiCall = exports.lro = exports.GrpcClient = exports.defaultToObjectOptions = exports.StreamType = exports.StreamDescriptor = exports.PageDescriptor = exports.LongrunningDescriptor = exports.BundleDescriptor = exports.version = exports.RetryOptions = exports.constructSettings = exports.CallSettings = exports.routingHeader = exports.PathTemplate = void 0;
const objectHash = require("object-hash");
const protobuf = require("protobufjs");

@@ -27,3 +28,2 @@ exports.protobuf = protobuf;

const google_auth_library_1 = require("google-auth-library");
const objectHash = require("object-hash");
const operationsClient_1 = require("./operationsClient");

@@ -35,3 +35,4 @@ const createApiCall_1 = require("./createApiCall");

const fallbackServiceStub_1 = require("./fallbackServiceStub");
const _1 = require(".");
const streaming_1 = require("./streamingCalls/streaming");
const util_1 = require("./util");
var pathTemplate_1 = require("./pathTemplate");

@@ -49,4 +50,4 @@ Object.defineProperty(exports, "PathTemplate", { enumerable: true, get: function () { return pathTemplate_1.PathTemplate; } });

Object.defineProperty(exports, "StreamDescriptor", { enumerable: true, get: function () { return descriptor_1.StreamDescriptor; } });
var streaming_1 = require("./streamingCalls/streaming");
Object.defineProperty(exports, "StreamType", { enumerable: true, get: function () { return streaming_1.StreamType; } });
var streaming_2 = require("./streamingCalls/streaming");
Object.defineProperty(exports, "StreamType", { enumerable: true, get: function () { return streaming_2.StreamType; } });
exports.defaultToObjectOptions = {

@@ -70,3 +71,3 @@ keepCase: false,

constructor(options = {}) {
if (!featureDetection_1.isNodeJS()) {
if (!(0, featureDetection_1.isNodeJS)()) {
if (!options.auth) {

@@ -85,2 +86,3 @@ throw new Error(JSON.stringify(options) +

this.grpcVersion = require('../../package.json').version;
this.httpRules = options.httpRules;
}

@@ -105,3 +107,3 @@ /**

loadProtoJSON(json, ignoreCache = false) {
const hash = objectHash(json).toString();
const hash = objectHash(JSON.stringify(json)).toString();
const cached = GrpcClient.protoCache.get(hash);

@@ -118,3 +120,3 @@ if (cached && !ignoreCache) {

for (const [methodName, methodObject] of Object.entries(service.methods)) {
const methodNameLowerCamelCase = methodName[0].toLowerCase() + methodName.substring(1);
const methodNameLowerCamelCase = (0, util_1.toLowerCamelCase)(methodName);
methods[methodNameLowerCamelCase] = methodObject;

@@ -241,3 +243,3 @@ }

: fallbackProto.decodeResponse;
const serviceStub = fallbackServiceStub_1.generateServiceStub(methods, protocol, servicePath, servicePort, this.authClient, encoder, decoder);
const serviceStub = (0, fallbackServiceStub_1.generateServiceStub)(methods, protocol, servicePath, servicePort, this.authClient, encoder, decoder);
return serviceStub;

@@ -256,4 +258,7 @@ }

options = Object.assign({ scopes: [] }, options);
if (options.protoJson) {
options = Object.assign(options, { fallback: 'rest' });
}
const gaxGrpc = new GrpcClient(options);
return new operationsClient_1.OperationsClientBuilder(gaxGrpc);
return new operationsClient_1.OperationsClientBuilder(gaxGrpc, options.protoJson);
}

@@ -287,3 +292,3 @@ exports.lro = lro;

'streaming' in descriptor &&
descriptor.type !== _1.StreamType.SERVER_STREAMING) {
descriptor.type !== streaming_1.StreamType.SERVER_STREAMING) {
return () => {

@@ -293,3 +298,3 @@ throw new Error('The gRPC-fallback client library (e.g. browser version of the library) currently does not support client-streaming or bidi-stream calls.');

}
return createApiCall_1.createApiCall(func, settings, descriptor);
return (0, createApiCall_1.createApiCall)(func, settings, descriptor);
}

@@ -296,0 +301,0 @@ exports.createApiCall = createApiCall;

@@ -25,4 +25,4 @@ "use strict";

const transcoding_1 = require("./transcoding");
if (!featureDetection_1.hasTextEncoder() || !featureDetection_1.hasTextDecoder()) {
if (featureDetection_1.isNodeJS()) {
if (!(0, featureDetection_1.hasTextEncoder)() || !(0, featureDetection_1.hasTextDecoder)()) {
if ((0, featureDetection_1.isNodeJS)()) {
// Node.js 10 does not have global TextDecoder

@@ -53,3 +53,3 @@ // TODO(@alexander-fenster): remove this logic after Node.js 10 is EOL.

}
const transcoded = transcoding_1.transcode(json, rpc.parsedOptions, rpc.resolvedRequestType.fields);
const transcoded = (0, transcoding_1.transcode)(json, rpc.parsedOptions, rpc.resolvedRequestType.fields);
if (!transcoded) {

@@ -56,0 +56,0 @@ throw new Error(`Cannot build HTTP request for ${JSON.stringify(json)}, method: ${rpc.name}`);

@@ -27,6 +27,11 @@ "use strict";

function generateServiceStub(rpcs, protocol, servicePath, servicePort, authClient, requestEncoder, responseDecoder) {
const fetch = featureDetection_1.hasWindowFetch()
const fetch = (0, featureDetection_1.hasWindowFetch)()
? window.fetch
: node_fetch_1.default;
const serviceStub = {};
const serviceStub = {
// The close method should close all cancel controllers. If this feature is requested in the future, we can add a cancelControllerFactory that tracks created cancel controllers and aborts them all in the close method.
close: () => {
return { cancel: () => { } };
},
};
for (const [rpcName, rpc] of Object.entries(rpcs)) {

@@ -36,3 +41,3 @@ serviceStub[rpcName] = (request, options, _metadata, callback) => {

// Using plain old promises instead.
const cancelController = featureDetection_1.hasAbortController()
const cancelController = (0, featureDetection_1.hasAbortController)()
? new AbortController()

@@ -69,7 +74,10 @@ : new abort_controller_1.AbortController();

if (response.ok && rpc.responseStream) {
stream_1.pipeline(response.body, streamArrayParser, (err) => {
(0, stream_1.pipeline)(response.body, streamArrayParser, (err) => {
if (err &&
(!cancelRequested ||
(err instanceof Error && err.name !== 'AbortError'))) {
callback(err);
if (callback) {
callback(err);
}
streamArrayParser.emit('error', err);
}

@@ -90,7 +98,16 @@ });

if (!cancelRequested || err.name !== 'AbortError') {
callback(err);
if (rpc.responseStream) {
if (callback) {
callback(err);
}
streamArrayParser.emit('error', err);
}
else {
callback(err);
}
}
});
}
});
})
.catch((err) => callback(err));
if (rpc.responseStream) {

@@ -97,0 +114,0 @@ return streamArrayParser;
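
Two behavioral tweaks in the fallback service stub above: error callbacks for response streams are now guarded (the error is also re-emitted on the StreamArrayParser), and the generated stub gains a close() method. A short sketch, assuming `stub` is the object returned by generateServiceStub:

// close() returns a no-op canceller so the REST fallback stub can be shut down the
// same way as a gRPC stub; per the comment above, tracking and aborting the
// individual cancel controllers is left for a possible future change.
const canceller = stub.close();
canceller.cancel(); // no-op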

@@ -22,3 +22,4 @@ "use strict";

const features = {
windowFetch: typeof window !== 'undefined' && (window === null || window === void 0 ? void 0 : window.fetch) &&
windowFetch: typeof window !== 'undefined' &&
(window === null || window === void 0 ? void 0 : window.fetch) &&
typeof (window === null || window === void 0 ? void 0 : window.fetch) === 'function',

@@ -25,0 +26,0 @@ // eslint-disable-next-line node/no-unsupported-features/node-builtins

@@ -117,4 +117,2 @@ /**

autoPaginate?: boolean;
pageToken?: string;
pageSize?: number;
maxResults?: number;

@@ -121,0 +119,0 @@ maxRetries?: number;

@@ -19,2 +19,3 @@ "use strict";

exports.constructSettings = exports.createBundleOptions = exports.createMaxRetriesBackoffSettings = exports.createDefaultBackoffSettings = exports.createBackoffSettings = exports.createRetryOptions = exports.CallSettings = exports.RetryOptions = void 0;
const util_1 = require("./util");
/**

@@ -100,3 +101,2 @@ * Encapsulates the overridable settings for a particular API call.

'autoPaginate' in settings ? settings.autoPaginate : true;
this.pageToken = settings.pageToken;
this.maxResults = settings.maxResults;

@@ -126,4 +126,2 @@ this.otherArgs = settings.otherArgs || {};

let autoPaginate = this.autoPaginate;
let pageToken = this.pageToken;
let pageSize = this.pageSize;
let maxResults = this.maxResults;

@@ -164,9 +162,2 @@ let otherArgs = this.otherArgs;

}
if ('pageToken' in options) {
autoPaginate = false;
pageToken = options.pageToken;
}
if ('pageSize' in options) {
pageSize = options.pageSize;
}
if ('maxResults' in options) {

@@ -206,4 +197,2 @@ maxResults = options.maxResults;

autoPaginate,
pageToken,
pageSize,
maxResults,

@@ -495,3 +484,3 @@ otherArgs,

const methodConfig = methods[methodName];
const jsName = methodName[0].toLowerCase() + methodName.slice(1);
const jsName = (0, util_1.toLowerCamelCase)(methodName);
let retry = constructRetry(methodConfig, serviceConfig.retry_codes, serviceConfig.retry_params, retryNames);

@@ -498,0 +487,0 @@ let bundlingConfig = methodConfig.bundling;

@@ -20,2 +20,3 @@ /**

import { Metadata } from './grpc';
import { JSONValue } from 'proto3-json-serializer';
export declare class GoogleError extends Error {

@@ -29,3 +30,3 @@ code?: Status;

errorInfoMetadata?: {
string: string;
[propName: string]: string;
};

@@ -71,3 +72,7 @@ static parseGRPCStatusDetails(err: GoogleError): GoogleError;

decodeGRPCStatusDetails(bufferArr: Buffer[] | ArrayBuffer[]): GRPCStatusDetailsObject;
decodeHTTPError(json: JSONValue): {
[k: string]: any;
};
decodeHttpStatusDetails(rawDetails: Array<ProtobufAny>): GRPCStatusDetailsObject;
}
export {};

@@ -21,2 +21,4 @@ "use strict";

const protobuf = require("protobufjs");
const serializer = require("proto3-json-serializer");
const fallback_1 = require("./fallback");
class GoogleError extends Error {

@@ -50,22 +52,42 @@ // Parse the details field of google.rpc.Status sent over gRPC metadata.

static parseHttpError(json) {
const error = Object.assign(new GoogleError(json['error']['message']), json.error);
if (Array.isArray(json)) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
json = json.find((obj) => {
return 'error' in obj;
});
}
const decoder = new GoogleErrorDecoder();
const proto3Error = decoder.decodeHTTPError(json['error']);
const error = Object.assign(new GoogleError(json['error']['message']), proto3Error);
// Map Http Status Code to gRPC Status Code
if (json['error']['code']) {
error.code = status_1.rpcCodeFromHttpStatusCode(json['error']['code']);
error.code = (0, status_1.rpcCodeFromHttpStatusCode)(json['error']['code']);
}
else {
// If error code is absent, proto3 message default value is 0. We should
// keep error code as undefined.
delete error.code;
}
// Keep consistency with the gRPC statusDetails field; gRPC "details" is already taken.
// Rename "details" to "statusDetails".
error.statusDetails = json['error']['details'];
delete error.details;
// Promote the ErrorInfo fields as error's top-level.
const errorInfo = !json['error']['details']
? undefined
: json['error']['details'].find((item) => item['@type'] === 'type.googleapis.com/google.rpc.ErrorInfo');
if (errorInfo) {
error.reason = errorInfo.reason;
error.domain = errorInfo.domain;
// error.metadata has been occupied for gRPC metadata, so we use
// errorInfoMetadat to represent ErrorInfo' metadata field. Keep
// consistency with gRPC ErrorInfo metadata field name.
error.errorInfoMetadata = errorInfo.metadata;
if (error.details) {
try {
const statusDetailsObj = decoder.decodeHttpStatusDetails(error.details);
if (statusDetailsObj &&
statusDetailsObj.details &&
statusDetailsObj.details.length > 0) {
error.statusDetails = statusDetailsObj.details;
}
if (statusDetailsObj && statusDetailsObj.errorInfo) {
error.reason = statusDetailsObj.errorInfo.reason;
error.domain = statusDetailsObj.errorInfo.domain;
// error.metadata has been occupied for gRPC metadata, so we use
// errorInfoMetadata to represent ErrorInfo' metadata field. Keep
// consistency with gRPC ErrorInfo metadata field name.
error.errorInfoMetadata = statusDetailsObj.errorInfo.metadata;
}
}
catch (decodeErr) {
// ignoring the error
}
}

@@ -79,3 +101,3 @@ return error;

// eslint-disable-next-line @typescript-eslint/no-var-requires
const errorProtoJson = require('../../protos/status.json');
const errorProtoJson = require('../../build/protos/status.json');
this.root = protobuf.Root.fromJSON(errorProtoJson);

@@ -164,4 +186,31 @@ this.anyType = this.root.lookupType('google.protobuf.Any');

}
// Decodes http error which is an instance of google.rpc.Status.
decodeHTTPError(json) {
const errorMessage = serializer.fromProto3JSON(this.statusType, json);
if (!errorMessage) {
throw new Error(`Received error message ${json}, but failed to serialize as proto3 message`);
}
return this.statusType.toObject(errorMessage, fallback_1.defaultToObjectOptions);
}
// Decodes http error details which is an instance of Array<google.protobuf.Any>.
decodeHttpStatusDetails(rawDetails) {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const details = [];
let errorInfo;
for (const detail of rawDetails) {
try {
const decodedDetail = this.decodeProtobufAny(detail);
details.push(decodedDetail);
if (detail.type_url === 'type.googleapis.com/google.rpc.ErrorInfo') {
errorInfo = decodedDetail;
}
}
catch (err) {
// cannot decode detail, likely because of the unknown type - just skip it
}
}
return { details, errorInfo };
}
}
exports.GoogleErrorDecoder = GoogleErrorDecoder;
//# sourceMappingURL=googleError.js.map
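
The reworked parseHttpError above now routes the HTTP error payload through GoogleErrorDecoder: the error is deserialized as google.rpc.Status proto3 JSON, each entry in details is decoded as a google.protobuf.Any, and a google.rpc.ErrorInfo detail, if present, is promoted to the error's top level. A sketch of the expected mapping (the payload below is illustrative):

const { GoogleError } = require('google-gax');
const json = {
  error: {
    code: 404,
    message: 'Requested entity was not found.',
    details: [{
      '@type': 'type.googleapis.com/google.rpc.ErrorInfo',
      reason: 'RESOURCE_NOT_FOUND',            // hypothetical values
      domain: 'example.googleapis.com',
      metadata: { resource: 'widgets/123' },
    }],
  },
};
const err = GoogleError.parseHttpError(json);
// err.code is mapped from the HTTP 404 status to the corresponding gRPC code;
// err.statusDetails holds the decoded details, and err.reason, err.domain and
// err.errorInfoMetadata are filled in from the ErrorInfo entry.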

@@ -24,5 +24,8 @@ /**

import { ClientOptions } from '@grpc/grpc-js/build/src/client';
import { google } from '../protos/http';
export interface GrpcClientOptions extends GoogleAuthOptions {
auth?: GoogleAuth;
grpc?: GrpcModule;
protoJson?: protobuf.Root;
httpRules?: Array<google.api.IHttpRule>;
}

@@ -58,2 +61,3 @@ export interface MetadataValue {

private static protoCache;
httpRules?: Array<google.api.IHttpRule>;
/**

@@ -125,3 +129,3 @@ * Key for proto cache map. We are doing our best to make sure we respect

loadProtoJSON(json: protobuf.INamespace, ignoreCache?: boolean): grpc.GrpcObject;
metadataBuilder(headers: OutgoingHttpHeaders): (abTests?: {} | undefined, moreHeaders?: OutgoingHttpHeaders | undefined) => grpc.Metadata;
metadataBuilder(headers: OutgoingHttpHeaders): (abTests?: {}, moreHeaders?: OutgoingHttpHeaders) => grpc.Metadata;
/**

@@ -128,0 +132,0 @@ * A wrapper of {@link constructSettings} function under the gRPC context.

@@ -30,3 +30,3 @@ "use strict";

const gax = require("./gax");
const googleProtoFilesDir = path.join(__dirname, '..', '..', 'protos');
const googleProtoFilesDir = path.join(__dirname, '..', '..', 'build', 'protos');
// INCLUDE_DIRS is passed to @grpc/proto-loader

@@ -62,3 +62,3 @@ const INCLUDE_DIRS = [];

return new Promise((resolve, reject) => {
child_process_1.execFile(command, args, (err, stdout) => {
(0, child_process_1.execFile)(command, args, (err, stdout) => {
if (err)

@@ -224,3 +224,3 @@ return reject(err);

loadProtoJSON(json, ignoreCache = false) {
const hash = objectHash(json).toString();
const hash = objectHash(JSON.stringify(json)).toString();
const cached = GrpcClient.protoCache.get(hash);

@@ -359,3 +359,3 @@ if (cached && !ignoreCache) {

// parse the output to extract cert and key, and use this cert/key.
const metadataPath = path_1.join(os.homedir(), '.secureConnect', 'context_aware_metadata.json');
const metadataPath = (0, path_1.join)(os.homedir(), '.secureConnect', 'context_aware_metadata.json');
const metadata = JSON.parse(await readFileAsync(metadataPath));

@@ -362,0 +362,0 @@ if (!metadata.cert_provider_command) {

@@ -116,3 +116,3 @@ "use strict";

});
this.innerApiCalls[methodName] = createApiCall_1.createApiCall(innerCallPromise, this._defaults[methodName], this.descriptors.page[methodName]);
this.innerApiCalls[methodName] = (0, createApiCall_1.createApiCall)(innerCallPromise, this._defaults[methodName], this.descriptors.page[methodName]);
}

@@ -119,0 +119,0 @@ return this.iamPolicyStub;

@@ -18,3 +18,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.serializer = exports.fallback = exports.protobuf = exports.version = exports.createByteLengthFunction = exports.LocationProtos = exports.IamProtos = exports.operationsProtos = exports.lro = exports.routingHeader = exports.grpc = void 0;
exports.serializer = exports.warn = exports.ChannelCredentials = exports.fallback = exports.protobufMinimal = exports.protobuf = exports.version = exports.createByteLengthFunction = exports.LocationsClient = exports.IamClient = exports.OperationsClient = exports.LocationProtos = exports.IamProtos = exports.operationsProtos = exports.lro = exports.routingHeader = exports.StreamType = exports.Status = exports.PathTemplate = exports.operation = exports.Operation = exports.GrpcClient = exports.GoogleProtoFilesRoot = exports.ClientStub = exports.GoogleError = exports.createMaxRetriesBackoffSettings = exports.createDefaultBackoffSettings = exports.createBackoffSettings = exports.createBundleOptions = exports.createRetryOptions = exports.RetryOptions = exports.constructSettings = exports.CallSettings = exports.StreamDescriptor = exports.PageDescriptor = exports.LongrunningDescriptor = exports.BundleDescriptor = exports.createApiCall = exports.OngoingCall = exports.grpc = exports.GoogleAuth = void 0;
const grpc = require("@grpc/grpc-js");

@@ -21,0 +21,0 @@ exports.grpc = grpc;

@@ -109,3 +109,7 @@ import * as gax from './gax';

getLocation(request: protos.google.cloud.location.IGetLocationRequest, callback: Callback<protos.google.cloud.location.ILocation, protos.google.cloud.location.IGetLocationRequest | null | undefined, {} | null | undefined>): void;
listLocations(request?: protos.google.cloud.location.IListLocationsRequest, options?: gax.CallOptions): Promise<[protos.google.cloud.location.ILocation[], protos.google.cloud.location.IListLocationsRequest | null, protos.google.cloud.location.IListLocationsResponse]>;
listLocations(request?: protos.google.cloud.location.IListLocationsRequest, options?: gax.CallOptions): Promise<[
protos.google.cloud.location.ILocation[],
protos.google.cloud.location.IListLocationsRequest | null,
protos.google.cloud.location.IListLocationsResponse
]>;
listLocations(request: protos.google.cloud.location.IListLocationsRequest, options: gax.CallOptions, callback: PaginationCallback<protos.google.cloud.location.IListLocationsRequest, protos.google.cloud.location.IListLocationsResponse | null | undefined, protos.google.cloud.location.ILocation>): void;

@@ -112,0 +116,0 @@ listLocations(request: protos.google.cloud.location.IListLocationsRequest, callback: PaginationCallback<protos.google.cloud.location.IListLocationsRequest, protos.google.cloud.location.IListLocationsResponse | null | undefined, protos.google.cloud.location.ILocation>): void;

@@ -173,3 +173,3 @@ "use strict";

const descriptor = this.descriptors.page[methodName] || undefined;
const apiCall = createApiCall_1.createApiCall(callPromise, this._defaults[methodName], descriptor);
const apiCall = (0, createApiCall_1.createApiCall)(callPromise, this._defaults[methodName], descriptor);
this.innerApiCalls[methodName] = apiCall;

@@ -176,0 +176,0 @@ }

@@ -52,3 +52,3 @@ "use strict";

if (!backoffSettings) {
backoffSettings = gax_1.createDefaultBackoffSettings();
backoffSettings = (0, gax_1.createDefaultBackoffSettings)();
}

@@ -55,0 +55,0 @@ const longrunningDescriptor = this.longrunningDescriptor;

@@ -79,3 +79,3 @@ "use strict";

retries++;
const toCall = timeout_1.addTimeoutArg(func, timeout, otherArgs);
const toCall = (0, timeout_1.addTimeoutArg)(func, timeout, otherArgs);
canceller = toCall(argument, (err, response, next, rawResponse) => {

@@ -106,2 +106,7 @@ if (!err) {

});
if (canceller instanceof Promise) {
canceller.catch(err => {
callback(new googleError_1.GoogleError(err));
});
}
}

@@ -108,0 +113,0 @@ if (maxRetries && deadline) {

@@ -315,3 +315,3 @@ /**

*/
constructor(gaxGrpc: GrpcClient | FallbackGrpcClient);
constructor(gaxGrpc: GrpcClient | FallbackGrpcClient, protoJson?: protobuf.Root);
}

@@ -23,3 +23,4 @@ "use strict";

const configData = require("./operations_client_config.json");
const protoJson = require("../protos/operations.json");
const operationProtoJson = require("../protos/operations.json");
const transcoding_1 = require("./transcoding");
exports.SERVICE_ADDRESS = 'longrunning.googleapis.com';

@@ -99,3 +100,3 @@ const version = require('../../package.json').version;

});
this.innerApiCalls[methodName] = createApiCall_1.createApiCall(innerCallPromise, defaults[methodName], this.descriptor[methodName]);
this.innerApiCalls[methodName] = (0, createApiCall_1.createApiCall)(innerCallPromise, defaults[methodName], this.descriptor[methodName]);
}

@@ -421,5 +422,8 @@ }

*/
constructor(gaxGrpc) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const operationsProtos = gaxGrpc.loadProtoJSON(protoJson);
constructor(gaxGrpc, protoJson) {
if (protoJson && gaxGrpc.httpRules) {
// overwrite the http rules if provided in the service yaml.
(0, transcoding_1.overrideHttpRules)(gaxGrpc.httpRules, protoJson);
}
const operationsProtos = protoJson !== null && protoJson !== void 0 ? protoJson : gaxGrpc.loadProtoJSON(operationProtoJson);
/**

@@ -436,3 +440,3 @@ * Build a new instance of {@link OperationsClient}.

if (gaxGrpc.fallback) {
opts.fallback = true;
opts.fallback = gaxGrpc.fallback;
}

@@ -439,0 +443,0 @@ return new OperationsClient(gaxGrpc, operationsProtos, opts);
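
The operations client builder can now receive a pre-loaded protobuf.Root, and the fallback lro() entry point switches to fallback: 'rest' when protoJson is supplied (see the fallback.js hunk earlier). A minimal sketch, assuming the REST fallback entry point and a proto JSON compiled separately:

const protobuf = require('protobufjs');
const { fallback } = require('google-gax');

const protoJson = protobuf.Root.fromJSON(require('./my_protos.json'));  // hypothetical compiled protos
const opsBuilder = fallback.lro({
  protoJson,  // forces fallback: 'rest' and is passed through to OperationsClientBuilder
  httpRules: [{
    selector: 'google.longrunning.Operations.GetOperation',  // hypothetical override,
    get: '/v2/{name=operations/**}',                          // applied via overrideHttpRules
  }],
});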

@@ -73,4 +73,3 @@ /**

* @param request A request object that came from the user.
* @param settings Call settings. We are interested in `maxResults`, autoPaginate`, `pageToken`, and `pageSize`
* (they are all optional).
* @param settings Call settings. We are interested in `maxResults` and `autoPaginate` (they are optional).
* @param ongoingCall An instance of OngoingCall or OngoingCallPromise that can be used for call cancellation,

@@ -77,0 +76,0 @@ * and is used to return results to the user.

@@ -110,4 +110,3 @@ "use strict";

* @param request A request object that came from the user.
* @param settings Call settings. We are interested in `maxResults`, autoPaginate`, `pageToken`, and `pageSize`
* (they are all optional).
* @param settings Call settings. We are interested in `maxResults` and `autoPaginate` (they are optional).
* @param ongoingCall An instance of OngoingCall or OngoingCallPromise that can be used for call cancellation,

@@ -118,9 +117,2 @@ * and is used to return results to the user.

request = Object.assign({}, request);
// If settings object contain pageToken or pageSize, override the corresponding fields in the request object.
if (settings.pageToken) {
request[this.pageDescriptor.requestPageTokenField] = settings.pageToken;
}
if (settings.pageSize) {
request[this.pageDescriptor.requestPageSizeField] = settings.pageSize;
}
if (!settings.autoPaginate) {

@@ -127,0 +119,0 @@ // they don't want auto-pagination this time - okay, just call once
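
With pageToken and pageSize removed from CallSettings (see the gax.js hunks above) and no longer copied from settings into the request here, paging parameters are expected to be set on the request object itself. A hedged sketch using a hypothetical generated client:

// Names below are illustrative, not from this diff; assumes a surrounding async function.
const [widgets, nextRequest] = await client.listWidgets(
  { parent: 'projects/p', pageSize: 50, pageToken: savedToken }, // paging fields live on the request
  { autoPaginate: false }                                        // autoPaginate remains a CallOption
);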

@@ -17,2 +17,4 @@ /**

/// <reference types="node" />
/// <reference types="node" />
import { AbortController as NodeAbortController } from 'abort-controller';
import { Transform } from 'stream';

@@ -26,3 +28,3 @@ export declare class StreamArrayParser extends Transform {

rpc: protobuf.Method;
cancelController: AbortController;
cancelController: AbortController | NodeAbortController;
cancelSignal: AbortSignal;

@@ -29,0 +31,0 @@ cancelRequested: boolean;

@@ -53,5 +53,4 @@ "use strict";

this.rpc = rpc;
this.cancelController = featureDetection_1.hasAbortController()
? // eslint-disable-next-line no-undef
new AbortController()
this.cancelController = (0, featureDetection_1.hasAbortController)()
? new AbortController()
: new abort_controller_1.AbortController();

@@ -104,3 +103,3 @@ this.cancelSignal = this.cancelController.signal;

// HTTP response.ok is true.
const msgObj = fallbackRest_1.decodeResponse(this.rpc, true, objBuff);
const msgObj = (0, fallbackRest_1.decodeResponse)(this.rpc, true, objBuff);
this.push(msgObj);

@@ -107,0 +106,0 @@ }

@@ -26,4 +26,5 @@ /**

streaming: boolean;
constructor(streamType: StreamType);
rest?: boolean;
constructor(streamType: StreamType, rest?: boolean);
getApiCaller(settings: CallSettings): APICaller;
}

@@ -24,5 +24,6 @@ "use strict";

class StreamDescriptor {
constructor(streamType) {
constructor(streamType, rest) {
this.type = streamType;
this.streaming = true;
this.rest = rest;
}

@@ -29,0 +30,0 @@ getApiCaller(settings) {

@@ -53,2 +53,3 @@ /**

private _responseHasSent;
rest?: boolean;
/**

@@ -62,3 +63,3 @@ * StreamProxy is a proxy to gRPC-streaming method.

*/
constructor(type: StreamType, callback: APICallback);
constructor(type: StreamType, callback: APICallback, rest?: boolean);
cancel(): void;

@@ -65,0 +66,0 @@ /**

@@ -19,3 +19,3 @@ "use strict";

exports.StreamProxy = exports.StreamType = void 0;
const streamArrayParser_1 = require("../streamArrayParser");
const googleError_1 = require("../googleError");
// eslint-disable-next-line @typescript-eslint/no-var-requires

@@ -47,3 +47,3 @@ const duplexify = require('duplexify');

*/
constructor(type, callback) {
constructor(type, callback, rest) {
super(undefined, undefined, {

@@ -58,2 +58,3 @@ objectMode: true,

this._responseHasSent = false;
this.rest = rest;
}

@@ -74,5 +75,2 @@ cancel() {

const eventsToForward = ['metadata', 'response', 'status'];
if (stream instanceof streamArrayParser_1.StreamArrayParser) {
eventsToForward.push('data', 'end', 'error');
}
eventsToForward.forEach(event => {

@@ -110,2 +108,5 @@ stream.on(event, this.emit.bind(this, event));

});
stream.on('error', error => {
googleError_1.GoogleError.parseGRPCStatusDetails(error);
});
}

@@ -119,22 +120,29 @@ /**

if (this.type === StreamType.SERVER_STREAMING) {
const retryStream = retryRequest(null, {
objectMode: true,
request: () => {
if (this._isCancelCalled) {
if (this.stream) {
this.stream.cancel();
if (this.rest) {
const stream = apiCall(argument, this._callback);
this.stream = stream;
this.setReadable(stream);
}
else {
const retryStream = retryRequest(null, {
objectMode: true,
request: () => {
if (this._isCancelCalled) {
if (this.stream) {
this.stream.cancel();
}
return;
}
return;
}
const stream = apiCall(argument, this._callback);
this.stream = stream;
this.forwardEvents(stream);
return stream;
},
retries: retryRequestOptions.retries,
currentRetryAttempt: retryRequestOptions.currentRetryAttempt,
noResponseRetries: retryRequestOptions.noResponseRetries,
shouldRetryFn: retryRequestOptions.shouldRetryFn,
});
this.setReadable(retryStream);
const stream = apiCall(argument, this._callback);
this.stream = stream;
this.forwardEvents(stream);
return stream;
},
retries: retryRequestOptions.retries,
currentRetryAttempt: retryRequestOptions.currentRetryAttempt,
noResponseRetries: retryRequestOptions.noResponseRetries,
shouldRetryFn: retryRequestOptions.shouldRetryFn,
});
this.setReadable(retryStream);
}
return;

@@ -141,0 +149,0 @@ }

@@ -32,3 +32,3 @@ "use strict";

init(callback) {
return new streaming_1.StreamProxy(this.descriptor.type, callback);
return new streaming_1.StreamProxy(this.descriptor.type, callback, this.descriptor.rest);
}

@@ -50,3 +50,3 @@ wrap(func) {

default:
warnings_1.warn('streaming_wrap_unknown_stream_type', `Unknown stream type: ${this.descriptor.type}`);
(0, warnings_1.warn)('streaming_wrap_unknown_stream_type', `Unknown stream type: ${this.descriptor.type}`);
}

@@ -53,0 +53,0 @@ return func;
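
StreamDescriptor and StreamProxy now carry an optional rest flag. When it is set, the server-streaming branch above bypasses the retryRequest wrapper and wires the REST fallback stream (produced by the fallback apiCall, e.g. a StreamArrayParser) directly through setReadable. A hedged sketch of how such a descriptor might be constructed:

const { StreamDescriptor, StreamType } = require('google-gax');
// For a REST-transcoded server-streaming method, a generator can now pass rest = true;
// streamingApiCaller.init() forwards it into new StreamProxy(type, callback, rest).
const descriptor = new StreamDescriptor(StreamType.SERVER_STREAMING, /* rest */ true);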

@@ -45,3 +45,3 @@ /**

export declare function flattenObject(request: JSONObject): JSONObject;
export declare function requestChangeCaseAndCleanup(request: JSONObject, caseChangeFunc: (key: string) => string): JSONObject;
export declare function requestChangeCaseAndCleanup(request: JSONObject, caseChangeFunc: (key: string) => string, fieldsToChange?: string[]): JSONObject;
export declare function isProto3OptionalField(field: Field): any;

@@ -55,5 +55,9 @@ export declare function isRequiredField(field: Field): boolean | undefined;

};
export declare function getAllFieldNames(fields: {
[k: string]: Field;
} | undefined, fieldNames: string[]): string[];
export declare function transcode(request: JSONObject, parsedOptions: ParsedOptionsType, requestFields?: {
[k: string]: Field;
}): TranscodedRequest | undefined;
export declare function overrideHttpRules(httpRules: Array<google.api.IHttpRule>, protoJson: protobuf.Root): void;
export {};

@@ -18,3 +18,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.transcode = exports.getFieldNameOnBehavior = exports.isRequiredField = exports.isProto3OptionalField = exports.requestChangeCaseAndCleanup = exports.flattenObject = exports.match = exports.applyPattern = exports.encodeWithoutSlashes = exports.encodeWithSlashes = exports.buildQueryStringComponents = exports.deleteField = exports.deepCopy = exports.getField = void 0;
exports.overrideHttpRules = exports.transcode = exports.getAllFieldNames = exports.getFieldNameOnBehavior = exports.isRequiredField = exports.isProto3OptionalField = exports.requestChangeCaseAndCleanup = exports.flattenObject = exports.match = exports.applyPattern = exports.encodeWithoutSlashes = exports.encodeWithSlashes = exports.buildQueryStringComponents = exports.deleteField = exports.deepCopy = exports.getField = void 0;
const util_1 = require("./util");

@@ -174,6 +174,7 @@ const httpOptionName = '(google.api.http)';

exports.flattenObject = flattenObject;
function requestChangeCaseAndCleanup(request, caseChangeFunc) {
function requestChangeCaseAndCleanup(request, caseChangeFunc, fieldsToChange) {
if (!request || typeof request !== 'object') {
return request;
}
const fieldsToChangeWithFunc = new Set(fieldsToChange === null || fieldsToChange === void 0 ? void 0 : fieldsToChange.map(x => caseChangeFunc(x)));
const convertedRequest = {};

@@ -185,9 +186,15 @@ for (const field in request) {

}
const convertedField = caseChangeFunc(field);
let convertedField = caseChangeFunc(field);
// Here, we want to check if the fields in the proto match
// the fields we are changing; if not, we assume it's user
// input and revert back to its original form
if (fieldsToChange && !(fieldsToChangeWithFunc === null || fieldsToChangeWithFunc === void 0 ? void 0 : fieldsToChangeWithFunc.has(convertedField))) {
convertedField = field;
}
const value = request[field];
if (Array.isArray(value)) {
convertedRequest[convertedField] = value.map(v => requestChangeCaseAndCleanup(v, caseChangeFunc));
convertedRequest[convertedField] = value.map(v => requestChangeCaseAndCleanup(v, caseChangeFunc, fieldsToChange));
}
else {
convertedRequest[convertedField] = requestChangeCaseAndCleanup(value, caseChangeFunc);
convertedRequest[convertedField] = requestChangeCaseAndCleanup(value, caseChangeFunc, fieldsToChange);
}

@@ -223,2 +230,16 @@ }

exports.getFieldNameOnBehavior = getFieldNameOnBehavior;
// This function gets all the fields recursively
function getAllFieldNames(fields, fieldNames) {
var _a, _b;
if (fields) {
for (const field in fields) {
fieldNames.push(field);
if ((_b = (_a = fields === null || fields === void 0 ? void 0 : fields[field]) === null || _a === void 0 ? void 0 : _a.resolvedType) === null || _b === void 0 ? void 0 : _b.fields) {
getAllFieldNames(fields[field].resolvedType.fields, fieldNames);
}
}
}
return fieldNames;
}
exports.getAllFieldNames = getAllFieldNames;
function transcode(request, parsedOptions, requestFields) {

@@ -233,3 +254,7 @@ const { requiredFields, optionalFields } = getFieldNameOnBehavior(requestFields);

// request is supposed to have keys in camelCase.
const snakeRequest = requestChangeCaseAndCleanup(request, util_1.camelToSnakeCase);
let fieldsToChange = undefined;
if (requestFields) {
fieldsToChange = getAllFieldNames(requestFields, []);
}
const snakeRequest = requestChangeCaseAndCleanup(request, util_1.camelToSnakeCase, fieldsToChange);
const httpRules = [];

@@ -268,3 +293,3 @@ for (const option of parsedOptions) {

for (const key in data) {
if (optionalFields.has(util_1.snakeToCamelCase(key)) &&
if (optionalFields.has((0, util_1.toCamelCase)(key)) &&
(!(key in snakeRequest) || snakeRequest[key] === 'undefined')) {

@@ -275,3 +300,3 @@ delete data[key];

// HTTP endpoint expects camelCase but we have snake_case at this point
const camelCaseData = requestChangeCaseAndCleanup(data, util_1.snakeToCamelCase);
const camelCaseData = requestChangeCaseAndCleanup(data, util_1.toCamelCase, fieldsToChange);
return { httpMethod, url, queryString: '', data: camelCaseData };

@@ -284,3 +309,3 @@ }

if (body) {
deleteField(queryStringObject, util_1.snakeToCamelCase(body));
deleteField(queryStringObject, (0, util_1.toCamelCase)(body));
// Unset optional field should not add in body request.

@@ -293,3 +318,3 @@ data =

for (const field of matchedFields) {
deleteField(queryStringObject, util_1.snakeToCamelCase(field));
deleteField(queryStringObject, (0, util_1.toCamelCase)(field));
}

@@ -309,3 +334,3 @@ // Unset proto3 optional field does not appear in the query params.

else {
camelCaseData = requestChangeCaseAndCleanup(data, util_1.snakeToCamelCase);
camelCaseData = requestChangeCaseAndCleanup(data, util_1.toCamelCase, fieldsToChange);
}

@@ -318,2 +343,41 @@ return { httpMethod, url, queryString, data: camelCaseData };

exports.transcode = transcode;
// Override the http rules in the protobuf JSON.
function overrideHttpRules(httpRules, protoJson) {
for (const rule of httpRules) {
if (!rule.selector) {
continue;
}
const rpc = protoJson.lookup(rule.selector);
// Overrides are not supported for a non-existent RPC or an RPC without an annotation.
// We could reconsider if a use case comes up later.
if (!rpc || !rpc.parsedOptions) {
continue;
}
for (const item of rpc.parsedOptions) {
if (!(httpOptionName in item)) {
continue;
}
const httpOptions = item[httpOptionName];
for (const httpMethod in httpOptions) {
if (httpMethod in rule) {
if (httpMethod === 'additional_bindings') {
continue;
}
httpOptions[httpMethod] =
rule[httpMethod];
}
if (rule.additional_bindings) {
httpOptions['additional_bindings'] = !httpOptions['additional_bindings']
? []
: Array.isArray(httpOptions['additional_bindings'])
? httpOptions['additional_bindings']
: [httpOptions['additional_bindings']];
// Make additional_bindings an array if it is not.
httpOptions['additional_bindings'].push(...rule.additional_bindings);
}
}
}
}
}
exports.overrideHttpRules = overrideHttpRules;
//# sourceMappingURL=transcoding.js.map
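
The new overrideHttpRules helper walks the supplied google.api.IHttpRule list, looks up each selector in the protobuf.js root, and replaces the parsed (google.api.http) option on that RPC, appending any additional_bindings. A minimal sketch with a hypothetical rule (the module path within the package is assumed):

const { overrideHttpRules } = require('google-gax/build/src/transcoding');
overrideHttpRules(
  [{
    selector: 'google.longrunning.Operations.GetOperation',  // must name an annotated RPC in the root
    get: '/v2/{name=operations/**}',                          // hypothetical replacement binding
  }],
  protoJsonRoot  // a protobuf.Root previously built with protobuf.Root.fromJSON()
);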

@@ -22,5 +22,11 @@ /**

/**
* Converts a given string from snake_case (normally used in proto definitions) to
* camelCase (used by protobuf.js)
* Converts a given string from snake_case (normally used in proto definitions) or
* PascalCase (also used in proto definitions) to camelCase (used by protobuf.js).
* Preserves capitalization of the first character.
*/
export declare function snakeToCamelCase(str: string): string;
export declare function toCamelCase(str: string): string;
/**
* Converts a given string to lower camel case (forcing the first character to be
* in lower case).
*/
export declare function toLowerCamelCase(str: string): string;

@@ -18,4 +18,31 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.snakeToCamelCase = exports.camelToSnakeCase = void 0;
exports.toLowerCamelCase = exports.toCamelCase = exports.camelToSnakeCase = void 0;
function words(str, normalize = false) {
if (normalize) {
// strings like somethingABCSomething are a special case for protobuf.js,
// they should be split as "something", "abc", "something".
// Deal with sequences of capital letters first.
str = str.replace(/([A-Z])([A-Z]+)([A-Z])/g, (str) => {
return (str[0] +
str.slice(1, str.length - 1).toLowerCase() +
str[str.length - 1]);
});
}
// split on spaces, non-alphanumeric, or capital letters
// note: we keep the capitalization of the first word (special case: IPProtocol)
return str
.split(/(?=[A-Z])|[^A-Za-z0-9.]+/)
.filter(w => w.length > 0)
.map((w, index) => (index === 0 ? w : w.toLowerCase()));
}
/**
* Converts the first character of the given string to lower case.
*/
function lowercase(str) {
if (str.length === 0) {
return str;
}
return str[0].toLowerCase() + str.slice(1);
}
/**
* Converts a given string from camelCase (used by protobuf.js and in JSON)

@@ -26,3 +53,9 @@ * to snake_case (normally used in proto definitions).

// Keep the first position capitalization, otherwise decapitalize with underscore.
return str.replace(/(?!^)[A-Z]/g, letter => `_${letter.toLowerCase()}`);
const wordsList = words(str);
if (wordsList.length === 0) {
return str;
}
const result = [wordsList[0]];
result.push(...wordsList.slice(1).map(lowercase));
return result.join('_');
}

@@ -40,18 +73,33 @@ exports.camelToSnakeCase = camelToSnakeCase;

/**
* Converts a given string from snake_case (normally used in proto definitions) to
* camelCase (used by protobuf.js)
* Converts a given string from snake_case (normally used in proto definitions) or
* PascalCase (also used in proto definitions) to camelCase (used by protobuf.js).
* Preserves capitalization of the first character.
*/
function snakeToCamelCase(str) {
// split on spaces, non-alphanumeric, or capital letters
const splitted = str
.split(/(?=[A-Z])|[\s\W_]+/)
.filter(w => w.length > 0)
// Keep the capitalization for the first split.
.map((word, index) => (index === 0 ? word : word.toLowerCase()));
if (splitted.length === 0) {
function toCamelCase(str) {
const wordsList = words(str, /*normalize:*/ true);
if (wordsList.length === 0) {
return str;
}
return [splitted[0], ...splitted.slice(1).map(capitalize)].join('');
const result = [wordsList[0]];
result.push(...wordsList.slice(1).map(w => {
if (w.match(/^\d+$/)) {
return '_' + w;
}
return capitalize(w);
}));
return result.join('');
}
exports.snakeToCamelCase = snakeToCamelCase;
exports.toCamelCase = toCamelCase;
/**
* Converts a given string to lower camel case (forcing the first character to be
* in lower case).
*/
function toLowerCamelCase(str) {
const camelCase = toCamelCase(str);
if (camelCase.length === 0) {
return camelCase;
}
return camelCase[0].toLowerCase() + camelCase.slice(1);
}
exports.toLowerCamelCase = toLowerCamelCase;
//# sourceMappingURL=util.js.map
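
The behavior of the reworked helpers can be traced from the code above; the conversions below are illustrative, derived by hand (the module path is assumed):

const { toCamelCase, toLowerCamelCase, camelToSnakeCase } = require('google-gax/build/src/util');
toCamelCase('parent_id');          // 'parentId'
toCamelCase('display_video_360');  // 'displayVideo_360' (digit-only words keep a leading underscore)
toLowerCamelCase('GetOperation');  // 'getOperation'
camelToSnakeCase('parentId');      // 'parent_id'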

@@ -28,3 +28,3 @@ "use strict";

emittedWarnings.add(code);
if (!featureDetection_1.isNodeJS()) {
if (!(0, featureDetection_1.isNodeJS)()) {
console.warn(message);

@@ -31,0 +31,0 @@ }

@@ -23,4 +23,4 @@ #!/usr/bin/env node

const util = require("util");
const pbjs = require("protobufjs/cli/pbjs");
const pbts = require("protobufjs/cli/pbts");
const pbjs = require("protobufjs-cli/pbjs");
const pbts = require("protobufjs-cli/pbts");
const readdir = util.promisify(fs.readdir);

@@ -150,2 +150,4 @@ const readFile = util.promisify(fs.readFile);

js = apacheLicense + js;
// 3. reformat JSDoc reference link in the comments
js = js.replace(/{@link (.*?)#(.*?)}/g, '{@link $1|$2}');
return js;

@@ -210,3 +212,3 @@ }

'-p',
path.join(__dirname, '..', '..', 'protos'),
path.join(__dirname, '..', '..', 'build', 'protos'),
'-o',

@@ -228,3 +230,3 @@ jsonOutput,

'-p',
path.join(__dirname, '..', '..', 'protos'),
path.join(__dirname, '..', '..', 'build', 'protos'),
'-o',

@@ -231,0 +233,0 @@ jsOutput,
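
Note that the bundled protos move from protos/ to build/protos/ in this release (the package.json "files" entry below drops "protos"), which is why compileProtos, grpc.js and googleError.js all switch their lookup paths. A hedged example of loading the bundled status proto JSON from its new location:

// Illustrative; before this release the path was 'google-gax/protos/status.json'.
const statusJson = require('google-gax/build/protos/status.json');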

{
"name": "google-gax",
"version": "2.28.2-alpha.1",
"version": "2.28.3-alpha.1",
"description": "Google API Extensions",

@@ -10,3 +10,2 @@ "main": "build/src/index.js",

"build/tools/compileProtos.js",
"protos",
"build/protos/"

@@ -18,4 +17,4 @@ ],

"dependencies": {
"@grpc/grpc-js": "~1.4.0",
"@grpc/proto-loader": "^0.6.1",
"@grpc/grpc-js": "~1.6.0",
"@grpc/proto-loader": "^0.7.0",
"@types/long": "^4.0.0",

@@ -25,17 +24,18 @@ "abort-controller": "^3.0.0",

"fast-text-encoding": "^1.0.3",
"google-auth-library": "^7.6.1",
"google-auth-library": "^8.0.2",
"is-stream-ended": "^0.1.4",
"node-fetch": "^2.6.1",
"object-hash": "^2.1.1",
"proto3-json-serializer": "^0.1.5",
"protobufjs": "6.11.2",
"retry-request": "^4.0.0"
"object-hash": "^3.0.0",
"proto3-json-serializer": "^1.0.0",
"protobufjs": "7.0.0",
"protobufjs-cli": "1.0.0",
"retry-request": "^5.0.0"
},
"devDependencies": {
"@compodoc/compodoc": "1.1.16",
"@compodoc/compodoc": "1.1.19",
"@types/download": "^8.0.0",
"@types/fs-extra": "^8.0.1",
"@types/mocha": "^8.0.0",
"@types/fs-extra": "^9.0.0",
"@types/mocha": "^9.0.0",
"@types/ncp": "^2.0.1",
"@types/node": ">=15.6.0",
"@types/node": "^17.0.31",
"@types/node-fetch": "^2.5.4",

@@ -53,5 +53,5 @@ "@types/object-hash": "^2.1.0",

"fs-extra": "^10.0.0",
"google-proto-files": "^2.5.0",
"gts": "^2.0.0",
"is-docker": "^2.0.0",
"google-proto-files": "^3.0.0",
"gts": "^3.1.0",
"is-docker": "^3.0.0",
"json-loader": "^0.5.7",

@@ -65,6 +65,7 @@ "karma": "^6.0.0",

"karma-sourcemap-loader": "^0.3.7",
"karma-webpack": "^4.0.2",
"linkinator": "^2.0.0",
"karma-webpack": "^5.0.0",
"linkinator": "^4.0.0",
"long": "^4.0.0",
"mkdirp": "^1.0.0",
"mocha": "^8.0.0",
"mocha": "^9.0.0",
"ncp": "^2.0.0",

@@ -74,10 +75,10 @@ "null-loader": "^4.0.0",

"pumpify": "^2.0.0",
"puppeteer": "^12.0.0",
"puppeteer": "^15.0.0",
"rimraf": "^3.0.0",
"sinon": "^12.0.0",
"sinon": "^14.0.0",
"stream-events": "^1.0.4",
"ts-loader": "^8.0.0",
"typescript": "^3.8.3",
"ts-loader": "^9.0.0",
"typescript": "^4.6.4",
"walkdir": "^0.4.0",
"webpack": "^4.34.0",
"webpack": "^5.0.0",
"webpack-cli": "^4.0.0"

@@ -121,5 +122,5 @@ },

"engines": {
"node": ">=10"
"node": ">=12"
},
"browser": "build/src/fallback.js"
}

Diffs for the remaining changed files are not shown: the compare view reports most of them as not supported yet, and two as too big to display.
