@google-cloud/logging
Advanced tools
Comparing version 9.2.0 to 9.2.1
@@ -49,3 +49,3 @@ /*! | ||
outputVersionFormat?: google.logging.v2.LogSink.VersionFormat; | ||
uniqueWriterIdentity?: string; | ||
uniqueWriterIdentity?: string | boolean; | ||
gaxOptions?: gax.CallOptions; | ||
@@ -52,0 +52,0 @@ } |
@@ -29,3 +29,2 @@ "use strict"; | ||
const streamEvents = require("stream-events"); | ||
const through = require("through2"); | ||
const middleware = require("./middleware"); | ||
@@ -46,2 +45,3 @@ exports.middleware = middleware; | ||
Object.defineProperty(exports, "Sink", { enumerable: true, get: function () { return sink_1.Sink; } }); | ||
const stream_1 = require("stream"); | ||
/** | ||
@@ -151,3 +151,3 @@ * @typedef {object} ClientConfig | ||
* matching the filter are written. | ||
* @property {string} [uniqueWriterIdentity] Determines the kind of IAM | ||
* @property {string|boolean} [uniqueWriterIdentity] Determines the kind of IAM | ||
* identity returned as `writerIdentity` in the new sink. See {@link https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/create#query-parameters}. | ||
@@ -452,4 +452,7 @@ */ | ||
}; | ||
const toEntryStream = through.obj((entry, _, next) => { | ||
next(null, entry_1.Entry.fromApiResponse_(entry)); | ||
const toEntryStream = new stream_1.Transform({ | ||
objectMode: true, | ||
transform: (chunk, encoding, callback) => { | ||
callback(null, entry_1.Entry.fromApiResponse_(chunk)); | ||
}, | ||
}); | ||
@@ -479,3 +482,3 @@ userStream.once('reading', () => { | ||
let gaxStream; | ||
requestStream = streamEvents(through.obj()); | ||
requestStream = streamEvents(new stream_1.PassThrough({ objectMode: true })); | ||
requestStream.abort = () => { | ||
@@ -569,14 +572,17 @@ if (gaxStream && gaxStream.cancel) { | ||
}; | ||
const transformStream = through.obj((data, _, next) => { | ||
next(null, (() => { | ||
const formattedEntries = []; | ||
data.entries.forEach((entry) => { | ||
formattedEntries.push(entry_1.Entry.fromApiResponse_(entry)); | ||
}); | ||
const resp = { | ||
entries: formattedEntries, | ||
suppressionInfo: data.suppressionInfo, | ||
}; | ||
return resp; | ||
})()); | ||
const transformStream = new stream_1.Transform({ | ||
objectMode: true, | ||
transform: (chunk, encoding, callback) => { | ||
callback(null, (() => { | ||
const formattedEntries = []; | ||
chunk.entries.forEach((entry) => { | ||
formattedEntries.push(entry_1.Entry.fromApiResponse_(entry)); | ||
}); | ||
const resp = { | ||
entries: formattedEntries, | ||
suppressionInfo: chunk.suppressionInfo, | ||
}; | ||
return resp; | ||
})()); | ||
}, | ||
}); | ||
@@ -738,4 +744,7 @@ this.auth.getProjectId().then(projectId => { | ||
}; | ||
const toLogStream = through.obj((logName, _, next) => { | ||
next(null, this.log(logName)); | ||
const toLogStream = new stream_1.Transform({ | ||
objectMode: true, | ||
transform: (chunk, encoding, callback) => { | ||
callback(null, this.log(chunk)); | ||
}, | ||
}); | ||
@@ -753,3 +762,3 @@ userStream.once('reading', () => { | ||
let gaxStream; | ||
requestStream = streamEvents(through.obj()); | ||
requestStream = streamEvents(new stream_1.PassThrough({ objectMode: true })); | ||
requestStream.abort = () => { | ||
@@ -899,6 +908,9 @@ if (gaxStream && gaxStream.cancel) { | ||
}; | ||
const toSinkStream = through.obj((sink, _, next) => { | ||
const sinkInstance = self.sink(sink.name); | ||
sinkInstance.metadata = sink; | ||
next(null, sinkInstance); | ||
const toSinkStream = new stream_1.Transform({ | ||
objectMode: true, | ||
transform: (chunk, encoding, callback) => { | ||
const sinkInstance = self.sink(chunk.name); | ||
sinkInstance.metadata = chunk; | ||
callback(null, sinkInstance); | ||
}, | ||
}); | ||
@@ -916,3 +928,3 @@ userStream.once('reading', () => { | ||
let gaxStream; | ||
requestStream = streamEvents(through.obj()); | ||
requestStream = streamEvents(new stream_1.PassThrough({ objectMode: true })); | ||
requestStream.abort = () => { | ||
@@ -1000,3 +1012,3 @@ if (gaxStream && gaxStream.cancel) { | ||
if (isStreamMode) { | ||
stream = streamEvents(through.obj()); | ||
stream = streamEvents(new stream_1.PassThrough({ objectMode: true })); | ||
stream.abort = () => { | ||
@@ -1047,3 +1059,3 @@ if (gaxStream && gaxStream.cancel) { | ||
if (global.GCLOUD_SANDBOX_ENV) { | ||
return through.obj(); | ||
return new stream_1.PassThrough({ objectMode: true }); | ||
} | ||
@@ -1050,0 +1062,0 @@ prepareGaxRequest((err, requestFn) => { |
@@ -25,4 +25,2 @@ "use strict"; | ||
const metadata_1 = require("./metadata"); | ||
// eslint-disable-next-line @typescript-eslint/no-var-requires | ||
const snakeCaseKeys = require('snakecase-keys'); | ||
var Severity; | ||
@@ -684,5 +682,4 @@ (function (Severity) { | ||
if (options.resource) { | ||
if (options.resource.labels) { | ||
options.resource.labels = snakeCaseKeys(options.resource.labels); | ||
} | ||
if (options.resource.labels) | ||
snakecaseKeys(options.resource.labels); | ||
return writeWithResource(options.resource); | ||
@@ -717,2 +714,9 @@ } | ||
} | ||
// snakecaseKeys turns label keys from camel case to snake case. | ||
// snakecaseKeys turns label keys from camel case to snake case, in place.
// Example: { zoneName: 'a' } becomes { zone_name: 'a' }.
//
// Fixes two defects in the previous version:
// 1. It iterated with `for...in` while adding and deleting keys, so a newly
//    added snake_case key could be visited again mid-iteration.
// 2. A key that was already snake case mapped to itself, so the
//    defineProperty/delete pair deleted the label outright, silently
//    dropping its value.
function snakecaseKeys(labels) {
    // Snapshot the keys up front so mutations below cannot affect iteration.
    for (const key of Object.keys(labels)) {
        const snakeKey = key.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`);
        if (snakeKey === key) {
            // Already snake case; leave the property untouched.
            continue;
        }
        // Copy via the full property descriptor so getters/enumerability
        // survive the rename, matching the original implementation's intent.
        Object.defineProperty(labels, snakeKey, Object.getOwnPropertyDescriptor(labels, key));
        delete labels[key];
    }
}
} | ||
@@ -719,0 +723,0 @@ // TODO proper signature of `private decorateEntries` (sans underscore suffix) |
@@ -23,3 +23,3 @@ /*! | ||
*/ | ||
export declare function getCloudFunctionDescriptor(): { | ||
export declare function getCloudFunctionDescriptor(): Promise<{ | ||
type: string; | ||
@@ -30,3 +30,3 @@ labels: { | ||
}; | ||
}; | ||
}>; | ||
/** | ||
@@ -33,0 +33,0 @@ * Create a descriptor for Cloud Run. |
@@ -31,2 +31,9 @@ "use strict"; | ||
} | ||
// regionFromQualifiedZone parses the region out of a fully qualified zone
// string. Used for GCF and GCR, which dynamically allocate zones.
function regionFromQualifiedZone(qualified) {
    const zone = zoneFromQualifiedZone(qualified);
    if (zone === undefined) {
        return undefined;
    }
    // A zone looks like "us-central1-a"; stripping everything from the last
    // "-" onward yields the region, e.g. "us-central1".
    return zone.slice(0, zone.lastIndexOf('-'));
}
/** | ||
@@ -37,7 +44,12 @@ * Create a descriptor for Cloud Functions. | ||
*/ | ||
function getCloudFunctionDescriptor() { | ||
async function getCloudFunctionDescriptor() { | ||
// If the region is already available via an environment variable, don't delay the function by pinging metaserver. | ||
let region = undefined; | ||
if (!(process.env.GOOGLE_CLOUD_REGION || process.env.FUNCTION_REGION)) { | ||
const qualifiedZone = await gcpMetadata.instance('zone'); | ||
region = regionFromQualifiedZone(qualifiedZone); | ||
} | ||
/** | ||
* In GCF versions after Node 8, K_SERVICE is the preferred way to | ||
* get the function name and GOOGLE_CLOUD_REGION is the preferred way | ||
* to get the region. | ||
* get the function name. We still check for GOOGLE_CLOUD_REGION and FUNCTION_REGION for backwards Node runtime compatibility. | ||
*/ | ||
@@ -48,3 +60,5 @@ return { | ||
function_name: process.env.K_SERVICE || process.env.FUNCTION_NAME, | ||
region: process.env.GOOGLE_CLOUD_REGION || process.env.FUNCTION_REGION, | ||
region: process.env.GOOGLE_CLOUD_REGION || | ||
process.env.FUNCTION_REGION || | ||
region, | ||
}, | ||
@@ -61,3 +75,3 @@ }; | ||
const qualifiedZone = await gcpMetadata.instance('zone'); | ||
const location = zoneFromQualifiedZone(qualifiedZone); | ||
const location = regionFromQualifiedZone(qualifiedZone); | ||
return { | ||
@@ -170,3 +184,3 @@ type: 'cloud_run_revision', | ||
case google_auth_library_1.GCPEnv.CLOUD_FUNCTIONS: | ||
return getCloudFunctionDescriptor(); | ||
return getCloudFunctionDescriptor().catch(() => getGlobalDescriptor()); | ||
case google_auth_library_1.GCPEnv.COMPUTE_ENGINE: | ||
@@ -173,0 +187,0 @@ // Google Cloud Run |
@@ -24,2 +24,3 @@ /*! | ||
gaxOptions?: CallOptions; | ||
uniqueWriterIdentity?: boolean | string; | ||
} | ||
@@ -26,0 +27,0 @@ /** |
@@ -242,2 +242,7 @@ "use strict"; | ||
* | ||
* Note: If the sink was previously created or updated with | ||
* uniqueWriterIdentity = true, then you must update the sink by setting | ||
* uniqueWriterIdentity = true. Read more about using a unique writer identity | ||
* here: https://cloud.google.com/logging/docs/api/tasks/exporting-logs#using_a_unique_writer_identity | ||
* | ||
* @see [Sink Resource]{@link https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks#LogSink} | ||
@@ -279,3 +284,5 @@ * @see [projects.sink.update API Documentation]{@link https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks/update} | ||
const [currentMetadata] = await this.getMetadata(); | ||
const reqOpts = { | ||
const uniqueWriterIdentity = metadata.uniqueWriterIdentity; | ||
delete metadata.uniqueWriterIdentity; | ||
let reqOpts = { | ||
sinkName: this.formattedName_, | ||
@@ -285,2 +292,7 @@ sink: extend({}, currentMetadata, metadata), | ||
delete reqOpts.sink.gaxOptions; | ||
// Add user specified uniqueWriterIdentity boolean, if any. | ||
reqOpts = { | ||
...reqOpts, | ||
...(uniqueWriterIdentity && { uniqueWriterIdentity }), | ||
}; | ||
[this.metadata] = await this.logging.configService.updateSink(reqOpts, metadata.gaxOptions); | ||
@@ -287,0 +299,0 @@ return [this.metadata]; |
@@ -7,2 +7,11 @@ # Changelog | ||
### [9.2.1](https://www.github.com/googleapis/nodejs-logging/compare/v9.2.0...v9.2.1) (2021-04-15) | ||
### Bug Fixes | ||
* accept uniqueWriterIdentity in setMetadata ([#1034](https://www.github.com/googleapis/nodejs-logging/issues/1034)) ([02e8bb4](https://www.github.com/googleapis/nodejs-logging/commit/02e8bb4b5983c48bf7bd5e16ba4ab9c226d3f28e)) | ||
* cloud functions resource.labels.region undefined ([#1028](https://www.github.com/googleapis/nodejs-logging/issues/1028)) ([3808656](https://www.github.com/googleapis/nodejs-logging/commit/38086569ad2915785e161542d7056ae3944948c8)) | ||
* **deps:** remove dependency on through2 ([#1023](https://www.github.com/googleapis/nodejs-logging/issues/1023)) ([485347f](https://www.github.com/googleapis/nodejs-logging/commit/485347fd3712565ae90e307a314bcdfeeb581379)) | ||
## [9.2.0](https://www.github.com/googleapis/nodejs-logging/compare/v9.1.1...v9.2.0) (2021-04-05) | ||
@@ -9,0 +18,0 @@ |
{ | ||
"name": "@google-cloud/logging", | ||
"version": "9.2.0", | ||
"version": "9.2.1", | ||
"description": "Stackdriver Logging Client Library for Node.js", | ||
@@ -63,5 +63,3 @@ "keywords": [ | ||
"pumpify": "^2.0.1", | ||
"snakecase-keys": "^3.1.2", | ||
"stream-events": "^1.0.5", | ||
"through2": "^4.0.0" | ||
"stream-events": "^1.0.5" | ||
}, | ||
@@ -80,4 +78,3 @@ "devDependencies": { | ||
"@types/pumpify": "^1.4.1", | ||
"@types/sinon": "^9.0.0", | ||
"@types/through2": "^2.0.34", | ||
"@types/sinon": "^10.0.0", | ||
"@types/tmp": "^0.2.0", | ||
@@ -84,0 +81,0 @@ "@types/uuid": "^8.0.0", |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
3529047
15
39
59160
22
- Removedsnakecase-keys@^3.1.2
- Removedthrough2@^4.0.0
- Removedmap-obj@4.3.0(transitive)
- Removedsnakecase-keys@3.2.1(transitive)
- Removedthrough2@4.0.2(transitive)
- Removedto-no-case@1.0.2(transitive)
- Removedto-snake-case@1.0.0(transitive)
- Removedto-space-case@1.0.0(transitive)