@temporalio/common - npm Package Compare versions

Comparing version 1.11.1 to 1.11.2


lib/activity-options.d.ts

@@ -39,4 +39,4 @@ import type { coresdk } from '@temporalio/proto';

  * Maximum time of a single Activity execution attempt. Note that the Temporal Server doesn't detect Worker process
- * failures directly. It relies on this timeout to detect that an Activity that didn't complete on time. So this
- * timeout should be as short as the longest possible execution of the Activity body. Potentially long running
+ * failures directly: instead, it relies on this timeout to detect that an Activity didn't complete on time. Therefore, this
+ * timeout should be as short as the longest possible execution of the Activity body. Potentially long-running
  * Activities must specify {@link heartbeatTimeout} and call {@link activity.Context.heartbeat} periodically for

@@ -52,3 +52,3 @@ * timely failure detection.

 /**
- * Time that the Activity Task can stay in the Task Queue before it is picked up by a Worker. Do not specify this timeout unless using host specific Task Queues for Activity Tasks are being used for routing.
+ * Time that the Activity Task can stay in the Task Queue before it is picked up by a Worker. Do not specify this timeout unless using host-specific Task Queues for Activity Tasks are being used for routing.
  * `scheduleToStartTimeout` is always non-retryable. Retrying after this timeout doesn't make sense as it would just put the Activity Task back into the same Task Queue.

@@ -61,3 +61,3 @@ *

 /**
- * Total time that a workflow is willing to wait for Activity to complete.
+ * Total time that a workflow is willing to wait for the Activity to complete.
  * `scheduleToCloseTimeout` limits the total time of an Activity's execution including retries (use {@link startToCloseTimeout} to limit the time of a single attempt).

@@ -64,0 +64,0 @@ *
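To put these option docs in context: in workflow code they are fields passed to `proxyActivities`, and the heartbeating the comments call for happens on the activity side. A minimal sketch under stated assumptions (the `processFile` activity and the concrete durations are illustrative, not part of this diff):

// workflow.ts - illustrative only; the option names are the ones documented above
import { proxyActivities } from '@temporalio/workflow';
import type * as activities from './activities'; // hypothetical activities module

const { processFile } = proxyActivities<typeof activities>({
  startToCloseTimeout: '5 minutes',  // cap on a single attempt; also how a lost Worker is detected
  heartbeatTimeout: '30 seconds',    // long-running activities must heartbeat within this window
  scheduleToCloseTimeout: '1 hour',  // overall budget including retries
});

// activities.ts - the periodic heartbeat that the startToCloseTimeout doc requires
import { Context } from '@temporalio/activity';

export async function processFile(path: string): Promise<void> {
  for (let chunk = 0; chunk < 100; chunk++) {
    // ... process one slice of the file ...
    Context.current().heartbeat(chunk); // report liveness (and optional progress) to the Server
  }
}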

@@ -56,3 +56,3 @@ import { Payload } from '../interfaces';

 export declare function arrayFromPayloads(converter: PayloadConverter, payloads?: Payload[] | null): unknown[];
-export declare function mapFromPayloads<K extends string>(converter: PayloadConverter, map?: Record<K, Payload> | null | undefined): Record<K, unknown> | undefined | null;
+export declare function mapFromPayloads<K extends string>(converter: PayloadConverter, map?: Record<K, Payload> | null | undefined): Record<K, unknown> | undefined;
 export interface PayloadConverterWithEncoding {

@@ -59,0 +59,0 @@ /**
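The only change in this declaration file is the narrowed return type of `mapFromPayloads`: a null or undefined input now always yields undefined, where 1.11.1 could return null. A sketch of the new contract (assuming the helper and `defaultPayloadConverter` are reachable through the package's public exports):

import { defaultPayloadConverter, mapFromPayloads } from '@temporalio/common'; // export path assumed

const a = mapFromPayloads(defaultPayloadConverter, null);      // undefined (was null in 1.11.1)
const b = mapFromPayloads(defaultPayloadConverter, undefined); // undefined, as before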

@@ -63,3 +63,3 @@ "use strict";

     if (map == null)
-        return map;
+        return undefined;
     return Object.fromEntries(Object.entries(map).map(([k, payload]) => {

@@ -66,0 +66,0 @@ const value = converter.fromPayload(payload);

@@ -99,3 +99,3 @@ "use strict";

         return payloads;
-    return Object.fromEntries(await Promise.all(Object.entries(payloads).map(async ([k, v]) => [k, await decode(codecs, [v])])));
+    return Object.fromEntries(await Promise.all(Object.entries(payloads).map(async ([k, v]) => [k, (await decode(codecs, [v]))[0]])));
 }

@@ -102,0 +102,0 @@ exports.decodeOptionalMap = decodeOptionalMap;
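This hunk is a genuine bug fix, not a cleanup: `decode` maps an array of payloads to an array of decoded payloads, so decoding the one-element array `[v]` returns a one-element array. 1.11.1 stored that array itself as the map value; 1.11.2 unwraps element `[0]`. A standalone illustration under an assumed signature (this is not the library's code):

import type { Payload } from '@temporalio/common';

// Assumed shape of the internal helper: a codec pipeline from Payload[] to Payload[].
declare function decode(codecs: unknown[], payloads: Payload[]): Promise<Payload[]>;

async function valueBefore(codecs: unknown[], v: Payload) {
  return await decode(codecs, [v]); // Payload[] of length 1 - wrong shape for a map value
}

async function valueAfter(codecs: unknown[], v: Payload) {
  return (await decode(codecs, [v]))[0]; // the single decoded Payload
}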

@@ -5,6 +5,2 @@ import * as proto from '@temporalio/proto';

-/**
- * Convert a proto JSON representation of History to a valid History object
- */
-export declare function historyFromJSON(history: unknown): History;
 /**
  * JSON representation of Temporal's {@link Payload} protobuf object

@@ -23,2 +19,11 @@ */

+/**
+ * Convert a proto JSON representation of History to a valid History object
+ */
+export declare function historyFromJSON(history: unknown): History;
+/**
+ * Convert an History object, e.g. as returned by `WorkflowClient.list().withHistory()`, to a JSON
+ * string that adheres to the same norm as JSON history files produced by other Temporal tools.
+ */
+export declare function historyToJSON(history: History): string;
 /**
  * Convert from protobuf payload to JSON

@@ -25,0 +30,0 @@ */
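These declarations show `historyFromJSON` relocated below the payload types and the new `historyToJSON` added beside it, giving the module a symmetric pair of converters. A hedged usage sketch (the module specifier is my assumption about where this .d.ts lives in the package; the history.json file is illustrative):

import * as fs from 'node:fs';
import { historyFromJSON, historyToJSON } from '@temporalio/common/lib/proto-utils'; // path assumed

const raw = JSON.parse(fs.readFileSync('history.json', 'utf8')); // e.g. a history file exported by Temporal tooling
const history = historyFromJSON(raw); // proto JSON -> History object
const json = historyToJSON(history);  // History object -> JSON string in the same norm
fs.writeFileSync('history-roundtrip.json', json);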

@@ -26,3 +26,3 @@ "use strict";

 Object.defineProperty(exports, "__esModule", { value: true });
-exports.JSONToPayload = exports.payloadToJSON = exports.historyFromJSON = void 0;
+exports.JSONToPayload = exports.payloadToJSON = exports.historyToJSON = exports.historyFromJSON = void 0;
 const proto3_json_serializer_1 = require("proto3-json-serializer");

@@ -35,67 +35,67 @@ const proto = __importStar(require("@temporalio/proto"));

 const payloadType = patched.lookupType('temporal.api.common.v1.Payload');
-function pascalCaseToConstantCase(s) {
-    return s.replace(/[^\b][A-Z]/g, (m) => `${m[0]}_${m[1]}`).toUpperCase();
-}
-function fixEnumValue(obj, attr, prefix) {
-    return (obj[attr] && {
-        [attr]: obj[attr].startsWith(prefix) ? obj[attr] : `${prefix}_${pascalCaseToConstantCase(obj[attr])}`,
-    });
-}
-// fromProto3JSON doesn't allow null values on 'bytes' fields. This turns out to be a problem for payloads.
-// Recursively descend on objects and array, and fix in-place any payload that has a null data field
-function fixPayloads(e) {
-    function isPayload(p) {
-        return p && typeof p === 'object' && 'metadata' in p && 'data' in p;
-    }
-    if (e && typeof e === 'object') {
-        if (isPayload(e)) {
-            if (e.data === null) {
-                const { data: _data, ...rest } = e;
-                return rest;
-            }
-            return e;
-        }
-        if (Array.isArray(e))
-            return e.map(fixPayloads);
-        return Object.fromEntries(Object.entries(e).map(([k, v]) => [k, fixPayloads(v)]));
-    }
-    return e;
-}
-function fixHistoryEvent(e) {
-    const type = Object.keys(e).find((k) => k.endsWith('EventAttributes'));
-    if (!type) {
-        throw new TypeError(`Missing attributes in history event: ${JSON.stringify(e)}`);
-    }
-    // Fix payloads with null data
-    e = fixPayloads(e);
-    return {
-        ...e,
-        ...fixEnumValue(e, 'eventType', 'EVENT_TYPE'),
-        [type]: {
-            ...e[type],
-            ...(e[type].taskQueue && {
-                taskQueue: { ...e[type].taskQueue, ...fixEnumValue(e[type].taskQueue, 'kind', 'TASK_QUEUE_KIND') },
-            }),
-            ...fixEnumValue(e[type], 'parentClosePolicy', 'PARENT_CLOSE_POLICY'),
-            ...fixEnumValue(e[type], 'workflowIdReusePolicy', 'WORKFLOW_ID_REUSE_POLICY'),
-            ...fixEnumValue(e[type], 'initiator', 'CONTINUE_AS_NEW_INITIATOR'),
-            ...fixEnumValue(e[type], 'retryState', 'RETRY_STATE'),
-            ...(e[type].childWorkflowExecutionFailureInfo && {
-                childWorkflowExecutionFailureInfo: {
-                    ...e[type].childWorkflowExecutionFailureInfo,
-                    ...fixEnumValue(e[type].childWorkflowExecutionFailureInfo, 'retryState', 'RETRY_STATE'),
-                },
-            }),
-        },
-    };
-}
-function fixHistory(h) {
-    return {
-        events: h.events.map(fixHistoryEvent),
-    };
-}
-/**
- * Convert a proto JSON representation of History to a valid History object
- */
-function historyFromJSON(history) {
+/**
+ * Convert a proto JSON representation of History to a valid History object
+ */
+function historyFromJSON(history) {
+    function pascalCaseToConstantCase(s) {
+        return s.replace(/[^\b][A-Z]/g, (m) => `${m[0]}_${m[1]}`).toUpperCase();
+    }
+    function fixEnumValue(obj, attr, prefix) {
+        return (obj[attr] && {
+            [attr]: obj[attr].startsWith(prefix) ? obj[attr] : `${prefix}_${pascalCaseToConstantCase(obj[attr])}`,
+        });
+    }
+    // fromProto3JSON doesn't allow null values on 'bytes' fields. This turns out to be a problem for payloads.
+    // Recursively descend on objects and array, and fix in-place any payload that has a null data field
+    function fixPayloads(e) {
+        function isPayload(p) {
+            return p && typeof p === 'object' && 'metadata' in p && 'data' in p;
+        }
+        if (e && typeof e === 'object') {
+            if (isPayload(e)) {
+                if (e.data === null) {
+                    const { data: _data, ...rest } = e;
+                    return rest;
+                }
+                return e;
+            }
+            if (Array.isArray(e))
+                return e.map(fixPayloads);
+            return Object.fromEntries(Object.entries(e).map(([k, v]) => [k, fixPayloads(v)]));
+        }
+        return e;
+    }
+    function fixHistoryEvent(e) {
+        const type = Object.keys(e).find((k) => k.endsWith('EventAttributes'));
+        if (!type) {
+            throw new TypeError(`Missing attributes in history event: ${JSON.stringify(e)}`);
+        }
+        // Fix payloads with null data
+        e = fixPayloads(e);
+        return {
+            ...e,
+            ...fixEnumValue(e, 'eventType', 'EVENT_TYPE'),
+            [type]: {
+                ...e[type],
+                ...(e[type].taskQueue && {
+                    taskQueue: { ...e[type].taskQueue, ...fixEnumValue(e[type].taskQueue, 'kind', 'TASK_QUEUE_KIND') },
+                }),
+                ...fixEnumValue(e[type], 'parentClosePolicy', 'PARENT_CLOSE_POLICY'),
+                ...fixEnumValue(e[type], 'workflowIdReusePolicy', 'WORKFLOW_ID_REUSE_POLICY'),
+                ...fixEnumValue(e[type], 'initiator', 'CONTINUE_AS_NEW_INITIATOR'),
+                ...fixEnumValue(e[type], 'retryState', 'RETRY_STATE'),
+                ...(e[type].childWorkflowExecutionFailureInfo && {
+                    childWorkflowExecutionFailureInfo: {
+                        ...e[type].childWorkflowExecutionFailureInfo,
+                        ...fixEnumValue(e[type].childWorkflowExecutionFailureInfo, 'retryState', 'RETRY_STATE'),
+                    },
+                }),
+            },
+        };
+    }
+    function fixHistory(h) {
+        return {
+            events: h.events.map(fixHistoryEvent),
+        };
+    }
     if (typeof history !== 'object' || history == null || !Array.isArray(history.events)) {

@@ -112,2 +112,26 @@ throw new TypeError('Invalid history, expected an object with an array of events');

+/**
+ * Convert an History object, e.g. as returned by `WorkflowClient.list().withHistory()`, to a JSON
+ * string that adheres to the same norm as JSON history files produced by other Temporal tools.
+ */
+function historyToJSON(history) {
+    // toProto3JSON doesn't correctly handle some of our "bytes" fields, passing them untouched to the
+    // output, after which JSON.stringify() would convert them to an array of numbers. As a workaround,
+    // recursively walk the object and convert all Buffer instances to base64 strings. Note this only
+    // works on proto3-json-serializer v2.0.0. v2.0.2 throws an error before we even get the chance
+    // to fix the buffers. See https://github.com/googleapis/proto3-json-serializer-nodejs/issues/103.
+    function fixBuffers(e) {
+        if (e && typeof e === 'object') {
+            if (e instanceof Buffer)
+                return e.toString('base64');
+            if (Array.isArray(e))
+                return e.map(fixBuffers);
+            return Object.fromEntries(Object.entries(e).map(([k, v]) => [k, fixBuffers(v)]));
+        }
+        return e;
+    }
+    const protoJson = (0, proto3_json_serializer_1.toProto3JSON)(proto.temporal.api.history.v1.History.fromObject(history));
+    return JSON.stringify(fixBuffers(protoJson), null, 2);
+}
+exports.historyToJSON = historyToJSON;
 /**
  * Convert from protobuf payload to JSON

@@ -114,0 +138,0 @@ */
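The fixBuffers workaround above is easiest to see in isolation: Node's Buffer serializes through JSON.stringify as an object wrapping an array of byte values, while JSON history files expect base64 strings (and, per the comment, the helper only gets a chance to run on proto3-json-serializer v2.0.0). A standalone illustration of the mismatch (plain Node, not library code):

const buf = Buffer.from('hello');
JSON.stringify({ data: buf });
// => '{"data":{"type":"Buffer","data":[104,101,108,108,111]}}' - byte values, not base64
JSON.stringify({ data: buf.toString('base64') });
// => '{"data":"aGVsbG8="}' - the form other Temporal tooling expects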

package.json

 {
   "name": "@temporalio/common",
-  "version": "1.11.1",
+  "version": "1.11.2",
   "description": "Common library for code that's used across the Client, Worker, and/or Workflow",

@@ -15,3 +15,3 @@ "main": "lib/index.js",

"dependencies": {
"@temporalio/proto": "1.11.1",
"@temporalio/proto": "1.11.2",
"long": "^5.2.3",

@@ -40,3 +40,3 @@ "ms": "^3.0.0-canary.1",

   ],
-  "gitHead": "c4163f8a2ae87fe84dd435d56f69e23c5eefeffb"
+  "gitHead": "e78b4f71236ccd3227e674bad68439e961fec639"
 }

@@ -51,4 +51,4 @@ import type { coresdk } from '@temporalio/proto';

  * Maximum time of a single Activity execution attempt. Note that the Temporal Server doesn't detect Worker process
- * failures directly. It relies on this timeout to detect that an Activity that didn't complete on time. So this
- * timeout should be as short as the longest possible execution of the Activity body. Potentially long running
+ * failures directly: instead, it relies on this timeout to detect that an Activity didn't complete on time. Therefore, this
+ * timeout should be as short as the longest possible execution of the Activity body. Potentially long-running
  * Activities must specify {@link heartbeatTimeout} and call {@link activity.Context.heartbeat} periodically for

@@ -65,3 +65,3 @@ * timely failure detection.

 /**
- * Time that the Activity Task can stay in the Task Queue before it is picked up by a Worker. Do not specify this timeout unless using host specific Task Queues for Activity Tasks are being used for routing.
+ * Time that the Activity Task can stay in the Task Queue before it is picked up by a Worker. Do not specify this timeout unless using host-specific Task Queues for Activity Tasks are being used for routing.
  * `scheduleToStartTimeout` is always non-retryable. Retrying after this timeout doesn't make sense as it would just put the Activity Task back into the same Task Queue.

@@ -75,3 +75,3 @@ *

 /**
- * Total time that a workflow is willing to wait for Activity to complete.
+ * Total time that a workflow is willing to wait for the Activity to complete.
  * `scheduleToCloseTimeout` limits the total time of an Activity's execution including retries (use {@link startToCloseTimeout} to limit the time of a single attempt).

@@ -78,0 +78,0 @@ *

@@ -90,4 +90,4 @@ import { decode, encode } from '../encoding';

   map?: Record<K, Payload> | null | undefined
-): Record<K, unknown> | undefined | null {
-  if (map == null) return map;
+): Record<K, unknown> | undefined {
+  if (map == null) return undefined;
   return Object.fromEntries(

@@ -94,0 +94,0 @@ Object.entries(map).map(([k, payload]): [K, unknown] => {

@@ -125,3 +125,3 @@ import { Payload } from '../interfaces';

   return Object.fromEntries(
-    await Promise.all(Object.entries(payloads).map(async ([k, v]) => [k, await decode(codecs, [v])]))
+    await Promise.all(Object.entries(payloads).map(async ([k, v]) => [k, (await decode(codecs, [v]))[0]]))
   );

@@ -128,0 +128,0 @@ }

@@ -8,2 +8,16 @@ import { fromProto3JSON, toProto3JSON } from 'proto3-json-serializer';

+/**
+ * JSON representation of Temporal's {@link Payload} protobuf object
+ */
+export interface JSONPayload {
+  /**
+   * Mapping of key to base64 encoded value
+   */
+  metadata?: Record<string, string> | null;
+  /**
+   * base64 encoded value
+   */
+  data?: string | null;
+}
 // Cast to any because the generated proto module types are missing the lookupType method

@@ -14,76 +28,76 @@ const patched = patchProtobufRoot(proto) as any;

-function pascalCaseToConstantCase(s: string) {
-  return s.replace(/[^\b][A-Z]/g, (m) => `${m[0]}_${m[1]}`).toUpperCase();
-}
-function fixEnumValue<O extends Record<string, any>>(obj: O, attr: keyof O, prefix: string) {
-  return (
-    obj[attr] && {
-      [attr]: obj[attr].startsWith(prefix) ? obj[attr] : `${prefix}_${pascalCaseToConstantCase(obj[attr])}`,
-    }
-  );
-}
-// fromProto3JSON doesn't allow null values on 'bytes' fields. This turns out to be a problem for payloads.
-// Recursively descend on objects and array, and fix in-place any payload that has a null data field
-function fixPayloads<T>(e: T): T {
-  function isPayload(p: any): p is JSONPayload {
-    return p && typeof p === 'object' && 'metadata' in p && 'data' in p;
-  }
-  if (e && typeof e === 'object') {
-    if (isPayload(e)) {
-      if (e.data === null) {
-        const { data: _data, ...rest } = e;
-        return rest as T;
-      }
-      return e;
-    }
-    if (Array.isArray(e)) return e.map(fixPayloads) as T;
-    return Object.fromEntries(Object.entries(e as object).map(([k, v]) => [k, fixPayloads(v)])) as T;
-  }
-  return e;
-}
-function fixHistoryEvent(e: Record<string, any>) {
-  const type = Object.keys(e).find((k) => k.endsWith('EventAttributes'));
-  if (!type) {
-    throw new TypeError(`Missing attributes in history event: ${JSON.stringify(e)}`);
-  }
-  // Fix payloads with null data
-  e = fixPayloads(e);
-  return {
-    ...e,
-    ...fixEnumValue(e, 'eventType', 'EVENT_TYPE'),
-    [type]: {
-      ...e[type],
-      ...(e[type].taskQueue && {
-        taskQueue: { ...e[type].taskQueue, ...fixEnumValue(e[type].taskQueue, 'kind', 'TASK_QUEUE_KIND') },
-      }),
-      ...fixEnumValue(e[type], 'parentClosePolicy', 'PARENT_CLOSE_POLICY'),
-      ...fixEnumValue(e[type], 'workflowIdReusePolicy', 'WORKFLOW_ID_REUSE_POLICY'),
-      ...fixEnumValue(e[type], 'initiator', 'CONTINUE_AS_NEW_INITIATOR'),
-      ...fixEnumValue(e[type], 'retryState', 'RETRY_STATE'),
-      ...(e[type].childWorkflowExecutionFailureInfo && {
-        childWorkflowExecutionFailureInfo: {
-          ...e[type].childWorkflowExecutionFailureInfo,
-          ...fixEnumValue(e[type].childWorkflowExecutionFailureInfo, 'retryState', 'RETRY_STATE'),
-        },
-      }),
-    },
-  };
-}
-function fixHistory(h: Record<string, any>) {
-  return {
-    events: h.events.map(fixHistoryEvent),
-  };
-}
-/**
- * Convert a proto JSON representation of History to a valid History object
- */
-export function historyFromJSON(history: unknown): History {
+/**
+ * Convert a proto JSON representation of History to a valid History object
+ */
+export function historyFromJSON(history: unknown): History {
+  function pascalCaseToConstantCase(s: string) {
+    return s.replace(/[^\b][A-Z]/g, (m) => `${m[0]}_${m[1]}`).toUpperCase();
+  }
+  function fixEnumValue<O extends Record<string, any>>(obj: O, attr: keyof O, prefix: string) {
+    return (
+      obj[attr] && {
+        [attr]: obj[attr].startsWith(prefix) ? obj[attr] : `${prefix}_${pascalCaseToConstantCase(obj[attr])}`,
+      }
+    );
+  }
+  // fromProto3JSON doesn't allow null values on 'bytes' fields. This turns out to be a problem for payloads.
+  // Recursively descend on objects and array, and fix in-place any payload that has a null data field
+  function fixPayloads<T>(e: T): T {
+    function isPayload(p: any): p is JSONPayload {
+      return p && typeof p === 'object' && 'metadata' in p && 'data' in p;
+    }
+    if (e && typeof e === 'object') {
+      if (isPayload(e)) {
+        if (e.data === null) {
+          const { data: _data, ...rest } = e;
+          return rest as T;
+        }
+        return e;
+      }
+      if (Array.isArray(e)) return e.map(fixPayloads) as T;
+      return Object.fromEntries(Object.entries(e as object).map(([k, v]) => [k, fixPayloads(v)])) as T;
+    }
+    return e;
+  }
+  function fixHistoryEvent(e: Record<string, any>) {
+    const type = Object.keys(e).find((k) => k.endsWith('EventAttributes'));
+    if (!type) {
+      throw new TypeError(`Missing attributes in history event: ${JSON.stringify(e)}`);
+    }
+    // Fix payloads with null data
+    e = fixPayloads(e);
+    return {
+      ...e,
+      ...fixEnumValue(e, 'eventType', 'EVENT_TYPE'),
+      [type]: {
+        ...e[type],
+        ...(e[type].taskQueue && {
+          taskQueue: { ...e[type].taskQueue, ...fixEnumValue(e[type].taskQueue, 'kind', 'TASK_QUEUE_KIND') },
+        }),
+        ...fixEnumValue(e[type], 'parentClosePolicy', 'PARENT_CLOSE_POLICY'),
+        ...fixEnumValue(e[type], 'workflowIdReusePolicy', 'WORKFLOW_ID_REUSE_POLICY'),
+        ...fixEnumValue(e[type], 'initiator', 'CONTINUE_AS_NEW_INITIATOR'),
+        ...fixEnumValue(e[type], 'retryState', 'RETRY_STATE'),
+        ...(e[type].childWorkflowExecutionFailureInfo && {
+          childWorkflowExecutionFailureInfo: {
+            ...e[type].childWorkflowExecutionFailureInfo,
+            ...fixEnumValue(e[type].childWorkflowExecutionFailureInfo, 'retryState', 'RETRY_STATE'),
+          },
+        }),
+      },
+    };
+  }
+  function fixHistory(h: Record<string, any>) {
+    return {
+      events: h.events.map(fixHistoryEvent),
+    };
+  }
   if (typeof history !== 'object' || history == null || !Array.isArray((history as any).events)) {

@@ -100,13 +114,22 @@ throw new TypeError('Invalid history, expected an object with an array of events');

-/**
- * JSON representation of Temporal's {@link Payload} protobuf object
- */
-export interface JSONPayload {
-  /**
-   * Mapping of key to base64 encoded value
-   */
-  metadata?: Record<string, string> | null;
-  /**
-   * base64 encoded value
-   */
-  data?: string | null;
-}
+/**
+ * Convert an History object, e.g. as returned by `WorkflowClient.list().withHistory()`, to a JSON
+ * string that adheres to the same norm as JSON history files produced by other Temporal tools.
+ */
+export function historyToJSON(history: History): string {
+  // toProto3JSON doesn't correctly handle some of our "bytes" fields, passing them untouched to the
+  // output, after which JSON.stringify() would convert them to an array of numbers. As a workaround,
+  // recursively walk the object and convert all Buffer instances to base64 strings. Note this only
+  // works on proto3-json-serializer v2.0.0. v2.0.2 throws an error before we even get the chance
+  // to fix the buffers. See https://github.com/googleapis/proto3-json-serializer-nodejs/issues/103.
+  function fixBuffers<T>(e: T): T {
+    if (e && typeof e === 'object') {
+      if (e instanceof Buffer) return e.toString('base64') as any;
+      if (Array.isArray(e)) return e.map(fixBuffers) as T;
+      return Object.fromEntries(Object.entries(e as object).map(([k, v]) => [k, fixBuffers(v)])) as T;
+    }
+    return e;
+  }
+  const protoJson = toProto3JSON(proto.temporal.api.history.v1.History.fromObject(history) as any);
+  return JSON.stringify(fixBuffers(protoJson), null, 2);
+}

@@ -113,0 +136,0 @@
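One subtlety shared by both versions of historyFromJSON above: JSON history files may carry enum values in PascalCase without the proto prefix, while fromProto3JSON expects the canonical CONSTANT_CASE names. The private helpers normalize this; a standalone sketch mirroring (not importing) them:

// Mirrors the private pascalCaseToConstantCase helper shown in the diff above.
function pascalCaseToConstantCase(s: string): string {
  return s.replace(/[^\b][A-Z]/g, (m) => `${m[0]}_${m[1]}`).toUpperCase();
}

pascalCaseToConstantCase('WorkflowExecutionStarted'); // => 'WORKFLOW_EXECUTION_STARTED'
// fixEnumValue then prepends the expected prefix only when it is missing:
//   'WorkflowExecutionStarted'              -> 'EVENT_TYPE_WORKFLOW_EXECUTION_STARTED'
//   'EVENT_TYPE_WORKFLOW_EXECUTION_STARTED' -> unchanged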

