@sanity/migrate


@sanity/migrate - npm Package Compare versions

Comparing version 3.52.5-canary.18 to 3.52.5-canary.41
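
Every hunk in this diff follows the same pattern: the compiled output drops the helper code that older build targets required (temporary variables such as _a, __publicField property helpers, and verbose null checks) in favor of the native ES2020+ syntax those helpers were emulating: optional chaining (?.), nullish coalescing (??), logical nullish assignment (??=), and public class fields. One pair taken directly from the diff below shows the shape of the change; the two lines are equivalent:

// Old output, with optional chaining downleveled to an explicit null check:
const signal = options == null ? void 0 : options.signal;
// New output, using native optional chaining:
const signal = options?.signal;

The result is a smaller bundle with the same runtime behavior.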


lib/index.esm.js

@@ -229,7 +229,5 @@ import { fromString } from "@sanity/util/paths";

 async function collectDocumentMutations(migration, doc, context) {
-var _a;
-const documentMutations = Promise.resolve((_a = migration.document) == null ? void 0 : _a.call(migration, doc, context)), nodeMigrations = flatMapDeep(doc, async (value, path2) => {
-var _a2;
+const documentMutations = Promise.resolve(migration.document?.(doc, context)), nodeMigrations = flatMapDeep(doc, async (value, path2) => {
 const [nodeReturnValues, nodeTypeReturnValues] = await Promise.all([
-Promise.resolve((_a2 = migration.node) == null ? void 0 : _a2.call(migration, value, path2, context)),
+Promise.resolve(migration.node?.(value, path2, context)),
 Promise.resolve(migrateNodeType(migration, value, path2, context))
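
The (_a = fn) == null ? void 0 : _a.call(obj, ...) shape removed above is how bundlers downlevel an optional method call: the temporary avoids reading the property twice, and .call preserves the this binding. A minimal sketch of the equivalence, using the names from this hunk:

// Downleveled optional call: capture the property once, skip the call when it
// is null or undefined, and keep `migration` as the `this` value.
var _a;
const result = (_a = migration.document) == null ? void 0 : _a.call(migration, doc, context);
// Native optional call (ES2020), as the new output emits:
const sameResult = migration.document?.(doc, context);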

@@ -250,16 +248,15 @@ ]);

 function migrateNodeType(migration, value, path2, context) {
-var _a, _b, _c, _d, _e, _f;
 switch (getValueType(value)) {
 case "string":
-return (_a = migration.string) == null ? void 0 : _a.call(migration, value, path2, context);
+return migration.string?.(value, path2, context);
 case "number":
-return (_b = migration.number) == null ? void 0 : _b.call(migration, value, path2, context);
+return migration.number?.(value, path2, context);
 case "boolean":
-return (_c = migration.boolean) == null ? void 0 : _c.call(migration, value, path2, context);
+return migration.boolean?.(value, path2, context);
 case "object":
-return (_d = migration.object) == null ? void 0 : _d.call(migration, value, path2, context);
+return migration.object?.(value, path2, context);
 case "array":
-return (_e = migration.array) == null ? void 0 : _e.call(migration, value, path2, context);
+return migration.array?.(value, path2, context);
 case "null":
-return (_f = migration.null) == null ? void 0 : _f.call(migration, value, path2, context);
+return migration.null?.(value, path2, context);
 default:

@@ -297,5 +294,5 @@ throw new Error("Unknown value type");

 function bufferThroughFile(source, filename, options) {
-const signal = options == null ? void 0 : options.signal;
+const signal = options?.signal;
 let writeHandle, readHandle, bufferDone = !1;
-signal == null || signal.addEventListener("abort", async () => {
+signal?.addEventListener("abort", async () => {
 debug$1("Aborting bufferThroughFile"), await Promise.all([

@@ -311,3 +308,3 @@ writeHandle && writeHandle.close(),

 const { done, value } = await reader.read();
-if (done || signal != null && signal.aborted)
+if (done || signal?.aborted)
 return;

@@ -329,3 +326,3 @@ await writeHandle.write(value);

 );
-return bytesRead === 0 && !bufferDone && !(signal != null && signal.aborted) ? (debug$1("Not enough data in buffer file, waiting for more data to be written"), tryReadFromBuffer(handle)) : (totalBytesRead += bytesRead, { bytesRead, buffer });
+return bytesRead === 0 && !bufferDone && !signal?.aborted ? (debug$1("Not enough data in buffer file, waiting for more data to be written"), tryReadFromBuffer(handle)) : (totalBytesRead += bytesRead, { bytesRead, buffer });
 };

@@ -349,3 +346,3 @@ }

 const handle = readHandle;
-readHandle = null, debug$1("Closing read handle on %s", filename), await (await handle).close(), (options == null ? void 0 : options.keepFile) !== !0 && (debug$1("Removing buffer file", filename), await unlink(filename));
+readHandle = null, debug$1("Closing read handle on %s", filename), await (await handle).close(), options?.keepFile !== !0 && (debug$1("Removing buffer file", filename), await unlink(filename));
 }

@@ -361,3 +358,3 @@ }

 async start() {
-if (signal != null && signal.aborted)
+if (signal?.aborted)
 throw new Error("Cannot create new buffered readers on aborted stream");

@@ -389,3 +386,3 @@ debug$1("Reader started reading from file handle"), onReaderStart(), await init(), await getReadHandle();

 return new Uint8Array(0);
-totalLength != null || (totalLength = arrays.reduce((accumulator, currentValue) => accumulator + currentValue.length, 0));
+totalLength ??= arrays.reduce((accumulator, currentValue) => accumulator + currentValue.length, 0);
 const returnValue = new Uint8Array(totalLength);
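
The removed line is the downleveled form of logical nullish assignment, which assigns only when the target is null or undefined; notably, a totalLength of 0 is left alone by both forms. A sketch of the equivalence:

// Downleveled: || short-circuits when the null check on the left is true,
// so the assignment runs only for null or undefined.
totalLength != null || (totalLength = arrays.reduce((acc, cur) => acc + cur.length, 0));
// Native logical nullish assignment (ES2021), as in the new output:
totalLength ??= arrays.reduce((acc, cur) => acc + cur.length, 0);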

@@ -478,7 +475,10 @@ let offset = 0;

 }
-var __defProp$1 = Object.defineProperty, __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: !0, configurable: !0, writable: !0, value }) : obj[key] = value, __publicField$1 = (obj, key, value) => __defNormalProp$1(obj, typeof key != "symbol" ? key + "" : key, value);
 const EMPTY = new Uint8Array();
 class BufferList {
+buffered;
+shifted;
+queue;
+_offset;
 constructor() {
-__publicField$1(this, "buffered"), __publicField$1(this, "shifted"), __publicField$1(this, "queue"), __publicField$1(this, "_offset"), this.buffered = 0, this.shifted = 0, this.queue = new FIFO(), this._offset = 0;
+this.buffered = 0, this.shifted = 0, this.queue = new FIFO(), this._offset = 0;
 }
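
The deleted __defProp$1/__publicField$1 helpers are the standard downleveling of public class fields; the new output declares the fields on the class body instead. A condensed sketch of the two forms (the helper here is simplified from the removed line):

// Downleveled: a helper defines each field as an own property in the constructor.
const __publicField$1 = (obj, key, value) =>
  Object.defineProperty(obj, key, { enumerable: true, configurable: true, writable: true, value });
class BufferListOld {
  constructor() {
    __publicField$1(this, "buffered"); // field exists (as undefined) before assignment
    this.buffered = 0;
  }
}
// Native public class fields (ES2022), as in the new output:
class BufferListNew {
  buffered; // declared up front, initialized to undefined
  constructor() {
    this.buffered = 0;
  }
}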

@@ -629,3 +629,2 @@ push(buffer) {

 async pull(controller) {
-var _a, _b;
 if (readingChunk)

@@ -640,4 +639,4 @@ return;

 headerChunk,
-(_a = options.filenameEncoding) != null ? _a : "utf-8",
-(_b = options.allowUnknownFormat) != null ? _b : !1
+options.filenameEncoding ?? "utf-8",
+options.allowUnknownFormat ?? !1
 );

@@ -662,3 +661,3 @@ header ? header.size === null || header.size === 0 || header.type === "directory" ? controller.enqueue([header, emptyReadableStream()]) : (readingChunk = !0, controller.enqueue([

throw new Error("Premature end of tar stream");
controller.enqueue(chunk), totalBytesRead += chunk.byteLength, (chunk == null ? void 0 : chunk.byteLength) === remaining && (discardPadding(buffer, expectedBytes), controller.close(), next());
controller.enqueue(chunk), totalBytesRead += chunk.byteLength, chunk?.byteLength === remaining && (discardPadding(buffer, expectedBytes), controller.close(), next());
}

@@ -716,12 +715,12 @@ });

 path: `/data/export/${dataset}`,
-searchParams: documentTypes && (documentTypes == null ? void 0 : documentTypes.length) > 0 ? [["types", documentTypes.join(",")]] : []
+searchParams: documentTypes && documentTypes?.length > 0 ? [["types", documentTypes.join(",")]] : []
 }),
 mutate: (dataset, options) => {
 const params = [
-(options == null ? void 0 : options.tag) && ["tag", options.tag],
-(options == null ? void 0 : options.returnIds) && ["returnIds", "true"],
-(options == null ? void 0 : options.returnDocuments) && ["returnDocuments", "true"],
-(options == null ? void 0 : options.autoGenerateArrayKeys) && ["autoGenerateArrayKeys", "true"],
-(options == null ? void 0 : options.visibility) && ["visibility", options.visibility],
-(options == null ? void 0 : options.dryRun) && ["dryRun", "true"]
+options?.tag && ["tag", options.tag],
+options?.returnIds && ["returnIds", "true"],
+options?.returnDocuments && ["returnDocuments", "true"],
+options?.autoGenerateArrayKeys && ["autoGenerateArrayKeys", "true"],
+options?.visibility && ["visibility", options.visibility],
+options?.dryRun && ["dryRun", "true"]
 ].filter(Boolean);
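
The mutate endpoint builds its query string from an array of conditional [key, value] pairs: unset options evaluate to a falsy entry, and .filter(Boolean) strips them. A small illustration (the options object here is hypothetical):

// Hypothetical options, for illustration only:
const options = { returnIds: true, dryRun: true };
const params = [
  options?.tag && ["tag", options.tag],          // undefined -> falsy, filtered out
  options?.returnIds && ["returnIds", "true"],
  options?.dryRun && ["dryRun", "true"]
].filter(Boolean);
// params is now [["returnIds", "true"], ["dryRun", "true"]]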

@@ -737,14 +736,13 @@ return {

 };
-var __defProp = Object.defineProperty, __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: !0, configurable: !0, writable: !0, value }) : obj[key] = value, __publicField = (obj, key, value) => __defNormalProp(obj, key + "", value);
 class HTTPError extends Error {
+statusCode;
 constructor(statusCode, message) {
-super(message), __publicField(this, "statusCode"), this.name = "HTTPError", this.statusCode = statusCode;
+super(message), this.name = "HTTPError", this.statusCode = statusCode;
 }
 }
 async function assert2xx(res) {
-var _a, _b;
 if (res.status < 200 || res.status > 299) {
 const jsonResponse = await res.json().catch(() => null);
 let message;
-throw jsonResponse != null && jsonResponse.error ? (_a = jsonResponse == null ? void 0 : jsonResponse.error) != null && _a.description ? message = `${((_b = jsonResponse == null ? void 0 : jsonResponse.error) == null ? void 0 : _b.type) || res.status}: ${jsonResponse.error.description}` : message = `${jsonResponse.error}: ${jsonResponse.message}` : message = `HTTP Error ${res.status}: ${res.statusText}`, new HTTPError(res.status, message);
+throw jsonResponse?.error ? jsonResponse?.error?.description ? message = `${jsonResponse?.error?.type || res.status}: ${jsonResponse.error.description}` : message = `${jsonResponse.error}: ${jsonResponse.message}` : message = `HTTP Error ${res.status}: ${res.statusText}`, new HTTPError(res.status, message);
 }
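
assert2xx resolves its error message through a fallback chain: a structured error description first, then a plain error/message pair, then the raw HTTP status line. A runnable sketch of that chain with illustrative values (not taken from the package):

// Illustrative response values:
const res = { status: 400, statusText: "Bad Request" };
const jsonResponse = { error: { type: "validationError", description: "Malformed mutation" } };
const message = jsonResponse?.error
  ? jsonResponse?.error?.description
    ? `${jsonResponse?.error?.type || res.status}: ${jsonResponse.error.description}`
    : `${jsonResponse.error}: ${jsonResponse.message}`
  : `HTTP Error ${res.status}: ${res.statusText}`;
// -> "validationError: Malformed mutation"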

@@ -797,3 +795,2 @@ }

 function fromExportEndpoint(options) {
-var _a;
 return fetchStream(

@@ -804,3 +801,3 @@ toFetchOptions({

 token: options.token,
-apiHost: (_a = options.apiHost) != null ? _a : "api.sanity.io",
+apiHost: options.apiHost ?? "api.sanity.io",
 tag: "sanity.migration.export",

@@ -1000,3 +997,2 @@ endpoint: endpoints.data.export(options.dataset, options.documentTypes)

 async function* toFetchOptionsIterable(apiConfig, mutations) {
-var _a;
 for await (const transaction2 of mutations)

@@ -1008,3 +1004,3 @@ yield toFetchOptions({

tag: "sanity.migration.mutate",
apiHost: (_a = apiConfig.apiHost) != null ? _a : "api.sanity.io",
apiHost: apiConfig.apiHost ?? "api.sanity.io",
endpoint: endpoints.data.mutate(apiConfig.dataset, {

@@ -1019,3 +1015,2 @@ returnIds: !0,

 async function run(config, migration) {
-var _a, _b, _c;
 const stats = {

@@ -1051,4 +1046,3 @@ documents: 0,

 () => {
-var _a2;
-(_a2 = config.onProgress) == null || _a2.call(config, { ...stats, documents: ++stats.documents });
+config.onProgress?.({ ...stats, documents: ++stats.documents });
 }

@@ -1060,8 +1054,7 @@ ), {

 }), (muts) => {
-var _a2;
-stats.currentTransactions = arrify(muts), (_a2 = config.onProgress) == null || _a2.call(config, {
+stats.currentTransactions = arrify(muts), config.onProgress?.({
 ...stats,
 mutations: ++stats.mutations
 });
-}), concurrency = (_a = config == null ? void 0 : config.concurrency) != null ? _a : DEFAULT_MUTATION_CONCURRENCY;
+}), concurrency = config?.concurrency ?? DEFAULT_MUTATION_CONCURRENCY;
 if (concurrency > MAX_MUTATION_CONCURRENCY)

@@ -1072,18 +1065,14 @@ throw new Error(`Concurrency exceeds maximum allowed value (${MAX_MUTATION_CONCURRENCY})`);

 () => {
-var _a2;
-(_a2 = config.onProgress) == null || _a2.call(config, { ...stats, queuedBatches: ++stats.queuedBatches });
+config.onProgress?.({ ...stats, queuedBatches: ++stats.queuedBatches });
 }
 ), submit = async (opts) => lastValueFrom(parseJSON(concatStr(decodeText(await fetchAsyncIterator(opts))))), commits = await mapAsync(
 toFetchOptionsIterable(config.api, batches),
-(opts) => {
-var _a2;
-return (_a2 = config.onProgress) == null || _a2.call(config, { ...stats, pending: ++stats.pending }), submit(opts);
-},
+(opts) => (config.onProgress?.({ ...stats, pending: ++stats.pending }), submit(opts)),
 concurrency
 );
 for await (const result of commits)
-stats.completedTransactions.push(result), (_b = config.onProgress) == null || _b.call(config, {
+stats.completedTransactions.push(result), config.onProgress?.({
 ...stats
 });
-(_c = config.onProgress) == null || _c.call(config, {
+config.onProgress?.({
 ...stats,
@@ -1090,0 +1079,0 @@ done: !0

@@ -243,7 +243,5 @@ "use strict";

 async function collectDocumentMutations(migration, doc, context) {
-var _a;
-const documentMutations = Promise.resolve((_a = migration.document) == null ? void 0 : _a.call(migration, doc, context)), nodeMigrations = flatMapDeep(doc, async (value, path2) => {
-var _a2;
+const documentMutations = Promise.resolve(migration.document?.(doc, context)), nodeMigrations = flatMapDeep(doc, async (value, path2) => {
 const [nodeReturnValues, nodeTypeReturnValues] = await Promise.all([
-Promise.resolve((_a2 = migration.node) == null ? void 0 : _a2.call(migration, value, path2, context)),
+Promise.resolve(migration.node?.(value, path2, context)),
 Promise.resolve(migrateNodeType(migration, value, path2, context))

@@ -264,16 +262,15 @@ ]);

 function migrateNodeType(migration, value, path2, context) {
-var _a, _b, _c, _d, _e, _f;
 switch (getValueType(value)) {
 case "string":
-return (_a = migration.string) == null ? void 0 : _a.call(migration, value, path2, context);
+return migration.string?.(value, path2, context);
 case "number":
-return (_b = migration.number) == null ? void 0 : _b.call(migration, value, path2, context);
+return migration.number?.(value, path2, context);
 case "boolean":
-return (_c = migration.boolean) == null ? void 0 : _c.call(migration, value, path2, context);
+return migration.boolean?.(value, path2, context);
 case "object":
-return (_d = migration.object) == null ? void 0 : _d.call(migration, value, path2, context);
+return migration.object?.(value, path2, context);
 case "array":
-return (_e = migration.array) == null ? void 0 : _e.call(migration, value, path2, context);
+return migration.array?.(value, path2, context);
 case "null":
-return (_f = migration.null) == null ? void 0 : _f.call(migration, value, path2, context);
+return migration.null?.(value, path2, context);
 default:

@@ -311,5 +308,5 @@ throw new Error("Unknown value type");

 function bufferThroughFile(source, filename, options) {
-const signal = options == null ? void 0 : options.signal;
+const signal = options?.signal;
 let writeHandle, readHandle, bufferDone = !1;
-signal == null || signal.addEventListener("abort", async () => {
+signal?.addEventListener("abort", async () => {
 debug$1("Aborting bufferThroughFile"), await Promise.all([

@@ -325,3 +322,3 @@ writeHandle && writeHandle.close(),

 const { done, value } = await reader.read();
-if (done || signal != null && signal.aborted)
+if (done || signal?.aborted)
 return;

@@ -343,3 +340,3 @@ await writeHandle.write(value);

 );
-return bytesRead === 0 && !bufferDone && !(signal != null && signal.aborted) ? (debug$1("Not enough data in buffer file, waiting for more data to be written"), tryReadFromBuffer(handle)) : (totalBytesRead += bytesRead, { bytesRead, buffer });
+return bytesRead === 0 && !bufferDone && !signal?.aborted ? (debug$1("Not enough data in buffer file, waiting for more data to be written"), tryReadFromBuffer(handle)) : (totalBytesRead += bytesRead, { bytesRead, buffer });
 };

@@ -363,3 +360,3 @@ }

 const handle = readHandle;
-readHandle = null, debug$1("Closing read handle on %s", filename), await (await handle).close(), (options == null ? void 0 : options.keepFile) !== !0 && (debug$1("Removing buffer file", filename), await promises.unlink(filename));
+readHandle = null, debug$1("Closing read handle on %s", filename), await (await handle).close(), options?.keepFile !== !0 && (debug$1("Removing buffer file", filename), await promises.unlink(filename));
 }

@@ -375,3 +372,3 @@ }

 async start() {
-if (signal != null && signal.aborted)
+if (signal?.aborted)
 throw new Error("Cannot create new buffered readers on aborted stream");

@@ -403,3 +400,3 @@ debug$1("Reader started reading from file handle"), onReaderStart(), await init(), await getReadHandle();

 return new Uint8Array(0);
-totalLength != null || (totalLength = arrays.reduce((accumulator, currentValue) => accumulator + currentValue.length, 0));
+totalLength ??= arrays.reduce((accumulator, currentValue) => accumulator + currentValue.length, 0);
 const returnValue = new Uint8Array(totalLength);

@@ -492,7 +489,10 @@ let offset = 0;

 }
-var __defProp$1 = Object.defineProperty, __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: !0, configurable: !0, writable: !0, value }) : obj[key] = value, __publicField$1 = (obj, key, value) => __defNormalProp$1(obj, typeof key != "symbol" ? key + "" : key, value);
 const EMPTY = new Uint8Array();
 class BufferList {
+buffered;
+shifted;
+queue;
+_offset;
 constructor() {
-__publicField$1(this, "buffered"), __publicField$1(this, "shifted"), __publicField$1(this, "queue"), __publicField$1(this, "_offset"), this.buffered = 0, this.shifted = 0, this.queue = new FIFO__default.default(), this._offset = 0;
+this.buffered = 0, this.shifted = 0, this.queue = new FIFO__default.default(), this._offset = 0;
 }

@@ -643,3 +643,2 @@ push(buffer) {

 async pull(controller) {
-var _a, _b;
 if (readingChunk)

@@ -654,4 +653,4 @@ return;

 headerChunk,
-(_a = options.filenameEncoding) != null ? _a : "utf-8",
-(_b = options.allowUnknownFormat) != null ? _b : !1
+options.filenameEncoding ?? "utf-8",
+options.allowUnknownFormat ?? !1
 );

@@ -676,3 +675,3 @@ header ? header.size === null || header.size === 0 || header.type === "directory" ? controller.enqueue([header, emptyReadableStream()]) : (readingChunk = !0, controller.enqueue([

throw new Error("Premature end of tar stream");
controller.enqueue(chunk), totalBytesRead += chunk.byteLength, (chunk == null ? void 0 : chunk.byteLength) === remaining && (discardPadding(buffer, expectedBytes), controller.close(), next());
controller.enqueue(chunk), totalBytesRead += chunk.byteLength, chunk?.byteLength === remaining && (discardPadding(buffer, expectedBytes), controller.close(), next());
}

@@ -730,12 +729,12 @@ });

 path: `/data/export/${dataset}`,
-searchParams: documentTypes && (documentTypes == null ? void 0 : documentTypes.length) > 0 ? [["types", documentTypes.join(",")]] : []
+searchParams: documentTypes && documentTypes?.length > 0 ? [["types", documentTypes.join(",")]] : []
 }),
 mutate: (dataset, options) => {
 const params = [
-(options == null ? void 0 : options.tag) && ["tag", options.tag],
-(options == null ? void 0 : options.returnIds) && ["returnIds", "true"],
-(options == null ? void 0 : options.returnDocuments) && ["returnDocuments", "true"],
-(options == null ? void 0 : options.autoGenerateArrayKeys) && ["autoGenerateArrayKeys", "true"],
-(options == null ? void 0 : options.visibility) && ["visibility", options.visibility],
-(options == null ? void 0 : options.dryRun) && ["dryRun", "true"]
+options?.tag && ["tag", options.tag],
+options?.returnIds && ["returnIds", "true"],
+options?.returnDocuments && ["returnDocuments", "true"],
+options?.autoGenerateArrayKeys && ["autoGenerateArrayKeys", "true"],
+options?.visibility && ["visibility", options.visibility],
+options?.dryRun && ["dryRun", "true"]
 ].filter(Boolean);

@@ -751,14 +750,13 @@ return {

 };
-var __defProp2 = Object.defineProperty, __defNormalProp = (obj, key, value) => key in obj ? __defProp2(obj, key, { enumerable: !0, configurable: !0, writable: !0, value }) : obj[key] = value, __publicField = (obj, key, value) => __defNormalProp(obj, key + "", value);
 class HTTPError extends Error {
+statusCode;
 constructor(statusCode, message) {
-super(message), __publicField(this, "statusCode"), this.name = "HTTPError", this.statusCode = statusCode;
+super(message), this.name = "HTTPError", this.statusCode = statusCode;
 }
 }
 async function assert2xx(res) {
-var _a, _b;
 if (res.status < 200 || res.status > 299) {
 const jsonResponse = await res.json().catch(() => null);
 let message;
-throw jsonResponse != null && jsonResponse.error ? (_a = jsonResponse == null ? void 0 : jsonResponse.error) != null && _a.description ? message = `${((_b = jsonResponse == null ? void 0 : jsonResponse.error) == null ? void 0 : _b.type) || res.status}: ${jsonResponse.error.description}` : message = `${jsonResponse.error}: ${jsonResponse.message}` : message = `HTTP Error ${res.status}: ${res.statusText}`, new HTTPError(res.status, message);
+throw jsonResponse?.error ? jsonResponse?.error?.description ? message = `${jsonResponse?.error?.type || res.status}: ${jsonResponse.error.description}` : message = `${jsonResponse.error}: ${jsonResponse.message}` : message = `HTTP Error ${res.status}: ${res.statusText}`, new HTTPError(res.status, message);
 }

@@ -811,3 +809,2 @@ }

 function fromExportEndpoint(options) {
-var _a;
 return fetchStream(

@@ -818,3 +815,3 @@ toFetchOptions({

 token: options.token,
-apiHost: (_a = options.apiHost) != null ? _a : "api.sanity.io",
+apiHost: options.apiHost ?? "api.sanity.io",
 tag: "sanity.migration.export",

@@ -1014,3 +1011,2 @@ endpoint: endpoints.data.export(options.dataset, options.documentTypes)

 async function* toFetchOptionsIterable(apiConfig, mutations) {
-var _a;
 for await (const transaction2 of mutations)

@@ -1022,3 +1018,3 @@ yield toFetchOptions({

tag: "sanity.migration.mutate",
apiHost: (_a = apiConfig.apiHost) != null ? _a : "api.sanity.io",
apiHost: apiConfig.apiHost ?? "api.sanity.io",
endpoint: endpoints.data.mutate(apiConfig.dataset, {

@@ -1033,3 +1029,2 @@ returnIds: !0,

 async function run(config, migration) {
-var _a, _b, _c;
 const stats = {

@@ -1065,4 +1060,3 @@ documents: 0,

 () => {
-var _a2;
-(_a2 = config.onProgress) == null || _a2.call(config, { ...stats, documents: ++stats.documents });
+config.onProgress?.({ ...stats, documents: ++stats.documents });
 }

@@ -1074,8 +1068,7 @@ ), {

 }), (muts) => {
-var _a2;
-stats.currentTransactions = arrify__default.default(muts), (_a2 = config.onProgress) == null || _a2.call(config, {
+stats.currentTransactions = arrify__default.default(muts), config.onProgress?.({
 ...stats,
 mutations: ++stats.mutations
 });
-}), concurrency = (_a = config == null ? void 0 : config.concurrency) != null ? _a : DEFAULT_MUTATION_CONCURRENCY;
+}), concurrency = config?.concurrency ?? DEFAULT_MUTATION_CONCURRENCY;
 if (concurrency > MAX_MUTATION_CONCURRENCY)

@@ -1086,18 +1079,14 @@ throw new Error(`Concurrency exceeds maximum allowed value (${MAX_MUTATION_CONCURRENCY})`);

 () => {
-var _a2;
-(_a2 = config.onProgress) == null || _a2.call(config, { ...stats, queuedBatches: ++stats.queuedBatches });
+config.onProgress?.({ ...stats, queuedBatches: ++stats.queuedBatches });
 }
 ), submit = async (opts) => lastValueFrom(parseJSON(concatStr(decodeText(await fetchAsyncIterator(opts))))), commits = await mapAsync(
 toFetchOptionsIterable(config.api, batches),
-(opts) => {
-var _a2;
-return (_a2 = config.onProgress) == null || _a2.call(config, { ...stats, pending: ++stats.pending }), submit(opts);
-},
+(opts) => (config.onProgress?.({ ...stats, pending: ++stats.pending }), submit(opts)),
 concurrency
 );
 for await (const result of commits)
-stats.completedTransactions.push(result), (_b = config.onProgress) == null || _b.call(config, {
+stats.completedTransactions.push(result), config.onProgress?.({
 ...stats
 });
-(_c = config.onProgress) == null || _c.call(config, {
+config.onProgress?.({
 ...stats,

@@ -1104,0 +1093,0 @@ done: !0

package.json

 {
 "name": "@sanity/migrate",
-"version": "3.52.5-canary.18+3b2329cf64",
+"version": "3.52.5-canary.41+812e1e3930",
 "description": "Tooling for running data migrations on Sanity.io projects",

@@ -55,4 +55,4 @@ "keywords": [

"@sanity/client": "^6.21.1",
"@sanity/types": "3.52.5-canary.18+3b2329cf64",
"@sanity/util": "3.52.5-canary.18+3b2329cf64",
"@sanity/types": "3.52.5-canary.41+812e1e3930",
"@sanity/util": "3.52.5-canary.41+812e1e3930",
"arrify": "^2.0.1",

@@ -74,3 +74,3 @@ "debug": "^4.3.4",

 },
-"gitHead": "3b2329cf640954f308676a4b02f3fda701c65587"
+"gitHead": "812e1e393037e12bad90533b992ab4043396b21a"
 }

