
@sanity/import

@sanity/import - npm Package Compare versions

Comparing version 0.115.0 to 0.115.1

lib/assignArrayKeys.js
 'use strict';
 var crypto = require('crypto');
-var isPlainObject = require('lodash/isPlainObject');
+var isPlainObject = require('lodash/isPlainObject'
 // Note: Mutates in-place
-function assignArrayKeys(obj) {
+);function assignArrayKeys(obj) {
   if (Array.isArray(obj)) {

@@ -9,0 +9,0 @@ obj.forEach(function (item) {
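Only the parenthesis placement in the compiled output changed here, but the function is worth spelling out: it walks a document and gives object items inside arrays a random `_key`, which Sanity's realtime engine uses to address array elements. A minimal sketch of that behavior, assuming the recursion and the key format (only the two requires, the `Array.isArray` check, and the `forEach` over items are visible in the diff):

```js
'use strict';
var crypto = require('crypto');
var isPlainObject = require('lodash/isPlainObject');

// Note: mutates in-place. Recursion into nested objects and the
// hex key format are assumptions for illustration.
function assignArrayKeys(obj) {
  if (Array.isArray(obj)) {
    obj.forEach(function (item) {
      if (isPlainObject(item) && !item._key) {
        item._key = crypto.randomBytes(8).toString('hex');
      }
      assignArrayKeys(item);
    });
  } else if (isPlainObject(obj)) {
    Object.keys(obj).forEach(function (key) {
      assignArrayKeys(obj[key]);
    });
  }
  return obj;
}
```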

lib/documentHasErrors.js
 'use strict';
 module.exports = function documentHasErrors(doc) {
-  if (typeof doc._id !== 'string') {
-    return `Document did not contain required "_id" property of type string`;
+  if (typeof doc._id !== 'undefined' && typeof doc._id !== 'string') {
+    return `Document contained an invalid "_id" property - must be a string`;
   }

@@ -7,0 +7,0 @@
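This is the one functional change in the release: `_id` goes from required to optional, but must still be a string when present. Roughly, assuming the validator returns a falsy value when a document passes (the success path is outside the diff):

```js
var documentHasErrors = require('@sanity/import/lib/documentHasErrors');

// 0.115.0: rejected ("did not contain required _id property")
// 0.115.1: passes this check - documents may now omit _id entirely
documentHasErrors({_type: 'article', title: 'No explicit ID'});

// Rejected by both versions: _id is present but not a string
documentHasErrors({_id: 42, _type: 'article'});
```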

lib/import.js
@@ -5,53 +5,53 @@ 'use strict';

 var _ref = _asyncToGenerator(function* (stream, opts) {
-  var options = validateOptions(stream, opts);
+  var options = validateOptions(stream, opts
   // Get raw documents from the stream
-  debug('Streaming input source to array of documents');
+  );debug('Streaming input source to array of documents');
   options.onProgress({ step: 'Reading/validating data file' });
-  var raw = yield streamToArray(stream);
+  var raw = yield streamToArray(stream
   // User might not have applied `_key` on array elements which are objects;
   // if this is the case, generate random keys to help realtime engine
-  var keyed = raw.map(function (doc) {
+  );var keyed = raw.map(function (doc) {
     return assignArrayKeys(doc);
-  });
+  }
   // Sanity prefers to have a `_type` on every object. Make sure references
   // has `_type` set to `reference`.
-  var docs = keyed.map(function (doc) {
+  );var docs = keyed.map(function (doc) {
     return setTypeOnReferences(doc);
-  });
+  }
   // Find references that will need strengthening when import is done
-  var strongRefs = docs.map(getStrongRefs).filter(Boolean);
+  );var strongRefs = docs.map(getStrongRefs).filter(Boolean
   // Extract asset references from the documents
-  var assetRefs = flatten(docs.map(getAssetRefs).filter(function (ref) {
+  );var assetRefs = flatten(docs.map(getAssetRefs).filter(function (ref) {
     return ref.length;
-  }));
+  })
   // Remove asset references from the documents
-  var assetless = docs.map(unsetAssetRefs);
+  );var assetless = docs.map(unsetAssetRefs
   // Make strong references weak so they can be imported in any order
-  var weakened = assetless.map(weakenStrongRefs);
+  );var weakened = assetless.map(weakenStrongRefs
   // Create batches of documents to import. Try to keep batches below a certain
   // byte-size (since document may vary greatly in size depending on type etc)
-  var batches = batchDocuments(weakened);
+  );var batches = batchDocuments(weakened
   // Trigger actual import process
-  debug('Starting import of documents');
-  var docsImported = yield importBatches(batches, options);
+  );debug('Starting import of documents');
+  var docsImported = yield importBatches(batches, options
   // Documents are imported, now proceed with post-import operations
-  debug('Uploading assets');
-  yield uploadAssets(assetRefs, options);
+  );debug('Uploading assets');
+  yield uploadAssets(assetRefs, options
   // Strengthen references
-  debug('Strengthening references');
-  yield strengthenReferences(strongRefs, options);
+  );debug('Strengthening references');
+  yield strengthenReferences(strongRefs, options
   // Return number of documents imported
-  return docsImported;
+  );return docsImported;
 });

@@ -58,0 +58,0 @@
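The changed lines in this file are formatting noise from the compiler, but the generator body reads as a summary of the whole pipeline: validate options, stream the input into an array, assign `_key`s, set `_type` on references, strip and remember asset references, weaken strong references, batch by size, import, upload assets, then strengthen references again. Invoking it looks roughly like this; `onProgress` and the numeric return value are visible in the diff, while the other option names and the ndjson input file are assumptions:

```js
var fs = require('fs');
var sanityClient = require('@sanity/client');
var sanityImport = require('@sanity/import');

// Hypothetical project and dataset; the token needs write access
var client = sanityClient({
  projectId: 'myprojectid',
  dataset: 'production',
  token: process.env.SANITY_IMPORT_TOKEN
});

sanityImport(fs.createReadStream('documents.ndjson'), {
  client: client,
  onProgress: function (event) {
    // Receives steps such as 'Reading/validating data file'
    console.log(event.step);
  }
}).then(function (numDocs) {
  // The generator resolves with `docsImported`
  console.log('Imported %d documents', numDocs);
});
```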

lib/uploadAssets.js
@@ -10,16 +10,16 @@ 'use strict';

   // `assets` is an array of objects with shape: {documentId, path, url, type}
-  var assetMap = getAssetMap(assets);
+  var assetMap = getAssetMap(assets
   // Create a function we can call for every completed upload to report progress
-  var progress = progressStepper(options.onProgress, {
+  );var progress = progressStepper(options.onProgress, {
     step: 'Importing assets (files/images)',
     total: assetMap.size
-  });
+  }
   // Loop over all unique URLs and ensure they exist, and if not, upload them
-  var mapOptions = { concurrency: ASSET_UPLOAD_CONCURRENCY };
-  var assetIds = yield pMap(assetMap.keys(), ensureAsset.bind(null, options, progress), mapOptions);
+  );var mapOptions = { concurrency: ASSET_UPLOAD_CONCURRENCY };
+  var assetIds = yield pMap(assetMap.keys(), ensureAsset.bind(null, options, progress), mapOptions
   // Loop over all documents that need asset references to be set
-  var batches = yield setAssetReferences(assetMap, assetIds, options);
+  );var batches = yield setAssetReferences(assetMap, assetIds, options);
   return batches.reduce(function (prev, add) {

@@ -39,3 +39,6 @@ return prev + add;

-  var _assetKey$split = assetKey.split('#', 2),
+  var _assetKey$split = assetKey.split('#', 2
+  // Download the asset in order for us to create a hash
+  ),
       _assetKey$split2 = _slicedToArray(_assetKey$split, 2),

@@ -45,11 +48,8 @@ type = _assetKey$split2[0],

-  // Download the asset in order for us to create a hash
   debug('[Asset #%d] Downloading %s', i, url);
   var buffer = yield getBufferForUri(url);
-  var label = getHash(buffer);
+  var label = getHash(buffer
   // See if the item exists on the server
-  debug('[Asset #%d] Checking for asset with hash %s', i, label);
+  );debug('[Asset #%d] Checking for asset with hash %s', i, label);
   var assetId = yield getAssetIdForLabel(client, type, label);

@@ -66,6 +66,6 @@ if (assetId) {

-  var filename = basename(pathname);
+  var filename = basename(pathname
   // If it doesn't exist, we want to upload it
-  debug('[Asset #%d] Uploading %s with URL %s', i, type, url);
+  );debug('[Asset #%d] Uploading %s with URL %s', i, type, url);
   var asset = yield client.assets.upload(type, buffer, { label, filename });

@@ -158,3 +158,3 @@ progress();

   }));
-  }, []);
+  }, []

@@ -164,3 +164,3 @@ // We now have an array of simple tasks, each containing:

   // Instead of doing a single mutation per asset, let's batch them up
-  var batches = [];
+  );var batches = [];
   for (var i = 0; i < patchTasks.length; i += ASSET_PATCH_BATCH_SIZE) {

@@ -174,6 +174,6 @@ batches.push(patchTasks.slice(i, i + ASSET_PATCH_BATCH_SIZE));

     total: batches.length
-  });
+  }
   // Now perform the batch operations in parallel with a given concurrency
-  var mapOptions = { concurrency: ASSET_PATCH_CONCURRENCY };
+  );var mapOptions = { concurrency: ASSET_PATCH_CONCURRENCY };
   return pMap(batches, setAssetReferenceBatch.bind(null, client, progress), mapOptions);

@@ -180,0 +180,0 @@ }
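Behind the reshuffled parentheses, `ensureAsset` is a content-addressed dedup: download the asset, hash the buffer, look for an existing asset labeled with that hash, and upload only on a miss. A condensed sketch of that flow; `lookupAssetId` stands in for the module's `getAssetIdForLabel` helper (whose query is not shown), and the md5 choice is an assumption since the diff only names `getHash()`:

```js
var crypto = require('crypto');

function ensureAsset(client, lookupAssetId, type, buffer, filename) {
  // Hash the downloaded buffer to get a stable content label
  var label = crypto.createHash('md5').update(buffer).digest('hex');
  return lookupAssetId(client, type, label).then(function (assetId) {
    if (assetId) {
      // Asset already on the server - reuse it and skip the upload
      return assetId;
    }
    // client.assets.upload(type, buffer, {label, filename}) is the
    // exact call visible in the diff above
    return client.assets
      .upload(type, buffer, {label: label, filename: filename})
      .then(function (asset) {
        return asset._id; // assuming the response document carries an _id
      });
  });
}
```

Because the label is derived from the content, re-running an import reuses previously uploaded assets instead of duplicating them.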

package.json
 {
   "name": "@sanity/import",
-  "version": "0.115.0",
+  "version": "0.115.1",
   "description": "Import documents to a Sanity dataset",

@@ -23,3 +23,3 @@ "main": "lib/import.js",

   "dependencies": {
-    "@sanity/mutator": "^0.115.0",
+    "@sanity/mutator": "^0.115.1",
     "debug": "^2.6.3",

@@ -35,3 +35,3 @@ "get-uri": "^2.0.1",

   "devDependencies": {
-    "@sanity/client": "^0.115.0",
+    "@sanity/client": "^0.115.1",
     "babel-preset-env": "^1.6.0",

@@ -38,0 +38,0 @@ "eslint": "^4.6.1",

src/documentHasErrors.js
 module.exports = function documentHasErrors(doc) {
-  if (typeof doc._id !== 'string') {
-    return `Document did not contain required "_id" property of type string`
+  if (typeof doc._id !== 'undefined' && typeof doc._id !== 'string') {
+    return `Document contained an invalid "_id" property - must be a string`
   }

@@ -5,0 +5,0 @@
