@sanity/import
Comparing version 0.116.0-alpha.5bbff73d to 0.116.0-alpha.98a84961
lib/assignArrayKeys.js
 'use strict';
 var crypto = require('crypto');
-var isPlainObject = require('lodash/isPlainObject');
+var isPlainObject = require('lodash/isPlainObject'
 // Note: Mutates in-place
-function assignArrayKeys(obj) {
+);function assignArrayKeys(obj) {
   if (Array.isArray(obj)) {
@@ -9,0 +9,0 @@ obj.forEach(function (item) {
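For context on what this module does (only its compiled-output formatting changed here): it walks a document and gives plain objects inside arrays a random `_key`. A minimal sketch of the idea, not the package's exact implementation:

var crypto = require('crypto')
var isPlainObject = require('lodash/isPlainObject')

// Sketch: recursively assign a random `_key` to plain objects found in arrays
function assignArrayKeysSketch(obj) {
  if (Array.isArray(obj)) {
    obj.forEach(function (item) {
      if (isPlainObject(item) && !item._key) {
        // 8 random bytes as hex keeps keys short but collision-unlikely
        item._key = crypto.randomBytes(8).toString('hex')
      }
      assignArrayKeysSketch(item)
    })
  } else if (isPlainObject(obj)) {
    Object.keys(obj).forEach(function (key) {
      assignArrayKeysSketch(obj[key])
    })
  }
  return obj
}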
lib/documentHasErrors.js
 'use strict';
-module.exports = function documentHasErrors(doc) {
+function documentHasErrors(doc) {
   if (typeof doc._id !== 'undefined' && typeof doc._id !== 'string') {
@@ -13,2 +13,11 @@ return `Document contained an invalid "_id" property - must be a string`;
   return null;
-};
+}
+documentHasErrors.validate = function (doc, index) {
+  var err = documentHasErrors(doc);
+  if (err) {
+    throw new Error(`Failed to parse document at index #${index}: ${err}`);
+  }
+};
+module.exports = documentHasErrors;
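The new `validate` helper throws instead of returning an error string, so array input can fail fast with the index of the offending document. A usage sketch (the require path assumes the compiled lib/ layout shown in this diff):

var documentHasErrors = require('@sanity/import/lib/documentHasErrors')

var docs = [
  {_id: 'movie_1', _type: 'movie'}, // valid
  {_id: 123, _type: 'movie'} // invalid: `_id` must be a string
]

// `some` passes (doc, index) straight through to validate(); since validate
// never returns a truthy value, iteration only stops early by throwing:
// Error: Failed to parse document at index #1: Document contained an
// invalid "_id" property - must be a string
docs.some(documentHasErrors.validate)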
lib/import.js
 'use strict';
-var importFromStream = function () {
-  var _ref = _asyncToGenerator(function* (stream, opts) {
-    var options = validateOptions(stream, opts);
-    // Get raw documents from the stream
-    debug('Streaming input source to array of documents');
+var importDocuments = function () {
+  var _ref = _asyncToGenerator(function* (input, opts) {
+    var options = validateOptions(input, opts);
     options.onProgress({ step: 'Reading/validating data file' });
-    var raw = yield streamToArray(stream);
+    var isStream = typeof input.pipe === 'function';
+    var documents = input;
+    if (isStream) {
+      debug('Streaming input source to array of documents');
+      documents = yield streamToArray(input);
+    } else {
+      documents.some(documentHasErrors.validate);
+    }
     // User might not have applied `_key` on array elements which are objects;
     // if this is the case, generate random keys to help realtime engine
-    var keyed = raw.map(function (doc) {
+    var keyed = documents.map(function (doc) {
       return assignArrayKeys(doc);
-    });
+    }
     // Sanity prefers to have a `_type` on every object. Make sure references
     // has `_type` set to `reference`.
-    var docs = keyed.map(function (doc) {
+    );var docs = keyed.map(function (doc) {
       return setTypeOnReferences(doc);
-    });
+    }
     // Find references that will need strengthening when import is done
-    var strongRefs = docs.map(getStrongRefs).filter(Boolean);
+    );var strongRefs = docs.map(getStrongRefs).filter(Boolean
     // Extract asset references from the documents
-    var assetRefs = flatten(docs.map(getAssetRefs).filter(function (ref) {
+    );var assetRefs = flatten(docs.map(getAssetRefs).filter(function (ref) {
       return ref.length;
-    }));
+    })
     // Remove asset references from the documents
-    var assetless = docs.map(unsetAssetRefs);
+    );var assetless = docs.map(unsetAssetRefs
     // Make strong references weak so they can be imported in any order
-    var weakened = assetless.map(weakenStrongRefs);
+    );var weakened = assetless.map(weakenStrongRefs
     // Create batches of documents to import. Try to keep batches below a certain
     // byte-size (since document may vary greatly in size depending on type etc)
-    var batches = batchDocuments(weakened);
+    );var batches = batchDocuments(weakened
     // Trigger actual import process
-    debug('Starting import of documents');
-    var docsImported = yield importBatches(batches, options);
+    );debug('Starting import of documents');
+    var docsImported = yield importBatches(batches, options
     // Documents are imported, now proceed with post-import operations
-    debug('Uploading assets');
-    yield uploadAssets(assetRefs, options);
+    );debug('Uploading assets');
+    yield uploadAssets(assetRefs, options
     // Strengthen references
-    debug('Strengthening references');
-    yield strengthenReferences(strongRefs, options);
+    );debug('Strengthening references');
+    yield strengthenReferences(strongRefs, options
     // Return number of documents imported
-    return docsImported;
+    );return docsImported;
   });
-  return function importFromStream(_x, _x2) {
+  return function importDocuments(_x, _x2) {
     return _ref.apply(this, arguments);
@@ -76,2 +81,3 @@ };
 var uploadAssets = require('./uploadAssets');
+var documentHasErrors = require('./documentHasErrors');
 var batchDocuments = require('./batchDocuments');
@@ -86,2 +92,2 @@ var importBatches = require('./importBatches');
-module.exports = importFromStream;
+module.exports = importDocuments;
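The new branch decides between the two input shapes with a simple duck-typing check. Isolated for illustration (a sketch, not code from the package):

var fs = require('fs')

function isReadableStream(input) {
  // Anything exposing a pipe() function is treated as a readable stream;
  // everything else is assumed to be an array of documents.
  return Boolean(input) && typeof input.pipe === 'function'
}

console.log(isReadableStream(fs.createReadStream(__filename))) // true
console.log(isReadableStream([{_id: 'a', _type: 'movie'}])) // false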
lib/uploadAssets.js
@@ -10,16 +10,16 @@ 'use strict';
 // `assets` is an array of objects with shape: {documentId, path, url, type}
-    var assetMap = getAssetMap(assets);
+    var assetMap = getAssetMap(assets
     // Create a function we can call for every completed upload to report progress
-    var progress = progressStepper(options.onProgress, {
+    );var progress = progressStepper(options.onProgress, {
       step: 'Importing assets (files/images)',
       total: assetMap.size
-    });
+    }
     // Loop over all unique URLs and ensure they exist, and if not, upload them
-    var mapOptions = { concurrency: ASSET_UPLOAD_CONCURRENCY };
-    var assetIds = yield pMap(assetMap.keys(), ensureAsset.bind(null, options, progress), mapOptions);
+    );var mapOptions = { concurrency: ASSET_UPLOAD_CONCURRENCY };
+    var assetIds = yield pMap(assetMap.keys(), ensureAsset.bind(null, options, progress), mapOptions
     // Loop over all documents that need asset references to be set
-    var batches = yield setAssetReferences(assetMap, assetIds, options);
+    );var batches = yield setAssetReferences(assetMap, assetIds, options);
     return batches.reduce(function (prev, add) {
@@ -39,3 +39,6 @@ return prev + add;
-    var _assetKey$split = assetKey.split('#', 2),
+    var _assetKey$split = assetKey.split('#', 2
+    // Download the asset in order for us to create a hash
+    ),
         _assetKey$split2 = _slicedToArray(_assetKey$split, 2),
@@ -45,11 +48,8 @@ type = _assetKey$split2[0],
-    // Download the asset in order for us to create a hash
     debug('[Asset #%d] Downloading %s', i, url);
     var buffer = yield getBufferForUri(url);
-    var label = getHash(buffer);
+    var label = getHash(buffer
     // See if the item exists on the server
-    debug('[Asset #%d] Checking for asset with hash %s', i, label);
+    );debug('[Asset #%d] Checking for asset with hash %s', i, label);
     var assetId = yield getAssetIdForLabel(client, type, label);
@@ -66,6 +66,6 @@ if (assetId) {
-    var filename = basename(pathname);
+    var filename = basename(pathname
     // If it doesn't exist, we want to upload it
-    debug('[Asset #%d] Uploading %s with URL %s', i, type, url);
+    );debug('[Asset #%d] Uploading %s with URL %s', i, type, url);
     var asset = yield client.assets.upload(type, buffer, { label, filename });
@@ -158,3 +158,3 @@ progress();
   }));
-  }, []);
+  }, []
@@ -164,3 +164,3 @@ // We now have an array of simple tasks, each containing:
   // Instead of doing a single mutation per asset, let's batch them up
-  var batches = [];
+  );var batches = [];
   for (var i = 0; i < patchTasks.length; i += ASSET_PATCH_BATCH_SIZE) {
@@ -174,6 +174,6 @@ batches.push(patchTasks.slice(i, i + ASSET_PATCH_BATCH_SIZE));
     total: batches.length
-  });
+  }
   // Now perform the batch operations in parallel with a given concurrency
-  var mapOptions = { concurrency: ASSET_PATCH_CONCURRENCY };
+  );var mapOptions = { concurrency: ASSET_PATCH_CONCURRENCY };
   return pMap(batches, setAssetReferenceBatch.bind(null, client, progress), mapOptions);
@@ -180,0 +180,0 @@ }
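The asset flow above downloads each unique URL, hashes the bytes, and only uploads when no asset carrying that hash-label exists yet. A sketch of that dedupe idea; the hash algorithm and the lookup query are assumptions, not the package's actual getHash/getAssetIdForLabel:

var crypto = require('crypto')

function getContentHash(buffer) {
  // Assumption: any stable content hash works as a dedupe label
  return crypto.createHash('sha1').update(buffer).digest('hex')
}

function ensureAssetSketch(client, type, buffer, filename) {
  var label = getContentHash(buffer)
  var docType = type === 'image' ? 'sanity.imageAsset' : 'sanity.fileAsset'
  // Hypothetical lookup: reuse an existing asset that already carries this label
  return client
    .fetch('*[_type == $docType && label == $label][0]._id', { docType, label })
    .then(function (existingId) {
      if (existingId) {
        return existingId
      }
      // Otherwise upload, attaching the label so future runs can find it
      return client.assets.upload(type, buffer, { label, filename }).then(function (asset) {
        return asset._id
      })
    })
}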
lib/validateOptions.js
@@ -9,3 +9,3 @@ 'use strict';
-function validateOptions(stream, opts) {
+function validateOptions(input, opts) {
   var options = defaults({}, opts, {
@@ -16,4 +16,4 @@ operation: defaultOperation,
-  if (typeof stream.pipe !== 'function') {
-    throw new Error('Stream does not seem to be a readable stream - no "pipe" method found');
+  if (!input || typeof input.pipe !== 'function' && !Array.isArray(input)) {
+    throw new Error('Stream does not seem to be a readable stream or an array');
   }
@@ -20,0 +20,0 @@
package.json
 {
   "name": "@sanity/import",
-  "version": "0.116.0-alpha.5bbff73d",
+  "version": "0.116.0-alpha.98a84961",
   "description": "Import documents to a Sanity dataset",
@@ -23,3 +23,3 @@ "main": "lib/import.js",
   "dependencies": {
-    "@sanity/mutator": "0.116.0-alpha.5bbff73d",
+    "@sanity/mutator": "0.116.0-alpha.98a84961",
     "debug": "^2.6.3",
@@ -35,3 +35,3 @@ "get-uri": "^2.0.1",
   "devDependencies": {
-    "@sanity/client": "0.116.0-alpha.5bbff73d",
+    "@sanity/client": "0.116.0-alpha.98a84961",
     "babel-preset-env": "^1.6.0",
@@ -38,0 +38,0 @@ "eslint": "^4.6.1",
README.md
@@ -25,2 +25,3 @@ # @sanity/import
+// Input can either be a stream or an array of documents
 const input = fs.createReadStream('my-documents.ndjson')
@@ -27,0 +28,0 @@ sanityImport(input, {
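Given the README note above, importing a plain array should now look roughly like this (a hedged sketch; the `client` and `operation` option names come from validateOptions in this diff, and the project values are placeholders):

const sanityImport = require('@sanity/import')
const sanityClient = require('@sanity/client')

const client = sanityClient({
  projectId: 'myProjectId', // placeholder values
  dataset: 'myDataset',
  token: 'myWriteToken'
})

const documents = [
  {_id: 'movie_1', _type: 'movie', title: 'The Lego Movie'},
  {_type: 'movie', title: 'Inception'} // `_id` may be omitted
]

sanityImport(documents, {client, operation: 'create'})
  .then(numDocs => console.log('Imported %d documents', numDocs))
  .catch(err => console.error('Import failed: %s', err.message))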
src/documentHasErrors.js
@@ -1,2 +0,2 @@
-module.exports = function documentHasErrors(doc) {
+function documentHasErrors(doc) {
   if (typeof doc._id !== 'undefined' && typeof doc._id !== 'string') {
@@ -12,1 +12,10 @@ return `Document contained an invalid "_id" property - must be a string`
 }
+documentHasErrors.validate = (doc, index) => {
+  const err = documentHasErrors(doc)
+  if (err) {
+    throw new Error(`Failed to parse document at index #${index}: ${err}`)
+  }
+}
+module.exports = documentHasErrors
src/import.js
@@ -8,2 +8,3 @@ const debug = require('debug')('sanity:import')
 const uploadAssets = require('./uploadAssets')
+const documentHasErrors = require('./documentHasErrors')
 const batchDocuments = require('./batchDocuments')
@@ -18,13 +19,18 @@ const importBatches = require('./importBatches')
-async function importFromStream(stream, opts) {
-  const options = validateOptions(stream, opts)
-  // Get raw documents from the stream
-  debug('Streaming input source to array of documents')
+async function importDocuments(input, opts) {
+  const options = validateOptions(input, opts)
   options.onProgress({step: 'Reading/validating data file'})
-  const raw = await streamToArray(stream)
+  const isStream = typeof input.pipe === 'function'
+  let documents = input
+  if (isStream) {
+    debug('Streaming input source to array of documents')
+    documents = await streamToArray(input)
+  } else {
+    documents.some(documentHasErrors.validate)
+  }
   // User might not have applied `_key` on array elements which are objects;
   // if this is the case, generate random keys to help realtime engine
-  const keyed = raw.map(doc => assignArrayKeys(doc))
+  const keyed = documents.map(doc => assignArrayKeys(doc))
@@ -67,2 +73,2 @@ // Sanity prefers to have a `_type` on every object. Make sure references
-module.exports = importFromStream
+module.exports = importDocuments
src/validateOptions.js
@@ -7,3 +7,3 @@ const noop = require('lodash/noop')
-function validateOptions(stream, opts) {
+function validateOptions(input, opts) {
   const options = defaults({}, opts, {
@@ -14,17 +14,11 @@ operation: defaultOperation,
-  if (typeof stream.pipe !== 'function') {
-    throw new Error(
-      'Stream does not seem to be a readable stream - no "pipe" method found'
-    )
+  if (!input || (typeof input.pipe !== 'function' && !Array.isArray(input))) {
+    throw new Error('Stream does not seem to be a readable stream or an array')
   }
   if (!options.client) {
-    throw new Error(
-      '`options.client` must be set to an instance of @sanity/client'
-    )
+    throw new Error('`options.client` must be set to an instance of @sanity/client')
   }
-  const missing = clientMethods.find(
-    key => typeof options.client[key] !== 'function'
-  )
+  const missing = clientMethods.find(key => typeof options.client[key] !== 'function')
@@ -31,0 +25,0 @@ if (missing) {
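The rewritten guard accepts readable streams and arrays and rejects everything else, including falsy values. Isolated for illustration (not code from the package):

function acceptsInput(input) {
  return Boolean(input) && (typeof input.pipe === 'function' || Array.isArray(input))
}

console.log(acceptsInput(process.stdin)) // true  - streams expose pipe()
console.log(acceptsInput([])) // true  - an empty array is still an array
console.log(acceptsInput('my-documents.ndjson')) // false - a path string is rejected
console.log(acceptsInput(null)) // false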
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added @sanity/mutator@0.116.0-alpha.98a84961 (transitive)
- Removed @sanity/mutator@0.116.0-alpha.5bbff73d (transitive)