registry-sync
Comparing version 2.5.2 to 2.5.4
{
"name": "registry-sync",
"version": "2.5.2",
"version": "2.5.4",
"description": "synchronize a remote npm registry for private use",
@@ -17,28 +17,28 @@ "repository": "https://github.com/heikkipora/registry-sync",
"dependencies": {
"@babel/runtime": "7.11.2",
"axios": "0.20.0",
"@babel/runtime": "7.12.5",
"axios": "0.21.1",
"bluebird": "3.7.2",
"commander": "6.0.0",
"commander": "6.2.1",
"lodash": "4.17.20",
"mkdirp": "1.0.4",
"rimraf": "3.0.2",
"semver": "7.3.2",
"semver": "7.3.4",
"ssri": "8.0.0",
"streamifier": "0.1.1",
"tar-fs": "2.1.0"
"tar-fs": "2.1.1"
},
"devDependencies": {
"@babel/cli": "7.10.5",
"@babel/core": "7.11.4",
"@babel/plugin-proposal-object-rest-spread": "7.11.0",
"@babel/plugin-transform-runtime": "7.11.0",
"@babel/polyfill": "7.10.4",
"@babel/preset-env": "7.11.0",
"@babel/register": "7.10.5",
"@babel/cli": "7.12.10",
"@babel/core": "7.12.10",
"@babel/plugin-proposal-object-rest-spread": "7.12.1",
"@babel/plugin-transform-runtime": "7.12.10",
"@babel/polyfill": "7.11.5",
"@babel/preset-env": "7.12.11",
"@babel/register": "7.12.10",
"babel-eslint": "10.1.0",
"chai": "4.2.0",
"eslint": "7.7.0",
"eslint": "7.16.0",
"eslint-plugin-mocha": "8.0.0",
"express": "4.17.1",
"mocha": "8.1.1"
"mocha": "8.2.1"
},
@@ -45,0 +45,0 @@ "keywords": [
@@ -7,3 +7,3 @@ # registry-sync
[![npm version](https://badge.fury.io/js/registry-sync.svg)](https://badge.fury.io/js/registry-sync)
[![build status](https://travis-ci.org/heikkipora/registry-sync.svg?branch=master)](https://travis-ci.org/heikkipora/registry-sync)
![Run tests](https://github.com/heikkipora/registry-sync/workflows/Run%20tests/badge.svg)
@@ -65,2 +65,2 @@ ## Pre-requisites
Pull requests are welcome. Kindly check that your code passes ESLint checks by running ```npm run eslint``` first.
Integration tests can be run with ```npm test```. Both are anyway run automatically by Travis CI.
Integration tests can be run with ```npm test```. Both are anyway run automatically by GitHub Actions.
@@ -10,4 +10,2 @@ "use strict";
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
var _axios = _interopRequireDefault(require("axios"));
@@ -26,33 +24,19 @@
function fetchUrl(_x) {
return _fetchUrl.apply(this, arguments);
}
async function fetchUrl(url, isBinary = false) {
if (isBinary) {
return fetch(url, 'arraybuffer');
}
function _fetchUrl() {
_fetchUrl = (0, _asyncToGenerator2.default)(function* (url, isBinary = false) {
if (isBinary) {
return fetch(url, 'arraybuffer');
}
if (!metadataCache[url]) {
// eslint-disable-next-line require-atomic-updates
metadataCache[url] = await fetch(url, 'json');
}
if (!metadataCache[url]) {
// eslint-disable-next-line require-atomic-updates
metadataCache[url] = yield fetch(url, 'json');
}
return metadataCache[url];
});
return _fetchUrl.apply(this, arguments);
return metadataCache[url];
}
function fetch(_x2, _x3) {
return _fetch.apply(this, arguments);
}
function _fetch() {
_fetch = (0, _asyncToGenerator2.default)(function* (url, responseType) {
return (yield client.get(url, {
responseType
})).data;
});
return _fetch.apply(this, arguments);
async function fetch(url, responseType) {
return (await client.get(url, {
responseType
})).data;
}
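Like the rest of the compiled output in this release, the client module above drops Babel's `_asyncToGenerator`/`yield` wrappers in favour of native async/await. As a rough illustration only (a minimal sketch of the caching behaviour visible in the diff, not the project's actual source file), the logic boils down to:

```js
const axios = require('axios')

// Sketch of the pattern shown above: JSON metadata responses are memoized
// per URL, while binary (tarball) responses are fetched every time.
const client = axios.create()
const metadataCache = {}

async function fetch(url, responseType) {
  const response = await client.get(url, {responseType})
  return response.data
}

async function fetchUrl(url, isBinary = false) {
  if (isBinary) {
    return fetch(url, 'arraybuffer')
  }
  if (!metadataCache[url]) {
    metadataCache[url] = await fetch(url, 'json')
  }
  return metadataCache[url]
}
```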
@@ -12,4 +12,2 @@ "use strict";
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
var _lodash = _interopRequireDefault(require("lodash"));
@@ -52,52 +50,38 @@
function download(_x, _x2, _x3, _x4, _x5, _x6) {
return _download.apply(this, arguments);
}
async function download(registryUrl, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps, {
name,
version
}) {
const registryMetadata = await fetchMetadata(name, registryUrl);
function _download() {
_download = (0, _asyncToGenerator2.default)(function* (registryUrl, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps, {
name,
version
}) {
const registryMetadata = yield fetchMetadata(name, registryUrl);
const versionMetadata = _lodash.default.cloneDeep(registryMetadata.versions[version]);
const versionMetadata = _lodash.default.cloneDeep(registryMetadata.versions[version]);
if (!versionMetadata) {
throw new Error(`Unknown package version ${name}@${version}`);
}
if (!versionMetadata) {
throw new Error(`Unknown package version ${name}@${version}`);
}
const localFolder = await ensureLocalFolderExists(name, rootFolder);
let data = await downloadTarball(versionMetadata, enforceTarballsOverHttps);
const localFolder = yield ensureLocalFolderExists(name, rootFolder);
let data = yield downloadTarball(versionMetadata, enforceTarballsOverHttps);
if ((0, _pregyp.hasPrebuiltBinaries)(versionMetadata)) {
const localPregypFolder = await ensureLocalFolderExists(version, localFolder);
await (0, _pregyp.downloadPrebuiltBinaries)(versionMetadata, localPregypFolder, prebuiltBinaryProperties);
data = await (0, _metadata.rewriteMetadataInTarball)(data, versionMetadata, localUrl, localFolder);
}
if ((0, _pregyp.hasPrebuiltBinaries)(versionMetadata)) {
const localPregypFolder = yield ensureLocalFolderExists(version, localFolder);
yield (0, _pregyp.downloadPrebuiltBinaries)(versionMetadata, localPregypFolder, prebuiltBinaryProperties);
data = yield (0, _metadata.rewriteMetadataInTarball)(data, versionMetadata, localUrl, localFolder);
}
yield saveTarball(versionMetadata, data, localFolder);
(0, _metadata.rewriteVersionMetadata)(versionMetadata, data, localUrl);
yield updateMetadata(versionMetadata, registryMetadata, registryUrl, localFolder);
});
return _download.apply(this, arguments);
await saveTarball(versionMetadata, data, localFolder);
(0, _metadata.rewriteVersionMetadata)(versionMetadata, data, localUrl);
await updateMetadata(versionMetadata, registryMetadata, registryUrl, localFolder);
}
function downloadTarball(_x7, _x8) {
return _downloadTarball.apply(this, arguments);
async function downloadTarball({
_id: id,
dist
}, enforceTarballsOverHttps) {
const tarballUrl = enforceTarballsOverHttps ? dist.tarball.replace('http://', 'https://') : dist.tarball;
const data = await fetchTarball(tarballUrl);
(0, _integrity.verifyIntegrity)(data, id, dist);
return data;
}
function _downloadTarball() {
_downloadTarball = (0, _asyncToGenerator2.default)(function* ({
_id: id,
dist
}, enforceTarballsOverHttps) {
const tarballUrl = enforceTarballsOverHttps ? dist.tarball.replace('http://', 'https://') : dist.tarball;
const data = yield fetchTarball(tarballUrl);
(0, _integrity.verifyIntegrity)(data, id, dist);
return data;
});
return _downloadTarball.apply(this, arguments);
}
function saveTarball({
@@ -110,52 +94,33 @@ name,
function updateMetadata(_x9, _x10, _x11, _x12) {
return _updateMetadata.apply(this, arguments);
async function updateMetadata(versionMetadata, defaultMetadata, registryUrl, localFolder) {
const {
name,
version
} = versionMetadata;
const localMetadataPath = metadataPath(name, localFolder);
const localMetadata = await loadMetadata(localMetadataPath, defaultMetadata);
localMetadata.versions[version] = versionMetadata;
localMetadata.time[version] = defaultMetadata.time[version];
localMetadata['dist-tags'].latest = Object.keys(localMetadata.versions).sort(_semver.default.compare).pop();
await saveMetadata(localMetadataPath, localMetadata);
}
function _updateMetadata() {
_updateMetadata = (0, _asyncToGenerator2.default)(function* (versionMetadata, defaultMetadata, registryUrl, localFolder) {
const name = versionMetadata.name,
version = versionMetadata.version;
const localMetadataPath = metadataPath(name, localFolder);
const localMetadata = yield loadMetadata(localMetadataPath, defaultMetadata);
localMetadata.versions[version] = versionMetadata;
localMetadata.time[version] = defaultMetadata.time[version];
localMetadata['dist-tags'].latest = Object.keys(localMetadata.versions).sort(_semver.default.compare).pop();
yield saveMetadata(localMetadataPath, localMetadata);
});
return _updateMetadata.apply(this, arguments);
async function loadMetadata(path, defaultMetadata) {
try {
const json = await fs.readFileAsync(path, 'utf8');
return JSON.parse(json);
} catch (fileNotFound) {
return _objectSpread(_objectSpread({}, defaultMetadata), {}, {
'dist-tags': {},
time: {},
versions: {}
});
}
}
function loadMetadata(_x13, _x14) {
return _loadMetadata.apply(this, arguments);
async function saveMetadata(path, metadata) {
const json = JSON.stringify(metadata, null, 2);
await fs.writeFileAsync(path, json, 'utf8');
}
function _loadMetadata() {
_loadMetadata = (0, _asyncToGenerator2.default)(function* (path, defaultMetadata) {
try {
const json = yield fs.readFileAsync(path, 'utf8');
return JSON.parse(json);
} catch (fileNotFound) {
return _objectSpread(_objectSpread({}, defaultMetadata), {}, {
'dist-tags': {},
time: {},
versions: {}
});
}
});
return _loadMetadata.apply(this, arguments);
}
function saveMetadata(_x15, _x16) {
return _saveMetadata.apply(this, arguments);
}
function _saveMetadata() {
_saveMetadata = (0, _asyncToGenerator2.default)(function* (path, metadata) {
const json = JSON.stringify(metadata, null, 2);
yield fs.writeFileAsync(path, json, 'utf8');
});
return _saveMetadata.apply(this, arguments);
}
function metadataPath(name, localFolder) {
@@ -169,14 +134,7 @@ return _path.default.join(localFolder, 'index.json');
function ensureLocalFolderExists(_x17, _x18) {
return _ensureLocalFolderExists.apply(this, arguments);
}
async function ensureLocalFolderExists(name, rootFolder) {
const localFolder = _path.default.resolve(rootFolder, name);
function _ensureLocalFolderExists() {
_ensureLocalFolderExists = (0, _asyncToGenerator2.default)(function* (name, rootFolder) {
const localFolder = _path.default.resolve(rootFolder, name);
yield (0, _mkdirp.default)(localFolder);
return localFolder;
});
return _ensureLocalFolderExists.apply(this, arguments);
await (0, _mkdirp.default)(localFolder);
return localFolder;
}
@@ -183,0 +141,0 @@
@@ -12,4 +12,2 @@ "use strict";
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
var _ssri = _interopRequireDefault(require("ssri"));
@@ -35,6 +33,5 @@
function sha1(data) {
const _ssri$fromData$sha = (0, _slicedToArray2.default)(_ssri.default.fromData(data, {
const [integrity] = _ssri.default.fromData(data, {
algorithms: ['sha1']
}).sha1, 1),
integrity = _ssri$fromData$sha[0];
}).sha1;
@@ -45,8 +42,7 @@ return integrity.hexDigest();
function sha512(data) {
const _ssri$fromData$sha2 = (0, _slicedToArray2.default)(_ssri.default.fromData(data, {
const [integrity] = _ssri.default.fromData(data, {
algorithms: ['sha512']
}).sha512, 1),
integrity = _ssri$fromData$sha2[0];
}).sha512;
return integrity.toString();
}
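For context on the destructuring above: `ssri.fromData()` returns an object keyed by algorithm name, each value being an array of hash objects, so `[integrity]` picks the first (and only) hash. A minimal sketch of the same helpers in plain modern JavaScript (assuming the `ssri` package as imported above):

```js
const ssri = require('ssri')

// Compute digests the same way as the compiled helpers in the diff above.
function sha1(data) {
  const [integrity] = ssri.fromData(data, {algorithms: ['sha1']}).sha1
  return integrity.hexDigest() // plain hex string, as used for legacy shasum checks
}

function sha512(data) {
  const [integrity] = ssri.fromData(data, {algorithms: ['sha512']}).sha512
  return integrity.toString() // full SRI string, e.g. "sha512-<base64>"
}
```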
@@ -13,4 +13,2 @@ "use strict";
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
var _pregyp = require("./pregyp");
@@ -49,25 +47,18 @@
function rewriteMetadataInTarball(_x, _x2, _x3, _x4) {
return _rewriteMetadataInTarball.apply(this, arguments);
}
async function rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder) {
const tmpFolder = _path.default.join(localFolder, '.tmp');
function _rewriteMetadataInTarball() {
_rewriteMetadataInTarball = (0, _asyncToGenerator2.default)(function* (data, versionMetadata, localUrl, localFolder) {
const tmpFolder = _path.default.join(localFolder, '.tmp');
await (0, _mkdirp.default)(tmpFolder);
await extractTgz(data, tmpFolder);
yield (0, _mkdirp.default)(tmpFolder);
yield extractTgz(data, tmpFolder);
const manifestPath = _path.default.join(tmpFolder, 'package', 'package.json');
const manifestPath = _path.default.join(tmpFolder, 'package', 'package.json');
const json = yield fs.readFileAsync(manifestPath, 'utf8');
const metadata = JSON.parse(json);
metadata.binary.host = localUrl.origin;
metadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
yield fs.writeFileAsync(manifestPath, JSON.stringify(metadata, null, 2));
const updatedData = yield compressTgz(tmpFolder);
yield rimrafAsync(tmpFolder);
return updatedData;
});
return _rewriteMetadataInTarball.apply(this, arguments);
const json = await fs.readFileAsync(manifestPath, 'utf8');
const metadata = JSON.parse(json);
metadata.binary.host = localUrl.origin;
metadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
await fs.writeFileAsync(manifestPath, JSON.stringify(metadata, null, 2));
const updatedData = await compressTgz(tmpFolder);
await rimrafAsync(tmpFolder);
return updatedData;
}
@@ -74,0 +65,0 @@
@@ -11,4 +11,2 @@ "use strict";
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
var _client = require("./client");
@@ -24,8 +22,2 @@
function _createForOfIteratorHelper(o, allowArrayLike) { var it; if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = o[Symbol.iterator](); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
const fs = _bluebird.default.promisifyAll(require('fs'));
@@ -39,66 +31,35 @@
function downloadPrebuiltBinaries(_x, _x2, _x3) {
return _downloadPrebuiltBinaries.apply(this, arguments);
}
async function downloadPrebuiltBinaries(versionMetadata, localFolder, prebuiltBinaryProperties) {
const {
binary,
name,
version
} = versionMetadata;
function _downloadPrebuiltBinaries() {
_downloadPrebuiltBinaries = (0, _asyncToGenerator2.default)(function* (versionMetadata, localFolder, prebuiltBinaryProperties) {
const binary = versionMetadata.binary,
name = versionMetadata.name,
version = versionMetadata.version;
for (const {
abi,
arch,
platform
} of prebuiltBinaryProperties) {
const napiVersions = binary.napi_versions || ['unknown'];
var _iterator = _createForOfIteratorHelper(prebuiltBinaryProperties),
_step;
try {
for (_iterator.s(); !(_step = _iterator.n()).done;) {
const _step$value = _step.value,
abi = _step$value.abi,
arch = _step$value.arch,
platform = _step$value.platform;
const napiVersions = binary.napi_versions || ['unknown'];
var _iterator2 = _createForOfIteratorHelper(napiVersions),
_step2;
try {
for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
const napiVersion = _step2.value;
yield downloadPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion, localFolder);
}
} catch (err) {
_iterator2.e(err);
} finally {
_iterator2.f();
}
}
} catch (err) {
_iterator.e(err);
} finally {
_iterator.f();
for (const napiVersion of napiVersions) {
await downloadPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion, localFolder);
}
});
return _downloadPrebuiltBinaries.apply(this, arguments);
}
}
function downloadPrebuiltBinary(_x4, _x5, _x6, _x7, _x8, _x9, _x10, _x11) {
return _downloadPrebuiltBinary.apply(this, arguments);
}
async function downloadPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion, localFolder) {
try {
const data = await fetchPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion);
await fs.writeFileAsync(prebuiltBinaryFilePath(name, version, binary, abi, platform, arch, napiVersion, localFolder), data);
} catch (err) {
// pre-built binaries are commonly not available on all platforms (and S3 will commonly respond with 403 for a non-existent file)
const fileNotFoundError = err.response && (err.response.status == 403 || err.response.status == 404);
function _downloadPrebuiltBinary() {
_downloadPrebuiltBinary = (0, _asyncToGenerator2.default)(function* (name, version, binary, abi, platform, arch, napiVersion, localFolder) {
try {
const data = yield fetchPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion);
yield fs.writeFileAsync(prebuiltBinaryFilePath(name, version, binary, abi, platform, arch, napiVersion, localFolder), data);
} catch (err) {
// pre-built binaries are commonly not available on all platforms (and S3 will commonly respond with 403 for a non-existent file)
const fileNotFoundError = err.response && (err.response.status == 403 || err.response.status == 404);
if (!fileNotFoundError) {
console.error(`Unexpected error fetching prebuilt binary for ${name} and ABI v${abi} on ${arch}-${platform} (n-api version ${napiVersion})`);
throw err;
}
if (!fileNotFoundError) {
console.error(`Unexpected error fetching prebuilt binary for ${name} and ABI v${abi} on ${arch}-${platform} (n-api version ${napiVersion})`);
throw err;
}
});
return _downloadPrebuiltBinary.apply(this, arguments);
}
}
@@ -105,0 +66,0 @@
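The pregyp changes above keep the same tolerance for missing pre-built binaries: a 403 or 404 from the binary host (typically S3) is treated as "not published for this platform" rather than a failure. A hypothetical standalone helper showing that pattern with axios (names are illustrative, not taken from the project):

```js
const axios = require('axios')

// Illustrative only: download a URL, but treat 403/404 as "binary not published
// for this platform" and return null instead of throwing.
async function downloadIfAvailable(url) {
  try {
    const response = await axios.get(url, {responseType: 'arraybuffer'})
    return response.data
  } catch (err) {
    const fileNotFound = err.response && (err.response.status === 403 || err.response.status === 404)
    if (!fileNotFound) {
      throw err // unexpected errors still propagate
    }
    return null
  }
}
```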
@@ -12,4 +12,2 @@ "use strict";
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
var _lodash = _interopRequireDefault(require("lodash"));
@@ -21,65 +19,38 @@
function updateDependenciesCache(_x, _x2, _x3) {
return _updateDependenciesCache.apply(this, arguments);
async function updateDependenciesCache(newDependencies, cacheFilePath, prebuiltBinaryProperties) {
const {
dependencies: cachedDependencies
} = await loadCache(cacheFilePath);
const dependencies = (0, _lodash.default)(cachedDependencies).concat(newDependencies).sortBy('id').sortedUniqBy('id').value();
const data = {
dependencies,
prebuiltBinaryProperties,
prebuiltBinaryNApiSupport: true
};
return fs.writeFileAsync(cacheFilePath, JSON.stringify(data), 'utf8');
}
function _updateDependenciesCache() {
_updateDependenciesCache = (0, _asyncToGenerator2.default)(function* (newDependencies, cacheFilePath, prebuiltBinaryProperties) {
const _yield$loadCache = yield loadCache(cacheFilePath),
cachedDependencies = _yield$loadCache.dependencies;
async function dependenciesNotInCache(dependencies, cacheFilePath, prebuiltBinaryProperties) {
const {
dependencies: cachedDependencies,
prebuiltBinaryProperties: cachedPrebuiltBinaryProperties,
prebuiltBinaryNApiSupport
} = await loadCache(cacheFilePath);
const dependencies = (0, _lodash.default)(cachedDependencies).concat(newDependencies).sortBy('id').sortedUniqBy('id').value();
const data = {
dependencies,
prebuiltBinaryProperties,
prebuiltBinaryNApiSupport: true
};
return fs.writeFileAsync(cacheFilePath, JSON.stringify(data), 'utf8');
});
return _updateDependenciesCache.apply(this, arguments);
}
if (cachedDependencies.length > 0 && (!_lodash.default.isEqual(prebuiltBinaryProperties, cachedPrebuiltBinaryProperties) || !prebuiltBinaryNApiSupport)) {
console.log(`Pre-built binary properties changed, re-downloading all current packages`);
return dependencies;
}
function dependenciesNotInCache(_x4, _x5, _x6) {
return _dependenciesNotInCache.apply(this, arguments);
return _lodash.default.differenceBy(dependencies, cachedDependencies, 'id');
}
function _dependenciesNotInCache() {
_dependenciesNotInCache = (0, _asyncToGenerator2.default)(function* (dependencies, cacheFilePath, prebuiltBinaryProperties) {
const _yield$loadCache2 = yield loadCache(cacheFilePath),
cachedDependencies = _yield$loadCache2.dependencies,
cachedPrebuiltBinaryProperties = _yield$loadCache2.prebuiltBinaryProperties,
prebuiltBinaryNApiSupport = _yield$loadCache2.prebuiltBinaryNApiSupport;
async function loadCache(cacheFilePath) {
try {
const json = await fs.readFileAsync(cacheFilePath, 'utf8');
const data = JSON.parse(json);
if (cachedDependencies.length > 0 && (!_lodash.default.isEqual(prebuiltBinaryProperties, cachedPrebuiltBinaryProperties) || !prebuiltBinaryNApiSupport)) {
console.log(`Pre-built binary properties changed, re-downloading all current packages`);
return dependencies;
}
return _lodash.default.differenceBy(dependencies, cachedDependencies, 'id');
});
return _dependenciesNotInCache.apply(this, arguments);
}
function loadCache(_x7) {
return _loadCache.apply(this, arguments);
}
function _loadCache() {
_loadCache = (0, _asyncToGenerator2.default)(function* (cacheFilePath) {
try {
const json = yield fs.readFileAsync(cacheFilePath, 'utf8');
const data = JSON.parse(json);
if (Array.isArray(data)) {
return {
dependencies: data,
prebuiltBinaryProperties: [],
prebuiltBinaryNApiSupport: true
};
}
return data;
} catch (fileNotFound) {
if (Array.isArray(data)) {
return {
dependencies: [],
dependencies: data,
prebuiltBinaryProperties: [],
@@ -89,24 +60,24 @@ prebuiltBinaryNApiSupport: true
}
});
return _loadCache.apply(this, arguments);
}
function dependenciesFromPackageLock(_x8, _x9) {
return _dependenciesFromPackageLock.apply(this, arguments);
return data;
} catch (fileNotFound) {
return {
dependencies: [],
prebuiltBinaryProperties: [],
prebuiltBinaryNApiSupport: true
};
}
}
function _dependenciesFromPackageLock() {
_dependenciesFromPackageLock = (0, _asyncToGenerator2.default)(function* (path, includeDevDependencies) {
const json = yield fs.readFileAsync(path, 'utf8');
const dependencyTree = dependenciesRecursive(JSON.parse(json), includeDevDependencies);
return (0, _lodash.default)(dependencyTree).flattenDeep().map(({
name,
version
}) => ({
id: `${name}@${version}`,
name,
version
})).sortBy('id').sortedUniqBy('id').value();
});
return _dependenciesFromPackageLock.apply(this, arguments);
async function dependenciesFromPackageLock(path, includeDevDependencies) {
const json = await fs.readFileAsync(path, 'utf8');
const dependencyTree = dependenciesRecursive(JSON.parse(json), includeDevDependencies);
return (0, _lodash.default)(dependencyTree).flattenDeep().map(({
name,
version
}) => ({
id: `${name}@${version}`,
name,
version
})).sortBy('id').sortedUniqBy('id').value();
}
@@ -113,0 +84,0 @@
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); | ||
Object.defineProperty(exports, "__esModule", { | ||
@@ -10,22 +8,13 @@ value: true | ||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator")); | ||
var _download = require("./download"); | ||
var _download = require("../src/download"); | ||
var _resolve = require("./resolve"); | ||
var _resolve = require("../src/resolve"); | ||
function synchronize(_x) { | ||
return _synchronize.apply(this, arguments); | ||
} | ||
function _synchronize() { | ||
_synchronize = (0, _asyncToGenerator2.default)(function* (options) { | ||
const cacheFilePath = `${options.rootFolder}/.registry-sync-cache.json`; | ||
const packages = yield (0, _resolve.dependenciesFromPackageLock)(options.manifest, options.includeDevDependencies); | ||
const newPackages = yield (0, _resolve.dependenciesNotInCache)(packages, cacheFilePath, options.prebuiltBinaryProperties); | ||
yield (0, _download.downloadAll)(newPackages, options); | ||
yield (0, _resolve.updateDependenciesCache)(newPackages, cacheFilePath, options.prebuiltBinaryProperties); | ||
return newPackages; | ||
}); | ||
return _synchronize.apply(this, arguments); | ||
async function synchronize(options) { | ||
const cacheFilePath = `${options.rootFolder}/.registry-sync-cache.json`; | ||
const packages = await (0, _resolve.dependenciesFromPackageLock)(options.manifest, options.includeDevDependencies); | ||
const newPackages = await (0, _resolve.dependenciesNotInCache)(packages, cacheFilePath, options.prebuiltBinaryProperties); | ||
await (0, _download.downloadAll)(newPackages, options); | ||
await (0, _resolve.updateDependenciesCache)(newPackages, cacheFilePath, options.prebuiltBinaryProperties); | ||
return newPackages; | ||
} |
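The compiled entry point above shows the overall flow: resolve package versions from a package-lock file, diff them against the local cache file, download whatever is missing, then update the cache. A hypothetical usage sketch based only on the option names visible in the compiled code (the require path and the extra options are assumptions; the project README documents the real API and CLI):

```js
// Hypothetical usage sketch - option names marked "visible above" come from the
// compiled code in this diff; everything else is assumed, not confirmed.
const {synchronize} = require('registry-sync/dist/sync') // hypothetical require path

async function main() {
  const newPackages = await synchronize({
    manifest: './package-lock.json',       // lock file to resolve dependencies from (visible above)
    rootFolder: '/var/local-registry',     // packages and the cache file are written here (visible above)
    includeDevDependencies: false,         // visible above
    prebuiltBinaryProperties: [],          // visible above; e.g. [{abi, arch, platform}] entries
    // The options below match parameter names in the download module diff,
    // but whether they are passed through like this is an assumption:
    registryUrl: 'https://registry.npmjs.org',
    localUrl: new URL('https://npm.example.com'),
    enforceTarballsOverHttps: true
  })
  console.log(`Downloaded ${newPackages.length} new package versions`)
}

main().catch(err => {
  console.error(err)
  process.exit(1)
})
```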
+ Added @babel/runtime@7.12.5 (transitive)
+ Added axios@0.21.1 (transitive)
+ Added commander@6.2.1 (transitive)
+ Added lru-cache@6.0.0 (transitive)
+ Added semver@7.3.4 (transitive)
+ Added tar-fs@2.1.1 (transitive)
- Removed @babel/runtime@7.11.2 (transitive)
- Removed axios@0.20.0 (transitive)
- Removed commander@6.0.0 (transitive)
- Removed semver@7.3.2 (transitive)
- Removed tar-fs@2.1.0 (transitive)
Updated @babel/runtime@7.12.5
Updated axios@0.21.1
Updated commander@6.2.1
Updated semver@7.3.4
Updated tar-fs@2.1.1