New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

registry-sync

Package Overview
Dependencies
Maintainers
1
Versions
89
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

registry-sync - npm Package Compare versions

Comparing version 3.0.0 to 3.1.0

28

package.json
{
"name": "registry-sync",
"version": "3.0.0",
"version": "3.1.0",
"description": "synchronize a remote npm registry for private use",

@@ -11,4 +11,5 @@ "repository": "https://github.com/heikkipora/registry-sync",

"build": "./build-npm",
"eslint": "eslint --fix src/* bin/* test/*.js",
"test": "mocha --require @babel/register --require @babel/polyfill --timeout 120000 test/*.js"
"eslint": "eslint . --ext .ts --fix",
"test": "mocha -r ts-node/register --timeout 120000 test/*.ts",
"release-test": "cd release-test && ./run-sync-install-cycle.sh"
},

@@ -20,15 +21,16 @@ "author": "Heikki Pora",

"commander": "7.0.0",
"lodash": "4.17.20",
"semver": "7.3.4",
"ssri": "8.0.1",
"streamifier": "0.1.1",
"tar-fs": "2.1.1"
},
"devDependencies": {
"@babel/cli": "7.12.10",
"@babel/core": "7.12.10",
"@babel/polyfill": "7.11.5",
"@babel/preset-env": "7.12.11",
"@babel/register": "7.12.10",
"babel-eslint": "10.1.0",
"@types/chai": "4.2.14",
"@types/lodash": "4.14.168",
"@types/mocha": "8.2.0",
"@types/node": "14.14.22",
"@types/semver": "7.3.4",
"@types/ssri": "7.1.0",
"@types/tar-fs": "2.0.0",
"@typescript-eslint/eslint-plugin": "4.14.2",
"@typescript-eslint/parser": "4.14.2",
"chai": "4.2.0",

@@ -38,3 +40,5 @@ "eslint": "7.19.0",

"express": "4.17.1",
"mocha": "8.2.1"
"mocha": "8.2.1",
"ts-node": "9.1.1",
"typescript": "4.1.3"
},

@@ -41,0 +45,0 @@ "keywords": [

@@ -37,3 +37,8 @@ # registry-sync

registry-sync --root ./local-registry --manifest ./package-lock.json --localUrl http://localhost:8000 --binaryAbi 48,57 --binaryArch x64 --binaryPlatform darwin,linux
registry-sync --root ./local-registry \
--manifest ./package-lock.json \
--localUrl http://localhost:8000 \
--binaryAbi 48,57 \
--binaryArch x64 \
--binaryPlatform darwin,linux

@@ -40,0 +45,0 @@ Re-executing ```registry-sync``` will only download and update files for new package versions.

"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.fetchUrl = fetchUrl;
var _axios = _interopRequireDefault(require("axios"));
var _https = _interopRequireDefault(require("https"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
Object.defineProperty(exports, "__esModule", { value: true });
exports.fetchBinaryData = exports.fetchJsonWithCacheCloned = void 0;
const https = require("https");
const axios_1 = require("axios");
const metadataCache = {};
const client = _axios.default.create({
httpsAgent: new _https.default.Agent({
keepAlive: true
}),
timeout: 30 * 1000
const client = axios_1.default.create({
httpsAgent: new https.Agent({ keepAlive: true }),
timeout: 30 * 1000
});
async function fetchUrl(url, isBinary = false) {
if (isBinary) {
/**
 * Fetches a JSON document from `url`, memoizing the raw response per URL for
 * the lifetime of the process. A deep copy is returned on every call so
 * callers can mutate the result without corrupting the shared cache.
 * NOTE(review): concurrent first calls for the same URL can both fetch; the
 * eslint suppression below acknowledges the post-await cache write.
 */
async function fetchJsonWithCacheCloned(url) {
    if (!metadataCache[url]) {
        // eslint-disable-next-line require-atomic-updates
        metadataCache[url] = await fetch(url, 'json');
    }
    return cloneDeep(metadataCache[url]);
}
exports.fetchJsonWithCacheCloned = fetchJsonWithCacheCloned;
/**
 * Returns a deep copy of a JSON-serializable metadata object via a
 * stringify/parse round-trip. Registry metadata is plain JSON, so the
 * round-trip is lossless for this use case.
 */
function cloneDeep(metadata) {
    const serialized = JSON.stringify(metadata);
    return JSON.parse(serialized);
}
/** Downloads `url` as raw bytes (axios 'arraybuffer' response). */
function fetchBinaryData(url) {
    const responseType = 'arraybuffer';
    return fetch(url, responseType);
}
if (!metadataCache[url]) {
// eslint-disable-next-line require-atomic-updates
metadataCache[url] = await fetch(url, 'json');
}
return metadataCache[url];
}
exports.fetchBinaryData = fetchBinaryData;
/**
 * Performs a GET request with the shared keep-alive axios `client` and
 * returns only the response body. `responseType` is forwarded to axios
 * ('json' for metadata, 'arraybuffer' for tarballs/binaries).
 *
 * Fix: the diff view left an orphan duplicated `return`/`}` pair after the
 * function (old/new versions interleaved), which is a SyntaxError; deduplicated.
 */
async function fetch(url, responseType) {
    return (await client.get(url, { responseType })).data;
}
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadAll = void 0;
const fs = require("fs");
const path = require("path");
const semver = require("semver");
const url = require("url");
const integrity_1 = require("./integrity");
const pregyp_1 = require("./pregyp");
const client_1 = require("./client");
const metadata_1 = require("./metadata");
/**
 * Downloads every package in `packages` from the registry into `rootFolder`,
 * using a `download` call pre-bound with the shared configuration.
 * Packages are processed one at a time — NOTE(review): the sequential loop
 * appears deliberate (limits registry load); confirm before parallelizing.
 */
async function downloadAll(packages, { localUrl, prebuiltBinaryProperties, registryUrl, rootFolder, enforceTarballsOverHttps }) {
    const downloadFromRegistry = download.bind(null, registryUrl, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps);
    for (const pkg of packages) {
        await downloadFromRegistry(pkg);
    }
}
exports.downloadAll = downloadAll;
var _lodash = _interopRequireDefault(require("lodash"));
var _client = require("./client");
var _fs = _interopRequireDefault(require("fs"));
var _path = _interopRequireDefault(require("path"));
var _semver = _interopRequireDefault(require("semver"));
var _url = _interopRequireDefault(require("url"));
var _integrity = require("./integrity");
var _pregyp = require("./pregyp");
var _metadata = require("./metadata");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
async function downloadAll(packages, {
localUrl,
prebuiltBinaryProperties,
registryUrl,
rootFolder,
enforceTarballsOverHttps = true
}) {
const downloadFromRegistry = download.bind(null, registryUrl, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps);
for (const pkg of packages) {
await downloadFromRegistry(pkg);
}
/**
 * Downloads a single package version into the local registry:
 * fetch registry metadata (cloned, so mutations stay local), download and
 * verify the tarball, mirror any node-pre-gyp pre-built binaries, rewrite the
 * metadata to point at the local registry, and persist tarball + index.json.
 * Throws when the requested version does not exist in the registry metadata.
 */
async function download(registryUrl, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps, { name, version }) {
    const registryMetadata = await fetchMetadataCloned(name, registryUrl);
    const versionMetadata = registryMetadata.versions[version];
    if (!versionMetadata) {
        throw new Error(`Unknown package version ${name}@${version}`);
    }
    const localFolder = await ensureLocalFolderExists(name, rootFolder);
    let data = await downloadTarball(versionMetadata, enforceTarballsOverHttps);
    let actualNapiVersions = [];
    if (pregyp_1.hasPrebuiltBinaries(versionMetadata)) {
        // Pre-built binaries live in a per-version subfolder; the tarball's
        // embedded package.json is rewritten, so `data` is replaced here.
        const localPregypFolder = await ensureLocalFolderExists(version, localFolder);
        actualNapiVersions = await pregyp_1.downloadPrebuiltBinaries(versionMetadata, localPregypFolder, prebuiltBinaryProperties);
        data = await metadata_1.rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder, actualNapiVersions);
    }
    await saveTarball(versionMetadata, data, localFolder);
    metadata_1.rewriteVersionMetadata(versionMetadata, data, localUrl, actualNapiVersions);
    await updateMetadata(versionMetadata, registryMetadata, registryUrl, localFolder);
}
async function download(registryUrl, localUrl, rootFolder, prebuiltBinaryProperties, enforceTarballsOverHttps, {
name,
version
}) {
const registryMetadata = await fetchMetadata(name, registryUrl);
const versionMetadata = _lodash.default.cloneDeep(registryMetadata.versions[version]);
if (!versionMetadata) {
throw new Error(`Unknown package version ${name}@${version}`);
}
const localFolder = await ensureLocalFolderExists(name, rootFolder);
let data = await downloadTarball(versionMetadata, enforceTarballsOverHttps);
if ((0, _pregyp.hasPrebuiltBinaries)(versionMetadata)) {
const localPregypFolder = await ensureLocalFolderExists(version, localFolder);
await (0, _pregyp.downloadPrebuiltBinaries)(versionMetadata, localPregypFolder, prebuiltBinaryProperties);
data = await (0, _metadata.rewriteMetadataInTarball)(data, versionMetadata, localUrl, localFolder);
}
await saveTarball(versionMetadata, data, localFolder);
(0, _metadata.rewriteVersionMetadata)(versionMetadata, data, localUrl);
await updateMetadata(versionMetadata, registryMetadata, registryUrl, localFolder);
/**
 * Fetches a version's tarball and verifies it against the registry metadata.
 * When `enforceTarballsOverHttps` is set, an http:// scheme is upgraded to
 * https:// — anchored to the start of the URL so only the scheme (not an
 * embedded 'http://' substring later in the URL) can be rewritten.
 */
async function downloadTarball({ _id: id, dist }, enforceTarballsOverHttps) {
    const tarballUrl = enforceTarballsOverHttps ? dist.tarball.replace(/^http:\/\//, 'https://') : dist.tarball;
    const data = await client_1.fetchBinaryData(tarballUrl);
    integrity_1.verifyIntegrity(data, id, dist);
    return data;
}
async function downloadTarball({
_id: id,
dist
}, enforceTarballsOverHttps) {
const tarballUrl = enforceTarballsOverHttps ? dist.tarball.replace('http://', 'https://') : dist.tarball;
const data = await fetchTarball(tarballUrl);
(0, _integrity.verifyIntegrity)(data, id, dist);
return data;
/** Writes a package version's tarball bytes to its canonical local path. */
function saveTarball({ name, version }, data, localFolder) {
    const destination = tarballPath(name, version, localFolder);
    return fs.promises.writeFile(destination, data);
}
function saveTarball({
name,
version
}, data, localFolder) {
return _fs.default.promises.writeFile(tarballPath(name, version, localFolder), data);
}
/**
 * Merges a version's metadata into the local index.json for the package:
 * records the version and its publish time, and recomputes `dist-tags.latest`
 * as the highest semver among locally mirrored versions.
 *
 * Fix: the diff view interleaved the 3.0.0 and 3.1.0 bodies, redeclaring
 * `version` in the same scope (SyntaxError); deduplicated to the 3.1.0 body.
 */
async function updateMetadata(versionMetadata, defaultMetadata, registryUrl, localFolder) {
    const { version } = versionMetadata;
    const localMetadataPath = path.join(localFolder, 'index.json');
    const localMetadata = await loadMetadata(localMetadataPath, defaultMetadata);
    localMetadata.versions[version] = versionMetadata;
    localMetadata.time[version] = defaultMetadata.time[version];
    localMetadata['dist-tags'].latest = Object.keys(localMetadata.versions).sort(semver.compare).pop();
    await saveMetadata(localMetadataPath, localMetadata);
}
/**
 * Reads and parses the local metadata file at `path`. When the file does not
 * exist (first sync of a package) — or cannot be parsed — falls back to
 * `defaultMetadata` with empty dist-tags/time/versions maps.
 *
 * Fix: the diff view left two interleaved try/catch bodies (the first, using
 * the old `_fs` binding, returned early and made the second dead code);
 * deduplicated to the 3.1.0 body.
 */
async function loadMetadata(path, defaultMetadata) {
    try {
        const json = await fs.promises.readFile(path, 'utf8');
        return JSON.parse(json);
    }
    catch (fileNotFound) {
        return { ...defaultMetadata, 'dist-tags': {}, time: {}, versions: {} };
    }
}
/**
 * Serializes `metadata` as pretty-printed JSON and writes it to `path`.
 * Returns the fs.promises.writeFile promise.
 *
 * Fix: the diff view duplicated `const json` in one scope (SyntaxError);
 * deduplicated to the 3.1.0 body.
 */
function saveMetadata(path, metadata) {
    const json = JSON.stringify(metadata, null, 2);
    return fs.promises.writeFile(path, json, 'utf8');
}
// Path of the local metadata document (index.json) for a package.
// NOTE(review): `name` is unused here; it is kept for signature parity with
// tarballPath — confirm before removing.
function metadataPath(name, localFolder) {
    return _path.default.join(localFolder, 'index.json');
}
/**
 * Absolute path of a version's tarball inside the local package folder.
 * Fix: the diff view left two stacked return statements (old `_path` form
 * unreachable-shadowing the new one); deduplicated to the 3.1.0 body.
 */
function tarballPath(name, version, localFolder) {
    return path.join(localFolder, metadata_1.tarballFilename(name, version));
}
/**
 * Resolves (and creates if missing, including parents) the local folder for
 * `name` under `rootFolder`; returns the absolute path.
 *
 * Fix: the diff view duplicated `const localFolder` in one scope
 * (SyntaxError); deduplicated to the 3.1.0 body.
 */
async function ensureLocalFolderExists(name, rootFolder) {
    const localFolder = path.resolve(rootFolder, name);
    await fs.promises.mkdir(localFolder, { recursive: true });
    return localFolder;
}
// Fix: the diff view nested these three helpers inside one never-closed
// function body (brace imbalance); restored as three top-level functions.

/** 3.0.0 helper: downloads a tarball as binary via the old client API. */
function fetchTarball(tarballUrl) {
    return (0, _client.fetchUrl)(tarballUrl, true);
}
/**
 * 3.1.0 helper: fetches (cached, cloned) registry metadata for a package.
 * Scoped package names contain '/', which must be escaped as %2f in the URL.
 */
function fetchMetadataCloned(name, registryUrl) {
    const urlSafeName = name.replace(/\//g, '%2f');
    return client_1.fetchJsonWithCacheCloned(url.resolve(registryUrl, urlSafeName));
}
/** 3.0.0 helper: fetches registry metadata (uncached) for a package. */
function fetchMetadata(name, registryUrl) {
    const urlSafeName = name.replace(/\//g, '%2f');
    return (0, _client.fetchUrl)(_url.default.resolve(registryUrl, urlSafeName));
}
"use strict";
var _commander = _interopRequireDefault(require("commander"));
var _sync = require("./sync");
var _url = require("url");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
_commander.default.storeOptionsAsProperties().version(require(`${__dirname}/../package.json`).version).requiredOption('--root <path>', 'Path to save NPM package tarballs and metadata to').requiredOption('--manifest <file>', 'Path to a package-lock.json file to use as catalog for mirrored NPM packages.').requiredOption('--localUrl <url>', 'URL to use as root in stored package metadata (i.e. where folder defined as --root will be exposed at)').requiredOption('--binaryAbi <list>', 'Comma-separated list of node C++ ABI numbers to download pre-built binaries for. See NODE_MODULE_VERSION column in https://nodejs.org/en/download/releases/').requiredOption('--binaryArch <list>', 'Comma-separated list of CPU architectures to download pre-built binaries for. Valid values: arm, ia32, and x64').requiredOption('--binaryPlatform <list>', 'Comma-separated list of OS platforms to download pre-built binaries for. Valid values: linux, darwin, win32, sunos, freebsd, openbsd, and aix').option('--registryUrl [url]', 'Optional URL to use as NPM registry when fetching packages. Default value is https://registry.npmjs.org').option('--dontEnforceHttps', 'Disable the default behavior of downloading tarballs over HTTPS (will use whichever protocol is defined in the registry metadata)').option('--includeDev', 'Include also packages found from devDependencies section of the --manifest').parse(process.argv);
const abis = _commander.default.binaryAbi.split(',');
const architectures = _commander.default.binaryArch.split(',');
const platforms = _commander.default.binaryPlatform.split(',');
const prebuiltBinaryProperties = abis.map(abi => architectures.map(arch => platforms.map(platform => ({
abi,
arch,
platform
}))).flat()).flat();
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const program = require("commander");
const sync_1 = require("./sync");
const url_1 = require("url");
const { version } = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf-8'));
program
.version(version)
.requiredOption('--root <path>', 'Path to save NPM package tarballs and metadata to')
.requiredOption('--manifest <file>', 'Path to a package-lock.json file to use as catalog for mirrored NPM packages.')
.requiredOption('--localUrl <url>', 'URL to use as root in stored package metadata (i.e. where folder defined as --root will be exposed at)')
.requiredOption('--binaryAbi <list>', 'Comma-separated list of node C++ ABI numbers to download pre-built binaries for. See NODE_MODULE_VERSION column in https://nodejs.org/en/download/releases/')
.requiredOption('--binaryArch <list>', 'Comma-separated list of CPU architectures to download pre-built binaries for. Valid values: arm, ia32, and x64')
.requiredOption('--binaryPlatform <list>', 'Comma-separated list of OS platforms to download pre-built binaries for. Valid values: linux, darwin, win32, sunos, freebsd, openbsd, and aix')
.option('--registryUrl [url]', 'Optional URL to use as NPM registry when fetching packages. Default value is https://registry.npmjs.org')
.option('--dontEnforceHttps', 'Disable the default behavior of downloading tarballs over HTTPS (will use whichever protocol is defined in the registry metadata)')
.option('--includeDev', 'Include also packages found from devDependencies section of the --manifest')
.parse(process.argv);
const rawOptions = program.opts();
const abis = rawOptions.binaryAbi.split(',').map(Number);
const architectures = rawOptions.binaryArch.split(',');
const platforms = rawOptions.binaryPlatform.split(',');
const prebuiltBinaryProperties = abis.map(abi => architectures.map(arch => platforms.map(platform => ({ abi, arch, platform }))).flat()).flat();
// Assemble the options object for synchronize() from the parsed CLI flags.
// Fix: the diff view interleaved the 3.0.0 (`_commander`) and 3.1.0
// (`rawOptions`) object literals, producing duplicate keys and a missing
// comma (SyntaxError); deduplicated to the 3.1.0 form.
const options = {
    localUrl: new url_1.URL(rawOptions.localUrl),
    manifest: rawOptions.manifest,
    prebuiltBinaryProperties,
    registryUrl: rawOptions.registryUrl || 'https://registry.npmjs.org',
    rootFolder: rawOptions.root,
    enforceTarballsOverHttps: Boolean(!rawOptions.dontEnforceHttps),
    includeDevDependencies: Boolean(rawOptions.includeDev)
};
sync_1.synchronize(options)
    .then(newPackages => console.log(`Downloaded ${newPackages.length} packages`))
    .catch(err => {
        // Surface failures instead of leaving an unhandled promise rejection.
        console.error(err);
        process.exitCode = 1;
    });
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.verifyIntegrity = verifyIntegrity;
exports.sha1 = sha1;
exports.sha512 = sha512;
var _ssri = _interopRequireDefault(require("ssri"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
Object.defineProperty(exports, "__esModule", { value: true });
exports.sha512 = exports.sha1 = exports.verifyIntegrity = void 0;
const ssri = require("ssri");
/**
 * Validates downloaded tarball bytes against the integrity values from the
 * registry metadata for package `id`. Prefers the SHA-512 `integrity` field
 * (checked via ssri.checkData) and falls back to the legacy SHA-1 `shasum`.
 * Throws when neither value is present or when a check fails.
 *
 * Fixes: the diff view interleaved the 3.0.0 and 3.1.0 function bodies with
 * the 3.1.0 module header, leaving unbalanced braces (SyntaxError) —
 * reconstructed the 3.1.0 version; also removed the duplicated words in the
 * SHA-1 error message ("failed for failed for").
 */
function verifyIntegrity(data, id, { integrity, shasum }) {
    if (!integrity && !shasum) {
        throw new Error(`Integrity values not present in metadata for ${id}`);
    }
    if (integrity) {
        if (!ssri.checkData(data, integrity)) {
            throw new Error(`Integrity check failed for ${id}`);
        }
    }
    else if (sha1(data) != shasum) {
        throw new Error(`Integrity check with SHA1 failed for ${id}`);
    }
}
exports.verifyIntegrity = verifyIntegrity;
/**
 * Hex-encoded SHA-1 digest of `data` (the legacy npm `shasum` format).
 * Fix: the diff view stacked the 3.0.0 body above an unreachable 3.1.0
 * return; deduplicated to the 3.1.0 body.
 */
function sha1(data) {
    return ssri.fromData(data, { algorithms: ['sha1'] }).hexDigest();
}
exports.sha1 = sha1;
/**
 * SHA-512 subresource-integrity string (e.g. "sha512-…") of `data`, the
 * format used by npm's `dist.integrity` field.
 * Fix: the diff view left an orphan `return` after the closing brace
 * (SyntaxError); deduplicated to the 3.1.0 body.
 */
function sha512(data) {
    return ssri.fromData(data, { algorithms: ['sha512'] }).toString();
}
exports.sha512 = sha512;
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "__esModule", { value: true });
exports.tarballFilename = exports.extractTgz = exports.rewriteMetadataInTarball = exports.rewriteVersionMetadata = void 0;
const fs = require("fs");
const path = require("path");
const tar = require("tar-fs");
const zlib = require("zlib");
const pregyp_1 = require("./pregyp");
const stream_1 = require("stream");
const integrity_1 = require("./integrity");
/**
 * Points a version's metadata at the local registry: rewrites the tarball URL
 * and, for node-pre-gyp packages, the binary host/remote_path and the
 * supported N-API versions. The integrity values are recomputed from `data`
 * because the tarball bytes change when its embedded package.json is
 * rewritten (see rewriteMetadataInTarball).
 */
function rewriteVersionMetadata(versionMetadata, data, localUrl, actualNapiVersions) {
    versionMetadata.dist.tarball = localTarballUrl(versionMetadata, localUrl);
    if (pregyp_1.hasPrebuiltBinaries(versionMetadata)) {
        versionMetadata.binary.host = localUrl.origin;
        versionMetadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
        versionMetadata.binary.napi_versions = actualNapiVersions;
        versionMetadata.dist.integrity = integrity_1.sha512(data);
        versionMetadata.dist.shasum = integrity_1.sha1(data);
    }
}
exports.rewriteVersionMetadata = rewriteVersionMetadata;
/**
 * Rewrites the `binary` section of the package.json *inside* the tarball so
 * node-pre-gyp clients fetch pre-built binaries from the local registry, then
 * re-packs the tarball and returns the new bytes. Work happens in a `.tmp`
 * folder under `localFolder`, which is removed afterwards.
 * NOTE(review): fs.promises.rmdir with {recursive: true} is deprecated in
 * newer Node versions in favor of fs.promises.rm — confirm target Node range.
 */
async function rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder, actualNapiVersions) {
    const tmpFolder = path.join(localFolder, '.tmp');
    await fs.promises.mkdir(tmpFolder, { recursive: true });
    await extractTgz(data, tmpFolder);
    const manifestPath = path.join(tmpFolder, 'package', 'package.json');
    const json = await fs.promises.readFile(manifestPath, 'utf8');
    const metadata = JSON.parse(json);
    metadata.binary.host = localUrl.origin;
    metadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
    metadata.binary.napi_versions = actualNapiVersions;
    await fs.promises.writeFile(manifestPath, JSON.stringify(metadata, null, 2));
    const updatedData = await compressTgz(tmpFolder);
    await fs.promises.rmdir(tmpFolder, { recursive: true });
    return updatedData;
}
exports.rewriteMetadataInTarball = rewriteMetadataInTarball;
exports.extractTgz = extractTgz;
exports.tarballFilename = tarballFilename;
var _fs = _interopRequireDefault(require("fs"));
var _pregyp = require("./pregyp");
var _path = _interopRequireDefault(require("path"));
var _streamifier = _interopRequireDefault(require("streamifier"));
var _tarFs = _interopRequireDefault(require("tar-fs"));
var _zlib = _interopRequireDefault(require("zlib"));
var _integrity = require("./integrity");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function rewriteVersionMetadata(versionMetadata, data, localUrl) {
versionMetadata.dist.tarball = localTarballUrl(versionMetadata, localUrl);
if ((0, _pregyp.hasPrebuiltBinaries)(versionMetadata)) {
versionMetadata.binary.host = localUrl.origin;
versionMetadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
versionMetadata.dist.integrity = (0, _integrity.sha512)(data);
versionMetadata.dist.shasum = (0, _integrity.sha1)(data);
}
}
async function rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder) {
const tmpFolder = _path.default.join(localFolder, '.tmp');
await _fs.default.promises.mkdir(tmpFolder, {
recursive: true
});
await extractTgz(data, tmpFolder);
const manifestPath = _path.default.join(tmpFolder, 'package', 'package.json');
const json = await _fs.default.promises.readFile(manifestPath, 'utf8');
const metadata = JSON.parse(json);
metadata.binary.host = localUrl.origin;
metadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
await _fs.default.promises.writeFile(manifestPath, JSON.stringify(metadata, null, 2));
const updatedData = await compressTgz(tmpFolder);
await _fs.default.promises.rmdir(tmpFolder, {
recursive: true
});
return updatedData;
}
/**
 * Builds the remote_path (relative URL path with trailing slash) under which
 * a version's pre-built binaries are served from the local registry.
 * Fix: the diff view duplicated the identical return line (second one
 * unreachable); deduplicated.
 */
function createPrebuiltBinaryRemotePath(url, versionMetadata) {
    return `${removeTrailingSlash(url.pathname)}/${versionMetadata.name}/${versionMetadata.version}/`;
}
/**
 * Extracts a gzipped tarball (Buffer `data`) into `folder`; resolves when the
 * tar stream finishes, rejects on stream error.
 * Fix: the diff view stacked the 3.0.0 (streamifier-based) body above an
 * unreachable 3.1.0 body; deduplicated to the 3.1.0 stream.Readable form.
 */
function extractTgz(data, folder) {
    return new Promise((resolve, reject) => {
        const tgz = stream_1.Readable.from(data)
            .pipe(zlib.createGunzip())
            .pipe(tar.extract(folder));
        tgz.on('finish', resolve);
        tgz.on('error', reject);
    });
}
exports.extractTgz = extractTgz;
/**
 * Packs `folder` into a gzipped tarball, buffering the stream in memory;
 * resolves with the tarball contents as a single Buffer.
 * Fix: the diff view stacked the 3.0.0 body above an unreachable 3.1.0 body;
 * deduplicated to the 3.1.0 form.
 */
function compressTgz(folder) {
    return new Promise((resolve, reject) => {
        const chunks = [];
        const tgz = tar
            .pack(folder)
            .pipe(zlib.createGzip());
        tgz.on('data', (chunk) => chunks.push(chunk));
        tgz.on('end', () => resolve(Buffer.concat(chunks)));
        tgz.on('error', reject);
    });
}
/**
 * Full URL at which a version's tarball is served from the local registry.
 * Fix: the diff view nested a redeclared copy inside a never-closed function
 * (brace imbalance); restored as two separate functions.
 */
function localTarballUrl({ name, version }, localUrl) {
    return `${localUrl.origin}${removeTrailingSlash(localUrl.pathname)}/${name}/${tarballFilename(name, version)}`;
}
/**
 * Filesystem-safe tarball filename for a package version; '/' in scoped
 * package names is flattened to '-' (e.g. @babel/core -> @babel-core-…).
 */
function tarballFilename(name, version) {
    const normalized = name.replace(/\//g, '-');
    return `${normalized}-${version}.tgz`;
}
exports.tarballFilename = tarballFilename;
/**
 * Strips a single trailing '/' from `str`, if present.
 * Fix: the diff view left an orphan duplicated `return`/`}` pair after the
 * function (SyntaxError); deduplicated.
 */
function removeTrailingSlash(str) {
    return str.replace(/\/$/, "");
}
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadPrebuiltBinaries = exports.hasPrebuiltBinaries = void 0;
const fs = require("fs");
const path = require("path");
const semver = require("semver");
const url = require("url");
const client_1 = require("./client");
/**
 * True when the version metadata declares a node-pre-gyp `binary` section
 * (identified by a `module_name` entry).
 */
function hasPrebuiltBinaries({ binary }) {
    if (!binary) {
        return false;
    }
    return Boolean(binary.module_name);
}
exports.hasPrebuiltBinaries = hasPrebuiltBinaries;
exports.downloadPrebuiltBinaries = downloadPrebuiltBinaries;
var _client = require("./client");
var _fs = _interopRequireDefault(require("fs"));
var _path = _interopRequireDefault(require("path"));
var _semver = _interopRequireDefault(require("semver"));
var _url = _interopRequireDefault(require("url"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function hasPrebuiltBinaries({
binary
}) {
return binary && binary.module_name;
}
/**
 * Downloads all pre-built binary variants (every abi/arch/platform
 * combination) for a node-pre-gyp package version into `localFolder`.
 *
 * Returns the list of N-API versions for which at least one binary variant
 * was actually downloadable (empty list for legacy non-N-API packages), so
 * callers can rewrite the package's declared `napi_versions` accordingly.
 *
 * Fix: the diff view interleaved the 3.0.0 and 3.1.0 bodies (duplicate
 * destructuring, brace imbalance); reconstructed the 3.1.0 version.
 */
async function downloadPrebuiltBinaries(versionMetadata, localFolder, prebuiltBinaryProperties) {
    const { binary, name, version } = versionMetadata;
    if (!binary.napi_versions) {
        // Legacy (non-N-API) package: one binary per abi/arch/platform combination.
        for (const { abi, arch, platform } of prebuiltBinaryProperties) {
            await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch);
        }
        return [];
    }
    const foundNapiVersions = [];
    const declaredNapiVersions = binary.napi_versions || [];
    for (const napiVersion of binary.napi_versions) {
        const binaryDownloaded = [];
        for (const { abi, arch, platform } of prebuiltBinaryProperties) {
            const downloaded = await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch, napiVersion);
            binaryDownloaded.push(downloaded);
        }
        // n-api version is considered valid if at least one binary variant can be downloaded for it
        // some packages miss binaries completely for a n-api version (such as sqlite3 for n-api v6 at the moment)
        if (binaryDownloaded.filter(Boolean).length > 0) {
            foundNapiVersions.push(napiVersion);
        }
    }
    if (foundNapiVersions.length !== declaredNapiVersions.length) {
        console.log(`Changed N-API version declaration of '${name}@${version}' from [${declaredNapiVersions}] to [${foundNapiVersions}] based on (un-)availability of pre-built binaries`);
    }
    return foundNapiVersions;
}
exports.downloadPrebuiltBinaries = downloadPrebuiltBinaries;
/**
 * Downloads one pre-built binary variant into `localFolder`.
 * Returns true on success, false when the binary does not exist upstream;
 * any other error is logged and rethrown.
 *
 * Fix: the diff view interleaved the 3.0.0 and 3.1.0 bodies (brace
 * imbalance); reconstructed the 3.1.0 version (localFolder-first signature,
 * boolean return).
 */
async function downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch, napiVersion) {
    try {
        const data = await fetchPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion);
        await fs.promises.writeFile(prebuiltBinaryFilePath(localFolder, name, version, binary, abi, platform, arch, napiVersion), data);
        return true;
    }
    catch (err) {
        // pre-built binaries are commonly not available on all platforms (and S3 will commonly respond with 403 for a non-existent file)
        const fileNotFoundError = err.response && (err.response.status == 403 || err.response.status == 404);
        if (!fileNotFoundError) {
            console.error(`Unexpected error fetching prebuilt binary for ${name} and ABI v${abi} on ${arch}-${platform} (n-api version ${napiVersion})`);
            throw err;
        }
        return false;
    }
}
/**
 * Fetches one pre-built binary as raw bytes from the package's binary host.
 * Fix: the diff view stacked the 3.0.0 return above an unreachable 3.1.0
 * return; deduplicated to the 3.1.0 body.
 */
function fetchPrebuiltBinary(name, version, binary, abi, platform, arch, napiVersion) {
    return client_1.fetchBinaryData(prebuiltBinaryUrl(name, version, binary, abi, platform, arch, napiVersion));
}
/**
 * Local path for a downloaded pre-built binary file.
 * Fix: the diff view nested a redeclared copy inside a never-closed function
 * (brace imbalance); kept the 3.1.0 localFolder-first signature used by the
 * 3.1.0 callers.
 */
function prebuiltBinaryFilePath(localFolder, name, version, binary, abi, platform, arch, napiVersion) {
    return path.join(localFolder, prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion));
}
/**
 * Upstream URL of one pre-built binary, composed from the package's binary
 * host plus its node-pre-gyp remote_path/package_name templates.
 * Fix: the diff view duplicated `const remotePath` in one scope
 * (SyntaxError); deduplicated to the 3.1.0 body.
 */
function prebuiltBinaryUrl(name, version, binary, abi, platform, arch, napiVersion) {
    const remotePath = prebuiltBinaryRemotePath(name, version, binary, abi, platform, arch, napiVersion)
        .replace(/\/$/, '');
    const fileName = prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion);
    return url.resolve(binary.host, `${remotePath}/${fileName}`);
}
/**
 * Expands the node-pre-gyp `remote_path` template for one binary variant.
 * Fix: the diff view duplicated the identical return line (second one
 * unreachable); deduplicated.
 */
function prebuiltBinaryRemotePath(name, version, binary, abi, platform, arch, napiVersion) {
    return formatPrebuilt(binary.remote_path, name, version, binary.module_name, abi, platform, arch, napiVersion);
}
/**
 * Expands the node-pre-gyp `package_name` template for one binary variant.
 * Fix: the diff view left an orphan duplicated return after the closing
 * brace (SyntaxError); deduplicated.
 */
function prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion) {
    return formatPrebuilt(binary.package_name, name, version, binary.module_name, abi, platform, arch, napiVersion);
}
// see node-pre-gyp: /lib/util/versioning.js for documentation of possible values
/**
 * Substitutes node-pre-gyp template placeholders ({name}, {version},
 * {node_abi}, {platform}, …) in `formatString` and collapses duplicate
 * slashes. {napi_build_version} is only substituted when a napiVersion is
 * given (legacy packages have none).
 *
 * Fix: the diff view interleaved the 3.0.0 single-expression body with the
 * 3.1.0 body, redeclaring `moduleVersion` in one scope (SyntaxError);
 * deduplicated to the 3.1.0 body.
 */
function formatPrebuilt(formatString, name, version, moduleName, abi, platform, arch, napiVersion) {
    const moduleVersion = semver.parse(version);
    const prerelease = (moduleVersion.prerelease || []).join('.');
    const build = (moduleVersion.build || []).join('.');
    const formatted = formatString
        .replace('{name}', name)
        .replace('{version}', version)
        .replace('{major}', moduleVersion.major.toString())
        .replace('{minor}', moduleVersion.minor.toString())
        .replace('{patch}', moduleVersion.patch.toString())
        .replace('{prerelease}', prerelease)
        .replace('{build}', build)
        .replace('{module_name}', moduleName)
        .replace('{node_abi}', `node-v${abi}`)
        .replace('{platform}', platform)
        .replace('{arch}', arch)
        .replace('{libc}', libc(platform))
        .replace('{configuration}', 'Release')
        .replace('{toolset}', '')
        .replace(/[/]+/g, '/');
    return napiVersion ? formatted.replace('{napi_build_version}', napiVersion.toString())
        : formatted;
}
/**
 * Maps a platform identifier to the libc label used in node-pre-gyp
 * `{libc}` template substitution.
 *
 * @param {string} platform - Node platform string (e.g. 'linux', 'darwin').
 * @returns {string} 'glibc' for linux, otherwise 'unknown'.
 */
function libc(platform) {
    return platform === 'linux' ? 'glibc' : 'unknown';
}
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.updateDependenciesCache = updateDependenciesCache;
exports.dependenciesNotInCache = dependenciesNotInCache;
exports.dependenciesFromPackageLock = dependenciesFromPackageLock;
var _lodash = _interopRequireDefault(require("lodash"));
var _fs = _interopRequireDefault(require("fs"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
Object.defineProperty(exports, "__esModule", { value: true });
exports.dependenciesFromPackageLock = exports.dependenciesNotInCache = exports.updateDependenciesCache = void 0;
const fs = require("fs");
const assert_1 = require("assert");
/**
 * Merges newly downloaded dependencies into the on-disk cache file and rewrites
 * it using the V3 cache schema (which records both N-API support flags).
 *
 * @param {Array<{id: string}>} newDependencies - Packages downloaded in this run.
 * @param {string} cacheFilePath - Path of the JSON cache file to rewrite.
 * @param {Array} prebuiltBinaryProperties - Binary abi/arch/platform combinations in effect.
 * @returns {Promise<void>} Resolves when the cache file has been written.
 */
async function updateDependenciesCache(newDependencies, cacheFilePath, prebuiltBinaryProperties) {
    const { dependencies: cachedDependencies } = await loadCache(cacheFilePath);
    // De-duplicate by id after merging cached and new entries.
    const dependencies = cachedDependencies
        .concat(newDependencies)
        .sort(sortById)
        .filter(uniqueById);
    const data = {
        dependencies,
        prebuiltBinaryProperties,
        prebuiltBinaryNApiSupport: true,
        prebuiltBinaryNApiSupportWithoutBrokenVersions: true
    };
    return fs.promises.writeFile(cacheFilePath, JSON.stringify(data), 'utf8');
}
exports.updateDependenciesCache = updateDependenciesCache;
/**
 * Returns the subset of `dependencies` that is not yet present in the cache.
 * If the cached pre-built binary properties differ from the current ones, or the
 * cache predates the N-API support fixes, every current package is returned so
 * all of them get re-downloaded.
 *
 * @param {Array<{id: string}>} dependencies - Packages resolved from the lock file.
 * @param {string} cacheFilePath - Path of the JSON cache file.
 * @param {Array} prebuiltBinaryProperties - Binary abi/arch/platform combinations in effect.
 * @returns {Promise<Array<{id: string}>>} Packages that still need downloading.
 */
async function dependenciesNotInCache(dependencies, cacheFilePath, prebuiltBinaryProperties) {
    const { dependencies: cachedDependencies, prebuiltBinaryProperties: cachedPrebuiltBinaryProperties, prebuiltBinaryNApiSupport, prebuiltBinaryNApiSupportWithoutBrokenVersions } = await loadCache(cacheFilePath);
    if (cachedDependencies.length > 0 && (!isDeepEqual(prebuiltBinaryProperties, cachedPrebuiltBinaryProperties) || !prebuiltBinaryNApiSupport || !prebuiltBinaryNApiSupportWithoutBrokenVersions)) {
        console.log(`Pre-built binary properties changed, re-downloading all current packages`);
        return dependencies;
    }
    const packageIdsInCache = cachedDependencies.map(pkg => pkg.id);
    return dependencies.filter(pkg => !packageIdsInCache.includes(pkg.id));
}
exports.dependenciesNotInCache = dependenciesNotInCache;
/**
 * Loads the dependency cache file, migrating older schemas to V3.
 * - V1 (bare array of dependencies): both N-API flags set to false so packages re-download.
 * - V2 (object without `prebuiltBinaryNApiSupportWithoutBrokenVersions`): flag added as false.
 * - Missing/unreadable file: an empty V3 cache is returned instead of throwing.
 *
 * @param {string} cacheFilePath - Path of the JSON cache file.
 * @returns {Promise<{dependencies: Array, prebuiltBinaryProperties: Array,
 *   prebuiltBinaryNApiSupport: boolean, prebuiltBinaryNApiSupportWithoutBrokenVersions: boolean}>}
 */
async function loadCache(cacheFilePath) {
    try {
        const data = JSON.parse(await fs.promises.readFile(cacheFilePath, 'utf8'));
        // Migrate V1 legacy cache file schema to V3
        if (Array.isArray(data)) {
            return {
                dependencies: data,
                prebuiltBinaryProperties: [],
                prebuiltBinaryNApiSupport: false,
                prebuiltBinaryNApiSupportWithoutBrokenVersions: false
            };
        }
        // migrate V2 to V3
        if (!('prebuiltBinaryNApiSupportWithoutBrokenVersions' in data)) {
            return {
                ...data,
                prebuiltBinaryNApiSupportWithoutBrokenVersions: false
            };
        }
        return data;
    }
    catch (fileNotFound) {
        // empty V3 cache
        return {
            dependencies: [],
            prebuiltBinaryProperties: [],
            prebuiltBinaryNApiSupport: true,
            prebuiltBinaryNApiSupportWithoutBrokenVersions: true
        };
    }
}
/**
 * Reads a package-lock.json file and flattens its dependency tree into a
 * sorted, de-duplicated list of `{id, name, version}` records.
 *
 * @param {string} path - Path to the package-lock.json manifest.
 * @param {boolean} includeDevDependencies - When true, dev dependencies are kept.
 * @returns {Promise<Array<{id: string, name: string, version: string}>>}
 */
async function dependenciesFromPackageLock(path, includeDevDependencies) {
    const packageLock = JSON.parse(await fs.promises.readFile(path, 'utf8'));
    const dependencyTree = dependenciesRecursive(packageLock, includeDevDependencies);
    return dependencyTree
        .map(({ name, version }) => ({ id: `${name}@${version}`, name, version }))
        .sort(sortById)
        .filter(uniqueById);
}
exports.dependenciesFromPackageLock = dependenciesFromPackageLock;
/**
 * Comparator for Array.prototype.sort: orders records by their `id` string.
 *
 * @param {{id: string}} a
 * @param {{id: string}} b
 * @returns {number} Negative, zero, or positive per localeCompare semantics.
 */
function sortById(a, b) {
    return a.id.localeCompare(b.id);
}
/**
 * Predicate for Array.prototype.filter: keeps only the first occurrence of
 * each `id` in the array.
 *
 * @param {{id: string}} value - Current element.
 * @param {number} index - Current index.
 * @param {Array<{id: string}>} values - The whole array being filtered.
 * @returns {boolean} True when this is the first element with its id.
 */
function uniqueById(value, index, values) {
    return values.findIndex(v => v.id === value.id) === index;
}
/**
 * Recursively walks a package-lock `dependencies` tree and collects
 * `{name, version}` records, excluding bundled dependencies and — unless
 * includeDevDependencies is set — dev dependencies.
 *
 * @param {{dependencies?: Object}} node - A package-lock node (root or nested).
 * @param {boolean} includeDevDependencies - When true, only bundled deps are filtered out.
 * @returns {Array<{name: string, version: string}>} Flat list (may contain duplicates).
 */
function dependenciesRecursive({ dependencies }, includeDevDependencies) {
    if (!dependencies) {
        return [];
    }
    const includeFn = includeDevDependencies ? filterOutBundledDependencies : filterOutBundledAndDevDependencies;
    return Object.entries(dependencies)
        .filter(includeFn)
        .map(([name, props]) => [{ name, version: props.version }].concat(dependenciesRecursive(props, includeDevDependencies)))
        .flat();
}
/**
 * Entry filter for Object.entries([name, props]): keeps dependencies that are
 * not bundled with their parent package.
 *
 * @param {[string, {bundled?: boolean}]} entry - [name, props] pair.
 * @returns {boolean} True unless the dependency is marked as bundled.
 */
function filterOutBundledDependencies([, props]) {
    return !props.bundled;
}
/**
 * Entry filter for Object.entries([name, props]): keeps dependencies that are
 * neither bundled nor dev-only.
 *
 * @param {[string, {bundled?: boolean, dev?: boolean}]} entry - [name, props] pair.
 * @returns {boolean} True unless the dependency is bundled or a dev dependency.
 */
function filterOutBundledAndDevDependencies([, props]) {
    return !(props.bundled || props.dev);
}
/**
 * Deep structural equality check built on Node's assert module.
 * Returns true when both values are deeply strictly equal, false otherwise.
 *
 * @param {*} left - First value to compare.
 * @param {*} right - Second value to compare.
 * @returns {boolean} Result of the deep strict comparison.
 */
function isDeepEqual(left, right) {
    let equal = true;
    try {
        assert_1.deepStrictEqual(left, right);
    }
    catch (mismatch) {
        equal = false;
    }
    return equal;
}
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "__esModule", { value: true });
exports.synchronize = void 0;
const download_1 = require("./download");
const resolve_1 = require("./resolve");
/**
 * Synchronizes a local registry folder against a package-lock manifest:
 * resolves the lock file's dependencies, downloads the ones not yet cached,
 * then updates the cache file under the root folder.
 *
 * @param {{rootFolder: string, manifest: string, includeDevDependencies: boolean,
 *   prebuiltBinaryProperties: Array}} options - Sync configuration (other fields
 *   are passed through to downloadAll).
 * @returns {Promise<Array>} The packages that were newly downloaded in this run.
 */
async function synchronize(options) {
    const cacheFilePath = `${options.rootFolder}/.registry-sync-cache.json`;
    const packages = await resolve_1.dependenciesFromPackageLock(options.manifest, options.includeDevDependencies);
    const newPackages = await resolve_1.dependenciesNotInCache(packages, cacheFilePath, options.prebuiltBinaryProperties);
    await download_1.downloadAll(newPackages, options);
    await resolve_1.updateDependenciesCache(newPackages, cacheFilePath, options.prebuiltBinaryProperties);
    return newPackages;
}
exports.synchronize = synchronize;
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc