New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

registry-sync

Package Overview
Dependencies
Maintainers
1
Versions
89
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

registry-sync - npm Package Compare versions

Comparing version 3.1.0 to 3.2.0

src/normalize-yarn-pattern.js

30

package.json
{
"name": "registry-sync",
"version": "3.1.0",
"version": "3.2.0",
"description": "synchronize a remote npm registry for private use",

@@ -11,3 +11,7 @@ "repository": "https://github.com/heikkipora/registry-sync",

"build": "./build-npm",
"eslint": "eslint . --ext .ts --fix",
"prettier": "prettier --write .",
"prettier:check": "prettier --check --loglevel warn .",
"eslint": "eslint --fix --format=codeframe --ext .ts .",
"eslint:check": "eslint --max-warnings=0 --format=codeframe --ext .ts .",
"lint-staged": "lint-staged --verbose",
"test": "mocha -r ts-node/register --timeout 120000 test/*.ts",

@@ -19,2 +23,3 @@ "release-test": "cd release-test && ./run-sync-install-cycle.sh"

"dependencies": {
"@yarnpkg/lockfile": "1.1.0",
"axios": "0.21.1",

@@ -27,18 +32,23 @@ "commander": "7.0.0",

"devDependencies": {
"@types/chai": "4.2.14",
"@arkweid/lefthook": "0.7.2",
"@types/chai": "4.2.15",
"@types/lodash": "4.14.168",
"@types/mocha": "8.2.0",
"@types/node": "14.14.22",
"@types/node": "14.14.28",
"@types/semver": "7.3.4",
"@types/ssri": "7.1.0",
"@types/tar-fs": "2.0.0",
"@typescript-eslint/eslint-plugin": "4.14.2",
"@typescript-eslint/parser": "4.14.2",
"chai": "4.2.0",
"eslint": "7.19.0",
"@types/yarnpkg__lockfile": "1.1.4",
"@typescript-eslint/eslint-plugin": "4.15.0",
"@typescript-eslint/parser": "4.15.0",
"chai": "4.3.0",
"eslint": "7.20.0",
"eslint-config-prettier": "7.2.0",
"eslint-plugin-mocha": "8.0.0",
"express": "4.17.1",
"mocha": "8.2.1",
"lint-staged": "10.5.4",
"mocha": "8.3.0",
"prettier": "2.2.1",
"ts-node": "9.1.1",
"typescript": "4.1.3"
"typescript": "4.1.5"
},

@@ -45,0 +55,0 @@ "keywords": [

@@ -26,3 +26,3 @@ # registry-sync

--root <path> Path to save NPM package tarballs and metadata to
--manifest <file> Path to a package-lock.json file to use as catalog for mirrored NPM packages
--manifest <file> Path to a package-lock.json or yarn.lock file to use as catalog for mirrored NPM packages
--localUrl <url> URL to use as root in stored package metadata (i.e. where folder defined as --root will be exposed at)

@@ -35,2 +35,3 @@ --binaryAbi <list> Comma-separated list of node C++ ABI numbers to download pre-built binaries for. See NODE_MODULE_VERSION column in https://nodejs.org/en/download/releases/

--includeDev Include also packages found from devDependencies section of the --manifest. Not included by default.
--dryRun Print packages that would be downloaded but do not download them

@@ -46,3 +47,3 @@ Example:

Re-executing ```registry-sync``` will only download and update files for new package versions.
Re-executing `registry-sync` will only download and update files for new package versions.

@@ -52,4 +53,45 @@ ### Serving the local root folder after synchronization

Configure a web server to use `index.json` as index file name instead of `index.html`.
Also configure ```HTTP 404``` responses to have an ```application/json``` body of ```{}```.
Also configure `HTTP 404` responses to have an `application/json` body of `{}`.
For example, for local testing you can run nginx in a container to serve the downloaded packages:
```
# Create a very simple nginx config
cat <<EOF >nginx.conf
server {
listen 8000;
server_name localhost;
location / {
root /usr/share/nginx/html;
index index.json;
}
error_page 404 @404_empty_json;
location @404_empty_json {
default_type application/json;
return 404 '{}';
}
}
EOF
# Run nginx and serve directory local-registry
docker run --rm --name registry -p 8000:8000 \
--volume="${PWD}/local-registry:/usr/share/nginx/html:ro" \
--volume="${PWD}/nginx.conf:/etc/nginx/conf.d/default.conf:ro" nginx:1.19
```
Then you can install dependencies from the local registry using `npm`
```
npm_config_registry='http://localhost:8000' npm install
```
or using `yarn`
```
YARN_REGISTRY='http://localhost:8000' yarn install
```
### Creating a separate lockfile for synchronization

@@ -60,2 +102,3 @@

In this case it might be useful to copy the `package.json` that you want to synchronize as a local repository to somewhere else and create a new cross platform `package-lock.json` by running:
```

@@ -73,3 +116,3 @@ npm install --force --package-lock-only

Pull requests are welcome. Kindly check that your code passes ESLint checks by running ```npm run eslint``` first.
Integration tests can be run with ```npm test```. Both are anyway run automatically by GitHub Actions.
Pull requests are welcome. Kindly check that your code passes ESLint checks by running `npm run eslint` first.
Integration tests can be run with `npm test`. Both are anyway run automatically by GitHub Actions.

@@ -27,10 +27,9 @@ "use strict";

let data = await downloadTarball(versionMetadata, enforceTarballsOverHttps);
let actualNapiVersions = [];
if (pregyp_1.hasPrebuiltBinaries(versionMetadata)) {
const localPregypFolder = await ensureLocalFolderExists(version, localFolder);
actualNapiVersions = await pregyp_1.downloadPrebuiltBinaries(versionMetadata, localPregypFolder, prebuiltBinaryProperties);
data = await metadata_1.rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder, actualNapiVersions);
await pregyp_1.downloadPrebuiltBinaries(versionMetadata, localPregypFolder, prebuiltBinaryProperties);
data = await metadata_1.rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder);
}
await saveTarball(versionMetadata, data, localFolder);
metadata_1.rewriteVersionMetadata(versionMetadata, data, localUrl, actualNapiVersions);
metadata_1.rewriteVersionMetadata(versionMetadata, data, localUrl);
await updateMetadata(versionMetadata, registryMetadata, registryUrl, localFolder);

@@ -37,0 +36,0 @@ }

@@ -12,3 +12,3 @@ "use strict";

.requiredOption('--root <path>', 'Path to save NPM package tarballs and metadata to')
.requiredOption('--manifest <file>', 'Path to a package-lock.json file to use as catalog for mirrored NPM packages.')
.requiredOption('--manifest <file>', 'Path to a package-lock.json or yarn.lock file to use as catalog for mirrored NPM packages.')
.requiredOption('--localUrl <url>', 'URL to use as root in stored package metadata (i.e. where folder defined as --root will be exposed at)')

@@ -21,2 +21,3 @@ .requiredOption('--binaryAbi <list>', 'Comma-separated list of node C++ ABI numbers to download pre-built binaries for. See NODE_MODULE_VERSION column in https://nodejs.org/en/download/releases/')

.option('--includeDev', 'Include also packages found from devDependencies section of the --manifest')
.option('--dryRun', 'Print packages that would be downloaded but do not download them')
.parse(process.argv);

@@ -27,3 +28,5 @@ const rawOptions = program.opts();

const platforms = rawOptions.binaryPlatform.split(',');
const prebuiltBinaryProperties = abis.map(abi => architectures.map(arch => platforms.map(platform => ({ abi, arch, platform }))).flat()).flat();
const prebuiltBinaryProperties = abis
.map(abi => architectures.map(arch => platforms.map(platform => ({ abi, arch, platform }))).flat())
.flat();
const options = {

@@ -36,5 +39,5 @@ localUrl: new url_1.URL(rawOptions.localUrl),

enforceTarballsOverHttps: Boolean(!rawOptions.dontEnforceHttps),
includeDevDependencies: Boolean(rawOptions.includeDev)
includeDevDependencies: Boolean(rawOptions.includeDev),
dryRun: Boolean(rawOptions.dryRun)
};
sync_1.synchronize(options)
.then(newPackages => console.log(`Downloaded ${newPackages.length} packages`));
sync_1.synchronize(options);

@@ -11,3 +11,3 @@ "use strict";

const integrity_1 = require("./integrity");
function rewriteVersionMetadata(versionMetadata, data, localUrl, actualNapiVersions) {
function rewriteVersionMetadata(versionMetadata, data, localUrl) {
versionMetadata.dist.tarball = localTarballUrl(versionMetadata, localUrl);

@@ -17,3 +17,2 @@ if (pregyp_1.hasPrebuiltBinaries(versionMetadata)) {

versionMetadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
versionMetadata.binary.napi_versions = actualNapiVersions;
versionMetadata.dist.integrity = integrity_1.sha512(data);

@@ -24,3 +23,3 @@ versionMetadata.dist.shasum = integrity_1.sha1(data);

exports.rewriteVersionMetadata = rewriteVersionMetadata;
async function rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder, actualNapiVersions) {
async function rewriteMetadataInTarball(data, versionMetadata, localUrl, localFolder) {
const tmpFolder = path.join(localFolder, '.tmp');

@@ -34,3 +33,2 @@ await fs.promises.mkdir(tmpFolder, { recursive: true });

metadata.binary.remote_path = createPrebuiltBinaryRemotePath(localUrl, versionMetadata);
metadata.binary.napi_versions = actualNapiVersions;
await fs.promises.writeFile(manifestPath, JSON.stringify(metadata, null, 2));

@@ -47,5 +45,3 @@ const updatedData = await compressTgz(tmpFolder);

return new Promise((resolve, reject) => {
const tgz = stream_1.Readable.from(data)
.pipe(zlib.createGunzip())
.pipe(tar.extract(folder));
const tgz = stream_1.Readable.from(data).pipe(zlib.createGunzip()).pipe(tar.extract(folder));
tgz.on('finish', resolve);

@@ -59,5 +55,3 @@ tgz.on('error', reject);

const chunks = [];
const tgz = tar
.pack(folder)
.pipe(zlib.createGzip());
const tgz = tar.pack(folder).pipe(zlib.createGzip());
tgz.on('data', (chunk) => chunks.push(chunk));

@@ -77,3 +71,3 @@ tgz.on('end', () => resolve(Buffer.concat(chunks)));

/**
 * Removes a single trailing slash from a string, if present.
 * @param {string} str - Input string (typically a URL or path segment).
 * @returns {string} The string without its trailing '/', or unchanged when none.
 */
function removeTrailingSlash(str) {
    // Fixed: the diff-merged source contained two return statements (the
    // second was unreachable dead code); keep the single-quoted variant.
    return str.replace(/\/$/, '');
}

@@ -19,22 +19,9 @@ "use strict";

}
return [];
return;
}
const foundNapiVersions = [];
const declaredNapiVersions = binary.napi_versions || [];
for (const napiVersion of binary.napi_versions) {
const binaryDownloaded = [];
for (const { abi, arch, platform } of prebuiltBinaryProperties) {
const downloaded = await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch, napiVersion);
binaryDownloaded.push(downloaded);
await downloadPrebuiltBinary(localFolder, name, version, binary, abi, platform, arch, napiVersion);
}
// n-api version is considered valid if at least one binary variant can be downloaded for it
// some packages miss binaries completely for a n-api version (such as sqlite3 for n-api v6 at the moment)
if (binaryDownloaded.filter(Boolean).length > 0) {
foundNapiVersions.push(napiVersion);
}
}
if (foundNapiVersions.length !== declaredNapiVersions.length) {
console.log(`Changed N-API version declaration of '${name}@${version}' from [${declaredNapiVersions}] to [${foundNapiVersions}] based on (un-)availability of pre-built binaries`);
}
return foundNapiVersions;
}

@@ -46,3 +33,2 @@ exports.downloadPrebuiltBinaries = downloadPrebuiltBinaries;

await fs.promises.writeFile(prebuiltBinaryFilePath(localFolder, name, version, binary, abi, platform, arch, napiVersion), data);
return true;
}

@@ -56,3 +42,2 @@ catch (err) {

}
return false;
}

@@ -67,4 +52,3 @@ }

function prebuiltBinaryUrl(name, version, binary, abi, platform, arch, napiVersion) {
const remotePath = prebuiltBinaryRemotePath(name, version, binary, abi, platform, arch, napiVersion)
.replace(/\/$/, '');
const remotePath = prebuiltBinaryRemotePath(name, version, binary, abi, platform, arch, napiVersion).replace(/\/$/, '');
const fileName = prebuiltBinaryFileName(name, version, binary, abi, platform, arch, napiVersion);

@@ -100,4 +84,3 @@ return url.resolve(binary.host, `${remotePath}/${fileName}`);

.replace(/[/]+/g, '/');
return napiVersion ? formatted.replace('{napi_build_version}', napiVersion.toString())
: formatted;
return napiVersion ? formatted.replace('{napi_build_version}', napiVersion.toString()) : formatted;
}

@@ -104,0 +87,0 @@ function libc(platform) {

@@ -5,14 +5,16 @@ "use strict";

const fs = require("fs");
const pathLib = require("path");
const readline = require("readline");
const url = require("url");
const assert_1 = require("assert");
const yarnLockfile = require("@yarnpkg/lockfile");
const normalize_yarn_pattern_1 = require("./normalize-yarn-pattern");
const YARN_LOCK_FILENAME = 'yarn.lock';
async function updateDependenciesCache(newDependencies, cacheFilePath, prebuiltBinaryProperties) {
const { dependencies: cachedDependencies } = await loadCache(cacheFilePath);
const dependencies = cachedDependencies
.concat(newDependencies)
.sort(sortById)
.filter(uniqueById);
const dependencies = cachedDependencies.concat(newDependencies).sort(sortById).filter(uniqueById);
const data = {
dependencies,
prebuiltBinaryProperties,
prebuiltBinaryNApiSupport: true,
prebuiltBinaryNApiSupportWithoutBrokenVersions: true
prebuiltBinaryNApiSupport: true
};

@@ -23,4 +25,5 @@ return fs.promises.writeFile(cacheFilePath, JSON.stringify(data), 'utf8');

async function dependenciesNotInCache(dependencies, cacheFilePath, prebuiltBinaryProperties) {
const { dependencies: cachedDependencies, prebuiltBinaryProperties: cachedPrebuiltBinaryProperties, prebuiltBinaryNApiSupport, prebuiltBinaryNApiSupportWithoutBrokenVersions } = await loadCache(cacheFilePath);
if (cachedDependencies.length > 0 && (!isDeepEqual(prebuiltBinaryProperties, cachedPrebuiltBinaryProperties) || !prebuiltBinaryNApiSupport || !prebuiltBinaryNApiSupportWithoutBrokenVersions)) {
const { dependencies: cachedDependencies, prebuiltBinaryProperties: cachedPrebuiltBinaryProperties, prebuiltBinaryNApiSupport } = await loadCache(cacheFilePath);
if (cachedDependencies.length > 0 &&
(!isDeepEqual(prebuiltBinaryProperties, cachedPrebuiltBinaryProperties) || !prebuiltBinaryNApiSupport)) {
console.log(`Pre-built binary properties changed, re-downloading all current packages`);

@@ -36,3 +39,3 @@ return dependencies;

const data = JSON.parse(await fs.promises.readFile(cacheFilePath, 'utf8'));
// Migrate V1 legacy cache file schema to V3
// Migrate V1 legacy cache file schema to V2
if (Array.isArray(data)) {

@@ -42,32 +45,127 @@ return {

prebuiltBinaryProperties: [],
prebuiltBinaryNApiSupport: false,
prebuiltBinaryNApiSupportWithoutBrokenVersions: false
prebuiltBinaryNApiSupport: false
};
}
// migrate V2 to V3
if (!('prebuiltBinaryNApiSupportWithoutBrokenVersions' in data)) {
return {
...data,
prebuiltBinaryNApiSupportWithoutBrokenVersions: false
};
}
return data;
}
catch (fileNotFound) {
// empty V3 cache
// empty V2 cache
return {
dependencies: [],
prebuiltBinaryProperties: [],
prebuiltBinaryNApiSupport: true,
prebuiltBinaryNApiSupportWithoutBrokenVersions: true
prebuiltBinaryNApiSupport: true
};
}
}
/**
 * Determines whether a yarn.lock package pattern points at a non-registry
 * source (link, file, tarball, git, or hosted-git shorthand), i.e. a package
 * that cannot be mirrored from an npm registry.
 * The individual checks mirror yarn's own "exotic" resolvers; each one links
 * to the corresponding resolver in the yarn v1 source.
 * @param {string} packagePattern - Pattern key or range from a yarn.lock entry.
 * @returns {boolean} true when the pattern is a non-registry source.
 */
function isNonRegistryYarnPackagePattern(packagePattern) {
    if (
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/link-resolver.js#L14
    packagePattern.startsWith('link:') ||
        // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/file-resolver.js#L18
        packagePattern.startsWith('file:') ||
        /^\.{1,2}\//.test(packagePattern) ||
        pathLib.isAbsolute(packagePattern) ||
        // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/tarball-resolver.js#L15
        packagePattern.startsWith('http://') ||
        packagePattern.startsWith('https://') ||
        (packagePattern.indexOf('@') < 0 && (packagePattern.endsWith('.tgz') || packagePattern.endsWith('.tar.gz'))) ||
        // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/github-resolver.js#L6
        packagePattern.startsWith('github:') ||
        /^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(packagePattern) ||
        // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/gitlab-resolver.js#L6
        packagePattern.startsWith('gitlab:') ||
        // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/bitbucket-resolver.js#L6
        packagePattern.startsWith('bitbucket:') ||
        // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/gist-resolver.js#L26
        packagePattern.startsWith('gist:') ||
        // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/git-resolver.js#L19
        /^git:|^git\+.+:|^ssh:|^https?:.+\.git$|^https?:.+\.git#.+/.test(packagePattern)) {
        return true;
    }
    // Bare host URLs like "github.com/user/repo" are treated as git shorthand.
    // NOTE(review): url.parse is a legacy Node API; kept because new URL()
    // throws on protocol-less strings, which would change behavior here.
    // See https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/git-resolver.js#L19
    const { hostname, path } = url.parse(packagePattern);
    if (hostname && path && ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org'].indexOf(hostname) >= 0) {
        return path.split('/').filter((p) => !!p).length === 2;
    }
    // Fixed: the original fell through without a return statement here,
    // yielding undefined; an explicit false keeps the contract boolean.
    return false;
}
/**
 * Extracts the package name from a "npm:<name>@<range>" or
 * "yarn:<name>@<range>" registry pattern. Regex taken from yarn's resolver:
 * https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/resolvers/exotics/registry-resolver.js#L12
 * @param {string} packagePattern - Pattern of the form "<registry>:<name>[@<range>]".
 * @returns {string} The package name (may be @scoped).
 * @throws {Error} When the pattern does not match the expected format.
 */
function resolvePackageNameFromRegistryYarnPackagePattern(packagePattern) {
    const match = packagePattern.match(/^(\S+):(@?.*?)(@(.*?)|)$/);
    if (!match) {
        throw new Error(`Failed to resolve yarn package pattern ${packagePattern}, unrecognized format`);
    }
    return match[2];
}
/**
 * Maps raw yarn.lock entries to npm package descriptors ({id, name, version}),
 * dropping entries whose pattern points at a non-registry source
 * (links, files, tarballs, git repositories).
 * @param {{packagePattern: string, version: string}[]} yarnLockDependencies
 * @returns {{id: string, name: string, version: string}[]}
 */
function resolveNpmPackagesFromYarnLockDependencies(yarnLockDependencies) {
    const packages = [];
    for (const { packagePattern, version } of yarnLockDependencies) {
        if (isNonRegistryYarnPackagePattern(packagePattern)) {
            continue;
        }
        let packageName;
        if (packagePattern.startsWith('npm:') || packagePattern.startsWith('yarn:')) {
            packageName = resolvePackageNameFromRegistryYarnPackagePattern(packagePattern);
        }
        else {
            // Pattern not yet recognized, continue with parsing logic from
            // https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/package-request.js#L99
            const { name: namePart, range: rangePart } = normalize_yarn_pattern_1.normalizeYarnPackagePattern(packagePattern);
            if (isNonRegistryYarnPackagePattern(rangePart)) {
                continue;
            }
            if (rangePart.startsWith('npm:') || rangePart.startsWith('yarn:')) {
                packageName = resolvePackageNameFromRegistryYarnPackagePattern(rangePart);
            }
            else {
                // Finally, we just assume that the pattern is a registry pattern,
                // see https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/package-request.js#L119
                packageName = namePart;
            }
        }
        packages.push({ id: `${packageName}@${version}`, name: packageName, version });
    }
    return packages;
}
/**
 * Reads a package-lock.json file and flattens its dependency tree into
 * {id, name, version} descriptors.
 * @param {string} lockFilepath - Path to a package-lock.json file.
 * @param {boolean} includeDevDependencies - Also traverse devDependencies.
 * @returns {Promise<{id: string, name: string, version: string}[]>}
 */
async function parseDependenciesFromNpmLockFile(lockFilepath, includeDevDependencies) {
    const rawContents = await fs.promises.readFile(lockFilepath, 'utf8');
    const flattened = recurseNpmLockfileDependencies(JSON.parse(rawContents), includeDevDependencies);
    return flattened.map(({ name, version }) => ({ id: `${name}@${version}`, name, version }));
}
/**
 * Parses a yarn.lock (v1 format only) into npm package descriptors.
 * First streams the file line-by-line until it sees the "# yarn lockfile v1"
 * header (rejecting anything else that is not blank or a comment), then
 * re-reads the whole file and hands it to @yarnpkg/lockfile for parsing.
 * @param {string} lockFilepath - Path to a yarn.lock file.
 * @returns {Promise<{id: string, name: string, version: string}[]>}
 * @throws {Error} When the file is not a recognizable yarn v1 lockfile or parsing fails.
 */
async function parseDependenciesFromYarnLockFile(lockFilepath) {
const lockFileStream = fs.createReadStream(lockFilepath);
const lockFileReadlineInterface = readline.createInterface({
input: lockFileStream,
// Treat \r\n as a single line break so CRLF files scan correctly.
crlfDelay: Infinity
});
// Cheap format sniff: only scan up to the version marker, not the whole file.
for await (const line of lockFileReadlineInterface) {
// https://github.com/yarnpkg/yarn/blob/953c8b6a20e360b097625d64189e6e56ed813e0f/src/lockfile/stringify.js#L111
if (/# yarn lockfile v1\s*$/.test(line)) {
// lockfile version 1 recognized
break;
}
if (/^\s*$/.test(line) || /^\s*#/.test(line)) {
// skip empty or comment lines
continue;
}
// First non-comment line appeared before the v1 marker: not a supported lockfile.
throw new Error(`Failed to parse file ${lockFilepath} as yarn lockfile, unrecognized format, only version 1 is supported`);
}
// Release the partially-consumed stream before re-reading the file below.
lockFileStream.destroy();
const lockfileContents = await fs.promises.readFile(lockFilepath, 'utf8');
// yarnLockfile.parse returns { type, object } where object maps
// "pattern" -> locked entry; 'success' is the only acceptable status here.
const { type: lockfileParseStatus, object: packagePatternToLockedVersion } = yarnLockfile.parse(lockfileContents);
if (lockfileParseStatus !== 'success') {
throw new Error(`Failed to parse file ${lockFilepath} as yarn lockfile, parse status ${lockfileParseStatus}`);
}
const yarnLockDependencies = Object.entries(packagePatternToLockedVersion).map(([packagePattern, { version }]) => ({ packagePattern, version }));
return resolveNpmPackagesFromYarnLockDependencies(yarnLockDependencies);
}
/**
 * Entry point: parses a lockfile into a sorted, de-duplicated list of
 * {id, name, version} package descriptors. A basename of "yarn.lock" selects
 * the yarn parser; anything else is treated as an npm package-lock.json.
 * Note: includeDevDependencies is only consulted by the npm parser — the
 * yarn parser takes no such flag (see parseDependenciesFromYarnLockFile).
 * @param {string} path - Path to the lockfile.
 * @param {boolean} includeDevDependencies - Also include devDependencies (npm lockfiles).
 * @returns {Promise<{id: string, name: string, version: string}[]>}
 */
async function dependenciesFromPackageLock(path, includeDevDependencies) {
    // Fixed: the diff-merged source kept the obsolete npm-only body (which
    // called the removed dependenciesRecursive helper) ahead of an
    // unreachable copy of this dispatch logic; only the dispatch remains.
    const filename = pathLib.basename(path);
    const dependencies = filename === YARN_LOCK_FILENAME
        ? await parseDependenciesFromYarnLockFile(path)
        : await parseDependenciesFromNpmLockFile(path, includeDevDependencies);
    return dependencies.sort(sortById).filter(uniqueById);
}

@@ -81,3 +179,3 @@ exports.dependenciesFromPackageLock = dependenciesFromPackageLock;

}
function dependenciesRecursive({ dependencies }, includeDevDependencies) {
function recurseNpmLockfileDependencies({ dependencies }, includeDevDependencies) {
if (!dependencies) {

@@ -89,3 +187,3 @@ return [];

.filter(includeFn)
.map(([name, props]) => [{ name, version: props.version }].concat(dependenciesRecursive(props, includeDevDependencies)))
.map(([name, props]) => [{ name, version: props.version }].concat(recurseNpmLockfileDependencies(props, includeDevDependencies)))
.flat();

@@ -92,0 +190,0 @@ }

@@ -10,6 +10,13 @@ "use strict";

const newPackages = await resolve_1.dependenciesNotInCache(packages, cacheFilePath, options.prebuiltBinaryProperties);
await download_1.downloadAll(newPackages, options);
await resolve_1.updateDependenciesCache(newPackages, cacheFilePath, options.prebuiltBinaryProperties);
if (options.dryRun) {
console.log(newPackages.map(({ name, version }) => `${name}@${version}`).join('\n'));
console.log(`\nWould download ${newPackages.length} packages.`);
}
else {
await download_1.downloadAll(newPackages, options);
await resolve_1.updateDependenciesCache(newPackages, cacheFilePath, options.prebuiltBinaryProperties);
console.log(`Downloaded ${newPackages.length} packages`);
}
return newPackages;
}
exports.synchronize = synchronize;
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc