@mapbox/node-pre-gyp
Advanced tools
# Workflow: exercises node-pre-gyp's publish/install path against a real S3 bucket.
name: S3 Bucket Test
on:
  pull_request:
  push:
    branches:
      - master
  workflow_dispatch:
jobs:
  test-on-os-node-matrix:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        node: [20, 'lts/*']
    # Credentials and bucket name come from repository secrets; on forks these
    # are empty, which is why the S3 test step below is conditional.
    env:
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      S3_BUCKET: ${{ secrets.S3_BUCKET }}
    name: Test S3 Bucket - Node ${{ matrix.node }} on ${{ matrix.os }}
    steps:
      - name: Checkout ${{ github.ref }}
        uses: actions/checkout@v5
      - name: Setup node ${{ matrix.node }}
        uses: actions/setup-node@v6
        with:
          node-version: ${{ matrix.node }}
      - name: NPM Install
        run: npm install
      - name: Show Environment Info
        run: |
          printenv
          node --version
          npm --version
      # Skipped when the S3_BUCKET secret is not configured (e.g. fork PRs).
      - name: Run S3 Tests (against ${{ env.S3_BUCKET }} bucket)
        run: |
          npm run bucket ${{ env.S3_BUCKET }}
          npm run test:s3
        if: ${{ env.S3_BUCKET != '' }}
'use strict';

// Intercepts outgoing HTTP requests to S3 (via nock) and serves them from
// the on-disk tree that the mocked S3 client (mock-aws-s3) writes to.
module.exports = exports = http_mock;

const fs = require('fs');
const path = require('path');
const nock = require('nock');
const os = require('os');

const log = require('../util/log.js');
log.heading = 'node-pre-gyp'; // differentiate node-pre-gyp's logs from npm's

function http_mock() {
  log.warn('mocking http requests to s3');
  const mockRoot = `${os.tmpdir()}/mock`;

  // Map a request URI onto the file the s3 mock would have written and
  // stream it back; a file that is missing or unreadable becomes a 404.
  const replyFromDisk = (uri) => {
    const bucket = 'npg-mock-bucket';
    // URIs that do not already name the bucket are rooted underneath it.
    const mockDir = uri.indexOf(bucket) === -1 ? `${mockRoot}/${bucket}` : mockRoot;
    const filepath = path.join(mockDir, uri.replace(new RegExp('%2B', 'g'), '+'));
    try {
      fs.accessSync(filepath, fs.constants.R_OK);
    } catch (e) {
      return [404, 'not found\n'];
    }
    // mock s3 functions write to disk; return what is read from it.
    return [200, fs.createReadStream(filepath)];
  };

  // Match any (sub)domain of s3.us-east-1.amazonaws.com and answer every GET.
  nock(new RegExp('([a-z0-9]+[.])*s3[.]us-east-1[.]amazonaws[.]com'))
    .persist()
    .get(() => true) // a function that always returns true is a catch all for nock
    .reply(replyFromDisk);
}
'use strict';

// Drop-in stand-in for the AWS S3 client used by node-pre-gyp, backed by
// mock-aws-s3's on-disk file tree under the OS temp directory.
module.exports = exports = s3_mock;

const AWSMock = require('mock-aws-s3');
const os = require('os');

const log = require('../util/log.js');
log.heading = 'node-pre-gyp'; // differentiate node-pre-gyp's logs from npm's

function s3_mock() {
  log.warn('mocking s3 operations');
  AWSMock.config.basePath = `${os.tmpdir()}/mock`;
  const s3 = AWSMock.S3();

  // The mock is backed by fs, whose missing-file errors carry code ENOENT,
  // whereas the real AWS.S3 reports NotFound. Normalize the error code
  // before handing it to the caller's callback so consumers only ever see
  // AWS-shaped errors.
  function normalized(fn) {
    return (err, ...rest) => {
      if (err && err.code === 'ENOENT') {
        err.code = 'NotFound';
      }
      return fn(err, ...rest);
    };
  }

  // Expose only the subset of the S3 API that node-pre-gyp actually uses.
  return {
    listObjects(params, callback) {
      return s3.listObjects(params, normalized(callback));
    },
    headObject(params, callback) {
      return s3.headObject(params, normalized(callback));
    },
    deleteObject(params, callback) {
      return s3.deleteObject(params, normalized(callback));
    },
    putObject(params, callback) {
      return s3.putObject(params, normalized(callback));
    }
  };
}
@@ -14,10 +14,9 @@ # https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs | ||
| fail-fast: false | ||
| max-parallel: 9 | ||
| matrix: | ||
| node-version: ['18.x', '20.x', '22.x'] | ||
| os: [macos-latest, ubuntu-latest, windows-latest] | ||
| node-version: ['20.x', '24.x', 'lts/*'] | ||
| os: [macos-latest, ubuntu-latest, ubuntu-24.04-arm, windows-latest] | ||
| runs-on: ${{ matrix.os }} | ||
| steps: | ||
| - uses: actions/checkout@v4 | ||
| - uses: actions/setup-node@v4 | ||
| - uses: actions/checkout@v5 | ||
| - uses: actions/setup-node@v6 | ||
| with: | ||
@@ -24,0 +23,0 @@ node-version: ${{ matrix.node-version }} |
@@ -41,7 +41,7 @@ # For most projects, this workflow file will not need changing; you simply need | ||
| - name: Checkout repository | ||
| uses: actions/checkout@v4 | ||
| uses: actions/checkout@v5 | ||
| # Initializes the CodeQL tools for scanning. | ||
| - name: Initialize CodeQL | ||
| uses: github/codeql-action/init@v3 | ||
| uses: github/codeql-action/init@v4 | ||
| with: | ||
@@ -60,3 +60,3 @@ languages: ${{ matrix.language }} | ||
| - name: Autobuild | ||
| uses: github/codeql-action/autobuild@v3 | ||
| uses: github/codeql-action/autobuild@v4 | ||
@@ -74,4 +74,4 @@ # ℹ️ Command-line programs to run using the OS shell. | ||
| - name: Perform CodeQL Analysis | ||
| uses: github/codeql-action/analyze@v3 | ||
| uses: github/codeql-action/analyze@v4 | ||
| with: | ||
| category: "/language:${{matrix.language}}" |
@@ -16,5 +16,5 @@ name: release | ||
| steps: | ||
| - uses: actions/checkout@v4 | ||
| - uses: actions/checkout@v5 | ||
| - uses: actions/setup-node@v4 | ||
| - uses: actions/setup-node@v6 | ||
| with: | ||
@@ -47,5 +47,5 @@ node-version: 22 | ||
| steps: | ||
| - uses: actions/checkout@v4 | ||
| - uses: actions/checkout@v5 | ||
| - uses: actions/setup-node@v4 | ||
| - uses: actions/setup-node@v6 | ||
| with: | ||
@@ -52,0 +52,0 @@ node-version: 22 |
+5
-0
| # node-pre-gyp changelog | ||
| ## master | ||
| ## 2.0.1 | ||
| - Update abi_crosswalk.json for abi 137 / node 24 (https://github.com/mapbox/node-pre-gyp/pull/904) | ||
| ## 2.0.0 | ||
@@ -4,0 +9,0 @@ - Supported Node versions are now stable versions of Node 18+. We will attempt to track the [Node.js release schedule](https://github.com/nodejs/release#release-schedule) and will regularly retire support for versions that have reached EOL. |
+73
-0
@@ -13,2 +13,4 @@ 'use strict'; | ||
| const napi = require('./util/napi.js'); | ||
| const s3_setup = require('./util/s3_setup.js'); | ||
| const url = require('url'); | ||
| // for fetching binaries | ||
@@ -27,2 +29,61 @@ const fetch = require('node-fetch'); | ||
// Download the binary from S3 using authenticated (credentialed) access.
// Used as a fallback when the plain HTTPS GET of the binary returns 403,
// which usually means the object was published with a non-public ACL.
//
// opts      - node-pre-gyp options (must carry enough for s3_setup.detect)
// targetDir - directory the tarball is extracted into
// callback  - invoked with (err) on failure, or with no arguments on success
function place_binary_authenticated(opts, targetDir, callback) {
  log.info('install', 'Attempting authenticated S3 download');
  // Both credential variables are required to sign the request. The original
  // check used `&&`, which let the code proceed when exactly one variable was
  // set and fail later with a far less actionable error; require both here.
  if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) {
    const err = new Error('Binary is private but AWS credentials not found. Please configure AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables, or use --fallback-to-build to compile from source.');
    err.statusCode = 403;
    return callback(err);
  }
  try {
    const config = s3_setup.detect(opts);
    const s3 = s3_setup.get_s3(config);
    // url.resolve is legacy, but it matches how keys are built elsewhere in
    // this file, so keep it for consistent key construction.
    const key_name = url.resolve(config.prefix, opts.package_name);
    log.info('install', 'Downloading from S3:', config.bucket, key_name);
    const s3_opts = {
      Bucket: config.bucket,
      Key: key_name
    };
    s3.getObject(s3_opts, (err, data) => {
      if (err) {
        log.error('install', 'Authenticated S3 download failed:', err.message);
        return callback(err);
      }
      log.info('install', 'Authenticated download successful, extracting...');
      const { Readable } = require('stream');
      const dataStream = Readable.from(data.Body);
      let extractions = 0;
      // Count entries as the tar extractor unpacks them (logging only).
      const countExtractions = (entry) => {
        extractions += 1;
        log.info('install', `unpacking ${entry.path}`);
      };
      dataStream.pipe(extract(targetDir, countExtractions))
        .on('error', (e) => {
          callback(e);
        })
        .on('close', () => {
          log.info('install', `extracted file count: ${extractions}`);
          callback();
        });
    });
  } catch (e) {
    // s3_setup.get_s3 lazily requires aws-sdk; translate a missing-module
    // failure into the same actionable 403-style error callers handle.
    if (e.code === 'MODULE_NOT_FOUND' && e.message.includes('aws-sdk')) {
      const err = new Error('Binary is private and requires aws-sdk for authenticated download. Please run: npm install aws-sdk');
      err.statusCode = 403;
      return callback(err);
    }
    log.error('install', 'Error setting up authenticated download:', e.message);
    callback(e);
  }
}
| function place_binary(uri, targetDir, opts, callback) { | ||
@@ -68,2 +129,7 @@ log.log('GET', uri); | ||
| if (!res.ok) { | ||
| // If we get 403 Forbidden, the binary might be private - try authenticated download | ||
| if (res.status === 403) { | ||
| log.info('install', 'Received 403 Forbidden - attempting authenticated download'); | ||
| return place_binary_authenticated(opts, targetDir, callback); | ||
| } | ||
| throw new Error(`response status ${res.status} ${res.statusText} on ${sanitized}`); | ||
@@ -238,1 +304,8 @@ } | ||
| } | ||
| // setting an environment variable: node_pre_gyp_mock_s3 to any value | ||
| // enables intercepting outgoing http requests to s3 (using nock) and | ||
| // serving them from a mocked S3 file system (using mock-aws-s3) | ||
| if (process.env.node_pre_gyp_mock_s3) { | ||
| require('./mock/http')(); | ||
| } |
+2
-12
@@ -13,9 +13,2 @@ 'use strict'; | ||
| // load mocking control function for accessing s3 via https. the function is a noop always returning | ||
| // false if not mocking. | ||
| exports.mockS3Http = require('./util/s3_setup').get_mockS3Http(); | ||
| exports.mockS3Http('on'); | ||
| const mocking = exports.mockS3Http('get'); | ||
| const fs = require('fs'); | ||
@@ -46,6 +39,2 @@ const path = require('path'); | ||
| if (mocking) { | ||
| log.warn(`mocking s3 to ${process.env.node_pre_gyp_mock_s3}`); | ||
| } | ||
| // this is a getter to avoid circular reference warnings with node v14. | ||
@@ -114,3 +103,4 @@ Object.defineProperty(exports, 'find', { | ||
| proxy: String, // 'install' | ||
| loglevel: String // everywhere | ||
| loglevel: String, // everywhere | ||
| acl: String // 'publish' - S3 ACL for published binaries | ||
| }; | ||
@@ -117,0 +107,0 @@ |
+3
-2
@@ -46,3 +46,3 @@ 'use strict'; | ||
| const s3_put_opts = { | ||
| ACL: 'public-read', | ||
| ACL: opts.acl, | ||
| Body: fs.createReadStream(tarball), | ||
@@ -52,3 +52,4 @@ Key: key_name, | ||
| }; | ||
| log.info('publish', 'Putting object', s3_put_opts.ACL, s3_put_opts.Bucket, s3_put_opts.Key); | ||
| log.info('publish', 'Putting object with ACL:', s3_put_opts.ACL); | ||
| log.info('publish', 'Bucket:', s3_put_opts.Bucket, 'Key:', s3_put_opts.Key); | ||
| try { | ||
@@ -55,0 +56,0 @@ s3.putObject(s3_put_opts, (err2, resp) => { |
@@ -2930,2 +2930,10 @@ { | ||
| }, | ||
| "18.20.7": { | ||
| "node_abi": 108, | ||
| "v8": "10.2" | ||
| }, | ||
| "18.20.8": { | ||
| "node_abi": 108, | ||
| "v8": "10.2" | ||
| }, | ||
| "19.0.0": { | ||
@@ -3103,2 +3111,14 @@ "node_abi": 111, | ||
| }, | ||
| "20.18.3": { | ||
| "node_abi": 115, | ||
| "v8": "11.3" | ||
| }, | ||
| "20.19.0": { | ||
| "node_abi": 115, | ||
| "v8": "11.3" | ||
| }, | ||
| "20.19.1": { | ||
| "node_abi": 115, | ||
| "v8": "11.3" | ||
| }, | ||
| "21.0.0": { | ||
@@ -3224,2 +3244,10 @@ "node_abi": 120, | ||
| }, | ||
| "22.14.0": { | ||
| "node_abi": 127, | ||
| "v8": "12.4" | ||
| }, | ||
| "22.15.0": { | ||
| "node_abi": 127, | ||
| "v8": "12.4" | ||
| }, | ||
| "23.0.0": { | ||
@@ -3256,3 +3284,27 @@ "node_abi": 131, | ||
| "v8": "12.9" | ||
| }, | ||
| "23.7.0": { | ||
| "node_abi": 131, | ||
| "v8": "12.9" | ||
| }, | ||
| "23.8.0": { | ||
| "node_abi": 131, | ||
| "v8": "12.9" | ||
| }, | ||
| "23.9.0": { | ||
| "node_abi": 131, | ||
| "v8": "12.9" | ||
| }, | ||
| "23.10.0": { | ||
| "node_abi": 131, | ||
| "v8": "12.9" | ||
| }, | ||
| "23.11.0": { | ||
| "node_abi": 131, | ||
| "v8": "12.9" | ||
| }, | ||
| "24.0.0": { | ||
| "node_abi": 137, | ||
| "v8": "13.6" | ||
| } | ||
| } | ||
| } |
+4
-102
@@ -6,4 +6,2 @@ 'use strict'; | ||
| const url = require('url'); | ||
| const fs = require('fs'); | ||
| const path = require('path'); | ||
@@ -64,36 +62,7 @@ module.exports.detect = function(opts) { | ||
| module.exports.get_s3 = function(config) { | ||
| // setting an environment variable: node_pre_gyp_mock_s3 to any value | ||
| // enables intercepting outgoing http requests to s3 (using nock) and | ||
| // serving them from a mocked S3 file system (using mock-aws-s3) | ||
| if (process.env.node_pre_gyp_mock_s3) { | ||
| // here we're mocking. node_pre_gyp_mock_s3 is the scratch directory | ||
| // for the mock code. | ||
| const AWSMock = require('mock-aws-s3'); | ||
| const os = require('os'); | ||
| AWSMock.config.basePath = `${os.tmpdir()}/mock`; | ||
| const s3 = AWSMock.S3(); | ||
| // wrapped callback maker. fs calls return code of ENOENT but AWS.S3 returns | ||
| // NotFound. | ||
| const wcb = (fn) => (err, ...args) => { | ||
| if (err && err.code === 'ENOENT') { | ||
| err.code = 'NotFound'; | ||
| } | ||
| return fn(err, ...args); | ||
| }; | ||
| return { | ||
| listObjects(params, callback) { | ||
| return s3.listObjects(params, wcb(callback)); | ||
| }, | ||
| headObject(params, callback) { | ||
| return s3.headObject(params, wcb(callback)); | ||
| }, | ||
| deleteObject(params, callback) { | ||
| return s3.deleteObject(params, wcb(callback)); | ||
| }, | ||
| putObject(params, callback) { | ||
| return s3.putObject(params, wcb(callback)); | ||
| } | ||
| }; | ||
| return require('../mock/s3')(); | ||
| } | ||
@@ -122,69 +91,2 @@ | ||
| }; | ||
| }; | ||
| // | ||
| // function to get the mocking control function. if not mocking it returns a no-op. | ||
| // | ||
| // if mocking it sets up the mock http interceptors that use the mocked s3 file system | ||
| // to fulfill responses. | ||
| module.exports.get_mockS3Http = function() { | ||
| let mock_s3 = false; | ||
| if (!process.env.node_pre_gyp_mock_s3) { | ||
| return () => mock_s3; | ||
| } | ||
| const nock = require('nock'); | ||
| // the bucket used for testing, as addressed by https. | ||
| const host = 'https://mapbox-node-pre-gyp-public-testing-bucket.s3.us-east-1.amazonaws.com'; | ||
| const mockDir = process.env.node_pre_gyp_mock_s3 + '/mapbox-node-pre-gyp-public-testing-bucket'; | ||
| // function to setup interceptors. they are "turned off" by setting mock_s3 to false. | ||
| const mock_http = () => { | ||
| // eslint-disable-next-line no-unused-vars | ||
| function get(uri, requestBody) { | ||
| const filepath = path.join(mockDir, uri.replace('%2B', '+')); | ||
| try { | ||
| fs.accessSync(filepath, fs.constants.R_OK); | ||
| } catch (e) { | ||
| return [404, 'not found\n']; | ||
| } | ||
| // the mock s3 functions just write to disk, so just read from it. | ||
| return [200, fs.createReadStream(filepath)]; | ||
| } | ||
| // eslint-disable-next-line no-unused-vars | ||
| return nock(host) | ||
| .persist() | ||
| .get(() => mock_s3) // mock any uri for s3 when true | ||
| .reply(get); | ||
| }; | ||
| // setup interceptors. they check the mock_s3 flag to determine whether to intercept. | ||
| mock_http(nock, host, mockDir); | ||
| // function to turn matching all requests to s3 on/off. | ||
| const mockS3Http = (action) => { | ||
| const previous = mock_s3; | ||
| if (action === 'off') { | ||
| mock_s3 = false; | ||
| } else if (action === 'on') { | ||
| mock_s3 = true; | ||
| } else if (action !== 'get') { | ||
| throw new Error(`illegal action for setMockHttp ${action}`); | ||
| } | ||
| return previous; | ||
| }; | ||
| // call mockS3Http with the argument | ||
| // - 'on' - turn it on | ||
| // - 'off' - turn it off (used by fetch.test.js so it doesn't interfere with redirects) | ||
| // - 'get' - return true or false for 'on' or 'off' | ||
| return mockS3Http; | ||
| }; | ||
@@ -310,7 +310,8 @@ 'use strict'; | ||
| region: package_json.binary.region, | ||
| s3ForcePathStyle: package_json.binary.s3ForcePathStyle || false | ||
| s3ForcePathStyle: package_json.binary.s3ForcePathStyle || false, | ||
| acl: options.acl || package_json.binary.acl || 'public-read' | ||
| }; | ||
| // support host mirror with npm config `--{module_name}_binary_host_mirror` | ||
| // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25 | ||
| // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ | ||
| // > npm install v8-profiler --profiler_binary_host_mirror=https://registry.npmmirror.com/node-inspector/ | ||
| const validModuleName = opts.module_name.replace('-', '_'); | ||
@@ -317,0 +318,0 @@ const host = process.env['npm_config_' + validModuleName + '_binary_host_mirror'] || package_json.binary.host; |
+10
-5
| { | ||
| "name": "@mapbox/node-pre-gyp", | ||
| "description": "Node.js native addon binary install tool", | ||
| "version": "2.0.0", | ||
| "version": "2.0.2-dev", | ||
| "keywords": [ | ||
@@ -30,3 +30,3 @@ "native", | ||
| "node-fetch": "^2.6.7", | ||
| "nopt": "^8.0.0", | ||
| "nopt": "^9.0.0", | ||
| "semver": "^7.5.3", | ||
@@ -36,3 +36,3 @@ "tar": "^7.4.0" | ||
| "devDependencies": { | ||
| "@mapbox/cloudfriend": "^8.1.0", | ||
| "@mapbox/cloudfriend": "^9.0.0", | ||
| "@mapbox/eslint-config-mapbox": "^5.0.1", | ||
@@ -48,3 +48,3 @@ "aws-sdk": "^2.1087.0", | ||
| "tape": "^5.5.2", | ||
| "tar-fs": "^3.0.6" | ||
| "tar-fs": "^3.1.1" | ||
| }, | ||
@@ -64,4 +64,9 @@ "nyc": { | ||
| "update-crosswalk": "node scripts/abi_crosswalk.js", | ||
| "test": "tape test/*test.js" | ||
| "test": "tape test/*test.js", | ||
| "test:s3": "tape test/s3.test.js", | ||
| "bucket": "node scripts/set-bucket.js" | ||
| }, | ||
| "overrides": { | ||
| "js-yaml": "^3.14.2" | ||
| } | ||
| } |
+30
-2
@@ -88,2 +88,3 @@ # @mapbox/node-pre-gyp | ||
| - `--target_platform=win32`: Pass the target platform and override the host `platform`. Valid values are `linux`, `darwin`, `win32`, `sunos`, `freebsd`, `openbsd`, and `aix`. | ||
| - `--acl=<acl>`: Set the S3 ACL when publishing binaries (e.g., `public-read`, `private`). Overrides the `binary.acl` setting in package.json. | ||
@@ -189,2 +190,29 @@ Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. You can pass `--fallback-to-build=false` to override the option as declared in package.json. In addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module. | ||
| ###### acl | ||
| The S3 Access Control List (ACL) to apply when publishing binaries. Defaults to `'public-read'` for backward compatibility. Common values include: | ||
| - `public-read` - (default) Binary is publicly accessible by anyone | ||
| - `private` - Binary requires AWS credentials to download | ||
| - `authenticated-read` - Any authenticated AWS user can download | ||
| - `bucket-owner-read` - Bucket owner gets READ access | ||
| - `bucket-owner-full-control` - Bucket owner gets FULL_CONTROL access | ||
| **For private binaries:** | ||
| - Users installing your package will need AWS credentials configured (AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables) | ||
| - The `aws-sdk` package must be available at install time | ||
| - If authentication fails, node-pre-gyp will fall back to building from source (if `--fallback-to-build` is specified) | ||
| You can also specify the ACL via command-line flag: `node-pre-gyp publish --acl=private` | ||
| Example for private binaries: | ||
| ```json | ||
| "binary": { | ||
| "module_name": "your_module", | ||
| "module_path": "./lib/binding/", | ||
| "host": "https://your-bucket.s3.us-east-1.amazonaws.com", | ||
| "acl": "private" | ||
| } | ||
| ``` | ||
| ##### The `binary` object has optional properties | ||
@@ -739,3 +767,3 @@ | ||
| ```bash | ||
| $ npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ | ||
| $ npm install v8-profiler --profiler_binary_host_mirror=https://registry.npmmirror.com/node-inspector/ | ||
| ``` | ||
@@ -746,3 +774,3 @@ | ||
| ```bash | ||
| $ npm install canvas-prebuilt --canvas_prebuilt_binary_host_mirror=https://npm.taobao.org/mirrors/canvas-prebuilt/ | ||
| $ npm install canvas-prebuilt --canvas_prebuilt_binary_host_mirror=https://registry.npmmirror.com/canvas-prebuilt/ | ||
| ``` |
| - Supported Node versions are now stable versions of Node 18+. We will attempt to track the [Node.js release schedule](https://github.com/nodejs/release#release-schedule) and will regularly retire support for versions that have reached EOL. | ||
| - Fixed use of `s3ForcePathStyle` for installation [#650](https://github.com/mapbox/node-pre-gyp/pull/650) | ||
| - Upgraded to https-proxy-agent 7.0.5, nopt 8.0.0, semver 7.5.3, and tar 7.4.0 | ||
| - Replaced npmlog with consola | ||
| - Removed rimraf and make-dir as dependencies | ||
Network access
Supply chain risk: This module accesses the network.
Found 2 instances in 1 package
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
Shell access
Supply chain risk: This module accesses the system shell. Accessing the system shell increases the risk of executing arbitrary code.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 12 instances in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
Long strings
Supply chain risk: Contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
No v1
Quality: Package is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package
Network access
Supply chain risk: This module accesses the network.
Found 2 instances in 1 package
Shell access
Supply chain risk: This module accesses the system shell. Accessing the system shell increases the risk of executing arbitrary code.
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 10 instances in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
Long strings
Supply chain risk: Contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
192159
2.5%41
5.13%5293
1.81%773
3.76%2
100%+ Added
+ Added
- Removed
- Removed
Updated