node-datachannel - npm Package Compare versions

Comparing version 0.2.3 to 0.2.4

lib/datachannel-stream.js


CMakeLists.txt
cmake_minimum_required(VERSION 3.15)
cmake_policy(SET CMP0091 NEW)
project(node_datachannel VERSION 0.2.3)
project(node_datachannel VERSION 0.2.4)

@@ -30,3 +30,3 @@ include_directories(${CMAKE_JS_INC})

GIT_REPOSITORY https://github.com/paullouisageneau/libdatachannel.git
GIT_TAG "v0.16.5"
GIT_TAG "v0.16.6"
)

@@ -33,0 +33,0 @@

@@ -0,1 +1,3 @@

import * as stream from 'stream';
export as namespace NodeDataChannel;

@@ -223,1 +225,9 @@

}
export class DataChannelStream extends stream.Duplex {
constructor(
rawChannel: DataChannel,
options?: Omit<stream.DuplexOptions, 'objectMode'>
);
get label(): string;
}
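The new `DataChannelStream` wrapper exposes a data channel as a standard Node.js `Duplex` stream. A minimal sketch of how a consumer might use it, based on the 0.2.4 API exercised by the echo-pipeline test added later in this diff:

```js
const nodeDataChannel = require('node-datachannel');

const peer = new nodeDataChannel.PeerConnection('Echo', { iceServers: [] });
const channelStream = new nodeDataChannel.DataChannelStream(
  peer.createDataChannel('echo-channel')
);

// Because it is a Duplex stream, it pipes like any other Node.js stream;
// here every received message is written straight back to the sender.
channelStream.pipe(channelStream);
```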
const nodeDataChannel = require("../build/Release/node_datachannel.node");
module.exports = nodeDataChannel;
module.exports = nodeDataChannel;
module.exports.DataChannelStream = require('./datachannel-stream');
/// <reference types="node"/>
import {IncomingMessage} from 'http';
-declare const decompressResponse: {
-	/**
-	Decompress a HTTP response if needed.
-
-	@param response - The HTTP incoming stream with compressed data.
-	@returns The decompressed HTTP response stream.
-
-	@example
-	```
-	import {http} from 'http';
-	import decompressResponse = require('decompress-response');
-
-	http.get('https://sindresorhus.com', response => {
-		response = decompressResponse(response);
-	});
-	```
-	*/
-	(response: IncomingMessage): IncomingMessage;
-
-	// TODO: remove this in the next major version, refactor the whole definition to:
-	// declare function decompressResponse(response: IncomingMessage): IncomingMessage;
-	// export = decompressResponse;
-	default: typeof decompressResponse;
-};
+/**
+Decompress a HTTP response if needed.
+
+@param response - The HTTP incoming stream with compressed data.
+@returns The decompressed HTTP response stream.
+
+@example
+```
+import {http} from 'http';
+import decompressResponse = require('decompress-response');
+
+http.get('https://sindresorhus.com', response => {
+	response = decompressResponse(response);
+});
+```
+*/
+declare function decompressResponse(response: IncomingMessage): IncomingMessage;
export = decompressResponse;
'use strict';
-const {PassThrough: PassThroughStream} = require('stream');
+const {Transform, PassThrough} = require('stream');
const zlib = require('zlib');
const mimicResponse = require('mimic-response');
-const decompressResponse = response => {
+module.exports = response => {
	const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase();

@@ -13,29 +13,47 @@

	// TODO: Remove this when targeting Node.js 12.
	const isBrotli = contentEncoding === 'br';
	if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') {
		response.destroy(new Error('Brotli is not supported on Node.js < 12'));
		return response;
	}
-	const decompress = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip();
-	const stream = new PassThroughStream();
-
-	mimicResponse(response, stream);
-
-	decompress.on('error', error => {
-		// Ignore empty response
-		if (error.code === 'Z_BUF_ERROR') {
-			stream.end();
-			return;
-		}
-
-		stream.emit('error', error);
-	});
-
-	response.pipe(decompress).pipe(stream);
-
-	return stream;
+	let isEmpty = true;
+
+	const checker = new Transform({
+		transform(data, _encoding, callback) {
+			isEmpty = false;
+			callback(null, data);
+		},
+		flush(callback) {
+			callback();
+		}
+	});
+
+	const finalStream = new PassThrough({
+		autoDestroy: false,
+		destroy(error, callback) {
+			response.destroy();
+			callback(error);
+		}
+	});
+
+	const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip();
+
+	decompressStream.once('error', error => {
+		if (isEmpty && !response.readable) {
+			finalStream.end();
+			return;
+		}
+
+		finalStream.destroy(error);
+	});
+
+	mimicResponse(response, finalStream);
+	response.pipe(checker).pipe(decompressStream).pipe(finalStream);
+
+	return finalStream;
};
-
-module.exports = decompressResponse;
-// TODO: remove this in the next major version
-module.exports.default = decompressResponse;
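Both the old and the new implementation special-case an empty compressed body, because ending a zlib stream that never received any input raises an error instead of finishing cleanly: 4.2.1 matched `error.code === 'Z_BUF_ERROR'`, while 6.0.0 tracks whether any bytes passed through the `checker` transform and simply ends `finalStream`. A small sketch of the underlying zlib behaviour this guards against (using only Node's built-in `zlib`):

```js
const zlib = require('zlib');

const gunzip = zlib.createGunzip();
gunzip.on('error', (error) => {
  // With no input at all this typically fires with code 'Z_BUF_ERROR'
  // ("unexpected end of file"), the exact case both versions of
  // decompress-response treat as an empty response.
  console.log(error.code, error.message);
});
gunzip.resume();
gunzip.end(); // end the stream without writing any compressed data
```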
{
"_args": [
[
"decompress-response@4.2.1",
"decompress-response@6.0.0",
"/home/runner/work/node-datachannel/node-datachannel"
]
],
"_from": "decompress-response@4.2.1",
"_id": "decompress-response@4.2.1",
"_from": "decompress-response@6.0.0",
"_id": "decompress-response@6.0.0",
"_inBundle": false,
"_integrity": "sha512-jOSne2qbyE+/r8G1VU+G/82LBs2Fs4LAsTiLSHOCOMZQl2OKZ6i8i4IyHemTe+/yIXOtTcRQMzPcgyhoFlqPkw==",
"_integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==",
"_location": "/decompress-response",

@@ -17,8 +17,8 @@ "_phantomChildren": {},

"registry": true,
"raw": "decompress-response@4.2.1",
"raw": "decompress-response@6.0.0",
"name": "decompress-response",
"escapedName": "decompress-response",
"rawSpec": "4.2.1",
"rawSpec": "6.0.0",
"saveSpec": null,
"fetchSpec": "4.2.1"
"fetchSpec": "6.0.0"
},

@@ -28,4 +28,4 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-4.2.1.tgz",
"_spec": "4.2.1",
"_resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz",
"_spec": "6.0.0",
"_where": "/home/runner/work/node-datachannel/node-datachannel",

@@ -35,3 +35,3 @@ "author": {

"email": "sindresorhus@gmail.com",
"url": "sindresorhus.com"
"url": "https://sindresorhus.com"
},

@@ -42,15 +42,15 @@ "bugs": {

"dependencies": {
"mimic-response": "^2.0.0"
"mimic-response": "^3.1.0"
},
"description": "Decompress a HTTP response if needed",
"devDependencies": {
"@types/node": "^12.7.1",
"@types/node": "^14.0.1",
"ava": "^2.2.0",
"get-stream": "^5.0.0",
"pify": "^4.0.1",
"tsd": "^0.7.1",
"xo": "^0.24.0"
"pify": "^5.0.0",
"tsd": "^0.11.0",
"xo": "^0.30.0"
},
"engines": {
"node": ">=8"
"node": ">=10"
},

@@ -61,2 +61,3 @@ "files": [

],
"funding": "https://github.com/sponsors/sindresorhus",
"homepage": "https://github.com/sindresorhus/decompress-response#readme",

@@ -89,3 +90,8 @@ "keywords": [

},
"version": "4.2.1"
"version": "6.0.0",
"xo": {
"rules": {
"@typescript-eslint/prefer-readonly-parameter-types": "off"
}
}
}

@@ -1,2 +0,2 @@

# decompress-response [![Build Status](https://travis-ci.org/sindresorhus/decompress-response.svg?branch=master)](https://travis-ci.org/sindresorhus/decompress-response)
# decompress-response [![Build Status](https://travis-ci.com/sindresorhus/decompress-response.svg?branch=master)](https://travis-ci.com/sindresorhus/decompress-response)

@@ -9,3 +9,2 @@ > Decompress a HTTP response if needed

## Install

@@ -17,3 +16,2 @@

## Usage

@@ -30,3 +28,2 @@

## API

@@ -44,3 +41,2 @@

---

@@ -50,3 +46,3 @@

<b>
<a href="https://tidelift.com/subscription/pkg/npm-unzip-response?utm_source=npm-unzip-response&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
<a href="https://tidelift.com/subscription/pkg/npm-decompress-response?utm_source=npm-decompress-response&utm_medium=referral&utm_campaign=readme">Get professional support for this package with a Tidelift subscription</a>
</b>

@@ -53,0 +49,0 @@ <br>

'use strict';
var platform = require('os').platform();
var spawnSync = require('child_process').spawnSync;
var readdirSync = require('fs').readdirSync;
const childProcess = require('child_process');
const { isLinux, getReport } = require('./process');
var GLIBC = 'glibc';
var MUSL = 'musl';
const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true';
let commandOut = '';
var spawnOptions = {
encoding: 'utf8',
env: process.env
const safeCommand = () => {
if (!commandOut) {
return new Promise((resolve) => {
childProcess.exec(command, (err, out) => {
commandOut = err ? ' ' : out;
resolve(commandOut);
});
});
}
return commandOut;
};
if (!spawnSync) {
spawnSync = function () {
return { status: 126, stdout: '', stderr: '' };
};
}
const safeCommandSync = () => {
if (!commandOut) {
try {
commandOut = childProcess.execSync(command, { encoding: 'utf8' });
} catch (_err) {
commandOut = ' ';
}
}
return commandOut;
};
function contains (needle) {
return function (haystack) {
return haystack.indexOf(needle) !== -1;
};
}
/**
* A String constant containing the value `glibc`.
* @type {string}
* @public
*/
const GLIBC = 'glibc';
function versionFromMuslLdd (out) {
return out.split(/[\r\n]+/)[1].trim().split(/\s/)[1];
}
/**
* A String constant containing the value `musl`.
* @type {string}
* @public
*/
const MUSL = 'musl';
function safeReaddirSync (path) {
try {
return readdirSync(path);
} catch (e) {}
return [];
}
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-');
var family = '';
var version = '';
var method = '';
const familyFromReport = () => {
const report = getReport();
if (report.header && report.header.glibcVersionRuntime) {
return GLIBC;
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return MUSL;
}
}
return null;
};
if (platform === 'linux') {
// Try getconf
var glibc = spawnSync('getconf', ['GNU_LIBC_VERSION'], spawnOptions);
if (glibc.status === 0) {
family = GLIBC;
version = glibc.stdout.trim().split(' ')[1];
method = 'getconf';
} else {
// Try ldd
var ldd = spawnSync('ldd', ['--version'], spawnOptions);
if (ldd.status === 0 && ldd.stdout.indexOf(MUSL) !== -1) {
family = MUSL;
version = versionFromMuslLdd(ldd.stdout);
method = 'ldd';
} else if (ldd.status === 1 && ldd.stderr.indexOf(MUSL) !== -1) {
family = MUSL;
version = versionFromMuslLdd(ldd.stderr);
method = 'ldd';
} else {
// Try filesystem (family only)
var lib = safeReaddirSync('/lib');
if (lib.some(contains('-linux-gnu'))) {
family = GLIBC;
method = 'filesystem';
} else if (lib.some(contains('libc.musl-'))) {
family = MUSL;
method = 'filesystem';
} else if (lib.some(contains('ld-musl-'))) {
family = MUSL;
method = 'filesystem';
} else {
var usrSbin = safeReaddirSync('/usr/sbin');
if (usrSbin.some(contains('glibc'))) {
family = GLIBC;
method = 'filesystem';
}
}
const familyFromCommand = (out) => {
const [getconf, ldd1] = out.split(/[\r\n]+/);
if (getconf && getconf.includes(GLIBC)) {
return GLIBC;
}
if (ldd1 && ldd1.includes(MUSL)) {
return MUSL;
}
return null;
};
/**
* Resolves with the libc family when it can be determined, `null` otherwise.
* @returns {Promise<?string>}
*/
const family = async () => {
let family = null;
if (isLinux()) {
family = familyFromReport();
if (!family) {
const out = await safeCommand();
family = familyFromCommand(out);
}
}
}
return family;
};
var isNonGlibcLinux = (family !== '' && family !== GLIBC);
/**
* Returns the libc family when it can be determined, `null` otherwise.
* @returns {?string}
*/
const familySync = () => {
let family = null;
if (isLinux()) {
family = familyFromReport();
if (!family) {
const out = safeCommandSync();
family = familyFromCommand(out);
}
}
return family;
};
/**
* Resolves `true` only when the platform is Linux and the libc family is not `glibc`.
* @returns {Promise<boolean>}
*/
const isNonGlibcLinux = async () => isLinux() && await family() !== GLIBC;
/**
* Returns `true` only when the platform is Linux and the libc family is not `glibc`.
* @returns {boolean}
*/
const isNonGlibcLinuxSync = () => isLinux() && familySync() !== GLIBC;
const versionFromReport = () => {
const report = getReport();
if (report.header && report.header.glibcVersionRuntime) {
return report.header.glibcVersionRuntime;
}
return null;
};
const versionSuffix = (s) => s.trim().split(/\s+/)[1];
const versionFromCommand = (out) => {
const [getconf, ldd1, ldd2] = out.split(/[\r\n]+/);
if (getconf && getconf.includes(GLIBC)) {
return versionSuffix(getconf);
}
if (ldd1 && ldd2 && ldd1.includes(MUSL)) {
return versionSuffix(ldd2);
}
return null;
};
/**
* Resolves with the libc version when it can be determined, `null` otherwise.
* @returns {Promise<?string>}
*/
const version = async () => {
let version = null;
if (isLinux()) {
version = versionFromReport();
if (!version) {
const out = await safeCommand();
version = versionFromCommand(out);
}
}
return version;
};
/**
* Returns the libc version when it can be determined, `null` otherwise.
* @returns {?string}
*/
const versionSync = () => {
let version = null;
if (isLinux()) {
version = versionFromReport();
if (!version) {
const out = safeCommandSync();
version = versionFromCommand(out);
}
}
return version;
};
module.exports = {
GLIBC: GLIBC,
MUSL: MUSL,
family: family,
version: version,
method: method,
isNonGlibcLinux: isNonGlibcLinux
GLIBC,
MUSL,
family,
familySync,
isNonGlibcLinux,
isNonGlibcLinuxSync,
version,
versionSync
};
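The report-based fast path in the new implementation (`familyFromReport()` / `versionFromReport()`) reads Node's diagnostic report. Assuming the `getReport()` helper from `./process` wraps `process.report.getReport()`, the same fields can be inspected directly:

```js
// Fields the new detect-libc code checks, viewed directly (Linux, Node 12+).
const report = process.report ? process.report.getReport() : {};

// glibc systems expose the runtime version in the report header...
console.log(report.header && report.header.glibcVersionRuntime);

// ...while musl systems are recognised from the shared-object file names.
const sharedObjects = Array.isArray(report.sharedObjects) ? report.sharedObjects : [];
console.log(sharedObjects.some((f) => f.includes('libc.musl-') || f.includes('ld-musl-')));
```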
{
"_args": [
[
"detect-libc@1.0.3",
"detect-libc@2.0.1",
"/home/runner/work/node-datachannel/node-datachannel"
]
],
"_from": "detect-libc@1.0.3",
"_id": "detect-libc@1.0.3",
"_from": "detect-libc@2.0.1",
"_id": "detect-libc@2.0.1",
"_inBundle": false,
"_integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=",
"_integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==",
"_location": "/detect-libc",

@@ -17,15 +17,14 @@ "_phantomChildren": {},

"registry": true,
"raw": "detect-libc@1.0.3",
"raw": "detect-libc@2.0.1",
"name": "detect-libc",
"escapedName": "detect-libc",
"rawSpec": "1.0.3",
"rawSpec": "2.0.1",
"saveSpec": null,
"fetchSpec": "1.0.3"
"fetchSpec": "2.0.1"
},
"_requiredBy": [
"/prebuild",
"/prebuild-install"
],
"_resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
"_spec": "1.0.3",
"_resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz",
"_spec": "2.0.1",
"_where": "/home/runner/work/node-datachannel/node-datachannel",

@@ -36,5 +35,2 @@ "author": {

},
"bin": {
"detect-libc": "bin/detect-libc.js"
},
"bugs": {

@@ -51,10 +47,14 @@ "url": "https://github.com/lovell/detect-libc/issues"

"devDependencies": {
"ava": "^0.23.0",
"nyc": "^11.3.0",
"proxyquire": "^1.8.0",
"semistandard": "^11.0.0"
"ava": "^2.4.0",
"nyc": "^15.1.0",
"proxyquire": "^2.1.3",
"semistandard": "^14.2.3"
},
"engines": {
"node": ">=0.10"
"node": ">=8"
},
"files": [
"lib/",
"index.d.ts"
],
"homepage": "https://github.com/lovell/detect-libc#readme",

@@ -74,5 +74,5 @@ "keywords": [

"scripts": {
"test": "semistandard && nyc --reporter=lcov ava"
"test": "semistandard && nyc --reporter=lcov --check-coverage --branches=100 ava test/unit.js"
},
"version": "1.0.3"
"version": "2.0.1"
}
# detect-libc
Node.js module to detect the C standard library (libc) implementation
family and version in use on a given Linux system.
Node.js module to detect details of the C standard library (libc)
implementation provided by a given Linux system.
Provides a value suitable for use with the `LIBC` option of
[prebuild](https://www.npmjs.com/package/prebuild),
[prebuild-ci](https://www.npmjs.com/package/prebuild-ci) and
[prebuild-install](https://www.npmjs.com/package/prebuild-install),
therefore allowing build and provision of pre-compiled binaries
for musl-based Linux e.g. Alpine as well as glibc-based.
Currently supports detection of GNU glibc and MUSL libc.
Currently supports libc detection of `glibc` and `musl`.
Provides asynchronous and synchronous functions for the
family (e.g. `glibc`, `musl`) and version (e.g. `1.23`, `1.2.3`).
For previous v1.x releases, please see the
[v1](https://github.com/lovell/detect-libc/tree/v1) branch.
## Install

@@ -21,51 +20,134 @@

## Usage
## API
### API
### GLIBC
```ts
const GLIBC: string = 'glibc';
```
A String constant containing the value `glibc`.
### MUSL
```ts
const MUSL: string = 'musl';
```
A String constant containing the value `musl`.
### family
```ts
function family(): Promise<string | null>;
```
Resolves asynchronously with:
* `glibc` or `musl` when the libc family can be determined
* `null` when the libc family cannot be determined
* `null` when run on a non-Linux platform
```js
const { GLIBC, MUSL, family, version, isNonGlibcLinux } = require('detect-libc');
const { family, GLIBC, MUSL } = require('detect-libc');
switch (await family()) {
case GLIBC: ...
case MUSL: ...
case null: ...
}
```
* `GLIBC` is a String containing the value "glibc" for comparison with `family`.
* `MUSL` is a String containing the value "musl" for comparison with `family`.
* `family` is a String representing the system libc family.
* `version` is a String representing the system libc version number.
* `isNonGlibcLinux` is a Boolean representing whether the system is a non-glibc Linux, e.g. Alpine.
-### detect-libc command line tool
-
-When run on a Linux system with a non-glibc libc,
-the child command will be run with the `LIBC` environment variable
-set to the relevant value.
-
-On all other platforms will run the child command as-is.
-
-The command line feature requires `spawnSync` provided by Node v0.12+.
-
-```sh
-detect-libc child-command
-```
-
-## Integrating with prebuild
-
-```json
-"scripts": {
-  "install": "detect-libc prebuild-install || node-gyp rebuild",
-  "test": "mocha && detect-libc prebuild-ci"
-},
-"dependencies": {
-  "detect-libc": "^1.0.2",
-  "prebuild-install": "^2.2.0"
-},
-"devDependencies": {
-  "prebuild": "^6.2.1",
-  "prebuild-ci": "^2.2.3"
-}
-```
-
-## Licence
-
-Copyright 2017 Lovell Fuller
+### familySync
+
+```ts
+function familySync(): string | null;
+```
+
+Synchronous version of `family()`.
+
+```js
+const { familySync, GLIBC, MUSL } = require('detect-libc');
+
+switch (familySync()) {
+  case GLIBC: ...
+  case MUSL: ...
+  case null: ...
+}
+```
+
+### version
+
+```ts
+function version(): Promise<string | null>;
+```
+
+Resolves asynchronously with:
+
+* The version when it can be determined
+* `null` when the libc family cannot be determined
+* `null` when run on a non-Linux platform
+
+```js
+const { version } = require('detect-libc');
+
+const v = await version();
+if (v) {
+  const [major, minor, patch] = v.split('.');
+}
+```
+
+### versionSync
+
+```ts
+function versionSync(): string | null;
+```
+
+Synchronous version of `version()`.
+
+```js
+const { versionSync } = require('detect-libc');
+
+const v = versionSync();
+if (v) {
+  const [major, minor, patch] = v.split('.');
+}
+```
+
+### isNonGlibcLinux
+
+```ts
+function isNonGlibcLinux(): Promise<boolean>;
+```
+
+Resolves asynchronously with:
+
+* `false` when the libc family is `glibc`
+* `true` when the libc family is not `glibc`
+* `false` when run on a non-Linux platform
+
+```js
+const { isNonGlibcLinux } = require('detect-libc');
+
+if (await isNonGlibcLinux()) { ... }
+```
+
+### isNonGlibcLinuxSync
+
+```ts
+function isNonGlibcLinuxSync(): boolean;
+```
+
+Synchronous version of `isNonGlibcLinux()`.
+
+```js
+const { isNonGlibcLinuxSync } = require('detect-libc');
+
+if (isNonGlibcLinuxSync()) { ... }
+```
+
+## Licensing
+
+Copyright 2017, 2022 Lovell Fuller
Licensed under the Apache License, Version 2.0 (the "License");

@@ -72,0 +154,0 @@ you may not use this file except in compliance with the License.

@@ -13,6 +13,6 @@ import {IncomingMessage} from 'http';

declare function mimicResponse<T extends NodeJS.ReadableStream>(
fromStream: IncomingMessage,
fromStream: IncomingMessage, // eslint-disable-line @typescript-eslint/prefer-readonly-parameter-types
toStream: T,
): IncomingMessage & T;
): T & IncomingMessage;
export = mimicResponse;

@@ -9,3 +9,2 @@ 'use strict';

'complete',
'destroy',
'headers',

@@ -27,4 +26,10 @@ 'httpVersion',

module.exports = (fromStream, toStream) => {
if (toStream._readableState.autoDestroy) {
throw new Error('The second stream must have the `autoDestroy` option set to `false`');
}
const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties));
const properties = {};
for (const property of fromProperties) {

@@ -36,6 +41,40 @@ // Don't overwrite existing properties.

toStream[property] = typeof fromStream[property] === 'function' ? fromStream[property].bind(fromStream) : fromStream[property];
properties[property] = {
get() {
const value = fromStream[property];
const isFunction = typeof value === 'function';
return isFunction ? value.bind(fromStream) : value;
},
set(value) {
fromStream[property] = value;
},
enumerable: true,
configurable: false
};
}
Object.defineProperties(toStream, properties);
fromStream.once('aborted', () => {
toStream.destroy();
toStream.emit('aborted');
});
fromStream.once('close', () => {
if (fromStream.complete) {
if (toStream.readable) {
toStream.once('end', () => {
toStream.emit('close');
});
} else {
toStream.emit('close');
}
} else {
toStream.emit('close');
}
});
return toStream;
};
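Version 3 proxies the response properties through getters and setters and refuses destination streams created with `autoDestroy: true` (see the check at the top of the hunk), which is why the decompress-response code above builds its `PassThrough` with `autoDestroy: false`. A minimal usage sketch under those assumptions:

```js
const http = require('http');
const { PassThrough } = require('stream');
const mimicResponse = require('mimic-response'); // assuming mimic-response@3

http.get('http://example.com', (response) => {
  // Must opt out of autoDestroy, otherwise mimic-response@3 throws.
  const clone = new PassThrough({ autoDestroy: false });
  mimicResponse(response, clone);

  // Header and status properties now read through to the original response.
  console.log(clone.statusCode, clone.headers['content-type']);
  response.pipe(clone).resume();
});
```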
{
"_args": [
[
"mimic-response@2.1.0",
"mimic-response@3.1.0",
"/home/runner/work/node-datachannel/node-datachannel"
]
],
"_from": "mimic-response@2.1.0",
"_id": "mimic-response@2.1.0",
"_from": "mimic-response@3.1.0",
"_id": "mimic-response@3.1.0",
"_inBundle": false,
"_integrity": "sha512-wXqjST+SLt7R009ySCglWBCFpjUygmCIfD790/kVbiGmUgfYGuB14PiTd5DwVxSV4NcYHjzMkoj5LjQZwTQLEA==",
"_integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==",
"_location": "/mimic-response",

@@ -17,8 +17,8 @@ "_phantomChildren": {},

"registry": true,
"raw": "mimic-response@2.1.0",
"raw": "mimic-response@3.1.0",
"name": "mimic-response",
"escapedName": "mimic-response",
"rawSpec": "2.1.0",
"rawSpec": "3.1.0",
"saveSpec": null,
"fetchSpec": "2.1.0"
"fetchSpec": "3.1.0"
},

@@ -28,4 +28,4 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-2.1.0.tgz",
"_spec": "2.1.0",
"_resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz",
"_spec": "3.1.0",
"_where": "/home/runner/work/node-datachannel/node-datachannel",

@@ -42,12 +42,12 @@ "author": {

"devDependencies": {
"@sindresorhus/tsconfig": "^0.3.0",
"@types/node": "^12.0.0",
"ava": "^1.1.0",
"@types/node": "^14.0.1",
"ava": "^2.4.0",
"create-test-server": "^2.4.0",
"pify": "^4.0.1",
"tsd": "^0.7.3",
"xo": "^0.24.0"
"p-event": "^4.1.0",
"pify": "^5.0.0",
"tsd": "^0.11.0",
"xo": "^0.30.0"
},
"engines": {
"node": ">=8"
"node": ">=10"
},

@@ -79,3 +79,3 @@ "files": [

},
"version": "2.1.0"
"version": "3.1.0"
}

@@ -1,2 +0,2 @@

# mimic-response [![Build Status](https://travis-ci.org/sindresorhus/mimic-response.svg?branch=master)](https://travis-ci.org/sindresorhus/mimic-response)
# mimic-response [![Build Status](https://travis-ci.com/sindresorhus/mimic-response.svg?branch=master)](https://travis-ci.com/sindresorhus/mimic-response)

@@ -30,2 +30,23 @@ > Mimic a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage)

**Note #1:** The `from.destroy(error)` function is not proxied. You have to call it manually:
```js
const stream = require('stream');
const mimicResponse = require('mimic-response');
const responseStream = getHttpResponseStream();
const myStream = new stream.PassThrough({
destroy(error, callback) {
responseStream.destroy();
callback(error);
}
});
myStream.destroy();
```
Please note that `myStream` and `responseStream` never throw. The error is passed to the request instead.
#### from

@@ -32,0 +53,0 @@

@@ -54,2 +54,9 @@ [

{
"runtime": "node",
"target": "17.0.0",
"lts": false,
"future": false,
"abi": "102"
},
{
"abi": "70",

@@ -129,3 +136,3 @@ "future": false,

"runtime": "electron",
"target": "14.0.0-beta.1"
"target": "13.0.0-beta.2"
},

@@ -137,11 +144,46 @@ {

"runtime": "electron",
"target": "13.0.0-beta.2"
"target": "15.0.0-alpha.1"
},
{
"abi": "89",
"future": false,
"lts": false,
"runtime": "electron",
"target": "14.0.0-beta.1"
},
{
"abi": "97",
"future": false,
"lts": false,
"runtime": "electron",
"target": "14.0.2"
},
{
"abi": "98",
"future": false,
"lts": false,
"runtime": "electron",
"target": "15.0.0-beta.7"
},
{
"abi": "99",
"future": false,
"lts": false,
"runtime": "electron",
"target": "16.0.0-alpha.1"
},
{
"abi": "101",
"future": false,
"lts": false,
"runtime": "electron",
"target": "17.0.0-alpha.1"
},
{
"abi": "103",
"future": true,
"lts": false,
"runtime": "electron",
"target": "15.0.0-alpha.1"
"target": "18.0.0-alpha.1"
}
]

@@ -27,2 +27,3 @@ var semver = require('semver')

var abi
var lastTarget

@@ -32,4 +33,6 @@ for (var i = 0; i < allTargets.length; i++) {

if (t.runtime !== runtime) continue
if (semver.lte(t.target, target)) abi = t.abi
else break
if (semver.lte(t.target, target) && (!lastTarget || semver.gte(t.target, lastTarget))) {
abi = t.abi
lastTarget = t.target
}
}

@@ -36,0 +39,0 @@

{
"_args": [
[
"node-abi@2.30.1",
"node-abi@3.8.0",
"/home/runner/work/node-datachannel/node-datachannel"
]
],
"_from": "node-abi@2.30.1",
"_id": "node-abi@2.30.1",
"_from": "node-abi@3.8.0",
"_id": "node-abi@3.8.0",
"_inBundle": false,
"_integrity": "sha512-/2D0wOQPgaUWzVSVgRMx+trKJRC2UG4SUc4oCJoXx9Uxjtp0Vy3/kt7zcbxHF8+Z/pK3UloLWzBISg72brfy1w==",
"_integrity": "sha512-tzua9qWWi7iW4I42vUPKM+SfaF0vQSLAm4yO5J83mSwB7GeoWrDKC/K+8YCnYNwqP5duwazbw2X9l4m8SC2cUw==",
"_location": "/node-abi",
"_phantomChildren": {},
"_phantomChildren": {
"lru-cache": "6.0.0"
},
"_requested": {
"type": "version",
"registry": true,
"raw": "node-abi@2.30.1",
"raw": "node-abi@3.8.0",
"name": "node-abi",
"escapedName": "node-abi",
"rawSpec": "2.30.1",
"rawSpec": "3.8.0",
"saveSpec": null,
"fetchSpec": "2.30.1"
"fetchSpec": "3.8.0"
},

@@ -27,4 +29,4 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/node-abi/-/node-abi-2.30.1.tgz",
"_spec": "2.30.1",
"_resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.8.0.tgz",
"_spec": "3.8.0",
"_where": "/home/runner/work/node-datachannel/node-datachannel",

@@ -38,11 +40,13 @@ "author": {

"dependencies": {
"semver": "^5.4.1"
"semver": "^7.3.5"
},
"description": "Get the Node ABI for a given target and runtime, and vice versa.",
"devDependencies": {
"got": "^10.6.0",
"semantic-release": "^15.8.0",
"tape": "^4.6.3",
"travis-deploy-once": "^5.0.1"
"@continuous-auth/semantic-release-npm": "^2.0.0",
"got": "^11.8.2",
"tape": "^5.3.1"
},
"engines": {
"node": ">=10"
},
"homepage": "https://github.com/lgeiger/node-abi#readme",

@@ -66,6 +70,5 @@ "keywords": [

"test": "tape test/index.js",
"travis-deploy-once": "travis-deploy-once",
"update-abi-registry": "node --unhandled-rejections=strict scripts/update-abi-registry.js"
},
"version": "2.30.1"
"version": "3.8.0"
}

@@ -44,2 +44,3 @@ var test = require('tape')

t.equal(getTarget('82', 'electron'), '10.0.0')
t.equal(getTarget('89', 'electron'), '13.0.0')
t.end()

@@ -103,3 +104,8 @@ })

t.throws(function () { getAbi(getNextTarget('electron'), 'electron') })
t.equal(getAbi('10.0.0-beta.1', 'electron'), '82')
t.equal(getAbi('15.0.0-beta.1', 'electron'), '89')
t.equal(getAbi('14.1.1', 'electron'), '97')
t.equal(getAbi('14.0.0', 'electron'), '89')
t.equal(getAbi('13.0.0', 'electron'), '89')
t.equal(getAbi('12.0.0', 'electron'), '87')
t.equal(getAbi('11.0.0', 'electron'), '85')
t.equal(getAbi('10.0.0', 'electron'), '82')

@@ -163,15 +169,1 @@ t.equal(getAbi('9.0.0', 'electron'), '80')

})
test('allTargets are sorted', function (t) {
var electron = allTargets.filter(function (t) { return t.runtime === 'electron' })
var node = allTargets.filter(function (t) { return t.runtime === 'node' })
var nodeWebkit = allTargets.filter(function (t) { return t.runtime === 'node-webkit' })
function sort (t1, t2) {
return semver.compare(t1.target, t2.target)
}
t.deepEqual(electron, electron.slice().sort(sort), 'electron targets are sorted')
t.deepEqual(node, node.slice().sort(sort), 'node targets are sorted')
t.deepEqual(nodeWebkit, nodeWebkit.slice().sort(sort), 'node-webkit targets are sorted')
t.end()
})
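The loop change above makes ABI resolution pick the highest matching target instead of relying on the registry being sorted, which is presumably why the "allTargets are sorted" test above was removed. The new test expectations illustrate the mapping; a short sketch using values taken directly from this diff:

```js
const { getAbi, getTarget } = require('node-abi'); // assuming node-abi@3

console.log(getAbi('14.0.0', 'electron')); // '89'
console.log(getAbi('14.1.1', 'electron')); // '97'
console.log(getTarget('89', 'electron'));  // '13.0.0'
```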

@@ -1,12 +0,12 @@

var get = require('simple-get')
var util = require('./util')
var proxy = require('./proxy')
const get = require('simple-get')
const util = require('./util')
const proxy = require('./proxy')
function findAssetId (opts, cb) {
var downloadUrl = util.getDownloadUrl(opts)
var apiUrl = util.getApiUrl(opts)
var log = opts.log || util.noopLogger
const downloadUrl = util.getDownloadUrl(opts)
const apiUrl = util.getApiUrl(opts)
const log = opts.log || util.noopLogger
log.http('request', 'GET ' + apiUrl)
var reqOpts = proxy({
const reqOpts = proxy({
url: apiUrl,

@@ -20,3 +20,3 @@ json: true,

var req = get.concat(reqOpts, function (err, res, data) {
const req = get.concat(reqOpts, function (err, res, data) {
if (err) return cb(err)

@@ -27,5 +27,5 @@ log.http(res.statusCode, apiUrl)

// Find asset id in release
for (var release of data) {
for (const release of data) {
if (release.tag_name === opts['tag-prefix'] + opts.pkg.version) {
for (var asset of release.assets) {
for (const asset of release.assets) {
if (asset.browser_download_url === downloadUrl) {

@@ -32,0 +32,0 @@ return cb(null, asset.id)

#!/usr/bin/env node
var path = require('path')
var fs = require('fs')
var napi = require('napi-build-utils')
const path = require('path')
const fs = require('fs')
const napi = require('napi-build-utils')
var pkg = require(path.resolve('package.json'))
var rc = require('./rc')(pkg)
var log = require('./log')(rc, process.env)
var download = require('./download')
var asset = require('./asset')
var util = require('./util')
const pkg = require(path.resolve('package.json'))
const rc = require('./rc')(pkg)
const log = require('./log')(rc, process.env)
const download = require('./download')
const asset = require('./asset')
const util = require('./util')
var prebuildClientVersion = require('./package.json').version
const prebuildClientVersion = require('./package.json').version
if (rc.version) {

@@ -40,7 +40,7 @@ console.log(prebuildClientVersion)

var opts = Object.assign({}, rc, { pkg: pkg, log: log })
const opts = Object.assign({}, rc, { pkg: pkg, log: log })
if (napi.isNapiRuntime(rc.runtime)) napi.logUnsupportedVersion(rc.target, log)
var origin = util.packageOrigin(process.env, pkg)
const origin = util.packageOrigin(process.env, pkg)

@@ -58,3 +58,3 @@ if (opts.force) {

var startDownload = function (downloadUrl) {
const startDownload = function (downloadUrl) {
download(downloadUrl, opts, function (err) {

@@ -61,0 +61,0 @@ if (err) {

# Changelog
## [7.0.1] - 2022-01-28
### Changed
- Upgrade to the latest version of `detect-libc` ([#166](https://github.com/prebuild/prebuild-install/issues/166)) ([`f71c6b9`](https://github.com/prebuild/prebuild-install/commit/f71c6b9)) (Lovell Fuller).
## [7.0.0] - 2021-11-12
### Changed
- **Breaking:** bump `node-abi` so that Electron 14+ gets correct ABI ([#161](https://github.com/prebuild/prebuild-install/issues/161)) ([`477f347`](https://github.com/prebuild/prebuild-install/commit/477f347)) (csett86). Drops support of Node.js < 10.
- Bump `simple-get` ([`7468c14`](https://github.com/prebuild/prebuild-install/commit/7468c14)) (Vincent Weevers).
## [6.1.4] - 2021-08-11

@@ -67,16 +80,20 @@

[6.1.4]: https://github.com/prebuild/prebuild-install/compare/v6.1.3...v6.1.4
[7.0.1]: https://github.com/prebuild/prebuild-install/releases/tag/v7.0.1
[6.1.3]: https://github.com/prebuild/prebuild-install/compare/v6.1.2...v6.1.3
[7.0.0]: https://github.com/prebuild/prebuild-install/releases/tag/v7.0.0
[6.1.2]: https://github.com/prebuild/prebuild-install/compare/v6.1.1...v6.1.2
[6.1.4]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.4
[6.1.1]: https://github.com/prebuild/prebuild-install/compare/v6.1.0...v6.1.1
[6.1.3]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.3
[6.1.0]: https://github.com/prebuild/prebuild-install/compare/v6.0.1...v6.1.0
[6.1.2]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.2
[6.0.1]: https://github.com/prebuild/prebuild-install/compare/v6.0.0...v6.0.1
[6.1.1]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.1
[6.0.0]: https://github.com/prebuild/prebuild-install/compare/v5.3.6...v6.0.0
[6.1.0]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.0
[6.0.1]: https://github.com/prebuild/prebuild-install/releases/tag/v6.0.1
[6.0.0]: https://github.com/prebuild/prebuild-install/releases/tag/v6.0.0
[5.3.6]: https://github.com/prebuild/prebuild-install/releases/tag/v5.3.6

@@ -1,17 +0,17 @@

var path = require('path')
var fs = require('fs')
var get = require('simple-get')
var pump = require('pump')
var tfs = require('tar-fs')
var zlib = require('zlib')
var util = require('./util')
var error = require('./error')
var proxy = require('./proxy')
var mkdirp = require('mkdirp-classic')
const path = require('path')
const fs = require('fs')
const get = require('simple-get')
const pump = require('pump')
const tfs = require('tar-fs')
const zlib = require('zlib')
const util = require('./util')
const error = require('./error')
const proxy = require('./proxy')
const mkdirp = require('mkdirp-classic')
function downloadPrebuild (downloadUrl, opts, cb) {
var cachedPrebuild = util.cachedPrebuild(downloadUrl)
var localPrebuild = util.localPrebuild(downloadUrl, opts)
var tempFile = util.tempFile(cachedPrebuild)
var log = opts.log || util.noopLogger
let cachedPrebuild = util.cachedPrebuild(downloadUrl)
const localPrebuild = util.localPrebuild(downloadUrl, opts)
const tempFile = util.tempFile(cachedPrebuild)
const log = opts.log || util.noopLogger

@@ -43,3 +43,3 @@ if (opts.nolocal) return download()

log.http('request', 'GET ' + downloadUrl)
var reqOpts = proxy({ url: downloadUrl }, opts)
const reqOpts = proxy({ url: downloadUrl }, opts)

@@ -54,3 +54,3 @@ if (opts.token) {

var req = get(reqOpts, function (err, res) {
const req = get(reqOpts, function (err, res) {
if (err) return onerror(err)

@@ -86,5 +86,5 @@ log.http(res.statusCode, downloadUrl)

function unpack () {
var binaryName
let binaryName
var updateName = opts.updateName || function (entry) {
const updateName = opts.updateName || function (entry) {
if (/\.node$/i.test(entry.name)) binaryName = entry.name

@@ -95,3 +95,3 @@ }

var options = {
const options = {
readable: true,

@@ -101,3 +101,3 @@ writable: true,

}
var extract = tfs.extract(opts.path, options).on('entry', updateName)
const extract = tfs.extract(opts.path, options).on('entry', updateName)

@@ -108,3 +108,3 @@ pump(fs.createReadStream(cachedPrebuild), zlib.createGunzip(), extract,

var resolved
let resolved
if (binaryName) {

@@ -133,3 +133,3 @@ try {

function ensureNpmCacheDir (cb) {
var cacheFolder = util.npmCache()
const cacheFolder = util.npmCache()
fs.access(cacheFolder, fs.R_OK | fs.W_OK, function (err) {

@@ -136,0 +136,0 @@ if (err && err.code === 'ENOENT') {

@@ -1,4 +0,4 @@

var log = require('npmlog')
var fs = require('fs')
var path = require('path')
const log = require('npmlog')
const fs = require('fs')
const path = require('path')

@@ -16,3 +16,3 @@ module.exports = function (rc, env) {

if (process.env.npm_config_prebuild_install_logfile) {
var fp = path.resolve(process.env.npm_config_prebuild_install_logfile)
const fp = path.resolve(process.env.npm_config_prebuild_install_logfile)

@@ -19,0 +19,0 @@ log.on('log', function (msg) {

{
"_args": [
[
"prebuild-install@6.1.4",
"prebuild-install@7.0.1",
"/home/runner/work/node-datachannel/node-datachannel"
]
],
"_from": "prebuild-install@6.1.4",
"_id": "prebuild-install@6.1.4",
"_from": "prebuild-install@7.0.1",
"_id": "prebuild-install@7.0.1",
"_inBundle": false,
"_integrity": "sha512-Z4vpywnK1lBg+zdPCVCsKq0xO66eEV9rWo2zrROGGiRS4JtueBOdlB1FnY8lcy7JsUud/Q3ijUxyWN26Ika0vQ==",
"_integrity": "sha512-QBSab31WqkyxpnMWQxubYAHR5S9B2+r81ucocew34Fkl98FhvKIF50jIJnNOBmAZfyNV7vE5T6gd3hTVWgY6tg==",
"_location": "/prebuild-install",

@@ -17,8 +17,8 @@ "_phantomChildren": {},

"registry": true,
"raw": "prebuild-install@6.1.4",
"raw": "prebuild-install@7.0.1",
"name": "prebuild-install",
"escapedName": "prebuild-install",
"rawSpec": "6.1.4",
"rawSpec": "7.0.1",
"saveSpec": null,
"fetchSpec": "6.1.4"
"fetchSpec": "7.0.1"
},

@@ -28,4 +28,4 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-6.1.4.tgz",
"_spec": "6.1.4",
"_resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.0.1.tgz",
"_spec": "7.0.1",
"_where": "/home/runner/work/node-datachannel/node-datachannel",

@@ -80,3 +80,3 @@ "author": {

"dependencies": {
"detect-libc": "^1.0.3",
"detect-libc": "^2.0.0",
"expand-template": "^2.0.3",

@@ -87,7 +87,7 @@ "github-from-package": "0.0.0",

"napi-build-utils": "^1.0.1",
"node-abi": "^2.21.0",
"node-abi": "^3.3.0",
"npmlog": "^4.0.1",
"pump": "^3.0.0",
"rc": "^1.2.7",
"simple-get": "^3.0.3",
"simple-get": "^4.0.0",
"tar-fs": "^2.0.0",

@@ -99,11 +99,11 @@ "tunnel-agent": "^0.6.0"

"a-native-module": "^1.0.0",
"hallmark": "^3.0.0",
"hallmark": "^4.0.0",
"nock": "^10.0.6",
"rimraf": "^2.5.2",
"standard": "^13.0.2",
"tape": "^4.5.1",
"standard": "^16.0.4",
"tape": "^5.3.1",
"tempy": "0.2.1"
},
"engines": {
"node": ">=6"
"node": ">=10"
},

@@ -131,6 +131,5 @@ "homepage": "https://github.com/prebuild/prebuild-install",

"hallmark": "hallmark --fix",
"lint": "standard && hallmark",
"test": "tape test/*-test.js && npm run lint"
"test": "standard && hallmark && tape test/*-test.js"
},
"version": "6.1.4"
"version": "7.0.1"
}

@@ -1,18 +0,18 @@

var url = require('url')
var tunnel = require('tunnel-agent')
var util = require('./util')
const url = require('url')
const tunnel = require('tunnel-agent')
const util = require('./util')
function applyProxy (reqOpts, opts) {
var log = opts.log || util.noopLogger
const log = opts.log || util.noopLogger
var proxy = opts['https-proxy'] || opts.proxy
const proxy = opts['https-proxy'] || opts.proxy
if (proxy) {
// eslint-disable-next-line node/no-deprecated-api
var parsedDownloadUrl = url.parse(reqOpts.url)
const parsedDownloadUrl = url.parse(reqOpts.url)
// eslint-disable-next-line node/no-deprecated-api
var parsedProxy = url.parse(proxy)
var uriProtocol = (parsedDownloadUrl.protocol === 'https:' ? 'https' : 'http')
var proxyProtocol = (parsedProxy.protocol === 'https:' ? 'Https' : 'Http')
var tunnelFnName = [uriProtocol, proxyProtocol].join('Over')
const parsedProxy = url.parse(proxy)
const uriProtocol = (parsedDownloadUrl.protocol === 'https:' ? 'https' : 'http')
const proxyProtocol = (parsedProxy.protocol === 'https:' ? 'Https' : 'Http')
const tunnelFnName = [uriProtocol, proxyProtocol].join('Over')
reqOpts.agent = tunnel[tunnelFnName]({

@@ -19,0 +19,0 @@ proxy: {

@@ -1,16 +0,16 @@

var path = require('path')
var minimist = require('minimist')
var getAbi = require('node-abi').getAbi
var detectLibc = require('detect-libc')
var napi = require('napi-build-utils')
const path = require('path')
const minimist = require('minimist')
const getAbi = require('node-abi').getAbi
const detectLibc = require('detect-libc')
const napi = require('napi-build-utils')
var env = process.env
var libc = env.LIBC || (detectLibc.isNonGlibcLinux && detectLibc.family) || ''
const env = process.env
const libc = env.LIBC || (detectLibc.isNonGlibcLinuxSync() && detectLibc.familySync()) || ''
// Get the configuration
module.exports = function (pkg) {
var pkgConf = pkg.config || {}
var buildFromSource = env.npm_config_build_from_source
const pkgConf = pkg.config || {}
const buildFromSource = env.npm_config_build_from_source
var rc = require('rc')('prebuild-install', {
const rc = require('rc')('prebuild-install', {
target: pkgConf.target || env.npm_config_target || process.versions.node,

@@ -26,4 +26,4 @@ runtime: pkgConf.runtime || env.npm_config_runtime || 'node',

path: '.',
proxy: env.npm_config_proxy || env['http_proxy'] || env['HTTP_PROXY'],
'https-proxy': env.npm_config_https_proxy || env['https_proxy'] || env['HTTPS_PROXY'],
proxy: env.npm_config_proxy || env.http_proxy || env.HTTP_PROXY,
'https-proxy': env.npm_config_https_proxy || env.https_proxy || env.HTTPS_PROXY,
'local-address': env.npm_config_local_address,

@@ -30,0 +30,0 @@ 'local-prebuilds': 'prebuilds',
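The rc.js change swaps detect-libc 1.x's exported properties for the 2.x synchronous functions. A minimal sketch of what the updated expression evaluates to (assuming detect-libc@2):

```js
const detectLibc = require('detect-libc');

// Mirrors the updated line in rc.js above:
const libc = process.env.LIBC ||
  (detectLibc.isNonGlibcLinuxSync() && detectLibc.familySync()) || '';

// glibc Linux, macOS, Windows        -> ''  (family is only consulted off glibc)
// musl-based Linux (e.g. Alpine)     -> 'musl'
console.log(libc);
```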

@@ -8,5 +8,5 @@ # prebuild-install

![Node version](https://img.shields.io/node/v/prebuild-install.svg)
[![Test](https://github.com/prebuild/prebuild-install/actions/workflows/test.yml/badge.svg)](https://github.com/prebuild/prebuild-install/actions/workflows/test.yml)
[![david](https://david-dm.org/prebuild/prebuild-install.svg)](https://david-dm.org/prebuild/prebuild-install)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com/)
[![Test](https://img.shields.io/github/workflow/status/prebuild/prebuild-install/Test?label=test)](https://github.com/prebuild/prebuild-install/actions/workflows/test.yml)
[![Standard](https://img.shields.io/badge/standard-informational?logo=javascript\&logoColor=fff)](https://standardjs.com)
[![Common Changelog](https://common-changelog.org/badge.svg)](https://common-changelog.org)

@@ -44,2 +44,14 @@ ## Note

When a consumer then installs your package with npm thus triggering the above install script, `prebuild-install` will download a suitable prebuilt binary, or exit with a non-zero exit code if there is none, which triggers `node-gyp rebuild` in order to build from source.
Options (see below) can be passed to `prebuild-install` like so:
```json
{
"scripts": {
"install": "prebuild-install -r napi || node-gyp rebuild"
}
}
```
### Help

@@ -139,4 +151,12 @@

## Install
With [npm](https://npmjs.org) do:
```
npm install prebuild-install
```
## License
MIT
[MIT](./LICENSE)

@@ -1,9 +0,9 @@

var path = require('path')
var github = require('github-from-package')
var home = require('os').homedir
var crypto = require('crypto')
var expandTemplate = require('expand-template')()
const path = require('path')
const github = require('github-from-package')
const home = require('os').homedir
const crypto = require('crypto')
const expandTemplate = require('expand-template')()
function getDownloadUrl (opts) {
var pkgName = opts.pkg.name.replace(/^@[a-zA-Z0-9_\-.~]+\//, '')
const pkgName = opts.pkg.name.replace(/^@[a-zA-Z0-9_\-.~]+\//, '')
return expandTemplate(urlTemplate(opts), {

@@ -43,4 +43,4 @@ name: pkgName,

var packageName = '{name}-v{version}-{runtime}-v{abi}-{platform}{libc}-{arch}.tar.gz'
var hostMirrorUrl = getHostMirrorUrl(opts)
const packageName = '{name}-v{version}-{runtime}-v{abi}-{platform}{libc}-{arch}.tar.gz'
const hostMirrorUrl = getHostMirrorUrl(opts)

@@ -69,3 +69,3 @@ if (hostMirrorUrl) {

function getHostMirrorUrl (opts) {
var propName = getEnvPrefix(opts.pkg.name) + '_binary_host'
const propName = getEnvPrefix(opts.pkg.name) + '_binary_host'
return process.env[propName] || process.env[propName + '_mirror']

@@ -79,3 +79,3 @@ }

function cachedPrebuild (url) {
var digest = crypto.createHash('md5').update(url).digest('hex').slice(0, 6)
const digest = crypto.createHash('md5').update(url).digest('hex').slice(0, 6)
return path.join(prebuildCache(), digest + '-' + path.basename(url).replace(/[^a-zA-Z0-9.]+/g, '-'))

@@ -85,3 +85,3 @@ }

function npmCache () {
var env = process.env
const env = process.env
return env.npm_config_cache || (env.APPDATA ? path.join(env.APPDATA, 'npm-cache') : path.join(home(), '.npm'))

@@ -117,8 +117,8 @@ }

function localPrebuild (url, opts) {
var propName = getEnvPrefix(opts.pkg.name) + '_local_prebuilds'
var prefix = process.env[propName] || opts['local-prebuilds'] || 'prebuilds'
const propName = getEnvPrefix(opts.pkg.name) + '_local_prebuilds'
const prefix = process.env[propName] || opts['local-prebuilds'] || 'prebuilds'
return path.join(prefix, path.basename(url))
}
var noopLogger = {
const noopLogger = {
http: function () {},

@@ -125,0 +125,0 @@ silly: function () {},
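The `{libc}` placeholder in the package name template above is where that detected value ends up. A rough illustration of how the prebuilt tarball name is assembled with `expand-template` (the values below are illustrative, not taken from this diff):

```js
const expandTemplate = require('expand-template')();

const packageName = '{name}-v{version}-{runtime}-v{abi}-{platform}{libc}-{arch}.tar.gz';

console.log(expandTemplate(packageName, {
  name: 'node-datachannel',
  version: '0.2.4',
  runtime: 'node',
  abi: '93',        // Node.js 16.x
  platform: 'linux',
  libc: '',         // or 'musl' when detect-libc reports a non-glibc system
  arch: 'x64'
}));
// -> node-datachannel-v0.2.4-node-v93-linux-x64.tar.gz
```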

@@ -0,1 +1,2 @@

/*! simple-get. MIT License. Feross Aboukhadijeh <https://feross.org/opensource> */
module.exports = simpleGet

@@ -46,2 +47,3 @@

const originalHost = opts.hostname // hostname before potential redirect
const protocol = opts.protocol === 'https:' ? https : http // Support http/https urls

@@ -54,2 +56,9 @@ const req = protocol.request(opts, res => {

const redirectHost = url.parse(opts.url).hostname // eslint-disable-line node/no-deprecated-api
// If redirected host is different than original host, drop headers to prevent cookie leak (#73)
if (redirectHost !== null && redirectHost !== originalHost) {
delete opts.headers.cookie
delete opts.headers.authorization
}
if (opts.method === 'POST' && [301, 302].includes(res.statusCode)) {

@@ -56,0 +65,0 @@ opts.method = 'GET' // On 301/302 redirect, change POST to GET (see #35)
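The new block above strips credentials when a redirect leaves the original host. A hedged sketch of the consumer-side effect (the URL is hypothetical; assuming simple-get@4):

```js
const get = require('simple-get');

get({
  url: 'https://example.com/login', // hypothetical endpoint that redirects elsewhere
  headers: {
    cookie: 'session=abc123',
    authorization: 'Bearer secret-token'
  }
}, (err, res) => {
  if (err) throw err;
  // If the redirect target is on a different hostname, simple-get 4.x drops the
  // cookie and authorization headers before following it (issue #73 above).
  res.resume();
});
```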

{
"_args": [
[
"simple-get@3.1.0",
"simple-get@4.0.1",
"/home/runner/work/node-datachannel/node-datachannel"
]
],
"_from": "simple-get@3.1.0",
"_id": "simple-get@3.1.0",
"_from": "simple-get@4.0.1",
"_id": "simple-get@4.0.1",
"_inBundle": false,
"_integrity": "sha512-bCR6cP+aTdScaQCnQKbPKtJOKDp/hj9EDLJo3Nw4y1QksqaovlW/bnptB6/c1e+qmNIDHRK+oXFDdEqBT8WzUA==",
"_integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==",
"_location": "/simple-get",

@@ -17,8 +17,8 @@ "_phantomChildren": {},

"registry": true,
"raw": "simple-get@3.1.0",
"raw": "simple-get@4.0.1",
"name": "simple-get",
"escapedName": "simple-get",
"rawSpec": "3.1.0",
"rawSpec": "4.0.1",
"saveSpec": null,
"fetchSpec": "3.1.0"
"fetchSpec": "4.0.1"
},

@@ -28,4 +28,4 @@ "_requiredBy": [

],
"_resolved": "https://registry.npmjs.org/simple-get/-/simple-get-3.1.0.tgz",
"_spec": "3.1.0",
"_resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz",
"_spec": "4.0.1",
"_where": "/home/runner/work/node-datachannel/node-datachannel",

@@ -35,3 +35,3 @@ "author": {

"email": "feross@feross.org",
"url": "http://feross.org/"
"url": "https://feross.org"
},

@@ -45,3 +45,3 @@ "browser": {

"dependencies": {
"decompress-response": "^4.2.0",
"decompress-response": "^6.0.0",
"once": "^1.3.1",

@@ -55,4 +55,18 @@ "simple-concat": "^1.0.0"

"string-to-stream": "^3.0.0",
"tape": "^4.0.0"
"tape": "^5.0.0"
},
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"homepage": "https://github.com/feross/simple-get",

@@ -85,3 +99,3 @@ "keywords": [

},
"version": "3.1.0"
"version": "4.0.1"
}

@@ -1,5 +0,5 @@

# simple-get [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
# simple-get [![ci][ci-image]][ci-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url]
[travis-image]: https://img.shields.io/travis/feross/simple-get/master.svg
[travis-url]: https://travis-ci.org/feross/simple-get
[ci-image]: https://img.shields.io/github/workflow/status/feross/simple-get/ci/master
[ci-url]: https://github.com/feross/simple-get/actions
[npm-image]: https://img.shields.io/npm/v/simple-get.svg

@@ -257,2 +257,16 @@ [npm-url]: https://npmjs.org/package/simple-get

### Basic Auth
```js
const user = 'someuser'
const pass = 'pa$$word'
const encodedAuth = Buffer.from(`${user}:${pass}`).toString('base64')
get('http://example.com', {
headers: {
authorization: `Basic ${encodedAuth}`
}
})
```
### OAuth

@@ -259,0 +273,0 @@

{
"name": "node-datachannel",
"version": "0.2.3",
"version": "0.2.4",
"description": "libdatachannel node bindings",

@@ -49,8 +49,8 @@ "main": "lib/index.js",

"devDependencies": {
"cmake-js": "^6.2.1",
"jest": "^27.2.5",
"cmake-js": "^6.3.0",
"jest": "^27.5.1",
"nan": "^2.15.0",
"napi-thread-safe-callback-cancellable": "^0.0.7",
"node-addon-api": "^4.2.0",
"prebuild": "^11.0.0"
"prebuild": "^11.0.3"
},

@@ -61,4 +61,5 @@ "bundledDependencies": [

"dependencies": {
"prebuild-install": "^6.1.4"
"@types/node": "^17.0.21",
"prebuild-install": "^7.0.1"
}
}

@@ -148,2 +148,47 @@ const nodeDataChannel = require('../lib/index');

});
});
function waitForGathering(peer) {
return new Promise((resolve) => {
peer.onGatheringStateChange((state) => {
if (state === 'complete') resolve();
});
// Handle race conditions where gathering has already completed
if (peer.gatheringState() === 'complete') resolve();
});
}
describe('DataChannel streams', () => {
test('can build an echo pipeline', async () => {
let clientPeer = new nodeDataChannel.PeerConnection("Client", { iceServers: [] });
let echoPeer = new nodeDataChannel.PeerConnection("Client", { iceServers: [] });
const echoStream = new nodeDataChannel.DataChannelStream(
echoPeer.createDataChannel("echo-channel")
);
echoStream.pipe(echoStream); // Echo all received data back to the client
await waitForGathering(echoPeer);
const { sdp: echoDescSdp, type: echoDescType } = echoPeer.localDescription();
clientPeer.setRemoteDescription(echoDescSdp, echoDescType);
await waitForGathering(clientPeer);
const { sdp: clientDescSdp, type: clientDescType } = clientPeer.localDescription();
echoPeer.setRemoteDescription(clientDescSdp, clientDescType);
const clientChannel = await new Promise((resolve) => clientPeer.onDataChannel(resolve));
const clientResponsePromise = new Promise((resolve) => clientChannel.onMessage(resolve));
clientChannel.sendMessage("test message");
expect(await clientResponsePromise).toBe("test message");
clientChannel.close();
await new Promise((resolve) => echoStream.on('end', resolve));
clientPeer.close();
echoPeer.close();
});
});
