Socket
Socket
Sign inDemoInstall

http2

Package Overview
Dependencies
Maintainers
1
Versions
44
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

http2 - npm Package Compare versions

Comparing version 0.0.5 to 0.0.6

doc/endpoint.html

10

HISTORY.md
Version history
===============
### 0.0.6 (2013-07-19) ###
* `Connection` and `Endpoint` classes are usable, but not yet ready
* Addition of an example server and client
* Using [istanbul](https://github.com/gotwarlost/istanbul) for measuring code coverage
* [Blog post](http://gabor.molnar.es/blog/2013/07/19/gsoc-week-number-5/)
* [Tarball](https://github.com/molnarg/node-http2/archive/node-http2-0.0.6.tar.gz)
### 0.0.5 (2013-07-14) ###

@@ -8,3 +16,3 @@

* Public API stubs are in place
* [Blog post](http://gabor.molnar.es/blog/2013/07/08/gsoc-week-number-4/)
* [Blog post](http://gabor.molnar.es/blog/2013/07/14/gsoc-week-number-4/)
* [Tarball](https://github.com/molnarg/node-http2/archive/node-http2-0.0.5.tar.gz)

@@ -11,0 +19,0 @@

67

lib/compressor.js

@@ -17,2 +17,3 @@ // HTTP/2 compression is implemented by two [Transform Stream][1] subclasses that operate in

var utils = require('../lib/utils');
var logging = require('./logging');
var Transform = require('stream').Transform;

@@ -41,3 +42,3 @@

function CompressionContext(table, limit) {
this._table = table ? table.slice() : [];
this._table = table.slice();
this._limit = limit || DEFAULT_HEADER_TABLE_LIMIT;

@@ -153,2 +154,10 @@ this._reference = [];

// `_isShadowed` determines the reachability of a given index in the Header Table. An entry in the
// Header Table is shadowed if there's an entry in the Working Set with the same ID.
CompressionContext.prototype._isShadowed = function(index) {
return this._working.some(function(entry) {
return entry.index === index;
});
};
// `generateAddCommand` tries to find a compact command (header representation) for the given

@@ -158,17 +167,7 @@ // `[name, value]` pair that causes the decoder to add the given pair to the Working Set.

var equal = CompressionContext.equal.bind(null, pair);
if (this.getWorkingSet().some(equal)) {
return undefined;
}
var working = this._working;
function shadowed(index) {
return working.some(function(entry) {
return entry.index === index;
});
}
var full_match = this._table.filter(equal);
if (full_match.length !== 0) {
var full_index = this._table.indexOf(full_match[0]);
if (!shadowed(full_index)) {
if (!this._isShadowed(full_index)) {
return {

@@ -188,3 +187,3 @@ name: full_index,

var name_index = this._table.indexOf(name_match[0]);
if (!shadowed(name_index)) {
if (!this._isShadowed(name_index)) {
return {

@@ -205,18 +204,19 @@ name: name_index,

// `generateRemoveCommand` generates a command (header representation) that causes the decoder to
// drop the given pair from the Working Set.
// `generateRemoveCommand` generates a command (an Indexed Header Representation) that causes the
// decoder to drop the given pair from the Working Set.
CompressionContext.prototype.generateRemoveCommand = function(pair) {
var match;
for (var i = 0; i < this._working.length; i++) {
var entry = this._working[i];
// * if the given header is in the Working Set, then the command is an Indexed Representation.
if (entry.pair === pair) {
return {
name: entry.index,
value: entry.index,
index: -1
};
if (this._working[i].pair === pair) {
match = this._working[i];
break;
}
}
// * if the given pair is not in the Working Set, it returns `undefined`
return undefined;
return {
name: match.index,
value: match.index,
index: -1
};
};

@@ -254,3 +254,3 @@

function Decompressor(type, log) {
this._log = log || utils.nolog;
this._log = (log || logging.root).child({ component: 'decompressor' });

@@ -270,3 +270,3 @@ var initial_table = (type === 'REQUEST') ? CompressionContext.initialRequestTable

Decompressor.prototype.decompress = function decompress(buffer) {
this._log.trace({ buffer: buffer }, 'Starting header decompression');
this._log.trace({ data: buffer }, 'Starting header decompression');

@@ -317,3 +317,3 @@ // * First, upon starting the decoding of a new set of headers, the reference set of headers is

function Compressor(type, log) {
this._log = log || utils.nolog;
this._log = (log || logging.root).child({ component: 'compressor' });

@@ -381,3 +381,3 @@ var initial_table = (type === 'REQUEST') ? CompressionContext.initialRequestTable

this._log.trace({ buffer: buffer }, 'Header compression is done');
this._log.trace({ data: buffer }, 'Header compression is done');
return buffer;

@@ -655,3 +655,3 @@ };

} else {
flags['END_' + frame.type] = true;
flags['END_' + frame.type] = false;
flags['END_STREAM'] = false;

@@ -670,2 +670,7 @@ }

// Otherwise, the frame is forwarded without taking any action
else {
this.push(frame);
}
done();

@@ -694,3 +699,3 @@ };

if (frame.type !== this._type || frame.stream !== this._stream) {
throw new Error('A series of header frames must not be interleaved with other frames!');
this.emit('error', 'A series of header frames must not be interleaved with other frames!');
}

@@ -697,0 +702,0 @@ this._frames.push(frame);

@@ -1,33 +0,295 @@

var Duplex = require('stream').Duplex;
var utils = require('./utils');
var logging = require('./logging');
var Stream = require('./stream').Stream;
var Duplex = require('stream').Duplex;
var Serializer = require('./framer').Serializer;
var Deserializer = require('./framer').Deserializer;
var Compressor = require('./compressor').Compressor;
// Overview
// --------
// | ^ | ^
// v | v |
// +--------------+ +--------------+
// +---| stream1 |---| stream2 |---- .... ---+
// | | +----------+ | | +----------+ | |
// | | | stream1. | | | | stream2. | | |
// | +-| upstream |-+ +-| upstream |-+ |
// | +----------+ +----------+ |
// | | ^ | ^ |
// | v | v | |
// | +-----+-------------+-----+-------- .... |
// | ^ | | | |
// | | v | | |
// | +--------------+ | | |
// | | stream0 | | | |
// | | connection | | | |
// | | management | multiplexing |
// | +--------------+ flow control |
// | | ^ |
// | _read() | | _write() |
// | v | |
// | +------------+ +-----------+ |
// | |output queue| |input queue| |
// +----------------+------------+-+-----------+-----------------+
// | ^
// read() | | write()
// v |
// Connection
// ----------
exports.Connection = Connection;
// `initialRequest` and `initialResponse` are optional
function Connection(role, socket, settings, initialRequest, initialResponse) {
// The main aspects of managing the connection are:
function Connection(firstStreamId, settings, log) {
// * handling IO, particularly multiplexing/demultiplexing incoming and outgoing frames
Duplex.call(this, { objectMode: true });
this.socket = socket;
this.role = role; // 'client' or 'server'
this.next_stream_id = (this.role === 'CLIENT') ? 1 : 2;
this.serializer = new Serializer();
this.deserializer = new Deserializer();
this.compressor = new Compressor();
// * logging: every method uses the common logger object
this._log = (log || logging.root).child({ component: 'connection' });
this.serializer.pipe(this.socket).pipe(this.deserializer);
// * stream management
this._initializeStreamManagement(firstStreamId);
// * settings management
this._initializeSettingsManagement(settings);
// * lifecycle management
this._initializeLifecycleManagement();
// * flow control
this._initializeFlowControl();
}
Connection.prototype = Object.create(Duplex.prototype, { constructor: { value: Connection } });
// Stream management
// -----------------
Connection.prototype._initializeStreamManagement = function _initializeStreamManagement(firstStreamId) {
this._control = new Duplex({ objectMode: true });
this._control._write = function(frame, encoding, done) {
this.emit(frame.type, frame);
done();
};
this._control._read = utils.noop;
this._control.on('readable', this.emit.bind(this, 'stream_readable'));
this.streams = [{ upstream: this._control }];
this._next_stream_id = firstStreamId;
};
Connection.prototype._newStream = function _newStream(id) {
var stream = new Stream(this._log.child({ stream: id }));
this._log.trace({ id: id }, 'Adding new stream.');
this.streams[id] = stream;
stream.upstream.on('readable', this.emit.bind(this, 'stream_readable'));
return stream;
};
Connection.prototype.createStream = function createStream() {
var id = this.next_stream_id;
this.next_stream_id += 2;
var id = this._next_stream_id;
this._next_stream_id += 2;
return this._newStream(id);
};
Connection.prototype._read = function read() {
// Multiplexing
// ------------
Connection.prototype._read = function _read() { // TODO: prioritization
this._log.trace('Starting forwarding frames from streams.');
var more_needed = true, stream, frame;
for (var id = 0; id < this.streams.length && more_needed; id++) {
stream = this.streams[id];
if (stream) {
while (frame = stream.upstream.read()) {
frame.stream = id;
more_needed = this._send(frame);
}
}
}
if (more_needed === true) {
this._log.trace('More chunk is needed, but we could not provide more.');
this.once('stream_readable', this._read.bind(this));
}
else if (more_needed === null) {
this._log.trace('We could not send more because of insufficient flow control window.'); // TODO: push back frame
this.once('window_update', this._read.bind(this));
}
else {
this._log.trace('No more chunk needed, stopping forwarding.');
}
};
Connection.prototype._write = function write(chunk, encoding, callback) {
Connection.prototype._write = function write(frame, encoding, done) {
var stream = this.streams[frame.stream];
if (!stream) {
stream = this._newStream(frame.stream);
this.emit('incoming_stream', stream);
this._log.debug({ id: frame.stream }, 'New incoming stream.');
}
this.emit('receiving', frame);
stream.upstream.write(frame);
done();
};
// Settings management
// -------------------
Connection.prototype._initializeSettingsManagement = function _initializeSettingsManagement(settings) {
this._settings = settings;
this._log.info('Sending the first SETTINGS frame as part of the connection header.');
this._control.push({
type: 'SETTINGS',
settings: this._settings
});
this.once('receiving', function(frame) {
if (frame.stream === 0 && frame.type === 'SETTINGS') {
this._log.info('Receiving the first SETTINGS frame as part of the connection header.');
} else {
this.reset();
}
});
this._control.on('SETTINGS', this._receiveSettings.bind(this));
};
Connection.prototype._receiveSettings = function _receiveSettings(frame) {
};
// Lifecycle management
// --------------------
Connection.prototype._initializeLifecycleManagement = function _initializeLifecycleManagement() {
this._pings = {};
this._control.on('PING', this._receivePing.bind(this));
this._control.on('GOAWAY', this._receiveGoaway.bind(this));
};
Connection.prototype._generatePingId = function _generatePingId() {
do {
var id = '';
for (var i = 0; i < 16; i++) {
id += Math.floor(Math.random()*16).toString(16);
}
} while(!(id in this._pings));
return id;
};
Connection.prototype.ping = function ping(callback) {
var id = this._generatePingId();
var data = new Buffer(id, 'hex');
this._pings[id] = callback;
this._log.debug({ data: data }, 'Sending PING.')
this._control.push({
type: 'PING',
flags: {
PONG: false
},
data: new Buffer(id, 'hex')
});
};
Connection.prototype._receivePing = function _receivePing(frame) {
if (frame.flags.PONG) {
var id = frame.data.toString('hex');
if (id in this._pings) {
this._log.debug({ data: frame.data }, 'Receiving answer for a PING.');
this._pings[id]();
delete this._pings[id];
} else {
this._log.warning({ data: frame.data }, 'Unsolicited PING answer.');
}
} else {
this._log.debug({ data: frame.data }, 'Answering PING.')
this._control.push({
type: 'PING',
flags: {
PONG: true
},
data: frame.data
});
}
};
Connection.prototype.reset = function reset() {
};
Connection.prototype._receiveGoaway = function _receiveGoaway(frame) {
};
// Flow control
// ------------
Connection.prototype._initializeFlowControl = function _initializeFlowControl() {
// Turning off flow control for incoming frames (not yet supported):
this._control.push({
type: 'WINDOW_UPDATE',
flags: {
END_FLOW_CONTROL: true
},
window_size: 0
});
// Initializing flow control for outgoing frames
this._window = INITIAL_WINDOW_SIZE;
this._control.on('WINDOW_UPDATE', this._updateWindow.bind(this));
};
// When a HTTP/2.0 connection is first established, new streams are created with an initial flow
// control window size of 65535 bytes.
var INITIAL_WINDOW_SIZE = 65535;
// A SETTINGS frame can alter the initial flow control window size for all current streams. When the
// value of SETTINGS_INITIAL_WINDOW_SIZE changes, a receiver MUST adjust the size of all stream by
// calling the `setInitialWindowSize` method. The window size has to be modified by the difference
// between the new value and the old value.
Connection.prototype.setInitialWindowSize = function setInitialWindowSize(initialWindowSize) {
this._window = this._window - this._initialWindowSize + initialWindowSize;
this._initialWindowSize = initialWindowSize;
};
// Flow control can be disabled for all streams on the connection using the `disableFlowControl`
// method. This may happen when there's a SETTINGS frame received with the
// SETTINGS_FLOW_CONTROL_OPTIONS setting.
Connection.prototype.disableFlowControl = function disableFlowControl() {
this._window = Infinity;
};
// The `_updateWindow` method gets called every time there's an incoming WINDOW_UPDATE frame. It
// modifies the flow control window:
//
// * Flow control can be disabled for an individual stream by sending a WINDOW_UPDATE with the
// END_FLOW_CONTROL flag set. The payload of a WINDOW_UPDATE frame that has the END_FLOW_CONTROL
// flag set is ignored.
// * A sender that receives a WINDOW_UPDATE frame updates the corresponding window by the amount
// specified in the frame.
Connection.prototype._updateWindow = function _updateWindow(frame) {
if (frame.flags.END_FLOW_CONTROL) {
this.disableFlowControl();
} else {
this._window += frame.window_size;
}
this.emit('window_update');
};
Connection.prototype._send = function _send(frame) {
if (frame && frame.type === 'DATA') {
if (frame.data.length > this._window) {
return null;
}
this._window -= frame.data.length;
}
return this.push(frame);
};

@@ -5,3 +5,3 @@ // The framer consists of two [Transform Stream][1] subclasses that operate in [object mode][2]:

// [2]: http://nodejs.org/api/stream.html#stream_new_stream_readable_options
var utils = require('./utils');
var logging = require('./logging');

@@ -13,2 +13,3 @@ var Transform = require('stream').Transform;

// Serializer

@@ -26,3 +27,3 @@ // ----------

function Serializer(log) {
this._log = log || utils.nolog;
this._log = (log || logging.root).child({ component: 'serializer' });
Transform.call(this, { objectMode: true });

@@ -47,4 +48,6 @@ }

for (var i = 0; i < buffers.length; i++) {
this._log.trace({ data: buffers[i] }, 'Outgoing data.');
this.push(buffers[i]);
}
done();

@@ -65,3 +68,3 @@ };

function Deserializer(log) {
this._log = log || utils.nolog;
this._log = (log || logging.root).child({ component: 'deserializer' });
Transform.call(this, { objectMode: true });

@@ -91,2 +94,4 @@ this._next(COMMON_HEADER_SIZE);

this._log.trace({ data: chunk }, 'Incoming data.');
while(cursor < chunk.length) {

@@ -102,26 +107,29 @@ // The content of an incoming buffer is first copied to `_buffer`. If it can't hold the full

// the actual state.
if (this._cursor === this._buffer.length) {
if (this._waiting_for_header) {
// If it's header then the parsed data is stored in a temporary variable and then the
// deserializer waits for the specified length payload.
Deserializer.commonHeader(this._buffer, this._frame);
this._next(this._frame.length);
// If it's header then the parsed data is stored in a temporary variable and then the
// deserializer waits for the specified length payload.
if (this._cursor === this._buffer.length && this._waiting_for_header) {
Deserializer.commonHeader(this._buffer, this._frame);
this._next(this._frame.length);
}
// If it's payload then the frame object is finalized and then gets pushed out.
// Unknown frame types are ignored.
//
// Note: If we just finished the parsing of a header and the payload length is 0, this branch
// will also run.
if (this._cursor === this._buffer.length && !this._waiting_for_header) {
if (this._frame.type) {
try {
Deserializer[this._frame.type](this._buffer, this._frame);
this._log.debug({ frame: this._frame }, 'Incoming frame');
this.push(this._frame);
} catch(error) {
this._log.error({ err: error }, 'Incoming frame parsing error');
this.emit('error', error);
}
} else {
// If it's payload then the frame object is finalized and then gets pushed out.
// Unknown frame types are ignored.
if (this._frame.type) {
try {
Deserializer[this._frame.type](this._buffer, this._frame);
this._log.debug({ frame: this._frame }, 'Incoming frame');
this.push(this._frame);
} catch(error) {
this._log.error({ state: this, error: error }, 'Incoming frame parsing error');
this.emit('error', error);
}
} else {
this._log.warn({ frame: this._frame }, 'Unknown type incoming frame');
}
this._next(COMMON_HEADER_SIZE);
this._log.warn({ frame: this._frame }, 'Unknown type incoming frame');
}
this._next(COMMON_HEADER_SIZE);
}

@@ -185,2 +193,6 @@ }

var generic_attributes = ['length', 'type', 'flags', 'stream'];
var type_specific_attributes = {};
Serializer.commonHeader = function writeCommonHeader(frame, buffers) {

@@ -239,2 +251,9 @@ var header_buffer = new Buffer(COMMON_HEADER_SIZE);

// Every frame type is registered in the following places:
//
// * `frame_types`: a register of frame type codes (used by `commonHeader()`)
// * `frame_flags`: a register of valid flags for frame types (used by `commonHeader()`)
// * `type_specific_attributes`: a register of frame specific frame object attributes (used by
// logging code and also serves as documentation for frame objects)
// [DATA Frames](http://http2.github.io/http2-spec/#DataFrames)

@@ -258,2 +277,4 @@ // ------------------------------------------------------------

type_specific_attributes.DATA = ['data'];
Serializer.DATA = function writeData(frame, buffers) {

@@ -290,2 +311,4 @@ buffers.push(frame.data);

type_specific_attributes.HEADERS = ['priority', 'headers', 'data'];
// 0 1 2 3

@@ -330,2 +353,4 @@ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1

type_specific_attributes.PRIORITY = ['priority'];
// 0 1 2 3

@@ -360,2 +385,4 @@ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1

type_specific_attributes.RST_STREAM = ['error'];
// 0 1 2 3

@@ -392,2 +419,4 @@ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1

type_specific_attributes.SETTINGS = ['settings'];
// The payload of a SETTINGS frame consists of zero or more settings. Each setting consists of an

@@ -485,2 +514,4 @@ // 8-bit reserved field, an unsigned 24-bit setting identifier, and an unsigned 32-bit value.

type_specific_attributes.PUSH_PROMISE = ['promised_stream', 'headers', 'data'];
// 0 1 2 3

@@ -525,2 +556,4 @@ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1

type_specific_attributes.PING = ['data'];
// In addition to the frame header, PING frames MUST contain 8 additional octets of opaque data.

@@ -553,2 +586,4 @@

type_specific_attributes.GOAWAY = ['last_stream', 'error'];
// 0 1 2 3

@@ -596,2 +631,4 @@ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1

type_specific_attributes.WINDOW_UPDATE = ['window_size'];
// The payload of a WINDOW_UPDATE frame is a 32-bit value indicating the additional number of bytes

@@ -627,1 +664,31 @@ // that the sender can transmit in addition to the existing flow control window. The legal range

];
// Logging
// -------
// [Bunyan serializers](https://github.com/trentm/node-bunyan#serializers) to improve logging output
// for debug messages emitted in this component.
// * `frame` serializer: it transforms data attributes from Buffers to hex strings and filters out
// flags that are not present.
logging.serializers.frame = function(frame) {
var log_entry = {};
generic_attributes.concat(type_specific_attributes[frame.type]).forEach(function(name) {
log_entry[name] = frame[name];
});
if (frame.data instanceof Buffer) {
log_entry.data = frame.data.toString('hex');
}
log_entry.flags = Object.keys(frame.flags || {}).filter(function(name) {
return frame.flags[name] === true;
});
return log_entry;
};
// * `data` serializer: it simply transforms a buffer to a hex string.
logging.serializers.data = function(data) {
return data.toString('hex');
};

@@ -1,2 +0,3 @@

var utils = require('../lib/utils');
var utils = require('./utils');
var logging = require('./logging');

@@ -18,10 +19,10 @@ var MAX_HTTP_PAYLOAD_SIZE = 16383; // TODO: this is repeated in multiple files

// * every method uses the common logger object
this._log = log || require('./utils').nolog;
this._log = (log || logging.root).child({ component: 'stream' });
// * sending and receiving frames to/from the upstream connection
this._initializeUpstream();
// * receiving and sending stream management commands
this._initializeManagement();
// * sending and receiving frames to/from the upstream connection
this._initializeUpstream();
// * maintaining the state of the stream (idle, open, closed, etc.) and error detection

@@ -43,9 +44,10 @@ this._initializeState();

Stream.prototype._initializeManagement = function _initializeManagement() {
this.on('receiving', function(frame) {
this.upstream.on('receiving', function(frame) {
if (frame.type === 'PUSH_PROMISE') {
this.emit('promise', frame.headers);
} else if (frame.type === 'HEADERS') {
this.priority = frame.priority;
this.emit('headers', frame.headers);
}
});
}.bind(this));
this.on('error', function() {

@@ -72,2 +74,3 @@ this.push(null);

});
this.priority = priority;
};

@@ -95,3 +98,3 @@

this.upstream._queue = [];
this.upstream._read = function noop() {};
this.upstream._read = utils.noop;

@@ -187,3 +190,3 @@ // When there's an incoming frame, we let the world know this by emitting a 'receiving' event.

if (this.state !== state) {
this._log.debug({ state: state }, 'State transition');
this._log.debug({ from: this.state, to: state }, 'State transition');
this.emit('state', state);

@@ -354,4 +357,6 @@ this.state = state;

Stream.prototype._initializeFlowControl = function _initializeFlowControl() {
this._read = function noop() {};
this.window = INITIAL_WINDOW_SIZE;
this._read = utils.noop;
this.upstream.on('receiving', this._receiveData.bind(this));
this._window = INITIAL_WINDOW_SIZE;
this.upstream.on('receiving', this._updateWindow.bind(this));

@@ -369,3 +374,3 @@ };

Stream.prototype.setInitialWindowSize = function setInitialWindowSize(initialWindowSize) {
this.window = this.window - this._initialWindowSize + initialWindowSize;
this._window = this._window - this._initialWindowSize + initialWindowSize;
this._initialWindowSize = initialWindowSize;

@@ -378,3 +383,3 @@ };

Stream.prototype.disableFlowControl = function disableFlowControl() {
this.window = Infinity;
this._window = Infinity;
};

@@ -395,3 +400,3 @@

} else {
this.window += frame.window_size;
this._window += frame.window_size;
}

@@ -410,3 +415,3 @@ this.emit('window_update');

// `window` is not enough to send a chunk
while (chunks.length > 0 && chunks[0].length <= this.window) {
while (chunks.length > 0 && chunks[0].length <= this._window) {
var chunk = chunks.shift();

@@ -423,3 +428,3 @@ sent += chunk.length;

// is not counted.
this.window -= chunk.length;
this._window -= chunk.length;
}

@@ -438,1 +443,11 @@

};
Stream.prototype._receiveData = function _receiveData(frame) {
if (frame.type === 'DATA') {
this.push(frame.data);
}
if (frame.flags.END_STREAM) {
this.push(null);
}
};

@@ -34,14 +34,7 @@ // Concatenate an array of buffers into a new buffer

}
return object;
return clone;
};
// No-op dummy logger
// Placeholder no-op function
function noop() {}
exports.nolog = {
fatal: noop,
error: noop,
warn: noop,
info: noop,
debug: noop,
trace: noop
};
exports.noop = noop;
{
"name": "http2",
"version": "0.0.5",
"version": "0.0.6",
"description": "An HTTP/2 server implementation",
"main": "index.js",
"main": "lib/index.js",
"engines" : {

@@ -10,2 +10,3 @@ "node" : ">=0.10.0"

"devDependencies": {
"istanbul": "*",
"chai": "*",

@@ -17,3 +18,3 @@ "mocha": "*",

"scripts": {
"test": "mocha --reporter spec",
"test": "istanbul test _mocha -- --reporter spec",
"prepublish": "docco lib/* --output doc --layout parallel --css doc/docco.css"

@@ -20,0 +21,0 @@ },

node-http2
==========
An HTTP/2 server implementation for node.js, developed as a [Google Summer of Code project](https://google-melange.appspot.com/gsoc/project/google/gsoc2013/molnarg/5001).
An HTTP/2 server implementation for node.js, developed as a [Google Summer of Code project][1].
[1]: https://google-melange.appspot.com/gsoc/project/google/gsoc2013/molnarg/5001
Status
======
------
I post weekly status updates [on my blog](http://gabor.molnar.es/blog/categories/google-summer-of-code/). Short version: framing layer, compression and stream implementation is ready. Connection handling is next.
I post weekly status updates [on my blog][2]. Short version: an example server and client can be
run. A more node-like API, more documentation and tests are coming soon.
[2]: http://gabor.molnar.es/blog/categories/google-summer-of-code/
Installation
============
------------

@@ -20,16 +25,83 @@ Using npm:

Documentation
=============
API
---
The developer documentation is generated using [docco](http://jashkenas.github.io/docco/), and is located in the `doc` directory. API documentation is coming later. The docs are usually updated only before releasing a new version. To regenerate them manually, run `npm run-script prepublish`.
API documentation is coming later, when the public API becomes usable.
Running the tests
=================
Examples
--------
To run the tests, first install [mocha](http://visionmedia.github.io/mocha/) and [chai](http://chaijs.com/) (`npm install mocha chai`) and then run `npm test`.
An example server (serving up static files from its own directory) and client are available in the
example directory.
The tests are written in BDD style, so they are a good starting point to understand the code.
Running the server:
```bash
$ node ./example/server.js
Listening on localhost:8080, serving up files from ./example
```
Downloading the server's source code from the server (the downloaded content gets pumped out to the
standard error output):
```bash
$ node ./example/client.js 'http://localhost:8080/server.js' 2>/tmp/server.js
```
Development
-----------
### Development dependencies ###
There are a few libraries you will need to have installed to do anything described in the following
sections. After installing node-http2, run `npm install` in its directory to install development
dependencies.
Used libraries:
* [mocha][3] for tests
* [chai][4] for assertions
* [istanbul][5] for code coverage analysis
* [docco][6] for developer documentation
* [bunyan][7] for logging
[3]: http://visionmedia.github.io/mocha/
[4]: http://chaijs.com/
[5]: https://github.com/gotwarlost/istanbul
[6]: http://jashkenas.github.io/docco/
[7]: https://github.com/trentm/node-bunyan
### Developer documentation ###
The developer documentation is located in the `doc` directory. The docs are usually updated only
before releasing a new version. To regenerate them manually, run `npm run-script prepublish`.
### Running the tests ###
It's easy, just run `npm test`. The tests are written in BDD style, so they are a good starting
point to understand the code.
To generate a code coverage report, run `npm test --coverage`. Code coverage summary as of version
0.0.6:
```
Statements : 91.18% ( 775/850 )
Branches : 84.69% ( 249/294 )
Functions : 88.03% ( 103/117 )
Lines : 91.18% ( 775/850 )
```
### Logging ###
Logging is turned off by default. To turn it on, set the `HTTP2_LOG` environment variable to
`fatal`, `error`, `warn`, `info`, `debug` or `trace` (the logging level). Log output is in JSON
format, and can be pretty printed using the [bunyan][7] command line tool.
For example, running the test client with debug level logging output:
```
HTTP2_LOG=debug node ./example/client.js 'http://localhost:8080/server.js' 2>/tmp/server.js | bunyan -o short
```
License
=======
-------

@@ -36,0 +108,0 @@ The MIT License

@@ -9,5 +9,2 @@ var expect = require('chai').expect;

var log = process.env.DEBUG ? require('bunyan').createLogger({ name: 'http2', level: 'trace' })
: undefined;
var test_integers = [{

@@ -88,2 +85,9 @@ N: 5,

buffer: new Buffer('5F0A' + '067365636F6E64', 'hex')
}, {
header: {
name: 40,
value: 'third',
index: -1
},
buffer: new Buffer('7F0A' + '057468697264', 'hex')
}];

@@ -104,3 +108,17 @@

},
buffer: concat(test_headers.slice(3).map(function(test) { return test.buffer; }))
buffer: concat(test_headers.slice(3, 7).map(function(test) { return test.buffer; }))
}, {
headers: {
':path': '/my-example/resources/script.js',
'user-agent': 'my-user-agent',
'x-my-header': ['second', 'third']
},
buffer: test_headers[7].buffer
}, {
headers: {
':status': '200',
'user-agent': 'my-user-agent',
'cookie': ['first', 'second', 'third', 'third'],
'verylong': (new Buffer(9000)).toString('hex')
}
}];

@@ -182,3 +200,3 @@

it('should return the parsed header set in { name1: value1, name2: [value2, value3], ... } format', function() {
var decompressor = new Decompressor('REQUEST', log);
var decompressor = new Decompressor('REQUEST');
var header_set = test_header_sets[0];

@@ -188,4 +206,28 @@ expect(decompressor.decompress(header_set.buffer)).to.deep.equal(header_set.headers);

expect(decompressor.decompress(header_set.buffer)).to.deep.equal(header_set.headers);
header_set = test_header_sets[2];
expect(decompressor.decompress(header_set.buffer)).to.deep.equal(header_set.headers);
});
});
describe('transform stream', function() {
it('should emit an error event if a series of header frames is interleaved with other frames', function() {
var decompressor = new Decompressor('REQUEST');
var error_occured = false;
decompressor.on('error', function() {
error_occured = true;
});
decompressor.write({
type: 'HEADERS',
flags: {
END_HEADERS: false
},
data: new Buffer(5)
});
decompressor.write({
type: 'DATA',
flags: {},
data: new Buffer(5)
});
expect(error_occured).to.be.equal(true);
});
});
});

@@ -196,6 +238,6 @@

it('should be true for any header set if the states are synchronized', function() {
var compressor = new Compressor('REQUEST', log);
var compressor = new Compressor('REQUEST');
var decompressor = new Decompressor('REQUEST');
for (var i = 0; i < 10; i++) {
var headers = test_header_sets[i%2].headers;
var headers = test_header_sets[i%4].headers;
var compressed = compressor.compress(headers);

@@ -210,3 +252,23 @@ var decompressed = decompressor.decompress(compressed);

});
describe('source.pipe(compressor).pipe(decompressor).pipe(destination)', function() {
it('should behave like source.pipe(destination) for a stream of frames', function(done) {
var compressor = new Compressor('RESPONSE');
var decompressor = new Decompressor('RESPONSE');
compressor.pipe(decompressor);
for (var i = 0; i < 10; i++) {
compressor.write({
type: i%2 ? 'HEADERS' : 'PUSH_PROMISE',
flags: {},
headers: test_header_sets[i%4].headers
});
}
setTimeout(function() {
for (var j = 0; j < 10; j++) {
expect(decompressor.read().headers).to.deep.equal(test_header_sets[j%4].headers);
}
done();
}, 10);
});
});
});
});

@@ -8,5 +8,2 @@ var expect = require('chai').expect;

var log = process.env.DEBUG ? require('bunyan').createLogger({ name: 'http2', level: 'trace' })
: undefined;
var frame_types = {

@@ -189,3 +186,3 @@ DATA: ['data'],

it('should transform frame objects to appropriate buffers', function() {
var stream = new Serializer(log);
var stream = new Serializer();

@@ -243,3 +240,3 @@ for (var i = 0; i < test_frames.length; i++) {

it('should transform buffers to appropriate frame object', function() {
var stream = new Deserializer(log);
var stream = new Deserializer();

@@ -246,0 +243,0 @@ var shuffled = shuffle_buffers(test_frames.map(function(test) { return test.buffer; }));

@@ -5,8 +5,5 @@ var expect = require('chai').expect;

var log = process.env.DEBUG ? require('bunyan').createLogger({ name: 'http2', level: 'trace' })
: undefined;
// Execute a list of commands and assertions
function execute_sequence(sequence, done) {
var stream = new Stream(log);
var stream = new Stream();

@@ -57,3 +54,2 @@ var outgoing_frames = [];

checks.forEach(function(check) {
//console.log('check', check);
if ('outgoing' in check) {

@@ -64,7 +60,5 @@ expect(outgoing_frames.shift()).to.deep.equal(check.outgoing);

} else {
//console.log('X')
throw new Error('Invalid check', check);
}
});
//console.log('done')
done();

@@ -78,10 +72,10 @@ }

IDLE: [
{ type: 'DATA', data: new Buffer(5) },
{ type: 'PRIORITY', priority: 1 },
{ type: 'DATA', flags: {}, data: new Buffer(5) },
{ type: 'PRIORITY', flags: {}, priority: 1 },
{ type: 'WINDOW_UPDATE', flags: {}, settings: {} }
],
RESERVED_LOCAL: [
{ type: 'DATA', data: new Buffer(5) },
{ type: 'DATA', flags: {}, data: new Buffer(5) },
{ type: 'HEADERS', flags: {}, headers: {}, priority: undefined },
{ type: 'PRIORITY', priority: 1 },
{ type: 'PRIORITY', flags: {}, priority: 1 },
{ type: 'PUSH_PROMISE', flags: {}, headers: {} },

@@ -91,4 +85,4 @@ { type: 'WINDOW_UPDATE', flags: {}, settings: {} }

RESERVED_REMOTE: [
{ type: 'DATA', data: new Buffer(5) },
{ type: 'PRIORITY', priority: 1 },
{ type: 'DATA', flags: {}, data: new Buffer(5) },
{ type: 'PRIORITY', flags: {}, priority: 1 },
{ type: 'PUSH_PROMISE', flags: {}, headers: {} },

@@ -102,5 +96,5 @@ { type: 'WINDOW_UPDATE', flags: {}, settings: {} }

HALF_CLOSED_REMOTE: [
{ type: 'DATA', data: new Buffer(5) },
{ type: 'DATA', flags: {}, data: new Buffer(5) },
{ type: 'HEADERS', flags: {}, headers: {}, priority: undefined },
{ type: 'PRIORITY', priority: 1 },
{ type: 'PRIORITY', flags: {}, priority: 1 },
{ type: 'PUSH_PROMISE', flags: {}, headers: {} },

@@ -107,0 +101,0 @@ { type: 'WINDOW_UPDATE', flags: {}, settings: {} }

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc