🚀 Big News: Socket Acquires Coana to Bring Reachability Analysis to Every Appsec Team. Learn more
Socket
Book a DemoInstallSign in
Socket

http2

Package Overview
Dependencies
Maintainers
1
Versions
44
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

http2 - npm Package Compare versions

Comparing version

to
0.0.3

doc/utils.html

804

lib/compressor.js

@@ -0,56 +1,282 @@

// HTTP/2 compression is implemented by two [Transform Stream][1] subclasses that operate in
// [object mode][2]: the Compressor and the Decompressor. These provide a layer between the
// [framer](framer.html) and the [connection handling component](connection.html) that
// generates/parses binary header data.
//
// Compression functionality is separated from the integration part. The latter is implemented in
// the last part of the file, while the larger part of the file is an implementation of the [HTTP/2
// Header Compression][3] spec. Both Compressor and Decompressor store their compression related
// state in CompressionContext objects. It is always accessed using methods that guarantee that
// it remains in a valid state.
//
// [1]: http://nodejs.org/api/stream.html#stream_class_stream_transform
// [2]: http://nodejs.org/api/stream.html#stream_new_stream_readable_options
// [3]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-00
var Transform = require('stream').Transform;
exports.CompressionContext = CompressionContext;
exports.Decompressor = Decompressor;
exports.Compressor = Compressor;
exports.Decompressor = Decompressor;
exports.CompressionContext = CompressionContext;
function Compressor(request) {
var initial_table = request ? CompressionContext.initialRequestTable : CompressionContext.initialResponseTable
this._context = new CompressionContext(initial_table)
// Compression Context
// ===================
// A `CompressionContext` consists of the following tables:
//
// * Header Table (`this._table`) that is limited in size (`this._limit`)
// * Reference Set (`this._reference`)
// * Working Set (`this._working`)
//
// Header Table and Reference Set entries are `[name, value]` pairs (where both are strings), while
// Working Set entries are objects with two properties: `index` (a number) and `pair` (a pair).
//
// There are only two methods that modify the state of the tables: `reinitialize()` and
// `execute(command)`.
var DEFAULT_HEADER_TABLE_LIMIT = 4096;
function CompressionContext(table, limit) {
this._table = table ? table.slice() : [];
this._limit = limit || DEFAULT_HEADER_TABLE_LIMIT;
this._reference = [];
this._working = [];
}
Compressor.prototype.compress = function compress(headers) {
// { name: value, ... } -> [[name, value], ... ]
var pairs = [];
for (var name in headers) {
var value = headers[name]
if (value instanceof Array) {
for (i = 0; i< value.length; i++) {
pairs.push([name, value[i]]);
}
// The `equal(pair1, pair2)` static method decides if two headers are considered equal. Name
// comparison is case insensitive while value comparison is case sensitive.
CompressionContext.equal = function(pair1, pair2) {
return (pair1[0].toLowerCase() === pair2[0].toLowerCase()) && (pair1[1] === pair2[1]);
};
// `getWorkingSet()` returns the current working set as an array of `[name, value]` pairs.
CompressionContext.prototype.getWorkingSet = function getWorkingSet() {
return this._working.map(function(entry) {
return entry.pair;
});
};
// `reinitialize()` must be called between parsing/generating header blocks.
CompressionContext.prototype.reinitialize = function reinitialize() {
var self = this;
// * It first executes the steps needed to *end the processing of the previous block*.
// The new reference set of headers is computed by removing from the working set all the headers
// that are not present in the header table.
this._reference = this._working.filter(function(entry) {
return self._table.indexOf(entry.pair) !== -1;
}).map(function(entry) {
return entry.pair;
});
// * Then *prepares the processing of the next block*.
// The reference set of headers is interpreted into the working set of headers: for each header
// in the reference set, an entry is added to the working set, containing the header name, its
// value, and its current index in the header table.
this._working = this._reference.map(function(pair) {
var index = self._table.indexOf(pair);
return { index: index, pair: pair };
});
};
// `execute(command)` executes the given command ([header representation][1]): updates the Header
// Table and the Working Set.
// [1]: http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-00#section-3.3
// The *JavaScript object representation* of a command:
//
// {
// name: String || Integer, // string literal or index
// value: String || Integer, // string literal or index
// index: Integer // -1 : no indexing
// // 0 - ... : substitution indexing
// // Infinity : incremental indexing
// }
//
// Examples:
//
// Indexed:
// { name: 2 , value: 2 , index: -1 }
// Literal:
// { name: 2 , value: 'X', index: -1 } // without indexing
// { name: 2 , value: 'Y', index: Infinity } // incremental indexing
// { name: 'A', value: 'Z', index: 123 } // substitution indexing
CompressionContext.prototype.execute = function execute(command) {
var index, pair;
// * For an indexed representation, it checks whether the index is present in the working set.
// If true, the corresponding entry is removed from the working set. If several entries correspond
// to this encoded index, all these entries are removed from the working set. If the index is not
// present in the working set, it is used to retrieve the corresponding header from the Header
// Table, and a new entry is added to the working set representing this header.
if (typeof command.value === 'number') {
index = command.value;
var filtered = this._working.filter(function(entry) {
return entry.index !== index;
});
if (filtered.length === this._working.length) {
pair = this._table[index];
this._working.push({ index: index, pair: pair });
} else {
pairs.push([name, value]);
this._working = filtered;
}
}
// Diff encoding
var entries = this._context.encode(pairs);
// * For a literal representation, a new entry is added to the working set representing this
// header. If the literal representation specifies that the header is to be indexed, the header is
// added accordingly to the header table, and its index is included in the entry in the working
// set. Otherwise, the entry in the working set contains an undefined index.
else {
if (typeof command.name === 'number') {
pair = [this._table[command.name][0], command.value];
} else {
pair = [command.name, command.value];
}
// Serialization
var buffers = [];
for (var i = 0; i < entries.length; i++) {
buffers.push(Compressor.header(entries[i]));
if (command.index !== -1) {
if (command.index === Infinity) {
this._table.push(pair);
} else {
this._table.splice(command.index, 1, pair);
}
this._enforceSizeBound(); // TODO: The order of these two
index = this._table.indexOf(pair); // TODO: operation is not well defined!
}
this._working.push({ index: index, pair: pair });
}
};
return Array.prototype.concat.apply([], buffers);
// `generateAddCommand` tries to find a compact command (header representation) for the given
// `[name, value]` pair that causes the decoder to add the given pair to the Working Set.
CompressionContext.prototype.generateAddCommand = function(pair) {
var equal = CompressionContext.equal.bind(null, pair);
if (this.getWorkingSet().some(equal)) {
return undefined;
}
var working = this._working;
function shadowed(index) {
return working.some(function(entry) {
return entry.index === index;
});
}
var full_match = this._table.filter(equal);
if (full_match.length !== 0) {
var full_index = this._table.indexOf(full_match[0]);
if (!shadowed(full_index)) {
return {
name: full_index,
value: full_index,
index: -1
};
}
}
var name = pair[0].toLowerCase();
var name_match = this._table.filter(function(entry) {
return entry[0].toLowerCase() === name;
});
if (name_match.length !== 0) {
var name_index = this._table.indexOf(name_match[0]);
if (!shadowed(name_index)) {
return {
name: name_index,
value: pair[1],
index: name_index
};
}
}
return {
name: name,
value: pair[1],
index: Infinity
};
};
function Decompressor() {
var initial_table = request ? CompressionContext.initialRequestTable : CompressionContext.initialResponseTable
this._context = new CompressionContext(initial_table)
// `generateRemoveCommand` generates a command (header representation) that causes the decoder to
// drop the given pair from the Working Set.
CompressionContext.prototype.generateRemoveCommand = function(pair) {
for (var i = 0; i < this._working.length; i++) {
var entry = this._working[i];
// * if the given header is in the Working Set, then the command is an Indexed Representation.
if (entry.pair === pair) {
return {
name: entry.index,
value: entry.index,
index: -1
};
}
}
// * if the given pair is not in the Working Set, it returns `undefined`
return undefined;
};
// The header table size can be bounded so as to limit the memory requirements.
// The `_enforceSizeBound()` private method drops the entries that are over the limit
// (`this._limit`).
//
// The header table size is defined as the sum of the size of each entry of the table. The size
// of an entry is the sum of the length in bytes of its name, of value's length in bytes and of
// 32 bytes (for accounting for the entry structure overhead).
CompressionContext.prototype._enforceSizeBound = function() {
var table = this._table;
var size = 0;
for (var i = 0; i < table.length; i++) {
if (table[i].size === undefined) {
table[i].size = new Buffer(table[i][0] + table[i][1], 'utf8').length + 32;
}
size += table[i].size;
}
while (size > this._limit) {
var dropped = table.shift();
size -= dropped.size;
}
};
// [Decompression process](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-00#section-3.4)
// =======================
// The decompression process is always done by a `Decompressor` object.
//
// The compression related mutable state is stored in a contained `CompressionContext` object.
// The initial value of its Header Table depends on which side of the connection it is on.
function Decompressor(type) {
var initial_table = (type === 'REQUEST') ? CompressionContext.initialRequestTable
: CompressionContext.initialResponseTable;
this._context = new CompressionContext(initial_table);
this._initializeStream();
}
Decompressor.prototype = Object.create(Transform.prototype, { constructor: { value: Decompressor } });
// The `decompress` method takes a buffer, and returns the decoded header set.
//
// According to the spec, to ensure a correct decoding of a set of headers, the following steps or
// equivalent ones MUST be executed by the decoder.
Decompressor.prototype.decompress = function decompress(buffer) {
// Deserialization
var entries = [];
// * First, upon starting the decoding of a new set of headers, the reference set of headers is
// interpreted into the working set of headers
this._context.reinitialize();
// * Then, the header representations are processed in their order of occurrence in the frame.
// The decoding process of the header representations are defined in the `execute(command)`
// method of the `CompressionContext` class.
buffer.cursor = 0;
while (buffer.cursor < buffer.length) {
entries.push(Decompressor.header(buffer));
this._context.execute(Decompressor.header(buffer));
}
// Diff decoding
var pairs = this._context.decode(entries);
// * When all the header representations have been processed, the working set contains all the
// headers of the set of headers.
var pairs = this._context.getWorkingSet();
// [[name, value], ... ] -> { name: value, ... }
var headers = {}
// * The working set entries are `[name, value]` pairs. As a last step, these are converted to the
// usual header set format used in node.js: `{ name1: value1, name2: [value2, value3], ... }`
var headers = {};
for (var i = 0; i < pairs.length; i++) {
var name = pairs[i][0]
, value = pairs[i][1];
var name = pairs[i][0];
var value = pairs[i][1];
if (name in headers) {

@@ -66,149 +292,74 @@ if (headers[name] instanceof Array) {

}
return headers;
};
// [Header Encoding](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-00#section-3)
// =================
// Compression process
// ===================
function CompressionContext(table, limit) {
this._table = table ? table.slice() : [];
this._limit = limit || 4096;
this._reference = []
// The compression process is always done by a `Compressor` object.
//
// The compression related mutable state is stored in a contained `CompressionContext` object.
// The initial value of its Header Table depends on which side of the connection it is on.
function Compressor(type) {
var initial_table = (type === 'REQUEST') ? CompressionContext.initialRequestTable
: CompressionContext.initialResponseTable;
this._context = new CompressionContext(initial_table);
this._initializeStream();
}
Compressor.prototype = Object.create(Transform.prototype, { constructor: { value: Compressor } });
// To ensure a correct decoding of a set of headers, the following steps or equivalent ones MUST be
// executed by the decoder.
CompressionContext.prototype.decode = function(diff) {
// First, upon starting the decoding of a new set of headers, the reference set of headers is
// interpreted into the working set of headers: for each header in the reference set, an entry is
// added to the working set.
var table = this._table
, working = this._reference.slice();
var concat = require('../lib/utils').concat;
// Then, the header representations are processed in their order of occurrence in the frame.
for (var i = 0; i < diff.length; i++) {
var entry = diff[i], pair;
if (typeof entry.value === 'number') {
// For an indexed representation, the decoder checks whether the index is present in the
// working set. If true, the corresponding entry is removed from the working set. If several
// entries correspond to this encoded index, all these entries are removed from the working
// set. If the index is not present in the working set, it is used to retrieve the
// corresponding header from the header table, and a new entry is added to the working set
// representing this header.
pair = table[entry.value];
var working_index = working.indexOf(pair);
if (working_index !== -1) {
do {
working.splice(working_index, 1);
} while ((working_index = working.indexOf(pair)) !== -1)
} else {
working.push(pair);
// The `compress` method takes a header set and returns an array of buffers containing the
// encoded binary data.
//
// The inverse of the decoding process goes as follows:
Compressor.prototype.compress = function compress(headers) {
var i;
// * First, the usual node.js header set format (`{ name1: value1, name2: [value2, value3], ... }`)
// has to be converted to `[name, value]` pairs.
var pairs = [];
for (var name in headers) {
var value = headers[name];
if (value instanceof Array) {
for (i = 0; i< value.length; i++) {
pairs.push([name, value[i]]);
}
} else {
// For a literal representation, a new entry is added to the working set representing this
// header. If the literal representation specifies that the header is to be indexed, the
// header is added accordingly to the header table.
if (typeof entry.name === 'number') {
pair = [table[entry.name][0], entry.value];
} else {
pair = [entry.name, entry.value];
}
working.push(pair);
if (entry.indexing) {
if ('substitution' in entry) {
table.splice(entry.substitution, 1, pair);
} else {
table.push(pair);
}
this._enforceSizeBound();
}
pairs.push([name, value]);
}
}
// The new reference set of headers is computed by removing from the working set all the headers
// that are not present in the header table.
this._reference = working.filter(function(header) {
return table.indexOf(header) !== -1;
});
// * Before generating commands that make the working set equal to the generated pair set,
// the reference set and the working set has to be reinitialized.
this._context.reinitialize();
var working = this._context.getWorkingSet(), command, commands = [];
// When all the header representations have been processed, the working set contains all the
// headers of the set of headers.
return working;
};
CompressionContext.prototype.encode = function(workingset) {
var table = this._table
, old_reference = this._reference
, new_reference = []
, diff = [];
for (var i = 0; i < workingset.length; i++) {
var pair = workingset[i], fullmatch, namematch;
for (var j = 0; j < table.length; i++) {
if (table[j][0] === pair[0]) {
if (table[j][1] === pair[1]) {
fullmatch = j;
pair = table[fullmatch];
break;
} else {
namematch = j;
}
}
// * The first commands remove the unneeded headers from the working set.
for (i = 0; i < working.length; i++) {
if (!pairs.some(CompressionContext.equal.bind(null, working[i]))) {
command = this._context.generateRemoveCommand(working[i]);
this._context.execute(command);
commands.push(command);
}
}
if (fullmatch !== undefined && old_reference.indexOf(pair) === -1) {
diff.push({
name: fullmatch,
value: fullmatch
});
new_reference.push(table[fullmatch]);
} else if (fullmatch === undefined) {
diff.push({
name: namematch !== undefined ? namematch : pair[0],
value: pair[1],
indexing: true
})
new_reference.push(pair);
table.push(pair);
this._enforceSizeBound();
// * Then the headers that are not present in the working set yet are added.
for (i = 0; i < pairs.length; i++) {
if (!working.some(CompressionContext.equal.bind(null, pairs[i]))) {
command = this._context.generateAddCommand(pairs[i]);
this._context.execute(command);
commands.push(command);
}
}
for (var k = 0; k < old_reference.length; k++) {
var reference_pair = old_reference[k];
if (!(reference_pair in new_reference)) {
var unneeded_index = table.indexOf(reference_pair);
if (unneeded_index !== -1) {
diff.push({
name: unneeded_index,
value: unneeded_index
})
}
}
// * The last step is the serialization of the generated commands.
var buffers = [];
for (i = 0; i < commands.length; i++) {
buffers.push(Compressor.header(commands[i]));
}
this._reference = new_reference
}
// The header table size can be bounded so as to limit the memory requirements.
// The _cut() method drops the entries that are over the memory limit (`this._limit`)
CompressionContext.prototype._enforceSizeBound = function() {
// The header table size is defined as the sum of the size of each entry of the table. The size
// of an entry is the sum of the length in bytes of its name, of value's length in bytes and of
// 32 bytes (for accounting for the entry structure overhead).
var table = this._table;
var size = 0;
for (var i = 0; i < table.length; i++) {
if (table[i].size === undefined) {
table[i].size = new Buffer(table[i][0] + table[i][1], 'utf8').length + 32;
}
size += table[i].size;
}
while (size > this._limit) {
var dropped = table.shift();
size -= dropped.size;
}
return concat(Array.prototype.concat.apply([], buffers)); // [[buffers]] -> [buffers] -> buffer
};

@@ -275,5 +426,5 @@

Decompressor.integer = function readInteger(buffer, N) {
var I, limit = Math.pow(2,N) - 1
var limit = Math.pow(2,N) - 1;
I = buffer[buffer.cursor] & limit;
var I = buffer[buffer.cursor] & limit;
if (N !== 0) {

@@ -289,3 +440,3 @@ buffer.cursor += 1;

buffer.cursor += 1;
} while (buffer[buffer.cursor - 1] & 128)
} while (buffer[buffer.cursor - 1] & 128);
}

@@ -306,31 +457,21 @@

Compressor.string = function writeString(stringbuffer) {
var encoded_length = Compressor.integer(stringbuffer.length, 0);
return encoded_length.concat(stringbuffer);
Compressor.string = function writeString(str) {
var encoded_string = new Buffer(str, 'utf8');
var encoded_length = Compressor.integer(encoded_string.length, 0);
return encoded_length.concat(encoded_string);
};
Decompressor.string = function readString(buffer) {
var length = Decompressor.integer(buffer, 0)
, stringbuffer = buffer.slice(buffer.cursor, buffer.cursor + length);
var length = Decompressor.integer(buffer, 0);
var str = buffer.toString('utf8', buffer.cursor, buffer.cursor + length);
buffer.cursor += length;
return stringbuffer;
}
return str;
};
// Header representations
// ---------------------
// [Header representations](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-00#section-4.3)
// -----------------------
// The **JavaScript object representation** of a header record:
// The JavaScript object representation is described near the
// `CompressionContext.prototype.execute()` method definition.
//
// {
// name: String || Number, // literal or index
// value: String || Number, // literal or index
// indexing: Boolean, // with or without indexing
// substitution: Number // substitution index
// }
//
// Not all possible header objects are valid. Constraints:
//
// * if `value` is an index, `name` should be the same index and indexed representation is used
// * if `substitution` is used, indexing should be set to true
// **All binary header representations** start with a prefix signaling the representation type and

@@ -369,57 +510,67 @@ // an index represented using prefix coded integers:

var representations = {
indexed : { prefix: 7, pattern: 0x80 },
literal : { prefix: 5, pattern: 0x60 },
literal_incremental : { prefix: 5, pattern: 0x40 },
literal_substitution: { prefix: 6, pattern: 0x00 }
};
Compressor.header = function writeString(header) {
var buffers = [];
var representation, buffers = [];
if (typeof header.value === 'number') {
buffers.push(Compressor.integer(header.value, 7));
buffers[0][0][0] |= 128;
representation = representations.indexed;
} else if (header.index === -1) {
representation = representations.literal;
} else if (header.index === Infinity) {
representation = representations.literal_incremental;
} else {
representation = representations.literal_substitution;
}
if (representation === representations.indexed) {
buffers.push(Compressor.integer(header.value, representation.prefix));
} else {
var substitution = ('substitution' in header);
var prefix = substitution ? 6 : 5;
if (typeof header.name === 'number') {
buffers.push(Compressor.integer(header.name + 1, prefix));
buffers.push(Compressor.integer(header.name + 1, representation.prefix));
} else {
buffers.push(Compressor.integer(0, prefix));
buffers.push(Compressor.integer(0, representation.prefix));
buffers.push(Compressor.string(header.name));
}
if (!substitution) {
buffers[0][0][0] |= 64;
if (!header.indexing) {
buffers[0][0][0] |= 32;
}
if (representation === representations.literal_substitution) {
buffers.push(Compressor.integer(header.index, 0));
}
if (substitution) {
buffers.push(Compressor.integer(header.substitution, 0));
}
buffers.push(Compressor.string(header.value));
}
buffers[0][0][0] |= representation.pattern;
return Array.prototype.concat.apply([], buffers); // array of arrays of buffers -> array of buffers
}
};
Decompressor.header = function readString(buffer) {
var header = {};
var representation, header = {};
if (buffer[0] & 128) {
var index = Decompressor.integer(buffer, 7);
header.indexing = false;
header.name = index;
header.value = index;
} else {
var prefix, substitution;
if (buffer[0] & 64) {
header.indexing = !(buffer[0] & 32);
prefix = 5;
var first_byte = buffer[buffer.cursor];
if (first_byte & 0x80) {
representation = representations.indexed;
} else if (first_byte & 0x40) {
if (first_byte & 0x20) {
representation = representations.literal;
} else {
header.indexing = true;
substitution = true;
prefix = 6;
representation = representations.literal_incremental;
}
} else {
representation = representations.literal_substitution;
}
header.name = Decompressor.integer(buffer, prefix) - 1;
if (representation === representations.indexed) {
header.value = header.name = Decompressor.integer(buffer, representation.prefix);
header.index = -1;
} else {
header.name = Decompressor.integer(buffer, representation.prefix) - 1;
if (header.name === -1) {

@@ -429,4 +580,8 @@ header.name = Decompressor.string(buffer);

if (substitution) {
header.substitution = Decompressor.integer(buffer, 0);
if (representation === representations.literal_substitution) {
header.index = Decompressor.integer(buffer, 0);
} else if (representation === representations.literal_incremental) {
header.index = Infinity;
} else {
header.index = -1;
}

@@ -440,2 +595,126 @@

// The compression layer
// =====================
// This section describes the interaction between the compressor/decompressor and the rest of the
// HTTP/2 implementation. The Compressor and the Decompressor makes up a layer between the
// [framer](framer.html) and the [connection handling component](connection.html). They let most
// frames pass through, except HEADERS and PUSH_PROMISE frames. They convert the frames between
// these two representations:
//
// { {
// type: 'HEADERS', type: 'HEADERS',
// flags: {}, flags: {},
// stream: 1, <===> stream: 1,
// headers: { data: Buffer
// N1: 'V1', }
// N2: ['V1', 'V2', ...],
// // ...
// }
// }
//
// There are possibly several binary frames that belong to a single non-binary frame.
var MAX_HTTP_PAYLOAD_SIZE = 16383;
// The Compressor transform stream is basically stateless.
Compressor.prototype._initializeStream = function _initializeStream() {
Transform.call(this, { objectMode: true });
};
Compressor.prototype._transform = function _transform(frame, encoding, done) {
// When it receives a HEADERS or PUSH_PROMISE frame
if (frame.type === 'HEADERS' || frame.type === 'PUSH_PROMISE') {
// * it generates a header block using the compress method
var buffer = this.compress(frame.headers);
// * splits the header block into `chunk`s that are not larger than `MAX_HTTP_PAYLOAD_SIZE`
var cursor = 0;
do {
var chunk_size = Math.min(MAX_HTTP_PAYLOAD_SIZE, buffer.length);
var chunk = buffer.slice(cursor, cursor + chunk_size);
cursor += chunk_size;
// * for each `chunk`, it generates a `chunk_frame` that is identical to the original, except
// the `data` property which holds the given chunk
var chunk_frame = {
type: frame.type,
flags: frame.flags,
stream: frame.stream,
priority: frame.priority,
data: chunk
};
// * sets the END_HEADERS or END_PUSH_STREAM flag to true if it's the last chunk
chunk_frame.flags['END_' + frame.type] = (cursor === buffer.length);
// * and pushes out the `chunk_frame`
this.push(chunk_frame);
} while (!end);
}
done();
};
// The Decompressor is a stateful transform stream, since it has to collect multiple frames first,
// and the decoding comes after unifying the payload of those frames.
//
// If there's a frame in progress, `this._in_progress` is `true`. The frames are collected in
// `this._frames`, and the type of the frame and the stream identifier is stored in `this._type`
// and `this._stream` respectively.
Decompressor.prototype._initializeStream = function _initializeStream() {
Transform.call(this, { objectMode: true });
this._in_progress = false;
this._type = undefined;
this._stream = undefined;
this._frames = undefined;
};
// When a `frame` arrives
Decompressor.prototype._transform = function _transform(frame, encoding, done) {
// * and the collection process is already `_in_progress`, the frame is simply stored, except if
// it's an illegal frame
if (this._in_progress) {
if (frame.type !== this._type || frame.stream !== this._stream) {
throw new Error('A series of header frames must not be interleaved with other frames!');
}
this._frames.push(frame);
}
// * and the collection process is not `_in_progress`, but the new frame's type is HEADERS or
// PUSH_PROMISE, a new collection process begins
else if (frame.type === 'HEADERS' || frame.type === 'PUSH_PROMISE') {
this._in_progress = true;
this._type = frame.type;
this._stream = frame.stream;
this._frames = [frame];
}
// * otherwise, the frame is forwarded without taking any action
else {
this.push(frame);
}
// When the frame signals that it's the last in the series, the header block chunks are
// concatenated, the headers are decompressed, and a new frame gets pushed out with the
// decompressed headers.
if (this._in_progress && (frame.flags.END_HEADERS || frame.flags.END_PUSH_PROMISE)) {
var buffer = concat(this._frames.map(function(frame) {
return frame.data;
}));
var headers = this.decompress(buffer);
this.push({
type: frame.type,
flags: frame.flags,
stream: frame.stream,
priority: frame.priority,
headers: headers
});
this._in_progress = false;
}
done();
};
// [Initial header names](http://tools.ietf.org/html/draft-ietf-httpbis-header-compression-00#appendix-A)

@@ -445,15 +724,78 @@ // ======================

CompressionContext.initialRequestTable = [
[':scheme', 'http'],
[':scheme', 'https'],
[':host'],
[':path', '/'],
[':method', 'get']
[ ':scheme' , 'http' ],
[ ':scheme' , 'https' ],
[ ':host' , '' ],
[ ':path' , '/' ],
[ ':method' , 'get' ],
[ 'accept' , '' ],
[ 'accept-charset' , '' ],
[ 'accept-encoding' , '' ],
[ 'accept-language' , '' ],
[ 'cookie' , '' ],
[ 'if-modified-since' , '' ],
[ 'keep-alive' , '' ],
[ 'user-agent' , '' ],
[ 'proxy-connection' , '' ],
[ 'referer' , '' ],
[ 'accept-datetime' , '' ],
[ 'authorization' , '' ],
[ 'allow' , '' ],
[ 'cache-control' , '' ],
[ 'connection' , '' ],
[ 'content-length' , '' ],
[ 'content-md5' , '' ],
[ 'content-type' , '' ],
[ 'date' , '' ],
[ 'expect' , '' ],
[ 'from' , '' ],
[ 'if-match' , '' ],
[ 'if-none-match' , '' ],
[ 'if-range' , '' ],
[ 'if-unmodified-since' , '' ],
[ 'max-forwards' , '' ],
[ 'pragma' , '' ],
[ 'proxy-authorization' , '' ],
[ 'range' , '' ],
[ 'te' , '' ],
[ 'upgrade' , '' ],
[ 'via' , '' ],
[ 'warning' , '' ]
];
CompressionContext.initialResponseTable = [
[':status', '200'],
['age'],
['cache-control'],
['content-length'],
['content-type']
[ ':status' , '200' ],
[ 'age' , '' ],
[ 'cache-control' , '' ],
[ 'content-length' , '' ],
[ 'content-type' , '' ],
[ 'date' , '' ],
[ 'etag' , '' ],
[ 'expires' , '' ],
[ 'last-modified' , '' ],
[ 'server' , '' ],
[ 'set-cookie' , '' ],
[ 'vary' , '' ],
[ 'via' , '' ],
[ 'access-control-allow-origin' , '' ],
[ 'accept-ranges' , '' ],
[ 'allow' , '' ],
[ 'connection' , '' ],
[ 'content-disposition' , '' ],
[ 'content-encoding' , '' ],
[ 'content-language' , '' ],
[ 'content-location' , '' ],
[ 'content-md5' , '' ],
[ 'content-range' , '' ],
[ 'link' , '' ],
[ 'location' , '' ],
[ 'p3p' , '' ],
[ 'pragma' , '' ],
[ 'proxy-authenticate' , '' ],
[ 'refresh' , '' ],
[ 'retry-after' , '' ],
[ 'strict-transport-security' , '' ],
[ 'trailer' , '' ],
[ 'transfer-encoding' , '' ],
[ 'warning' , '' ],
[ 'www-authenticate' , '' ]
];

@@ -57,5 +57,5 @@ // The framer consists of two [Transform Stream][1] subclasses that operate in [object mode][2]:

Transform.call(this, { objectMode: true });
this._next(8);
this._next(COMMON_HEADER_SIZE);
}
Deserializer.prototype = Object.create(Transform.prototype, { constructor: { value: Deserializer } })
Deserializer.prototype = Object.create(Transform.prototype, { constructor: { value: Deserializer } });

@@ -109,3 +109,3 @@ // The Deserializer is stateful, and it's two main alternating states are: *waiting for header* and

}
this._next(8);
this._next(COMMON_HEADER_SIZE);
}

@@ -124,2 +124,5 @@ }

//
// Additional size limits can be set by specific application uses. HTTP limits the frame size to
// 16,383 octets. This limitation is enforced on the connection layer.
//
// 0 1 2 3

@@ -161,2 +164,5 @@ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1

var COMMON_HEADER_SIZE = 8;
var MAX_PAYLOAD_SIZE = 65535;
var frame_types = [];

@@ -167,7 +173,9 @@

Serializer.commonHeader = function writeCommonHeader(frame, buffers) {
var header_buffer = new Buffer(8);
var header_buffer = new Buffer(COMMON_HEADER_SIZE);
var size = 0;
for (var i = 0; i < buffers.length; i++) size += buffers[i].length;
if (size > 65535) {
for (var i = 0; i < buffers.length; i++) {
size += buffers[i].length;
}
if (size > MAX_PAYLOAD_SIZE) {
throw new Error('Too large frame: ' + size + ' bytes');

@@ -394,3 +402,3 @@ }

if (settings_left.length !== 0) {
throw new Error('Unknown settings: ' + settings_left.join(', '))
throw new Error('Unknown settings: ' + settings_left.join(', '));
}

@@ -505,3 +513,3 @@

buffers.push(frame.data);
}
};

@@ -513,3 +521,3 @@ Deserializer.PING = function readPing(buffer, frame) {

frame.data = buffer;
}
};

@@ -516,0 +524,0 @@ // [GOAWAY](http://http2.github.io/http2-spec/#GOAWAY)

{
"name": "http2",
"version": "0.0.2",
"version": "0.0.3",
"description": "An HTTP/2 server implementation",

@@ -5,0 +5,0 @@ "main": "index.js",

@@ -9,3 +9,3 @@ node-http2

I post weekly status updates [on my blog](http://gabor.molnar.es/blog/categories/google-summer-of-code/). Short version: framing layer 70% ready.
I post weekly status updates [on my blog](http://gabor.molnar.es/blog/categories/google-summer-of-code/). Short version: framing layer is ready, compression will be ready this week.

@@ -12,0 +12,0 @@ Installation

var expect = require('chai').expect;
var concat = require('../lib/utils').concat;
var compressor = require('../lib/compressor')
, Compressor = compressor.Compressor
, Decompressor = compressor.Decompressor;
var compressor = require('../lib/compressor');
var CompressionContext = compressor.CompressionContext;
var Compressor = compressor.Compressor;
var Decompressor = compressor.Decompressor;

@@ -26,6 +28,6 @@ var test_integers = [{

var test_strings = [{
string: new Buffer('abcdefghij', 'utf-8'),
string: 'abcdefghij',
buffer: new Buffer('0A6162636465666768696A', 'hex')
}, {
string: new Buffer('éáűőúöüó€', 'utf-8'),
string: 'éáűőúöüó€',
buffer: new Buffer('13C3A9C3A1C5B1C591C3BAC3B6C3BCC3B3E282AC', 'hex')

@@ -37,4 +39,4 @@ }];

name: 3,
value: new Buffer('/my-example/index.html', 'utf-8'),
indexing: true
value: '/my-example/index.html',
index: Infinity
},

@@ -45,4 +47,4 @@ buffer: new Buffer('44' + '162F6D792D6578616D706C652F696E6465782E68746D6C', 'hex')

name: 12,
value: new Buffer('my-user-agent', 'utf-8'),
indexing: true
value: 'my-user-agent',
index: Infinity
},

@@ -52,5 +54,5 @@ buffer: new Buffer('4D' + '0D6D792D757365722D6167656E74', 'hex')

header: {
name: new Buffer('x-my-header', 'utf-8'),
value: new Buffer('first', 'utf-8'),
indexing: true
name: 'x-my-header',
value: 'first',
index: Infinity
},

@@ -62,3 +64,3 @@ buffer: new Buffer('40' + '0B782D6D792D686561646572' + '056669727374', 'hex')

value: 38,
indexing: false
index: -1
},

@@ -70,3 +72,3 @@ buffer: new Buffer('A6', 'hex')

value: 40,
indexing: false
index: -1
},

@@ -77,5 +79,4 @@ buffer: new Buffer('A8', 'hex')

name: 3,
value: new Buffer('/my-example/resources/script.js', 'utf-8'),
indexing: true,
substitution: 38
value: '/my-example/resources/script.js',
index: 38
},

@@ -86,4 +87,4 @@ buffer: new Buffer('0426' + '1F2F6D792D6578616D706C652F7265736F75726365732F7363726970742E6A73', 'hex')

name: 40,
value: new Buffer('second', 'utf-8'),
indexing: true
value: 'second',
index: Infinity
},

@@ -93,28 +94,42 @@ buffer: new Buffer('5F0A' + '067365636F6E64', 'hex')

// Concatenate buffers into a new buffer
function concat(buffers) {
var size = 0;
for (var i = 0; i < buffers.length; i++) {
size += buffers[i].length;
}
var test_header_sets = [{
headers: {
':path': '/my-example/index.html',
'user-agent': 'my-user-agent',
'x-my-header': 'first'
},
buffer: concat(test_headers.slice(0, 3).map(function(test) { return test.buffer; }))
}, {
headers: {
':path': '/my-example/resources/script.js',
'user-agent': 'my-user-agent',
'x-my-header': 'second'
},
buffer: concat(test_headers.slice(3).map(function(test) { return test.buffer; }))
}];
var concatenated = new Buffer(size);
for (var cursor = 0, j = 0; j < buffers.length; cursor += buffers[j].length, j++) {
buffers[j].copy(concatenated, cursor);
}
describe('compressor.js', function() {
describe('CompressionContext', function() {
describe('static method .equal([name1, value1], [name2, value2])', function() {
var equal = CompressionContext.equal;
it('decides if the two headers are considered equal', function() {
expect(equal(['name', 'value'], ['name', 'value'])).to.be.equal(true);
expect(equal(['name', 'value'], ['nAmE', 'value'])).to.be.equal(true);
expect(equal(['NaMe', 'value'], ['nAmE', 'value'])).to.be.equal(true);
expect(equal(['name', 'VaLuE'], ['name', 'value'])).to.be.equal(false);
expect(equal(['NaMe', 'VaLuE'], ['name', 'value'])).to.be.equal(false);
});
});
});
return concatenated;
}
describe('compressor.js', function() {
describe('Compressor', function() {
describe('static function integer(I, N)', function() {
it('should return an array of buffers that represent the N-prefix coded I value', function() {
for (var i = 0; i < test_integers.length; i++) {
var test = test_integers[i];
expect(concat(Compressor.integer(test.I, test.N))).to.deep.equal(test.buffer);
describe('static method .integer(I, N)', function() {
it('should return an array of buffers that represent the encoded form of the string str', function() {
for (var i = 0; i < test_strings.length; i++) {
var test = test_strings[i];
expect(concat(Compressor.string(test.string))).to.deep.equal(test.buffer);
}
});
});
describe('static function string(stringbuffer)', function() {
describe('static method .string(stringbuffer)', function() {
it('should return an array of buffers that represent the encoded form of the string buffer', function() {

@@ -127,3 +142,3 @@ for (var i = 0; i < test_strings.length; i++) {

});
describe('static function header({ name, value, indexing, substitution })', function() {
describe('static method .header({ name, value, indexing, substitution })', function() {
it('should return an array of buffers that represent the encoded form of the header', function() {

@@ -139,3 +154,3 @@ for (var i = 0; i < test_headers.length; i++) {

describe('Decompressor', function() {
describe('static function integer(buffer, N)', function() {
describe('static method .integer(buffer, N)', function() {
it('should return the parsed N-prefix coded number and increase the cursor property of buffer', function() {

@@ -150,8 +165,8 @@ for (var i = 0; i < test_integers.length; i++) {

});
describe('static function string(buffer)', function() {
it('should return the parsed string buffer and increase the cursor property of buffer', function() {
describe('static method .string(buffer)', function() {
it('should return the parsed string and increase the cursor property of buffer', function() {
for (var i = 0; i < test_strings.length; i++) {
var test = test_strings[i];
test.buffer.cursor = 0;
expect(Decompressor.string(test.buffer)).to.deep.equal(test.string);
expect(Decompressor.string(test.buffer)).to.equal(test.string);
expect(test.buffer.cursor).to.equal(test.buffer.length);

@@ -161,3 +176,3 @@ }

});
describe('static function header(buffer)', function() {
describe('static method .header(buffer)', function() {
it('should return the parsed header and increase the cursor property of buffer', function() {

@@ -172,3 +187,30 @@ for (var i = 0; i < test_headers.length; i++) {

});
describe('method decompress(buffer)', function() {
it('should return the parsed header set in { name1: value1, name2: [value2, value3], ... } format', function() {
var decompressor = new Decompressor('REQUEST');
var header_set = test_header_sets[0];
expect(decompressor.decompress(header_set.buffer)).to.deep.equal(header_set.headers);
header_set = test_header_sets[1];
expect(decompressor.decompress(header_set.buffer)).to.deep.equal(header_set.headers);
});
});
});
describe('invariant', function() {
describe('decompressor.decompress(compressor.compress(headerset)) === headerset', function() {
it('should be true for any header set if the states are synchronized', function() {
var compressor = new Compressor('REQUEST');
var decompressor = new Decompressor('REQUEST');
for (var i = 0; i < 10; i++) {
var headers = test_header_sets[i%2].headers;
var compressed = compressor.compress(headers);
var decompressed = decompressor.decompress(compressed);
expect(headers).to.deep.equal(decompressed);
expect(compressor._table).to.deep.equal(decompressor._table);
expect(compressor._reference).to.deep.equal(decompressor._reference);
expect(compressor._working).to.deep.equal(compressor._working);
}
});
});
});
});
var expect = require('chai').expect;
var concat = require('../lib/utils').concat;
var framer = require('../lib/framer')
, Serializer = framer.Serializer
, Deserializer = framer.Deserializer;
var framer = require('../lib/framer');
var Serializer = framer.Serializer;
var Deserializer = framer.Deserializer;

@@ -140,17 +141,2 @@ var frame_types = {

// Concatenate two buffer into a new buffer
function concat(buffers) {
var size = 0;
for (var i = 0; i < buffers.length; i++) {
size += buffers[i].length;
}
var concatenated = new Buffer(size);
for (var cursor = 0, j = 0; j < buffers.length; cursor += buffers[j].length, j++) {
buffers[j].copy(concatenated, cursor);
}
return concatenated;
}
// Concatenate an array of buffers and then cut them into random size buffers

@@ -174,5 +160,5 @@ function shuffle_buffers(buffers) {

for (var i = 0; i < test_frames.length; i++) {
var test = test_frames[i]
, buffers = [test.buffer.slice(8)]
, header_buffer = test.buffer.slice(0,8);
var test = test_frames[i];
var buffers = [test.buffer.slice(8)];
var header_buffer = test.buffer.slice(0,8);
Serializer.commonHeader(test.frame, buffers);

@@ -185,3 +171,3 @@ expect(buffers[0]).to.deep.equal(header_buffer);

Object.keys(frame_types).forEach(function(type) {
var tests = test_frames.filter(function(test) { return test.frame.type === type });
var tests = test_frames.filter(function(test) { return test.frame.type === type; });
var frame_shape = '{ ' + frame_types[type].join(', ') + ' }';

@@ -191,4 +177,4 @@ describe('static method .' + type + '(' + frame_shape + ', buffer_array)', function() {

for (var i = 0; i < tests.length; i++) {
var test = tests[i]
, buffers = [];
var test = tests[i];
var buffers = [];
Serializer[type](test.frame, buffers);

@@ -234,3 +220,3 @@ expect(concat(buffers)).to.deep.equal(test.buffer.slice(8));

Object.keys(frame_types).forEach(function(type) {
var tests = test_frames.filter(function(test) { return test.frame.type === type });
var tests = test_frames.filter(function(test) { return test.frame.type === type; });
var frame_shape = '{ ' + frame_types[type].join(', ') + ' }';

@@ -258,4 +244,4 @@ describe('static method .' + type + '(payload_buffer, frame)', function() {

shuffle_buffers(test_frames.map(function(test) { return test.buffer }))
.forEach(stream.write.bind(stream));
var shuffled = shuffle_buffers(test_frames.map(function(test) { return test.buffer; }));
shuffled.forEach(stream.write.bind(stream));

@@ -262,0 +248,0 @@ for (var j = 0; j < test_frames.length; j++) {

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet