Socket
Socket
Sign inDemoInstall

http2

Package Overview
Dependencies
Maintainers
1
Versions
44
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

http2 - npm Package Compare versions

Comparing version 0.0.3 to 0.0.4

doc/stream.html

55

lib/compressor.js

@@ -16,2 +16,3 @@ // HTTP/2 compression is implemented by two [Transform Stream][1] subclasses that operate in

var utils = require('../lib/utils');
var Transform = require('stream').Transform;

@@ -247,3 +248,5 @@

// The initial value of its Header Table depends on which side of the connection it is on.
function Decompressor(type) {
function Decompressor(type, log) {
this._log = log || utils.nolog;
var initial_table = (type === 'REQUEST') ? CompressionContext.initialRequestTable

@@ -262,2 +265,4 @@ : CompressionContext.initialResponseTable;

Decompressor.prototype.decompress = function decompress(buffer) {
this._log.trace({ buffer: buffer }, 'Starting header decompression');
// * First, upon starting the decoding of a new set of headers, the reference set of headers is

@@ -295,2 +300,3 @@ // interpreted into the working set of headers

}
this._log.debug({ headers: headers }, 'Header decompression is done');
return headers;

@@ -306,3 +312,5 @@ };

// The initial value of its Header Table depends on which side of the connection it is on.
function Compressor(type) {
function Compressor(type, log) {
this._log = log || utils.nolog;
var initial_table = (type === 'REQUEST') ? CompressionContext.initialRequestTable

@@ -316,4 +324,2 @@ : CompressionContext.initialResponseTable;

var concat = require('../lib/utils').concat;
// The `compress` method takes a header set and returns an array of buffers containing the

@@ -324,2 +330,3 @@ // encoded binary data.

Compressor.prototype.compress = function compress(headers) {
this._log.debug({ headers: headers }, 'Starting header compression');
var i;

@@ -370,3 +377,6 @@

return concat(Array.prototype.concat.apply([], buffers)); // [[buffers]] -> [buffers] -> buffer
var buffer = utils.concat(Array.prototype.concat.apply([], buffers)); // [[bufs]] -> [bufs] -> buf
this._log.trace({ buffer: buffer }, 'Header compression is done');
return buffer;
};

@@ -632,26 +642,19 @@

// * splits the header block into `chunk`s that are not larger than `MAX_HTTP_PAYLOAD_SIZE`
var cursor = 0;
do {
var chunk_size = Math.min(MAX_HTTP_PAYLOAD_SIZE, buffer.length);
var chunk = buffer.slice(cursor, cursor + chunk_size);
cursor += chunk_size;
// * cuts the header block into `chunks` that are not larger than `MAX_HTTP_PAYLOAD_SIZE`
var chunks = utils.cut(buffer, MAX_HTTP_PAYLOAD_SIZE);
// * for each `chunk`, it generates a `chunk_frame` that is identical to the original, except
// the `data` property which holds the given chunk
var chunk_frame = {
// * for each `chunk`, it pushes out a `chunk_frame` that is identical to the original, except
// the `data` property which holds the given chunk and the END_HEADERS/END_PUSH_STREAM flag
// that marks the last frame
for (var i = 0; i < chunks.length; i++) {
var flags = utils.clone(frame.flags);
flags['END_' + frame.type] = (i === chunks.length - 1);
this.push({
type: frame.type,
flags: frame.flags,
flags: flags,
stream: frame.stream,
priority: frame.priority,
data: chunk
};
// * sets the END_HEADERS or END_PUSH_STREAM flag to true if it's the last chunk
chunk_frame.flags['END_' + frame.type] = (cursor === buffer.length);
// * and pushes out the `chunk_frame`
this.push(chunk_frame);
} while (!end);
data: chunks[i]
});
}
}

@@ -705,3 +708,3 @@

if (this._in_progress && (frame.flags.END_HEADERS || frame.flags.END_PUSH_PROMISE)) {
var buffer = concat(this._frames.map(function(frame) {
var buffer = utils.concat(this._frames.map(function(frame) {
return frame.data;

@@ -708,0 +711,0 @@ }));

@@ -0,9 +1,12 @@

var Duplex = require('stream').Duplex;
var Serializer = require('./framer').Serializer;
var Deserializer = require('./framer').Deserializer;
var Compressor = require('./compressor').Compressor;
exports.Connection = Connection;
var Serializer = require('./framer').Serializer
, Deserializer = require('./framer').Deserializer
, Compressor = require('./compressor').Compressor
, EventEmitter = require('events').EventEmitter;
function Connection(socket, role) {
Duplex.call(this, { objectMode: true });
function Connection(socket, role) {
this.socket = socket;

@@ -18,3 +21,3 @@ this.role = role; // 'client' or 'server'

}
Connection.prototype = Object.create(EventEmitter.prototype, { constructor: { value: Connection } });
Connection.prototype = Object.create(Duplex.prototype, { constructor: { value: Connection } });

@@ -25,1 +28,7 @@ Connection.prototype.createStream = function createStream() {

};
Connection.prototype._read = function read() {
};
Connection.prototype._write = function write(chunk, encoding, callback) {
};

@@ -21,3 +21,4 @@ // The framer consists of two [Transform Stream][1] subclasses that operate in [object mode][2]:

function Serializer() {
function Serializer(log) {
this._log = log || require('./utils').nolog;
Transform.call(this, { objectMode: true });

@@ -31,2 +32,4 @@ }

Serializer.prototype._transform = function _transform(frame, encoding, done) {
this._log.debug({ frame: frame }, 'Outgoing frame');
if (!(frame.type in Serializer)) {

@@ -57,3 +60,4 @@ throw new Error('Unknown frame type: ' + frame.type);

function Deserializer() {
function Deserializer(log) {
this._log = log || require('./utils').nolog;
Transform.call(this, { objectMode: true });

@@ -106,6 +110,10 @@ this._next(COMMON_HEADER_SIZE);

Deserializer[this._frame.type](this._buffer, this._frame);
this._log.debug({ frame: this._frame }, 'Incoming frame');
this.push(this._frame);
} catch(error) {
this._log.error({ state: this, error: error }, 'Incoming frame parsing error');
this.emit('error', error);
}
} else {
this._log.warn({ frame: this._frame }, 'Unknown type incoming frame');
}

@@ -112,0 +120,0 @@ this._next(COMMON_HEADER_SIZE);

@@ -15,1 +15,33 @@ // Concatenate an array of buffers into a new buffer

};
// Cut `buffer` into chunks not larger than `size`
exports.cut = function cut(buffer, size) {
var chunks = [];
var cursor = 0;
do {
var chunk_size = Math.min(size, buffer.length - cursor);
chunks.push(buffer.slice(cursor, cursor + chunk_size));
cursor += chunk_size;
} while(cursor < buffer.length);
return chunks;
};
// Clone an object
exports.clone = function clone(object) {
var clone = {};
for (var key in object) {
clone[key] = object[key];
}
return object;
};
// No-op dummy logger: used as the default when no `log` argument is passed
// in, so logging calls never have to be guarded. Every level points at the
// same shared no-op function.
function noop() {}
exports.nolog = {};
['fatal', 'error', 'warn', 'info', 'debug', 'trace'].forEach(function (level) {
  exports.nolog[level] = noop;
});
{
"name": "http2",
"version": "0.0.3",
"version": "0.0.4",
"description": "An HTTP/2 server implementation",

@@ -12,3 +12,4 @@ "main": "index.js",

"mocha": "*",
"docco": "*"
"docco": "*",
"bunyan": "*"
},

@@ -15,0 +16,0 @@ "scripts": {

@@ -9,3 +9,3 @@ node-http2

I post weekly status updates [on my blog](http://gabor.molnar.es/blog/categories/google-summer-of-code/). Short version: framing layer is ready, compression will be ready this week.
I post weekly status updates [on my blog](http://gabor.molnar.es/blog/categories/google-summer-of-code/). Short version: the framing layer and compression are ready.

@@ -12,0 +12,0 @@ Installation

@@ -9,2 +9,5 @@ var expect = require('chai').expect;

var log = process.env.DEBUG ? require('bunyan').createLogger({ name: 'http2', level: 'trace' })
: undefined;
var test_integers = [{

@@ -177,3 +180,3 @@ N: 5,

it('should return the parsed header set in { name1: value1, name2: [value2, value3], ... } format', function() {
var decompressor = new Decompressor('REQUEST');
var decompressor = new Decompressor('REQUEST', log);
var header_set = test_header_sets[0];

@@ -190,3 +193,3 @@ expect(decompressor.decompress(header_set.buffer)).to.deep.equal(header_set.headers);

it('should be true for any header set if the states are synchronized', function() {
var compressor = new Compressor('REQUEST');
var compressor = new Compressor('REQUEST', log);
var decompressor = new Decompressor('REQUEST');

@@ -193,0 +196,0 @@ for (var i = 0; i < 10; i++) {

@@ -8,2 +8,5 @@ var expect = require('chai').expect;

var log = process.env.DEBUG ? require('bunyan').createLogger({ name: 'http2', level: 'trace' })
: undefined;
var frame_types = {

@@ -186,3 +189,4 @@ DATA: ['data'],

it('should transform frame objects to appropriate buffers', function() {
var stream = new Serializer();
var stream = new Serializer(log);
for (var i = 0; i < test_frames.length; i++) {

@@ -239,3 +243,3 @@ var test = test_frames[i];

it('should transform buffers to appropriate frame object', function() {
var stream = new Deserializer();
var stream = new Deserializer(log);

@@ -242,0 +246,0 @@ var shuffled = shuffle_buffers(test_frames.map(function(test) { return test.buffer; }));

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc