rjweb-server npm package: version comparison

Comparing version 8.7.0 to 8.7.1

CHANGELOG.md

@@ -0,1 +1,5 @@

## 8.7.1
- Fix Compression duplicating data sometimes
## 8.7.0

@@ -2,0 +6,0 @@
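
The duplication fix can be traced in the diffs below. Previously, each 'data' event from the compression stream stored its chunk directly on the response object (res.content / rawRes.content) and read the write offset at send time; when a write hit backpressure, the onWritable retry sliced from whatever content and contentOffset held at that moment, which a later 'data' event could already have replaced. That is a plausible source of the duplicated data the changelog mentions. In 8.7.1 the chunk and the pre-write offset are captured in locals (ab, lastOffset) and copied onto the response only right before onWritable is registered; when compression is actually negotiated (compressHeader), the retry no longer re-slices at all and simply resumes the paused streams. Below is a minimal sketch of the capture-and-retry pattern, assuming a uWebSockets.js-style response (write, getWriteOffset and onWritable exist there); the surrounding names are illustrative, not the package's exact API.

import type { Readable } from 'stream'

// Minimal shape of the uWebSockets.js response methods this pattern relies on
// (declared here only for the sketch).
interface ResLike {
  write(chunk: ArrayBufferLike): boolean
  getWriteOffset(): number
  onWritable(handler: (offset: number) => boolean): void
}

function sendCompressed(compression: Readable, res: ResLike) {
  compression.on('data', (chunk: Buffer) => {
    // Capture this chunk and the pre-write offset so a later 'data' event
    // cannot overwrite the state the retry below depends on.
    const ab = chunk.buffer.slice(chunk.byteOffset, chunk.byteOffset + chunk.byteLength)
    const lastOffset = res.getWriteOffset()

    if (!res.write(ab)) {
      // Backpressure: stop producing and retry only the unsent tail of this chunk.
      compression.pause()
      res.onWritable((offset) => {
        const remaining = ab.slice(offset - lastOffset)
        const ok = res.write(remaining)
        if (ok) compression.resume()
        return ok
      })
    }
  })
}

On the compressed path (compressHeader set) the 8.7.1 code does not re-slice at all: the onWritable callback only resumes the paused compression and file streams and returns true.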

lib/cjs/classes/web/HttpRequest.js

@@ -514,8 +514,3 @@ "use strict";

this.rawRes.cork(() => {
if (!endEarly && (start !== 0 || end !== fileStat.size) && this.ctx.headers.get("if-unmodified-since") !== this.ctx.response.headers["last-modified"]) {
this.ctg.logger.debug("Ended unmodified-since request early because of no match");
this.ctx.response.status = import_statusEnum.default.PRECONDITION_FAILED;
this.ctx.response.statusMessage = void 0;
endEarly = true;
} else if (!endEarly && start === 0 && end === fileStat.size && this.ctg.options.performance.lastModified && this.ctx.headers.get("if-modified-since") === this.ctx.response.headers["last-modified"]) {
if (!endEarly && start === 0 && end === fileStat.size && this.ctg.options.performance.lastModified && this.ctx.headers.get("if-modified-since") === this.ctx.response.headers["last-modified"]) {
this.ctg.logger.debug("Ended modified-since request early because of match");

@@ -535,3 +530,3 @@ this.ctx.response.status = import_statusEnum.default.NOT_MODIFIED;

const stream = (0, import_fs.createReadStream)((0, import_path.resolve)(file), { start, end });
const compression = (0, import_handleCompressType.default)(compressMethod);
const compression = (0, import_handleCompressType.default)(compressMethod, true);
const destroyStreams = () => {

@@ -542,10 +537,16 @@ compression.destroy();

compression.on("data", (content) => {
this.rawRes.content = toArrayBuffer(content);
const ab = toArrayBuffer(content), lastOffset = this.rawRes.getWriteOffset();
if (!this.ctx.isAborted) {
try {
this.rawRes.contentOffset = this.rawRes.getWriteOffset();
const ok = compressWrite(this.rawRes.content);
const ok = compressWrite(ab);
if (!ok) {
compression.pause();
stream.pause();
this.rawRes.onWritable((offset) => {
this.rawRes.content = ab;
this.rawRes.contentOffset = lastOffset;
this.rawRes.onWritable(compressHeader ? () => {
compression.resume();
stream.resume();
return true;
} : (offset) => {
const sliced = this.rawRes.content.slice(offset - this.rawRes.contentOffset);

@@ -556,2 +557,3 @@ const ok2 = compressWrite(sliced);

this.ctg.logger.debug("sent http body chunk with bytelen", sliced.byteLength, "(delayed)");
compression.resume();
stream.resume();

@@ -572,3 +574,3 @@ }

}
}).once("end", () => {
}).once("close", () => {
if (compressHeader && !this.ctx.isAborted)

@@ -575,0 +577,0 @@ this.rawRes.cork(() => this.rawRes.end());
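
The response is now finalized on the compression stream's 'close' event instead of 'end'. With Node's default autoDestroy behaviour, 'close' is emitted after 'end', once the stream has been fully torn down, so it is the later of the two finish signals; presumably the response is only corked and ended once the compression stream has completely wound down. A quick way to confirm the ordering on a zlib stream:

import { createGzip } from 'zlib'

// With autoDestroy (the default), 'close' fires after 'end', once the stream
// has been fully destroyed.
const gzip = createGzip()
gzip.on('data', () => { /* consume so the readable side can finish */ })
gzip.once('end', () => console.log('end'))
gzip.once('close', () => console.log('close'))
gzip.end('hello world') // prints "end", then "close"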

@@ -35,4 +35,21 @@ "use strict";

var import_stream = require("stream");
var import_size = __toESM(require("./size"));
var import_zlib = __toESM(require("zlib"));
function handleCompressType(type) {
class PassThrough64K extends import_stream.Duplex {
constructor() {
super({
read() {
},
write(chunk) {
let chunkCount = Math.ceil(chunk.byteLength / (0, import_size.default)(64).kb()), index = 0;
while (chunkCount) {
this.push(chunk.slice(index, index + (0, import_size.default)(64).kb()));
index += (0, import_size.default)(64).kb();
chunkCount--;
}
}
});
}
}
function handleCompressType(type, stream) {
switch (type) {

@@ -46,4 +63,4 @@ case "gzip":

default:
return new import_stream.PassThrough();
return stream ? new import_stream.PassThrough() : new PassThrough64K();
}
}
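
The other half of the change is the new PassThrough64K used on the non-streaming path: instead of re-emitting a written buffer as one chunk, it pushes it back out as slices of at most 64 KB (assuming size(64).kb() resolves to 64 * 1024 bytes), presumably so each downstream compressWrite deals with a bounded chunk. Here is a self-contained sketch of the same splitting idea, written as a Transform for clarity; the published class builds the splitter as a hand-rolled Duplex instead.

import { Transform, TransformCallback } from 'stream'

// 64 KB slice size; 64 * 1024 is an assumption for what size(64).kb() returns.
const CHUNK = 64 * 1024

// Every incoming buffer is re-emitted as slices of at most 64 KB.
class PassThrough64K extends Transform {
  _transform(chunk: Buffer, _enc: BufferEncoding, done: TransformCallback) {
    for (let index = 0; index < chunk.byteLength; index += CHUNK) {
      this.push(chunk.subarray(index, index + CHUNK))
    }
    done()
  }
}

// Usage: a single 150 KB write comes back out as 64 KB + 64 KB + 22 KB chunks.
const splitter = new PassThrough64K()
splitter.on('data', (slice: Buffer) => console.log(slice.byteLength))
splitter.write(Buffer.alloc(150 * 1024))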

@@ -348,3 +348,3 @@ "use strict";

ctg.logger.debug("negotiated to use", compressHeader);
const compression = (0, import_handleCompressType.default)(compressMethod);
const compression = (0, import_handleCompressType.default)(compressMethod, false);
const destroyStream = () => {

@@ -354,9 +354,14 @@ compression.destroy();

compression.on("data", (content) => {
res.content = (0, import_HttpRequest.toArrayBuffer)(content);
const ab = (0, import_HttpRequest.toArrayBuffer)(content), lastOffset = res.getWriteOffset();
if (!ctx.isAborted) {
try {
res.contentOffset = res.getWriteOffset();
const ok = compressWrite(res.content);
const ok = compressWrite(ab);
if (!ok) {
res.onWritable((offset) => {
compression.pause();
res.content = ab;
res.contentOffset = lastOffset;
res.onWritable(compressHeader ? () => {
compression.resume();
return true;
} : (offset) => {
const sliced = res.content.slice(offset - res.contentOffset);

@@ -367,2 +372,3 @@ const ok2 = compressWrite(sliced);

ctg.logger.debug("sent http body chunk with bytelen", sliced.byteLength, "(delayed)");
compression.resume();
}

@@ -369,0 +375,0 @@ return ok2;


@@ -480,8 +480,3 @@ import Status from "../../misc/statusEnum";

this.rawRes.cork(() => {
if (!endEarly && (start !== 0 || end !== fileStat.size) && this.ctx.headers.get("if-unmodified-since") !== this.ctx.response.headers["last-modified"]) {
this.ctg.logger.debug("Ended unmodified-since request early because of no match");
this.ctx.response.status = Status.PRECONDITION_FAILED;
this.ctx.response.statusMessage = void 0;
endEarly = true;
} else if (!endEarly && start === 0 && end === fileStat.size && this.ctg.options.performance.lastModified && this.ctx.headers.get("if-modified-since") === this.ctx.response.headers["last-modified"]) {
if (!endEarly && start === 0 && end === fileStat.size && this.ctg.options.performance.lastModified && this.ctx.headers.get("if-modified-since") === this.ctx.response.headers["last-modified"]) {
this.ctg.logger.debug("Ended modified-since request early because of match");

@@ -501,3 +496,3 @@ this.ctx.response.status = Status.NOT_MODIFIED;

const stream = createReadStream(pathResolve(file), { start, end });
const compression = handleCompressType(compressMethod);
const compression = handleCompressType(compressMethod, true);
const destroyStreams = () => {

@@ -508,10 +503,16 @@ compression.destroy();

compression.on("data", (content) => {
this.rawRes.content = toArrayBuffer(content);
const ab = toArrayBuffer(content), lastOffset = this.rawRes.getWriteOffset();
if (!this.ctx.isAborted) {
try {
this.rawRes.contentOffset = this.rawRes.getWriteOffset();
const ok = compressWrite(this.rawRes.content);
const ok = compressWrite(ab);
if (!ok) {
compression.pause();
stream.pause();
this.rawRes.onWritable((offset) => {
this.rawRes.content = ab;
this.rawRes.contentOffset = lastOffset;
this.rawRes.onWritable(compressHeader ? () => {
compression.resume();
stream.resume();
return true;
} : (offset) => {
const sliced = this.rawRes.content.slice(offset - this.rawRes.contentOffset);

@@ -522,2 +523,3 @@ const ok2 = compressWrite(sliced);

this.ctg.logger.debug("sent http body chunk with bytelen", sliced.byteLength, "(delayed)");
compression.resume();
stream.resume();

@@ -538,3 +540,3 @@ }

}
}).once("end", () => {
}).once("close", () => {
if (compressHeader && !this.ctx.isAborted)

@@ -541,0 +543,0 @@ this.rawRes.cork(() => this.rawRes.end());

@@ -1,4 +0,21 @@

import { PassThrough } from "stream";
import { Duplex, PassThrough } from "stream";
import size from "./size";
import zlib from "zlib";
function handleCompressType(type) {
class PassThrough64K extends Duplex {
constructor() {
super({
read() {
},
write(chunk) {
let chunkCount = Math.ceil(chunk.byteLength / size(64).kb()), index = 0;
while (chunkCount) {
this.push(chunk.slice(index, index + size(64).kb()));
index += size(64).kb();
chunkCount--;
}
}
});
}
}
function handleCompressType(type, stream) {
switch (type) {

@@ -12,3 +29,3 @@ case "gzip":

default:
return new PassThrough();
return stream ? new PassThrough() : new PassThrough64K();
}

@@ -15,0 +32,0 @@ }

@@ -315,3 +315,3 @@ import parsePath from "../parsePath";

ctg.logger.debug("negotiated to use", compressHeader);
const compression = handleCompressType(compressMethod);
const compression = handleCompressType(compressMethod, false);
const destroyStream = () => {

@@ -321,9 +321,14 @@ compression.destroy();

compression.on("data", (content) => {
res.content = toArrayBuffer(content);
const ab = toArrayBuffer(content), lastOffset = res.getWriteOffset();
if (!ctx.isAborted) {
try {
res.contentOffset = res.getWriteOffset();
const ok = compressWrite(res.content);
const ok = compressWrite(ab);
if (!ok) {
res.onWritable((offset) => {
compression.pause();
res.content = ab;
res.contentOffset = lastOffset;
res.onWritable(compressHeader ? () => {
compression.resume();
return true;
} : (offset) => {
const sliced = res.content.slice(offset - res.contentOffset);

@@ -334,2 +339,3 @@ const ok2 = compressWrite(sliced);

ctg.logger.debug("sent http body chunk with bytelen", sliced.byteLength, "(delayed)");
compression.resume();
}

@@ -336,0 +342,0 @@ return ok2;


/// <reference types="node" />
import { PassThrough } from "stream";
import { Duplex, PassThrough } from "stream";
declare class PassThrough64K extends Duplex {
constructor();
}
export type CompressTypes = 'none' | 'gzip' | 'br' | 'deflate';
export default function handleCompressType(type: CompressTypes): PassThrough;
export default function handleCompressType(type: CompressTypes, stream: boolean): PassThrough64K | PassThrough;
export {};
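
The updated typings make the flag part of the public signature: handleCompressType now always takes a boolean stream argument, and only the default branch inspects it, returning a plain PassThrough when streaming and the 64 KB-splitting PassThrough64K otherwise. A hedged usage sketch based on that declaration (the import path below is a guess for illustration, not taken from the package docs):

// Import path is illustrative; the declaration is the .d.ts shown above.
import handleCompressType from 'rjweb-server/lib/esm/functions/handleCompressType'

const streaming = handleCompressType('none', true)  // plain PassThrough
const buffered  = handleCompressType('none', false) // PassThrough64K, splits pushes at 64 KB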
{
"name": "rjweb-server",
"version": "8.7.0",
"version": "8.7.1",
"description": "Easy and Robust Way to create a Web Server with Many Easy-to-use Features in NodeJS",

@@ -5,0 +5,0 @@ "main": "./lib/cjs/index.js",
