@crawlee/http - npm Package Compare versions

Comparing version 3.0.5-beta.10 to 3.0.5-beta.11


internals/http-crawler.d.ts

@@ -8,5 +8,6 @@ /// <reference types="node" />
 import type { OptionsInit, Response as GotResponse, GotOptionsInit } from 'got-scraping';
+import type { JsonValue } from 'type-fest';
 import type { IncomingMessage } from 'node:http';
 export declare type HttpErrorHandler<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
-JSONData extends Dictionary = any> = ErrorHandler<HttpCrawlingContext<UserData, JSONData>>;
+JSONData extends JsonValue = any> = ErrorHandler<HttpCrawlingContext<UserData, JSONData>>;
 export interface HttpCrawlerOptions<Context extends InternalHttpCrawlingContext = InternalHttpCrawlingContext> extends BasicCrawlerOptions<Context> {

@@ -100,3 +101,3 @@ /**
 export declare type HttpHook<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
-JSONData extends Dictionary = any> = InternalHttpHook<HttpCrawlingContext<UserData, JSONData>>;
+JSONData extends JsonValue = any> = InternalHttpHook<HttpCrawlingContext<UserData, JSONData>>;
 /**

@@ -106,3 +107,3 @@ * @internal
 export interface InternalHttpCrawlingContext<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
-JSONData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
+JSONData extends JsonValue = any, // with default to Dictionary we cant use a typed router in untyped crawler
 Crawler = HttpCrawler<any>> extends CrawlingContext<UserData> {

@@ -131,6 +132,6 @@ /**
 }
-export interface HttpCrawlingContext<UserData = any, JSONData = any> extends InternalHttpCrawlingContext<UserData, JSONData, HttpCrawler<HttpCrawlingContext<UserData, JSONData>>> {
+export interface HttpCrawlingContext<UserData extends Dictionary = any, JSONData extends JsonValue = any> extends InternalHttpCrawlingContext<UserData, JSONData, HttpCrawler<HttpCrawlingContext<UserData, JSONData>>> {
 }
 export declare type HttpRequestHandler<UserData extends Dictionary = any, // with default to Dictionary we cant use a typed router in untyped crawler
-JSONData extends Dictionary = any> = RequestHandler<HttpCrawlingContext<UserData, JSONData>>;
+JSONData extends JsonValue = any> = RequestHandler<HttpCrawlingContext<UserData, JSONData>>;
 /**

@@ -278,3 +279,3 @@ * Provides a framework for the parallel crawling of web pages using plain HTTP requests.
 */
-protected _parseResponse(request: Request, responseStream: IncomingMessage, crawlingContext: Context): Promise<(Awaited<Partial<Context>> & {
+protected _parseResponse(request: Request, responseStream: IncomingMessage, crawlingContext: Context): Promise<(Partial<Context> & {
 isXml: boolean;
@@ -281,0 +282,0 @@ response: IncomingMessage;
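The substantive change in the declarations above is that every JSONData type parameter is now constrained to type-fest's JsonValue instead of the looser Dictionary, so typed handlers and hooks can only declare JSON-serializable payload shapes. Below is a minimal, self-contained sketch of what that tighter constraint means in practice; TypedHandler and ProductJson are hypothetical names used only for illustration and are not part of the crawlee API.

// Minimal sketch (not the crawlee API): the Dictionary -> JsonValue tightening
// as it affects handler generics. TypedHandler and ProductJson are illustrative.
import type { JsonValue } from 'type-fest';

// A payload type containing only JSON-serializable members.
type ProductJson = { title: string; price: number; tags: string[] };

// Mirrors the new shape of the generics: the JSON payload parameter must
// satisfy JsonValue rather than an arbitrary Dictionary.
type TypedHandler<UserData extends Record<string, unknown> = any, JSONData extends JsonValue = any> =
    (userData: UserData, json: JSONData) => Promise<void>;

// Compiles: ProductJson is assignable to JsonValue.
const handleProduct: TypedHandler<{ label: string }, ProductJson> = async (userData, json) => {
    console.log(userData.label, json.title, json.tags.join(', '));
};

// Would no longer compile: a payload carrying non-JSON values such as a Date
// or a function fails the JsonValue constraint, which is the point of the change.
// type BadJson = { fetchedAt: Date };
// const bad: TypedHandler<{}, BadJson> = async () => {}; // type error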

@@ -11,3 +11,3 @@ "use strict";
 const got_scraping_1 = require("got-scraping");
-const path_1 = require("path");
+const node_path_1 = require("node:path");
 const iconv_lite_1 = tslib_1.__importDefault(require("iconv-lite"));

@@ -593,3 +593,3 @@ const ow_1 = tslib_1.__importDefault(require("ow"));
 const parsedUrl = new URL(url);
-const contentTypeFromExtname = mime_types_1.default.contentType((0, path_1.extname)(parsedUrl.pathname))
+const contentTypeFromExtname = mime_types_1.default.contentType((0, node_path_1.extname)(parsedUrl.pathname))
 || 'application/octet-stream; charset=utf-8'; // Fallback content type, specified in https://tools.ietf.org/html/rfc7231#section-3.1.1.5
@@ -596,0 +596,0 @@ parsedContentType = content_type_1.default.parse(contentTypeFromExtname);
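The compiled code only swaps the bare 'path' require for the 'node:'-prefixed builtin; the surrounding logic, guessing a content type from the URL's file extension with the RFC 7231 octet-stream fallback, is unchanged. Here is a rough stand-alone TypeScript rendering of those lines, assuming esModuleInterop; the helper name is illustrative, not a crawlee export.

// Rough equivalent of the changed lines above (illustrative helper, not the
// crawlee API): derive a content type from the URL's extension via mime-types,
// import extname from the node:-prefixed builtin, and fall back to
// application/octet-stream per RFC 7231 §3.1.1.5.
import { extname } from 'node:path';
import mimeTypes from 'mime-types';
import contentType from 'content-type';

function contentTypeFromUrl(url: string) {
    const parsedUrl = new URL(url);
    const contentTypeFromExtname = mimeTypes.contentType(extname(parsedUrl.pathname))
        || 'application/octet-stream; charset=utf-8'; // fallback when the extension is unknown
    return contentType.parse(contentTypeFromExtname);
}

// e.g. { type: 'application/json', parameters: { charset: 'utf-8' } }
console.log(contentTypeFromUrl('https://example.com/feeds/items.json'));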

 {
 "name": "@crawlee/http",
-"version": "3.0.5-beta.10",
+"version": "3.0.5-beta.11",
 "description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.",

@@ -58,4 +58,4 @@ "engines": {
 "@apify/utilities": "^2.0.0",
-"@crawlee/basic": "^3.0.5-beta.10",
-"@crawlee/types": "^3.0.5-beta.10",
+"@crawlee/basic": "^3.0.5-beta.11",
+"@crawlee/types": "^3.0.5-beta.11",
 "@types/content-type": "^1.1.5",

@@ -66,4 +66,5 @@ "content-type": "^1.0.4",
 "mime-types": "^2.1.35",
-"ow": "^0.28.1"
+"ow": "^0.28.1",
+"type-fest": "^2.19.0"
 }
 }

