@crawlee/basic
Advanced tools
Comparing version 3.10.6-beta.24 to 3.10.6-beta.25
import type { Log } from '@apify/log'; | ||
import { TimeoutError } from '@apify/timeout'; | ||
import type { AddRequestsBatchedOptions, AddRequestsBatchedResult, AutoscaledPoolOptions, CrawlingContext, EnqueueLinksOptions, EventManager, DatasetExportOptions, FinalStatistics, GetUserDataFromRequest, ProxyInfo, Request, RequestList, RequestOptions, RequestProvider, RouterHandler, RouterRoutes, Session, SessionPoolOptions, Source, StatisticState, StatisticsOptions, LoadedContext } from '@crawlee/core'; | ||
import type { AddRequestsBatchedOptions, AddRequestsBatchedResult, AutoscaledPoolOptions, CrawlingContext, EnqueueLinksOptions, EventManager, DatasetExportOptions, FinalStatistics, GetUserDataFromRequest, IRequestList, ProxyInfo, Request, RequestOptions, RequestProvider, RouterHandler, RouterRoutes, Session, SessionPoolOptions, Source, StatisticState, StatisticsOptions, LoadedContext } from '@crawlee/core'; | ||
import { AutoscaledPool, Configuration, Dataset, SessionPool, Statistics } from '@crawlee/core'; | ||
@@ -88,3 +88,3 @@ import type { Awaitable, BatchAddRequestsResult, Dictionary, SetStatusMessageOptions } from '@crawlee/types'; | ||
*/ | ||
requestList?: RequestList; | ||
requestList?: IRequestList; | ||
/** | ||
@@ -337,3 +337,3 @@ * Dynamic queue of URLs to be processed. This is useful for recursive crawling of websites. | ||
*/ | ||
requestList?: RequestList; | ||
requestList?: IRequestList; | ||
/** | ||
@@ -525,3 +525,3 @@ * Dynamic queue of URLs to be processed. This is useful for recursive crawling of websites. | ||
*/ | ||
protected delayRequest(request: Request, source: RequestList | RequestProvider): boolean; | ||
protected delayRequest(request: Request, source: IRequestList | RequestProvider): boolean; | ||
/** | ||
@@ -549,3 +549,3 @@ * Wrapper around requestHandler that fetches requests from RequestList/RequestQueue | ||
*/ | ||
protected _requestFunctionErrorHandler(error: Error, crawlingContext: Context, source: RequestList | RequestProvider): Promise<void>; | ||
protected _requestFunctionErrorHandler(error: Error, crawlingContext: Context, source: IRequestList | RequestProvider): Promise<void>; | ||
protected _tagUserHandlerError<T>(cb: () => unknown): Promise<T>; | ||
@@ -552,0 +552,0 @@ protected _handleFailedRequestHandler(crawlingContext: Context, error: Error): Promise<void>; |
{ | ||
"name": "@crawlee/basic", | ||
"version": "3.10.6-beta.24", | ||
"version": "3.10.6-beta.25", | ||
"description": "The scalable web crawling and scraping library for JavaScript/Node.js. Enables development of data extraction and web automation jobs (not only) with headless Chrome and Puppeteer.", | ||
@@ -51,5 +51,5 @@ "engines": { | ||
"@apify/utilities": "^2.7.10", | ||
"@crawlee/core": "3.10.6-beta.24", | ||
"@crawlee/types": "3.10.6-beta.24", | ||
"@crawlee/utils": "3.10.6-beta.24", | ||
"@crawlee/core": "3.10.6-beta.25", | ||
"@crawlee/types": "3.10.6-beta.25", | ||
"@crawlee/utils": "3.10.6-beta.25", | ||
"csv-stringify": "^6.2.0", | ||
@@ -70,3 +70,3 @@ "fs-extra": "^11.0.0", | ||
}, | ||
"gitHead": "cf47241013a1cd19d7662b5761ce1b79b9beb565" | ||
"gitHead": "e8a5d51f0fe0d8f151c570c3ac07d1df68b837bd" | ||
} |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
307066
1996
+ Added @crawlee/core@3.10.6-beta.25 (transitive)
+ Added @crawlee/memory-storage@3.10.6-beta.25 (transitive)
+ Added @crawlee/types@3.10.6-beta.25 (transitive)
+ Added @crawlee/utils@3.10.6-beta.25 (transitive)
- Removed @crawlee/core@3.10.6-beta.24 (transitive)
- Removed @crawlee/memory-storage@3.10.6-beta.24 (transitive)
- Removed @crawlee/types@3.10.6-beta.24 (transitive)
- Removed @crawlee/utils@3.10.6-beta.24 (transitive)
Updated @crawlee/core@3.10.6-beta.25