webcrawlerapi-js
Advanced tools
Comparing version 1.0.9 to 1.0.10
api.ts
import {CrawlRequest, Job, JobId, ScrapeRequest, ScrapeResponse} from "./model"; | ||
import { JobStatus } from "./constants"; | ||
import { WebcrawlerApiError, createErrorFromResponse } from "./errors"; | ||
@@ -32,14 +33,3 @@ const BASE_PATH = "https://api.webcrawlerapi.com" | ||
const response = await fetch(url, requestOptions); | ||
if (response.ok) { | ||
return response.json(); | ||
} | ||
try { | ||
const data = await response.json(); | ||
throw new Error( | ||
`failed to scrape, response returned ${response.status} ${response.statusText}: ${data?.error}` | ||
); | ||
} catch (e) { | ||
throw e; | ||
} | ||
return await this.sendRequest(url, requestOptions); | ||
} | ||
@@ -62,3 +52,3 @@ | ||
if (jobIdResponse.id === '') { | ||
throw new Error("Failed to fetch job status"); | ||
throw new WebcrawlerApiError('invalid_response', 'Failed to fetch job status', 0); | ||
} | ||
@@ -80,3 +70,3 @@ | ||
} | ||
throw new Error("Scraping took too long, please retry or increase the number of polling retries"); | ||
throw new WebcrawlerApiError('timeout', 'Scraping took too long, please retry or increase the number of polling retries', 0); | ||
} | ||
@@ -102,17 +92,3 @@ | ||
}; | ||
const response = await fetch(url, requestOptions); | ||
if (response.ok) { | ||
return response.json(); | ||
} | ||
try { | ||
const data = await response.json(); | ||
throw new Error( | ||
`failed to fetch job status ${response.status} ${response.statusText}: ${JSON.stringify(data)}` | ||
); | ||
} catch (e) { | ||
throw new Error( | ||
`failed to fetch job status ${response.status} ${response.statusText}` | ||
); | ||
} | ||
return await this.sendRequest(url, requestOptions); | ||
} | ||
@@ -139,3 +115,3 @@ | ||
if (jobIdResponse.id === '') { | ||
throw new Error("Failed to fetch job status"); | ||
throw new WebcrawlerApiError('invalid_response', 'Failed to fetch job status', 0); | ||
} | ||
@@ -194,3 +170,3 @@ | ||
} | ||
throw new Error("Crawling took too long, please retry or increase the number of polling retries"); | ||
throw new WebcrawlerApiError('timeout', 'Crawling took too long, please retry or increase the number of polling retries', 0); | ||
} | ||
@@ -225,21 +201,6 @@ | ||
} | ||
} | ||
const response = await fetch(url, requestOptions); | ||
if (response.ok) { | ||
return response.json(); | ||
} | ||
try { | ||
const data = await response.json(); | ||
throw new Error( | ||
`failed to fetch job status ${response.status} ${response.statusText}: ${JSON.stringify(data)}` | ||
); | ||
} catch (e) { | ||
throw new Error( | ||
`failed to fetch job status ${response.status} ${response.statusText}` | ||
); | ||
} | ||
}; | ||
return await this.sendRequest(url, requestOptions); | ||
} | ||
private async sendRequest(url: string, requestOptions: any): Promise<any> { | ||
@@ -250,8 +211,22 @@ let response: Response; | ||
} catch (e) { | ||
throw new Error(`Failed to send request: ${e}`); | ||
throw new WebcrawlerApiError('network_error', `Failed to send request: ${e}`, 0); | ||
} | ||
if (!response.ok) { | ||
const errorResponse = await response.json(); | ||
throw new Error(`${JSON.stringify(errorResponse)}`); | ||
try { | ||
const errorData = await response.json(); | ||
throw createErrorFromResponse(response, errorData); | ||
} catch (e) { | ||
if (e instanceof WebcrawlerApiError) { | ||
throw e; | ||
} | ||
// If we can't parse the error response, create a generic error | ||
throw new WebcrawlerApiError( | ||
'unknown_error', | ||
`Request failed with status ${response.status} ${response.statusText}`, | ||
response.status | ||
); | ||
} | ||
} | ||
return response.json(); | ||
@@ -258,0 +233,0 @@ } |
@@ -1,1 +0,4 @@ | ||
export * from "./api"; | ||
export * from "./api"; | ||
export * from "./model"; | ||
export * from "./constants"; | ||
export * from "./errors"; |
{ | ||
"name": "webcrawlerapi-js", | ||
"version": "1.0.9", | ||
"version": "1.0.10", | ||
"description": "JS client for WecrawlerAPI", | ||
"description": "JS client for WebcrawlerAPI", | ||
"main": "./dist/index.js", | ||
@@ -7,0 +7,0 @@ "types": "./dist/index.d.ts", |
Network access
Supply chain risk: this module accesses the network.
Found 1 instance in 1 package
URL strings
Supply chain risk: the package contains fragments of external URLs or IP addresses, which it may be accessing at runtime.
Found 1 instance in 1 package
1
-50%0
-100%19993
-34.37%484
-31.83%