webcrawlerapi-js - npm Package Compare versions

Comparing version 1.0.9 to 1.0.10
dist/errors.d.ts

api.ts
import {CrawlRequest, Job, JobId, ScrapeRequest, ScrapeResponse} from "./model";
import { JobStatus } from "./constants";
import { WebcrawlerApiError, createErrorFromResponse } from "./errors";

@@ -32,14 +33,3 @@ const BASE_PATH = "https://api.webcrawlerapi.com"

-        const response = await fetch(url, requestOptions);
-        if (response.ok) {
-            return response.json();
-        }
-        try {
-            const data = await response.json();
-            throw new Error(
-                `failed to scrape, response returned ${response.status} ${response.statusText}: ${data?.error}`
-            );
-        } catch (e) {
-            throw e;
-        }
+        return await this.sendRequest(url, requestOptions);
    }

@@ -62,3 +52,3 @@

        if (jobIdResponse.id === '') {
-            throw new Error("Failed to fetch job status");
+            throw new WebcrawlerApiError('invalid_response', 'Failed to fetch job status', 0);
        }

@@ -80,3 +70,3 @@

        }
-        throw new Error("Scraping took too long, please retry or increase the number of polling retries");
+        throw new WebcrawlerApiError('timeout', 'Scraping took too long, please retry or increase the number of polling retries', 0);
    }

@@ -102,17 +92,3 @@

        };
-        const response = await fetch(url, requestOptions);
-        if (response.ok) {
-            return response.json();
-        }
-        try {
-            const data = await response.json();
-            throw new Error(
-                `failed to fetch job status ${response.status} ${response.statusText}: ${JSON.stringify(data)}`
-            );
-        } catch (e) {
-            throw new Error(
-                `failed to fetch job status ${response.status} ${response.statusText}`
-            );
-        }
+        return await this.sendRequest(url, requestOptions);
    }

@@ -139,3 +115,3 @@

        if (jobIdResponse.id === '') {
-            throw new Error("Failed to fetch job status");
+            throw new WebcrawlerApiError('invalid_response', 'Failed to fetch job status', 0);
        }

@@ -194,3 +170,3 @@

        }
-        throw new Error("Crawling took too long, please retry or increase the number of polling retries");
+        throw new WebcrawlerApiError('timeout', 'Crawling took too long, please retry or increase the number of polling retries', 0);
    }

@@ -225,21 +201,6 @@

            }
-        }
-        const response = await fetch(url, requestOptions);
-        if (response.ok) {
-            return response.json();
-        }
-        try {
-            const data = await response.json();
-            throw new Error(
-                `failed to fetch job status ${response.status} ${response.statusText}: ${JSON.stringify(data)}`
-            );
-        } catch (e) {
-            throw new Error(
-                `failed to fetch job status ${response.status} ${response.statusText}`
-            );
-        }
+        };
+        return await this.sendRequest(url, requestOptions);
    }
private async sendRequest(url: string, requestOptions: any): Promise<any> {

@@ -250,8 +211,22 @@ let response: Response;

        } catch (e) {
-            throw new Error(`Failed to send request: ${e}`);
+            throw new WebcrawlerApiError('network_error', `Failed to send request: ${e}`, 0);
        }
        if (!response.ok) {
-            const errorResponse = await response.json();
-            throw new Error(`${JSON.stringify(errorResponse)}`);
+            try {
+                const errorData = await response.json();
+                throw createErrorFromResponse(response, errorData);
+            } catch (e) {
+                if (e instanceof WebcrawlerApiError) {
+                    throw e;
+                }
+                // If we can't parse the error response, create a generic error
+                throw new WebcrawlerApiError(
+                    'unknown_error',
+                    `Request failed with status ${response.status} ${response.statusText}`,
+                    response.status
+                );
+            }
        }
        return response.json();

@@ -258,0 +233,0 @@ }
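
Taken together, the api.ts changes swap every ad-hoc generic Error for a typed WebcrawlerApiError and funnel all HTTP calls through the private sendRequest helper. Below is a hedged consumer-side sketch of what that enables; the property names code and statusCode are assumed from the constructor arguments (code, message, statusCode) used throughout this diff, and doCall stands in for any client method.

import { WebcrawlerApiError } from "webcrawlerapi-js";

// Sketch only: `code` and `statusCode` are assumed property names matching
// the constructor arguments (code, message, statusCode) seen in this diff.
async function withApiErrorHandling<T>(doCall: () => Promise<T>): Promise<T | null> {
    try {
        return await doCall();
    } catch (e) {
        if (e instanceof WebcrawlerApiError) {
            // Error codes used in this diff: 'timeout', 'invalid_response',
            // 'network_error', 'unknown_error'.
            console.error(`WebcrawlerAPI error ${e.code} (HTTP ${e.statusCode}): ${e.message}`);
            return null;
        }
        throw e; // not an API error; rethrow unchanged
    }
}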

index.ts

@@ -1,1 +1,4 @@

-export * from "./api";
+export * from "./api";
+export * from "./model";
+export * from "./constants";
+export * from "./errors";
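
With the index re-exports above, the error class, constants, and model types become importable from the package root rather than from internal paths. A small sketch, assuming the published entry point mirrors this index file:

// Names taken from the api.ts imports in this diff; availability at the
// package root assumes ./dist/index.js re-exports these modules as shown.
import { WebcrawlerApiError, JobStatus } from "webcrawlerapi-js";
import type { CrawlRequest, Job, JobId, ScrapeRequest, ScrapeResponse } from "webcrawlerapi-js";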

package.json

{
  "name": "webcrawlerapi-js",
-  "version": "1.0.9",
+  "version": "1.0.10",
-  "description": "JS client for WecrawlerAPI",
+  "description": "JS client for WebcrawlerAPI",
  "main": "./dist/index.js",
@@ -7,0 +7,0 @@ "types": "./dist/index.d.ts",