@hyperbrowser/sdk - npm Package Compare versions

Comparing version 0.25.0 to 0.26.0


dist/services/crawl.js

@@ -7,2 +7,3 @@ "use strict";

 const client_1 = require("../client");
+const constants_1 = require("../types/constants");
 class CrawlService extends base_1.BaseService {

@@ -57,23 +58,59 @@ /**

 let jobResponse;
+let failures = 0;
 while (true) {
-    jobResponse = await this.get(jobId);
-    if (jobResponse.status === "completed" || jobResponse.status === "failed") {
-        break;
-    }
+    try {
+        jobResponse = await this.get(jobId, { batchSize: 1 });
+        if (jobResponse.status === "completed" || jobResponse.status === "failed") {
+            break;
+        }
+        failures = 0;
+    }
+    catch (error) {
+        failures++;
+        if (failures >= constants_1.POLLING_ATTEMPTS) {
+            throw new client_1.HyperbrowserError(`Failed to poll crawl job ${jobId} after ${constants_1.POLLING_ATTEMPTS} attempts: ${error}`);
+        }
+    }
     await (0, utils_1.sleep)(2000);
 }
+failures = 0;
 if (!returnAllPages) {
-    return jobResponse;
+    while (true) {
+        try {
+            jobResponse = await this.get(jobId);
+            return jobResponse;
+        }
+        catch (error) {
+            failures++;
+            if (failures >= constants_1.POLLING_ATTEMPTS) {
+                throw new client_1.HyperbrowserError(`Failed to get crawl job ${jobId} after ${constants_1.POLLING_ATTEMPTS} attempts: ${error}`);
+            }
+        }
+        await (0, utils_1.sleep)(500);
+    }
 }
 jobResponse.currentPageBatch = 0;
 jobResponse.data = [];
+failures = 0;
 while (jobResponse.currentPageBatch < jobResponse.totalPageBatches) {
-    const tmpJobResponse = await this.get(jobId, {
-        page: jobResponse.currentPageBatch + 1,
-    });
-    if (tmpJobResponse.data) {
-        jobResponse.data?.push(...tmpJobResponse.data);
-    }
-    jobResponse.currentPageBatch = tmpJobResponse.currentPageBatch;
-    jobResponse.totalCrawledPages = tmpJobResponse.totalCrawledPages;
-    jobResponse.totalPageBatches = tmpJobResponse.totalPageBatches;
-    jobResponse.batchSize = tmpJobResponse.batchSize;
+    try {
+        const tmpJobResponse = await this.get(jobId, {
+            page: jobResponse.currentPageBatch + 1,
+            batchSize: 100,
+        });
+        if (tmpJobResponse.data) {
+            jobResponse.data?.push(...tmpJobResponse.data);
+        }
+        jobResponse.currentPageBatch = tmpJobResponse.currentPageBatch;
+        jobResponse.totalCrawledPages = tmpJobResponse.totalCrawledPages;
+        jobResponse.totalPageBatches = tmpJobResponse.totalPageBatches;
+        jobResponse.batchSize = tmpJobResponse.batchSize;
+        failures = 0;
+    }
+    catch (error) {
+        failures++;
+        if (failures >= constants_1.POLLING_ATTEMPTS) {
+            throw new client_1.HyperbrowserError(`Failed to get crawl job ${jobId} after ${constants_1.POLLING_ATTEMPTS} attempts: ${error}`);
+        }
+    }
     await (0, utils_1.sleep)(500);

@@ -80,0 +117,0 @@ }
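
The behavioral change in this hunk is that crawl polling now tolerates transient request failures: each successful poll resets a failure counter, and the SDK only gives up after POLLING_ATTEMPTS (5) consecutive errors. Below is a minimal standalone TypeScript sketch of that pattern for reference; the function and type names are illustrative, not part of the SDK's public API.

const POLLING_ATTEMPTS = 5;

type JobStatus = "pending" | "running" | "completed" | "failed";

interface JobResponse {
    status: JobStatus;
}

const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

// Poll a job until it reaches a terminal state, retrying transient failures.
// Mirrors the compiled logic above: a successful poll resets the counter,
// and the loop throws only after POLLING_ATTEMPTS consecutive errors.
async function pollUntilDone(
    getJob: (jobId: string) => Promise<JobResponse>,
    jobId: string
): Promise<JobResponse> {
    let failures = 0;
    while (true) {
        try {
            const job = await getJob(jobId);
            if (job.status === "completed" || job.status === "failed") {
                return job;
            }
            failures = 0;
        } catch (error) {
            failures++;
            if (failures >= POLLING_ATTEMPTS) {
                throw new Error(`Failed to poll job ${jobId} after ${POLLING_ATTEMPTS} attempts: ${error}`);
            }
        }
        await sleep(2000); // same 2-second cadence as the compiled code above
    }
}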

@@ -8,2 +8,3 @@ "use strict";

 const client_1 = require("../client");
+const constants_1 = require("../types/constants");
 const isZodSchema = (schema) => {

@@ -69,7 +70,17 @@ return (schema &&

 let jobResponse;
+let failures = 0;
 while (true) {
-    jobResponse = await this.get(jobId);
-    if (jobResponse.status === "completed" || jobResponse.status === "failed") {
-        break;
-    }
+    try {
+        jobResponse = await this.get(jobId);
+        if (jobResponse.status === "completed" || jobResponse.status === "failed") {
+            break;
+        }
+        failures = 0;
+    }
+    catch (error) {
+        failures++;
+        if (failures >= constants_1.POLLING_ATTEMPTS) {
+            throw new client_1.HyperbrowserError(`Failed to poll extract job ${jobId} after ${constants_1.POLLING_ATTEMPTS} attempts: ${error}`);
+        }
+    }
     await (0, utils_1.sleep)(2000);

@@ -76,0 +87,0 @@ }
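
Because the extract (and crawl/scrape) pollers now throw a HyperbrowserError once the attempt budget is exhausted, callers that previously assumed the wait helpers would only ever resolve should be prepared for a rejection. One hedged way a consumer might wrap such a wait is sketched below; waitForJob is a placeholder for whatever SDK call or helper your code awaits, not an SDK identifier.

// Wrap a long-running wait so that the SDK's new "give up after
// POLLING_ATTEMPTS consecutive failures" behavior surfaces as a handled
// error instead of an unhandled rejection. waitForJob is a placeholder.
async function waitOrNull<T>(waitForJob: () => Promise<T>): Promise<T | null> {
    try {
        return await waitForJob();
    } catch (error) {
        // Reached after the SDK exhausts its polling attempts (5 in 0.26.0).
        console.error("Job polling gave up:", error);
        return null;
    }
}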

@@ -7,2 +7,3 @@ "use strict";

 const client_1 = require("../client");
+const constants_1 = require("../types/constants");
 class BatchScrapeService extends base_1.BaseService {

@@ -60,3 +61,3 @@ /**

 try {
-    jobResponse = await this.get(jobId);
+    jobResponse = await this.get(jobId, { batchSize: 1 });
     if (jobResponse.status === "completed" || jobResponse.status === "failed") {

@@ -69,4 +70,4 @@ break;

 failures++;
-if (failures >= 5) {
-    throw new client_1.HyperbrowserError(`Failed to poll batch scrape job ${jobId} after 5 attempts: ${error}`);
+if (failures >= constants_1.POLLING_ATTEMPTS) {
+    throw new client_1.HyperbrowserError(`Failed to poll batch scrape job ${jobId} after ${constants_1.POLLING_ATTEMPTS} attempts: ${error}`);
 }

@@ -76,5 +77,20 @@ }

 }
+failures = 0;
 if (!returnAllPages) {
-    return jobResponse;
+    while (true) {
+        try {
+            jobResponse = await this.get(jobId);
+            return jobResponse;
+        }
+        catch (error) {
+            failures++;
+            if (failures >= constants_1.POLLING_ATTEMPTS) {
+                throw new client_1.HyperbrowserError(`Failed to get batch scrape job ${jobId} after ${constants_1.POLLING_ATTEMPTS} attempts: ${error}`);
+            }
+        }
+        await (0, utils_1.sleep)(500);
+    }
 }
 jobResponse.currentPageBatch = 0;
 jobResponse.data = [];
 failures = 0;

@@ -98,4 +114,4 @@ while (jobResponse.currentPageBatch < jobResponse.totalPageBatches) {

 failures++;
-if (failures >= 5) {
-    throw new client_1.HyperbrowserError(`Failed to get batch page ${jobResponse.currentPageBatch + 1} for job ${jobId} after 5 attempts: ${error}`);
+if (failures >= constants_1.POLLING_ATTEMPTS) {
+    throw new client_1.HyperbrowserError(`Failed to get batch page ${jobResponse.currentPageBatch + 1} for job ${jobId} after ${constants_1.POLLING_ATTEMPTS} attempts: ${error}`);
 }

@@ -158,7 +174,17 @@ }

 let jobResponse;
+let failures = 0;
 while (true) {
-    jobResponse = await this.get(jobId);
-    if (jobResponse.status === "completed" || jobResponse.status === "failed") {
-        break;
-    }
+    try {
+        jobResponse = await this.get(jobId);
+        if (jobResponse.status === "completed" || jobResponse.status === "failed") {
+            break;
+        }
+        failures = 0;
+    }
+    catch (error) {
+        failures++;
+        if (failures >= constants_1.POLLING_ATTEMPTS) {
+            throw new client_1.HyperbrowserError(`Failed to poll scrape job ${jobId} after ${constants_1.POLLING_ATTEMPTS} attempts: ${error}`);
+        }
+    }
     await (0, utils_1.sleep)(2000);

@@ -165,0 +191,0 @@ }
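
When returnAllPages is requested, both the crawl and batch-scrape services now page through results in batches (the compiled code passes batchSize: 100) and concatenate each batch's data onto the response. The sketch below isolates that accumulation loop without the retry wrapper, using illustrative types; fetchBatch is a stand-in for the service's internal get(jobId, { page, batchSize }) call.

interface BatchedResponse<T> {
    data?: T[];
    currentPageBatch: number;
    totalPageBatches: number;
}

// Collect every page batch by requesting batch 1, 2, ... until the reported
// totalPageBatches is reached, appending each batch's data along the way.
async function collectAllBatches<T>(
    fetchBatch: (page: number) => Promise<BatchedResponse<T>>
): Promise<T[]> {
    const all: T[] = [];
    let currentPageBatch = 0;
    let totalPageBatches = 1; // refreshed from every response
    while (currentPageBatch < totalPageBatches) {
        const batch = await fetchBatch(currentPageBatch + 1);
        if (batch.data) {
            all.push(...batch.data);
        }
        currentPageBatch = batch.currentPageBatch;
        totalPageBatches = batch.totalPageBatches;
    }
    return all;
}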

@@ -5,5 +5,6 @@ export type ScrapeFormat = "markdown" | "html" | "links" | "screenshot";

 export type CrawlJobStatus = "pending" | "running" | "completed" | "failed";
-export type ScrapePageStatus = "completed" | "failed";
+export type ScrapePageStatus = "completed" | "failed" | "pending" | "running";
 export type CrawlPageStatus = "completed" | "failed";
 export type ScrapeWaitUntil = "load" | "domcontentloaded" | "networkidle";
+export declare const POLLING_ATTEMPTS = 5;
export type Country = "AD" | "AE" | "AF" | "AL" | "AM" | "AO" | "AR" | "AT" | "AU" | "AW" | "AZ" | "BA" | "BD" | "BE" | "BG" | "BH" | "BJ" | "BO" | "BR" | "BS" | "BT" | "BY" | "BZ" | "CA" | "CF" | "CH" | "CI" | "CL" | "CM" | "CN" | "CO" | "CR" | "CU" | "CY" | "CZ" | "DE" | "DJ" | "DK" | "DM" | "EC" | "EE" | "EG" | "ES" | "ET" | "EU" | "FI" | "FJ" | "FR" | "GB" | "GE" | "GH" | "GM" | "GR" | "HK" | "HN" | "HR" | "HT" | "HU" | "ID" | "IE" | "IL" | "IN" | "IQ" | "IR" | "IS" | "IT" | "JM" | "JO" | "JP" | "KE" | "KH" | "KR" | "KW" | "KZ" | "LB" | "LI" | "LR" | "LT" | "LU" | "LV" | "MA" | "MC" | "MD" | "ME" | "MG" | "MK" | "ML" | "MM" | "MN" | "MR" | "MT" | "MU" | "MV" | "MX" | "MY" | "MZ" | "NG" | "NL" | "NO" | "NZ" | "OM" | "PA" | "PE" | "PH" | "PK" | "PL" | "PR" | "PT" | "PY" | "QA" | "RANDOM_COUNTRY" | "RO" | "RS" | "RU" | "SA" | "SC" | "SD" | "SE" | "SG" | "SI" | "SK" | "SN" | "SS" | "TD" | "TG" | "TH" | "TM" | "TN" | "TR" | "TT" | "TW" | "UA" | "UG" | "US" | "UY" | "UZ" | "VE" | "VG" | "VN" | "YE" | "ZA" | "ZM" | "ZW" | "ad" | "ae" | "af" | "al" | "am" | "ao" | "ar" | "at" | "au" | "aw" | "az" | "ba" | "bd" | "be" | "bg" | "bh" | "bj" | "bo" | "br" | "bs" | "bt" | "by" | "bz" | "ca" | "cf" | "ch" | "ci" | "cl" | "cm" | "cn" | "co" | "cr" | "cu" | "cy" | "cz" | "de" | "dj" | "dk" | "dm" | "ec" | "ee" | "eg" | "es" | "et" | "eu" | "fi" | "fj" | "fr" | "gb" | "ge" | "gh" | "gm" | "gr" | "hk" | "hn" | "hr" | "ht" | "hu" | "id" | "ie" | "il" | "in" | "iq" | "ir" | "is" | "it" | "jm" | "jo" | "jp" | "ke" | "kh" | "kr" | "kw" | "kz" | "lb" | "li" | "lr" | "lt" | "lu" | "lv" | "ma" | "mc" | "md" | "me" | "mg" | "mk" | "ml" | "mm" | "mn" | "mr" | "mt" | "mu" | "mv" | "mx" | "my" | "mz" | "ng" | "nl" | "no" | "nz" | "om" | "pa" | "pe" | "ph" | "pk" | "pl" | "pr" | "pt" | "py" | "qa" | "ro" | "rs" | "ru" | "sa" | "sc" | "sd" | "se" | "sg" | "si" | "sk" | "sn" | "ss" | "td" | "tg" | "th" | "tm" | "tn" | "tr" | "tt" | "tw" | "ua" | "ug" | "us" | "uy" | "uz" | "ve" | "vg" | "vn" | "ye" | "za" | "zm" | "zw";

@@ -10,0 +11,0 @@ export type OperatingSystem = "windows" | "android" | "macos" | "linux" | "ios";
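
Note that ScrapePageStatus now includes "pending" and "running", so per-page statuses are no longer guaranteed to be terminal. The short sketch below restates the union locally (to stay self-contained rather than importing from the SDK) and shows one way downstream code might narrow it.

// Mirrors the widened declaration above; restated locally for the example.
type ScrapePageStatus = "completed" | "failed" | "pending" | "running";

function isPageSettled(status: ScrapePageStatus): boolean {
    return status === "completed" || status === "failed";
}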

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.POLLING_ATTEMPTS = void 0;
exports.POLLING_ATTEMPTS = 5;

@@ -6,5 +6,7 @@ import { z } from "zod";

     urls: string[];
     systemPrompt?: string;
     prompt?: string;
     schema?: z.ZodSchema | object;
     sessionOptions?: CreateSessionParams;
+    maxLinks?: number;
 }

@@ -11,0 +13,0 @@ export interface StartExtractJobResponse {
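
The extract job parameters gain an optional maxLinks field. Below is a hedged example of a params object shaped like the declaration above; the schema contents, URL, and the assumption that maxLinks caps how many links the job will process are illustrative, not taken from SDK documentation.

import { z } from "zod";

// Example payload matching the declared fields: urls, prompt/systemPrompt,
// an optional zod (or plain object) schema, and the new maxLinks field.
const productSchema = z.object({
    title: z.string(),
    price: z.number(),
});

const extractJobParams = {
    urls: ["https://example.com/products"],
    prompt: "Extract the product title and price from each page.",
    schema: productSchema,
    maxLinks: 10, // new in 0.26.0; presumably limits how many links are processed
};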

 {
   "name": "@hyperbrowser/sdk",
-  "version": "0.25.0",
+  "version": "0.26.0",
   "description": "Node SDK for Hyperbrowser API",

@@ -5,0 +5,0 @@ "author": "",
