@hyperbrowser/sdk
Advanced tools
Comparing version 0.16.0 to 0.17.0
@@ -1,2 +0,12 @@ | ||
export { SCRAPE_TOOL_OPENAI, CRAWL_TOOL_OPENAI } from "./openai"; | ||
export { SCRAPE_TOOL_ANTHROPIC, CRAWL_TOOL_ANTHROPIC } from "./anthropic"; | ||
import { HyperbrowserClient } from "../client"; | ||
import { StartScrapeJobParams, StartCrawlJobParams } from "../types"; | ||
/**
 * LLM tool wrapper around a single-page scrape job.
 * NOTE(review): behavior described from the compiled implementation in this
 * same dump — confirm against the package's TypeScript source.
 */
export declare class WebsiteScrapeTool { | ||
/** OpenAI chat-completion tool definition for the scrape tool. */
static openaiToolDefinition: import("./openai").ChatCompletionTool; | ||
/** Anthropic tool definition for the scrape tool. */
static anthropicToolDefinition: import("./anthropic").Tool; | ||
/** Starts a scrape job via `hb` and resolves with the page's markdown ("" when absent). */
static runnable(hb: HyperbrowserClient, params: StartScrapeJobParams): Promise<string>; | ||
} | ||
/**
 * LLM tool wrapper around a multi-page crawl job.
 * NOTE(review): behavior described from the compiled implementation in this
 * same dump — confirm against the package's TypeScript source.
 */
export declare class WebsiteCrawlTool { | ||
/** OpenAI chat-completion tool definition for the crawl tool. */
static openaiToolDefinition: import("./openai").ChatCompletionTool; | ||
/** Anthropic tool definition for the crawl tool. */
static anthropicToolDefinition: import("./anthropic").Tool; | ||
/** Starts a crawl job via `hb` and resolves with all pages' markdown joined into one string. */
static runnable(hb: HyperbrowserClient, params: StartCrawlJobParams): Promise<string>; | ||
} |
// Compiled CommonJS output (TypeScript downlevel emit) — re-export wiring.
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
// Pre-declare the re-exported tool-definition bindings (standard TS re-export emit).
exports.CRAWL_TOOL_ANTHROPIC = exports.SCRAPE_TOOL_ANTHROPIC = exports.CRAWL_TOOL_OPENAI = exports.SCRAPE_TOOL_OPENAI = void 0; | ||
var openai_1 = require("./openai"); | ||
// Live getter re-exports: reads always reflect the current binding in ./openai.
Object.defineProperty(exports, "SCRAPE_TOOL_OPENAI", { enumerable: true, get: function () { return openai_1.SCRAPE_TOOL_OPENAI; } }); | ||
Object.defineProperty(exports, "CRAWL_TOOL_OPENAI", { enumerable: true, get: function () { return openai_1.CRAWL_TOOL_OPENAI; } }); | ||
var anthropic_1 = require("./anthropic"); | ||
Object.defineProperty(exports, "SCRAPE_TOOL_ANTHROPIC", { enumerable: true, get: function () { return anthropic_1.SCRAPE_TOOL_ANTHROPIC; } }); | ||
Object.defineProperty(exports, "CRAWL_TOOL_ANTHROPIC", { enumerable: true, get: function () { return anthropic_1.CRAWL_TOOL_ANTHROPIC; } }); | ||
// NOTE(review): the lines below re-declare `openai_1`/`anthropic_1` with `const`
// after the `var` declarations above — in a single real file this would be a
// SyntaxError. This looks like two separate compiled modules (an index re-export
// file and the tools implementation file) concatenated by the diff viewer;
// confirm against the actual published package contents.
exports.WebsiteCrawlTool = exports.WebsiteScrapeTool = void 0; | ||
const openai_1 = require("./openai"); | ||
const anthropic_1 = require("./anthropic"); | ||
class WebsiteScrapeTool { | ||
static async runnable(hb, params) { | ||
const resp = await hb.scrape.startAndWait(params); | ||
return resp.data?.markdown || ""; | ||
} | ||
} | ||
// Export the class and attach its provider-specific tool definitions
// (compiled-TS pattern: static properties assigned after the class body).
exports.WebsiteScrapeTool = WebsiteScrapeTool; | ||
WebsiteScrapeTool.openaiToolDefinition = openai_1.SCRAPE_TOOL_OPENAI; | ||
WebsiteScrapeTool.anthropicToolDefinition = anthropic_1.SCRAPE_TOOL_ANTHROPIC; | ||
class WebsiteCrawlTool { | ||
static async runnable(hb, params) { | ||
const resp = await hb.crawl.startAndWait(params); | ||
let markdown = ""; | ||
if (resp.data) { | ||
for (const page of resp.data) { | ||
if (page.markdown) { | ||
markdown += `\n${"-".repeat(50)}\nUrl: ${page.url}\nMarkdown:\n${page.markdown}\n`; | ||
} | ||
} | ||
} | ||
return markdown; | ||
} | ||
} | ||
// Export the class and attach its provider-specific tool definitions
// (compiled-TS pattern: static properties assigned after the class body).
exports.WebsiteCrawlTool = WebsiteCrawlTool; | ||
WebsiteCrawlTool.openaiToolDefinition = openai_1.CRAWL_TOOL_OPENAI; | ||
WebsiteCrawlTool.anthropicToolDefinition = anthropic_1.CRAWL_TOOL_ANTHROPIC; |
@@ -11,2 +11,3 @@ "use strict"; | ||
parameters: schema_1.SCRAPE_SCHEMA, | ||
strict: true, | ||
}, | ||
@@ -20,3 +21,4 @@ }; | ||
parameters: schema_1.CRAWL_SCHEMA, | ||
strict: true, | ||
}, | ||
}; |
@@ -24,2 +24,4 @@ export declare const SCRAPE_OPTIONS: { | ||
}; | ||
required: string[]; | ||
additionalProperties: boolean; | ||
}; | ||
@@ -56,5 +58,8 @@ export declare const SCRAPE_SCHEMA: { | ||
}; | ||
required: string[]; | ||
additionalProperties: boolean; | ||
}; | ||
}; | ||
required: string[]; | ||
additionalProperties: boolean; | ||
}; | ||
@@ -70,3 +75,2 @@ export declare const CRAWL_SCHEMA: { | ||
type: string; | ||
default: number; | ||
description: string; | ||
@@ -119,5 +123,8 @@ }; | ||
}; | ||
required: string[]; | ||
additionalProperties: boolean; | ||
}; | ||
}; | ||
required: string[]; | ||
additionalProperties: boolean; | ||
}; |
@@ -27,2 +27,4 @@ "use strict"; | ||
}, | ||
required: ["include_tags", "exclude_tags", "only_main_content"], | ||
additionalProperties: false, | ||
}; | ||
@@ -38,3 +40,4 @@ exports.SCRAPE_SCHEMA = { | ||
}, | ||
required: ["url"], | ||
required: ["url", "scrape_options"], | ||
additionalProperties: false, | ||
}; | ||
@@ -50,3 +53,2 @@ exports.CRAWL_SCHEMA = { | ||
type: "number", | ||
default: 10, | ||
description: "The maximum number of pages to crawl", | ||
@@ -78,3 +80,12 @@ }, | ||
}, | ||
required: ["url"], | ||
required: [ | ||
"url", | ||
"max_pages", | ||
"follow_links", | ||
"ignore_sitemap", | ||
"exclude_patterns", | ||
"include_patterns", | ||
"scrape_options", | ||
], | ||
additionalProperties: false, | ||
}; |
{ | ||
"name": "@hyperbrowser/sdk", | ||
"version": "0.16.0", | ||
"version": "0.17.0", | ||
"description": "Node SDK for Hyperbrowser API", | ||
@@ -25,3 +25,8 @@ "author": "", | ||
"browser", | ||
"automation" | ||
"automation", | ||
"webscraping", | ||
"webcrawling", | ||
"scraping", | ||
"crawling", | ||
"ai" | ||
], | ||
@@ -28,0 +33,0 @@ "dependencies": { |
Major refactor
Supply chain risk: The package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: The package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
1
13150
9
292
1