@vercel/next-browser
Advanced tools
| import { connect as netConnect } from "node:net"; | ||
| import { readFileSync, existsSync, rmSync } from "node:fs"; | ||
| import { spawn } from "node:child_process"; | ||
| import { setTimeout as sleep } from "node:timers/promises"; | ||
| import { fileURLToPath } from "node:url"; | ||
| import { cloudSocketPath, cloudPidFile } from "./cloud-paths.js"; | ||
/**
 * Send one request to the cloud daemon over its unix socket and return the
 * parsed JSON response. Starts the daemon first if it isn't running.
 * @param {string} action - daemon action ("create" | "exec" | "status" | "destroy")
 * @param {object} [payload] - extra fields merged into the request object
 * @returns {Promise<object>} parsed response line from the daemon
 */
export async function cloudSend(action, payload = {}) {
  await ensureCloudDaemon();
  const socket = await connect();
  try {
    const id = String(Date.now());
    socket.write(JSON.stringify({ id, action, ...payload }) + "\n");
    const line = await readLine(socket);
    return JSON.parse(line);
  } finally {
    // Always release the socket — previously it leaked when readLine
    // rejected or the daemon returned malformed JSON.
    socket.end();
  }
}
/**
 * Make sure the cloud daemon is reachable, spawning it when necessary.
 * A daemon counts as "up" only when its pid is alive AND its socket accepts
 * a connection. After spawning, polls the socket for up to ~5 seconds.
 * @throws when the daemon never becomes reachable
 */
async function ensureCloudDaemon() {
  const alreadyUp = daemonAlive() && (await connect().then(ok, no));
  if (alreadyUp) {
    return;
  }

  // Launch the daemon detached so it survives this CLI process.
  const ext = import.meta.url.endsWith(".ts") ? ".ts" : ".js";
  const daemonScript = fileURLToPath(new URL(`./cloud-daemon${ext}`, import.meta.url));
  const child = spawn(process.execPath, [daemonScript], {
    detached: true,
    stdio: "ignore",
  });
  child.unref();

  // Probe the socket up to 50 times, 100ms apart.
  let remaining = 50;
  while (remaining-- > 0) {
    if (await connect().then(ok, no)) {
      return;
    }
    await sleep(100);
  }
  throw new Error(`cloud daemon failed to start (${cloudSocketPath})`);
}
/**
 * Check whether the pid recorded in the pid file belongs to a live process.
 * Cleans up the pid file and socket when the daemon is gone or the pid file
 * is corrupt.
 * @returns {boolean} true iff a plausible daemon process exists
 */
function daemonAlive() {
  if (!existsSync(cloudPidFile))
    return false;
  const pid = Number(readFileSync(cloudPidFile, "utf-8"));
  // Guard against an empty/corrupt pid file: Number("") === 0, and
  // process.kill(0, 0) signals our OWN process group, which always succeeds
  // — a false positive. Negative pids target process groups too.
  if (!Number.isInteger(pid) || pid <= 0) {
    rmSync(cloudPidFile, { force: true });
    rmSync(cloudSocketPath, { force: true });
    return false;
  }
  try {
    process.kill(pid, 0); // signal 0: existence check only, sends nothing
    return true;
  }
  catch {
    // Process is gone — remove the stale pid file and socket.
    rmSync(cloudPidFile, { force: true });
    rmSync(cloudSocketPath, { force: true });
    return false;
  }
}
/**
 * Open a connection to the daemon's unix socket.
 * Resolves with the connected socket, or rejects on the first error.
 */
function connect() {
  return new Promise((resolve, reject) => {
    const sock = netConnect(cloudSocketPath);
    sock.once("error", reject);
    sock.once("connect", () => {
      resolve(sock);
    });
  });
}
/**
 * Read a single newline-terminated line from the socket.
 * @param {import("node:net").Socket} socket
 * @returns {Promise<string>} the line, without its trailing "\n"
 * Rejects on socket error, or if the socket closes before a full line
 * arrives (previously the promise would hang forever in that case).
 */
function readLine(socket) {
  return new Promise((resolve, reject) => {
    let buffer = "";
    const onData = (chunk) => {
      buffer += chunk;
      const newline = buffer.indexOf("\n");
      if (newline >= 0) {
        // Stop accumulating once we have our line.
        socket.off("data", onData);
        resolve(buffer.slice(0, newline));
      }
    };
    socket.on("data", onData);
    socket.on("error", reject);
    // Settle the promise if the peer disconnects mid-response; a reject
    // after resolve is a harmless no-op.
    socket.on("close", () =>
      reject(new Error("socket closed before a full line was received")));
  });
}
// Probe success handler: tear down the probe socket, report reachable.
function ok(s) {
  s.destroy();
  return true;
}
// Probe failure handler: swallow the connection error, report unreachable.
function no() {
  return false;
}
| import { createServer } from "node:net"; | ||
| import { mkdirSync, writeFileSync, rmSync } from "node:fs"; | ||
| import * as cloud from "./cloud.js"; | ||
| import { cloudSocketDir, cloudSocketPath, cloudPidFile } from "./cloud-paths.js"; | ||
// Claim the daemon's runtime directory; 0o700 keeps the unix socket and
// pid file private to the current user.
mkdirSync(cloudSocketDir, { recursive: true, mode: 0o700 });
// Remove leftovers from any previous daemon run before advertising ourselves.
rmSync(cloudSocketPath, { force: true });
rmSync(cloudPidFile, { force: true });
// Publish our pid so clients can liveness-check this daemon.
writeFileSync(cloudPidFile, String(process.pid));
// Unix-socket server speaking newline-delimited JSON: each request is one
// JSON object per line; partial chunks are buffered until a "\n" arrives.
const server = createServer((socket) => {
  let pending = "";
  socket.on("data", (chunk) => {
    pending += chunk;
    let nl = pending.indexOf("\n");
    while (nl >= 0) {
      const line = pending.slice(0, nl);
      pending = pending.slice(nl + 1);
      if (line) {
        dispatch(line, socket);
      }
      nl = pending.indexOf("\n");
    }
  });
  // Ignore per-client errors — the daemon must outlive any one client.
  socket.on("error", () => { });
});
// Accept connections on the unix domain socket.
server.listen(cloudSocketPath);
// Termination signals trigger a graceful shutdown (sandbox destroy included).
process.on("SIGINT", shutdown);
process.on("SIGTERM", shutdown);
// "exit" handlers must be synchronous; cleanup only unlinks files.
process.on("exit", cleanup);
/**
 * Handle one request line: parse it, run the action, and write a single
 * JSON response line back on the socket.
 * @param {string} line - one newline-delimited JSON request
 * @param {import("node:net").Socket} socket - client connection to reply on
 */
async function dispatch(line, socket) {
  // Parse defensively: previously a malformed request threw inside this
  // async handler, producing an unhandled rejection instead of a reply.
  let cmd;
  try {
    cmd = JSON.parse(line);
  }
  catch (err) {
    socket.write(JSON.stringify({ ok: false, error: `invalid JSON: ${err.message}` }) + "\n");
    return;
  }
  const result = await run(cmd).catch((err) => ({
    ok: false,
    error: err.message,
  }));
  socket.write(JSON.stringify({ id: cmd.id, ...result }) + "\n");
  // Acknowledge first, then let the daemon exit on "destroy".
  if (cmd.action === "destroy")
    setImmediate(shutdown);
}
/**
 * Execute one daemon action and produce a { ok, data | error } result.
 * Known actions: create, exec (requires cmd.command), status, destroy.
 */
async function run(cmd) {
  switch (cmd.action) {
    case "create":
      return { ok: true, data: await cloud.create() };
    case "exec": {
      if (!cmd.command)
        return { ok: false, error: "missing command" };
      const r = await cloud.exec(cmd.command);
      // Assemble stdout / stderr / exit code into one readable blob,
      // omitting empty sections.
      const sections = [r.stdout];
      if (r.stderr)
        sections.push(`stderr:\n${r.stderr}`);
      if (r.exitCode !== 0)
        sections.push(`exit code: ${r.exitCode}`);
      return { ok: r.exitCode === 0, data: sections.filter(Boolean).join("\n") };
    }
    case "status":
      return { ok: true, data: await cloud.status() };
    case "destroy":
      return { ok: true, data: await cloud.destroy() };
    default:
      return { ok: false, error: `unknown action: ${cmd.action}` };
  }
}
// Graceful stop: tear down the remote sandbox (best effort), stop accepting
// connections, remove socket/pid files, then exit the process.
async function shutdown() {
    try {
        await cloud.destroy();
    }
    catch {
        // best effort — the sandbox may already be gone
    }
    server.close();
    cleanup();
    process.exit(0);
}
/** Remove the unix socket and pid file so clients don't see a stale daemon. */
function cleanup() {
  for (const staleFile of [cloudSocketPath, cloudPidFile]) {
    rmSync(staleFile, { force: true });
  }
}
| import { homedir } from "node:os"; | ||
| import { join } from "node:path"; | ||
// All daemon runtime artifacts live under one hidden directory in $HOME.
const dir = join(homedir(), ".next-browser");
export const cloudSocketDir = dir; // directory holding socket, pid and state files
export const cloudSocketPath = join(dir, "cloud.sock"); // unix socket the daemon listens on
export const cloudPidFile = join(dir, "cloud.pid"); // pid of the running daemon process
export const cloudStateFile = join(dir, "cloud.json"); // persisted sandbox metadata
+184
| /** | ||
| * Cloud sandbox lifecycle and operations. | ||
| * Wraps @vercel/sandbox SDK for creating/managing remote sandboxes. | ||
| */ | ||
| import { readFileSync, writeFileSync, existsSync, rmSync } from "node:fs"; | ||
| import { dirname, join, resolve } from "node:path"; | ||
| import { fileURLToPath } from "node:url"; | ||
| import { cloudStateFile } from "./cloud-paths.js"; | ||
// ESM has no built-in __dirname; derive it from this module's URL.
const __dirname = dirname(fileURLToPath(import.meta.url));
// @vercel/sandbox is loaded lazily so the main CLI still starts when the
// optional dependency isn't installed.
async function getSandboxSDK() {
  const sdk = await import("@vercel/sandbox").catch(() => null);
  if (sdk === null) {
    throw new Error("@vercel/sandbox is not installed. Run: pnpm add @vercel/sandbox");
  }
  return sdk;
}
// Persist sandbox metadata so a later daemon process can reconnect.
function saveState(state) {
  const json = JSON.stringify(state, null, 2);
  writeFileSync(cloudStateFile, json);
}
// Read persisted sandbox metadata; returns null when the file is missing
// or unparsable.
function loadState() {
  if (!existsSync(cloudStateFile)) {
    return null;
  }
  try {
    const raw = readFileSync(cloudStateFile, "utf-8");
    return JSON.parse(raw);
  }
  catch {
    return null;
  }
}
// Delete the persisted sandbox state file; no-op when it doesn't exist.
function clearState() {
    rmSync(cloudStateFile, { force: true });
}
// Live Sandbox handle, held only in this daemon process; null until
// create() succeeds (or reattaches), reset to null by destroy().
let sandbox = null;
/**
 * Create (or reconnect to) the remote sandbox.
 * Reuses the sandbox from persisted state when it is still running;
 * otherwise clears stale state and provisions a fresh sandbox.
 * @returns {Promise<string>} human-readable status message
 */
export async function create() {
    if (sandbox) {
        return `sandbox already running: ${sandbox.sandboxId}`;
    }
    loadEnv();
    // Check for existing state (previous daemon)
    const existing = loadState();
    if (existing) {
        try {
            const { Sandbox } = await getSandboxSDK();
            sandbox = await Sandbox.get({ sandboxId: existing.sandboxId });
            if (sandbox.status === "running") {
                return `reconnected to ${sandbox.sandboxId}`;
            }
        }
        catch {
            // stale state, clean up
        }
        // Reached when the old sandbox is gone or no longer running.
        clearState();
    }
    const { Sandbox } = await getSandboxSDK();
    sandbox = await Sandbox.create({
        resources: { vcpus: 4 },
        timeout: 300_000, // ms — NOTE(review): presumably the sandbox TTL; confirm against SDK
        ports: [3000],
        runtime: "node22",
    });
    const state = {
        sandboxId: sandbox.sandboxId,
        createdAt: new Date().toISOString(),
    };
    try {
        const domain = sandbox.domain(3000);
        // Normalize to a full URL; domain() may return a bare hostname.
        state.publicUrl = domain.startsWith("http") ? domain : `https://${domain}`;
    }
    catch {
        // no public URL yet
    }
    saveState(state);
    return [
        `sandbox created: ${sandbox.sandboxId}`,
        state.publicUrl ? `url: ${state.publicUrl}` : null,
    ]
        .filter(Boolean)
        .join("\n");
}
/**
 * Run a shell command inside the sandbox and collect its output.
 * @param {string} command - passed to `bash -lc`, so shell syntax works
 * @returns {Promise<{exitCode: number, stdout: string, stderr: string}>}
 * @throws when this daemon holds no live sandbox handle
 */
export async function exec(command) {
    if (!sandbox)
        throw new Error("no sandbox running — run `cloud create` first");
    const result = await sandbox.runCommand({
        cmd: "bash",
        args: ["-lc", command],
    });
    let stdout = "";
    let stderr = "";
    // Stream and demultiplex the command's output by stream name.
    for await (const log of result.logs()) {
        if (log.stream === "stdout")
            stdout += log.data;
        else
            stderr += log.data;
    }
    // NOTE(review): assumes wait() resolves when the command finishes and
    // that exitCode is populated afterwards — confirm against the SDK docs.
    await result.wait();
    return { exitCode: result.exitCode, stdout, stderr };
}
/**
 * Stop the sandbox and clear persisted state.
 * When this daemon has no live handle (e.g. after a restart), tries to
 * reattach via the saved sandboxId before stopping.
 * @returns {Promise<string>} human-readable result message
 */
export async function destroy() {
    if (!sandbox) {
        const state = loadState();
        if (state) {
            try {
                loadEnv();
                const { Sandbox } = await getSandboxSDK();
                sandbox = await Sandbox.get({ sandboxId: state.sandboxId });
            }
            catch {
                // Sandbox no longer exists (or SDK missing) — drop the record.
                clearState();
                return "no sandbox to destroy (cleared stale state)";
            }
        }
        else {
            return "no sandbox running";
        }
    }
    const id = sandbox.sandboxId;
    // blocking: true — wait until the sandbox is actually stopped.
    await sandbox.stop({ blocking: true });
    sandbox = null;
    clearState();
    return `destroyed ${id}`;
}
/**
 * Report the sandbox's current status as a human-readable string.
 * Uses the live handle when this daemon owns one; otherwise falls back to
 * the persisted state (status unknown without a daemon connection).
 */
export async function status() {
    const state = loadState();
    if (!state)
        return "no sandbox running";
    const lines = sandbox
        ? [`id: ${sandbox.sandboxId}`, `status: ${sandbox.status}`]
        : [`id: ${state.sandboxId}`, `status: unknown (daemon not running)`];
    lines.push(`created: ${state.createdAt}`);
    if (state.publicUrl)
        lines.push(`url: ${state.publicUrl}`);
    return lines.join("\n");
}
/**
 * Load .env.local for Vercel credentials.
 * Searches the package directory and common locations; the first READABLE
 * file wins. Existing process.env values are never overwritten.
 */
function loadEnv() {
    const candidates = [
        join(__dirname, "..", ".env.local"),
        join(__dirname, "..", "prototypes", "cloud", ".env.local"),
        resolve(process.cwd(), ".env.local"),
    ];
    for (const candidate of candidates) {
        // Only the read is fallible; previously the whole parse loop sat in
        // the try, so a mid-loop error could silently half-apply one file
        // and then fall through to the next candidate.
        let content;
        try {
            content = readFileSync(candidate, "utf-8");
        }
        catch {
            continue;
        }
        for (const rawLine of content.split("\n")) {
            let trimmed = rawLine.trim();
            if (!trimmed || trimmed.startsWith("#"))
                continue;
            // Tolerate dotenv-style "export KEY=value" lines.
            if (trimmed.startsWith("export "))
                trimmed = trimmed.slice(7).trim();
            const eqIdx = trimmed.indexOf("=");
            if (eqIdx === -1)
                continue;
            // Trim around "=" so `KEY = value` doesn't produce the key "KEY ".
            const key = trimmed.slice(0, eqIdx).trim();
            if (!key)
                continue;
            let value = trimmed.slice(eqIdx + 1).trim();
            // Strip one matching pair of surrounding quotes; length guard
            // prevents a lone quote character collapsing to "".
            if (value.length >= 2 &&
                ((value.startsWith('"') && value.endsWith('"')) ||
                    (value.startsWith("'") && value.endsWith("'")))) {
                value = value.slice(1, -1);
            }
            if (!process.env[key]) {
                process.env[key] = value;
            }
        }
        return;
    }
}
+15
-39
@@ -474,47 +474,23 @@ /** | ||
| /** | ||
| * Create a temporary Chrome profile directory with DevTools set to "undocked" | ||
| * so it opens in a separate window instead of docked inside the browser. | ||
| * This keeps the main browser viewport at full desktop size. | ||
| */ | ||
| function createProfileDir() { | ||
| const dir = join(tmpdir(), `next-browser-profile-${process.pid}`); | ||
| mkdirSync(join(dir, "Default"), { recursive: true }); | ||
| writeFileSync(join(dir, "Default", "Preferences"), JSON.stringify({ | ||
| devtools: { | ||
| preferences: { | ||
| currentDockState: '"undocked"', | ||
| }, | ||
| }, | ||
| })); | ||
| return dir; | ||
| } | ||
| /** | ||
| * Launch Chromium with React DevTools extension. | ||
| * Launch Chromium with the React DevTools hook pre-injected. | ||
| * | ||
| * - launchPersistentContext with a pre-configured profile that sets DevTools | ||
| * to undocked mode — DevTools opens in a separate window, not docked | ||
| * - --load-extension loads the vendored React DevTools Chrome extension | ||
| * - --auto-open-devtools-for-tabs makes the extension activate its backend | ||
| * on every tab (same as a developer manually opening DevTools) | ||
| * - waitForEvent("serviceworker") ensures the extension's background script | ||
| * is running before we navigate | ||
| * - addInitScript(installHook) injects the DevTools hook before any page JS, | ||
| * winning the race against the extension's content script | ||
| * addInitScript(installHook) installs the DevTools global hook before any | ||
| * page JS runs. React discovers the hook and registers its renderers, | ||
| * enabling tree inspection and suspense tracking without a browser extension. | ||
| * | ||
| * Set NEXT_BROWSER_HEADLESS=1 for cloud/CI environments (no display). | ||
| */ | ||
| async function launch() { | ||
| const profileDir = createProfileDir(); | ||
| profileDirPath = profileDir; | ||
| const ctx = await chromium.launchPersistentContext(profileDir, { | ||
| headless: false, | ||
| viewport: null, // let viewport follow the physical window size | ||
| args: [ | ||
| `--disable-extensions-except=${extensionPath}`, | ||
| `--load-extension=${extensionPath}`, | ||
| "--auto-open-devtools-for-tabs", | ||
| "--window-size=1440,900", | ||
| ], | ||
| const headless = !!process.env.NEXT_BROWSER_HEADLESS; | ||
| const dir = join(tmpdir(), `next-browser-profile-${process.pid}`); | ||
| mkdirSync(dir, { recursive: true }); | ||
| profileDirPath = dir; | ||
| const ctx = await chromium.launchPersistentContext(dir, { | ||
| headless, | ||
| viewport: { width: 1440, height: 900 }, | ||
| // --no-sandbox is required when Chrome runs as root (common in containers/cloud sandboxes) | ||
| args: headless ? ["--no-sandbox"] : [], | ||
| }); | ||
| await ctx.waitForEvent("serviceworker"); | ||
| await ctx.addInitScript(installHook); | ||
| return ctx; | ||
| } |
+7
-7
@@ -69,12 +69,12 @@ export async function snapshot(page) { | ||
| const out = []; | ||
| const listener = (e) => { | ||
| const p = e.data?.payload; | ||
| if (e.data?.source === "react-devtools-bridge" && p?.event === "operations") { | ||
| out.push(p.payload); | ||
| } | ||
| // Listen on the hook directly (works in both headed and headless) | ||
| const origEmit = hook.emit; | ||
| hook.emit = function (event, data) { | ||
| if (event === "operations") | ||
| out.push(Array.from(data)); | ||
| return origEmit.apply(hook, arguments); | ||
| }; | ||
| window.addEventListener("message", listener); | ||
| ri.flushInitialOperations(); | ||
| setTimeout(() => { | ||
| window.removeEventListener("message", listener); | ||
| hook.emit = origEmit; | ||
| resolve(out); | ||
@@ -81,0 +81,0 @@ }, 50); |
+1
-1
| { | ||
| "name": "@vercel/next-browser", | ||
| "version": "0.1.4", | ||
| "version": "0.1.5", | ||
| "description": "Headed Playwright browser with React DevTools pre-loaded", | ||
@@ -5,0 +5,0 @@ "license": "MIT", |
Network access
Supply chain risk: This module accesses the network.
Found 1 instance in 1 package
Uses eval
Supply chain risk: Package uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 3 instances in 1 package
Filesystem access
Supply chain risk: Accesses the file system and could potentially read sensitive data.
Found 1 instance in 1 package
High entropy strings
Supply chain risk: Contains high-entropy strings. This could be a sign of encrypted data, leaked secrets, or obfuscated code.
Found 1 instance in 1 package
Long strings
Supply chain risk: Contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
URL strings
Supply chain risk: Package contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
Network access
Supply chain risk: This module accesses the network.
Found 1 instance in 1 package
Uses eval
Supply chain risk: Package uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system and could potentially read sensitive data.
Found 1 instance in 1 package
High entropy strings
Supply chain risk: Contains high-entropy strings. This could be a sign of encrypted data, leaked secrets, or obfuscated code.
Found 1 instance in 1 package
Long strings
Supply chain risk: Contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
URL strings
Supply chain risk: Package contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
3010825
0.32%78
5.41%4855
7.01%111
1.83%