@versatly/workgraph
Advanced tools
Sorry, the diff of this file is too big to display
| import { | ||
| __export, | ||
| allClaims, | ||
| append, | ||
| checkpoint, | ||
| create, | ||
| createRun, | ||
| createThread, | ||
| historyOf, | ||
| keywordSearch, | ||
| list, | ||
| listTypes, | ||
| loadPolicyRegistry, | ||
| loadRegistry, | ||
| read, | ||
| recent, | ||
| refreshWikiLinkGraphIndex, | ||
| saveRegistry, | ||
| update | ||
| } from "./chunk-65ZMX2WM.js"; | ||
// src/workspace.ts
// Public surface of the workspace module. __export registers live getter
// bindings on the namespace object so consumers always see current values.
var workspace_exports = {};
__export(workspace_exports, {
  initWorkspace: () => initWorkspace,
  isWorkgraphWorkspace: () => isWorkgraphWorkspace,
  workspaceConfigPath: () => workspaceConfigPath
});
import fs2 from "fs";
import path2 from "path";
// src/bases.ts
// Public surface of the bases module: primitive-registry manifest sync and
// generation of .base view files from that manifest.
var bases_exports = {};
__export(bases_exports, {
  generateBasesFromPrimitiveRegistry: () => generateBasesFromPrimitiveRegistry,
  primitiveRegistryManifestPath: () => primitiveRegistryManifestPath,
  readPrimitiveRegistryManifest: () => readPrimitiveRegistryManifest,
  syncPrimitiveRegistryManifest: () => syncPrimitiveRegistryManifest
});
import fs from "fs";
import path from "path";
import YAML from "yaml";
// Workspace-relative location of the generated primitive-registry manifest.
var REGISTRY_MANIFEST_FILE = ".workgraph/primitive-registry.yaml";
// Default workspace-relative output directory for generated .base files.
var DEFAULT_BASES_DIR = ".workgraph/bases";
/**
 * Absolute path of the primitive-registry manifest inside a workspace.
 * @param {string} workspacePath - Workspace root directory.
 * @returns {string} Joined path to `.workgraph/primitive-registry.yaml`.
 */
function primitiveRegistryManifestPath(workspacePath) {
  const relativeManifest = REGISTRY_MANIFEST_FILE;
  return path.join(workspacePath, relativeManifest);
}
/**
 * Read and parse the workspace's primitive-registry manifest.
 * @param {string} workspacePath - Workspace root directory.
 * @returns {object} Parsed YAML manifest document.
 * @throws {Error} If the manifest file does not exist.
 */
function readPrimitiveRegistryManifest(workspacePath) {
  const manifestPath = primitiveRegistryManifestPath(workspacePath);
  if (fs.existsSync(manifestPath)) {
    const raw = fs.readFileSync(manifestPath, "utf-8");
    return YAML.parse(raw);
  }
  throw new Error(`Primitive registry manifest not found: ${manifestPath}`);
}
/**
 * Regenerate the primitive-registry manifest from the live type registry and
 * write it to disk as YAML.
 * @param {string} workspacePath - Workspace root directory.
 * @returns {object} The manifest object that was written.
 */
function syncPrimitiveRegistryManifest(workspacePath) {
  const registry = loadRegistry(workspacePath);
  const primitives = [];
  for (const primitive of Object.values(registry.types)) {
    // Field entries carry optional keys only when set, keeping the YAML terse.
    const fields = Object.entries(primitive.fields).map(([name, field]) => {
      const entry = { name, type: field.type };
      if (field.required) entry.required = true;
      if (field.description) entry.description = field.description;
      return entry;
    });
    primitives.push({
      name: primitive.name,
      directory: primitive.directory,
      canonical: primitive.builtIn,
      builtIn: primitive.builtIn,
      fields
    });
  }
  primitives.sort((a, b) => a.name.localeCompare(b.name));
  const manifest = {
    version: 1,
    generatedAt: new Date().toISOString(),
    primitives
  };
  const manifestPath = primitiveRegistryManifestPath(workspacePath);
  ensureDirectory(path.dirname(manifestPath));
  fs.writeFileSync(manifestPath, YAML.stringify(manifest), "utf-8");
  return manifest;
}
/**
 * Generate one `.base` file per primitive listed in the registry manifest.
 * @param {string} workspacePath - Workspace root directory.
 * @param {object} [options]
 * @param {boolean} [options.includeNonCanonical] - Also emit non-canonical primitives when strictly true.
 * @param {string} [options.outputDirectory] - Workspace-relative output dir (defaults to `.workgraph/bases`).
 * @returns {{outputDirectory: string, generated: string[]}} Workspace-relative paths, forward-slashed.
 */
function generateBasesFromPrimitiveRegistry(workspacePath, options = {}) {
  const manifest = readPrimitiveRegistryManifest(workspacePath);
  const includeNonCanonical = options.includeNonCanonical === true;
  const outputDirectory = path.join(workspacePath, options.outputDirectory ?? DEFAULT_BASES_DIR);
  ensureDirectory(outputDirectory);
  const selected = manifest.primitives.filter(
    (primitive) => includeNonCanonical || primitive.canonical
  );
  const generated = [];
  for (const primitive of selected) {
    const absBasePath = path.join(outputDirectory, `${primitive.name}.base`);
    fs.writeFileSync(absBasePath, renderBaseFile(primitive), "utf-8");
    // Normalize separators so the result is stable across platforms.
    generated.push(path.relative(workspacePath, absBasePath).replace(/\\/g, "/"));
  }
  generated.sort();
  return {
    outputDirectory: path.relative(workspacePath, outputDirectory).replace(/\\/g, "/"),
    generated
  };
}
/**
 * Render a single `.base` YAML document for one primitive: a folder source
 * plus a default table view listing `file.name` and the primitive's fields.
 * @param {{name: string, directory: string, fields: Array<{name: string}>}} primitive
 * @returns {string} YAML text of the base document.
 */
function renderBaseFile(primitive) {
  // Deduplicate field names while preserving first-seen order.
  const seen = new Set();
  const columnFields = [];
  for (const field of primitive.fields) {
    if (!seen.has(field.name)) {
      seen.add(field.name);
      columnFields.push(field.name);
    }
  }
  const baseDoc = {
    id: primitive.name,
    title: `${titleCase(primitive.name)} Base`,
    source: {
      type: "folder",
      path: primitive.directory,
      extension: "md"
    },
    views: [
      {
        id: "table",
        type: "table",
        name: "All",
        columns: ["file.name", ...columnFields]
      }
    ]
  };
  return YAML.stringify(baseDoc);
}
/**
 * Ensure a directory (and any missing parents) exists.
 * Calls mkdirSync with { recursive: true } unconditionally: it is a no-op when
 * the directory already exists, which removes the check-then-create TOCTOU
 * race of the previous existsSync guard.
 * @param {string} dirPath - Directory to create if absent.
 */
function ensureDirectory(dirPath) {
  fs.mkdirSync(dirPath, { recursive: true });
}
/**
 * Convert a hyphen/underscore-delimited identifier into Title Case words,
 * e.g. "primitive-registry" -> "Primitive Registry". Empty segments are dropped.
 * @param {string} value
 * @returns {string}
 */
function titleCase(value) {
  const words = [];
  for (const segment of value.split(/[-_]/g)) {
    if (segment) {
      words.push(segment[0].toUpperCase() + segment.slice(1));
    }
  }
  return words.join(" ");
}
// src/workspace.ts
// Marker file whose presence identifies a directory as a workgraph workspace.
var WORKGRAPH_CONFIG_FILE = ".workgraph.json";
/**
 * Path of the workspace marker/config file under a workspace root.
 * @param {string} workspacePath - Workspace root directory.
 * @returns {string}
 */
function workspaceConfigPath(workspacePath) {
  const markerFile = WORKGRAPH_CONFIG_FILE;
  return path2.join(workspacePath, markerFile);
}
/**
 * Whether the given directory is an initialized workgraph workspace,
 * determined by the presence of its `.workgraph.json` marker file.
 * @param {string} workspacePath - Directory to test.
 * @returns {boolean}
 */
function isWorkgraphWorkspace(workspacePath) {
  const configPath = workspaceConfigPath(workspacePath);
  return fs2.existsSync(configPath);
}
/**
 * Initialize a new workgraph workspace at targetPath: seed the type registry,
 * sync the primitive-registry manifest, create type directories, write the
 * workspace config, optionally write a README, generate .base files, load the
 * policy registry, and refresh the wiki-link graph index.
 * @param {string} targetPath - Directory to initialize (resolved to absolute).
 * @param {object} [options]
 * @param {string} [options.name] - Workspace name; defaults to directory basename.
 * @param {boolean} [options.createTypeDirs] - Create per-type dirs unless explicitly false.
 * @param {boolean} [options.createReadme] - Write README.md unless explicitly false.
 * @param {boolean} [options.createBases] - Generate .base files unless explicitly false.
 * @returns {object} Summary: paths, config, created dirs, seeded types, generated bases.
 * @throws {Error} If a workspace config already exists at targetPath.
 */
function initWorkspace(targetPath, options = {}) {
  const resolvedPath = path2.resolve(targetPath);
  const configPath = workspaceConfigPath(resolvedPath);
  // Refuse to double-initialize: the marker file is the source of truth.
  if (fs2.existsSync(configPath)) {
    throw new Error(`Workgraph workspace already initialized at ${resolvedPath}`);
  }
  // Track directories we actually created so callers can report/undo.
  const createdDirectories = [];
  ensureDir(resolvedPath, createdDirectories);
  ensureDir(path2.join(resolvedPath, ".workgraph"), createdDirectories);
  // Load (seeding defaults if absent) and persist the type registry, then
  // mirror it into the primitive-registry manifest.
  const registry = loadRegistry(resolvedPath);
  saveRegistry(resolvedPath, registry);
  syncPrimitiveRegistryManifest(resolvedPath);
  if (options.createTypeDirs !== false) {
    const types = listTypes(resolvedPath);
    for (const typeDef of types) {
      ensureDir(path2.join(resolvedPath, typeDef.directory), createdDirectories);
    }
  }
  const now = new Date().toISOString();
  const config = {
    name: options.name ?? path2.basename(resolvedPath),
    version: "1.0.0",
    mode: "workgraph",
    createdAt: now,
    updatedAt: now
  };
  fs2.writeFileSync(configPath, JSON.stringify(config, null, 2) + "\n", "utf-8");
  if (options.createReadme !== false) {
    writeReadmeIfMissing(resolvedPath, config.name);
  }
  // Bases generation is opt-out; when skipped, report an empty generated list.
  const bases = options.createBases === false ? { generated: [] } : generateBasesFromPrimitiveRegistry(resolvedPath);
  loadPolicyRegistry(resolvedPath);
  refreshWikiLinkGraphIndex(resolvedPath);
  return {
    workspacePath: resolvedPath,
    configPath,
    config,
    createdDirectories,
    seededTypes: listTypes(resolvedPath).map((t) => t.name),
    generatedBases: bases.generated,
    primitiveRegistryManifestPath: ".workgraph/primitive-registry.yaml"
  };
}
/**
 * Create dirPath (with parents) if it does not exist, recording it in
 * createdDirectories only when this call actually created it.
 * @param {string} dirPath - Directory to ensure.
 * @param {string[]} createdDirectories - Mutated: receives dirPath on creation.
 */
function ensureDir(dirPath, createdDirectories) {
  if (!fs2.existsSync(dirPath)) {
    fs2.mkdirSync(dirPath, { recursive: true });
    createdDirectories.push(dirPath);
  }
}
/**
 * Write a starter README.md into the workspace, unless one already exists
 * (never overwrites user content).
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} name - Workspace name used as the README title.
 */
function writeReadmeIfMissing(workspacePath, name) {
  const readmePath = path2.join(workspacePath, "README.md");
  if (fs2.existsSync(readmePath)) return;
  const content = `# ${name}
Agent-first workgraph workspace for multi-agent coordination.
## Quickstart
\`\`\`bash
workgraph thread list --json
workgraph thread next --claim --actor agent-a --json
workgraph ledger show --count 20 --json
\`\`\`
`;
  fs2.writeFileSync(readmePath, content, "utf-8");
}
// src/command-center.ts
// Public surface of the command-center module: a single generator that renders
// a markdown dashboard of threads, claims, and recent ledger activity.
var command_center_exports = {};
__export(command_center_exports, {
  generateCommandCenter: () => generateCommandCenter
});
import fs3 from "fs";
import path3 from "path";
/**
 * Generate the "Command Center" markdown dashboard for a workspace and write
 * it inside the workspace, appending a ledger event describing the write.
 * @param {string} workspacePath - Workspace root directory.
 * @param {object} [options]
 * @param {string} [options.actor] - Ledger actor; defaults to "system".
 * @param {number} [options.recentCount] - Ledger events to include; default 15.
 * @param {string} [options.outputPath] - Workspace-relative output file; default "Command Center.md".
 * @returns {{outputPath: string, stats: object, content: string}}
 * @throws {Error} If outputPath escapes the workspace (via resolvePathWithinWorkspace).
 */
function generateCommandCenter(workspacePath, options = {}) {
  const actor = options.actor ?? "system";
  const recentCount = options.recentCount ?? 15;
  const relOutputPath = options.outputPath ?? "Command Center.md";
  // Reject paths that resolve outside the workspace root.
  const absOutputPath = resolvePathWithinWorkspace(workspacePath, relOutputPath);
  const normalizedOutputPath = path3.relative(workspacePath, absOutputPath).replace(/\\/g, "/");
  // Partition threads by status for the dashboard sections.
  const allThreads = list(workspacePath, "thread");
  const openThreads = allThreads.filter((thread) => thread.fields.status === "open");
  const activeThreads = allThreads.filter((thread) => thread.fields.status === "active");
  const blockedThreads = allThreads.filter((thread) => thread.fields.status === "blocked");
  const doneThreads = allThreads.filter((thread) => thread.fields.status === "done");
  const claims = allClaims(workspacePath);
  const recentEvents = recent(workspacePath, recentCount);
  const content = renderCommandCenter({
    generatedAt: new Date().toISOString(),
    openThreads,
    activeThreads,
    blockedThreads,
    doneThreads,
    // claims is a Map of target -> owner; flatten for rendering.
    claims: [...claims.entries()].map(([target, owner]) => ({ target, owner })),
    recentEvents
  });
  const parentDir = path3.dirname(absOutputPath);
  if (!fs3.existsSync(parentDir)) fs3.mkdirSync(parentDir, { recursive: true });
  // Record whether this is a create or an update BEFORE writing, so the
  // ledger op reflects the pre-write state.
  const existed = fs3.existsSync(absOutputPath);
  fs3.writeFileSync(absOutputPath, content, "utf-8");
  append(
    workspacePath,
    actor,
    existed ? "update" : "create",
    normalizedOutputPath,
    "command-center",
    {
      generated: true,
      open_threads: openThreads.length,
      active_claims: claims.size,
      recent_events: recentEvents.length
    }
  );
  return {
    outputPath: normalizedOutputPath,
    stats: {
      totalThreads: allThreads.length,
      openThreads: openThreads.length,
      activeThreads: activeThreads.length,
      blockedThreads: blockedThreads.length,
      doneThreads: doneThreads.length,
      activeClaims: claims.size,
      recentEvents: recentEvents.length
    },
    content
  };
}
/**
 * Resolve outputPath against the workspace root and verify the result stays
 * inside the workspace (guards against `../` path traversal).
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} outputPath - Candidate relative (or absolute) output path.
 * @returns {string} Absolute, validated path.
 * @throws {Error} When the resolved path escapes the workspace.
 */
function resolvePathWithinWorkspace(workspacePath, outputPath) {
  const base = path3.resolve(workspacePath);
  const resolved = path3.resolve(base, outputPath);
  const withinBase = resolved === base || resolved.startsWith(base + path3.sep);
  if (!withinBase) {
    throw new Error(`Invalid command-center output path: ${outputPath}`);
  }
  return resolved;
}
/**
 * Render the Command Center markdown from pre-partitioned thread/claim/event
 * data. Pure string builder: no I/O.
 * @param {object} input - generatedAt plus thread lists, claims, recentEvents.
 * @returns {string} Complete markdown document.
 */
function renderCommandCenter(input) {
  const lines = [];
  lines.push("# Workgraph Command Center", "", `Generated: ${input.generatedAt}`, "");
  lines.push(
    "## Thread Status",
    "",
    `- Open: ${input.openThreads.length}`,
    `- Active: ${input.activeThreads.length}`,
    `- Blocked: ${input.blockedThreads.length}`,
    `- Done: ${input.doneThreads.length}`,
    ""
  );
  // Open threads as a markdown table; placeholder row when empty.
  lines.push("## Open Threads", "", "| Priority | Title | Path |", "|---|---|---|");
  if (input.openThreads.length > 0) {
    for (const thread of input.openThreads) {
      const priority = String(thread.fields.priority ?? "medium");
      const title = String(thread.fields.title ?? "Untitled");
      lines.push(`| ${priority} | ${title} | \`${thread.path}\` |`);
    }
  } else {
    lines.push("| - | None | - |");
  }
  lines.push("");
  lines.push("## Active Claims", "");
  if (input.claims.length > 0) {
    for (const claim of input.claims) {
      lines.push(`- ${claim.owner} -> \`${claim.target}\``);
    }
  } else {
    lines.push("- None");
  }
  lines.push("");
  lines.push("## Blocked Threads", "");
  if (input.blockedThreads.length > 0) {
    for (const thread of input.blockedThreads) {
      const deps = Array.isArray(thread.fields.deps) ? thread.fields.deps.join(", ") : "";
      const suffix = deps ? ` blocked by: ${deps}` : "";
      lines.push(`- ${String(thread.fields.title ?? thread.path)} (\`${thread.path}\`)${suffix}`);
    }
  } else {
    lines.push("- None");
  }
  lines.push("");
  lines.push("## Recent Ledger Activity", "");
  if (input.recentEvents.length > 0) {
    for (const event of input.recentEvents) {
      lines.push(`- ${event.ts} ${event.op} ${event.actor} -> \`${event.target}\``);
    }
  } else {
    lines.push("- No activity");
  }
  lines.push("");
  return lines.join("\n");
}
// src/skill.ts
// Public surface of the skill module: CRUD plus the propose/promote lifecycle
// for skill primitives stored as skills/<slug>/SKILL.md bundles.
var skill_exports = {};
__export(skill_exports, {
  listSkills: () => listSkills,
  loadSkill: () => loadSkill,
  promoteSkill: () => promoteSkill,
  proposeSkill: () => proposeSkill,
  skillDiff: () => skillDiff,
  skillHistory: () => skillHistory,
  writeSkill: () => writeSkill
});
import fs4 from "fs";
import path4 from "path";
/**
 * Create a new skill primitive or update an existing one (matched by slug).
 * New skills are written in the bundle layout (skills/<slug>/SKILL.md); an
 * existing skill is updated in place at whichever path it was found.
 * A skill-manifest.json is (re)written alongside in both cases.
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} title - Skill title; slugged to locate/name the bundle.
 * @param {string} body - Markdown body for the skill document.
 * @param {string} actor - Acting agent recorded in the ledger.
 * @param {object} [options] - owner/version/status/distribution/tailscalePath/
 *   reviewers/dependsOn/tags overrides, plus expectedUpdatedAt for optimistic locking.
 * @returns {object} The created or updated skill primitive.
 * @throws {Error} On a concurrent-update conflict (expectedUpdatedAt mismatch).
 */
function writeSkill(workspacePath, title, body, actor, options = {}) {
  const slug = skillSlug(title);
  const bundleSkillPath = folderSkillPath(slug);
  const legacyPath = legacySkillPath(slug);
  // Prefer the bundle layout; fall back to the legacy flat skills/<slug>.md.
  const existing = read(workspacePath, bundleSkillPath) ?? read(workspacePath, legacyPath);
  const status = options.status ?? existing?.fields.status ?? "draft";
  // Optimistic concurrency: caller may pin the `updated` timestamp it last read.
  if (existing && options.expectedUpdatedAt) {
    const currentUpdatedAt = String(existing.fields.updated ?? "");
    if (currentUpdatedAt !== options.expectedUpdatedAt) {
      throw new Error(`Concurrent skill update detected for ${existing.path}. Expected updated="${options.expectedUpdatedAt}" but found "${currentUpdatedAt}".`);
    }
  }
  if (!existing) {
    // Scaffold the bundle directories before creating the primitive inside them.
    ensureSkillBundleScaffold(workspacePath, slug);
    const created = create(workspacePath, "skill", {
      title,
      owner: options.owner ?? actor,
      version: options.version ?? "0.1.0",
      status,
      distribution: options.distribution ?? "tailscale-shared-vault",
      tailscale_path: options.tailscalePath,
      reviewers: options.reviewers ?? [],
      depends_on: options.dependsOn ?? [],
      tags: options.tags ?? []
    }, body, actor, {
      pathOverride: bundleSkillPath
    });
    writeSkillManifest(workspacePath, slug, created, actor);
    return created;
  }
  // Update precedence per field: explicit option, then existing value, then default.
  const updated = update(workspacePath, existing.path, {
    title,
    owner: options.owner ?? existing.fields.owner ?? actor,
    version: options.version ?? existing.fields.version ?? "0.1.0",
    status,
    distribution: options.distribution ?? existing.fields.distribution ?? "tailscale-shared-vault",
    tailscale_path: options.tailscalePath ?? existing.fields.tailscale_path,
    reviewers: options.reviewers ?? existing.fields.reviewers ?? [],
    depends_on: options.dependsOn ?? existing.fields.depends_on ?? [],
    tags: options.tags ?? existing.fields.tags ?? []
  }, body, actor);
  writeSkillManifest(workspacePath, slug, updated, actor);
  return updated;
}
/**
 * Load a skill primitive by reference (slug, path, or path without extension),
 * trying each normalized candidate path in order.
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} skillRef - Skill slug or workspace-relative path.
 * @returns {object} The skill primitive.
 * @throws {Error} If no candidate resolves, or the target is not a skill.
 */
function loadSkill(workspacePath, skillRef) {
  let skill = null;
  for (const candidate of normalizeSkillRefCandidates(skillRef)) {
    const entry = read(workspacePath, candidate);
    if (entry !== null) {
      skill = entry;
      break;
    }
  }
  if (!skill) throw new Error(`Skill not found: ${skillRef}`);
  if (skill.type !== "skill") throw new Error(`Target is not a skill primitive: ${skillRef}`);
  return skill;
}
/**
 * List skill primitives, optionally filtered by status and/or an
 * updated-since timestamp (ISO string; ignored when unparsable).
 * @param {string} workspacePath - Workspace root directory.
 * @param {{status?: string, updatedSince?: string}} [options]
 * @returns {object[]} Matching skill primitives.
 */
function listSkills(workspacePath, options = {}) {
  let skills = list(workspacePath, "skill");
  const { status, updatedSince } = options;
  if (status) {
    skills = skills.filter((skill) => skill.fields.status === status);
  }
  if (updatedSince) {
    const threshold = Date.parse(updatedSince);
    // Silently skip the time filter when the threshold does not parse.
    if (Number.isFinite(threshold)) {
      skills = skills.filter((skill) => {
        const updatedAt = Date.parse(String(skill.fields.updated ?? ""));
        return Number.isFinite(updatedAt) && updatedAt >= threshold;
      });
    }
  }
  return skills;
}
/**
 * Move a skill into "proposed" status, optionally creating a review thread
 * that references the skill, and refresh the skill's bundle manifest.
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} skillRef - Skill slug or path (resolved via loadSkill).
 * @param {string} actor - Acting agent recorded in the ledger.
 * @param {object} [options] - proposalThread, createThreadIfMissing, space, reviewers.
 * @returns {object} The updated skill primitive.
 */
function proposeSkill(workspacePath, skillRef, actor, options = {}) {
  const skill = loadSkill(workspacePath, skillRef);
  const slug = skillSlug(String(skill.fields.title ?? skillRef));
  let proposalThread = options.proposalThread;
  // Create a review thread unless the caller supplied one or opted out.
  if (!proposalThread && options.createThreadIfMissing !== false) {
    const createdThread = createThread(
      workspacePath,
      `Review skill: ${String(skill.fields.title)}`,
      `Review and approve skill ${skill.path} for activation.`,
      actor,
      {
        priority: "medium",
        space: options.space,
        context_refs: [skill.path]
      }
    );
    proposalThread = createdThread.path;
  }
  const updated = update(workspacePath, skill.path, {
    status: "proposed",
    // Keep a previously recorded proposal thread when none was determined here.
    proposal_thread: proposalThread ?? skill.fields.proposal_thread,
    proposed_at: new Date().toISOString(),
    reviewers: options.reviewers ?? skill.fields.reviewers ?? []
  }, void 0, actor);
  writeSkillManifest(workspacePath, slug, updated, actor);
  return updated;
}
/**
 * Ledger history for a skill, optionally limited to the most recent entries.
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} skillRef - Skill slug or path (resolved via loadSkill).
 * @param {{limit?: number}} [options] - Positive limit keeps only the tail.
 * @returns {object[]} History entries, oldest first.
 */
function skillHistory(workspacePath, skillRef, options = {}) {
  const skill = loadSkill(workspacePath, skillRef);
  const entries = historyOf(workspacePath, skill.path);
  const limit = options.limit;
  return limit && limit > 0 ? entries.slice(-limit) : entries;
}
/**
 * Summarize the most recent change to a skill: timestamps of the latest and
 * previous create/update ledger entries plus the field names that changed.
 * For a bare "create" entry with no `changed` list, all current fields count.
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} skillRef - Skill slug or path (resolved via loadSkill).
 * @returns {{path: string, latestEntryTs: ?string, previousEntryTs: ?string, changedFields: string[]}}
 */
function skillDiff(workspacePath, skillRef) {
  const skill = loadSkill(workspacePath, skillRef);
  const entries = historyOf(workspacePath, skill.path).filter(
    (entry) => entry.op === "create" || entry.op === "update"
  );
  const latest = entries.at(-1) ?? null;
  const previous = entries.at(-2) ?? null;
  let changedFields = [];
  if (Array.isArray(latest?.data?.changed)) {
    changedFields = latest.data.changed.map((value) => String(value));
  } else if (latest?.op === "create") {
    changedFields = Object.keys(skill.fields);
  }
  return {
    path: skill.path,
    latestEntryTs: latest?.ts ?? null,
    previousEntryTs: previous?.ts ?? null,
    changedFields
  };
}
/**
 * Promote a skill to "active", bumping its version (explicit option or a
 * patch bump of the current version) and refreshing the bundle manifest.
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} skillRef - Skill slug or path (resolved via loadSkill).
 * @param {string} actor - Acting agent recorded in the ledger.
 * @param {{version?: string}} [options] - Explicit target version, if any.
 * @returns {object} The updated skill primitive.
 */
function promoteSkill(workspacePath, skillRef, actor, options = {}) {
  const skill = loadSkill(workspacePath, skillRef);
  const slug = skillSlug(String(skill.fields.title ?? skillRef));
  const currentVersion = String(skill.fields.version ?? "0.1.0");
  const nextVersion = options.version ?? bumpPatchVersion(currentVersion);
  const updated = update(workspacePath, skill.path, {
    status: "active",
    version: nextVersion,
    promoted_at: new Date().toISOString()
  }, void 0, actor);
  writeSkillManifest(workspacePath, slug, updated, actor);
  return updated;
}
/**
 * Derive a filesystem-safe slug from a skill title: lowercase, non-alphanumeric
 * runs collapsed to single hyphens, edge hyphens stripped, capped at 80 chars.
 * Fix: the 80-char truncation could previously leave a trailing hyphen
 * (e.g. when the cut landed on a separator); strip it after slicing.
 * @param {string} title
 * @returns {string}
 */
function skillSlug(title) {
  return title.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-+|-+$/g, "").slice(0, 80).replace(/-+$/g, "");
}
/**
 * Expand a skill reference into candidate workspace-relative paths to try.
 * A slash-containing ref is treated as a path; a bare ref is slugged and
 * expanded into bundle and legacy layouts.
 * Fix: after normalization `normalized` ALWAYS ends in ".md", so the original
 * `endsWith("/SKILL")` branch and the trailing bare-directory return were
 * unreachable dead code; they are removed with no behavior change.
 * @param {string} skillRef - Slug, path, or path without extension.
 * @returns {string[]} Candidate paths in priority order (empty for blank input).
 */
function normalizeSkillRefCandidates(skillRef) {
  const raw = skillRef.trim();
  if (!raw) return [];
  if (raw.includes("/")) {
    const normalized = raw.endsWith(".md") ? raw : `${raw}.md`;
    if (normalized.endsWith("/SKILL.md")) return [normalized];
    const noExt = normalized.slice(0, -3);
    return [normalized, `${noExt}/SKILL.md`];
  }
  const slug = skillSlug(raw);
  return [folderSkillPath(slug), legacySkillPath(slug)];
}
/**
 * Increment the patch component of a strict "major.minor.patch" version.
 * Anything that does not match that shape resets to "0.1.0".
 * @param {string} version
 * @returns {string}
 */
function bumpPatchVersion(version) {
  const match = /^(\d+)\.(\d+)\.(\d+)$/.exec(version);
  if (match === null) return "0.1.0";
  const [major, minor, patch] = match.slice(1).map((part) => Number.parseInt(part, 10));
  return `${major}.${minor}.${patch + 1}`;
}
/** Bundle-layout path of a skill document: skills/<slug>/SKILL.md. */
function folderSkillPath(slug) {
  return ["skills", slug, "SKILL.md"].join("/");
}
/** Legacy flat-layout path of a skill document: skills/<slug>.md. */
function legacySkillPath(slug) {
  return "skills/" + slug + ".md";
}
/**
 * Create the bundle directory layout for a skill: skills/<slug>/ plus its
 * scripts/, examples/, tests/, and assets/ subdirectories.
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} slug - Skill slug naming the bundle directory.
 */
function ensureSkillBundleScaffold(workspacePath, slug) {
  const skillRoot = path4.join(workspacePath, "skills", slug);
  const subdirs = ["scripts", "examples", "tests", "assets"];
  const targets = [skillRoot, ...subdirs.map((name) => path4.join(skillRoot, name))];
  for (const dir of targets) {
    fs4.mkdirSync(dir, { recursive: true });
  }
}
/**
 * Write (or overwrite) the skill bundle's skill-manifest.json, derived from
 * the primitive's current fields.
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} slug - Skill slug (bundle directory name).
 * @param {object} skill - Skill primitive ({ path, fields }).
 * @param {string} actor - Fallback owner when the skill records none.
 */
function writeSkillManifest(workspacePath, slug, skill, actor) {
  const manifestPath = path4.join(workspacePath, "skills", slug, "skill-manifest.json");
  fs4.mkdirSync(path4.dirname(manifestPath), { recursive: true });
  const fields = skill.fields;
  const dependsOn = Array.isArray(fields.depends_on) ? fields.depends_on.map((value) => String(value)) : [];
  const manifest = {
    version: 1,
    slug,
    title: String(fields.title ?? slug),
    primitivePath: skill.path,
    owner: String(fields.owner ?? actor),
    skillVersion: String(fields.version ?? "0.1.0"),
    status: String(fields.status ?? "draft"),
    dependsOn,
    // Fixed bundle layout the manifest advertises to consumers.
    components: {
      skillDoc: "SKILL.md",
      scriptsDir: "scripts/",
      examplesDir: "examples/",
      testsDir: "tests/",
      assetsDir: "assets/"
    },
    updatedAt: new Date().toISOString()
  };
  fs4.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2) + "\n", "utf-8");
}
// src/board.ts
// Public surface of the board module: Obsidian-Kanban-style markdown board
// generation from thread primitives.
var board_exports = {};
__export(board_exports, {
  generateKanbanBoard: () => generateKanbanBoard,
  syncKanbanBoard: () => syncKanbanBoard
});
import fs5 from "fs";
import path5 from "path";
/**
 * Generate a Kanban-plugin markdown board from the workspace's threads and
 * write it inside the workspace.
 * @param {string} workspacePath - Workspace root directory.
 * @param {object} [options]
 * @param {boolean} [options.includeCancelled] - Add a Cancelled lane when strictly true.
 * @param {string} [options.outputPath] - Workspace-relative output; default "ops/Workgraph Board.md".
 * @returns {{outputPath: string, generatedAt: string, counts: object, content: string}}
 * @throws {Error} If outputPath escapes the workspace.
 */
function generateKanbanBoard(workspacePath, options = {}) {
  const threads = list(workspacePath, "thread");
  const grouped = groupThreads(threads);
  const includeCancelled = options.includeCancelled === true;
  // checkChar is the markdown checkbox state: done/cancelled render checked.
  const lanes = [
    { title: "Backlog", items: grouped.open, checkChar: " " },
    { title: "In Progress", items: grouped.active, checkChar: " " },
    { title: "Blocked", items: grouped.blocked, checkChar: " " },
    { title: "Done", items: grouped.done, checkChar: "x" }
  ];
  if (includeCancelled) {
    lanes.push({ title: "Cancelled", items: grouped.cancelled, checkChar: "x" });
  }
  const content = renderKanbanMarkdown(lanes);
  const relOutputPath = options.outputPath ?? "ops/Workgraph Board.md";
  const absOutputPath = resolvePathWithinWorkspace2(workspacePath, relOutputPath);
  const parentDir = path5.dirname(absOutputPath);
  if (!fs5.existsSync(parentDir)) fs5.mkdirSync(parentDir, { recursive: true });
  fs5.writeFileSync(absOutputPath, content, "utf-8");
  return {
    outputPath: path5.relative(workspacePath, absOutputPath).replace(/\\/g, "/"),
    generatedAt: new Date().toISOString(),
    counts: {
      backlog: grouped.open.length,
      inProgress: grouped.active.length,
      blocked: grouped.blocked.length,
      done: grouped.done.length,
      cancelled: grouped.cancelled.length
    },
    content
  };
}
/**
 * Regenerate the Kanban board; "syncing" is simply a fresh generation from
 * the current thread set. Kept as a separate name for API symmetry.
 * @param {string} workspacePath - Workspace root directory.
 * @param {object} [options] - Same options as generateKanbanBoard.
 * @returns {object} Result of generateKanbanBoard.
 */
function syncKanbanBoard(workspacePath, options = {}) {
  const result = generateKanbanBoard(workspacePath, options);
  return result;
}
/**
 * Partition threads into status lanes and sort each lane by priority
 * (urgent > high > medium > low > unknown), then by title.
 * Unknown or missing statuses fall into the "open" lane.
 * @param {Array<{fields: object}>} threads
 * @returns {{open: [], active: [], blocked: [], done: [], cancelled: []}}
 */
function groupThreads(threads) {
  const groups = {
    open: [],
    active: [],
    blocked: [],
    done: [],
    cancelled: []
  };
  const knownLanes = ["active", "blocked", "done", "cancelled"];
  for (const thread of threads) {
    const status = String(thread.fields.status ?? "open");
    const lane = knownLanes.includes(status) ? status : "open";
    groups[lane].push(thread);
  }
  const priorityRank = { urgent: 0, high: 1, medium: 2, low: 3 };
  const rankOf = (value) => priorityRank[String(value ?? "medium")] ?? 4;
  const byPriority = (a, b) => rankOf(a.fields.priority) - rankOf(b.fields.priority) || String(a.fields.title).localeCompare(String(b.fields.title));
  for (const lane of Object.values(groups)) {
    lane.sort(byPriority);
  }
  return groups;
}
/**
 * Render lanes into Obsidian Kanban-plugin markdown: YAML frontmatter,
 * one `##` section per lane with checkbox items, and the trailing
 * `%% kanban:settings` block. Pure string builder: no I/O.
 * @param {Array<{title: string, checkChar: string, items: Array}>} lanes
 * @returns {string}
 */
function renderKanbanMarkdown(lanes) {
  const settings = {
    "kanban-plugin": "board"
  };
  const out = ["---", "kanban-plugin: board", "---", ""];
  for (const lane of lanes) {
    out.push(`## ${lane.title}`, "");
    for (const thread of lane.items) {
      const title = String(thread.fields.title ?? thread.path);
      const priority = String(thread.fields.priority ?? "medium");
      out.push(`- [${lane.checkChar}] [[${thread.path}|${title}]] (#${priority})`);
    }
    // Three blank lines separate lanes, matching the plugin's layout.
    out.push("", "", "");
  }
  out.push("%% kanban:settings", "```", JSON.stringify(settings), "```", "%%", "");
  return out.join("\n");
}
/**
 * Resolve outputPath against the workspace root, rejecting any result that
 * escapes the workspace (path-traversal guard for board output files).
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} outputPath - Candidate output path.
 * @returns {string} Absolute, validated path.
 * @throws {Error} When the resolved path is outside the workspace.
 */
function resolvePathWithinWorkspace2(workspacePath, outputPath) {
  const base = path5.resolve(workspacePath);
  const resolved = path5.resolve(base, outputPath);
  const withinBase = resolved === base || resolved.startsWith(base + path5.sep);
  if (!withinBase) {
    throw new Error(`Invalid board output path: ${outputPath}`);
  }
  return resolved;
}
// src/onboard.ts
// Public surface of the onboarding module: one-shot workspace onboarding and
// onboarding-status transitions.
var onboard_exports = {};
__export(onboard_exports, {
  onboardWorkspace: () => onboardWorkspace,
  updateOnboardingStatus: () => updateOnboardingStatus
});
/**
 * Run the full onboarding flow for an actor: create space primitives,
 * optionally seed demo threads, generate a board and command center,
 * write a checkpoint, and record everything in an onboarding primitive.
 * @param {string} workspacePath - Workspace root directory.
 * @param {object} options
 * @param {string} options.actor - Onboarding agent; recorded everywhere.
 * @param {string[]} [options.spaces] - Space names; defaults to platform/product/operations.
 * @param {boolean} [options.createDemoThreads] - Seed starter threads unless explicitly false.
 * @returns {object} Paths of everything created during onboarding.
 */
function onboardWorkspace(workspacePath, options) {
  const spaces = options.spaces && options.spaces.length > 0 ? options.spaces : ["platform", "product", "operations"];
  const spacesCreated = [];
  for (const space of spaces) {
    const title = titleCase2(space);
    const created = create(
      workspacePath,
      "space",
      {
        title,
        description: `${title} workspace lane`,
        members: [options.actor],
        tags: ["onboarded"]
      },
      `# ${title}
Auto-created during onboarding.
`,
      options.actor
    );
    spacesCreated.push(created.path);
  }
  const threadsCreated = [];
  if (options.createDemoThreads !== false) {
    // Starter threads are spread across the first spaces; when fewer spaces
    // exist, they fall back to the first one.
    const templates = [
      { title: "Review workspace policy gates", goal: "Validate sensitive transitions are governed.", space: spacesCreated[0] },
      { title: "Configure board sync cadence", goal: "Set board update expectations for all agents.", space: spacesCreated[1] ?? spacesCreated[0] },
      { title: "Establish daily checkpoint routine", goal: "Agents leave actionable hand-off notes.", space: spacesCreated[2] ?? spacesCreated[0] }
    ];
    for (const template of templates) {
      const created = create(
        workspacePath,
        "thread",
        {
          title: template.title,
          goal: template.goal,
          status: "open",
          priority: "medium",
          space: template.space,
          context_refs: [template.space],
          tags: ["onboarding"]
        },
        `## Goal
${template.goal}
`,
        options.actor
      );
      threadsCreated.push(created.path);
    }
  }
  // Generate the two workspace views, then checkpoint the hand-off state.
  const boardResult = generateKanbanBoard(workspacePath, { outputPath: "ops/Onboarding Board.md" });
  const commandCenterResult = generateCommandCenter(workspacePath, {
    outputPath: "ops/Onboarding Command Center.md",
    actor: options.actor
  });
  const checkpointResult = checkpoint(
    workspacePath,
    options.actor,
    "Onboarding completed and workspace views initialized.",
    {
      next: ["Claim your next ready thread via `workgraph thread next --claim`"],
      blocked: [],
      tags: ["onboarding"]
    }
  );
  // The onboarding primitive links everything created above via wiki-links.
  const onboarding = create(
    workspacePath,
    "onboarding",
    {
      title: `Onboarding for ${options.actor}`,
      actor: options.actor,
      status: "active",
      spaces: spacesCreated,
      thread_refs: threadsCreated,
      board: boardResult.outputPath,
      command_center: commandCenterResult.outputPath,
      tags: ["onboarding"]
    },
    [
      "# Onboarding",
      "",
      `Actor: ${options.actor}`,
      "",
      "## Spaces",
      "",
      ...spacesCreated.map((space) => `- [[${space}]]`),
      "",
      "## Starter Threads",
      "",
      ...threadsCreated.map((threadRef) => `- [[${threadRef}]]`),
      "",
      `Board: [[${boardResult.outputPath}]]`,
      `Command Center: [[${commandCenterResult.outputPath}]]`,
      ""
    ].join("\n"),
    options.actor
  );
  return {
    actor: options.actor,
    spacesCreated,
    threadsCreated,
    boardPath: boardResult.outputPath,
    commandCenterPath: commandCenterResult.outputPath,
    checkpointPath: checkpointResult.path,
    onboardingPath: onboarding.path
  };
}
/**
 * Transition an onboarding primitive's status, enforcing the allowed
 * state machine in ONBOARDING_STATUS_TRANSITIONS.
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} onboardingPath - Path of the onboarding primitive.
 * @param {string} status - Target status.
 * @param {string} actor - Acting agent recorded in the ledger.
 * @returns {object} The updated onboarding primitive.
 * @throws {Error} If the primitive is missing, the wrong type, or the
 *   transition is not allowed from the current status.
 */
function updateOnboardingStatus(workspacePath, onboardingPath, status, actor) {
  const onboarding = read(workspacePath, onboardingPath);
  if (!onboarding) {
    throw new Error(`Onboarding primitive not found: ${onboardingPath}`);
  }
  if (onboarding.type !== "onboarding") {
    throw new Error(`Target is not an onboarding primitive: ${onboardingPath}`);
  }
  const current = String(onboarding.fields.status ?? "active");
  const allowed = ONBOARDING_STATUS_TRANSITIONS[current] ?? [];
  if (!allowed.includes(status)) {
    throw new Error(`Invalid onboarding transition: ${current} -> ${status}. Allowed: ${allowed.join(", ") || "none"}`);
  }
  return update(workspacePath, onboardingPath, { status }, void 0, actor);
}
// Allowed onboarding status transitions: current status -> permitted targets.
// "completed" is terminal. Declared with `var` so it is hoisted above
// updateOnboardingStatus, which references it.
var ONBOARDING_STATUS_TRANSITIONS = {
  active: ["paused", "completed"],
  paused: ["active", "completed"],
  completed: []
};
/**
 * Convert a hyphen/underscore/space-delimited name into Title Case words,
 * e.g. "shared_ops lane" -> "Shared Ops Lane".
 * @param {string} value
 * @returns {string}
 */
function titleCase2(value) {
  const words = [];
  for (const part of value.split(/[-_\s]/g)) {
    if (part) {
      words.push(part[0].toUpperCase() + part.slice(1));
    }
  }
  return words.join(" ");
}
// src/search-qmd-adapter.ts
// Public surface of the search adapter: routes queries between "core" keyword
// search and the (MVP-stubbed) QMD mode based on options and environment.
var search_qmd_adapter_exports = {};
__export(search_qmd_adapter_exports, {
  search: () => search
});
/**
 * Search the workspace, selecting a reported mode ("core" or "qmd") from the
 * requested mode and the WORKGRAPH_QMD_ENDPOINT environment variable.
 * In the MVP every branch runs the same local keyword search; only the
 * reported mode and fallbackReason differ, so the previously quadruplicated
 * keywordSearch call is hoisted and executed exactly once (same behavior).
 * @param {string} workspacePath - Workspace root directory.
 * @param {string} text - Query text.
 * @param {{mode?: string, type?: string, limit?: number}} [options]
 * @returns {{mode: string, query: string, results: Array, fallbackReason?: string}}
 */
function search(workspacePath, text, options = {}) {
  const requestedMode = options.mode ?? "auto";
  const endpoint = process.env.WORKGRAPH_QMD_ENDPOINT;
  const qmdEnabled = typeof endpoint === "string" && endpoint.trim().length > 0;
  const results = keywordSearch(workspacePath, text, {
    type: options.type,
    limit: options.limit
  });
  if (requestedMode === "qmd") {
    return qmdEnabled ? {
      mode: "qmd",
      query: text,
      results,
      fallbackReason: "QMD endpoint configured; using core-compatible local ranking in MVP."
    } : {
      mode: "core",
      query: text,
      results,
      fallbackReason: "QMD mode requested but WORKGRAPH_QMD_ENDPOINT is not configured."
    };
  }
  if (requestedMode === "auto" && qmdEnabled) {
    return {
      mode: "qmd",
      query: text,
      results,
      fallbackReason: "Auto mode selected; QMD endpoint detected; using core-compatible local ranking in MVP."
    };
  }
  return {
    mode: "core",
    query: text,
    results
  };
}
// src/trigger.ts
// Bundler-generated module namespace for the trigger-firing API.
var trigger_exports = {};
__export(trigger_exports, {
  fireTrigger: () => fireTrigger
});
// sha256 hashing is used to derive deterministic idempotency keys for fired triggers.
import { createHash } from "crypto";
// Fires a trigger primitive: validates that it exists, is of type "trigger",
// and is in an approved/active state, then creates an idempotent dispatch run
// and records the firing in the append-only ledger.
// Returns { triggerPath, run, idempotencyKey }.
function fireTrigger(workspacePath, triggerPath, options) {
  const primitive = read(workspacePath, triggerPath);
  if (!primitive) {
    throw new Error(`Trigger not found: ${triggerPath}`);
  }
  if (primitive.type !== "trigger") {
    throw new Error(`Target is not a trigger primitive: ${triggerPath}`);
  }
  const currentStatus = String(primitive.fields.status ?? "draft");
  if (currentStatus !== "approved" && currentStatus !== "active") {
    throw new Error(`Trigger must be approved/active to fire. Current status: ${currentStatus}`);
  }
  const objective = options.objective ?? `Trigger ${String(primitive.fields.title ?? triggerPath)} fired action ${String(primitive.fields.action ?? "run")}`;
  // The event key seeds the idempotency hash; default to "now" so ad-hoc
  // firings are unique while explicit keys dedupe repeated deliveries.
  const eventSeed = options.eventKey ?? new Date().toISOString();
  const idempotencyKey = buildIdempotencyKey(triggerPath, eventSeed, objective);
  const run = createRun(workspacePath, {
    actor: options.actor,
    objective,
    context: {
      trigger_path: triggerPath,
      trigger_event: String(primitive.fields.event ?? ""),
      ...options.context
    },
    idempotencyKey
  });
  // Ledger entry ties the firing back to the created run for auditability.
  append(workspacePath, options.actor, "create", triggerPath, "trigger", {
    fired: true,
    event_key: eventSeed,
    run_id: run.id,
    idempotency_key: idempotencyKey
  });
  return { triggerPath, run, idempotencyKey };
}
// Deterministic 32-hex-char key derived from the trigger path, event seed,
// and objective, so repeated firings of the same logical event dedupe.
function buildIdempotencyKey(triggerPath, eventSeed, objective) {
  const digest = createHash("sha256")
    .update([triggerPath, eventSeed, objective].join(":"))
    .digest("hex");
  return digest.slice(0, 32);
}
// src/clawdapus.ts
// Bundler-generated module namespace for the optional Clawdapus skill integration.
var clawdapus_exports = {};
__export(clawdapus_exports, {
  CLAWDAPUS_INTEGRATION_PROVIDER: () => CLAWDAPUS_INTEGRATION_PROVIDER,
  DEFAULT_CLAWDAPUS_SKILL_URL: () => DEFAULT_CLAWDAPUS_SKILL_URL,
  fetchClawdapusSkillMarkdown: () => fetchClawdapusSkillMarkdown,
  installClawdapusSkill: () => installClawdapusSkill
});
// src/integration-core.ts
// Imports a provider's skill markdown into the workspace as a skill primitive.
// Validates the actor, refuses to overwrite an existing skill unless `force`
// is set, downloads the markdown (or uses an injected fetcher), and persists
// the skill with merged provider/user tags.
// Returns { provider, skill, sourceUrl, importedAt, replacedExisting }.
async function installSkillIntegration(workspacePath, provider, options) {
  const actor = options.actor.trim();
  if (!actor) {
    throw new Error(`${provider.id} integration requires a non-empty actor.`);
  }
  const title = options.title?.trim() || provider.defaultTitle;
  const sourceUrl = options.sourceUrl?.trim() || provider.defaultSourceUrl;
  const existing = loadSkillIfExists(workspacePath, title);
  if (existing && !options.force) {
    throw new Error(
      `Skill "${title}" already exists at ${existing.path}. Use --force to refresh it from source.`
    );
  }
  // Callers (tests, alternate transports) may inject a custom downloader.
  const download = options.fetchSkillMarkdown ?? ((url) => fetchSkillMarkdownFromUrl(url, provider.userAgent));
  const markdown = await download(sourceUrl);
  if (!markdown.trim()) {
    throw new Error(`Downloaded ${provider.id} skill from ${sourceUrl} is empty.`);
  }
  const skill = writeSkill(workspacePath, title, markdown, actor, {
    owner: options.owner ?? actor,
    status: options.status,
    distribution: provider.distribution,
    tags: mergeTags(provider.defaultTags, options.tags)
  });
  return {
    provider: provider.id,
    skill,
    sourceUrl,
    importedAt: new Date().toISOString(),
    replacedExisting: existing !== null
  };
}
// Downloads skill markdown over HTTP(S) with a provider-specific user agent.
// Network failures and non-2xx responses are both surfaced as Errors that
// name the source URL; the response body is returned as text on success.
async function fetchSkillMarkdownFromUrl(sourceUrl, userAgent = "@versatly/workgraph optional-integration") {
  const headers = { "user-agent": userAgent };
  let response;
  try {
    response = await fetch(sourceUrl, { headers });
  } catch (cause) {
    throw new Error(
      `Failed to download skill from ${sourceUrl}: ${errorMessage(cause)}`
    );
  }
  if (response.ok) {
    return response.text();
  }
  throw new Error(
    `Failed to download skill from ${sourceUrl}: HTTP ${response.status} ${response.statusText}`
  );
}
// Looks up a skill by reference, converting the loader's "Skill not found:"
// error into a null return; any other failure is rethrown unchanged.
function loadSkillIfExists(workspacePath, skillRef) {
  try {
    return loadSkill(workspacePath, skillRef);
  } catch (error) {
    // Only the well-known not-found message is treated as absence.
    if (errorMessage(error).startsWith("Skill not found:")) {
      return null;
    }
    throw error;
  }
}
// Merges provider default tags with caller-supplied tags, trimming whitespace,
// dropping blanks, and de-duplicating. The "optional-integration" marker tag
// is always first in the result.
function mergeTags(defaultTags, tags) {
  const merged = new Set(["optional-integration"]);
  const addTrimmed = (tag) => {
    const normalized = tag.trim();
    if (normalized) merged.add(normalized);
  };
  defaultTags.forEach(addTrimmed);
  (tags ?? []).forEach(addTrimmed);
  return Array.from(merged);
}
// Extracts a human-readable message from any thrown value: Error instances
// yield their .message, everything else is coerced with String().
function errorMessage(error) {
  if (error instanceof Error) {
    return error.message;
  }
  return String(error);
}
// src/clawdapus.ts
// Upstream location of the canonical Clawdapus SKILL.md document.
var DEFAULT_CLAWDAPUS_SKILL_URL = "https://raw.githubusercontent.com/mostlydev/clawdapus/master/skills/clawdapus/SKILL.md";
// Provider descriptor consumed by installSkillIntegration: identifies the
// integration, its default import title/source URL, the distribution label,
// and the tags applied to the imported skill primitive.
var CLAWDAPUS_INTEGRATION_PROVIDER = {
  id: "clawdapus",
  defaultTitle: "clawdapus",
  defaultSourceUrl: DEFAULT_CLAWDAPUS_SKILL_URL,
  distribution: "clawdapus-optional-integration",
  defaultTags: ["clawdapus"],
  userAgent: "@versatly/workgraph clawdapus-optional-integration"
};
// Convenience wrapper: installs the Clawdapus skill using its canned provider
// descriptor. See installSkillIntegration for option semantics.
async function installClawdapusSkill(workspacePath, options) {
  return installSkillIntegration(workspacePath, CLAWDAPUS_INTEGRATION_PROVIDER, options);
}
// Downloads Clawdapus skill markdown using the provider's user agent.
async function fetchClawdapusSkillMarkdown(sourceUrl) {
  const { userAgent } = CLAWDAPUS_INTEGRATION_PROVIDER;
  return fetchSkillMarkdownFromUrl(sourceUrl, userAgent);
}
// src/integration.ts
// Bundler-generated module namespace for the integration registry API.
var integration_exports = {};
__export(integration_exports, {
  installIntegration: () => installIntegration,
  listIntegrations: () => listIntegrations
});
// Registry of supported optional integrations, keyed by lowercase integration
// ID. Each entry pairs a provider descriptor with its install entry point.
var INTEGRATIONS = {
  clawdapus: {
    provider: CLAWDAPUS_INTEGRATION_PROVIDER,
    description: "Infrastructure-layer governance skill import for AI agent containers.",
    install: installClawdapusSkill
  }
};
// Returns descriptors for every registered integration:
// { id, description, defaultTitle, defaultSourceUrl }.
function listIntegrations() {
  const descriptors = [];
  for (const { provider, description } of Object.values(INTEGRATIONS)) {
    descriptors.push({
      id: provider.id,
      description,
      defaultTitle: provider.defaultTitle,
      defaultSourceUrl: provider.defaultSourceUrl
    });
  }
  return descriptors;
}
// Dispatches an install request to the named integration (case-insensitive,
// whitespace-tolerant lookup). Unknown IDs raise an Error listing the
// supported integrations.
async function installIntegration(workspacePath, integrationId, options) {
  const key = integrationId.trim().toLowerCase();
  const integration = INTEGRATIONS[key];
  if (integration) {
    return integration.install(workspacePath, options);
  }
  throw new Error(
    `Unknown integration "${integrationId}". Supported integrations: ${supportedIntegrationList()}.`
  );
}
// Comma-separated, alphabetically sorted list of registered integration IDs
// (used in error messages).
function supportedIntegrationList() {
  const ids = Object.keys(INTEGRATIONS);
  ids.sort();
  return ids.join(", ");
}
| export { | ||
| bases_exports, | ||
| workspace_exports, | ||
| command_center_exports, | ||
| skill_exports, | ||
| board_exports, | ||
| onboard_exports, | ||
| search_qmd_adapter_exports, | ||
| trigger_exports, | ||
| installSkillIntegration, | ||
| fetchSkillMarkdownFromUrl, | ||
| clawdapus_exports, | ||
| integration_exports | ||
| }; |
| import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; | ||
| interface WorkgraphMcpServerOptions { | ||
| workspacePath: string; | ||
| defaultActor?: string; | ||
| readOnly?: boolean; | ||
| name?: string; | ||
| version?: string; | ||
| } | ||
| declare function createWorkgraphMcpServer(options: WorkgraphMcpServerOptions): McpServer; | ||
| declare function startWorkgraphMcpServer(options: WorkgraphMcpServerOptions): Promise<McpServer>; | ||
| type mcpServer_WorkgraphMcpServerOptions = WorkgraphMcpServerOptions; | ||
| declare const mcpServer_createWorkgraphMcpServer: typeof createWorkgraphMcpServer; | ||
| declare const mcpServer_startWorkgraphMcpServer: typeof startWorkgraphMcpServer; | ||
| declare namespace mcpServer { | ||
| export { type mcpServer_WorkgraphMcpServerOptions as WorkgraphMcpServerOptions, mcpServer_createWorkgraphMcpServer as createWorkgraphMcpServer, mcpServer_startWorkgraphMcpServer as startWorkgraphMcpServer }; | ||
| } | ||
| export { type WorkgraphMcpServerOptions as W, createWorkgraphMcpServer as c, mcpServer as m, startWorkgraphMcpServer as s }; |
| import '@modelcontextprotocol/sdk/server/mcp.js'; | ||
| export { W as WorkgraphMcpServerOptions, c as createWorkgraphMcpServer, s as startWorkgraphMcpServer } from './mcp-server-fU6U6ht8.js'; |
| import { | ||
| createWorkgraphMcpServer, | ||
| startWorkgraphMcpServer | ||
| } from "./chunk-65ZMX2WM.js"; | ||
| export { | ||
| createWorkgraphMcpServer, | ||
| startWorkgraphMcpServer | ||
| }; |
+135
-10
| import { | ||
| bases_exports, | ||
| board_exports, | ||
| clawdapus_exports, | ||
| command_center_exports, | ||
| integration_exports, | ||
| onboard_exports, | ||
| search_qmd_adapter_exports, | ||
| skill_exports, | ||
| trigger_exports, | ||
| workspace_exports | ||
| } from "./chunk-E3QU5Y53.js"; | ||
| import { | ||
| dispatch_exports, | ||
| graph_exports, | ||
| ledger_exports, | ||
| onboard_exports, | ||
| mcp_server_exports, | ||
| orientation_exports, | ||
@@ -13,9 +22,5 @@ policy_exports, | ||
| registry_exports, | ||
| search_qmd_adapter_exports, | ||
| skill_exports, | ||
| store_exports, | ||
| thread_exports, | ||
| trigger_exports, | ||
| workspace_exports | ||
| } from "./chunk-XUMA4O2Z.js"; | ||
| thread_exports | ||
| } from "./chunk-65ZMX2WM.js"; | ||
@@ -516,2 +521,36 @@ // src/cli.ts | ||
| ); | ||
| var integrationCmd = program.command("integration").description("Manage optional third-party integrations"); | ||
| addWorkspaceOption( | ||
| integrationCmd.command("list").description("List supported optional integrations").option("--json", "Emit structured JSON output") | ||
| ).action( | ||
| (opts) => runCommand( | ||
| opts, | ||
| () => ({ | ||
| integrations: integration_exports.listIntegrations() | ||
| }), | ||
| (result) => result.integrations.map((integration) => `${integration.id} (${integration.defaultTitle}) -> ${integration.defaultSourceUrl}`) | ||
| ) | ||
| ); | ||
| addWorkspaceOption( | ||
| integrationCmd.command("install <integrationName>").description("Install an optional integration into this workspace").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--owner <name>", "Skill owner override").option("--title <title>", "Skill title to store in workgraph").option("--source-url <url>", "Source URL override for integration content").option("--force", "Overwrite an existing imported integration skill").option("--json", "Emit structured JSON output") | ||
| ).action( | ||
| (integrationName, opts) => runCommand( | ||
| opts, | ||
| () => installNamedIntegration(resolveWorkspacePath(opts), integrationName, opts), | ||
| renderInstalledIntegrationResult | ||
| ) | ||
| ); | ||
| addWorkspaceOption( | ||
| integrationCmd.command("clawdapus").description("Import Clawdapus SKILL.md into this workspace").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--owner <name>", "Skill owner override").option("--title <title>", "Skill title to store in workgraph", "clawdapus").option( | ||
| "--source-url <url>", | ||
| "Source URL for Clawdapus SKILL.md", | ||
| clawdapus_exports.DEFAULT_CLAWDAPUS_SKILL_URL | ||
| ).option("--force", "Overwrite an existing imported Clawdapus skill").option("--json", "Emit structured JSON output") | ||
| ).action( | ||
| (opts) => runCommand( | ||
| opts, | ||
| () => installNamedIntegration(resolveWorkspacePath(opts), "clawdapus", opts), | ||
| renderInstalledIntegrationResult | ||
| ) | ||
| ); | ||
| var ledgerCmd = program.command("ledger").description("Inspect the append-only workgraph ledger"); | ||
@@ -939,2 +978,35 @@ addWorkspaceOption( | ||
| addWorkspaceOption( | ||
| dispatchCmd.command("create-execute <objective>").description("Create and execute a run with autonomous multi-agent coordination").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--adapter <name>", "Adapter name", "cursor-cloud").option("--idempotency-key <key>", "Idempotency key").option("--agents <actors>", "Comma-separated agent identities for autonomous execution").option("--max-steps <n>", "Maximum scheduler steps", "200").option("--step-delay-ms <ms>", "Delay between scheduling steps", "25").option("--space <spaceRef>", "Restrict execution to one space").option("--no-checkpoint", "Skip automatic checkpoint generation after execution").option("--json", "Emit structured JSON output") | ||
| ).action( | ||
| (objective, opts) => runCommand( | ||
| opts, | ||
| async () => { | ||
| const workspacePath = resolveWorkspacePath(opts); | ||
| return { | ||
| run: await dispatch_exports.createAndExecuteRun( | ||
| workspacePath, | ||
| { | ||
| actor: opts.actor, | ||
| adapter: opts.adapter, | ||
| objective, | ||
| idempotencyKey: opts.idempotencyKey | ||
| }, | ||
| { | ||
| agents: csv(opts.agents), | ||
| maxSteps: Number.parseInt(String(opts.maxSteps), 10), | ||
| stepDelayMs: Number.parseInt(String(opts.stepDelayMs), 10), | ||
| space: opts.space, | ||
| createCheckpoint: opts.checkpoint | ||
| } | ||
| ) | ||
| }; | ||
| }, | ||
| (result) => [ | ||
| `Run executed: ${result.run.id} [${result.run.status}]`, | ||
| ...result.run.output ? [`Output: ${result.run.output}`] : [], | ||
| ...result.run.error ? [`Error: ${result.run.error}`] : [] | ||
| ] | ||
| ) | ||
| ); | ||
| addWorkspaceOption( | ||
| dispatchCmd.command("list").description("List runs").option("--status <status>", "queued|running|succeeded|failed|cancelled").option("--limit <n>", "Result limit").option("--json", "Emit structured JSON output") | ||
@@ -971,2 +1043,27 @@ ).action( | ||
| addWorkspaceOption( | ||
| dispatchCmd.command("execute <runId>").description("Execute a queued/running run via adapter autonomous scheduling").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--agents <actors>", "Comma-separated agent identities").option("--max-steps <n>", "Maximum scheduler steps", "200").option("--step-delay-ms <ms>", "Delay between scheduling steps", "25").option("--space <spaceRef>", "Restrict execution to one space").option("--no-checkpoint", "Skip automatic checkpoint generation after execution").option("--json", "Emit structured JSON output") | ||
| ).action( | ||
| (runId, opts) => runCommand( | ||
| opts, | ||
| async () => { | ||
| const workspacePath = resolveWorkspacePath(opts); | ||
| return { | ||
| run: await dispatch_exports.executeRun(workspacePath, runId, { | ||
| actor: opts.actor, | ||
| agents: csv(opts.agents), | ||
| maxSteps: Number.parseInt(String(opts.maxSteps), 10), | ||
| stepDelayMs: Number.parseInt(String(opts.stepDelayMs), 10), | ||
| space: opts.space, | ||
| createCheckpoint: opts.checkpoint | ||
| }) | ||
| }; | ||
| }, | ||
| (result) => [ | ||
| `Run executed: ${result.run.id} [${result.run.status}]`, | ||
| ...result.run.output ? [`Output: ${result.run.output}`] : [], | ||
| ...result.run.error ? [`Error: ${result.run.error}`] : [] | ||
| ] | ||
| ) | ||
| ); | ||
| addWorkspaceOption( | ||
| dispatchCmd.command("followup <runId> <input>").description("Send follow-up input to a run").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--json", "Emit structured JSON output") | ||
@@ -1114,3 +1211,15 @@ ).action( | ||
| ); | ||
| program.parse(); | ||
| var mcpCmd = program.command("mcp").description("Run Workgraph MCP server"); | ||
| addWorkspaceOption( | ||
| mcpCmd.command("serve").description("Serve stdio MCP tools/resources for this workspace").option("-a, --actor <name>", "Default actor for MCP write tools", DEFAULT_ACTOR).option("--read-only", "Disable all MCP write tools") | ||
| ).action(async (opts) => { | ||
| const workspacePath = resolveWorkspacePath(opts); | ||
| console.error(`Starting MCP server for workspace: ${workspacePath}`); | ||
| await mcp_server_exports.startWorkgraphMcpServer({ | ||
| workspacePath, | ||
| defaultActor: opts.actor, | ||
| readOnly: !!opts.readOnly | ||
| }); | ||
| }); | ||
| await program.parseAsync(); | ||
| function addWorkspaceOption(command) { | ||
@@ -1142,2 +1251,18 @@ return command.option("-w, --workspace <path>", "Workgraph workspace path").option("--vault <path>", "Alias for --workspace").option("--shared-vault <path>", "Shared vault path (e.g. mounted via Tailscale)"); | ||
| } | ||
| function installNamedIntegration(workspacePath, integrationName, opts) { | ||
| return integration_exports.installIntegration(workspacePath, integrationName, { | ||
| actor: opts.actor, | ||
| owner: opts.owner, | ||
| title: opts.title, | ||
| sourceUrl: opts.sourceUrl, | ||
| force: !!opts.force | ||
| }); | ||
| } | ||
| function renderInstalledIntegrationResult(result) { | ||
| return [ | ||
| `${result.replacedExisting ? "Updated" : "Installed"} ${result.provider} integration skill: ${result.skill.path}`, | ||
| `Source: ${result.sourceUrl}`, | ||
| `Status: ${String(result.skill.fields.status)}` | ||
| ]; | ||
| } | ||
| function parseScalar(value) { | ||
@@ -1182,5 +1307,5 @@ if (value === "true") return true; | ||
| } | ||
| function runCommand(opts, action, renderText) { | ||
| async function runCommand(opts, action, renderText) { | ||
| try { | ||
| const result = action(); | ||
| const result = await action(); | ||
| if (wantsJson(opts)) { | ||
@@ -1187,0 +1312,0 @@ console.log(JSON.stringify({ ok: true, data: result }, null, 2)); |
+136
-3
@@ -0,1 +1,4 @@ | ||
| export { m as mcpServer } from './mcp-server-fU6U6ht8.js'; | ||
| import '@modelcontextprotocol/sdk/server/mcp.js'; | ||
| /** | ||
@@ -696,3 +699,3 @@ * Workgraph type definitions. | ||
| /** | ||
| * Runtime dispatch contract (MVP local adapter). | ||
| * Runtime dispatch contract with adapter-backed execution. | ||
| */ | ||
@@ -707,2 +710,10 @@ | ||
| } | ||
| interface DispatchExecuteInput { | ||
| actor: string; | ||
| agents?: string[]; | ||
| maxSteps?: number; | ||
| stepDelayMs?: number; | ||
| space?: string; | ||
| createCheckpoint?: boolean; | ||
| } | ||
| declare function createRun(workspacePath: string, input: DispatchCreateInput): DispatchRun; | ||
@@ -715,2 +726,3 @@ declare function status(workspacePath: string, runId: string): DispatchRun; | ||
| error?: string; | ||
| contextPatch?: Record<string, unknown>; | ||
| }): DispatchRun; | ||
@@ -722,5 +734,10 @@ declare function logs(workspacePath: string, runId: string): DispatchRun['logs']; | ||
| }): DispatchRun[]; | ||
| declare function executeRun(workspacePath: string, runId: string, input: DispatchExecuteInput): Promise<DispatchRun>; | ||
| declare function createAndExecuteRun(workspacePath: string, createInput: DispatchCreateInput, executeInput?: Omit<DispatchExecuteInput, 'actor'>): Promise<DispatchRun>; | ||
| type dispatch_DispatchCreateInput = DispatchCreateInput; | ||
| type dispatch_DispatchExecuteInput = DispatchExecuteInput; | ||
| declare const dispatch_createAndExecuteRun: typeof createAndExecuteRun; | ||
| declare const dispatch_createRun: typeof createRun; | ||
| declare const dispatch_executeRun: typeof executeRun; | ||
| declare const dispatch_followup: typeof followup; | ||
@@ -733,3 +750,3 @@ declare const dispatch_listRuns: typeof listRuns; | ||
| declare namespace dispatch { | ||
| export { type dispatch_DispatchCreateInput as DispatchCreateInput, dispatch_createRun as createRun, dispatch_followup as followup, dispatch_listRuns as listRuns, dispatch_logs as logs, dispatch_markRun as markRun, dispatch_status as status, dispatch_stop as stop }; | ||
| export { type dispatch_DispatchCreateInput as DispatchCreateInput, type dispatch_DispatchExecuteInput as DispatchExecuteInput, dispatch_createAndExecuteRun as createAndExecuteRun, dispatch_createRun as createRun, dispatch_executeRun as executeRun, dispatch_followup as followup, dispatch_listRuns as listRuns, dispatch_logs as logs, dispatch_markRun as markRun, dispatch_status as status, dispatch_stop as stop }; | ||
| } | ||
@@ -819,2 +836,118 @@ | ||
| export { type DispatchRun, type FieldDefinition, type LedgerChainState, type LedgerEntry, type LedgerIndex, type LedgerOp, type PolicyParty, type PolicyRegistry, type PrimitiveInstance, type PrimitiveQueryFilters, type PrimitiveTypeDefinition, type Registry, type RunStatus, THREAD_STATUS_TRANSITIONS, type ThreadStatus, type WorkgraphBrief, type WorkgraphStatusSnapshot, type WorkgraphWorkspaceConfig, bases, board, commandCenter, dispatch, graph, ledger, onboard, orientation, policy, query, registry, searchQmdAdapter, skill, store, thread, trigger, workspace }; | ||
| interface SkillIntegrationProvider { | ||
| id: string; | ||
| defaultTitle: string; | ||
| defaultSourceUrl: string; | ||
| distribution: string; | ||
| defaultTags: string[]; | ||
| userAgent?: string; | ||
| } | ||
| interface InstallSkillIntegrationOptions { | ||
| actor: string; | ||
| owner?: string; | ||
| title?: string; | ||
| sourceUrl?: string; | ||
| force?: boolean; | ||
| status?: WriteSkillOptions['status']; | ||
| tags?: string[]; | ||
| fetchSkillMarkdown?: (sourceUrl: string) => Promise<string>; | ||
| } | ||
| interface InstallSkillIntegrationResult { | ||
| provider: string; | ||
| skill: PrimitiveInstance; | ||
| sourceUrl: string; | ||
| importedAt: string; | ||
| replacedExisting: boolean; | ||
| } | ||
| declare function installSkillIntegration(workspacePath: string, provider: SkillIntegrationProvider, options: InstallSkillIntegrationOptions): Promise<InstallSkillIntegrationResult>; | ||
| declare function fetchSkillMarkdownFromUrl(sourceUrl: string, userAgent?: string): Promise<string>; | ||
| declare const DEFAULT_CLAWDAPUS_SKILL_URL = "https://raw.githubusercontent.com/mostlydev/clawdapus/master/skills/clawdapus/SKILL.md"; | ||
| declare const CLAWDAPUS_INTEGRATION_PROVIDER: SkillIntegrationProvider; | ||
| type InstallClawdapusSkillOptions = InstallSkillIntegrationOptions; | ||
| type InstallClawdapusSkillResult = InstallSkillIntegrationResult; | ||
| declare function installClawdapusSkill(workspacePath: string, options: InstallClawdapusSkillOptions): Promise<InstallClawdapusSkillResult>; | ||
| declare function fetchClawdapusSkillMarkdown(sourceUrl: string): Promise<string>; | ||
| declare const clawdapus_CLAWDAPUS_INTEGRATION_PROVIDER: typeof CLAWDAPUS_INTEGRATION_PROVIDER; | ||
| declare const clawdapus_DEFAULT_CLAWDAPUS_SKILL_URL: typeof DEFAULT_CLAWDAPUS_SKILL_URL; | ||
| type clawdapus_InstallClawdapusSkillOptions = InstallClawdapusSkillOptions; | ||
| type clawdapus_InstallClawdapusSkillResult = InstallClawdapusSkillResult; | ||
| declare const clawdapus_fetchClawdapusSkillMarkdown: typeof fetchClawdapusSkillMarkdown; | ||
| declare const clawdapus_installClawdapusSkill: typeof installClawdapusSkill; | ||
| declare namespace clawdapus { | ||
| export { clawdapus_CLAWDAPUS_INTEGRATION_PROVIDER as CLAWDAPUS_INTEGRATION_PROVIDER, clawdapus_DEFAULT_CLAWDAPUS_SKILL_URL as DEFAULT_CLAWDAPUS_SKILL_URL, type clawdapus_InstallClawdapusSkillOptions as InstallClawdapusSkillOptions, type clawdapus_InstallClawdapusSkillResult as InstallClawdapusSkillResult, clawdapus_fetchClawdapusSkillMarkdown as fetchClawdapusSkillMarkdown, clawdapus_installClawdapusSkill as installClawdapusSkill }; | ||
| } | ||
| interface IntegrationDescriptor { | ||
| id: string; | ||
| description: string; | ||
| defaultTitle: string; | ||
| defaultSourceUrl: string; | ||
| } | ||
| declare function listIntegrations(): IntegrationDescriptor[]; | ||
| declare function installIntegration(workspacePath: string, integrationId: string, options: InstallSkillIntegrationOptions): Promise<InstallSkillIntegrationResult>; | ||
| type integration_IntegrationDescriptor = IntegrationDescriptor; | ||
| declare const integration_installIntegration: typeof installIntegration; | ||
| declare const integration_listIntegrations: typeof listIntegrations; | ||
| declare namespace integration { | ||
| export { type integration_IntegrationDescriptor as IntegrationDescriptor, integration_installIntegration as installIntegration, integration_listIntegrations as listIntegrations }; | ||
| } | ||
| interface DispatchAdapterCreateInput { | ||
| actor: string; | ||
| objective: string; | ||
| idempotencyKey?: string; | ||
| context?: Record<string, unknown>; | ||
| } | ||
| interface DispatchAdapterRunStatus { | ||
| runId: string; | ||
| status: RunStatus; | ||
| } | ||
| interface DispatchAdapterLogEntry { | ||
| ts: string; | ||
| level: 'info' | 'warn' | 'error'; | ||
| message: string; | ||
| } | ||
| interface DispatchAdapterExecutionInput { | ||
| workspacePath: string; | ||
| runId: string; | ||
| actor: string; | ||
| objective: string; | ||
| context?: Record<string, unknown>; | ||
| agents?: string[]; | ||
| maxSteps?: number; | ||
| stepDelayMs?: number; | ||
| space?: string; | ||
| createCheckpoint?: boolean; | ||
| isCancelled?: () => boolean; | ||
| } | ||
| interface DispatchAdapterExecutionResult { | ||
| status: RunStatus; | ||
| output?: string; | ||
| error?: string; | ||
| logs: DispatchAdapterLogEntry[]; | ||
| metrics?: Record<string, unknown>; | ||
| } | ||
| interface DispatchAdapter { | ||
| name: string; | ||
| create(input: DispatchAdapterCreateInput): Promise<DispatchAdapterRunStatus>; | ||
| status(runId: string): Promise<DispatchAdapterRunStatus>; | ||
| followup(runId: string, actor: string, input: string): Promise<DispatchAdapterRunStatus>; | ||
| stop(runId: string, actor: string): Promise<DispatchAdapterRunStatus>; | ||
| logs(runId: string): Promise<DispatchAdapterLogEntry[]>; | ||
| execute?(input: DispatchAdapterExecutionInput): Promise<DispatchAdapterExecutionResult>; | ||
| } | ||
| declare class CursorCloudAdapter implements DispatchAdapter { | ||
| name: string; | ||
| create(_input: DispatchAdapterCreateInput): Promise<DispatchAdapterRunStatus>; | ||
| status(runId: string): Promise<DispatchAdapterRunStatus>; | ||
| followup(runId: string, _actor: string, _input: string): Promise<DispatchAdapterRunStatus>; | ||
| stop(runId: string, _actor: string): Promise<DispatchAdapterRunStatus>; | ||
| logs(_runId: string): Promise<DispatchAdapterLogEntry[]>; | ||
| execute(input: DispatchAdapterExecutionInput): Promise<DispatchAdapterExecutionResult>; | ||
| } | ||
| export { CursorCloudAdapter, type DispatchAdapter, type DispatchAdapterCreateInput, type DispatchAdapterExecutionInput, type DispatchAdapterExecutionResult, type DispatchAdapterLogEntry, type DispatchAdapterRunStatus, type DispatchRun, type FieldDefinition, type InstallSkillIntegrationOptions, type InstallSkillIntegrationResult, type LedgerChainState, type LedgerEntry, type LedgerIndex, type LedgerOp, type PolicyParty, type PolicyRegistry, type PrimitiveInstance, type PrimitiveQueryFilters, type PrimitiveTypeDefinition, type Registry, type RunStatus, type SkillIntegrationProvider, THREAD_STATUS_TRANSITIONS, type ThreadStatus, type WorkgraphBrief, type WorkgraphStatusSnapshot, type WorkgraphWorkspaceConfig, bases, board, clawdapus, commandCenter, dispatch, fetchSkillMarkdownFromUrl, graph, installSkillIntegration, integration, ledger, onboard, orientation, policy, query, registry, searchQmdAdapter, skill, store, thread, trigger, workspace }; |
+22
-8
| import { | ||
| THREAD_STATUS_TRANSITIONS, | ||
| bases_exports, | ||
| board_exports, | ||
| clawdapus_exports, | ||
| command_center_exports, | ||
| fetchSkillMarkdownFromUrl, | ||
| installSkillIntegration, | ||
| integration_exports, | ||
| onboard_exports, | ||
| search_qmd_adapter_exports, | ||
| skill_exports, | ||
| trigger_exports, | ||
| workspace_exports | ||
| } from "./chunk-E3QU5Y53.js"; | ||
| import { | ||
| CursorCloudAdapter, | ||
| THREAD_STATUS_TRANSITIONS, | ||
| dispatch_exports, | ||
| graph_exports, | ||
| ledger_exports, | ||
| onboard_exports, | ||
| mcp_server_exports, | ||
| orientation_exports, | ||
@@ -14,17 +26,19 @@ policy_exports, | ||
| registry_exports, | ||
| search_qmd_adapter_exports, | ||
| skill_exports, | ||
| store_exports, | ||
| thread_exports, | ||
| trigger_exports, | ||
| workspace_exports | ||
| } from "./chunk-XUMA4O2Z.js"; | ||
| thread_exports | ||
| } from "./chunk-65ZMX2WM.js"; | ||
| export { | ||
| CursorCloudAdapter, | ||
| THREAD_STATUS_TRANSITIONS, | ||
| bases_exports as bases, | ||
| board_exports as board, | ||
| clawdapus_exports as clawdapus, | ||
| command_center_exports as commandCenter, | ||
| dispatch_exports as dispatch, | ||
| fetchSkillMarkdownFromUrl, | ||
| graph_exports as graph, | ||
| installSkillIntegration, | ||
| integration_exports as integration, | ||
| ledger_exports as ledger, | ||
| mcp_server_exports as mcpServer, | ||
| onboard_exports as onboard, | ||
@@ -31,0 +45,0 @@ orientation_exports as orientation, |
+9
-3
| { | ||
| "name": "@versatly/workgraph", | ||
| "version": "0.2.0", | ||
| "version": "0.3.0", | ||
| "description": "Agent-first workgraph workspace for multi-agent coordination with dynamic primitives, append-only ledger, and markdown-native storage.", | ||
@@ -17,2 +17,6 @@ "workspaces": [ | ||
| }, | ||
| "./mcp-server": { | ||
| "types": "./dist/mcp-server.d.ts", | ||
| "import": "./dist/mcp-server.js" | ||
| }, | ||
| "./cli": { | ||
@@ -34,3 +38,3 @@ "types": "./dist/cli.d.ts", | ||
| "scripts": { | ||
| "build": "tsup src/index.ts src/cli.ts --format esm --dts --clean", | ||
| "build": "tsup src/index.ts src/cli.ts src/mcp-server.ts --format esm --dts --clean", | ||
| "build:packages": "npm run build --workspaces --if-present", | ||
@@ -68,5 +72,7 @@ "typecheck": "tsc --noEmit", | ||
| "dependencies": { | ||
| "@modelcontextprotocol/sdk": "^1.27.1", | ||
| "commander": "^12.0.0", | ||
| "gray-matter": "^4.0.3", | ||
| "yaml": "^2.8.1" | ||
| "yaml": "^2.8.1", | ||
| "zod": "^4.3.6" | ||
| }, | ||
@@ -73,0 +79,0 @@ "devDependencies": { |
+28
-0
@@ -72,4 +72,10 @@ # @versatly/workgraph | ||
| workgraph dispatch mark run_123 --status succeeded --output "Review complete" --actor agent-lead --json | ||
| workgraph dispatch create-execute "Close all ready threads in platform space" \ | ||
| --actor agent-lead \ | ||
| --agents agent-a,agent-b,agent-c \ | ||
| --space spaces/platform \ | ||
| --json | ||
| workgraph trigger fire triggers/escalate-blocked.md --event-key "thread-blocked-001" --actor agent-lead --json | ||
| workgraph onboarding update onboarding/onboarding-for-agent-architect.md --status paused --actor agent-lead --json | ||
| workgraph mcp serve -w /path/to/workspace --actor agent-ops --read-only | ||
| workgraph ledger show --count 20 --json | ||
@@ -175,2 +181,24 @@ workgraph command-center --output "ops/Command Center.md" --json | ||
| ### Optional Clawdapus integration | ||
| List supported optional integrations: | ||
| ```bash | ||
| workgraph integration list --json | ||
| ``` | ||
| Install by integration ID (extensible pattern for future integrations): | ||
| ```bash | ||
| workgraph integration install clawdapus \ | ||
| --actor agent-architect \ | ||
| --json | ||
| ``` | ||
| Refresh from upstream later (or use the `integration clawdapus` alias): | ||
| ```bash | ||
| workgraph integration install clawdapus --force --actor agent-architect --json | ||
| ``` | ||
| ## Legacy memory stacks vs Workgraph primitives | ||
@@ -177,0 +205,0 @@ |
Sorry, the diff of this file is too big to display
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Network access
Supply chain riskThis module accesses the network.
Found 1 instance in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 6 instances in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
URL strings
Supply chain riskPackage contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 6 instances in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
URL strings
Supply chain riskPackage contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
256038
24.62%13
44.44%6207
28.59%253
12.44%5
66.67%2
100%+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added