You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a Demo · Sign in
Socket

@versatly/workgraph

Package Overview
Dependencies
Maintainers
1
Versions
16
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@versatly/workgraph - npm Package Compare versions

Comparing version
0.1.0
to
0.2.0
dist/chunk-XUMA4O2Z.js

Sorry, the diff of this file is too big to display

+574
-23
import {
bases_exports,
board_exports,
command_center_exports,
dispatch_exports,
graph_exports,
ledger_exports,
onboard_exports,
orientation_exports,
policy_exports,
query_exports,
registry_exports,
search_qmd_adapter_exports,
skill_exports,
store_exports,
thread_exports,
trigger_exports,
workspace_exports
} from "./chunk-CRQXDCPR.js";
} from "./chunk-XUMA4O2Z.js";

@@ -16,3 +25,3 @@ // src/cli.ts

import { Command } from "commander";
var DEFAULT_ACTOR = process.env.WORKGRAPH_AGENT || process.env.CLAWVAULT_AGENT || process.env.USER || "anonymous";
var DEFAULT_ACTOR = process.env.WORKGRAPH_AGENT || process.env.USER || "anonymous";
var CLI_VERSION = (() => {

@@ -275,3 +284,3 @@ try {

addWorkspaceOption(
basesCmd.command("sync-registry").description("Sync .clawvault/primitive-registry.yaml from active registry").option("--json", "Emit structured JSON output")
basesCmd.command("sync-registry").description("Sync .workgraph/primitive-registry.yaml from active registry").option("--json", "Emit structured JSON output")
).action(

@@ -285,3 +294,3 @@ (opts) => runCommand(

primitiveCount: manifest.primitives.length,
manifestPath: ".clawvault/primitive-registry.yaml"
manifestPath: ".workgraph/primitive-registry.yaml"
};

@@ -296,3 +305,3 @@ },

addWorkspaceOption(
basesCmd.command("generate").description("Generate .base files by reading primitive-registry.yaml").option("--all", "Include non-canonical primitives").option("--refresh-registry", "Refresh primitive-registry.yaml before generation").option("--output-dir <path>", "Output directory for .base files (default: .clawvault/bases)").option("--json", "Emit structured JSON output")
basesCmd.command("generate").description("Generate .base files by reading primitive-registry.yaml").option("--all", "Include non-canonical primitives").option("--refresh-registry", "Refresh primitive-registry.yaml before generation").option("--output-dir <path>", "Output directory for .base files (default: .workgraph/bases)").option("--json", "Emit structured JSON output")
).action(

@@ -337,13 +346,3 @@ (opts) => runCommand(

const workspacePath = resolveWorkspacePath(opts);
const fields = { title };
for (const pair of opts.set ?? []) {
const eqIdx = String(pair).indexOf("=");
if (eqIdx === -1) continue;
const key = String(pair).slice(0, eqIdx).trim();
let value = String(pair).slice(eqIdx + 1).trim();
if (typeof value === "string" && value.includes(",")) {
value = value.split(",").map((v) => v.trim());
}
fields[key] = value;
}
const fields = { title, ...parseSetPairs(opts.set ?? []) };
return {

@@ -356,5 +355,24 @@ instance: store_exports.create(workspacePath, type, fields, opts.body, opts.actor)

);
addWorkspaceOption(
primitiveCmd.command("update <path>").description("Update an existing primitive instance").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--set <fields...>", 'Set fields as "key=value"').option("--body <text>", "Replace markdown body content").option("--body-file <path>", "Read markdown body content from file").option("--json", "Emit structured JSON output")
).action(
(targetPath, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
const updates = parseSetPairs(opts.set ?? []);
let body = opts.body;
if (opts.bodyFile) {
body = fs.readFileSync(path.resolve(opts.bodyFile), "utf-8");
}
return {
instance: store_exports.update(workspacePath, targetPath, updates, body, opts.actor)
};
},
(result) => [`Updated ${result.instance.type}: ${result.instance.path}`]
)
);
var skillCmd = program.command("skill").description("Manage native skill primitives in shared workgraph vaults");
addWorkspaceOption(
skillCmd.command("write <title>").description("Create or update a skill primitive").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--owner <name>", "Skill owner").option("--version <semver>", "Skill version").option("--status <status>", "draft | proposed | active | deprecated | archived").option("--distribution <mode>", "Distribution mode", "tailscale-shared-vault").option("--tailscale-path <path>", "Shared Tailscale workspace path").option("--reviewers <list>", "Comma-separated reviewer names").option("--tags <list>", "Comma-separated tags").option("--body <text>", "Skill markdown content").option("--body-file <path>", "Read markdown content from file").option("--json", "Emit structured JSON output")
skillCmd.command("write <title>").description("Create or update a skill primitive").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--owner <name>", "Skill owner").option("--skill-version <semver>", "Skill version").option("--status <status>", "draft | proposed | active | deprecated | archived").option("--distribution <mode>", "Distribution mode", "tailscale-shared-vault").option("--tailscale-path <path>", "Shared Tailscale workspace path").option("--reviewers <list>", "Comma-separated reviewer names").option("--depends-on <list>", "Comma-separated skill dependencies (slug/path)").option("--expected-updated-at <iso>", "Optimistic concurrency guard for updates").option("--tags <list>", "Comma-separated tags").option("--body <text>", "Skill markdown content").option("--body-file <path>", "Read markdown content from file").option("--json", "Emit structured JSON output")
).action(

@@ -377,3 +395,3 @@ (title, opts) => runCommand(

owner: opts.owner,
version: opts.version,
version: opts.skillVersion,
status: opts.status,

@@ -383,2 +401,4 @@ distribution: opts.distribution,

reviewers: csv(opts.reviewers),
dependsOn: csv(opts.dependsOn),
expectedUpdatedAt: opts.expectedUpdatedAt,
tags: csv(opts.tags)

@@ -414,3 +434,3 @@ }

addWorkspaceOption(
skillCmd.command("list").description("List skills").option("--status <status>", "Filter by status").option("--json", "Emit structured JSON output")
skillCmd.command("list").description("List skills").option("--status <status>", "Filter by status").option("--updated-since <iso>", "Filter by updated timestamp (ISO-8601)").option("--json", "Emit structured JSON output")
).action(

@@ -421,3 +441,6 @@ (opts) => runCommand(

const workspacePath = resolveWorkspacePath(opts);
const skills = skill_exports.listSkills(workspacePath, { status: opts.status });
const skills = skill_exports.listSkills(workspacePath, {
status: opts.status,
updatedSince: opts.updatedSince
});
return { skills, count: skills.length };

@@ -429,2 +452,35 @@ },

addWorkspaceOption(
skillCmd.command("history <skillRef>").description("Show ledger history entries for one skill").option("--limit <n>", "Limit number of returned entries").option("--json", "Emit structured JSON output")
).action(
(skillRef, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
entries: skill_exports.skillHistory(workspacePath, skillRef, {
limit: opts.limit ? Number.parseInt(String(opts.limit), 10) : void 0
})
};
},
(result) => result.entries.map((entry) => `${entry.ts} ${entry.op} ${entry.actor}`)
)
);
addWorkspaceOption(
skillCmd.command("diff <skillRef>").description("Show latest field-change summary for one skill").option("--json", "Emit structured JSON output")
).action(
(skillRef, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return skill_exports.skillDiff(workspacePath, skillRef);
},
(result) => [
`Skill: ${result.path}`,
`Latest: ${result.latestEntryTs ?? "none"}`,
`Previous: ${result.previousEntryTs ?? "none"}`,
`Changed fields: ${result.changedFields.join(", ") || "none"}`
]
)
);
addWorkspaceOption(
skillCmd.command("propose <skillRef>").description("Move a skill into proposed state and open review thread").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--proposal-thread <path>", "Explicit proposal thread path").option("--no-create-thread", "Do not create a proposal thread automatically").option("--space <spaceRef>", "Space for created proposal thread").option("--reviewers <list>", "Comma-separated reviewers").option("--json", "Emit structured JSON output")

@@ -452,3 +508,3 @@ ).action(

addWorkspaceOption(
skillCmd.command("promote <skillRef>").description("Promote a proposed/draft skill to active").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--version <semver>", "Explicit promoted version").option("--json", "Emit structured JSON output")
skillCmd.command("promote <skillRef>").description("Promote a proposed/draft skill to active").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--skill-version <semver>", "Explicit promoted version").option("--json", "Emit structured JSON output")
).action(

@@ -461,3 +517,3 @@ (skillRef, opts) => runCommand(

skill: skill_exports.promoteSkill(workspacePath, skillRef, opts.actor, {
version: opts.version
version: opts.skillVersion
})

@@ -617,2 +673,452 @@ };

);
addWorkspaceOption(
program.command("status").description("Show workspace situational status snapshot").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return orientation_exports.statusSnapshot(workspacePath);
},
(result) => [
`Threads: total=${result.threads.total} open=${result.threads.open} active=${result.threads.active} blocked=${result.threads.blocked} done=${result.threads.done}`,
`Ready threads: ${result.threads.ready} Active claims: ${result.claims.active}`,
`Primitive types: ${Object.keys(result.primitives.byType).length}`
]
)
);
addWorkspaceOption(
program.command("brief").description("Show actor-centric operational brief").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--recent <count>", "Recent activity count", "12").option("--next <count>", "Next ready threads to include", "5").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return orientation_exports.brief(workspacePath, opts.actor, {
recentCount: Number.parseInt(String(opts.recent), 10),
nextCount: Number.parseInt(String(opts.next), 10)
});
},
(result) => [
`Brief for ${result.actor}`,
`My claims: ${result.myClaims.length}`,
`Blocked threads: ${result.blockedThreads.length}`,
`Next ready: ${result.nextReadyThreads.map((item) => item.path).join(", ") || "none"}`
]
)
);
addWorkspaceOption(
program.command("checkpoint <summary>").description("Create a checkpoint primitive for hand-off continuity").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--next <items>", "Comma-separated next actions").option("--blocked <items>", "Comma-separated blockers").option("--tags <items>", "Comma-separated tags").option("--json", "Emit structured JSON output")
).action(
(summary, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
checkpoint: orientation_exports.checkpoint(workspacePath, opts.actor, summary, {
next: csv(opts.next),
blocked: csv(opts.blocked),
tags: csv(opts.tags)
})
};
},
(result) => [`Created checkpoint: ${result.checkpoint.path}`]
)
);
addWorkspaceOption(
program.command("intake <observation>").description("Capture intake observation as lightweight checkpoint note").option("-a, --actor <name>", "Agent name", DEFAULT_ACTOR).option("--tags <items>", "Comma-separated tags").option("--json", "Emit structured JSON output")
).action(
(observation, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
intake: orientation_exports.intake(workspacePath, opts.actor, observation, {
tags: csv(opts.tags)
})
};
},
(result) => [`Captured intake: ${result.intake.path}`]
)
);
addWorkspaceOption(
program.command("query").description("Query primitive instances with multi-field filters").option("--type <type>", "Primitive type").option("--status <status>", "Status value").option("--owner <owner>", "Owner/actor value").option("--tag <tag>", "Tag filter").option("--text <text>", "Full-text contains filter").option("--path-includes <text>", "Path substring filter").option("--updated-after <iso>", "Updated at or after").option("--updated-before <iso>", "Updated at or before").option("--created-after <iso>", "Created at or after").option("--created-before <iso>", "Created at or before").option("--limit <n>", "Result limit").option("--offset <n>", "Result offset").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
const results = query_exports.queryPrimitives(workspacePath, {
type: opts.type,
status: opts.status,
owner: opts.owner,
tag: opts.tag,
text: opts.text,
pathIncludes: opts.pathIncludes,
updatedAfter: opts.updatedAfter,
updatedBefore: opts.updatedBefore,
createdAfter: opts.createdAfter,
createdBefore: opts.createdBefore,
limit: opts.limit ? Number.parseInt(String(opts.limit), 10) : void 0,
offset: opts.offset ? Number.parseInt(String(opts.offset), 10) : void 0
});
return { results, count: results.length };
},
(result) => result.results.map((item) => `${item.type} ${item.path}`)
)
);
addWorkspaceOption(
program.command("search <text>").description("Keyword search across markdown body/frontmatter with optional QMD-compatible mode").option("--type <type>", "Limit to primitive type").option("--mode <mode>", "auto | core | qmd", "auto").option("--limit <n>", "Result limit").option("--json", "Emit structured JSON output")
).action(
(text, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
const result = search_qmd_adapter_exports.search(workspacePath, text, {
mode: opts.mode,
type: opts.type,
limit: opts.limit ? Number.parseInt(String(opts.limit), 10) : void 0
});
return {
...result,
count: result.results.length
};
},
(result) => [
`Mode: ${result.mode}`,
...result.fallbackReason ? [`Note: ${result.fallbackReason}`] : [],
...result.results.map((item) => `${item.type} ${item.path}`)
]
)
);
var boardCmd = program.command("board").description("Generate and sync Obsidian Kanban board views");
addWorkspaceOption(
boardCmd.command("generate").description("Generate Obsidian Kanban board markdown from thread states").option("-o, --output <path>", "Output board path", "ops/Workgraph Board.md").option("--include-cancelled", "Include cancelled lane").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return board_exports.generateKanbanBoard(workspacePath, {
outputPath: opts.output,
includeCancelled: !!opts.includeCancelled
});
},
(result) => [
`Generated board: ${result.outputPath}`,
`Backlog=${result.counts.backlog} InProgress=${result.counts.inProgress} Blocked=${result.counts.blocked} Done=${result.counts.done}`
]
)
);
addWorkspaceOption(
boardCmd.command("sync").description("Sync existing board markdown from current thread states").option("-o, --output <path>", "Output board path", "ops/Workgraph Board.md").option("--include-cancelled", "Include cancelled lane").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return board_exports.syncKanbanBoard(workspacePath, {
outputPath: opts.output,
includeCancelled: !!opts.includeCancelled
});
},
(result) => [
`Synced board: ${result.outputPath}`,
`Backlog=${result.counts.backlog} InProgress=${result.counts.inProgress} Blocked=${result.counts.blocked} Done=${result.counts.done}`
]
)
);
var graphCmd = program.command("graph").description("Wiki-link graph indexing and hygiene");
addWorkspaceOption(
graphCmd.command("index").description("Build wiki-link graph index").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return graph_exports.refreshWikiLinkGraphIndex(workspacePath);
},
(result) => [
`Nodes: ${result.nodes.length}`,
`Edges: ${result.edges.length}`,
`Broken links: ${result.brokenLinks.length}`
]
)
);
addWorkspaceOption(
graphCmd.command("hygiene").description("Generate graph hygiene report (orphans, broken links, hubs)").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return graph_exports.graphHygieneReport(workspacePath);
},
(result) => [
`Nodes=${result.nodeCount} Edges=${result.edgeCount}`,
`Orphans=${result.orphanCount} BrokenLinks=${result.brokenLinkCount}`,
`Top hub: ${result.hubs[0]?.node ?? "none"}`
]
)
);
addWorkspaceOption(
graphCmd.command("neighbors <nodePath>").description("Query incoming/outgoing wiki-link neighbors for one node").option("--refresh", "Refresh graph index before querying").option("--json", "Emit structured JSON output")
).action(
(nodePath, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return graph_exports.graphNeighborhood(workspacePath, nodePath, {
refresh: !!opts.refresh
});
},
(result) => [
`Node: ${result.node} (${result.exists ? "exists" : "missing"})`,
`Outgoing: ${result.outgoing.length}`,
`Incoming: ${result.incoming.length}`
]
)
);
var policyCmd = program.command("policy").description("Manage policy parties and capabilities");
var policyPartyCmd = policyCmd.command("party").description("Manage registered policy parties");
addWorkspaceOption(
policyPartyCmd.command("upsert <id>").description("Create or update a policy party").option("--roles <roles>", "Comma-separated roles").option("--capabilities <caps>", "Comma-separated capabilities").option("--json", "Emit structured JSON output")
).action(
(id, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
party: policy_exports.upsertParty(workspacePath, id, {
roles: csv(opts.roles),
capabilities: csv(opts.capabilities)
})
};
},
(result) => [`Upserted policy party: ${result.party.id}`]
)
);
addWorkspaceOption(
policyPartyCmd.command("get <id>").description("Get one policy party").option("--json", "Emit structured JSON output")
).action(
(id, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
const party = policy_exports.getParty(workspacePath, id);
if (!party) throw new Error(`Policy party not found: ${id}`);
return { party };
},
(result) => [`${result.party.id} roles=${result.party.roles.join(",")}`]
)
);
addWorkspaceOption(
policyPartyCmd.command("list").description("List policy parties").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
const registry = policy_exports.loadPolicyRegistry(workspacePath);
return {
parties: Object.values(registry.parties)
};
},
(result) => result.parties.map((party) => `${party.id} [${party.roles.join(", ")}]`)
)
);
var dispatchCmd = program.command("dispatch").description("Programmatic runtime dispatch contract");
addWorkspaceOption(
dispatchCmd.command("create <objective>").description("Create a new run dispatch request").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--adapter <name>", "Adapter name", "cursor-cloud").option("--idempotency-key <key>", "Idempotency key").option("--json", "Emit structured JSON output")
).action(
(objective, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
run: dispatch_exports.createRun(workspacePath, {
actor: opts.actor,
adapter: opts.adapter,
objective,
idempotencyKey: opts.idempotencyKey
})
};
},
(result) => [`Run created: ${result.run.id} [${result.run.status}]`]
)
);
addWorkspaceOption(
dispatchCmd.command("list").description("List runs").option("--status <status>", "queued|running|succeeded|failed|cancelled").option("--limit <n>", "Result limit").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
runs: dispatch_exports.listRuns(workspacePath, {
status: opts.status,
limit: opts.limit ? Number.parseInt(String(opts.limit), 10) : void 0
})
};
},
(result) => result.runs.map((run) => `${run.id} [${run.status}] ${run.objective}`)
)
);
addWorkspaceOption(
dispatchCmd.command("status <runId>").description("Get run status by ID").option("--json", "Emit structured JSON output")
).action(
(runId, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
run: dispatch_exports.status(workspacePath, runId)
};
},
(result) => [`${result.run.id} [${result.run.status}]`]
)
);
addWorkspaceOption(
dispatchCmd.command("followup <runId> <input>").description("Send follow-up input to a run").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--json", "Emit structured JSON output")
).action(
(runId, input, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
run: dispatch_exports.followup(workspacePath, runId, opts.actor, input)
};
},
(result) => [`Follow-up recorded: ${result.run.id} [${result.run.status}]`]
)
);
addWorkspaceOption(
dispatchCmd.command("stop <runId>").description("Cancel a run").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--json", "Emit structured JSON output")
).action(
(runId, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
run: dispatch_exports.stop(workspacePath, runId, opts.actor)
};
},
(result) => [`Stopped run: ${result.run.id} [${result.run.status}]`]
)
);
addWorkspaceOption(
dispatchCmd.command("mark <runId>").description("Set run status transition explicitly").requiredOption("--status <status>", "running|succeeded|failed|cancelled").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--output <text>", "Optional output payload").option("--error <text>", "Optional error payload").option("--json", "Emit structured JSON output")
).action(
(runId, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
const status = normalizeRunStatus(opts.status);
return {
run: dispatch_exports.markRun(workspacePath, runId, opts.actor, status, {
output: opts.output,
error: opts.error
})
};
},
(result) => [`Marked run: ${result.run.id} [${result.run.status}]`]
)
);
addWorkspaceOption(
dispatchCmd.command("logs <runId>").description("Read logs from a run").option("--json", "Emit structured JSON output")
).action(
(runId, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
runId,
logs: dispatch_exports.logs(workspacePath, runId)
};
},
(result) => result.logs.map((entry) => `${entry.ts} [${entry.level}] ${entry.message}`)
)
);
var triggerCmd = program.command("trigger").description("Trigger primitives and run dispatch lifecycle");
addWorkspaceOption(
triggerCmd.command("fire <triggerPath>").description("Fire an approved/active trigger and dispatch a run").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--event-key <key>", "Deterministic event key for idempotency").option("--objective <text>", "Override run objective").option("--json", "Emit structured JSON output")
).action(
(triggerPath, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return trigger_exports.fireTrigger(workspacePath, triggerPath, {
actor: opts.actor,
eventKey: opts.eventKey,
objective: opts.objective
});
},
(result) => [
`Fired trigger: ${result.triggerPath}`,
`Run: ${result.run.id} [${result.run.status}]`
]
)
);
addWorkspaceOption(
program.command("onboard").description("Guided agent-first workspace setup and starter artifacts").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--spaces <list>", "Comma-separated space names").option("--no-demo-threads", "Skip starter onboarding threads").option("--json", "Emit structured JSON output")
).action(
(opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return onboard_exports.onboardWorkspace(workspacePath, {
actor: opts.actor,
spaces: csv(opts.spaces),
createDemoThreads: opts.demoThreads
});
},
(result) => [
`Onboarded actor: ${result.actor}`,
`Spaces created: ${result.spacesCreated.length}`,
`Threads created: ${result.threadsCreated.length}`,
`Board: ${result.boardPath}`,
`Command center: ${result.commandCenterPath}`,
`Onboarding primitive: ${result.onboardingPath}`
]
)
);
var onboardingCmd = program.command("onboarding").description("Manage onboarding primitive lifecycle");
addWorkspaceOption(
onboardingCmd.command("show <onboardingPath>").description("Show one onboarding primitive").option("--json", "Emit structured JSON output")
).action(
(onboardingPath, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
const onboarding = store_exports.read(workspacePath, onboardingPath);
if (!onboarding) throw new Error(`Onboarding primitive not found: ${onboardingPath}`);
if (onboarding.type !== "onboarding") throw new Error(`Target is not onboarding primitive: ${onboardingPath}`);
return { onboarding };
},
(result) => [
`Onboarding: ${result.onboarding.path}`,
`Status: ${String(result.onboarding.fields.status)}`,
`Actor: ${String(result.onboarding.fields.actor)}`
]
)
);
addWorkspaceOption(
onboardingCmd.command("update <onboardingPath>").description("Update onboarding lifecycle status").requiredOption("--status <status>", "active|paused|completed").option("-a, --actor <name>", "Actor", DEFAULT_ACTOR).option("--json", "Emit structured JSON output")
).action(
(onboardingPath, opts) => runCommand(
opts,
() => {
const workspacePath = resolveWorkspacePath(opts);
return {
onboarding: onboard_exports.updateOnboardingStatus(
workspacePath,
onboardingPath,
normalizeOnboardingStatus(opts.status),
opts.actor
)
};
},
(result) => [`Updated onboarding: ${result.onboarding.path} [${String(result.onboarding.fields.status)}]`]
)
);
program.parse();

@@ -627,5 +1133,16 @@ function addWorkspaceOption(command) {

if (process.env.WORKGRAPH_PATH) return path.resolve(process.env.WORKGRAPH_PATH);
if (process.env.CLAWVAULT_PATH) return path.resolve(process.env.CLAWVAULT_PATH);
return process.cwd();
}
/**
 * Parse `--set key=value` CLI pairs into a fields object.
 * Entries without an "=" or with an empty key are silently skipped;
 * values are coerced via parseScalar (numbers, booleans, lists, JSON).
 * @param {Array<string>} pairs - Raw "key=value" strings from the CLI.
 * @returns {Record<string, unknown>} Parsed field map.
 */
function parseSetPairs(pairs) {
  const result = {};
  for (const entry of pairs) {
    const text = String(entry);
    const splitAt = text.indexOf("=");
    // No separator means the entry is malformed; ignore it.
    if (splitAt === -1) continue;
    const name = text.slice(0, splitAt).trim();
    if (!name) continue;
    result[name] = parseScalar(text.slice(splitAt + 1).trim());
  }
  return result;
}
function csv(value) {

@@ -635,2 +1152,36 @@ if (!value) return void 0;

}
/**
 * Coerce a raw CLI string into a typed scalar or list.
 * Recognizes, in order: boolean/null/empty literals, plain numbers,
 * bracketed lists, bare comma-separated lists, then JSON (falling back
 * to the original string when JSON parsing fails).
 * @param {string} value - Trimmed raw value text.
 * @returns {unknown} Parsed value (boolean, null, number, array, or string).
 */
function parseScalar(value) {
  switch (value) {
    case "true":
      return true;
    case "false":
      return false;
    case "null":
      return null;
    case "":
      return "";
  }
  if (/^-?\d+(\.\d+)?$/.test(value)) return Number(value);
  if (value.startsWith("[") && value.endsWith("]")) {
    const body = value.slice(1, -1).trim();
    // "[]" yields an empty array; otherwise parse each element recursively.
    if (!body) return [];
    return body.split(",").map((part) => parseScalar(part.trim()));
  }
  if (value.includes(",")) {
    // Bare comma lists (no brackets) also become arrays.
    return value.split(",").map((part) => parseScalar(part.trim()));
  }
  try {
    // Last resort: JSON handles quoted strings, objects, etc.
    return JSON.parse(value);
  } catch {
    return value;
  }
}
/**
 * Validate and lowercase a run-status string from the CLI.
 * @param {unknown} status - User-supplied status value.
 * @returns {"running"|"succeeded"|"failed"|"cancelled"} Normalized status.
 * @throws {Error} When the value is not one of the accepted transitions.
 */
function normalizeRunStatus(status) {
  const lowered = String(status).toLowerCase();
  const accepted = ["running", "succeeded", "failed", "cancelled"];
  if (accepted.includes(lowered)) return lowered;
  throw new Error(`Invalid run status "${status}". Expected running|succeeded|failed|cancelled.`);
}
/**
 * Validate and lowercase an onboarding lifecycle status from the CLI.
 * @param {unknown} status - User-supplied status value.
 * @returns {"active"|"paused"|"completed"} Normalized status.
 * @throws {Error} When the value is not a recognized lifecycle state.
 */
function normalizeOnboardingStatus(status) {
  const lowered = String(status).toLowerCase();
  switch (lowered) {
    case "active":
    case "paused":
    case "completed":
      return lowered;
    default:
      throw new Error(`Invalid onboarding status "${status}". Expected active|paused|completed.`);
  }
}
function wantsJson(opts) {

@@ -637,0 +1188,0 @@ if (opts.json) return true;

@@ -9,2 +9,23 @@ /**

description?: string;
/** Allowed values when type is scalar/string-like. */
enum?: Array<string | number | boolean>;
/**
* Optional semantic template used for additional validation.
* - slug: lowercase kebab-case token
* - semver: semantic version (x.y.z)
* - email: simple email shape
* - url: absolute http(s) URL
* - iso-date: ISO-8601 date/time string
*/
template?: 'slug' | 'semver' | 'email' | 'url' | 'iso-date';
/**
* Optional regex pattern constraint for string/ref/date fields.
* Uses JavaScript regular expression syntax (without delimiters).
*/
pattern?: string;
/**
* For ref fields, constrain references to one or more primitive types.
* Example: refTypes: ['thread', 'space']
*/
refTypes?: string[];
}

@@ -70,2 +91,79 @@ interface PrimitiveTypeDefinition {

}
/**
 * Filters accepted by the primitive query API; all fields are optional
 * and combine conjunctively (every supplied filter must match).
 */
interface PrimitiveQueryFilters {
type?: string;
status?: string;
owner?: string;
tag?: string;
/** Full-text "contains" filter over primitive content. */
text?: string;
/** Substring match against the primitive's path. */
pathIncludes?: string;
// Timestamp bounds — presumably ISO-8601 strings; confirm against callers.
updatedAfter?: string;
updatedBefore?: string;
createdAfter?: string;
createdBefore?: string;
/** Maximum number of results to return. */
limit?: number;
/** Number of results to skip (for pagination). */
offset?: number;
}
/** Workspace-wide situational snapshot returned by the `status` command. */
interface WorkgraphStatusSnapshot {
generatedAt: string;
/** Thread counts bucketed by lifecycle state. */
threads: {
total: number;
open: number;
active: number;
blocked: number;
done: number;
cancelled: number;
/** Threads with no unmet dependencies, ready to be picked up. */
ready: number;
};
claims: {
active: number;
};
primitives: {
total: number;
/** Instance count per primitive type name. */
byType: Record<string, number>;
};
}
/** Actor-centric operational brief returned by the `brief` command. */
interface WorkgraphBrief {
generatedAt: string;
actor: string;
myClaims: PrimitiveInstance[];
myOpenThreads: PrimitiveInstance[];
blockedThreads: PrimitiveInstance[];
nextReadyThreads: PrimitiveInstance[];
recentActivity: LedgerEntry[];
}
/** A registered party in the policy registry, with roles and capabilities. */
interface PolicyParty {
id: string;
roles: string[];
capabilities: string[];
createdAt: string;
updatedAt: string;
}
/** On-disk policy registry: schema version plus parties keyed by id. */
interface PolicyRegistry {
version: number;
parties: Record<string, PolicyParty>;
}
/** Lifecycle states for a dispatched run. */
type RunStatus = 'queued' | 'running' | 'succeeded' | 'failed' | 'cancelled';
/** A dispatch run record, including follow-up inputs and log entries. */
interface DispatchRun {
id: string;
createdAt: string;
updatedAt: string;
actor: string;
/** Adapter name handling execution — e.g. "cursor-cloud" per the CLI default. */
adapter: string;
objective: string;
status: RunStatus;
/** Optional key used to deduplicate repeated create requests. */
idempotencyKey?: string;
context?: Record<string, unknown>;
output?: string;
error?: string;
/** Follow-up inputs sent to the run after creation, in order. */
followups: Array<{
ts: string;
actor: string;
input: string;
}>;
logs: Array<{
ts: string;
level: 'info' | 'warn' | 'error';
message: string;
}>;
}

@@ -98,3 +196,3 @@ /**

*
* Format: one JSON object per line (.jsonl) in `.clawvault/ledger.jsonl`.
* Format: one JSON object per line (.jsonl) in `.workgraph/ledger.jsonl`.
*/

@@ -124,3 +222,3 @@

}
declare function query(workspacePath: string, options?: LedgerQueryOptions): LedgerEntry[];
declare function query$1(workspacePath: string, options?: LedgerQueryOptions): LedgerEntry[];
interface LedgerBlameActorSummary {

@@ -182,3 +280,2 @@ actor: string;

declare const ledger_loadIndex: typeof loadIndex;
declare const ledger_query: typeof query;
declare const ledger_readAll: typeof readAll;

@@ -191,3 +288,3 @@ declare const ledger_readSince: typeof readSince;

declare namespace ledger {
export { type ledger_LedgerBlameActorSummary as LedgerBlameActorSummary, type ledger_LedgerBlameResult as LedgerBlameResult, type ledger_LedgerQueryOptions as LedgerQueryOptions, type ledger_LedgerVerifyOptions as LedgerVerifyOptions, type ledger_LedgerVerifyResult as LedgerVerifyResult, ledger_activityOf as activityOf, ledger_allClaims as allClaims, ledger_append as append, ledger_blame as blame, ledger_claimsFromIndex as claimsFromIndex, ledger_currentOwner as currentOwner, ledger_historyOf as historyOf, ledger_isClaimed as isClaimed, ledger_ledgerChainStatePath as ledgerChainStatePath, ledger_ledgerIndexPath as ledgerIndexPath, ledger_ledgerPath as ledgerPath, ledger_loadChainState as loadChainState, ledger_loadIndex as loadIndex, ledger_query as query, ledger_readAll as readAll, ledger_readSince as readSince, ledger_rebuildHashChainState as rebuildHashChainState, ledger_rebuildIndex as rebuildIndex, ledger_recent as recent, ledger_verifyHashChain as verifyHashChain };
export { type ledger_LedgerBlameActorSummary as LedgerBlameActorSummary, type ledger_LedgerBlameResult as LedgerBlameResult, type ledger_LedgerQueryOptions as LedgerQueryOptions, type ledger_LedgerVerifyOptions as LedgerVerifyOptions, type ledger_LedgerVerifyResult as LedgerVerifyResult, ledger_activityOf as activityOf, ledger_allClaims as allClaims, ledger_append as append, ledger_blame as blame, ledger_claimsFromIndex as claimsFromIndex, ledger_currentOwner as currentOwner, ledger_historyOf as historyOf, ledger_isClaimed as isClaimed, ledger_ledgerChainStatePath as ledgerChainStatePath, ledger_ledgerIndexPath as ledgerIndexPath, ledger_ledgerPath as ledgerPath, ledger_loadChainState as loadChainState, ledger_loadIndex as loadIndex, query$1 as query, ledger_readAll as readAll, ledger_readSince as readSince, ledger_rebuildHashChainState as rebuildHashChainState, ledger_rebuildIndex as rebuildIndex, ledger_recent as recent, ledger_verifyHashChain as verifyHashChain };
}

@@ -199,3 +296,5 @@

declare function create(workspacePath: string, typeName: string, fields: Record<string, unknown>, body: string, actor: string): PrimitiveInstance;
declare function create(workspacePath: string, typeName: string, fields: Record<string, unknown>, body: string, actor: string, options?: {
pathOverride?: string;
}): PrimitiveInstance;
declare function read(workspacePath: string, relPath: string): PrimitiveInstance | null;

@@ -396,2 +495,4 @@ declare function list(workspacePath: string, typeName: string): PrimitiveInstance[];

tags?: string[];
dependsOn?: string[];
expectedUpdatedAt?: string;
}

@@ -411,4 +512,14 @@ interface ProposeSkillOptions {

status?: string;
updatedSince?: string;
}): PrimitiveInstance[];
declare function proposeSkill(workspacePath: string, skillRef: string, actor: string, options?: ProposeSkillOptions): PrimitiveInstance;
declare function skillHistory(workspacePath: string, skillRef: string, options?: {
limit?: number;
}): LedgerEntry[];
declare function skillDiff(workspacePath: string, skillRef: string): {
path: string;
latestEntryTs: string | null;
previousEntryTs: string | null;
changedFields: string[];
};
declare function promoteSkill(workspacePath: string, skillRef: string, actor: string, options?: PromoteSkillOptions): PrimitiveInstance;

@@ -423,7 +534,291 @@

declare const skill_proposeSkill: typeof proposeSkill;
declare const skill_skillDiff: typeof skillDiff;
declare const skill_skillHistory: typeof skillHistory;
declare const skill_writeSkill: typeof writeSkill;
declare namespace skill {
export { type skill_PromoteSkillOptions as PromoteSkillOptions, type skill_ProposeSkillOptions as ProposeSkillOptions, type skill_WriteSkillOptions as WriteSkillOptions, skill_listSkills as listSkills, skill_loadSkill as loadSkill, skill_promoteSkill as promoteSkill, skill_proposeSkill as proposeSkill, skill_writeSkill as writeSkill };
export { type skill_PromoteSkillOptions as PromoteSkillOptions, type skill_ProposeSkillOptions as ProposeSkillOptions, type skill_WriteSkillOptions as WriteSkillOptions, skill_listSkills as listSkills, skill_loadSkill as loadSkill, skill_promoteSkill as promoteSkill, skill_proposeSkill as proposeSkill, skill_skillDiff as skillDiff, skill_skillHistory as skillHistory, skill_writeSkill as writeSkill };
}
export { type FieldDefinition, type LedgerChainState, type LedgerEntry, type LedgerIndex, type LedgerOp, type PrimitiveInstance, type PrimitiveTypeDefinition, type Registry, THREAD_STATUS_TRANSITIONS, type ThreadStatus, type WorkgraphWorkspaceConfig, bases, commandCenter, ledger, registry, skill, store, thread, workspace };
/**
* Cross-primitive query and keyword search helpers.
*/
declare function queryPrimitives(workspacePath: string, filters?: PrimitiveQueryFilters): PrimitiveInstance[];
declare function keywordSearch(workspacePath: string, text: string, filters?: Omit<PrimitiveQueryFilters, 'text'>): PrimitiveInstance[];
declare const query_keywordSearch: typeof keywordSearch;
declare const query_queryPrimitives: typeof queryPrimitives;
declare namespace query {
export { query_keywordSearch as keywordSearch, query_queryPrimitives as queryPrimitives };
}
/**
* Orientation helpers: status, brief, checkpoint/intake.
*/
declare function statusSnapshot(workspacePath: string): WorkgraphStatusSnapshot;
declare function brief(workspacePath: string, actor: string, options?: {
recentCount?: number;
nextCount?: number;
}): WorkgraphBrief;
declare function checkpoint(workspacePath: string, actor: string, summary: string, options?: {
next?: string[];
blocked?: string[];
tags?: string[];
}): PrimitiveInstance;
declare function intake(workspacePath: string, actor: string, observation: string, options?: {
tags?: string[];
}): PrimitiveInstance;
declare const orientation_brief: typeof brief;
declare const orientation_checkpoint: typeof checkpoint;
declare const orientation_intake: typeof intake;
declare const orientation_statusSnapshot: typeof statusSnapshot;
declare namespace orientation {
export { orientation_brief as brief, orientation_checkpoint as checkpoint, orientation_intake as intake, orientation_statusSnapshot as statusSnapshot };
}
/**
* Wiki-link graph indexing and hygiene reports.
*/
interface WikiGraphEdge {
from: string;
to: string;
}
interface WikiGraphIndex {
generatedAt: string;
nodes: string[];
edges: WikiGraphEdge[];
backlinks: Record<string, string[]>;
orphans: string[];
brokenLinks: Array<{
from: string;
to: string;
}>;
hubs: Array<{
node: string;
degree: number;
}>;
}
interface WikiGraphNeighborhood {
node: string;
exists: boolean;
outgoing: string[];
incoming: string[];
}
declare function graphIndexPath(workspacePath: string): string;
declare function buildWikiLinkGraph(workspacePath: string): WikiGraphIndex;
declare function refreshWikiLinkGraphIndex(workspacePath: string): WikiGraphIndex;
declare function readWikiLinkGraphIndex(workspacePath: string): WikiGraphIndex | null;
declare function graphHygieneReport(workspacePath: string): {
generatedAt: string;
nodeCount: number;
edgeCount: number;
orphanCount: number;
brokenLinkCount: number;
hubs: Array<{
node: string;
degree: number;
}>;
orphans: string[];
brokenLinks: Array<{
from: string;
to: string;
}>;
};
declare function graphNeighborhood(workspacePath: string, nodeRef: string, options?: {
refresh?: boolean;
}): WikiGraphNeighborhood;
type graph_WikiGraphEdge = WikiGraphEdge;
type graph_WikiGraphIndex = WikiGraphIndex;
type graph_WikiGraphNeighborhood = WikiGraphNeighborhood;
declare const graph_buildWikiLinkGraph: typeof buildWikiLinkGraph;
declare const graph_graphHygieneReport: typeof graphHygieneReport;
declare const graph_graphIndexPath: typeof graphIndexPath;
declare const graph_graphNeighborhood: typeof graphNeighborhood;
declare const graph_readWikiLinkGraphIndex: typeof readWikiLinkGraphIndex;
declare const graph_refreshWikiLinkGraphIndex: typeof refreshWikiLinkGraphIndex;
declare namespace graph {
export { type graph_WikiGraphEdge as WikiGraphEdge, type graph_WikiGraphIndex as WikiGraphIndex, type graph_WikiGraphNeighborhood as WikiGraphNeighborhood, graph_buildWikiLinkGraph as buildWikiLinkGraph, graph_graphHygieneReport as graphHygieneReport, graph_graphIndexPath as graphIndexPath, graph_graphNeighborhood as graphNeighborhood, graph_readWikiLinkGraphIndex as readWikiLinkGraphIndex, graph_refreshWikiLinkGraphIndex as refreshWikiLinkGraphIndex };
}
/**
* Obsidian Kanban board generation and sync helpers.
*/
interface BoardOptions {
outputPath?: string;
includeCancelled?: boolean;
}
interface BoardResult {
outputPath: string;
generatedAt: string;
counts: {
backlog: number;
inProgress: number;
blocked: number;
done: number;
cancelled: number;
};
content: string;
}
declare function generateKanbanBoard(workspacePath: string, options?: BoardOptions): BoardResult;
declare function syncKanbanBoard(workspacePath: string, options?: BoardOptions): BoardResult;
type board_BoardOptions = BoardOptions;
type board_BoardResult = BoardResult;
declare const board_generateKanbanBoard: typeof generateKanbanBoard;
declare const board_syncKanbanBoard: typeof syncKanbanBoard;
declare namespace board {
export { type board_BoardOptions as BoardOptions, type board_BoardResult as BoardResult, board_generateKanbanBoard as generateKanbanBoard, board_syncKanbanBoard as syncKanbanBoard };
}
/**
* Policy registry and status transition gates.
*/
interface PolicyDecision {
allowed: boolean;
reason?: string;
}
declare function policyPath(workspacePath: string): string;
declare function loadPolicyRegistry(workspacePath: string): PolicyRegistry;
declare function savePolicyRegistry(workspacePath: string, registry: PolicyRegistry): void;
declare function upsertParty(workspacePath: string, partyId: string, updates: {
roles?: string[];
capabilities?: string[];
}): PolicyParty;
declare function getParty(workspacePath: string, partyId: string): PolicyParty | null;
declare function canTransitionStatus(workspacePath: string, actor: string, primitiveType: string, fromStatus: string | undefined, toStatus: string | undefined): PolicyDecision;
type policy_PolicyDecision = PolicyDecision;
declare const policy_canTransitionStatus: typeof canTransitionStatus;
declare const policy_getParty: typeof getParty;
declare const policy_loadPolicyRegistry: typeof loadPolicyRegistry;
declare const policy_policyPath: typeof policyPath;
declare const policy_savePolicyRegistry: typeof savePolicyRegistry;
declare const policy_upsertParty: typeof upsertParty;
declare namespace policy {
export { type policy_PolicyDecision as PolicyDecision, policy_canTransitionStatus as canTransitionStatus, policy_getParty as getParty, policy_loadPolicyRegistry as loadPolicyRegistry, policy_policyPath as policyPath, policy_savePolicyRegistry as savePolicyRegistry, policy_upsertParty as upsertParty };
}
/**
* Runtime dispatch contract (MVP local adapter).
*/
interface DispatchCreateInput {
actor: string;
adapter?: string;
objective: string;
context?: Record<string, unknown>;
idempotencyKey?: string;
}
declare function createRun(workspacePath: string, input: DispatchCreateInput): DispatchRun;
declare function status(workspacePath: string, runId: string): DispatchRun;
declare function followup(workspacePath: string, runId: string, actor: string, input: string): DispatchRun;
declare function stop(workspacePath: string, runId: string, actor: string): DispatchRun;
declare function markRun(workspacePath: string, runId: string, actor: string, nextStatus: Exclude<RunStatus, 'queued'>, options?: {
output?: string;
error?: string;
}): DispatchRun;
declare function logs(workspacePath: string, runId: string): DispatchRun['logs'];
declare function listRuns(workspacePath: string, options?: {
status?: RunStatus;
limit?: number;
}): DispatchRun[];
type dispatch_DispatchCreateInput = DispatchCreateInput;
declare const dispatch_createRun: typeof createRun;
declare const dispatch_followup: typeof followup;
declare const dispatch_listRuns: typeof listRuns;
declare const dispatch_logs: typeof logs;
declare const dispatch_markRun: typeof markRun;
declare const dispatch_status: typeof status;
declare const dispatch_stop: typeof stop;
declare namespace dispatch {
export { type dispatch_DispatchCreateInput as DispatchCreateInput, dispatch_createRun as createRun, dispatch_followup as followup, dispatch_listRuns as listRuns, dispatch_logs as logs, dispatch_markRun as markRun, dispatch_status as status, dispatch_stop as stop };
}
/**
* Agent-first onboarding flow for new workgraph workspaces.
*/
interface OnboardOptions {
actor: string;
spaces?: string[];
createDemoThreads?: boolean;
}
interface OnboardResult {
actor: string;
spacesCreated: string[];
threadsCreated: string[];
boardPath: string;
commandCenterPath: string;
checkpointPath: string;
onboardingPath: string;
}
type OnboardingStatus = 'active' | 'completed' | 'paused';
declare function onboardWorkspace(workspacePath: string, options: OnboardOptions): OnboardResult;
declare function updateOnboardingStatus(workspacePath: string, onboardingPath: string, status: OnboardingStatus, actor: string): PrimitiveInstance;
type onboard_OnboardOptions = OnboardOptions;
type onboard_OnboardResult = OnboardResult;
type onboard_OnboardingStatus = OnboardingStatus;
declare const onboard_onboardWorkspace: typeof onboardWorkspace;
declare const onboard_updateOnboardingStatus: typeof updateOnboardingStatus;
declare namespace onboard {
export { type onboard_OnboardOptions as OnboardOptions, type onboard_OnboardResult as OnboardResult, type onboard_OnboardingStatus as OnboardingStatus, onboard_onboardWorkspace as onboardWorkspace, onboard_updateOnboardingStatus as updateOnboardingStatus };
}
/**
* QMD-compatible search adapter.
*
* This package intentionally degrades gracefully to core keyword search when
* a QMD backend is not configured.
*/
interface QmdSearchOptions {
mode?: 'auto' | 'core' | 'qmd';
type?: string;
limit?: number;
}
interface QmdSearchResult {
mode: 'core' | 'qmd';
query: string;
results: PrimitiveInstance[];
fallbackReason?: string;
}
declare function search(workspacePath: string, text: string, options?: QmdSearchOptions): QmdSearchResult;
type searchQmdAdapter_QmdSearchOptions = QmdSearchOptions;
type searchQmdAdapter_QmdSearchResult = QmdSearchResult;
declare const searchQmdAdapter_search: typeof search;
declare namespace searchQmdAdapter {
export { type searchQmdAdapter_QmdSearchOptions as QmdSearchOptions, type searchQmdAdapter_QmdSearchResult as QmdSearchResult, searchQmdAdapter_search as search };
}
/**
* Trigger-to-run dispatch helpers.
*/
interface FireTriggerOptions {
actor: string;
eventKey?: string;
objective?: string;
context?: Record<string, unknown>;
}
interface FireTriggerResult {
triggerPath: string;
run: DispatchRun;
idempotencyKey: string;
}
declare function fireTrigger(workspacePath: string, triggerPath: string, options: FireTriggerOptions): FireTriggerResult;
type trigger_FireTriggerOptions = FireTriggerOptions;
type trigger_FireTriggerResult = FireTriggerResult;
declare const trigger_fireTrigger: typeof fireTrigger;
declare namespace trigger {
export { type trigger_FireTriggerOptions as FireTriggerOptions, type trigger_FireTriggerResult as FireTriggerResult, trigger_fireTrigger as fireTrigger };
}
export { type DispatchRun, type FieldDefinition, type LedgerChainState, type LedgerEntry, type LedgerIndex, type LedgerOp, type PolicyParty, type PolicyRegistry, type PrimitiveInstance, type PrimitiveQueryFilters, type PrimitiveTypeDefinition, type Registry, type RunStatus, THREAD_STATUS_TRANSITIONS, type ThreadStatus, type WorkgraphBrief, type WorkgraphStatusSnapshot, type WorkgraphWorkspaceConfig, bases, board, commandCenter, dispatch, graph, ledger, onboard, orientation, policy, query, registry, searchQmdAdapter, skill, store, thread, trigger, workspace };
import {
THREAD_STATUS_TRANSITIONS,
bases_exports,
board_exports,
command_center_exports,
dispatch_exports,
graph_exports,
ledger_exports,
onboard_exports,
orientation_exports,
policy_exports,
query_exports,
registry_exports,
search_qmd_adapter_exports,
skill_exports,
store_exports,
thread_exports,
trigger_exports,
workspace_exports
} from "./chunk-CRQXDCPR.js";
} from "./chunk-XUMA4O2Z.js";
export {
THREAD_STATUS_TRANSITIONS,
bases_exports as bases,
board_exports as board,
command_center_exports as commandCenter,
dispatch_exports as dispatch,
graph_exports as graph,
ledger_exports as ledger,
onboard_exports as onboard,
orientation_exports as orientation,
policy_exports as policy,
query_exports as query,
registry_exports as registry,
search_qmd_adapter_exports as searchQmdAdapter,
skill_exports as skill,
store_exports as store,
thread_exports as thread,
trigger_exports as trigger,
workspace_exports as workspace
};
+15
-6
{
"name": "@versatly/workgraph",
"version": "0.1.0",
"version": "0.2.0",
"description": "Agent-first workgraph workspace for multi-agent coordination with dynamic primitives, append-only ledger, and markdown-native storage.",
"workspaces": [
"packages/*"
],
"type": "module",

@@ -31,5 +34,10 @@ "main": "dist/index.js",

"build": "tsup src/index.ts src/cli.ts --format esm --dts --clean",
"build:packages": "npm run build --workspaces --if-present",
"typecheck": "tsc --noEmit",
"typecheck:packages": "npm run typecheck --workspaces --if-present",
"test": "vitest run --config vitest.config.ts",
"ci": "npm run typecheck && npm run test && npm run build",
"test:packages": "npm run test --workspaces --if-present",
"demo:workspace": "npm run build --silent && node scripts/generate-demo-workspace.mjs /tmp/workgraph-obsidian-demo",
"demo:obsidian-setup": "npm run build --silent && node scripts/setup-obsidian-demo.mjs /tmp/workgraph-obsidian-demo",
"ci": "npm run typecheck && npm run typecheck:packages && npm run test && npm run build",
"prepublishOnly": "npm run ci"

@@ -49,8 +57,7 @@ },

"type": "git",
"url": "git+https://github.com/Versatly/clawvault.git",
"directory": "packages/workgraph"
"url": "git+https://github.com/Versatly/workgraph.git"
},
"homepage": "https://github.com/Versatly/clawvault/tree/main/packages/workgraph",
"homepage": "https://github.com/Versatly/workgraph",
"bugs": {
"url": "https://github.com/Versatly/clawvault/issues"
"url": "https://github.com/Versatly/workgraph/issues"
},

@@ -67,2 +74,4 @@ "engines": {

"@types/node": "^20.11.0",
"ajv": "^8.18.0",
"ajv-formats": "^3.0.1",
"tsup": "^8.5.1",

@@ -69,0 +78,0 @@ "typescript": "^5.3.3",

+66
-11

@@ -1,11 +0,11 @@

# @clawvault/workgraph
# @versatly/workgraph
Agent-first workgraph workspace for multi-agent collaboration.
`@clawvault/workgraph` is the coordination core extracted from ClawVault. It focuses only on:
`@versatly/workgraph` is the standalone coordination core for multi-agent execution. It focuses only on:
- Dynamic primitive registry (`thread`, `space`, `decision`, `lesson`, `fact`, `agent`, plus custom types)
- Append-only event ledger (`.clawvault/ledger.jsonl`)
- Ledger claim index (`.clawvault/ledger-index.json`) for fast ownership queries
- Tamper-evident ledger hash-chain (`.clawvault/ledger-chain.json`)
- Append-only event ledger (`.workgraph/ledger.jsonl`)
- Ledger claim index (`.workgraph/ledger-index.json`) for fast ownership queries
- Tamper-evident ledger hash-chain (`.workgraph/ledger-chain.json`)
- Markdown-native primitive store

@@ -17,2 +17,10 @@ - Thread lifecycle coordination (claim/release/block/unblock/done/decompose)

- Primitive-registry manifest + auto-generated `.base` files
- Orientation loop commands (`workgraph status/brief/checkpoint/intake`)
- Multi-filter primitive query (`workgraph query ...`)
- Core + QMD-compatible keyword search (`workgraph search ...`)
- Obsidian Kanban board generation/sync (`workgraph board generate|sync`)
- Wiki-link graph indexing and hygiene reports (`workgraph graph index|hygiene`)
- Policy party registry and sensitive transition gates
- Programmatic dispatch contract (`workgraph dispatch ...`) with explicit status transitions
- Trigger dispatch bridge (`workgraph trigger fire ...`) with idempotency keying
- JSON-friendly CLI for agent orchestration

@@ -25,3 +33,3 @@

```bash
npm install @clawvault/workgraph
npm install @versatly/workgraph
```

@@ -32,3 +40,3 @@

```bash
npm install -g @clawvault/workgraph
npm install -g @versatly/workgraph
```

@@ -57,2 +65,14 @@

workgraph thread next --claim --actor agent-worker --json
workgraph status --json
workgraph brief --actor agent-worker --json
workgraph query --type thread --status open --limit 10 --json
workgraph search "auth" --mode auto --json
workgraph checkpoint "Completed API layer" --next "implement tests" --actor agent-worker --json
workgraph board generate --output "ops/Workgraph Board.md" --json
workgraph graph hygiene --json
workgraph graph neighbors context-nodes/context-node-1 --json
workgraph dispatch create "Review blockers" --actor agent-lead --json
workgraph dispatch mark run_123 --status succeeded --output "Review complete" --actor agent-lead --json
workgraph trigger fire triggers/escalate-blocked.md --event-key "thread-blocked-001" --actor agent-lead --json
workgraph onboarding update onboarding/onboarding-for-agent-architect.md --status paused --actor agent-lead --json
workgraph ledger show --count 20 --json

@@ -72,2 +92,34 @@ workgraph command-center --output "ops/Command Center.md" --json

### Monorepo layout (MVP)
The repository is now organized as a workspaces monorepo while preserving the
published `@versatly/workgraph` package compatibility surface.
Key workspace packages:
- `packages/kernel`
- `packages/cli`
- `packages/sdk`
- `packages/control-api`
- `packages/runtime-adapter-core`
- `packages/adapter-cursor-cloud`
- `packages/policy`
- `packages/testkit`
- `packages/search-qmd-adapter`
- `packages/obsidian-integration`
- `packages/skills`
Migration notes: see `docs/MIGRATION.md`.
### Demo vault generator
Generate the large Obsidian demo workspace used for stress-testing:
```bash
npm run demo:workspace
npm run demo:obsidian-setup
```
Runbook: `docs/OBSIDIAN_DEMO.md`.
### Space-scoped scheduling

@@ -89,3 +141,3 @@

```bash
# Sync .clawvault/primitive-registry.yaml
# Sync .workgraph/primitive-registry.yaml
workgraph bases sync-registry --json

@@ -123,7 +175,10 @@

workgraph skill load workgraph-manual --json
workgraph skill list --updated-since 2026-02-27T00:00:00.000Z --json
workgraph skill history workgraph-manual --limit 10 --json
workgraph skill diff workgraph-manual --json
```
## ClawVault memory vs Workgraph primitives (split clarification)
## Legacy memory stacks vs Workgraph primitives
`@clawvault/workgraph` is **execution coordination only**.
`@versatly/workgraph` is **execution coordination only**.

@@ -151,3 +206,3 @@ - Use it for: ownership, decomposition, dependency management, typed coordination primitives.

```ts
import { registry, thread, store, ledger, workspace } from '@clawvault/workgraph';
import { registry, thread, store, ledger, workspace } from '@versatly/workgraph';

@@ -154,0 +209,0 @@ workspace.initWorkspace('/tmp/wg');

+10
-10
---
name: workgraph
version: "0.1.0"
description: Agent-first multi-agent coordination skill for markdown-native workgraph workspaces. Use when coordinating threads, ownership, dependencies, and custom primitive schemas across multiple agents. Do not use for general long-term memory capture; this package intentionally excludes ClawVault memory scaffolding.
description: Agent-first multi-agent coordination skill for markdown-native workgraph workspaces. Use when coordinating threads, ownership, dependencies, and custom primitive schemas across multiple agents. Do not use for general long-term memory capture; this package intentionally excludes legacy memory scaffolding.
author: Versatly
source: https://github.com/Versatly/clawvault/tree/main/packages/workgraph
source: https://github.com/Versatly/workgraph
user-invocable: true

@@ -26,8 +26,8 @@ ---

- `.workgraph.json` — workspace identity and mode.
- `.clawvault/registry.json` — primitive type definitions.
- `.clawvault/ledger.jsonl` — append-only event stream.
- `.clawvault/ledger-index.json` — derived claim snapshot for fast ownership checks.
- `.clawvault/ledger-chain.json` — tamper-evident hash-chain state.
- `.clawvault/primitive-registry.yaml` — canonical primitive registry manifest.
- `.clawvault/bases/*.base` — generated Obsidian Bases files.
- `.workgraph/registry.json` — primitive type definitions.
- `.workgraph/ledger.jsonl` — append-only event stream.
- `.workgraph/ledger-index.json` — derived claim snapshot for fast ownership checks.
- `.workgraph/ledger-chain.json` — tamper-evident hash-chain state.
- `.workgraph/primitive-registry.yaml` — canonical primitive registry manifest.
- `.workgraph/bases/*.base` — generated Obsidian Bases files.
- Primitive directories (e.g. `threads/`, `spaces/`, `agents/`, custom directories).

@@ -302,5 +302,5 @@

- `clawvault` package: memory + retrieval + broader vault lifecycle.
- `@clawvault/workgraph` package: coordination substrate only.
- legacy memory packages: memory + retrieval + broader vault lifecycle.
- `@versatly/workgraph`: coordination substrate only.
Treat this package as the authoritative runtime for multi-agent primitives and claims, not as a memory taxonomy tool.
// Bundler helpers: publish a map of lazy getters onto a namespace object so
// each exported binding resolves at access time (supports hoisted modules).
var __defProp = Object.defineProperty;
var __export = (target, all) => {
  for (const exportName of Object.keys(all)) {
    __defProp(target, exportName, { get: all[exportName], enumerable: true });
  }
};
// src/types.ts
// Legal thread lifecycle transitions: maps each status to the statuses it may
// move to next. `done` is terminal (no exits); `cancelled` can only reopen.
var THREAD_STATUS_TRANSITIONS = {
  open: ["active", "cancelled"],
  active: ["blocked", "done", "cancelled", "open"],
  blocked: ["active", "cancelled"],
  done: [],
  cancelled: ["open"]
};
// src/registry.ts
// Bundler-generated namespace object for the registry module. Each key is a
// lazy getter so the referenced functions (defined later in the chunk)
// resolve at access time rather than at definition time.
var registry_exports = {};
__export(registry_exports, {
  defineType: () => defineType,
  extendType: () => extendType,
  getType: () => getType,
  listTypes: () => listTypes,
  loadRegistry: () => loadRegistry,
  registryPath: () => registryPath,
  saveRegistry: () => saveRegistry
});
// Aliased (fs2/path2) to avoid clashing with imports of other inlined modules.
import fs2 from "fs";
import path2 from "path";
// src/ledger.ts
// Bundler-generated namespace object for the ledger module: append-only
// event stream plus derived claim index and tamper-evident hash chain.
// Keys are lazy getters over functions defined later in the chunk.
var ledger_exports = {};
__export(ledger_exports, {
  activityOf: () => activityOf,
  allClaims: () => allClaims,
  append: () => append,
  blame: () => blame,
  claimsFromIndex: () => claimsFromIndex,
  currentOwner: () => currentOwner,
  historyOf: () => historyOf,
  isClaimed: () => isClaimed,
  ledgerChainStatePath: () => ledgerChainStatePath,
  ledgerIndexPath: () => ledgerIndexPath,
  ledgerPath: () => ledgerPath,
  loadChainState: () => loadChainState,
  loadIndex: () => loadIndex,
  query: () => query,
  readAll: () => readAll,
  readSince: () => readSince,
  rebuildHashChainState: () => rebuildHashChainState,
  rebuildIndex: () => rebuildIndex,
  recent: () => recent,
  verifyHashChain: () => verifyHashChain
});
import fs from "fs";
import path from "path";
// crypto backs the sha256 entry hashing used by the chain helpers.
import crypto from "crypto";
// Workspace-relative storage locations for the ledger and its derived files.
// NOTE(review): the v0.2.0 declarations/README describe these files as living
// under `.workgraph/` (e.g. `.workgraph/ledger.jsonl`), but this bundle still
// writes `.clawvault/` — confirm whether this chunk predates that rename.
var LEDGER_FILE = ".clawvault/ledger.jsonl";
var LEDGER_INDEX_FILE = ".clawvault/ledger-index.json";
var LEDGER_CHAIN_FILE = ".clawvault/ledger-chain.json";
// Schema versions stamped into the derived index/chain-state files.
var LEDGER_INDEX_VERSION = 1;
var LEDGER_CHAIN_VERSION = 1;
// Sentinel prevHash for the first entry of the hash chain.
var LEDGER_GENESIS_HASH = "GENESIS";
/**
 * Resolve the on-disk location of the append-only ledger file.
 * @param {string} workspacePath workspace root directory
 * @returns {string} path to the ledger .jsonl file inside the workspace
 */
function ledgerPath(workspacePath) {
  const ledgerFile = path.join(workspacePath, LEDGER_FILE);
  return ledgerFile;
}
/**
 * Resolve the on-disk location of the derived claim-index file.
 * @param {string} workspacePath workspace root directory
 * @returns {string} path to the ledger index JSON file
 */
function ledgerIndexPath(workspacePath) {
  const indexFile = path.join(workspacePath, LEDGER_INDEX_FILE);
  return indexFile;
}
/**
 * Resolve the on-disk location of the hash-chain state snapshot.
 * @param {string} workspacePath workspace root directory
 * @returns {string} path to the chain-state JSON file
 */
function ledgerChainStatePath(workspacePath) {
  const chainFile = path.join(workspacePath, LEDGER_CHAIN_FILE);
  return chainFile;
}
// Appends one entry to the append-only ledger and keeps the derived claim
// index and tamper-evident hash chain in sync.
// Chain invariant: each entry stores prevHash = previous entry's hash, then
// its own hash is computed over the normalized entry (see computeEntryHash).
// NOTE(review): ensureChainState is defined elsewhere — presumably it loads
// or initializes the chain state; confirm against the chain helpers.
function append(workspacePath, actor, op, target, type, data) {
  const chainState = ensureChainState(workspacePath);
  const baseEntry = {
    ts: (/* @__PURE__ */ new Date()).toISOString(),
    actor,
    op,
    target,
    // Optional fields are omitted entirely rather than written as undefined.
    ...type ? { type } : {},
    ...data && Object.keys(data).length > 0 ? { data } : {},
    prevHash: chainState.lastHash
  };
  const entry = {
    ...baseEntry,
    hash: computeEntryHash(baseEntry)
  };
  const lPath = ledgerPath(workspacePath);
  const dir = path.dirname(lPath);
  if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
  // One JSON object per line (.jsonl format).
  fs.appendFileSync(lPath, JSON.stringify(entry) + "\n", "utf-8");
  // Order matters: the entry is durably appended before the derived
  // index/chain-state files are updated to reflect it.
  updateIndexWithEntry(workspacePath, entry);
  updateChainStateWithEntry(workspacePath, entry);
  return entry;
}
/**
 * Read and parse every entry in the workspace ledger.
 * @param {string} workspacePath workspace root directory
 * @returns {Array<object>} all ledger entries in append order ([] if absent)
 */
function readAll(workspacePath) {
  const file = ledgerPath(workspacePath);
  if (!fs.existsSync(file)) return [];
  const raw = fs.readFileSync(file, "utf-8");
  return raw
    .split("\n")
    .filter((line) => line.length > 0)
    .map((line) => JSON.parse(line));
}
/**
 * Read ledger entries at or after an ISO timestamp (string comparison —
 * valid because ISO-8601 timestamps sort lexicographically).
 * @param {string} workspacePath workspace root directory
 * @param {string} since ISO timestamp lower bound (inclusive)
 * @returns {Array<object>} matching entries in append order
 */
function readSince(workspacePath, since) {
  const entries = readAll(workspacePath);
  return entries.filter((entry) => entry.ts >= since);
}
/**
 * Load the derived claim index from disk.
 * @param {string} workspacePath workspace root directory
 * @returns {object|null} parsed index, or null when missing/corrupt
 */
function loadIndex(workspacePath) {
  const file = ledgerIndexPath(workspacePath);
  if (!fs.existsSync(file)) return null;
  try {
    // Corrupt JSON is treated the same as a missing file: callers rebuild.
    return JSON.parse(fs.readFileSync(file, "utf-8"));
  } catch {
    return null;
  }
}
/**
 * Load the persisted hash-chain state snapshot.
 * @param {string} workspacePath workspace root directory
 * @returns {object|null} parsed chain state, or null when missing/corrupt
 */
function loadChainState(workspacePath) {
  const file = ledgerChainStatePath(workspacePath);
  if (!fs.existsSync(file)) return null;
  try {
    // Corrupt JSON degrades to null; callers fall back to a rebuild.
    return JSON.parse(fs.readFileSync(file, "utf-8"));
  } catch {
    return null;
  }
}
/**
 * Rebuild the claim index from scratch by replaying the full ledger,
 * persist it, and return it.
 * @param {string} workspacePath workspace root directory
 * @returns {object} the freshly rebuilt (and saved) index
 */
function rebuildIndex(workspacePath) {
  const rebuilt = seedIndex();
  for (const entry of readAll(workspacePath)) {
    applyClaimMutation(rebuilt, entry);
    rebuilt.lastEntryTs = entry.ts;
  }
  saveIndex(workspacePath, rebuilt);
  return rebuilt;
}
/**
 * Recompute the hash-chain state by folding the hash of each ledger entry
 * over the previous one, starting from the genesis sentinel; persist and
 * return the resulting snapshot.
 * @param {string} workspacePath workspace root directory
 * @returns {object} the freshly rebuilt (and saved) chain state
 */
function rebuildHashChainState(workspacePath) {
  const entries = readAll(workspacePath);
  const lastHash = entries.reduce(
    (prevHash, entry) => computeEntryHash(normalizeEntryForHash(entry, prevHash)),
    LEDGER_GENESIS_HASH
  );
  const chainState = {
    version: LEDGER_CHAIN_VERSION,
    algorithm: "sha256",
    lastHash,
    count: entries.length,
    updatedAt: new Date().toISOString()
  };
  saveChainState(workspacePath, chainState);
  return chainState;
}
/**
 * Build a target -> actor claim Map from the persisted index, rebuilding the
 * index when it is missing or has an unexpected schema version. Any failure
 * falls back to deriving claims directly from the raw ledger.
 * @param {string} workspacePath workspace root directory
 * @returns {Map<string,string>} current claims keyed by target
 */
function claimsFromIndex(workspacePath) {
  try {
    const cached = loadIndex(workspacePath);
    const index = cached?.version === LEDGER_INDEX_VERSION ? cached : rebuildIndex(workspacePath);
    return new Map(Object.entries(index.claims));
  } catch {
    // Last resort: replay the ledger itself.
    return claimsFromLedger(workspacePath);
  }
}
/**
 * Filter ledger entries by actor/op/target/type/time window, then apply
 * offset/limit pagination (in that order) on the filtered result.
 * @param {string} workspacePath workspace root directory
 * @param {object} [options] LedgerQueryOptions-style filters
 * @returns {Array<object>} matching entries in append order
 */
function query(workspacePath, options = {}) {
  let entries = readAll(workspacePath);
  if (options.actor) entries = entries.filter((entry) => entry.actor === options.actor);
  if (options.op) entries = entries.filter((entry) => entry.op === options.op);
  if (options.target) entries = entries.filter((entry) => entry.target === options.target);
  if (options.targetIncludes) entries = entries.filter((entry) => entry.target.includes(options.targetIncludes));
  if (options.type) entries = entries.filter((entry) => entry.type === options.type);
  if (options.since) entries = entries.filter((entry) => entry.ts >= options.since);
  if (options.until) entries = entries.filter((entry) => entry.ts <= options.until);
  if (options.offset !== void 0 && options.offset > 0) entries = entries.slice(options.offset);
  // Fix: test presence explicitly so `limit: 0` yields an empty page. The old
  // truthy guard (`options.limit && ...`) skipped 0, returning ALL entries.
  if (options.limit !== void 0 && options.limit >= 0) entries = entries.slice(0, options.limit);
  return entries;
}
/**
 * Summarize who touched a target: per-actor entry counts, per-op breakdown,
 * and last-touch timestamp, sorted by count desc then actor name asc.
 * @param {string} workspacePath workspace root directory
 * @param {string} target target identifier to report on
 * @returns {{target:string,totalEntries:number,actors:Array<object>,latest:object|null}}
 */
function blame(workspacePath, target) {
  const history = historyOf(workspacePath, target);
  const summaries = new Map();
  for (const entry of history) {
    let summary = summaries.get(entry.actor);
    if (!summary) {
      summary = { actor: entry.actor, count: 0, ops: {}, lastTs: entry.ts };
      summaries.set(entry.actor, summary);
    }
    summary.count += 1;
    summary.ops[entry.op] = (summary.ops[entry.op] ?? 0) + 1;
    if (entry.ts > summary.lastTs) summary.lastTs = entry.ts;
  }
  const actors = [...summaries.values()].sort(
    (a, b) => b.count - a.count || a.actor.localeCompare(b.actor)
  );
  return {
    target,
    totalEntries: history.length,
    actors,
    latest: history.length > 0 ? history[history.length - 1] : null
  };
}
// Re-derives the hash chain over the entire ledger and reports tampering.
// In strict mode, entries missing hash/prevHash are errors; otherwise they
// only warn (tolerates entries written before hash-chaining existed).
function verifyHashChain(workspacePath, options = {}) {
  const entries = readAll(workspacePath);
  const warnings = [];
  const issues = [];
  // rollingHash holds the expected prevHash for the next entry.
  let rollingHash = LEDGER_GENESIS_HASH;
  for (let idx = 0; idx < entries.length; idx++) {
    const entry = entries[idx];
    const entryNumber = idx + 1;
    if (entry.prevHash === void 0) {
      const message = `Entry #${entryNumber} missing prevHash`;
      if (options.strict) issues.push(message);
      else warnings.push(message);
    } else if (entry.prevHash !== rollingHash) {
      issues.push(`Entry #${entryNumber} prevHash mismatch`);
    }
    const normalized = normalizeEntryForHash(entry, rollingHash);
    const expectedHash = computeEntryHash(normalized);
    if (entry.hash === void 0) {
      const message = `Entry #${entryNumber} missing hash`;
      if (options.strict) issues.push(message);
      else warnings.push(message);
      // No stored hash to adopt, so advance the chain with the recomputed one.
      rollingHash = expectedHash;
      continue;
    }
    if (entry.hash !== expectedHash) {
      issues.push(`Entry #${entryNumber} hash mismatch`);
    }
    // Adopt the stored hash even on mismatch so one bad entry does not
    // cascade into mismatch reports for every subsequent entry.
    rollingHash = entry.hash;
  }
  // Cross-check the persisted chain-state snapshot against the derived chain.
  const chainState = loadChainState(workspacePath);
  if (chainState) {
    if (chainState.count !== entries.length) {
      issues.push(`Chain state count mismatch: state=${chainState.count} actual=${entries.length}`);
    }
    if (chainState.lastHash !== rollingHash) {
      issues.push("Chain state lastHash mismatch");
    }
  } else if (entries.length > 0) {
    // A non-empty ledger should have a snapshot; absence is suspicious but
    // recoverable (rebuildHashChainState), hence only a warning.
    warnings.push("Ledger chain state file missing");
  }
  return {
    ok: issues.length === 0 && (!options.strict || warnings.length === 0),
    entries: entries.length,
    lastHash: rollingHash,
    issues,
    warnings,
    chainState
  };
}
/**
 * Look up the actor currently holding a claim on a target.
 * @param {string} workspacePath workspace root directory
 * @param {string} target target identifier
 * @returns {string|null} owning actor, or null when unclaimed
 */
function currentOwner(workspacePath, target) {
  const owner = allClaims(workspacePath).get(target);
  return owner ?? null;
}
function isClaimed(workspacePath, target) {
  // A target is claimed exactly when some actor currently owns it.
  const owner = currentOwner(workspacePath, target);
  return owner !== null;
}
function historyOf(workspacePath, target) {
  // Every ledger entry recorded against this target, in ledger order.
  const matches = [];
  for (const entry of readAll(workspacePath)) {
    if (entry.target === target) matches.push(entry);
  }
  return matches;
}
function activityOf(workspacePath, actor) {
  // Every ledger entry this actor has written, in ledger order.
  const matches = [];
  for (const entry of readAll(workspacePath)) {
    if (entry.actor === actor) matches.push(entry);
  }
  return matches;
}
function allClaims(workspacePath) {
  // Current claims come from the maintained index (fast path) rather than
  // a full ledger replay (see claimsFromLedger for the slow path).
  const claims = claimsFromIndex(workspacePath);
  return claims;
}
function recent(workspacePath, count = 20) {
  // Return the last `count` ledger entries (default 20), oldest first.
  //
  // Bug fix: `all.slice(-0)` equals `all.slice(0)`, so `recent(ws, 0)`
  // previously returned the ENTIRE ledger instead of no entries. Guard
  // non-positive counts explicitly.
  if (count <= 0) return [];
  const all = readAll(workspacePath);
  return all.slice(-count);
}
function updateIndexWithEntry(workspacePath, entry) {
  // Incrementally fold one freshly appended ledger entry into the
  // persisted claims index (seeding a fresh index when none exists).
  let index = loadIndex(workspacePath);
  if (index === null || index === undefined) index = seedIndex();
  applyClaimMutation(index, entry);
  index.lastEntryTs = entry.ts;
  saveIndex(workspacePath, index);
}
function updateChainStateWithEntry(workspacePath, entry) {
  // Advance the persisted hash-chain summary after appending one entry.
  const state = ensureChainState(workspacePath);
  const chainState = {
    version: LEDGER_CHAIN_VERSION,
    algorithm: "sha256",
    // NOTE(review): when the entry carries no hash, lastHash is left at the
    // previous value while count still advances; verifyHashChain recomputes
    // the hash in that case and may then flag a lastHash mismatch — confirm
    // this fallback is intended for legacy (hash-less) entries.
    lastHash: entry.hash ?? state.lastHash,
    count: state.count + 1,
    updatedAt: (/* @__PURE__ */ new Date()).toISOString()
  };
  saveChainState(workspacePath, chainState);
}
function saveIndex(workspacePath, index) {
  // Persist the claims index as pretty-printed JSON with a trailing newline.
  const idxPath = ledgerIndexPath(workspacePath);
  fs.mkdirSync(path.dirname(idxPath), { recursive: true });
  const payload = `${JSON.stringify(index, null, 2)}\n`;
  fs.writeFileSync(idxPath, payload, "utf-8");
}
function saveChainState(workspacePath, state) {
  // Persist the chain-state summary as pretty-printed JSON (trailing newline).
  const chainPath = ledgerChainStatePath(workspacePath);
  fs.mkdirSync(path.dirname(chainPath), { recursive: true });
  const payload = `${JSON.stringify(state, null, 2)}\n`;
  fs.writeFileSync(chainPath, payload, "utf-8");
}
function seedIndex() {
  // A brand-new, empty claims index at the current schema version.
  const index = {
    version: LEDGER_INDEX_VERSION,
    lastEntryTs: "",
    claims: {}
  };
  return index;
}
function applyClaimMutation(index, entry) {
  // Mutate the claims map for one ledger op: "claim" records ownership,
  // the terminal ops clear it, and every other op is a no-op.
  switch (entry.op) {
    case "claim":
      index.claims[entry.target] = entry.actor;
      break;
    case "release":
    case "done":
    case "cancel":
      delete index.claims[entry.target];
      break;
    default:
      break;
  }
}
function claimsFromLedger(workspacePath) {
  // Derive current ownership by replaying the entire ledger (slow path;
  // the index-backed allClaims() is the normal route).
  const claims = new Map();
  for (const entry of readAll(workspacePath)) {
    switch (entry.op) {
      case "claim":
        claims.set(entry.target, entry.actor);
        break;
      case "release":
      case "done":
      case "cancel":
        claims.delete(entry.target);
        break;
      default:
        break;
    }
  }
  return claims;
}
function ensureChainState(workspacePath) {
  // Reuse the stored chain state when its schema version matches;
  // otherwise rebuild it from the ledger.
  const existing = loadChainState(workspacePath);
  if (existing && existing.version === LEDGER_CHAIN_VERSION) {
    return existing;
  }
  return rebuildHashChainState(workspacePath);
}
function normalizeEntryForHash(entry, fallbackPrevHash) {
  // Canonical subset of an entry that participates in hashing. Optional
  // fields (type, data) are included only when truthy so hashes stay
  // stable; prevHash falls back to the caller-supplied rolling hash.
  const normalized = {
    ts: entry.ts,
    actor: entry.actor,
    op: entry.op,
    target: entry.target
  };
  if (entry.type) normalized.type = entry.type;
  if (entry.data) normalized.data = entry.data;
  normalized.prevHash = entry.prevHash ?? fallbackPrevHash;
  return normalized;
}
function computeEntryHash(entry) {
  // sha256 (hex) over the canonical, sorted-key serialization of the
  // entry's hash-relevant fields; prevHash defaults to the genesis hash.
  const canonical = {
    ts: entry.ts,
    actor: entry.actor,
    op: entry.op,
    target: entry.target
  };
  if (entry.type) canonical.type = entry.type;
  if (entry.data) canonical.data = entry.data;
  canonical.prevHash = entry.prevHash ?? LEDGER_GENESIS_HASH;
  const payload = stableStringify(canonical);
  return crypto.createHash("sha256").update(payload).digest("hex");
}
function stableStringify(value) {
  // Deterministic JSON-like serialization: object keys are sorted so equal
  // objects always serialize (and therefore hash) identically. Used only
  // for hashing — do not change its output format, existing ledger hashes
  // depend on it byte-for-byte.
  if (value === null || typeof value !== "object") {
    return JSON.stringify(value);
  }
  if (Array.isArray(value)) {
    const items = value.map((item) => stableStringify(item));
    return `[${items.join(",")}]`;
  }
  const parts = Object.keys(value)
    .sort()
    .map((key) => `${JSON.stringify(key)}:${stableStringify(value[key])}`);
  return `{${parts.join(",")}}`;
}
// src/registry.ts
// Location of the primitive-type registry inside a workspace, and the
// registry schema version written by saveRegistry.
var REGISTRY_FILE = ".clawvault/registry.json";
var CURRENT_VERSION = 1;
// Built-in primitive types seeded into every registry. ensureBuiltIns()
// backfills these into loaded registries, and defineType() refuses to
// redefine them (they can only be extended). `directory` is where
// instances live on disk; `fields` drives validateFields()/applyDefaults().
var BUILT_IN_TYPES = [
  // The core workgraph node; claim/release/block/done flows operate on these.
  {
    name: "thread",
    description: "A unit of coordinated work. The core workgraph node.",
    directory: "threads",
    builtIn: true,
    createdAt: "2026-01-01T00:00:00.000Z",
    createdBy: "system",
    fields: {
      title: { type: "string", required: true, description: "What this thread is about" },
      goal: { type: "string", required: true, description: "What success looks like" },
      status: { type: "string", required: true, default: "open", description: "open | active | blocked | done | cancelled" },
      owner: { type: "string", description: "Agent that claimed this thread" },
      priority: { type: "string", default: "medium", description: "urgent | high | medium | low" },
      deps: { type: "list", default: [], description: "Thread refs this depends on" },
      parent: { type: "ref", description: "Parent thread if decomposed from larger thread" },
      space: { type: "ref", description: "Space ref this thread belongs to" },
      context_refs: { type: "list", default: [], description: "Docs that inform this work" },
      tags: { type: "list", default: [], description: "Freeform tags" },
      created: { type: "date", required: true },
      updated: { type: "date", required: true }
    }
  },
  // Grouping boundary for related threads and their participants.
  {
    name: "space",
    description: "A workspace boundary that groups related threads and sets context.",
    directory: "spaces",
    builtIn: true,
    createdAt: "2026-01-01T00:00:00.000Z",
    createdBy: "system",
    fields: {
      title: { type: "string", required: true, description: "Space name" },
      description: { type: "string", description: "What this space is for" },
      members: { type: "list", default: [], description: "Agent names that participate" },
      thread_refs: { type: "list", default: [], description: "Thread refs in this space" },
      tags: { type: "list", default: [], description: "Freeform tags" },
      created: { type: "date", required: true },
      updated: { type: "date", required: true }
    }
  },
  // Decision record (ADR-style) with lifecycle status.
  {
    name: "decision",
    description: "A recorded decision with reasoning and context.",
    directory: "decisions",
    builtIn: true,
    createdAt: "2026-01-01T00:00:00.000Z",
    createdBy: "system",
    fields: {
      title: { type: "string", required: true },
      date: { type: "date", required: true },
      status: { type: "string", default: "active", description: "active | superseded | reverted" },
      context_refs: { type: "list", default: [], description: "What informed this decision" },
      tags: { type: "list", default: [] }
    }
  },
  // Captured insight with a confidence level.
  {
    name: "lesson",
    description: "A captured insight or pattern learned from experience.",
    directory: "lessons",
    builtIn: true,
    createdAt: "2026-01-01T00:00:00.000Z",
    createdBy: "system",
    fields: {
      title: { type: "string", required: true },
      date: { type: "date", required: true },
      confidence: { type: "string", default: "medium", description: "high | medium | low" },
      context_refs: { type: "list", default: [] },
      tags: { type: "list", default: [] }
    }
  },
  // Subject/predicate/object triple with optional temporal validity.
  {
    name: "fact",
    description: "A structured piece of knowledge with optional temporal validity.",
    directory: "facts",
    builtIn: true,
    createdAt: "2026-01-01T00:00:00.000Z",
    createdBy: "system",
    fields: {
      subject: { type: "string", required: true },
      predicate: { type: "string", required: true },
      object: { type: "string", required: true },
      confidence: { type: "number", default: 1 },
      valid_from: { type: "date" },
      valid_until: { type: "date" },
      source: { type: "ref", description: "Where this fact came from" }
    }
  },
  // Registered participant (agent) in the workgraph.
  {
    name: "agent",
    description: "A registered participant in the workgraph.",
    directory: "agents",
    builtIn: true,
    createdAt: "2026-01-01T00:00:00.000Z",
    createdBy: "system",
    fields: {
      name: { type: "string", required: true },
      role: { type: "string", description: "What this agent specializes in" },
      capabilities: { type: "list", default: [], description: "What this agent can do" },
      active_threads: { type: "list", default: [], description: "Threads currently claimed" },
      last_seen: { type: "date" }
    }
  },
  // Shared, versioned agent skill with a proposal/promotion lifecycle.
  {
    name: "skill",
    description: "A reusable agent skill shared through the workgraph workspace.",
    directory: "skills",
    builtIn: true,
    createdAt: "2026-01-01T00:00:00.000Z",
    createdBy: "system",
    fields: {
      title: { type: "string", required: true, description: "Skill title" },
      status: { type: "string", required: true, default: "draft", description: "draft | proposed | active | deprecated | archived" },
      version: { type: "string", default: "0.1.0", description: "Semantic version of this skill" },
      owner: { type: "string", description: "Primary skill owner/maintainer" },
      reviewers: { type: "list", default: [], description: "Reviewers involved in proposal" },
      proposal_thread: { type: "ref", description: "Thread coordinating review/promotion" },
      proposed_at: { type: "date" },
      promoted_at: { type: "date" },
      distribution: { type: "string", default: "tailscale-shared-vault", description: "Distribution channel for skill usage" },
      tailscale_path: { type: "string", description: "Shared vault path over Tailscale" },
      tags: { type: "list", default: [] },
      created: { type: "date", required: true },
      updated: { type: "date", required: true }
    }
  }
];
function registryPath(workspacePath) {
  // Absolute path of the primitive-type registry file in this workspace.
  const absolute = path2.join(workspacePath, REGISTRY_FILE);
  return absolute;
}
function loadRegistry(workspacePath) {
  // Load the registry from disk (backfilling built-in types); fall back to
  // the seeded built-in-only registry when no file exists yet.
  const rPath = registryPath(workspacePath);
  if (!fs2.existsSync(rPath)) {
    return seedRegistry();
  }
  const parsed = JSON.parse(fs2.readFileSync(rPath, "utf-8"));
  return ensureBuiltIns(parsed);
}
function saveRegistry(workspacePath, registry) {
  // Persist the registry as pretty-printed JSON with a trailing newline.
  const rPath = registryPath(workspacePath);
  fs2.mkdirSync(path2.dirname(rPath), { recursive: true });
  const payload = JSON.stringify(registry, null, 2) + "\n";
  fs2.writeFileSync(rPath, payload, "utf-8");
}
function defineType(workspacePath, name, description, fields, actor, directory) {
  // Register a new (non-built-in) primitive type. The name is slugged to
  // [a-z0-9_-]; built-ins cannot be redefined (extend them instead). The
  // caller's fields are layered over the mandatory base fields, and the
  // definition is recorded in the ledger.
  const registry = loadRegistry(workspacePath);
  const safeName = name.toLowerCase().replace(/[^a-z0-9_-]/g, "-");
  if (registry.types[safeName]?.builtIn) {
    throw new Error(`Cannot redefine built-in type "${safeName}". You can extend it with new fields instead.`);
  }
  // Every type gets these baseline fields; caller-supplied fields win on clash.
  const baseFields = {
    title: { type: "string", required: true },
    created: { type: "date", required: true },
    updated: { type: "date", required: true },
    tags: { type: "list", default: [] }
  };
  const typeDef = {
    name: safeName,
    description,
    fields: { ...baseFields, ...fields },
    directory: directory ?? `${safeName}s`,
    builtIn: false,
    createdAt: new Date().toISOString(),
    createdBy: actor
  };
  registry.types[safeName] = typeDef;
  saveRegistry(workspacePath, registry);
  append(workspacePath, actor, "define", ".clawvault/registry.json", safeName, {
    name: safeName,
    directory: typeDef.directory,
    fields: Object.keys(typeDef.fields)
  });
  return typeDef;
}
function getType(workspacePath, name) {
  // Look up one primitive type definition (undefined when unknown).
  const { types } = loadRegistry(workspacePath);
  return types[name];
}
function listTypes(workspacePath) {
  // All registered primitive types, built-in and user-defined alike.
  const { types } = loadRegistry(workspacePath);
  return Object.values(types);
}
function extendType(workspacePath, name, newFields, _actor) {
  // Merge additional field definitions into an existing type and persist.
  // Later keys win, so callers may also override existing field specs.
  const registry = loadRegistry(workspacePath);
  const existing = registry.types[name];
  if (!existing) {
    throw new Error(`Type "${name}" not found in registry.`);
  }
  existing.fields = Object.assign({}, existing.fields, newFields);
  saveRegistry(workspacePath, registry);
  return existing;
}
function seedRegistry() {
  // A fresh registry containing exactly the built-in primitive types.
  const types = Object.fromEntries(BUILT_IN_TYPES.map((t) => [t.name, t]));
  return { version: CURRENT_VERSION, types };
}
function ensureBuiltIns(registry) {
  // Backfill any built-in type that a (possibly older) registry file lacks.
  for (const builtIn of BUILT_IN_TYPES) {
    if (!registry.types[builtIn.name]) {
      registry.types[builtIn.name] = builtIn;
    }
  }
  return registry;
}
// src/store.ts
// Public surface of the store module. __export (bundler helper) installs
// lazy getters on store_exports, so these names resolve to the hoisted
// function declarations defined below.
var store_exports = {};
__export(store_exports, {
  activeThreads: () => activeThreads,
  blockedThreads: () => blockedThreads,
  create: () => create,
  findByField: () => findByField,
  list: () => list,
  openThreads: () => openThreads,
  read: () => read,
  remove: () => remove,
  threadsInSpace: () => threadsInSpace,
  update: () => update
});
import fs3 from "fs";
import path3 from "path";
import matter from "gray-matter";
function create(workspacePath, typeName, fields, body, actor) {
  // Create a primitive instance on disk as markdown + frontmatter:
  // apply type defaults, validate, write <directory>/<slug>.md (refusing
  // to overwrite), and log a "create" ledger entry.
  const typeDef = getType(workspacePath, typeName);
  if (!typeDef) {
    throw new Error(`Unknown primitive type "${typeName}". Run \`workgraph primitive list\` to see available types, or \`workgraph primitive define\` to create one.`);
  }
  const now = new Date().toISOString();
  const merged = applyDefaults(typeDef, {
    ...fields,
    created: fields.created ?? now,
    updated: now
  });
  validateFields(typeDef, merged, "create");
  // File name derives from the title (falling back to name, then type).
  const slug = slugify(String(merged.title ?? merged.name ?? typeName));
  const relPath = `${typeDef.directory}/${slug}.md`;
  const absDir = path3.join(workspacePath, typeDef.directory);
  const absPath = path3.join(workspacePath, relPath);
  if (!fs3.existsSync(absDir)) fs3.mkdirSync(absDir, { recursive: true });
  if (fs3.existsSync(absPath)) {
    throw new Error(`File already exists: ${relPath}. Use update instead.`);
  }
  fs3.writeFileSync(absPath, matter.stringify(body, stripUndefined(merged)), "utf-8");
  append(workspacePath, actor, "create", relPath, typeName, {
    title: merged.title ?? slug
  });
  return { path: relPath, type: typeName, fields: merged, body };
}
function read(workspacePath, relPath) {
  // Read one primitive instance from disk; null when the file is missing.
  // The type is inferred from the path's top-level directory.
  const absPath = path3.join(workspacePath, relPath);
  if (!fs3.existsSync(absPath)) return null;
  const { data, content } = matter(fs3.readFileSync(absPath, "utf-8"));
  return {
    path: relPath,
    type: inferType(workspacePath, relPath),
    fields: data,
    body: content.trim()
  };
}
function list(workspacePath, typeName) {
  // All instances of a type, read from its directory. Unknown types and
  // missing directories yield an empty list.
  const typeDef = getType(workspacePath, typeName);
  if (!typeDef) return [];
  const dir = path3.join(workspacePath, typeDef.directory);
  if (!fs3.existsSync(dir)) return [];
  return fs3.readdirSync(dir)
    .filter((file) => file.endsWith(".md"))
    .map((file) => read(workspacePath, `${typeDef.directory}/${file}`))
    .filter((inst) => inst !== null);
}
function update(workspacePath, relPath, fieldUpdates, bodyUpdate, actor) {
  // Merge field updates over the existing frontmatter (stamping `updated`),
  // optionally replace the body, validate, rewrite the file, and log an
  // "update" ledger entry listing the changed field names.
  const existing = read(workspacePath, relPath);
  if (!existing) throw new Error(`Not found: ${relPath}`);
  const typeDef = getType(workspacePath, existing.type);
  if (!typeDef) throw new Error(`Unknown primitive type "${existing.type}" for ${relPath}`);
  const newFields = {
    ...existing.fields,
    ...fieldUpdates,
    updated: new Date().toISOString()
  };
  validateFields(typeDef, newFields, "update");
  const newBody = bodyUpdate ?? existing.body;
  const absPath = path3.join(workspacePath, relPath);
  fs3.writeFileSync(absPath, matter.stringify(newBody, stripUndefined(newFields)), "utf-8");
  append(workspacePath, actor, "update", relPath, existing.type, {
    changed: Object.keys(fieldUpdates)
  });
  return { path: relPath, type: existing.type, fields: newFields, body: newBody };
}
function remove(workspacePath, relPath, actor) {
  // Soft-delete a primitive: move its file into .clawvault/archive and log
  // a "delete" ledger entry.
  const absPath = path3.join(workspacePath, relPath);
  if (!fs3.existsSync(absPath)) throw new Error(`Not found: ${relPath}`);
  const archiveDir = path3.join(workspacePath, ".clawvault", "archive");
  if (!fs3.existsSync(archiveDir)) fs3.mkdirSync(archiveDir, { recursive: true });
  // Bug fix: renameSync clobbers an existing target, so archiving two files
  // with the same basename silently destroyed the earlier archive copy.
  // On collision, pick a timestamped unique name instead.
  let archivePath = path3.join(archiveDir, path3.basename(relPath));
  if (fs3.existsSync(archivePath)) {
    const ext = path3.extname(relPath);
    const stem = path3.basename(relPath, ext);
    archivePath = path3.join(archiveDir, `${stem}-${Date.now()}${ext}`);
  }
  fs3.renameSync(absPath, archivePath);
  const typeName = inferType(workspacePath, relPath);
  append(workspacePath, actor, "delete", relPath, typeName);
}
function findByField(workspacePath, typeName, field, value) {
  // Instances of a type whose frontmatter field strictly equals `value`.
  const matches = [];
  for (const inst of list(workspacePath, typeName)) {
    if (inst.fields[field] === value) matches.push(inst);
  }
  return matches;
}
function openThreads(workspacePath) {
  // Threads in status "open" (unclaimed, potentially claimable).
  const status = "open";
  return findByField(workspacePath, "thread", "status", status);
}
function activeThreads(workspacePath) {
  // Threads in status "active" (claimed and being worked).
  const status = "active";
  return findByField(workspacePath, "thread", "status", status);
}
function blockedThreads(workspacePath) {
  // Threads in status "blocked" (waiting on a dependency).
  const status = "blocked";
  return findByField(workspacePath, "thread", "status", status);
}
function threadsInSpace(workspacePath, spaceRef) {
  // Threads whose `space` ref points at the given space. Both sides are
  // normalized first so [[wiki-link]] and .md-suffix variants compare equal.
  const target = normalizeRefPath(spaceRef);
  const threads = list(workspacePath, "thread");
  return threads.filter((thread) => {
    const space = thread.fields.space;
    return space ? normalizeRefPath(space) === target : false;
  });
}
function slugify(text) {
  // Lowercase, collapse non-alphanumeric runs to "-", trim edge dashes,
  // and cap at 80 characters.
  const lowered = text.toLowerCase();
  const dashed = lowered.replace(/[^a-z0-9]+/g, "-");
  const trimmed = dashed.replace(/^-|-$/g, "");
  return trimmed.slice(0, 80);
}
function applyDefaults(typeDef, fields) {
  // Fill in declared defaults for any field the caller did not supply.
  //
  // Bug fix: array/object defaults (e.g. `deps: { default: [] }`) were
  // previously assigned BY REFERENCE from the shared type definition, so
  // mutating one instance's list silently mutated the registry default and
  // every later instance. Shallow-copy container defaults instead.
  const result = { ...fields };
  for (const [key, def] of Object.entries(typeDef.fields)) {
    if (result[key] !== void 0 || def.default === void 0) continue;
    if (Array.isArray(def.default)) {
      result[key] = [...def.default];
    } else if (def.default !== null && typeof def.default === "object") {
      result[key] = { ...def.default };
    } else {
      result[key] = def.default;
    }
  }
  return result;
}
function stripUndefined(obj) {
  // Shallow copy with every undefined-valued key dropped (null is kept);
  // used so frontmatter serialization never emits `undefined`.
  const entries = Object.entries(obj).filter(([, v]) => v !== undefined);
  return Object.fromEntries(entries);
}
function inferType(workspacePath, relPath) {
  // Map a relative path back to its primitive type via the registry: the
  // path's first segment must match a type's directory; "unknown" otherwise.
  const topDir = relPath.split("/")[0];
  const registry = loadRegistry(workspacePath);
  const match = Object.values(registry.types).find((t) => t.directory === topDir);
  return match ? match.name : "unknown";
}
function normalizeRefPath(value) {
  // Canonical form of a ref: strip one layer of [[wiki-link]] brackets and
  // ensure a ".md" suffix. Nullish/blank input normalizes to "".
  const raw = String(value ?? "").trim();
  if (raw.length === 0) return "";
  let ref = raw;
  if (ref.startsWith("[[") && ref.endsWith("]]")) ref = ref.slice(2, -2);
  if (!ref.endsWith(".md")) ref = `${ref}.md`;
  return ref;
}
function validateFields(typeDef, fields, mode) {
  // Check every declared field: required presence first, then runtime type
  // compatibility for non-nullish values. All problems are aggregated into
  // a single thrown Error; `mode` ("create"/"update") labels the message.
  const issues = [];
  for (const [fieldName, definition] of Object.entries(typeDef.fields)) {
    const value = fields[fieldName];
    if (definition.required && isMissingRequiredValue(value)) {
      issues.push(`Missing required field "${fieldName}"`);
    } else if (
      value !== undefined &&
      value !== null &&
      !isFieldTypeCompatible(definition.type, value)
    ) {
      issues.push(`Field "${fieldName}" expected ${definition.type}, got ${describeValue(value)}`);
    }
  }
  if (issues.length) {
    throw new Error(`Invalid ${typeDef.name} ${mode} payload: ${issues.join("; ")}`);
  }
}
function isMissingRequiredValue(value) {
  // Required fields reject nullish values and blank/whitespace-only strings;
  // other falsy values (0, false, []) count as present.
  if (value == null) return true;
  return typeof value === "string" && value.trim().length === 0;
}
function isFieldTypeCompatible(type, value) {
  // Runtime type check for registry field kinds. Dates and refs are stored
  // as strings; "any" and unrecognized kinds accept every value.
  if (type === "string" || type === "ref" || type === "date") {
    return typeof value === "string";
  }
  if (type === "number") {
    return typeof value === "number" && Number.isFinite(value);
  }
  if (type === "boolean") {
    return typeof value === "boolean";
  }
  if (type === "list") {
    return Array.isArray(value);
  }
  return true;
}
function describeValue(value) {
  // Human-readable kind label for validation error messages ("array" and
  // "null" instead of typeof's "object").
  if (value === null) return "null";
  return Array.isArray(value) ? "array" : typeof value;
}
// src/thread.ts
// Public surface of the thread lifecycle module (claim/release/block/done
// state machine plus readiness queries). __export installs lazy getters so
// these names resolve to the hoisted function declarations below.
var thread_exports = {};
__export(thread_exports, {
  block: () => block,
  cancel: () => cancel,
  claim: () => claim,
  claimNextReady: () => claimNextReady,
  claimNextReadyInSpace: () => claimNextReadyInSpace,
  createThread: () => createThread,
  decompose: () => decompose,
  done: () => done,
  isReadyForClaim: () => isReadyForClaim,
  listReadyThreads: () => listReadyThreads,
  listReadyThreadsInSpace: () => listReadyThreadsInSpace,
  pickNextReadyThread: () => pickNextReadyThread,
  pickNextReadyThreadInSpace: () => pickNextReadyThreadInSpace,
  release: () => release,
  unblock: () => unblock
});
function createThread(workspacePath, title, goal, actor, opts = {}) {
  // Create a new "thread" primitive in status "open". A given space ref is
  // normalized and also appended (once) to context_refs.
  const space = opts.space ? normalizeWorkspaceRef(opts.space) : undefined;
  const contextRefs = opts.context_refs ?? [];
  const mergedContextRefs =
    space && !contextRefs.includes(space) ? [...contextRefs, space] : contextRefs;
  const body = `## Goal
${goal}
`;
  const fields = {
    title,
    goal,
    status: "open",
    priority: opts.priority ?? "medium",
    deps: opts.deps ?? [],
    parent: opts.parent,
    space,
    context_refs: mergedContextRefs,
    tags: opts.tags ?? []
  };
  return create(workspacePath, "thread", fields, body, actor);
}
function isReadyForClaim(workspacePath, threadPathOrInstance) {
  // A thread is claimable when it is an existing "open" thread, has no
  // unfinished child threads, and every dependency is a local thread in
  // status "done". Refs under "external/" always block readiness.
  const instance = typeof threadPathOrInstance === "string"
    ? read(workspacePath, threadPathOrInstance)
    : threadPathOrInstance;
  if (!instance || instance.type !== "thread") return false;
  if (instance.fields.status !== "open") return false;
  const blockedByChild = list(workspacePath, "thread").some((candidate) => {
    if (candidate.fields.parent !== instance.path) return false;
    return !["done", "cancelled"].includes(String(candidate.fields.status));
  });
  if (blockedByChild) return false;
  const deps = Array.isArray(instance.fields.deps) ? instance.fields.deps : [];
  return deps.every((dep) => {
    const depRef = normalizeThreadRef(dep);
    if (depRef.startsWith("external/")) return false;
    const depThread = read(workspacePath, depRef);
    return Boolean(depThread) && depThread.fields.status === "done";
  });
}
function listReadyThreads(workspacePath) {
  // Claimable open threads, ordered urgent-first then oldest-first.
  const ready = openThreads(workspacePath).filter(
    (thread) => isReadyForClaim(workspacePath, thread)
  );
  return ready.sort(compareThreadPriority);
}
function listReadyThreadsInSpace(workspacePath, spaceRef) {
  // Ready threads restricted to one space; refs normalized on both sides.
  const target = normalizeWorkspaceRef(spaceRef);
  const ready = listReadyThreads(workspacePath);
  return ready.filter((thread) => normalizeWorkspaceRef(thread.fields.space) === target);
}
function pickNextReadyThread(workspacePath) {
  // Highest-priority ready thread, or null when nothing is claimable.
  const [first] = listReadyThreads(workspacePath);
  return first ?? null;
}
function pickNextReadyThreadInSpace(workspacePath, spaceRef) {
  // Highest-priority ready thread within a space, or null when none.
  const [first] = listReadyThreadsInSpace(workspacePath, spaceRef);
  return first ?? null;
}
function claimNextReady(workspacePath, actor) {
  // Pick and immediately claim the next ready thread; null when none.
  const next = pickNextReadyThread(workspacePath);
  return next ? claim(workspacePath, next.path, actor) : null;
}
function claimNextReadyInSpace(workspacePath, actor, spaceRef) {
  // Space-scoped variant of claimNextReady; null when the space has no
  // claimable thread.
  const next = pickNextReadyThreadInSpace(workspacePath, spaceRef);
  return next ? claim(workspacePath, next.path, actor) : null;
}
function claim(workspacePath, threadPath, actor) {
  // Claim an open, unowned thread for `actor`: write a "claim" ledger
  // entry, then flip the thread to "active" with the actor as owner.
  const thread = read(workspacePath, threadPath);
  if (!thread) throw new Error(`Thread not found: ${threadPath}`);
  const { status } = thread.fields;
  if (status !== "open") {
    throw new Error(`Cannot claim thread in "${status}" state. Only "open" threads can be claimed.`);
  }
  const owner = currentOwner(workspacePath, threadPath);
  if (owner) {
    throw new Error(`Thread already claimed by "${owner}". Wait for release or use a different thread.`);
  }
  append(workspacePath, actor, "claim", threadPath, "thread");
  const fieldUpdates = { status: "active", owner: actor };
  return update(workspacePath, threadPath, fieldUpdates, undefined, actor);
}
function release(workspacePath, threadPath, actor, reason) {
  // Release an owned thread back to "open" (owner-only); the optional
  // reason is recorded in the ledger entry.
  const thread = read(workspacePath, threadPath);
  if (!thread) throw new Error(`Thread not found: ${threadPath}`);
  assertOwner(workspacePath, threadPath, actor);
  const detail = reason ? { reason } : undefined;
  append(workspacePath, actor, "release", threadPath, "thread", detail);
  const fieldUpdates = { status: "open", owner: null };
  return update(workspacePath, threadPath, fieldUpdates, undefined, actor);
}
function block(workspacePath, threadPath, actor, blockedBy, reason) {
  // Block a thread on another ref: validate the status transition, log it,
  // and record `blockedBy` as a dependency (deduplicated).
  const thread = read(workspacePath, threadPath);
  if (!thread) throw new Error(`Thread not found: ${threadPath}`);
  assertTransition(thread.fields.status, "blocked");
  const detail = { blocked_by: blockedBy };
  if (reason) detail.reason = reason;
  append(workspacePath, actor, "block", threadPath, "thread", detail);
  const deps = thread.fields.deps ?? [];
  const nextDeps = deps.includes(blockedBy) ? deps : [...deps, blockedBy];
  return update(workspacePath, threadPath, { status: "blocked", deps: nextDeps }, undefined, actor);
}
function unblock(workspacePath, threadPath, actor) {
  // Move a blocked thread back to "active" (transition-checked, logged).
  const thread = read(workspacePath, threadPath);
  if (!thread) throw new Error(`Thread not found: ${threadPath}`);
  assertTransition(thread.fields.status, "active");
  append(workspacePath, actor, "unblock", threadPath, "thread");
  return update(workspacePath, threadPath, { status: "active" }, undefined, actor);
}
function done(workspacePath, threadPath, actor, output) {
  // Complete a thread (owner-only, transition-checked). Optional output is
  // logged in the ledger and appended to the body under "## Output".
  const thread = read(workspacePath, threadPath);
  if (!thread) throw new Error(`Thread not found: ${threadPath}`);
  assertTransition(thread.fields.status, "done");
  assertOwner(workspacePath, threadPath, actor);
  append(workspacePath, actor, "done", threadPath, "thread", output ? { output } : undefined);
  let newBody = thread.body;
  if (output) {
    newBody = `${thread.body}
## Output
${output}
`;
  }
  return update(workspacePath, threadPath, { status: "done" }, newBody, actor);
}
function cancel(workspacePath, threadPath, actor, reason) {
  // Cancel a thread (transition-checked), clearing ownership; the optional
  // reason is recorded in the ledger entry.
  const thread = read(workspacePath, threadPath);
  if (!thread) throw new Error(`Thread not found: ${threadPath}`);
  assertTransition(thread.fields.status, "cancelled");
  const detail = reason ? { reason } : undefined;
  append(workspacePath, actor, "cancel", threadPath, "thread", detail);
  const fieldUpdates = { status: "cancelled", owner: null };
  return update(workspacePath, threadPath, fieldUpdates, undefined, actor);
}
function decompose(workspacePath, parentPath, subthreads, actor) {
  // Split a parent thread into child threads (inheriting its space), link
  // them into the parent body under "## Sub-threads", and log the split.
  const parent = read(workspacePath, parentPath);
  if (!parent) throw new Error(`Thread not found: ${parentPath}`);
  const parentSpace = typeof parent.fields.space === "string" ? parent.fields.space : undefined;
  const created = subthreads.map((sub) =>
    createThread(workspacePath, sub.title, sub.goal, actor, {
      parent: parentPath,
      deps: sub.deps,
      space: parentSpace
    })
  );
  const childRefs = created.map((child) => `[[${child.path}]]`);
  const decomposeNote = `
## Sub-threads
${childRefs.map((r) => `- ${r}`).join("\n")}
`;
  update(workspacePath, parentPath, {}, parent.body + decomposeNote, actor);
  append(workspacePath, actor, "decompose", parentPath, "thread", {
    children: created.map((child) => child.path)
  });
  return created;
}
function assertTransition(from, to) {
  // Enforce the thread status state machine; throws on an illegal move.
  const allowed = THREAD_STATUS_TRANSITIONS[from];
  if (allowed?.includes(to)) return;
  throw new Error(`Invalid transition: "${from}" \u2192 "${to}". Allowed: ${allowed?.join(", ") ?? "none"}`);
}
function assertOwner(workspacePath, threadPath, actor) {
  // Unowned threads pass; owned threads require the acting agent to match.
  const owner = currentOwner(workspacePath, threadPath);
  if (!owner || owner === actor) return;
  throw new Error(`Thread is owned by "${owner}", not "${actor}". Only the owner can perform this action.`);
}
function compareThreadPriority(a, b) {
  // Sort comparator for threads: urgent < high < medium < low < unknown,
  // ties broken by creation time (oldest first; unparsable dates sort last).
  const ranks = new Map([
    ["urgent", 0],
    ["high", 1],
    ["medium", 2],
    ["low", 3]
  ]);
  const rank = (value) => {
    const normalized = String(value ?? "medium").toLowerCase();
    return ranks.get(normalized) ?? 4;
  };
  const byPriority = rank(a.fields.priority) - rank(b.fields.priority);
  if (byPriority !== 0) return byPriority;
  const createdTime = (thread) => {
    const parsed = Date.parse(String(thread.fields.created ?? ""));
    return Number.isNaN(parsed) ? Number.MAX_SAFE_INTEGER : parsed;
  };
  return createdTime(a) - createdTime(b);
}
function normalizeThreadRef(value) {
  // Canonical form of a dependency ref: strip [[wiki-link]] brackets;
  // "external/..." refs pass through untouched; local refs gain a ".md"
  // suffix. Blank/nullish input stays "".
  const raw = String(value ?? "").trim();
  if (raw.length === 0) return raw;
  const unwrapped = raw.startsWith("[[") && raw.endsWith("]]") ? raw.slice(2, -2) : raw;
  if (unwrapped.startsWith("external/") || unwrapped.endsWith(".md")) {
    return unwrapped;
  }
  return `${unwrapped}.md`;
}
function normalizeWorkspaceRef(value) {
  // Canonical form of a workspace (space) ref: strip [[wiki-link]]
  // brackets and ensure a ".md" suffix; nullish/blank input becomes "".
  const raw = String(value ?? "").trim();
  if (raw.length === 0) return "";
  let ref = raw;
  if (ref.startsWith("[[") && ref.endsWith("]]")) ref = ref.slice(2, -2);
  if (!ref.endsWith(".md")) ref = `${ref}.md`;
  return ref;
}
// src/workspace.ts
// Public surface of the workspace bootstrap module; __export installs lazy
// getters so these names resolve to the hoisted declarations below.
var workspace_exports = {};
__export(workspace_exports, {
  initWorkspace: () => initWorkspace,
  isWorkgraphWorkspace: () => isWorkgraphWorkspace,
  workspaceConfigPath: () => workspaceConfigPath
});
import fs5 from "fs";
import path5 from "path";
// src/bases.ts
// Public surface of the .base generation module (registry manifest sync
// and base-file rendering); __export installs lazy getters for the names.
var bases_exports = {};
__export(bases_exports, {
  generateBasesFromPrimitiveRegistry: () => generateBasesFromPrimitiveRegistry,
  primitiveRegistryManifestPath: () => primitiveRegistryManifestPath,
  readPrimitiveRegistryManifest: () => readPrimitiveRegistryManifest,
  syncPrimitiveRegistryManifest: () => syncPrimitiveRegistryManifest
});
import fs4 from "fs";
import path4 from "path";
import YAML from "yaml";
// Workspace-relative location of the generated primitive-registry manifest
// and the default output directory for generated .base files.
var REGISTRY_MANIFEST_FILE = ".clawvault/primitive-registry.yaml";
var DEFAULT_BASES_DIR = ".clawvault/bases";
function primitiveRegistryManifestPath(workspacePath) {
  // Absolute path of the primitive-registry YAML manifest.
  const manifestPath = path4.join(workspacePath, REGISTRY_MANIFEST_FILE);
  return manifestPath;
}
function readPrimitiveRegistryManifest(workspacePath) {
  // Parse the YAML manifest; throws when it has not been synced yet.
  const manifestPath = primitiveRegistryManifestPath(workspacePath);
  if (!fs4.existsSync(manifestPath)) {
    throw new Error(`Primitive registry manifest not found: ${manifestPath}`);
  }
  return YAML.parse(fs4.readFileSync(manifestPath, "utf-8"));
}
function syncPrimitiveRegistryManifest(workspacePath) {
  // Project the active registry into the YAML manifest consumed by base
  // generation. Primitives are sorted by name for stable diffs; optional
  // field attributes are emitted only when set.
  const registry = loadRegistry(workspacePath);
  const toManifestField = ([name, field]) => ({
    name,
    type: field.type,
    ...field.required ? { required: true } : {},
    ...field.description ? { description: field.description } : {}
  });
  const primitives = Object.values(registry.types)
    .map((primitive) => ({
      name: primitive.name,
      directory: primitive.directory,
      canonical: primitive.builtIn,
      builtIn: primitive.builtIn,
      fields: Object.entries(primitive.fields).map(toManifestField)
    }))
    .sort((a, b) => a.name.localeCompare(b.name));
  const manifest = {
    version: 1,
    generatedAt: new Date().toISOString(),
    primitives
  };
  const manifestPath = primitiveRegistryManifestPath(workspacePath);
  ensureDirectory(path4.dirname(manifestPath));
  fs4.writeFileSync(manifestPath, YAML.stringify(manifest), "utf-8");
  return manifest;
}
function generateBasesFromPrimitiveRegistry(workspacePath, options = {}) {
  // Render one .base file per primitive in the manifest (canonical ones
  // only, unless options.includeNonCanonical). Returns workspace-relative
  // output paths, sorted.
  const manifest = readPrimitiveRegistryManifest(workspacePath);
  const includeAll = options.includeNonCanonical === true;
  const outputDirectory = path4.join(workspacePath, options.outputDirectory ?? DEFAULT_BASES_DIR);
  ensureDirectory(outputDirectory);
  const toRel = (abs) => path4.relative(workspacePath, abs).replace(/\\/g, "/");
  const generated = [];
  for (const primitive of manifest.primitives) {
    if (!includeAll && !primitive.canonical) continue;
    const absBasePath = path4.join(outputDirectory, `${primitive.name}.base`);
    fs4.writeFileSync(absBasePath, renderBaseFile(primitive), "utf-8");
    generated.push(toRel(absBasePath));
  }
  return {
    outputDirectory: toRel(outputDirectory),
    generated: generated.sort()
  };
}
function renderBaseFile(primitive) {
  // Build the YAML body of a .base view definition for one primitive: a
  // folder source plus a single table view listing each field name once
  // (first occurrence wins) after the "file.name" column.
  const seen = new Set();
  const columnFields = [];
  for (const field of primitive.fields) {
    if (seen.has(field.name)) continue;
    seen.add(field.name);
    columnFields.push(field.name);
  }
  const baseDoc = {
    id: primitive.name,
    title: `${titleCase(primitive.name)} Base`,
    source: {
      type: "folder",
      path: primitive.directory,
      extension: "md"
    },
    views: [
      {
        id: "table",
        type: "table",
        name: "All",
        columns: ["file.name", ...columnFields]
      }
    ]
  };
  return YAML.stringify(baseDoc);
}
function ensureDirectory(dirPath) {
  // Create the directory (and any missing parents) when absent.
  if (fs4.existsSync(dirPath)) return;
  fs4.mkdirSync(dirPath, { recursive: true });
}
function titleCase(value) {
  // "command-center" / "command_center" -> "Command Center".
  const segments = value.split(/[-_]/g).filter(Boolean);
  const capitalized = segments.map((segment) => segment[0].toUpperCase() + segment.slice(1));
  return capitalized.join(" ");
}
// src/workspace.ts
// Marker/config file whose presence identifies a workgraph workspace root.
var WORKGRAPH_CONFIG_FILE = ".workgraph.json";
function workspaceConfigPath(workspacePath) {
  // Absolute path of the workspace marker/config file (.workgraph.json).
  const configPath = path5.join(workspacePath, WORKGRAPH_CONFIG_FILE);
  return configPath;
}
function isWorkgraphWorkspace(workspacePath) {
  // A directory is a workgraph workspace iff its config file exists.
  const configPath = workspaceConfigPath(workspacePath);
  return fs5.existsSync(configPath);
}
function initWorkspace(targetPath, options = {}) {
  // Bootstrap a workgraph workspace at targetPath: seed the type registry,
  // sync the primitive manifest, create per-type directories, write the
  // .workgraph.json config, and (optionally) a README and .base files.
  // Throws if the directory is already initialized. Returns a summary of
  // everything created. Ordering matters: registry must exist on disk
  // before the manifest sync, and the manifest before base generation.
  const resolvedPath = path5.resolve(targetPath);
  const configPath = workspaceConfigPath(resolvedPath);
  if (fs5.existsSync(configPath)) {
    throw new Error(`Workgraph workspace already initialized at ${resolvedPath}`);
  }
  // Tracks only directories actually created, for the returned summary.
  const createdDirectories = [];
  ensureDir(resolvedPath, createdDirectories);
  ensureDir(path5.join(resolvedPath, ".clawvault"), createdDirectories);
  // loadRegistry falls back to the built-in seed when no file exists yet;
  // saving it materializes the registry on disk.
  const registry = loadRegistry(resolvedPath);
  saveRegistry(resolvedPath, registry);
  syncPrimitiveRegistryManifest(resolvedPath);
  if (options.createTypeDirs !== false) {
    const types = listTypes(resolvedPath);
    for (const typeDef of types) {
      ensureDir(path5.join(resolvedPath, typeDef.directory), createdDirectories);
    }
  }
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const config = {
    name: options.name ?? path5.basename(resolvedPath),
    version: "1.0.0",
    mode: "workgraph",
    createdAt: now,
    updatedAt: now
  };
  fs5.writeFileSync(configPath, JSON.stringify(config, null, 2) + "\n", "utf-8");
  if (options.createReadme !== false) {
    writeReadmeIfMissing(resolvedPath, config.name);
  }
  const bases = options.createBases === false ? { generated: [] } : generateBasesFromPrimitiveRegistry(resolvedPath);
  return {
    workspacePath: resolvedPath,
    configPath,
    config,
    createdDirectories,
    seededTypes: listTypes(resolvedPath).map((t) => t.name),
    generatedBases: bases.generated,
    primitiveRegistryManifestPath: ".clawvault/primitive-registry.yaml"
  };
}
// Create dirPath if missing and append newly created paths to
// createdDirectories; pre-existing directories are not recorded.
function ensureDir(dirPath, createdDirectories) {
  if (!fs5.existsSync(dirPath)) {
    fs5.mkdirSync(dirPath, { recursive: true });
    createdDirectories.push(dirPath);
  }
}
// Write a starter README into the workspace root unless one already exists.
// Never overwrites user content.
function writeReadmeIfMissing(workspacePath, name) {
  const target = path5.join(workspacePath, "README.md");
  if (fs5.existsSync(target)) return;
  const content = [
    `# ${name}`,
    "Agent-first workgraph workspace for multi-agent coordination.",
    "## Quickstart",
    "```bash",
    "workgraph thread list --json",
    "workgraph thread next --claim --actor agent-a --json",
    "workgraph ledger show --count 20 --json",
    "```",
    ""
  ].join("\n");
  fs5.writeFileSync(target, content, "utf-8");
}
// src/command-center.ts
// Bundler-generated export registry: maps the command-center module's public
// name to the local definition below.
var command_center_exports = {};
__export(command_center_exports, {
generateCommandCenter: () => generateCommandCenter
});
import fs6 from "fs";
import path6 from "path";
// Generate (or refresh) the "Command Center" markdown dashboard for a
// workspace: a snapshot of thread statuses, active claims, and recent ledger
// events. Writes the file, records the write in the ledger, and returns the
// rendered content plus summary stats.
// options: actor (ledger attribution, default "system"), recentCount (ledger
// events to include, default 15), outputPath (workspace-relative, default
// "Command Center.md").
function generateCommandCenter(workspacePath, options = {}) {
const actor = options.actor ?? "system";
const recentCount = options.recentCount ?? 15;
const relOutputPath = options.outputPath ?? "Command Center.md";
// Throws if the requested output path would escape the workspace root.
const absOutputPath = resolvePathWithinWorkspace(workspacePath, relOutputPath);
// Normalize to forward slashes so the ledger target is OS-independent.
const normalizedOutputPath = path6.relative(workspacePath, absOutputPath).replace(/\\/g, "/");
const allThreads = list(workspacePath, "thread");
// Bucket threads by their front-matter status field.
const openThreads2 = allThreads.filter((thread) => thread.fields.status === "open");
const activeThreads2 = allThreads.filter((thread) => thread.fields.status === "active");
const blockedThreads2 = allThreads.filter((thread) => thread.fields.status === "blocked");
const doneThreads = allThreads.filter((thread) => thread.fields.status === "done");
const claims = allClaims(workspacePath);
const recentEvents = recent(workspacePath, recentCount);
const content = renderCommandCenter({
generatedAt: (/* @__PURE__ */ new Date()).toISOString(),
openThreads: openThreads2,
activeThreads: activeThreads2,
blockedThreads: blockedThreads2,
doneThreads,
// claims maps target -> owner (entries()/size used below); flatten for rendering.
claims: [...claims.entries()].map(([target, owner]) => ({ target, owner })),
recentEvents
});
const parentDir = path6.dirname(absOutputPath);
if (!fs6.existsSync(parentDir)) fs6.mkdirSync(parentDir, { recursive: true });
// Check existence before writing so the ledger op reflects create vs update.
const existed = fs6.existsSync(absOutputPath);
fs6.writeFileSync(absOutputPath, content, "utf-8");
// Ledger entry is appended only after the file is safely on disk.
append(
workspacePath,
actor,
existed ? "update" : "create",
normalizedOutputPath,
"command-center",
{
generated: true,
open_threads: openThreads2.length,
active_claims: claims.size,
recent_events: recentEvents.length
}
);
return {
outputPath: normalizedOutputPath,
stats: {
totalThreads: allThreads.length,
openThreads: openThreads2.length,
activeThreads: activeThreads2.length,
blockedThreads: blockedThreads2.length,
doneThreads: doneThreads.length,
activeClaims: claims.size,
recentEvents: recentEvents.length
},
content
};
}
// Resolve outputPath against workspacePath, rejecting any result that would
// escape the workspace root (e.g. via ".." segments or an absolute path).
function resolvePathWithinWorkspace(workspacePath, outputPath) {
  const root = path6.resolve(workspacePath);
  const candidate = path6.resolve(root, outputPath);
  const inside = candidate === root || candidate.startsWith(root + path6.sep);
  if (!inside) {
    throw new Error(`Invalid command-center output path: ${outputPath}`);
  }
  return candidate;
}
// Render the Command Center markdown document from pre-aggregated thread,
// claim, and ledger data. Pure: the same input object yields the same string.
function renderCommandCenter(input) {
  const out = [];
  const emit = (...rows) => out.push(...rows);
  emit("# Workgraph Command Center", "", `Generated: ${input.generatedAt}`, "");
  emit(
    "## Thread Status",
    "",
    `- Open: ${input.openThreads.length}`,
    `- Active: ${input.activeThreads.length}`,
    `- Blocked: ${input.blockedThreads.length}`,
    `- Done: ${input.doneThreads.length}`,
    ""
  );
  emit("## Open Threads", "", "| Priority | Title | Path |", "|---|---|---|");
  if (input.openThreads.length > 0) {
    for (const thread of input.openThreads) {
      emit(`| ${String(thread.fields.priority ?? "medium")} | ${String(thread.fields.title ?? "Untitled")} | \`${thread.path}\` |`);
    }
  } else {
    emit("| - | None | - |");
  }
  emit("");
  emit("## Active Claims", "");
  if (input.claims.length > 0) {
    for (const claim2 of input.claims) {
      emit(`- ${claim2.owner} -> \`${claim2.target}\``);
    }
  } else {
    emit("- None");
  }
  emit("");
  emit("## Blocked Threads", "");
  if (input.blockedThreads.length > 0) {
    for (const thread of input.blockedThreads) {
      const deps = Array.isArray(thread.fields.deps) ? thread.fields.deps.join(", ") : "";
      const suffix = deps ? ` blocked by: ${deps}` : "";
      emit(`- ${String(thread.fields.title ?? thread.path)} (\`${thread.path}\`)${suffix}`);
    }
  } else {
    emit("- None");
  }
  emit("");
  emit("## Recent Ledger Activity", "");
  if (input.recentEvents.length > 0) {
    for (const event of input.recentEvents) {
      emit(`- ${event.ts} ${event.op} ${event.actor} -> \`${event.target}\``);
    }
  } else {
    emit("- No activity");
  }
  emit("");
  return out.join("\n");
}
// src/skill.ts
// Bundler-generated export registry: maps the skill module's public names to
// the local definitions below.
var skill_exports = {};
__export(skill_exports, {
listSkills: () => listSkills,
loadSkill: () => loadSkill,
promoteSkill: () => promoteSkill,
proposeSkill: () => proposeSkill,
writeSkill: () => writeSkill
});
// Create or update a skill primitive addressed by its title-derived path.
// Precedence for each field: explicit option > existing front-matter > default.
function writeSkill(workspacePath, title, body, actor, options = {}) {
  const relPath = pathForSkillTitle(title);
  const current = read(workspacePath, relPath);
  const status = options.status ?? current?.fields.status ?? "draft";
  if (!current) {
    const fields = {
      title,
      owner: options.owner ?? actor,
      version: options.version ?? "0.1.0",
      status,
      distribution: options.distribution ?? "tailscale-shared-vault",
      tailscale_path: options.tailscalePath,
      reviewers: options.reviewers ?? [],
      tags: options.tags ?? []
    };
    return create(workspacePath, "skill", fields, body, actor);
  }
  const merged = {
    title,
    owner: options.owner ?? current.fields.owner ?? actor,
    version: options.version ?? current.fields.version ?? "0.1.0",
    status,
    distribution: options.distribution ?? current.fields.distribution ?? "tailscale-shared-vault",
    tailscale_path: options.tailscalePath ?? current.fields.tailscale_path,
    reviewers: options.reviewers ?? current.fields.reviewers ?? [],
    tags: options.tags ?? current.fields.tags ?? []
  };
  return update(workspacePath, current.path, merged, body, actor);
}
// Load a skill primitive by reference, failing loudly when it is missing or
// when the referenced primitive is not of type "skill".
function loadSkill(workspacePath, skillRef) {
  const relPath = normalizeSkillRef(skillRef);
  const record = read(workspacePath, relPath);
  if (!record) {
    throw new Error(`Skill not found: ${skillRef}`);
  }
  if (record.type !== "skill") {
    throw new Error(`Target is not a skill primitive: ${skillRef}`);
  }
  return record;
}
// List skill primitives, optionally narrowed to a (truthy) status value.
function listSkills(workspacePath, options = {}) {
  const skills = list(workspacePath, "skill");
  const wanted = options.status;
  if (!wanted) return skills;
  return skills.filter((skill) => skill.fields.status === wanted);
}
// Move a skill into "proposed" status. Unless a proposal thread is supplied
// (or thread creation is disabled), a review thread is created and linked.
function proposeSkill(workspacePath, skillRef, actor, options = {}) {
  const skill = loadSkill(workspacePath, skillRef);
  let reviewThread = options.proposalThread;
  if (!reviewThread && options.createThreadIfMissing !== false) {
    const thread = createThread(
      workspacePath,
      `Review skill: ${String(skill.fields.title)}`,
      `Review and approve skill ${skill.path} for activation.`,
      actor,
      {
        priority: "medium",
        space: options.space,
        context_refs: [skill.path]
      }
    );
    reviewThread = thread.path;
  }
  const patch = {
    status: "proposed",
    proposal_thread: reviewThread ?? skill.fields.proposal_thread,
    proposed_at: (/* @__PURE__ */ new Date()).toISOString(),
    reviewers: options.reviewers ?? skill.fields.reviewers ?? []
  };
  return update(workspacePath, skill.path, patch, void 0, actor);
}
// Promote a skill to "active". The version is taken from options when given;
// otherwise the current version's patch component is bumped.
function promoteSkill(workspacePath, skillRef, actor, options = {}) {
  const skill = loadSkill(workspacePath, skillRef);
  const version = options.version ?? bumpPatchVersion(String(skill.fields.version ?? "0.1.0"));
  const patch = {
    status: "active",
    version,
    promoted_at: (/* @__PURE__ */ new Date()).toISOString()
  };
  return update(workspacePath, skill.path, patch, void 0, actor);
}
// Map a human-readable title to its canonical skill file path
// ("skills/<slug>.md"): lowercase, non-alphanumeric runs collapsed to "-",
// edge dashes trimmed, capped at 80 characters.
function pathForSkillTitle(title) {
  const lowered = title.toLowerCase();
  const dashed = lowered.replace(/[^a-z0-9]+/g, "-");
  const trimmed = dashed.replace(/^-|-$/g, "");
  return `skills/${trimmed.slice(0, 80)}.md`;
}
// Normalize a user-supplied skill reference to a workspace-relative path.
// References containing "/" are treated as paths and only gain a ".md"
// extension; bare names are slugified like pathForSkillTitle does.
function normalizeSkillRef(skillRef) {
  const trimmed = skillRef.trim();
  if (trimmed === "") return trimmed;
  if (trimmed.includes("/")) {
    return trimmed.endsWith(".md") ? trimmed : `${trimmed}.md`;
  }
  const slug = trimmed
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, "-")
    .replace(/^-|-$/g, "")
    .slice(0, 80);
  return `skills/${slug}.md`;
}
// Increment the patch component of a strict "major.minor.patch" version
// string. Inputs that do not match plain semver reset to "0.1.0".
function bumpPatchVersion(version) {
  const parts = /^(\d+)\.(\d+)\.(\d+)$/.exec(version);
  if (parts === null) return "0.1.0";
  // Numeric round-trip normalizes any leading zeros, matching parseInt.
  const major = Number.parseInt(parts[1], 10);
  const minor = Number.parseInt(parts[2], 10);
  const nextPatch = Number.parseInt(parts[3], 10) + 1;
  return `${major}.${minor}.${nextPatch}`;
}
// Named exports for this bundle chunk. The *_exports registries and
// THREAD_STATUS_TRANSITIONS are defined earlier in the chunk or imported
// from the sibling chunk by the bundler.
export {
THREAD_STATUS_TRANSITIONS,
ledger_exports,
registry_exports,
store_exports,
thread_exports,
bases_exports,
workspace_exports,
command_center_exports,
skill_exports
};