| // src/workflow-files.ts | ||
| import path from "path"; | ||
// Repo-relative directories for workflow bookkeeping files.
// `const` instead of `var`: these are never reassigned.
const STATE_DIR = ".krow/state/workflows"; // per-workflow JSON state files
const TASKS_DIR = ".krow/tasks"; // per-workflow task packet directories
const RELAYS_DIR = ".krow/relays"; // per-unit relay (handoff) notes
// Resolve the workspace root (default: current working directory) to an absolute path.
function absoluteRoot(rootDir = process.cwd()) {
  const resolved = path.resolve(rootDir);
  return resolved;
}
// Repo-relative path of a workflow's persisted JSON state file.
function workflowStatePath(workflowId) {
  const fileName = `${workflowId}.json`;
  return `${STATE_DIR}/${fileName}`;
}
// Absolute path of a workflow's persisted JSON state file under rootDir.
function absoluteWorkflowStatePath(workflowId, rootDir = process.cwd()) {
  const relative = workflowStatePath(workflowId);
  return path.join(absoluteRoot(rootDir), relative);
}
// Repo-relative root directory holding a workflow's task packets.
function workflowTaskRootPath(workflowId) {
  return [TASKS_DIR, workflowId].join("/");
}
// Repo-relative path of the workflow's human-readable task index document.
function workflowTaskIndexPath(workflowId) {
  const root = workflowTaskRootPath(workflowId);
  return `${root}/index.md`;
}
// Repo-relative root directory holding a workflow's relay notes.
function workflowRelayRootPath(workflowId) {
  return [RELAYS_DIR, workflowId].join("/");
}
// Repo-relative directory holding one unit's task files.
function unitTaskDirPath(workflowId, unitId) {
  const root = workflowTaskRootPath(workflowId);
  return `${root}/${unitId}`;
}
// Repo-relative path of a unit's brief document.
function unitBriefPath(workflowId, unitId) {
  const dir = unitTaskDirPath(workflowId, unitId);
  return `${dir}/brief.md`;
}
// Repo-relative path of a unit's context document.
function unitContextPath(workflowId, unitId) {
  const dir = unitTaskDirPath(workflowId, unitId);
  return `${dir}/context.md`;
}
// Repo-relative path of a unit's status document.
function unitStatusPath(workflowId, unitId) {
  const dir = unitTaskDirPath(workflowId, unitId);
  return `${dir}/status.md`;
}
// Repo-relative path of a unit's result document.
function unitResultPath(workflowId, unitId) {
  const dir = unitTaskDirPath(workflowId, unitId);
  return `${dir}/result.md`;
}
// Repo-relative path of a unit's baton (handoff) document.
function unitBatonPath(workflowId, unitId) {
  const dir = unitTaskDirPath(workflowId, unitId);
  return `${dir}/baton.md`;
}
// Repo-relative directory for a unit's produced artifacts.
function unitArtifactsDirPath(workflowId, unitId) {
  const dir = unitTaskDirPath(workflowId, unitId);
  return `${dir}/artifacts`;
}
// Repo-relative path of a unit's relay note (read by dependent units).
function unitRelayPath(workflowId, unitId) {
  const root = workflowRelayRootPath(workflowId);
  return `${root}/${unitId}.md`;
}
// Join a repo-relative path onto the resolved root directory.
function absolutePath(relativePath, rootDir = process.cwd()) {
  const root = absoluteRoot(rootDir);
  return path.join(root, relativePath);
}
| // src/workflow-graph.ts | ||
// Loosely cast an unknown stored value to a verify-output record.
// Returns undefined for null and non-object values; objects pass through as-is.
function asVerifyOutput(value) {
  const isObjectLike = Boolean(value) && typeof value === "object";
  return isObjectLike ? value : void 0;
}
// Dependency ids of a unit: the non-empty string entries of unit.dependsOn,
// or [] when dependsOn is missing or not an array.
function unitDependencies(unit) {
  const raw = unit.dependsOn;
  if (!Array.isArray(raw)) {
    return [];
  }
  const deps = [];
  for (const entry of raw) {
    if (typeof entry === "string" && entry.length > 0) {
      deps.push(entry);
    }
  }
  return deps;
}
// Infer how the unit graph should run when no strategy was recorded:
// - zero or one unit: "single"
// - more than one non-integration unit that is not explicitly marked
//   non-parallelizable: "parallel_fanout"
// - otherwise "serial" when any dependencies exist, else "single".
function inferGraphStrategy(units) {
  if (units.length <= 1) {
    return "single";
  }
  const parallelRootCount = units.filter(
    (unit) => unit.kind !== "integration" && unit.parallelizable !== false
  ).length;
  if (parallelRootCount > 1) {
    return "parallel_fanout";
  }
  const anyDependencies = units.some((unit) => unitDependencies(unit).length > 0);
  return anyDependencies ? "serial" : "single";
}
// A unit counts as completed once its recorded verify output has passed === true.
function isUnitCompleted(state, unitId) {
  const recorded = state.outputs[unitId];
  const verify = asVerifyOutput(recorded?.verify);
  return verify?.passed === true;
}
// Ids of every unit whose verify output has passed.
function completedUnitIds(state) {
  const ids = [];
  for (const unit of state.units) {
    if (isUnitCompleted(state, unit.id)) {
      ids.push(unit.id);
    }
  }
  return ids;
}
// Units that have not yet passed verification.
function pendingUnits(state) {
  const notDone = (unit) => !isUnitCompleted(state, unit.id);
  return state.units.filter(notDone);
}
// Units eligible to start now: not yet completed, and every dependency completed.
function readyUnits(state) {
  const done = new Set(completedUnitIds(state));
  const isReady = (unit) => {
    if (done.has(unit.id)) {
      return false;
    }
    return unitDependencies(unit).every((dependencyId) => done.has(dependencyId));
  };
  return state.units.filter(isReady);
}
// Index (into state.units) of the first ready unit, or undefined when none is ready.
function nextReadyUnitIndex(state) {
  const ready = new Set(readyUnits(state).map((unit) => unit.id));
  for (let index = 0; index < state.units.length; index += 1) {
    if (ready.has(state.units[index].id)) {
      return index;
    }
  }
  return void 0;
}
// Normalized snapshot of a unit with every optional field defaulted:
// kind/priority/effort get canonical defaults, list fields become [] when
// absent, boolean flags are true only when explicitly true.
function compactUnit(unit) {
  const listOrEmpty = (value) => (Array.isArray(value) ? value : []);
  return {
    id: unit.id,
    title: unit.title,
    kind: unit.kind ?? "work",
    dependsOn: unitDependencies(unit),
    parallelizable: unit.parallelizable === true,
    scope: listOrEmpty(unit.scope),
    ownership: listOrEmpty(unit.ownership),
    priority: unit.priority ?? "medium",
    estimatedEffort: unit.estimatedEffort ?? "medium",
    mergeRequired: unit.mergeRequired === true,
    sharedRisks: listOrEmpty(unit.sharedRisks),
    acceptanceCriteria: listOrEmpty(unit.acceptanceCriteria)
  };
}
// Map each unit id to the length of the longest downstream chain starting at
// it (itself included): units nothing depends on get 1, a unit feeding such a
// leaf gets 2, and so on. Used as a critical-path-length heuristic.
// Assumes the unit graph is acyclic (validated elsewhere); a cycle would
// recurse without terminating.
// Fix: build the reverse-edge lists by pushing in place instead of re-copying
// the whole array per edge (the spread rebuild was accidentally quadratic).
function buildDownstreamDepths(units) {
  // unit id -> ids of units that depend on it (reverse edges).
  const dependents = /* @__PURE__ */ new Map();
  for (const unit of units) {
    for (const dependencyId of unitDependencies(unit)) {
      const children = dependents.get(dependencyId);
      if (children) {
        children.push(unit.id);
      } else {
        dependents.set(dependencyId, [unit.id]);
      }
    }
  }
  const memo = /* @__PURE__ */ new Map();
  // Memoized depth: 1 for leaves, else 1 + max depth over dependents.
  function visit(unitId) {
    if (memo.has(unitId)) {
      return memo.get(unitId);
    }
    const childIds = dependents.get(unitId) ?? [];
    const depth = childIds.length === 0 ? 1 : 1 + Math.max(...childIds.map(visit));
    memo.set(unitId, depth);
    return depth;
  }
  for (const unit of units) {
    visit(unit.id);
  }
  return memo;
}
// Assemble the per-run context handed to the runner: the current unit, what
// is ready / blocked / pending, and document references for every unit.
function buildRunContext(state) {
  const current = state.units[state.currentUnitIndex];
  const ready = readyUnits(state);
  const completed = completedUnitIds(state);
  const pending = pendingUnits(state);
  const depths = buildDownstreamDepths(state.units);
  const readyIds = new Set(ready.map((unit) => unit.id));
  // Compact unit plus critical-path depth and per-document references.
  const enrichUnit = (unit) => ({
    ...compactUnit(unit),
    criticalPathLength: depths.get(unit.id) ?? 1,
    packetRef: unitBriefPath(state.workflowId, unit.id),
    statusRef: unitStatusPath(state.workflowId, unit.id),
    resultRef: unitResultPath(state.workflowId, unit.id),
    batonRef: unitBatonPath(state.workflowId, unit.id),
    relayRef: unitRelayPath(state.workflowId, unit.id)
  });
  const dependencyRelayRefs = current
    ? unitDependencies(current).map((dependencyId) => unitRelayPath(state.workflowId, dependencyId))
    : [];
  return {
    graphStrategy: state.graphStrategy ?? inferGraphStrategy(state.units),
    workflowTaskIndexRef: workflowTaskIndexPath(state.workflowId),
    currentUnit: current ? enrichUnit(current) : void 0,
    readyUnits: ready.map(enrichUnit),
    readySiblingUnitIds: ready.filter((unit) => unit.id !== current?.id).map((unit) => unit.id),
    completedUnitIds: completed,
    blockedUnitIds: pending.filter((unit) => !readyIds.has(unit.id)).map((unit) => unit.id),
    dependencyRelayRefs,
    pendingUnitIds: pending.map((unit) => unit.id),
    remainingUnitCount: pending.length
  };
}
| // src/validators.ts | ||
// Canonical workflow lifecycle statuses persisted in state files.
// `const` instead of `var`: these are never reassigned.
const validStatuses = [
  "phase_clarify",
  "clarify_pending",
  "phase_execute",
  "phase_verify",
  "phase_capture",
  "completed",
  "blocked",
  "stopped"
];
// Canonical phase names.
const validPhases = ["clarify", "execute", "verify", "capture"];
// Keys a verify `score` object must provide; each value must be 0..100.
const validScoreKeys = ["accuracy", "completeness", "consistency"];
// True for plain-object values: non-null, non-array, typeof "object".
function isRecord(value) {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
// True for strings containing at least one non-whitespace character.
function isNonEmptyString(value) {
  if (typeof value !== "string") {
    return false;
  }
  return value.trim().length > 0;
}
// True when value is an array whose every entry is a non-empty string.
// Note: vacuously true for an empty array.
function isStringArray(value) {
  return Array.isArray(value) && value.every((entry) => isNonEmptyString(entry));
}
// Validate the `units` array of a workflow definition.
// Runs four passes: (1) per-unit field shape checks, (2) duplicate-id
// detection, (3) dependsOn reference checks, and (4) — only when all prior
// passes found nothing — a DFS cycle check over dependsOn edges.
// Returns human-readable issue strings; an empty array means valid.
function validateWorkflowUnits(value) {
  const issues = [];
  if (!Array.isArray(value) || value.length === 0) {
    return ["units must be a non-empty array"];
  }
  const unitIds = [];
  // Pass 1: shape-check every unit; only well-formed ids feed later passes.
  value.forEach((unit, index) => {
    const path3 = `units[${index}]`;
    if (!isRecord(unit)) {
      issues.push(`${path3} must be an object`);
      return;
    }
    if (!isNonEmptyString(unit.id)) {
      issues.push(`${path3}.id must be a non-empty string`);
    } else {
      unitIds.push(unit.id);
    }
    if (!isNonEmptyString(unit.title)) {
      issues.push(`${path3}.title must be a non-empty string`);
    }
    // The remaining fields are optional, but must be well-formed when present.
    if (unit.kind !== void 0 && unit.kind !== "work" && unit.kind !== "integration") {
      issues.push(`${path3}.kind must be 'work' or 'integration' when present`);
    }
    if (unit.request !== void 0 && !isNonEmptyString(unit.request)) {
      issues.push(`${path3}.request must be a non-empty string when present`);
    }
    if (unit.scope !== void 0 && !isStringArray(unit.scope)) {
      issues.push(`${path3}.scope must be an array of non-empty strings when present`);
    }
    if (unit.ownership !== void 0 && !isStringArray(unit.ownership)) {
      issues.push(`${path3}.ownership must be an array of non-empty strings when present`);
    }
    if (unit.priority !== void 0 && unit.priority !== "high" && unit.priority !== "medium" && unit.priority !== "low") {
      issues.push(`${path3}.priority must be 'high', 'medium', or 'low' when present`);
    }
    if (unit.estimatedEffort !== void 0 && unit.estimatedEffort !== "small" && unit.estimatedEffort !== "medium" && unit.estimatedEffort !== "large") {
      issues.push(`${path3}.estimatedEffort must be 'small', 'medium', or 'large' when present`);
    }
    if (unit.mergeRequired !== void 0 && typeof unit.mergeRequired !== "boolean") {
      issues.push(`${path3}.mergeRequired must be a boolean when present`);
    }
    if (unit.sharedRisks !== void 0 && !isStringArray(unit.sharedRisks)) {
      issues.push(`${path3}.sharedRisks must be an array of non-empty strings when present`);
    }
    if (unit.acceptanceCriteria !== void 0 && !isStringArray(unit.acceptanceCriteria)) {
      issues.push(`${path3}.acceptanceCriteria must be an array of non-empty strings when present`);
    }
    if (unit.verifyFocus !== void 0 && !isStringArray(unit.verifyFocus)) {
      issues.push(`${path3}.verifyFocus must be an array of non-empty strings when present`);
    }
    if (unit.dependsOn !== void 0 && !isStringArray(unit.dependsOn)) {
      issues.push(`${path3}.dependsOn must be an array of non-empty strings when present`);
    }
    if (unit.parallelizable !== void 0 && typeof unit.parallelizable !== "boolean") {
      issues.push(`${path3}.parallelizable must be a boolean when present`);
    }
  });
  // Pass 2: report every id that appears more than once (after its first use).
  const duplicates = unitIds.filter((id, index) => unitIds.indexOf(id) !== index);
  duplicates.forEach((id) => issues.push(`duplicate unit id: ${id}`));
  const uniqueIds = new Set(unitIds);
  // Pass 3: each dependency must reference a known unit and not the unit itself.
  value.forEach((unit, index) => {
    if (!isRecord(unit) || !isNonEmptyString(unit.id)) {
      return;
    }
    const path3 = `units[${index}]`;
    const dependencies = unitDependencies(unit);
    dependencies.forEach((dependencyId) => {
      if (!uniqueIds.has(dependencyId)) {
        issues.push(`${path3}.dependsOn references unknown unit: ${dependencyId}`);
      }
      if (dependencyId === unit.id) {
        issues.push(`${path3}.dependsOn cannot include the unit itself`);
      }
    });
  });
  // Skip the cycle check when any structural issue exists — the graph may not
  // be well-formed enough to traverse safely.
  if (issues.length > 0) {
    return issues;
  }
  // Pass 4: iterative-deepening DFS with a "visiting" set to detect cycles.
  const units = value;
  const visiting = /* @__PURE__ */ new Set();
  const visited = /* @__PURE__ */ new Set();
  const unitMap = new Map(units.map((unit) => [unit.id, unit]));
  function visit(unitId) {
    // Stop exploring once a cycle has been reported (issues.length > 0).
    if (visited.has(unitId) || issues.length > 0) {
      return;
    }
    // Re-entering a node that is still on the current DFS path = cycle.
    if (visiting.has(unitId)) {
      issues.push(`unit dependency cycle detected at ${unitId}`);
      return;
    }
    visiting.add(unitId);
    const unit = unitMap.get(unitId);
    if (unit) {
      for (const dependencyId of unitDependencies(unit)) {
        visit(dependencyId);
      }
    }
    visiting.delete(unitId);
    visited.add(unitId);
  }
  unitIds.forEach(visit);
  return issues;
}
// Validate an array of decision prompts (each: id, question, non-empty
// options[] of {id, label, description?}). `path3` prefixes every issue
// message. Returns issue strings; empty array means valid.
function validateDecisionPrompts(value, path3) {
  if (!Array.isArray(value)) {
    return [`${path3} must be an array`];
  }
  const issues = [];
  // Record an issue when target[field] is not a non-empty string.
  const requireString = (target, targetPath, field) => {
    if (!isNonEmptyString(target[field])) {
      issues.push(`${targetPath}.${field} must be a non-empty string`);
    }
  };
  value.forEach((item, index) => {
    const itemPath = `${path3}[${index}]`;
    if (!isRecord(item)) {
      issues.push(`${itemPath} must be an object`);
      return;
    }
    requireString(item, itemPath, "id");
    requireString(item, itemPath, "question");
    if (!Array.isArray(item.options) || item.options.length === 0) {
      issues.push(`${itemPath}.options must be a non-empty array`);
      return;
    }
    item.options.forEach((option, optionIndex) => {
      const optionPath = `${itemPath}.options[${optionIndex}]`;
      if (!isRecord(option)) {
        issues.push(`${optionPath} must be an object`);
        return;
      }
      requireString(option, optionPath, "id");
      requireString(option, optionPath, "label");
      if (option.description !== void 0 && !isNonEmptyString(option.description)) {
        issues.push(`${optionPath}.description must be a non-empty string when present`);
      }
    });
  });
  return issues;
}
// Validate an array of verify issues: each needs a severity of
// 'error'/'warning', a category, a description, and optionally a suggestion.
function validateVerifyIssues(value, path3) {
  if (!Array.isArray(value)) {
    return [`${path3} must be an array`];
  }
  const issues = [];
  value.forEach((issue, index) => {
    const issuePath = `${path3}[${index}]`;
    if (!isRecord(issue)) {
      issues.push(`${issuePath} must be an object`);
      return;
    }
    const severityOk = issue.severity === "error" || issue.severity === "warning";
    if (!severityOk) {
      issues.push(`${issuePath}.severity must be 'error' or 'warning'`);
    }
    for (const field of ["category", "description"]) {
      if (!isNonEmptyString(issue[field])) {
        issues.push(`${issuePath}.${field} must be a non-empty string`);
      }
    }
    if (issue.suggestion !== void 0 && !isNonEmptyString(issue.suggestion)) {
      issues.push(`${issuePath}.suggestion must be a non-empty string when present`);
    }
  });
  return issues;
}
// Validate the clarify-phase output payload.
// Returns { ok: true, issues: [], value } on success, { ok: false, issues } otherwise.
function validateClarifyOutput(value) {
  if (!isRecord(value)) {
    return { ok: false, issues: ["clarify output must be an object"] };
  }
  const issues = [];
  if (typeof value.ready !== "boolean") {
    issues.push("ready must be a boolean");
  }
  if (!isNonEmptyString(value.summary)) {
    issues.push("summary must be a non-empty string");
  }
  // Required string-array fields.
  for (const field of ["assumptions", "evidence", "acceptanceCriteria"]) {
    if (!isStringArray(value[field])) {
      issues.push(`${field} must be an array of non-empty strings`);
    }
  }
  if (value.verifyFocus !== void 0 && !isStringArray(value.verifyFocus)) {
    issues.push("verifyFocus must be an array of non-empty strings when present");
  }
  issues.push(...validateDecisionPrompts(value.decisions, "decisions"));
  if (issues.length > 0) {
    return { ok: false, issues };
  }
  return { ok: true, issues: [], value };
}
// Validate decision answers: each needs decisionId and selectedOptionId,
// with an optional customInput string.
function validateDecisionAnswers(value) {
  if (!Array.isArray(value)) {
    return { ok: false, issues: ["decision answers must be an array"] };
  }
  const issues = [];
  value.forEach((item, index) => {
    const path3 = `decisionAnswers[${index}]`;
    if (!isRecord(item)) {
      issues.push(`${path3} must be an object`);
      return;
    }
    for (const field of ["decisionId", "selectedOptionId"]) {
      if (!isNonEmptyString(item[field])) {
        issues.push(`${path3}.${field} must be a non-empty string`);
      }
    }
    if (item.customInput !== void 0 && !isNonEmptyString(item.customInput)) {
      issues.push(`${path3}.customInput must be a non-empty string when present`);
    }
  });
  if (issues.length > 0) {
    return { ok: false, issues };
  }
  return { ok: true, issues: [], value };
}
// Validate the execute-phase output payload. Besides a summary, the output
// must carry at least one non-empty payload list among changedFiles,
// outputFiles, or artifacts.
function validateExecuteOutput(value) {
  if (!isRecord(value)) {
    return { ok: false, issues: ["execute output must be an object"] };
  }
  const issues = [];
  if (!isNonEmptyString(value.summary)) {
    issues.push("summary must be a non-empty string");
  }
  // Optional fields must be string arrays when present.
  const optionalListFields = ["changedFiles", "outputFiles", "artifacts", "checks", "handoffNotes", "notes"];
  for (const field of optionalListFields) {
    if (value[field] !== void 0 && !isStringArray(value[field])) {
      issues.push(`${field} must be an array of non-empty strings when present`);
    }
  }
  const payloadFields = ["changedFiles", "outputFiles", "artifacts"];
  const hasPayload = payloadFields.some(
    (field) => Array.isArray(value[field]) && value[field].length > 0
  );
  if (!hasPayload) {
    issues.push("execute output must include changedFiles, outputFiles, or artifacts");
  }
  if (issues.length > 0) {
    return { ok: false, issues };
  }
  return { ok: true, issues: [], value };
}
// Validate the verify-phase output payload: a passed flag, per-check records,
// evidence strings, structured issues, a summary, and several optional fields
// (score, needsHuman, retryHint, unverifiedClaims, decisions).
// Returns { ok: true, issues: [], value } on success, { ok: false, issues } otherwise.
function validateVerifyOutput(value) {
  const issues = [];
  if (!isRecord(value)) {
    return { ok: false, issues: ["verify output must be an object"] };
  }
  if (typeof value.passed !== "boolean") {
    issues.push("passed must be a boolean");
  }
  if (!Array.isArray(value.checks)) {
    issues.push("checks must be an array");
  } else {
    // Each check needs a name, a canonical status, evidence, and optionally
    // the command that produced it.
    value.checks.forEach((check, index) => {
      const path3 = `checks[${index}]`;
      if (!isRecord(check)) {
        issues.push(`${path3} must be an object`);
        return;
      }
      if (!isNonEmptyString(check.name)) {
        issues.push(`${path3}.name must be a non-empty string`);
      }
      if (check.status !== "passed" && check.status !== "failed" && check.status !== "skipped") {
        issues.push(`${path3}.status must be 'passed', 'failed', or 'skipped'`);
      }
      if (check.command !== void 0 && !isNonEmptyString(check.command)) {
        issues.push(`${path3}.command must be a non-empty string when present`);
      }
      if (!isNonEmptyString(check.evidence)) {
        issues.push(`${path3}.evidence must be a non-empty string`);
      }
    });
  }
  if (!isStringArray(value.evidence)) {
    issues.push("evidence must be an array of non-empty strings");
  }
  issues.push(...validateVerifyIssues(value.issues, "issues"));
  if (!isNonEmptyString(value.summary)) {
    issues.push("summary must be a non-empty string");
  }
  // score is optional, but when present every canonical key must be 0..100.
  if (value.score !== void 0) {
    if (!isRecord(value.score)) {
      issues.push("score must be an object when present");
    } else {
      validScoreKeys.forEach((key) => {
        const scoreMap = value.score;
        const score = scoreMap[key];
        if (typeof score !== "number" || score < 0 || score > 100) {
          issues.push(`score.${key} must be a number between 0 and 100`);
        }
      });
    }
  }
  if (value.needsHuman !== void 0 && typeof value.needsHuman !== "boolean") {
    issues.push("needsHuman must be a boolean when present");
  }
  if (value.retryHint !== void 0 && !isNonEmptyString(value.retryHint)) {
    issues.push("retryHint must be a non-empty string when present");
  }
  if (value.unverifiedClaims !== void 0 && !isStringArray(value.unverifiedClaims)) {
    issues.push("unverifiedClaims must be an array of non-empty strings when present");
  }
  if (value.decisions !== void 0) {
    issues.push(...validateDecisionPrompts(value.decisions, "decisions"));
  }
  return issues.length === 0 ? { ok: true, issues: [], value } : { ok: false, issues };
}
// Validate the capture-phase output: entries of
// { filename, content, reason, action? } where action is 'create' or 'update'.
function validateCaptureOutput(value) {
  if (!isRecord(value)) {
    return { ok: false, issues: ["capture output must be an object"] };
  }
  const issues = [];
  if (!Array.isArray(value.entries)) {
    issues.push("entries must be an array");
  } else {
    value.entries.forEach((entry, index) => {
      const path3 = `entries[${index}]`;
      if (!isRecord(entry)) {
        issues.push(`${path3} must be an object`);
        return;
      }
      for (const field of ["filename", "content", "reason"]) {
        if (!isNonEmptyString(entry[field])) {
          issues.push(`${path3}.${field} must be a non-empty string`);
        }
      }
      const action = entry.action;
      if (action !== void 0 && action !== "create" && action !== "update") {
        issues.push(`${path3}.action must be 'create' or 'update' when present`);
      }
    });
  }
  if (issues.length > 0) {
    return { ok: false, issues };
  }
  return { ok: true, issues: [], value };
}
// Validate a persisted workflow state record end to end: required string
// fields, the units array, index/counter fields, decision bookkeeping,
// outputs, graph metadata, and canonical status/phase values.
// Returns { ok: true, issues: [], value } on success, { ok: false, issues } otherwise.
function validateWorkflowState(value) {
  const issues = [];
  if (!isRecord(value)) {
    return { ok: false, issues: ["workflow state must be an object"] };
  }
  // Top-level fields that must all be non-empty strings.
  const stringFields = [
    "schemaVersion",
    "workflowId",
    "mode",
    "description",
    "status",
    "phase",
    "taskRoot",
    "relayRoot",
    "createdAt",
    "updatedAt"
  ];
  stringFields.forEach((field) => {
    if (!isNonEmptyString(value[field])) {
      issues.push(`${field} must be a non-empty string`);
    }
  });
  issues.push(...validateWorkflowUnits(value.units));
  // currentUnitIndex must be in range of the units array (when units is one).
  if (typeof value.currentUnitIndex !== "number" || value.currentUnitIndex < 0) {
    issues.push("currentUnitIndex must be a non-negative number");
  } else if (Array.isArray(value.units) && value.currentUnitIndex >= value.units.length) {
    issues.push("currentUnitIndex must point to an existing unit");
  }
  if (typeof value.captureEnabled !== "boolean") {
    issues.push("captureEnabled must be a boolean");
  }
  if (typeof value.maxVerifyAttempts !== "number" || value.maxVerifyAttempts < 1) {
    issues.push("maxVerifyAttempts must be a positive number");
  }
  if (typeof value.verifyAttempts !== "number" || value.verifyAttempts < 0) {
    issues.push("verifyAttempts must be a non-negative number");
  }
  // Decision bookkeeping: open prompts and the record of answered ones.
  if (!Array.isArray(value.pendingDecisions)) {
    issues.push("pendingDecisions must be an array");
  } else {
    issues.push(...validateDecisionPrompts(value.pendingDecisions, "pendingDecisions"));
  }
  if (!Array.isArray(value.decisionHistory)) {
    issues.push("decisionHistory must be an array");
  } else {
    value.decisionHistory.forEach((item, index) => {
      const path3 = `decisionHistory[${index}]`;
      if (!isRecord(item)) {
        issues.push(`${path3} must be an object`);
        return;
      }
      if (!isNonEmptyString(item.decisionId)) {
        issues.push(`${path3}.decisionId must be a non-empty string`);
      }
      if (!isNonEmptyString(item.selectedOptionId)) {
        issues.push(`${path3}.selectedOptionId must be a non-empty string`);
      }
      if (item.customInput !== void 0 && !isNonEmptyString(item.customInput)) {
        issues.push(`${path3}.customInput must be a non-empty string when present`);
      }
    });
  }
  if (!isRecord(value.outputs)) {
    issues.push("outputs must be an object");
  }
  // Graph metadata is optional but constrained when present.
  if (value.graphStrategy !== void 0 && value.graphStrategy !== "single" && value.graphStrategy !== "serial" && value.graphStrategy !== "parallel_fanout") {
    issues.push("graphStrategy must be 'single', 'serial', or 'parallel_fanout' when present");
  }
  if (value.graphNotes !== void 0 && !isStringArray(value.graphNotes)) {
    issues.push("graphNotes must be an array of non-empty strings when present");
  }
  if (value.lastVerifyIssues !== void 0) {
    issues.push(...validateVerifyIssues(value.lastVerifyIssues, "lastVerifyIssues"));
  }
  if (!validStatuses.includes(value.status)) {
    issues.push("status must be one of the canonical workflow statuses");
  }
  if (!validPhases.includes(value.phase)) {
    issues.push("phase must be one of the canonical workflow phases");
  }
  // NOTE(review): the inner branch below looks unreachable —
  // inferGraphStrategy returns "single" whenever units.length <= 1, so it can
  // never return "parallel_fanout" for fewer than two units. Kept as-is to
  // preserve behavior; possibly the intent was to check an *explicit*
  // graphStrategy of "parallel_fanout" against the unit count — confirm.
  if (Array.isArray(value.units) && value.graphStrategy === void 0) {
    const inferredStrategy = inferGraphStrategy(value.units);
    if (inferredStrategy === "parallel_fanout" && Array.isArray(value.units) && value.units.length < 2) {
      issues.push("parallel_fanout requires multiple workflow units");
    }
  }
  return issues.length === 0 ? { ok: true, issues: [], value } : { ok: false, issues };
}
// Validate a bare array of decision prompts and wrap the outcome in { ok, issues, value? }.
function validateDecisionPromptArray(value) {
  const issues = validateDecisionPrompts(value, "decisions");
  if (issues.length > 0) {
    return { ok: false, issues };
  }
  return { ok: true, issues: [], value };
}
// Validate a bare array of verify issues and wrap the outcome in { ok, issues, value? }.
function validateVerifyIssueArray(value) {
  const issues = validateVerifyIssues(value, "issues");
  if (issues.length > 0) {
    return { ok: false, issues };
  }
  return { ok: true, issues: [], value };
}
| // src/state-store.ts | ||
| import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from "fs"; | ||
| import path2 from "path"; | ||
// Unit the workflow currently points at; undefined when the index is out of range.
function currentUnit(state) {
  const { units, currentUnitIndex } = state;
  return units[currentUnitIndex];
}
// Fill missing taskRoot/relayRoot defaults in place and return the state.
// Intentionally mutates its argument so callers keep the same reference.
function hydrateWorkflowState(state) {
  const defaults = {
    taskRoot: () => workflowTaskRootPath(state.workflowId),
    relayRoot: () => workflowRelayRootPath(state.workflowId)
  };
  for (const [field, makeDefault] of Object.entries(defaults)) {
    if (!state[field]) {
      state[field] = makeDefault();
    }
  }
  return state;
}
// Loosely cast an unknown stored value to a clarify output; undefined for
// null and non-object values.
function asClarifyOutput(value) {
  if (typeof value !== "object" || value === null) {
    return void 0;
  }
  return value;
}
// Loosely cast an unknown stored value to an execute output; undefined for
// null and non-object values.
function asExecuteOutput(value) {
  if (typeof value !== "object" || value === null) {
    return void 0;
  }
  return value;
}
// Loosely cast an unknown stored value to a verify output; undefined for
// null and non-object values. (Bundler-renamed duplicate of asVerifyOutput.)
function asVerifyOutput2(value) {
  if (typeof value !== "object" || value === null) {
    return void 0;
  }
  return value;
}
// Recorded phase outputs for a unit; {} when nothing has been recorded yet.
function unitOutputs(state, unitId) {
  const recorded = state.outputs[unitId];
  return recorded ?? {};
}
// Write content to a repo-relative path, creating parent directories as needed.
function writeTextFile(relativePath, content, rootDir = process.cwd()) {
  const target = absolutePath(relativePath, rootDir);
  const parentDir = path2.dirname(target);
  mkdirSync(parentDir, { recursive: true });
  writeFileSync(target, content);
}
// Render items as markdown bullets; a single placeholder bullet when empty.
function markdownList(items, empty = "(none)") {
  const bullets = items.map((item) => `- ${item}`);
  return bullets.length > 0 ? bullets : [`- ${empty}`];
}
// Human-readable runtime status for one unit, in precedence order:
// completed > current (only while the workflow is in a phase_* status) > ready > pending.
function formatUnitRuntimeStatus(state, unit) {
  if (completedUnitIds(state).includes(unit.id)) {
    return "completed";
  }
  const inActivePhase = state.status.startsWith("phase_");
  if (inActivePhase && currentUnit(state)?.id === unit.id) {
    return "current";
  }
  const isReady = readyUnits(state).some((candidate) => candidate.id === unit.id);
  return isReady ? "ready" : "pending";
}
// Render the workflow-level index.md: header metadata followed by one
// section per unit listing its runtime status and document references.
function buildWorkflowIndex(state) {
  const header = [
    `# Workflow ${state.workflowId}`,
    "",
    `- Description: ${state.description}`,
    `- Status: ${state.status}`,
    `- Phase: ${state.phase}`,
    `- Graph strategy: ${state.graphStrategy ?? "single"}`,
    `- State ref: ${workflowStatePath(state.workflowId)}`,
    `- Relay root: ${state.relayRoot}`,
    "",
    "## Units"
  ];
  const unitSection = (unit) => {
    const section = [
      `### ${unit.id} \xB7 ${unit.title}`,
      `- Runtime status: ${formatUnitRuntimeStatus(state, unit)}`,
      `- Kind: ${unit.kind ?? "work"}`,
      `- Priority: ${unit.priority ?? "medium"}`,
      `- Estimated effort: ${unit.estimatedEffort ?? "medium"}`,
      `- Brief: ${unitBriefPath(state.workflowId, unit.id)}`,
      `- Context: ${unitContextPath(state.workflowId, unit.id)}`,
      `- Status: ${unitStatusPath(state.workflowId, unit.id)}`,
      `- Result: ${unitResultPath(state.workflowId, unit.id)}`,
      `- Baton: ${unitBatonPath(state.workflowId, unit.id)}`,
      `- Relay: ${unitRelayPath(state.workflowId, unit.id)}`
    ];
    if ((unit.dependsOn?.length ?? 0) > 0) {
      section.push(`- Depends on: ${unit.dependsOn.join(", ")}`);
    }
    section.push("");
    return section;
  };
  const lines = [...header, ...state.units.flatMap(unitSection)];
  return `${lines.join("\n")}\n`;
}
// Render one unit's brief.md: metadata header, then Request, Scope,
// Ownership, Acceptance Criteria, Shared Risks, and Dependencies sections.
function buildUnitBrief(state, unit) {
  const { workflowId } = state;
  const metadata = [
    `# ${unit.id} \xB7 ${unit.title}`,
    "",
    `- Workflow: ${workflowId}`,
    `- Workflow index: ${workflowTaskIndexPath(workflowId)}`,
    `- State ref: ${workflowStatePath(workflowId)}`,
    `- Kind: ${unit.kind ?? "work"}`,
    `- Priority: ${unit.priority ?? "medium"}`,
    `- Estimated effort: ${unit.estimatedEffort ?? "medium"}`,
    `- Parallelizable: ${unit.parallelizable === true ? "yes" : "no"}`,
    `- Merge required: ${unit.mergeRequired === true ? "yes" : "no"}`,
    `- Status ref: ${unitStatusPath(workflowId, unit.id)}`,
    `- Result ref: ${unitResultPath(workflowId, unit.id)}`,
    `- Baton ref: ${unitBatonPath(workflowId, unit.id)}`,
    `- Relay ref: ${unitRelayPath(workflowId, unit.id)}`
  ];
  const dependencyLines = unitDependencies(unit).map(
    (dependencyId) => `${dependencyId} -> ${unitRelayPath(workflowId, dependencyId)}`
  );
  const lines = [
    ...metadata,
    "",
    "## Request",
    unit.request ?? state.description,
    "",
    "## Scope",
    ...markdownList(unit.scope ?? []),
    "",
    "## Ownership",
    ...markdownList(unit.ownership ?? []),
    "",
    "## Acceptance Criteria",
    ...markdownList(unit.acceptanceCriteria ?? []),
    "",
    "## Shared Risks",
    ...markdownList(unit.sharedRisks ?? []),
    "",
    "## Dependencies",
    ...markdownList(dependencyLines)
  ];
  return `${lines.join("\n")}\n`;
}
// Render one unit's context.md: anchors, intake notes/intents, upstream relay
// paths, plus a Graph Notes section when the workflow has any.
function buildUnitContext(state, unit) {
  const joinOrNone = (values) => (values ?? []).join(", ") || "(none)";
  const anchors = unit.anchors;
  const intentLine = (intent) => {
    const targets = intent.targets?.length ? ` targets=${intent.targets.join(", ")}` : "";
    const query = intent.query ? ` query=${intent.query}` : "";
    return `${intent.kind} [${intent.priority}]${targets}${query} :: ${intent.reason}`;
  };
  const lines = [
    `# Context for ${unit.id}`,
    "",
    "## Anchors",
    `- Files: ${joinOrNone(anchors?.filePaths)}`,
    `- Symbols: ${joinOrNone(anchors?.symbols)}`,
    `- Errors: ${joinOrNone(anchors?.errors)}`,
    `- Tests: ${joinOrNone(anchors?.tests)}`,
    `- Tickets: ${joinOrNone(anchors?.tickets)}`,
    "",
    "## Intake Notes",
    ...markdownList(unit.intakeNotes ?? []),
    "",
    "## Intake Intents",
    ...markdownList((unit.intakeIntents ?? []).map(intentLine)),
    "",
    "## Upstream Relays",
    ...markdownList(unitDependencies(unit).map((dependencyId) => unitRelayPath(state.workflowId, dependencyId)))
  ];
  if ((state.graphNotes?.length ?? 0) > 0) {
    lines.push("", "## Graph Notes", ...markdownList(state.graphNotes ?? []));
  }
  return `${lines.join("\n")}\n`;
}
// Render the status.md packet for one unit: runtime/workflow status, ready
// sibling units, pending decisions, plus blocked-reason and last-verify-issue
// sections when they apply.
function buildUnitStatus(state, unit) {
  const isCurrent = currentUnit(state)?.id === unit.id;
  const readySiblingIds = readyUnits(state)
    .map((candidate) => candidate.id)
    .filter((id) => id !== unit.id);
  const lines = [
    `# Status for ${unit.id}`,
    "",
    `- Runtime status: ${formatUnitRuntimeStatus(state, unit)}`,
    `- Workflow status: ${state.status}`,
    `- Phase: ${state.phase}`,
    `- Updated at: ${state.updatedAt}`,
    `- Verify attempts: ${state.verifyAttempts} / ${state.maxVerifyAttempts}`,
    `- Current unit: ${isCurrent ? "yes" : "no"}`,
    "",
    "## Ready Siblings",
    ...markdownList(readySiblingIds),
    "",
    "## Pending Decisions",
    ...markdownList(state.pendingDecisions.map((decision) => `${decision.id}: ${decision.question}`)),
  ];
  if (state.blockedReason) {
    lines.push("", "## Blocked Reason", state.blockedReason);
  }
  // Verify issues are only attached to the unit currently being worked on.
  if (isCurrent && (state.lastVerifyIssues?.length ?? 0) > 0) {
    lines.push(
      "",
      "## Last Verify Issues",
      ...markdownList(state.lastVerifyIssues.map((issue) => `${issue.severity} ${issue.category}: ${issue.description}`))
    );
  }
  return `${lines.join("\n")}\n`;
}
// Render the result.md packet for one unit from its stored phase outputs.
// Each phase (clarify/execute/verify) gets its own section, or a
// "(not completed yet)" placeholder when no output has been recorded.
function buildUnitResult(state, unit) {
  const outputs = unitOutputs(state, unit.id);
  const clarify = asClarifyOutput(outputs.clarify);
  const execute = asExecuteOutput(outputs.execute);
  const verify = asVerifyOutput2(outputs.verify);
  const lines = [`# Result for ${unit.id}`, ""];
  if (clarify) {
    lines.push(
      "## Clarify",
      clarify.summary,
      "",
      "### Evidence",
      ...markdownList(clarify.evidence),
      "",
      "### Acceptance Criteria",
      ...markdownList(clarify.acceptanceCriteria),
      "",
      "### Assumptions",
      ...markdownList(clarify.assumptions),
      ""
    );
  } else {
    lines.push("## Clarify", "(not completed yet)", "");
  }
  if (execute) {
    lines.push(
      "## Execute",
      execute.summary,
      "",
      "### Changed Files",
      ...markdownList(execute.changedFiles ?? []),
      "",
      "### Output Files",
      ...markdownList(execute.outputFiles ?? []),
      "",
      "### Artifacts",
      ...markdownList(execute.artifacts ?? []),
      "",
      "### Checks",
      ...markdownList(execute.checks ?? []),
      "",
      "### Handoff Notes",
      ...markdownList(execute.handoffNotes ?? []),
      "",
      "### Notes",
      ...markdownList(execute.notes ?? []),
      ""
    );
  } else {
    lines.push("## Execute", "(not completed yet)", "");
  }
  if (verify) {
    lines.push("## Verify");
    lines.push(`- Passed: ${verify.passed ? "yes" : "no"}`);
    lines.push(`- Summary: ${verify.summary}`);
    if (verify.score) {
      lines.push(
        `- Score: accuracy=${verify.score.accuracy}, completeness=${verify.score.completeness}, consistency=${verify.score.consistency}`
      );
    }
    const checkLines = verify.checks.map(
      (check) => `${check.status.toUpperCase()} ${check.name}${check.command ? ` [${check.command}]` : ""}: ${check.evidence}`
    );
    lines.push("", "### Checks", ...markdownList(checkLines), "");
    lines.push("### Evidence", ...markdownList(verify.evidence), "");
    lines.push(
      "### Issues",
      ...markdownList(verify.issues.map((issue) => `${issue.severity} ${issue.category}: ${issue.description}`)),
      ""
    );
    lines.push("### Unverified Claims", ...markdownList(verify.unverifiedClaims ?? []), "");
  } else {
    lines.push("## Verify", "(not completed yet)", "");
  }
  return `${lines.join("\n")}\n`;
}
// Render the baton.md packet handed to downstream units: phase summaries,
// changed files, acceptance criteria, handoff notes, and a pointer to every
// unit that depends on this one.
function buildUnitBaton(state, unit) {
  const outputs = unitOutputs(state, unit.id);
  const clarify = asClarifyOutput(outputs.clarify);
  const execute = asExecuteOutput(outputs.execute);
  const verify = asVerifyOutput2(outputs.verify);
  // Downstream = any unit that lists this one among its dependencies.
  const downstreamUnits = state.units.filter((candidate) => unitDependencies(candidate).includes(unit.id));
  const carryForward = [
    clarify?.summary ?? "clarify not completed",
    execute?.summary ?? "execute not completed",
    verify?.summary ?? "verify not completed",
  ];
  const lines = [
    `# Baton for ${unit.id}`,
    "",
    `- Title: ${unit.title}`,
    `- Workflow: ${state.workflowId}`,
    `- Status: ${formatUnitRuntimeStatus(state, unit)}`,
    `- Result ref: ${unitResultPath(state.workflowId, unit.id)}`,
    "",
    "## Carry Forward",
    ...markdownList(carryForward),
    "",
    "## Changed Files",
    ...markdownList(execute?.changedFiles ?? []),
    "",
    "## Acceptance Criteria",
    ...markdownList(clarify?.acceptanceCriteria ?? unit.acceptanceCriteria ?? []),
    "",
    "## Handoff Notes",
    ...markdownList(execute?.handoffNotes ?? []),
    "",
    "## Downstream Units",
    ...markdownList(downstreamUnits.map((candidate) => `${candidate.id} -> ${unitBriefPath(state.workflowId, candidate.id)}`)),
  ];
  return `${lines.join("\n")}\n`;
}
// Regenerate every on-disk packet for the workflow: the index file plus, per
// unit, its artifacts directory and brief/context/status/result files. The
// baton content is written twice: once as the unit's baton and once as its
// relay file for downstream units.
function syncWorkflowTaskPackets(state, rootDir = process.cwd()) {
  const { workflowId } = state;
  writeTextFile(workflowTaskIndexPath(workflowId), buildWorkflowIndex(state), rootDir);
  for (const unit of state.units) {
    const unitId = unit.id;
    mkdirSync(absolutePath(unitArtifactsDirPath(workflowId, unitId), rootDir), { recursive: true });
    writeTextFile(unitBriefPath(workflowId, unitId), buildUnitBrief(state, unit), rootDir);
    writeTextFile(unitContextPath(workflowId, unitId), buildUnitContext(state, unit), rootDir);
    writeTextFile(unitStatusPath(workflowId, unitId), buildUnitStatus(state, unit), rootDir);
    writeTextFile(unitResultPath(workflowId, unitId), buildUnitResult(state, unit), rootDir);
    const baton = buildUnitBaton(state, unit);
    writeTextFile(unitBatonPath(workflowId, unitId), baton, rootDir);
    writeTextFile(unitRelayPath(workflowId, unitId), baton, rootDir);
  }
}
// Identity pass-through kept as part of the exported API; callers hand in the
// timestamp that should become `updatedAt` and receive it unchanged.
function touchUpdatedAt(value) {
  return value;
}
// Persist the workflow state JSON to disk (creating parent directories) and
// regenerate all derived task packets. Mutates `state` to refresh its
// taskRoot/relayRoot paths. Returns the absolute state-file path.
function saveWorkflowState(state, rootDir = process.cwd()) {
  const { workflowId } = state;
  state.taskRoot = workflowTaskRootPath(workflowId);
  state.relayRoot = workflowRelayRootPath(workflowId);
  const filePath = absoluteWorkflowStatePath(workflowId, rootDir);
  mkdirSync(path2.dirname(filePath), { recursive: true });
  writeFileSync(filePath, JSON.stringify(state, null, 2));
  syncWorkflowTaskPackets(state, rootDir);
  return filePath;
}
// Read and hydrate a workflow state file; throws if the file is missing or
// contains invalid JSON (see loadState for the null-returning variant).
function loadWorkflowState(workflowId, rootDir = process.cwd()) {
  const raw = readFileSync(absoluteWorkflowStatePath(workflowId, rootDir), "utf8");
  return hydrateWorkflowState(JSON.parse(raw));
}
// Stamp `updatedAt` with the current time, then persist via saveWorkflowState.
function saveState(state, rootDir = process.cwd()) {
  state.updatedAt = new Date().toISOString();
  saveWorkflowState(state, rootDir);
}
// Like loadWorkflowState, but returns null when no state file exists instead
// of throwing.
function loadState(workflowId, rootDir = process.cwd()) {
  const filePath = absoluteWorkflowStatePath(workflowId, rootDir);
  if (!existsSync(filePath)) {
    return null;
  }
  const parsed = JSON.parse(readFileSync(filePath, "utf8"));
  return hydrateWorkflowState(parsed);
}
// List all active workflow states on disk (excluding completed/stopped ones),
// newest first by updatedAt. Unreadable or corrupt files are skipped.
function listStates(rootDir = process.cwd()) {
  const stateDir = path2.join(absoluteRoot(rootDir), path2.dirname(workflowStatePath("placeholder")));
  if (!existsSync(stateDir)) {
    return [];
  }
  const states = [];
  for (const entry of readdirSync(stateDir)) {
    if (!entry.endsWith(".json")) {
      continue;
    }
    try {
      states.push(hydrateWorkflowState(JSON.parse(readFileSync(path2.join(stateDir, entry), "utf8"))));
    } catch {
      // Best effort: a corrupt or non-state JSON file must not break listing.
    }
  }
  return states
    .filter((state) => state.status !== "completed" && state.status !== "stopped")
    .sort((left, right) => right.updatedAt.localeCompare(left.updatedAt));
}
| // src/orchestrator.ts | ||
// Deep-copy via JSON round-trip. State objects are JSON-persisted elsewhere in
// this file, so JSON semantics (undefined-valued keys dropped, plain objects
// only) match what survives a save/load cycle anyway.
function cloneState(value) {
  const serialized = JSON.stringify(value);
  return JSON.parse(serialized);
}
// Current time as an ISO-8601 UTC string (the format used for createdAt /
// updatedAt throughout the state).
function nowIso() {
  const now = new Date();
  return now.toISOString();
}
// Generate a workflow id of the form `wf-<base36 timestamp>-<6-char suffix>`.
// Fix: `Math.random().toString(36).slice(2, 8)` can yield fewer than 6
// characters when the base-36 expansion terminates early (e.g. Math.random()
// returning 0 gives "0" and an empty suffix), so the suffix is padded to keep
// id length stable. Existing ids remain valid under the same pattern.
function createWorkflowId() {
  const time = Date.now().toString(36);
  const suffix = Math.random().toString(36).slice(2, 8).padEnd(6, "0");
  return `wf-${time}-${suffix}`;
}
// The unit at the workflow's current index, or undefined when the index is
// out of range (e.g. empty unit list).
function currentUnit2(state) {
  const { units, currentUnitIndex } = state;
  return units[currentUnitIndex];
}
// Return the mutable outputs bucket for the current unit, creating it on
// first access. Returns a throwaway empty object when there is no current
// unit, so writes are safely discarded.
function currentUnitOutputBucket(state) {
  const unit = currentUnit2(state);
  if (!unit) {
    return {};
  }
  state.outputs[unit.id] ??= {};
  return state.outputs[unit.id];
}
// Record a phase payload under the current unit's outputs.
function storePhaseOutput(state, phase, payload) {
  const bucket = currentUnitOutputBucket(state);
  bucket[phase] = payload;
}
// Repo-relative path of the JSON schema that validates a phase's output.
function schemaForPhase(phase) {
  return ["schemas", "payloads", `${phase}-output.schema.json`].join("/");
}
// Repo-relative path of the prompt file for a phase; undefined for any other
// value (matching the original switch with no default).
function promptForPhase(phase) {
  if (phase === "clarify") {
    return "prompts/clarify.md";
  }
  if (phase === "execute") {
    return "prompts/executor.md";
  }
  if (phase === "verify") {
    return "prompts/verifier.md";
  }
  if (phase === "capture") {
    return "prompts/capture.md";
  }
}
// Narrow an arbitrary phase value to one of the four runtime phases;
// anything else maps to undefined.
function toRuntimePhase(phase) {
  const runtimePhases = ["clarify", "execute", "verify", "capture"];
  return runtimePhases.includes(phase) ? phase : undefined;
}
// Build the "run" signal the host executes for one phase of the current unit:
// prompt/schema refs, packet file refs, relay refs for upstream dependencies,
// the run context, and human-readable instructions.
function runSignal(state, phase, instructions) {
  const context = buildRunContext(state);
  const unit = currentUnit2(state);
  const { workflowId } = state;
  const unitContext =
    context.currentUnit && typeof context.currentUnit === "object" ? context.currentUnit : undefined;
  const readySiblings = Array.isArray(context.readySiblingUnitIds)
    ? context.readySiblingUnitIds.filter((value) => typeof value === "string")
    : [];
  // Assemble the instruction text from the base instructions plus optional
  // graph/unit/sibling context, joined with single spaces.
  const details = [instructions];
  if (typeof context.graphStrategy === "string") {
    details.push(`Graph strategy: ${context.graphStrategy}.`);
  }
  if (unitContext?.title && typeof unitContext.title === "string") {
    const idSuffix = typeof unitContext.id === "string" ? ` (${unitContext.id})` : "";
    details.push(`Current unit: ${unitContext.title}${idSuffix}.`);
  }
  if (readySiblings.length > 0) {
    details.push(
      `Other ready units: ${readySiblings.join(", ")}. Use them as scheduling metadata only; this signal still covers one unit.`
    );
  }
  return {
    type: "run",
    workflow_id: workflowId,
    mode: state.mode,
    unit_id: unit?.id,
    phase,
    prompt_ref: promptForPhase(phase),
    required_schema: schemaForPhase(phase),
    state_ref: workflowStatePath(workflowId),
    workflow_task_index_ref: workflowTaskIndexPath(workflowId),
    task_packet_ref: unit ? unitBriefPath(workflowId, unit.id) : undefined,
    task_context_ref: unit ? unitContextPath(workflowId, unit.id) : undefined,
    task_status_ref: unit ? unitStatusPath(workflowId, unit.id) : undefined,
    task_result_ref: unit ? unitResultPath(workflowId, unit.id) : undefined,
    baton_ref: unit ? unitBatonPath(workflowId, unit.id) : undefined,
    relay_refs: unit ? unitDependencies(unit).map((dependencyId) => unitRelayPath(workflowId, dependencyId)) : [],
    context,
    on_complete: {
      kind: "phase_output",
      phase,
    },
    instructions: details.join(" "),
  };
}
// Build a "fault" signal. `state` may be undefined (e.g. validation of a
// state that never existed); every state-derived field degrades to undefined.
function faultSignal(state, error, issues, recoverable) {
  // Hoisted so the phase is computed once instead of twice.
  const runtimePhase = state ? toRuntimePhase(state.phase) : undefined;
  return {
    type: "fault",
    workflow_id: state?.workflowId,
    mode: state?.mode,
    unit_id: state ? currentUnit2(state)?.id : undefined,
    phase: runtimePhase,
    expected_schema: runtimePhase ? schemaForPhase(runtimePhase) : undefined,
    issues,
    error,
    recoverable,
  };
}
// Build the "done" signal for a finished workflow. Statuses other than
// blocked/stopped are reported as completed.
function terminalSignal(state) {
  const { status, workflowId } = state;
  let finalStatus = "completed";
  let message = "workflow completed";
  if (status === "blocked") {
    finalStatus = "blocked";
    message = state.blockedReason || "workflow blocked";
  } else if (status === "stopped") {
    finalStatus = "stopped";
    message = "workflow stopped";
  }
  return {
    type: "done",
    workflow_id: workflowId,
    mode: state.mode,
    status: finalStatus,
    state_ref: workflowStatePath(workflowId),
    workflow_task_index_ref: workflowTaskIndexPath(workflowId),
    outputs: Object.keys(state.outputs),
    message,
  };
}
// Mirror `signal` as `response` so both naming conventions are served.
function withResponse(result) {
  const response = result.signal;
  return { ...result, response };
}
// Accept either a full create-workflow spec (returned untouched) or a bare
// description string, which becomes a single-unit "work" workflow whose unit
// title is the trimmed description (falling back to "main" when blank).
function normalizeCreateWorkflowInput(input) {
  if (typeof input !== "string") {
    return input;
  }
  const trimmed = input.trim();
  return {
    mode: "work",
    description: input,
    units: [{ id: "main", title: trimmed === "" ? "main" : trimmed }],
  };
}
// Transition the workflow's status/phase in place and refresh updatedAt.
function setPhase(state, status, phase) {
  Object.assign(state, { status, phase, updatedAt: nowIso() });
}
// Move the workflow to the next ready unit (resetting verify attempts and
// entering clarify), or mark it blocked when the graph has no ready unit left.
function advanceToNextUnit(state) {
  const nextIndex = nextReadyUnitIndex(state);
  if (nextIndex !== undefined) {
    state.currentUnitIndex = nextIndex;
    state.verifyAttempts = 0;
    setPhase(state, "phase_clarify", "clarify");
    return;
  }
  state.status = "blocked";
  state.blockedReason = "no ready unit remained in the workflow graph";
  state.updatedAt = nowIso();
}
// Validate a workflow state; returns undefined when valid, otherwise an
// unrecoverable fault signal carrying the validation issues.
function validateState(state) {
  const result = validateWorkflowState(state);
  return result.ok ? undefined : faultSignal(undefined, "invalid workflow state", result.issues, false);
}
// Create a new workflow from a spec (or bare description string). The first
// unit with no dependencies becomes current; the state is validated before
// the initial signal is produced. Note: property order of `state` is kept as
// declared because the state is later JSON-serialized to disk.
function createWorkflow(input) {
  const spec = normalizeCreateWorkflowInput(input);
  const createdAt = spec.createdAt ?? nowIso();
  const firstUnblocked = spec.units.findIndex((unit) => (unit.dependsOn?.length ?? 0) === 0);
  const state = {
    schemaVersion: "1.2.0",
    workflowId: spec.workflowId ?? createWorkflowId(),
    mode: spec.mode,
    description: spec.description,
    status: "phase_clarify",
    phase: "clarify",
    units: spec.units,
    graphStrategy: spec.graphStrategy,
    graphNotes: spec.graphNotes,
    // Fall back to index 0 when every unit has dependencies.
    currentUnitIndex: firstUnblocked >= 0 ? firstUnblocked : 0,
    captureEnabled: spec.captureEnabled ?? false,
    maxVerifyAttempts: spec.maxVerifyAttempts ?? 3,
    verifyAttempts: 0,
    pendingDecisions: [],
    decisionHistory: [],
    outputs: {},
    taskRoot: "",
    relayRoot: "",
    createdAt,
    updatedAt: createdAt,
  };
  state.taskRoot = workflowTaskRootPath(state.workflowId);
  state.relayRoot = workflowRelayRootPath(state.workflowId);
  const validation = validateWorkflowState(state);
  if (!validation.ok) {
    const fault = faultSignal(state, "new workflow failed validation", validation.issues, false);
    return { state, signal: fault, response: fault };
  }
  const signal = nextSignal(state);
  return { state, signal, response: signal };
}
// Map the workflow's current status to the signal the host should act on:
// a "run" signal for each phase, a "gate" signal while awaiting external
// decisions, a terminal signal when done, and a fault otherwise.
function nextSignal(state) {
  const validation = validateWorkflowState(state);
  if (!validation.ok) {
    return faultSignal(state, "invalid workflow state", validation.issues, false);
  }
  const { status } = state;
  if (status === "phase_clarify") {
    return runSignal(
      state,
      "clarify",
      "Tighten the current unit until execution is safe, verification is clear, and blocked sibling units are not pulled into scope."
    );
  }
  if (status === "clarify_pending") {
    const unit = currentUnit2(state);
    return {
      type: "gate",
      gate: "clarify",
      workflow_id: state.workflowId,
      mode: state.mode,
      unit_id: unit?.id,
      options: state.pendingDecisions.map((decision) => decision.id),
      state_ref: workflowStatePath(state.workflowId),
      workflow_task_index_ref: workflowTaskIndexPath(state.workflowId),
      task_status_ref: unit ? unitStatusPath(state.workflowId, unit.id) : undefined,
      instructions: "Collect the pending external decisions, then submit decision answers and resume clarify.",
    };
  }
  if (status === "phase_execute") {
    return runSignal(
      state,
      "execute",
      "Perform only the clarified unit of work. If the graph exposes other ready units, treat them as scheduler metadata for the host rather than silently expanding this unit."
    );
  }
  if (status === "phase_verify") {
    return runSignal(
      state,
      "verify",
      "Try to disprove the claimed result for the current unit and report recoverable issues precisely enough to drive the next clarify pass."
    );
  }
  if (status === "phase_capture") {
    return runSignal(state, "capture", "Capture only durable, reusable patterns worth saving.");
  }
  if (status === "completed" || status === "blocked" || status === "stopped") {
    return terminalSignal(state);
  }
  return faultSignal(state, `unsupported status: ${String(status)}`, [], false);
}
// Alias of nextSignal, kept so callers that use the "response" naming (see
// withResponse, which mirrors `signal` as `response`) have a matching entry
// point.
function nextResponse(state) {
  return nextSignal(state);
}
// Return a stopped copy of the workflow (the input state is not mutated)
// together with its terminal signal. The reason is recorded in blockedReason.
function stopWorkflow(state, reason = "workflow stopped") {
  const next = cloneState(state);
  Object.assign(next, { status: "stopped", blockedReason: reason, updatedAt: nowIso() });
  const signal = nextSignal(next);
  return { state: next, signal, response: signal };
}
// Apply answered external decisions: only valid while clarify_pending.
// Answers are appended to the decision history, pending decisions are
// cleared, and the workflow re-enters clarify on a copied state.
function applyDecisionAnswers(state, input) {
  if (state.status !== "clarify_pending") {
    const fault = faultSignal(state, "decision answers are only valid during clarify_pending", [], true);
    return withResponse({ signal: fault });
  }
  const validation = validateDecisionAnswers(input);
  if (!validation.ok || !validation.value) {
    return withResponse({ signal: faultSignal(state, "invalid decision answers", validation.issues, true) });
  }
  const next = cloneState(state);
  next.decisionHistory.push(...validation.value);
  next.pendingDecisions = [];
  setPhase(next, "phase_clarify", "clarify");
  return withResponse({ state: next, signal: nextSignal(next) });
}
// Public alias of applyDecisionAnswers ("submit" naming for external callers).
function submitDecisions(state, input) {
  return applyDecisionAnswers(state, input);
}
// Dispatch a phase output to its handler after checking that the payload's
// phase matches the workflow's current phase.
function applyPhaseOutput(state, phase, input) {
  if (state.phase !== phase) {
    const fault = faultSignal(
      state,
      `phase output for ${phase} is invalid while workflow phase is ${state.phase}`,
      [],
      true
    );
    return withResponse({ signal: fault });
  }
  if (phase === "clarify") {
    return applyClarify(state, input);
  }
  if (phase === "execute") {
    return applyExecute(state, input);
  }
  if (phase === "verify") {
    return applyVerify(state, input);
  }
  if (phase === "capture") {
    return applyCapture(state, input);
  }
  return withResponse({ signal: faultSignal(state, `unsupported phase: ${phase}`, [], false) });
}
// Public alias of applyPhaseOutput ("submit" naming for external callers).
function submitPhaseOutput(state, phase, input) {
  return applyPhaseOutput(state, phase, input);
}
// Apply a clarify-phase output. A ready output advances to execute; a
// not-ready output must carry external decisions (entering clarify_pending),
// otherwise it is rejected as a recoverable fault.
function applyClarify(state, input) {
  if (state.status !== "phase_clarify") {
    return withResponse({
      signal: faultSignal(state, "clarify output is only valid during phase_clarify", [], true),
    });
  }
  const validation = validateClarifyOutput(input);
  if (!validation.ok || !validation.value) {
    return withResponse({
      signal: faultSignal(state, "invalid clarify output", validation.issues, true),
    });
  }
  const output = validation.value;
  const next = cloneState(state);
  storePhaseOutput(next, "clarify", output);
  if (output.ready) {
    next.pendingDecisions = [];
    setPhase(next, "phase_execute", "execute");
    return withResponse({ state: next, signal: nextSignal(next) });
  }
  if (output.decisions.length === 0) {
    // Not ready, yet nothing for a human to decide: the agent must retry.
    return withResponse({
      signal: faultSignal(
        state,
        "clarify output is not ready but did not supply any external decisions",
        [],
        true
      ),
    });
  }
  next.pendingDecisions = output.decisions;
  setPhase(next, "clarify_pending", "clarify");
  return withResponse({ state: next, signal: nextSignal(next) });
}
// Apply an execute-phase output: validate it, store it on a copied state,
// and advance the workflow to verify.
function applyExecute(state, input) {
  if (state.status !== "phase_execute") {
    const fault = faultSignal(state, "execute output is only valid during phase_execute", [], true);
    return withResponse({ signal: fault });
  }
  const validation = validateExecuteOutput(input);
  if (!validation.ok || !validation.value) {
    return withResponse({ signal: faultSignal(state, "invalid execute output", validation.issues, true) });
  }
  const next = cloneState(state);
  storePhaseOutput(next, "execute", validation.value);
  setPhase(next, "phase_verify", "verify");
  return withResponse({ state: next, signal: nextSignal(next) });
}
// Apply a verify-phase output and route the workflow:
//  - pass + units remaining  -> advance to the next ready unit (clarify)
//  - pass + all units done   -> capture phase (if enabled) or completed
//  - fail + needsHuman       -> clarify_pending (with decisions) or blocked
//  - fail + attempts left    -> back to clarify; otherwise blocked
function applyVerify(state, input) {
  if (state.status !== "phase_verify") {
    return withResponse({
      signal: faultSignal(state, "verify output is only valid during phase_verify", [], true)
    });
  }
  const validation = validateVerifyOutput(input);
  if (!validation.ok || !validation.value) {
    return withResponse({
      signal: faultSignal(state, "invalid verify output", validation.issues, true)
    });
  }
  // All mutations happen on a deep copy; the caller receives it as `state`.
  const next = cloneState(state);
  storePhaseOutput(next, "verify", validation.value);
  next.lastVerifyIssues = validation.value.issues;
  if (validation.value.passed) {
    const allUnitsComplete = completedUnitIds(next).length >= next.units.length;
    if (!allUnitsComplete) {
      advanceToNextUnit(next);
      return withResponse({ state: next, signal: nextSignal(next) });
    }
    if (next.captureEnabled) {
      next.verifyAttempts = 0;
      setPhase(next, "phase_capture", "capture");
      return withResponse({ state: next, signal: nextSignal(next) });
    }
    next.verifyAttempts = 0;
    next.status = "completed";
    next.updatedAt = nowIso();
    return withResponse({ state: next, signal: nextSignal(next) });
  }
  // Failed verify: the attempt is counted before choosing a recovery path.
  next.verifyAttempts += 1;
  if (validation.value.needsHuman) {
    if (!validation.value.decisions || validation.value.decisions.length === 0) {
      // Human input is required but no concrete questions were supplied, so
      // the workflow cannot make progress: block it with the verify summary.
      next.status = "blocked";
      next.blockedReason = validation.value.summary;
      next.updatedAt = nowIso();
      return withResponse({ state: next, signal: nextSignal(next) });
    }
    next.pendingDecisions = validation.value.decisions;
    setPhase(next, "clarify_pending", "clarify");
    return withResponse({ state: next, signal: nextSignal(next) });
  }
  if (next.verifyAttempts >= next.maxVerifyAttempts) {
    // Retry budget exhausted; prefer the verifier's retry hint as the reason.
    next.status = "blocked";
    next.blockedReason = validation.value.retryHint || validation.value.summary;
    next.updatedAt = nowIso();
    return withResponse({ state: next, signal: nextSignal(next) });
  }
  setPhase(next, "phase_clarify", "clarify");
  return withResponse({ state: next, signal: nextSignal(next) });
}
// Apply a capture-phase output: validate it, store it on a copied state, and
// mark the workflow completed.
function applyCapture(state, input) {
  if (state.status !== "phase_capture") {
    const fault = faultSignal(state, "capture output is only valid during phase_capture", [], true);
    return withResponse({ signal: fault });
  }
  const validation = validateCaptureOutput(input);
  if (!validation.ok || !validation.value) {
    return withResponse({ signal: faultSignal(state, "invalid capture output", validation.issues, true) });
  }
  const next = cloneState(state);
  storePhaseOutput(next, "capture", validation.value);
  next.status = "completed";
  next.updatedAt = nowIso();
  return withResponse({ state: next, signal: nextSignal(next) });
}
| export { | ||
| absoluteRoot, | ||
| workflowStatePath, | ||
| absoluteWorkflowStatePath, | ||
| workflowTaskRootPath, | ||
| workflowTaskIndexPath, | ||
| workflowRelayRootPath, | ||
| unitBriefPath, | ||
| unitContextPath, | ||
| unitStatusPath, | ||
| unitResultPath, | ||
| unitBatonPath, | ||
| unitRelayPath, | ||
| completedUnitIds, | ||
| readyUnits, | ||
| buildRunContext, | ||
| validateClarifyOutput, | ||
| validateDecisionAnswers, | ||
| validateExecuteOutput, | ||
| validateVerifyOutput, | ||
| validateCaptureOutput, | ||
| validateWorkflowState, | ||
| validateDecisionPromptArray, | ||
| validateVerifyIssueArray, | ||
| touchUpdatedAt, | ||
| saveWorkflowState, | ||
| loadWorkflowState, | ||
| saveState, | ||
| loadState, | ||
| listStates, | ||
| validateState, | ||
| createWorkflow, | ||
| nextSignal, | ||
| nextResponse, | ||
| stopWorkflow, | ||
| applyDecisionAnswers, | ||
| submitDecisions, | ||
| applyPhaseOutput, | ||
| submitPhaseOutput | ||
| }; |
+323
-13
@@ -6,9 +6,17 @@ #!/usr/bin/env node | ||
| applyPhaseOutput, | ||
| completedUnitIds, | ||
| createWorkflow, | ||
| loadWorkflowState, | ||
| nextSignal, | ||
| readyUnits, | ||
| saveWorkflowState, | ||
| stopWorkflow, | ||
| validateWorkflowState | ||
| } from "./chunk-JNIRFG7T.js"; | ||
| unitBriefPath, | ||
| unitRelayPath, | ||
| unitResultPath, | ||
| unitStatusPath, | ||
| validateWorkflowState, | ||
| workflowTaskIndexPath, | ||
| workflowTaskRootPath | ||
| } from "./chunk-5PW7BQFK.js"; | ||
@@ -265,2 +273,271 @@ // src/cli.ts | ||
| } | ||
// Build a stable unit id; zero-padding to three digits keeps ids
// lexicographically sorted (unit-001, unit-002, ..., unit-010).
function makeUnitId(index) {
  const padded = `${index}`.padStart(3, "0");
  return `unit-${padded}`;
}
// Detect an explicit deliverable list in a free-form message.
// Returns { items, ordered } when the message contains at least two numbered
// ("1. x" / "2) y") or bulleted ("- x" / "* y" / "+ z") entries; numbered
// lists win and are marked ordered. Returns undefined otherwise.
function explicitDeliverables(message) {
  const lines = [];
  for (const raw of message.split(/\r?\n/)) {
    const trimmed = raw.trim();
    if (trimmed) {
      lines.push(trimmed);
    }
  }
  // Collect the captured payload of every line matching `pattern`.
  const collect = (pattern) => {
    const items = [];
    for (const line of lines) {
      const captured = line.match(pattern)?.[1]?.trim();
      if (captured) {
        items.push(captured);
      }
    }
    return items;
  };
  const numbered = collect(/^\d+[.)]\s+(.+)$/);
  if (numbered.length >= 2) {
    return { items: numbered, ordered: true };
  }
  const bullets = collect(/^[-*+]\s+(.+)$/);
  if (bullets.length >= 2) {
    return { items: bullets, ordered: false };
  }
  return undefined;
}
// Reduce a file path to a coarse "scope key" used to group related files.
// Shallow paths (<= 2 segments) are returned unchanged; monorepo roots
// (packages/*, services/*) and common source roots (src/app/lib/tests) group
// by their first sub-directory; everything else falls back to at most the
// first two path segments.
function scopeKeyForFilePath(filePath) {
  const segments = filePath.split("/").filter(Boolean);
  if (segments.length <= 2) {
    return filePath;
  }
  const [top, nested] = segments;
  const isWorkspaceRoot = top === "packages" || top === "services";
  if (isWorkspaceRoot && nested) {
    return `${top}/${nested}`;
  }
  const isSourceRoot = top === "src" || top === "app" || top === "lib" || /^tests?$/i.test(top);
  // A dot in the second segment means it is a file, not a directory to group by.
  if (isSourceRoot && nested && !nested.includes(".")) {
    return `${top}/${nested}`;
  }
  const keep = Math.min(segments.length - 1, 2);
  return segments.slice(0, keep).join("/");
}
// Derive a default priority for a unit that did not declare one.
// Integration units and units backed by hard evidence (errors, tests,
// tickets) are "high"; explicit fix/create intent is "medium"; otherwise "low".
function inferUnitPriority(objective, anchors, kind) {
  if (kind === "integration") {
    return "high";
  }
  const evidenceCount = anchors.errors.length + anchors.tests.length + anchors.tickets.length;
  if (evidenceCount > 0) {
    return "high";
  }
  if (looksLikeFix(objective) || looksLikeCreate(objective)) {
    return "medium";
  }
  return "low";
}
// Derive a default effort estimate ("small" | "medium" | "large") for a unit.
// Integration effort scales with how many surfaces fan in; work units grow
// with dependency count, scope width, and broad-verb objectives.
function inferUnitEffort(objective, scope, dependencies, kind) {
  if (kind === "integration") {
    return scope.length >= 3 ? "large" : "medium";
  }
  const hasDependencies = dependencies.length > 0;
  const wideScope = scope.length >= 4;
  if (hasDependencies || wideScope) {
    return "large";
  }
  // Verbs that usually imply cross-cutting changes bump small work to medium.
  const broadVerbs = /\b(migrate|refactor|redesign|restructure|cross-cutting)\b/i;
  if (scope.length >= 2 || broadVerbs.test(objective)) {
    return "medium";
  }
  return "small";
}
// Derive default acceptance criteria for a unit from its kind, objective
// intent, and anchors. Each entry pairs a guard with the criteria it
// contributes; order is preserved and duplicates are removed at the end.
function inferAcceptanceCriteria(objective, anchors, scope, kind) {
  const candidates = [
    [kind === "integration", [
      "Cross-unit interfaces and shared surfaces still align after all upstream units complete.",
      "The final user-facing result matches the original request rather than only isolated slices."
    ]],
    [looksLikeFix(objective), [
      "The reported symptom or failing behavior is no longer reproducible on the scoped surface."
    ]],
    [looksLikeCreate(objective), [
      "The requested behavior or artifact exists on the intended surface and stays bounded to this unit."
    ]],
    [anchors.tests.length > 0, [
      `Verification can reference ${anchors.tests.join(", ")} without broadening into unrelated surfaces.`
    ]],
    [scope.length > 0, [
      `Changes remain bounded to: ${scope.join(", ")}.`
    ]]
  ];
  const criteria = candidates.flatMap(([applies, texts]) => (applies ? texts : []));
  return unique(criteria);
}
// Derive default shared-risk notes for a unit. Guard/risk pairs are evaluated
// in a fixed order and deduped via unique() at the end.
function inferSharedRisks(objective, anchors, scope, kind) {
  // Config/toolchain files have blast radius beyond this unit's scope.
  const touchesToolchain = scope.some((item) => /(?:package\.json|tsconfig|vite\.config|eslint|prettier|pnpm-lock|package-lock|yarn\.lock)/i.test(item));
  const candidates = [
    [kind === "integration", "Cross-unit merge drift can hide interface breakage until the final verify pass."],
    [anchors.errors.length > 0, "A narrow fix can mask the symptom without proving the underlying failure surface is covered."],
    [anchors.tests.length === 0 && (looksLikeFix(objective) || looksLikeCreate(objective)), "Verification surface is implicit, so clarify must lock down what proves the result."],
    [touchesToolchain, "Config or toolchain changes can affect sibling units outside the local scope."],
    [scope.length >= 3, "Wide scope increases the chance that a supposedly isolated unit bleeds into neighboring work."]
  ];
  const risks = [];
  for (const [applies, risk] of candidates) {
    if (applies) {
      risks.push(risk);
    }
  }
  return unique(risks);
}
// Fill in every derivable unit field (priority, effort, mergeRequired, risks,
// acceptance criteria) only where the caller left it unset, and normalize the
// optional array fields. Caller-provided values always win over inference.
function enrichUnit(unit, objective, anchors, graphStrategy) {
  const scope = Array.isArray(unit.scope) ? unique(unit.scope) : [];
  const dependsOn = Array.isArray(unit.dependsOn) ? unit.dependsOn : [];
  const ownership = Array.isArray(unit.ownership) ? unique(unit.ownership) : [];
  const kind = unit.kind ?? "work";
  const enriched = { ...unit, scope, ownership };
  enriched.priority = unit.priority ?? inferUnitPriority(objective, anchors, kind);
  enriched.estimatedEffort = unit.estimatedEffort ?? inferUnitEffort(objective, scope, dependsOn, kind);
  // Anything beyond a single-unit graph implies a merge step at the end.
  enriched.mergeRequired = unit.mergeRequired ?? graphStrategy !== "single";
  enriched.sharedRisks = unit.sharedRisks ?? inferSharedRisks(objective, anchors, scope, kind);
  enriched.acceptanceCriteria = unit.acceptanceCriteria ?? inferAcceptanceCriteria(objective, anchors, scope, kind);
  return enriched;
}
// Build the one-and-only unit for a request that stays un-carved.
// Scope is the union of anchored file paths and symbols; ownership is the
// directory-level scope key of each anchored file.
function buildSingleUnit(route, objective, anchors, intents, notes) {
  const ownershipScopes = unique(anchors.filePaths.map(scopeKeyForFilePath));
  const draft = {
    id: makeUnitId(1),
    title: normalizeTitle(objective),
    kind: "work",
    request: route.normalizedMessage,
    scope: unique([...anchors.filePaths, ...anchors.symbols]),
    dependsOn: [],
    parallelizable: false,
    ownership: ownershipScopes,
    anchors,
    intakeIntents: intents,
    intakeNotes: notes
  };
  // A lone unit always runs under the "single" graph strategy.
  return enrichUnit(draft, objective, anchors, "single");
}
// Build the trailing integration unit that fans in after a parallel carve.
// It depends on every prior unit and inherits the deduplicated union of their
// scopes, ownership boundaries, and anchors.
function buildIntegrationUnit(objective, index, priorUnits, graphNotes) {
  // Merge one field across every upstream unit, deduplicated.
  const gather = (pick) => unique(priorUnits.flatMap(pick));
  const mergedAnchors = {
    filePaths: gather((unit) => unit.anchors?.filePaths ?? []),
    symbols: gather((unit) => unit.anchors?.symbols ?? []),
    errors: gather((unit) => unit.anchors?.errors ?? []),
    tests: gather((unit) => unit.anchors?.tests ?? []),
    tickets: gather((unit) => unit.anchors?.tickets ?? [])
  };
  const draft = {
    id: makeUnitId(index),
    title: "Integrate and verify cross-unit result",
    kind: "integration",
    request: `Integrate and verify the combined result for: ${objective}`,
    scope: gather((unit) => unit.scope ?? []),
    dependsOn: priorUnits.map((unit) => unit.id),
    parallelizable: false,
    ownership: gather((unit) => unit.ownership ?? []),
    intakeNotes: graphNotes,
    verifyFocus: [
      "Cross-unit interfaces still align after all ready units complete.",
      "The final user-facing result matches the original objective, not just each isolated slice."
    ]
  };
  return enrichUnit(draft, objective, mergedAnchors, "parallel_fanout");
}
/**
 * Carve the normalized request into workflow units plus a graph strategy.
 *
 * Heuristics are tried in priority order:
 *   1. Explicit numbered/bulleted deliverables in the message.
 *   2. Two or more disjoint directory-level file scopes among anchored paths.
 *   3. Two or more explicit symbols with no concrete file paths.
 *   4. Fallback: a single bounded unit covering the whole request.
 *
 * @param route    - routed request; only `normalizedMessage` is read here.
 * @param objective - one-line objective used for titles and inference.
 * @param anchors  - extracted anchors ({ filePaths, symbols, errors, tests, tickets }).
 * @param intents  - capability intents threaded onto every carved unit.
 * @param notes    - intake notes threaded onto every carved unit.
 * @returns { proposedUnits, graphStrategy, graphNotes } where graphStrategy is
 *          "single" | "serial" | "parallel_fanout".
 */
function carveWorkflowUnits(route, objective, anchors, intents, notes) {
  // Heuristic 1: explicit numbered/bulleted deliverables in the message.
  const deliverables = explicitDeliverables(route.normalizedMessage);
  if (deliverables) {
    const graphNotes = deliverables.ordered ? ["carved from explicit numbered deliverables; run units in the declared order"] : ["carved from explicit bullet deliverables; root units are parallel-ready and fan into one integration unit"];
    const units = deliverables.items.map((item, index) => {
      const itemAnchors = extractAnchors(item);
      const scopedPaths = unique(itemAnchors.filePaths.map(scopeKeyForFilePath));
      // Ordered lists chain each unit onto its predecessor; ids are 1-based,
      // so makeUnitId(index) is the previous unit's id.
      const dependsOn = deliverables.ordered && index > 0 ? [makeUnitId(index)] : [];
      return enrichUnit({
        id: makeUnitId(index + 1),
        title: normalizeTitle(item),
        kind: "work",
        request: item,
        scope: unique([...itemAnchors.filePaths, ...itemAnchors.symbols]),
        dependsOn,
        parallelizable: !deliverables.ordered,
        ownership: scopedPaths,
        anchors: itemAnchors,
        intakeIntents: intents,
        intakeNotes: [...notes, ...graphNotes]
      }, item, itemAnchors, deliverables.ordered ? "serial" : "parallel_fanout");
    });
    // Only unordered (parallel) deliverables get a trailing integration unit.
    const proposedUnits = !deliverables.ordered ? [...units, buildIntegrationUnit(objective, units.length + 1, units, graphNotes)] : units;
    return {
      proposedUnits,
      graphStrategy: deliverables.ordered ? "serial" : "parallel_fanout",
      graphNotes
    };
  }
  // Heuristic 2: group anchored file paths by directory-level scope key; two
  // or more distinct scopes carve into parallel units plus an integration pass.
  const groupedScopes = /* @__PURE__ */ new Map();
  for (const filePath of anchors.filePaths) {
    const scope = scopeKeyForFilePath(filePath);
    groupedScopes.set(scope, [...groupedScopes.get(scope) ?? [], filePath]);
  }
  if (groupedScopes.size >= 2) {
    const graphNotes = [
      "carved from disjoint file scopes; root units are parallel-ready when the host can execute them independently",
      "a final integration unit fans in after the scoped units verify successfully"
    ];
    const scopeEntries = [...groupedScopes.entries()];
    const units = scopeEntries.map(
      ([scope, scopedPaths], index) => enrichUnit(
        {
          id: makeUnitId(index + 1),
          title: normalizeTitle(`${scope}: ${objective}`),
          kind: "work",
          request: `${objective}
Focus this unit on ${scope}.`,
          scope: scopedPaths,
          dependsOn: [],
          parallelizable: true,
          ownership: [scope],
          anchors: {
            ...anchors,
            filePaths: scopedPaths,
            // Keep only symbols that appear in one of this unit's paths.
            // FIX: compare case-insensitively on BOTH sides — the previous
            // code lowercased only the symbol, so CamelCase symbols never
            // matched any path segment.
            symbols: anchors.symbols.filter((symbol) => scopedPaths.some((candidate) => candidate.toLowerCase().includes(symbol.toLowerCase())))
          },
          intakeIntents: intents,
          intakeNotes: [...notes, ...graphNotes]
        },
        objective,
        {
          ...anchors,
          filePaths: scopedPaths,
          symbols: []
        },
        "parallel_fanout"
      )
    );
    return {
      proposedUnits: [...units, buildIntegrationUnit(objective, units.length + 1, units, graphNotes)],
      graphStrategy: "parallel_fanout",
      graphNotes
    };
  }
  // Heuristic 3: multiple explicit symbols but no concrete file paths — one
  // unit per symbol; each must map its symbol to code before executing.
  if (anchors.symbols.length >= 2 && anchors.filePaths.length === 0) {
    const graphNotes = [
      "carved from multiple explicit symbol targets without concrete file paths",
      "each root unit should map its symbol to code before execution and fan into one integration pass"
    ];
    const units = anchors.symbols.map(
      (symbol, index) => enrichUnit(
        {
          id: makeUnitId(index + 1),
          title: normalizeTitle(`${symbol}: ${objective}`),
          kind: "work",
          request: `${objective}
Focus this unit on the ${symbol} surface.`,
          scope: [symbol],
          dependsOn: [],
          parallelizable: true,
          ownership: [symbol],
          anchors: {
            ...anchors,
            filePaths: [],
            symbols: [symbol]
          },
          intakeIntents: intents,
          intakeNotes: [...notes, ...graphNotes]
        },
        objective,
        {
          ...anchors,
          filePaths: [],
          symbols: [symbol]
        },
        "parallel_fanout"
      )
    );
    return {
      proposedUnits: [...units, buildIntegrationUnit(objective, units.length + 1, units, graphNotes)],
      graphStrategy: "parallel_fanout",
      graphNotes
    };
  }
  // Fallback: the request stays as one bounded unit.
  return {
    proposedUnits: [buildSingleUnit(route, objective, anchors, intents, notes)],
    graphStrategy: "single",
    graphNotes: ["request stayed as one bounded workflow unit"]
  };
}
| function hasAcceptanceSignal(message) { | ||
@@ -286,2 +563,3 @@ return /\b(should|must|expected|acceptance|done when|success means|result should)\b/i.test(message) || /(되어야|해야 한다|기대 결과|완료 조건|성공 조건)/.test(message); | ||
| if (route.kind !== "work") { | ||
| const singleUnit = buildSingleUnit(route, objective, anchors, intents, ["request was routed to chat mode"]); | ||
| return { | ||
@@ -291,2 +569,5 @@ objective, | ||
| intents, | ||
| proposedUnits: [singleUnit], | ||
| graphStrategy: "single", | ||
| graphNotes: ["request was routed to chat mode"], | ||
| missingEvidence, | ||
@@ -357,2 +638,6 @@ questions, | ||
| } | ||
| if ((looksLikeFix(objective) || looksLikeCreate(objective)) && anchors.tests.length === 0) { | ||
| questions.push("What concrete test, reproduction, or verification surface should prove the result is correct?"); | ||
| missingEvidence.push("verification surface"); | ||
| } | ||
| if (looksLikeFix(objective) && anchors.errors.length === 0) { | ||
@@ -374,2 +659,3 @@ questions.push("What exact symptom, failing case, or error should be fixed?"); | ||
| } | ||
| const carved = carveWorkflowUnits(route, objective, anchors, intents, notes); | ||
| return { | ||
@@ -379,2 +665,5 @@ objective, | ||
| intents, | ||
| proposedUnits: carved.proposedUnits, | ||
| graphStrategy: carved.graphStrategy, | ||
| graphNotes: carved.graphNotes, | ||
| missingEvidence, | ||
@@ -561,12 +850,5 @@ questions, | ||
| description: intake.objective, | ||
| units: [ | ||
| { | ||
| id: "unit-001", | ||
| title: normalizeTitle(intake.objective), | ||
| request: route.normalizedMessage, | ||
| anchors: intake.anchors, | ||
| intakeIntents: intake.intents, | ||
| intakeNotes: intake.notes | ||
| } | ||
| ], | ||
| units: intake.proposedUnits, | ||
| graphStrategy: intake.graphStrategy, | ||
| graphNotes: intake.graphNotes, | ||
| captureEnabled: input.captureEnabled ?? false, | ||
@@ -587,2 +869,4 @@ maxVerifyAttempts: input.maxVerifyAttempts ?? 3 | ||
| const unit = state.units[state.currentUnitIndex]; | ||
| const completed = completedUnitIds(state); | ||
| const ready = readyUnits(state); | ||
| return { | ||
@@ -592,8 +876,32 @@ workflowId: state.workflowId, | ||
| description: state.description, | ||
| graphStrategy: state.graphStrategy ?? "single", | ||
| status: state.status, | ||
| phase: state.phase, | ||
| workflowTaskIndexRef: workflowTaskIndexPath(state.workflowId), | ||
| taskRoot: workflowTaskRootPath(state.workflowId), | ||
| currentUnit: unit ? { | ||
| id: unit.id, | ||
| title: unit.title | ||
| title: unit.title, | ||
| kind: unit.kind ?? "work", | ||
| dependsOn: unit.dependsOn ?? [], | ||
| priority: unit.priority ?? "medium", | ||
| estimatedEffort: unit.estimatedEffort ?? "medium", | ||
| acceptanceCriteria: unit.acceptanceCriteria ?? [], | ||
| sharedRisks: unit.sharedRisks ?? [], | ||
| packetRef: unitBriefPath(state.workflowId, unit.id), | ||
| statusRef: unitStatusPath(state.workflowId, unit.id), | ||
| resultRef: unitResultPath(state.workflowId, unit.id), | ||
| relayRefs: (unit.dependsOn ?? []).map((dependencyId) => unitRelayPath(state.workflowId, dependencyId)) | ||
| } : void 0, | ||
| readyUnits: ready.map((readyUnit) => ({ | ||
| id: readyUnit.id, | ||
| title: readyUnit.title, | ||
| kind: readyUnit.kind ?? "work", | ||
| parallelizable: readyUnit.parallelizable === true, | ||
| priority: readyUnit.priority ?? "medium", | ||
| estimatedEffort: readyUnit.estimatedEffort ?? "medium", | ||
| packetRef: unitBriefPath(state.workflowId, readyUnit.id) | ||
| })), | ||
| completedUnitCount: completed.length, | ||
| totalUnitCount: state.units.length, | ||
| verifyAttempts: state.verifyAttempts, | ||
@@ -649,2 +957,4 @@ maxVerifyAttempts: state.maxVerifyAttempts, | ||
| statePath: savedPath, | ||
| workflowTaskIndexRef: workflowTaskIndexPath(result.state.workflowId), | ||
| taskRoot: workflowTaskRootPath(result.state.workflowId), | ||
| signal: result.signal | ||
@@ -651,0 +961,0 @@ }); |
+64
-7
| type Phase = "clarify" | "plan" | "execute" | "verify"; | ||
| type RuntimePhase = "clarify" | "execute" | "verify" | "capture"; | ||
| type WorkflowPhase = RuntimePhase; | ||
| type WorkflowGraphStrategy = "single" | "serial" | "parallel_fanout"; | ||
| type WorkflowUnitKind = "work" | "integration"; | ||
| type WorkflowPriority = "high" | "medium" | "low"; | ||
| type WorkflowEffort = "small" | "medium" | "large"; | ||
| type WorkflowStatus = "phase_clarify" | "clarify_pending" | "phase_execute" | "phase_verify" | "phase_capture" | "completed" | "blocked" | "stopped"; | ||
@@ -25,2 +29,4 @@ interface DecisionOption { | ||
| assumptions: string[]; | ||
| evidence: string[]; | ||
| acceptanceCriteria: string[]; | ||
| verifyFocus?: string[]; | ||
@@ -34,2 +40,4 @@ decisions: DecisionPrompt[]; | ||
| artifacts?: string[]; | ||
| checks?: string[]; | ||
| handoffNotes?: string[]; | ||
| notes: string[]; | ||
@@ -48,5 +56,13 @@ } | ||
| } | ||
| interface VerifyCheck { | ||
| name: string; | ||
| status: "passed" | "failed" | "skipped"; | ||
| command?: string; | ||
| evidence: string; | ||
| } | ||
| interface VerifyOutput { | ||
| passed: boolean; | ||
| score?: VerifyScore; | ||
| checks: VerifyCheck[]; | ||
| evidence: string[]; | ||
| issues: VerifyIssue[]; | ||
@@ -56,2 +72,3 @@ decisions?: DecisionPrompt[]; | ||
| retryHint?: string; | ||
| unverifiedClaims?: string[]; | ||
| summary: string; | ||
@@ -71,2 +88,17 @@ } | ||
| title: string; | ||
| kind?: WorkflowUnitKind; | ||
| request?: string; | ||
| scope?: string[]; | ||
| dependsOn?: string[]; | ||
| parallelizable?: boolean; | ||
| ownership?: string[]; | ||
| priority?: WorkflowPriority; | ||
| estimatedEffort?: WorkflowEffort; | ||
| mergeRequired?: boolean; | ||
| sharedRisks?: string[]; | ||
| acceptanceCriteria?: string[]; | ||
| verifyFocus?: string[]; | ||
| anchors?: RequestAnchors; | ||
| intakeIntents?: CapabilityIntent[]; | ||
| intakeNotes?: string[]; | ||
| [key: string]: unknown; | ||
@@ -79,2 +111,4 @@ } | ||
| units: WorkflowUnit[]; | ||
| graphStrategy?: WorkflowGraphStrategy; | ||
| graphNotes?: string[]; | ||
| captureEnabled?: boolean; | ||
@@ -150,2 +184,4 @@ maxVerifyAttempts?: number; | ||
| units: WorkflowUnit[]; | ||
| graphStrategy?: WorkflowGraphStrategy; | ||
| graphNotes?: string[]; | ||
| currentUnitIndex: number; | ||
@@ -158,2 +194,4 @@ captureEnabled: boolean; | ||
| outputs: WorkflowOutputs; | ||
| taskRoot: string; | ||
| relayRoot: string; | ||
| createdAt: string; | ||
@@ -163,6 +201,2 @@ updatedAt: string; | ||
| blockedReason?: string; | ||
| clarifyOutput?: ClarifyOutput; | ||
| planOutput?: PlanOutput; | ||
| tasks: Task[]; | ||
| activeWork?: ActiveWork; | ||
| } | ||
@@ -178,2 +212,9 @@ interface RunSignal { | ||
| state_ref: string; | ||
| workflow_task_index_ref?: string; | ||
| task_packet_ref?: string; | ||
| task_context_ref?: string; | ||
| task_status_ref?: string; | ||
| task_result_ref?: string; | ||
| baton_ref?: string; | ||
| relay_refs?: string[]; | ||
| context?: Record<string, unknown>; | ||
@@ -194,2 +235,4 @@ on_complete: { | ||
| state_ref: string; | ||
| workflow_task_index_ref?: string; | ||
| task_status_ref?: string; | ||
| on_complete?: Record<string, unknown>; | ||
@@ -204,2 +247,3 @@ instructions: string; | ||
| state_ref: string; | ||
| workflow_task_index_ref?: string; | ||
| outputs?: string[]; | ||
@@ -312,2 +356,5 @@ message: string; | ||
| intents: CapabilityIntent[]; | ||
| proposedUnits: WorkflowUnit[]; | ||
| graphStrategy: WorkflowGraphStrategy; | ||
| graphNotes: string[]; | ||
| missingEvidence: string[]; | ||
@@ -405,5 +452,15 @@ questions: string[]; | ||
| declare function absoluteRoot(rootDir?: string): string; | ||
| declare function workflowStatePath(workflowId: string): string; | ||
| declare function absoluteWorkflowStatePath(workflowId: string, rootDir?: string): string; | ||
| declare function statePath(workflowId: string, rootDir?: string): string; | ||
| declare function workflowTaskRootPath(workflowId: string): string; | ||
| declare function workflowTaskIndexPath(workflowId: string): string; | ||
| declare function workflowRelayRootPath(workflowId: string): string; | ||
| declare function unitBriefPath(workflowId: string, unitId: string): string; | ||
| declare function unitContextPath(workflowId: string, unitId: string): string; | ||
| declare function unitStatusPath(workflowId: string, unitId: string): string; | ||
| declare function unitResultPath(workflowId: string, unitId: string): string; | ||
| declare function unitBatonPath(workflowId: string, unitId: string): string; | ||
| declare function unitRelayPath(workflowId: string, unitId: string): string; | ||
| declare function touchUpdatedAt(timestamp: string): string; | ||
@@ -443,3 +500,3 @@ declare function saveWorkflowState(state: WorkflowState, rootDir?: string): string; | ||
| declare function submitDecisionsCommand(workflowId: string): string; | ||
| declare function runSignalInstructions(workflowId: string, phase: RuntimePhase, schemaRef: string, stateRef: string): string; | ||
| declare function runSignalInstructions(workflowId: string, phase: RuntimePhase, schemaRef: string, stateRef: string, context?: Record<string, unknown>): string; | ||
| declare function clarifyGateInstructions(workflowId: string, stateRef: string): string; | ||
@@ -449,2 +506,2 @@ declare function doneInstructions(workflowId: string, stateRef: string): string; | ||
| export { type ActiveWork, type BatchResponse, type Capability, type CapabilityAdapter, type CapabilityIntent, type CapabilityKind, type CapabilityPolicy, type CapabilitySurface, type CaptureEntry, type CaptureOutput, type ClarifyOutput, type CollectedContext, type ContextInput, type ContextSpec, type ControlSignal, type CreateWorkflowInput, type DecisionAnswer, type DecisionOption, type DecisionPrompt, type DoneResponse, type DoneSignal, type ExecuteOutput, type FaultResponse, type FaultSignal, type ForkedWorkerAdapter, type GateResponse, type GateSignal, type IntakePlan, type LocalControlCommandName, type LocalControlCommandSpec, type OutputSpec, type Phase, type PhaseResponse, type PlanOutput, type ProtocolResponse, type RequestAnchors, type RouteConfidence, type RouteDecision, type RouteKind, type RouteSource, type RunSignal, type RuntimePhase, type StartFromMessageInput, type StartFromMessageResult, type Task, type TaskDefinition, type ValidationResult, type VerifyIssue, type VerifyOutput, type VerifyScore, type WorkItem, type WorkItemState, type WorkerExecutionMode, type WorkerLaunchRequest, type WorkerLaunchResult, type WorkflowOutputs, type WorkflowPhase, type WorkflowState, type WorkflowStatus, type WorkflowUnit, absoluteWorkflowStatePath, applyDecisionAnswers, applyPhaseOutput, attachSignalInstructions, buildCapturePrompt, buildClarifyGateSignal, buildClarifyPrompt, buildDoneSignal, buildExecutePrompt, buildFaultSignal, buildPromptForPhase, buildRunSignal, buildVerifyPrompt, clarifyGateInstructions, cliPrefix, createWorkflow, doneInstructions, doneSignalInstructions, faultInstructions, faultSignalInstructions, intakeCommand, listStates, loadState, loadWorkflowState, nextCommand, nextResponse, nextSignal, promptRefForPhase, resumeCommand, routeCommand, runSignalInstructions, saveState, saveWorkflowState, schemaRefForPhase, signalForState, startCommand, statePath, statusCommand, stopCommand, stopWorkflow, submitDecisions, submitDecisionsCommand, 
submitPhaseCommand, submitPhaseOutput, touchUpdatedAt, validateCaptureOutput, validateClarifyOutput, validateDecisionAnswers, validateDecisionPromptArray, validateExecuteOutput, validateState, validateVerifyIssueArray, validateVerifyOutput, validateWorkflowState, workflowStatePath }; | ||
| export { type ActiveWork, type BatchResponse, type Capability, type CapabilityAdapter, type CapabilityIntent, type CapabilityKind, type CapabilityPolicy, type CapabilitySurface, type CaptureEntry, type CaptureOutput, type ClarifyOutput, type CollectedContext, type ContextInput, type ContextSpec, type ControlSignal, type CreateWorkflowInput, type DecisionAnswer, type DecisionOption, type DecisionPrompt, type DoneResponse, type DoneSignal, type ExecuteOutput, type FaultResponse, type FaultSignal, type ForkedWorkerAdapter, type GateResponse, type GateSignal, type IntakePlan, type LocalControlCommandName, type LocalControlCommandSpec, type OutputSpec, type Phase, type PhaseResponse, type PlanOutput, type ProtocolResponse, type RequestAnchors, type RouteConfidence, type RouteDecision, type RouteKind, type RouteSource, type RunSignal, type RuntimePhase, type StartFromMessageInput, type StartFromMessageResult, type Task, type TaskDefinition, type ValidationResult, type VerifyCheck, type VerifyIssue, type VerifyOutput, type VerifyScore, type WorkItem, type WorkItemState, type WorkerExecutionMode, type WorkerLaunchRequest, type WorkerLaunchResult, type WorkflowEffort, type WorkflowGraphStrategy, type WorkflowOutputs, type WorkflowPhase, type WorkflowPriority, type WorkflowState, type WorkflowStatus, type WorkflowUnit, type WorkflowUnitKind, absoluteRoot, absoluteWorkflowStatePath, applyDecisionAnswers, applyPhaseOutput, attachSignalInstructions, buildCapturePrompt, buildClarifyGateSignal, buildClarifyPrompt, buildDoneSignal, buildExecutePrompt, buildFaultSignal, buildPromptForPhase, buildRunSignal, buildVerifyPrompt, clarifyGateInstructions, cliPrefix, createWorkflow, doneInstructions, doneSignalInstructions, faultInstructions, faultSignalInstructions, intakeCommand, listStates, loadState, loadWorkflowState, nextCommand, nextResponse, nextSignal, promptRefForPhase, resumeCommand, routeCommand, runSignalInstructions, saveState, saveWorkflowState, schemaRefForPhase, 
signalForState, startCommand, statusCommand, stopCommand, stopWorkflow, submitDecisions, submitDecisionsCommand, submitPhaseCommand, submitPhaseOutput, touchUpdatedAt, unitBatonPath, unitBriefPath, unitContextPath, unitRelayPath, unitResultPath, unitStatusPath, validateCaptureOutput, validateClarifyOutput, validateDecisionAnswers, validateDecisionPromptArray, validateExecuteOutput, validateState, validateVerifyIssueArray, validateVerifyOutput, validateWorkflowState, workflowRelayRootPath, workflowStatePath, workflowTaskIndexPath, workflowTaskRootPath }; |
+137
-12
| import { | ||
| absoluteRoot, | ||
| absoluteWorkflowStatePath, | ||
| applyDecisionAnswers, | ||
| applyPhaseOutput, | ||
| buildRunContext, | ||
| completedUnitIds, | ||
| createWorkflow, | ||
@@ -13,3 +16,2 @@ listStates, | ||
| saveWorkflowState, | ||
| statePath, | ||
| stopWorkflow, | ||
@@ -19,2 +21,8 @@ submitDecisions, | ||
| touchUpdatedAt, | ||
| unitBatonPath, | ||
| unitBriefPath, | ||
| unitContextPath, | ||
| unitRelayPath, | ||
| unitResultPath, | ||
| unitStatusPath, | ||
| validateCaptureOutput, | ||
@@ -29,4 +37,7 @@ validateClarifyOutput, | ||
| validateWorkflowState, | ||
| workflowStatePath | ||
| } from "./chunk-JNIRFG7T.js"; | ||
| workflowRelayRootPath, | ||
| workflowStatePath, | ||
| workflowTaskIndexPath, | ||
| workflowTaskRootPath | ||
| } from "./chunk-5PW7BQFK.js"; | ||
@@ -74,3 +85,3 @@ // src/instructions.ts | ||
| } | ||
| function runSignalInstructions(workflowId, phase, schemaRef, stateRef) { | ||
| function runSignalInstructions(workflowId, phase, schemaRef, stateRef, context) { | ||
| const lines = [ | ||
@@ -82,7 +93,32 @@ `Execute the ${phase} phase for workflow ${workflowId}.`, | ||
| ]; | ||
| const graphStrategy = typeof context?.graphStrategy === "string" ? context.graphStrategy : void 0; | ||
| const currentUnit2 = context?.currentUnit && typeof context.currentUnit === "object" ? context.currentUnit : void 0; | ||
| const readySiblingUnitIds = Array.isArray(context?.readySiblingUnitIds) ? context.readySiblingUnitIds.filter((value) => typeof value === "string") : []; | ||
| const currentUnitRefs = context?.currentUnit && typeof context.currentUnit === "object" ? context.currentUnit : void 0; | ||
| const dependencyRelayRefs = Array.isArray(context?.dependencyRelayRefs) ? context.dependencyRelayRefs.filter((value) => typeof value === "string") : []; | ||
| if (graphStrategy) { | ||
| lines.push(`Graph strategy: ${graphStrategy}.`); | ||
| } | ||
| if (currentUnit2?.title && typeof currentUnit2.title === "string") { | ||
| lines.push(`Current unit: ${currentUnit2.title}${typeof currentUnit2.id === "string" ? ` (${currentUnit2.id})` : ""}.`); | ||
| } | ||
| if (currentUnitRefs?.packetRef && typeof currentUnitRefs.packetRef === "string") { | ||
| lines.push(`Read the task packet at ${currentUnitRefs.packetRef}.`); | ||
| } | ||
| if (currentUnitRefs?.statusRef && typeof currentUnitRefs.statusRef === "string") { | ||
| lines.push(`Update your understanding from ${currentUnitRefs.statusRef} before acting.`); | ||
| } | ||
| if (dependencyRelayRefs.length > 0) { | ||
| lines.push(`Read upstream relay files first: ${dependencyRelayRefs.join(", ")}.`); | ||
| } | ||
| if (readySiblingUnitIds.length > 0) { | ||
| lines.push( | ||
| `Other ready units exist: ${readySiblingUnitIds.join(", ")}. This run still covers only the current unit; use the graph metadata for host-level scheduling, not silent scope expansion.` | ||
| ); | ||
| } | ||
| if (phase === "clarify") { | ||
| lines.push("If external input is required, return ready=false with the full current decisions array."); | ||
| lines.push("If external input is required, return ready=false with the full current decisions array, plus evidence and acceptanceCriteria for what is already known."); | ||
| } | ||
| if (phase === "verify") { | ||
| lines.push("If the result needs an external decision, set needsHuman=true and provide bundled decisions."); | ||
| lines.push("If the result needs an external decision, set needsHuman=true and provide bundled decisions. Always include checks, evidence, and any unverifiedClaims."); | ||
| } | ||
@@ -155,2 +191,48 @@ return lines.join("\n"); | ||
| } | ||
// Render the "Graph context:" bullet block used in phase prompts.
// Combines run-context refs from buildRunContext(state) (task packet/status/
// result paths, sibling and blocked unit ids) with per-unit metadata.
// `unit` may be undefined, so every unit-derived line is guarded.
// Returns an array of strings; the trailing "" yields a blank separator line
// when the caller joins with "\n".
function graphContextBlock(state, unit) {
  const context = buildRunContext(state);
  // buildRunContext output is treated as loosely typed here; re-narrow each
  // field defensively before rendering.
  const currentUnit2 = context.currentUnit && typeof context.currentUnit === "object" ? context.currentUnit : void 0;
  const readySiblingUnitIds = Array.isArray(context.readySiblingUnitIds) ? context.readySiblingUnitIds.filter((value) => typeof value === "string") : [];
  const blockedUnitIds = Array.isArray(context.blockedUnitIds) ? context.blockedUnitIds.filter((value) => typeof value === "string") : [];
  const completed = completedUnitIds(state);
  const lines = ["Graph context:"];
  // Always-present summary lines.
  lines.push(`- Strategy: ${state.graphStrategy ?? "single"}`);
  lines.push(`- Completed units: ${completed.length} of ${state.units.length}`);
  lines.push(`- Workflow task index: ${workflowTaskIndexPath(state.workflowId)}`);
  // Optional lines sourced from the run context's current unit.
  if (currentUnit2?.kind && typeof currentUnit2.kind === "string") {
    lines.push(`- Current unit kind: ${currentUnit2.kind}`);
  }
  if (currentUnit2?.packetRef && typeof currentUnit2.packetRef === "string") {
    lines.push(`- Task packet: ${currentUnit2.packetRef}`);
  }
  if (currentUnit2?.statusRef && typeof currentUnit2.statusRef === "string") {
    lines.push(`- Task status: ${currentUnit2.statusRef}`);
  }
  if (currentUnit2?.resultRef && typeof currentUnit2.resultRef === "string") {
    lines.push(`- Task result: ${currentUnit2.resultRef}`);
  }
  // Optional lines sourced from the passed-in unit's own metadata.
  if (Array.isArray(unit?.dependsOn) && unit.dependsOn.length > 0) {
    lines.push(`- Depends on: ${unit.dependsOn.join(", ")}`);
  }
  if (Array.isArray(unit?.scope) && unit.scope.length > 0) {
    lines.push(`- Scoped surfaces: ${unit.scope.join(", ")}`);
  }
  if (Array.isArray(unit?.ownership) && unit.ownership.length > 0) {
    lines.push(`- Ownership boundary: ${unit.ownership.join(", ")}`);
  }
  if (Array.isArray(unit?.acceptanceCriteria) && unit.acceptanceCriteria.length > 0) {
    lines.push(`- Acceptance criteria: ${unit.acceptanceCriteria.join(" | ")}`);
  }
  if (Array.isArray(unit?.sharedRisks) && unit.sharedRisks.length > 0) {
    lines.push(`- Shared risks: ${unit.sharedRisks.join(" | ")}`);
  }
  // Scheduler context: other units that are ready or blocked right now.
  if (readySiblingUnitIds.length > 0) {
    lines.push(`- Other ready units: ${readySiblingUnitIds.join(", ")}`);
  }
  if (blockedUnitIds.length > 0) {
    lines.push(`- Blocked units: ${blockedUnitIds.join(", ")}`);
  }
  lines.push("");
  return lines;
}
| function payloadBlock(title, payload) { | ||
@@ -190,7 +272,11 @@ return [title, "```json", JSON.stringify(payload, null, 2), "```", ""]; | ||
| "- Read the relevant code and gather evidence before making factual claims.", | ||
| "- List the concrete evidence you actually used: files, symbols, tests, logs, or docs.", | ||
| "- Turn the user's success condition into explicit acceptance criteria for this unit.", | ||
| "- Surface only the current missing information bundle. If external input is required, ask for all known decisions at once.", | ||
| "- Keep the response scoped to clarify for the current unit.", | ||
| "- Do not pull blocked or not-yet-ready units into this clarify pass.", | ||
| "- If the unit is ready, make `ready` true and leave `decisions` empty.", | ||
| "" | ||
| ]; | ||
| lines.push(...graphContextBlock(state, unit)); | ||
| if (state.decisionHistory.length > 0) { | ||
@@ -215,2 +301,4 @@ lines.push("Decision history:"); | ||
| assumptions: ["Explicit assumptions that still shape execution."], | ||
| evidence: ["src/example.ts::TargetFunction", "tests/example.test.ts"], | ||
| acceptanceCriteria: ["The changed behavior is observable on the scoped surface.", "The intended verification surface is clear."], | ||
| verifyFocus: ["Evidence that verification should check."], | ||
@@ -236,6 +324,9 @@ decisions: [] | ||
| "- Use evidence gathered during clarify rather than re-expanding scope.", | ||
| "- Treat sibling ready units as scheduler context. This run still owns only the current unit.", | ||
| "- Make concrete code or artifact changes and return one execute payload.", | ||
| "- Report exactly what changed and any checks you ran.", | ||
| "- Leave downstream workers a usable handoff trail when this unit is part of a larger graph.", | ||
| "" | ||
| ]; | ||
| lines.push(...graphContextBlock(state, unit)); | ||
| if (clarify) { | ||
@@ -258,2 +349,4 @@ lines.push("Clarify summary:"); | ||
| artifacts: [], | ||
| checks: ["npm run typecheck"], | ||
| handoffNotes: ["Integration unit should re-check shared interfaces after sibling units finish."], | ||
| notes: ["Checks run, constraints, or follow-up notes."] | ||
@@ -279,7 +372,11 @@ })); | ||
| "- Read the changed files and run proportionate checks when possible.", | ||
| "- Report the concrete checks you ran, their status, and what evidence each one produced.", | ||
| "- Record any claims that still remain unverified instead of implying they passed.", | ||
| "- Report recoverable issues precisely enough to drive the next clarify pass.", | ||
| "- Use `needsHuman` only when the workflow truly requires an external decision.", | ||
| "- Verify only the current unit and any explicitly declared integration surface.", | ||
| "- Stay inside the verify payload contract for the current unit.", | ||
| "" | ||
| ]; | ||
| lines.push(...graphContextBlock(state, unit)); | ||
| if (clarify?.verifyFocus?.length) { | ||
@@ -307,2 +404,6 @@ lines.push("Verify focus:"); | ||
| score: { accuracy: 100, completeness: 100, consistency: 100 }, | ||
| checks: [ | ||
| { name: "Typecheck", status: "passed", command: "npm run typecheck", evidence: "Command exited 0." } | ||
| ], | ||
| evidence: ["src/example.ts matches the clarified scope.", "npm run typecheck exited 0."], | ||
| issues: [], | ||
@@ -312,2 +413,3 @@ decisions: [], | ||
| retryHint: "", | ||
| unverifiedClaims: [], | ||
| summary: "Verification result with evidence-backed conclusions." | ||
@@ -371,2 +473,3 @@ })); | ||
| const stateRef = workflowStatePath(state.workflowId); | ||
| const unitId = currentUnitId(state); | ||
| return { | ||
@@ -376,3 +479,3 @@ type: "run", | ||
| mode: state.mode, | ||
| unit_id: currentUnitId(state), | ||
| unit_id: unitId, | ||
| phase, | ||
@@ -382,2 +485,10 @@ prompt_ref: promptRefForPhase(phase), | ||
| state_ref: stateRef, | ||
| workflow_task_index_ref: workflowTaskIndexPath(state.workflowId), | ||
| task_packet_ref: unitId ? unitBriefPath(state.workflowId, unitId) : void 0, | ||
| task_context_ref: unitId ? unitContextPath(state.workflowId, unitId) : void 0, | ||
| task_status_ref: unitId ? unitStatusPath(state.workflowId, unitId) : void 0, | ||
| task_result_ref: unitId ? unitResultPath(state.workflowId, unitId) : void 0, | ||
| baton_ref: unitId ? unitBatonPath(state.workflowId, unitId) : void 0, | ||
| relay_refs: state.units[state.currentUnitIndex] ? (state.units[state.currentUnitIndex].dependsOn ?? []).map((dependencyId) => unitRelayPath(state.workflowId, dependencyId)) : void 0, | ||
| context: buildRunContext(state), | ||
| on_complete: { | ||
@@ -387,3 +498,3 @@ kind: "phase_output", | ||
| }, | ||
| instructions: runSignalInstructions(state.workflowId, phase, schemaRefForPhase(phase), stateRef) | ||
| instructions: runSignalInstructions(state.workflowId, phase, schemaRefForPhase(phase), stateRef, buildRunContext(state)) | ||
| }; | ||
@@ -393,2 +504,3 @@ } | ||
| const stateRef = workflowStatePath(state.workflowId); | ||
| const unitId = currentUnitId(state); | ||
| return { | ||
@@ -399,5 +511,7 @@ type: "gate", | ||
| mode: state.mode, | ||
| unit_id: currentUnitId(state), | ||
| unit_id: unitId, | ||
| options: state.pendingDecisions.map((decision) => decision.id), | ||
| state_ref: stateRef, | ||
| workflow_task_index_ref: workflowTaskIndexPath(state.workflowId), | ||
| task_status_ref: unitId ? unitStatusPath(state.workflowId, unitId) : void 0, | ||
| on_complete: { | ||
@@ -418,2 +532,3 @@ kind: "decision_answers", | ||
| state_ref: stateRef, | ||
| workflow_task_index_ref: workflowTaskIndexPath(state.workflowId), | ||
| outputs: Object.keys(state.outputs), | ||
@@ -446,3 +561,4 @@ message: state.status === "blocked" ? state.blockedReason || "workflow blocked" : state.status === "stopped" ? "workflow stopped" : "workflow completed" | ||
| signal.required_schema, | ||
| signal.state_ref | ||
| signal.state_ref, | ||
| signal.context | ||
| ) | ||
@@ -492,2 +608,3 @@ }; | ||
| export { | ||
| absoluteRoot, | ||
| absoluteWorkflowStatePath, | ||
@@ -529,3 +646,2 @@ applyDecisionAnswers, | ||
| startCommand, | ||
| statePath, | ||
| statusCommand, | ||
@@ -539,2 +655,8 @@ stopCommand, | ||
| touchUpdatedAt, | ||
| unitBatonPath, | ||
| unitBriefPath, | ||
| unitContextPath, | ||
| unitRelayPath, | ||
| unitResultPath, | ||
| unitStatusPath, | ||
| validateCaptureOutput, | ||
@@ -549,3 +671,6 @@ validateClarifyOutput, | ||
| validateWorkflowState, | ||
| workflowStatePath | ||
| workflowRelayRootPath, | ||
| workflowStatePath, | ||
| workflowTaskIndexPath, | ||
| workflowTaskRootPath | ||
| }; |
+18
-15
@@ -44,2 +44,3 @@ #!/usr/bin/env node | ||
| - Read \`state_ref\` before acting. | ||
| - Read \`workflow_task_index_ref\`, \`task_packet_ref\`, and any \`relay_refs\` before acting. | ||
| - Use \`phase\`, \`prompt_ref\`, \`required_schema\`, and \`instructions\` as the contract. | ||
@@ -52,5 +53,5 @@ - Spawn an Agent for this phase. DO NOT execute the phase yourself. | ||
| - \`capture\`: Read, Grep, Glob, Edit, Write, Bash, Agent | ||
| - In \`clarify\`, gather evidence first and ask the full missing-information bundle only when still required. | ||
| - In \`execute\`, complete the work end to end. If the scope cleanly splits, use bounded subagents step by step or in parallel with explicit file ownership. | ||
| - In \`verify\`, try to disprove the claimed result. Do not silently edit files during verification. | ||
| - In \`clarify\`, gather evidence first, list the concrete evidence you used, and turn the success condition into explicit acceptance criteria for the unit before returning ready=true. | ||
| - In \`execute\`, complete only the current unit. If the workflow state shows multiple ready sibling units with disjoint ownership, use that as host-level scheduling metadata for bounded subagents or parallel runs; do not silently merge sibling units into one payload. | ||
| - In \`verify\`, try to disprove the claimed result. Do not silently edit files during verification. Always include concrete checks, evidence, and any unverified claims in the payload. | ||
| - In \`capture\`, write only durable reusable learnings. | ||
@@ -62,3 +63,3 @@ - Wait for the Agent result. | ||
| ### type = "gate" | ||
| - If \`gate\` = "clarify": read \`state_ref\`, present the bundled question set to the user in one message, collect the answers as one JSON object, run \`npx krow submit-decisions <workflow_id> '<JSON>'\`, then run \`npx krow resume <workflow_id>\`. | ||
| - If \`gate\` = "clarify": read \`state_ref\` and \`task_status_ref\`, present the bundled question set to the user in one message, collect the answers as one JSON object, run \`npx krow submit-decisions <workflow_id> '<JSON>'\`, then run \`npx krow resume <workflow_id>\`. | ||
| - For any other gate, follow \`instructions\` exactly and only stop for real external input. | ||
@@ -79,3 +80,3 @@ | ||
| - Minify JSON before passing to commands (no newlines). | ||
| - State lives under \`.krow/state/workflows/<workflowId>.json\`. Read the referenced state file instead of guessing. | ||
| - State lives under \`.krow/state/workflows/<workflowId>.json\`. Task packets live under \`.krow/tasks/<workflowId>/\` and relays live under \`.krow/relays/<workflowId>/\`. Read the referenced files instead of guessing. | ||
| `; | ||
@@ -120,2 +121,3 @@ | ||
| - Read \`state_ref\` before acting. | ||
| - Read \`workflow_task_index_ref\`, \`task_packet_ref\`, and any \`relay_refs\` before acting. | ||
| - Use \`phase\`, \`prompt_ref\`, \`required_schema\`, and \`instructions\` as the contract. | ||
@@ -128,5 +130,5 @@ - Spawn an Agent for this phase. DO NOT execute the phase yourself. | ||
| - \`capture\`: Read, Grep, Glob, Edit, Write, Bash, Agent | ||
| - In \`clarify\`, gather evidence first and ask the full missing-information bundle only when still required. | ||
| - In \`execute\`, complete the work end to end. If the scope cleanly splits, use bounded subagents step by step or in parallel with explicit file ownership. | ||
| - In \`verify\`, try to disprove the claimed result. Do not silently edit files during verification. | ||
| - In \`clarify\`, gather evidence first, list the concrete evidence you used, and turn the success condition into explicit acceptance criteria for the unit before returning ready=true. | ||
| - In \`execute\`, complete only the current unit. If the workflow state shows multiple ready sibling units with disjoint ownership, use that as host-level scheduling metadata for bounded subagents or parallel runs; do not silently merge sibling units into one payload. | ||
| - In \`verify\`, try to disprove the claimed result. Do not silently edit files during verification. Always include concrete checks, evidence, and any unverified claims in the payload. | ||
| - In \`capture\`, write only durable reusable learnings. | ||
@@ -138,3 +140,3 @@ - Wait for the Agent result. | ||
| ### type = "gate" | ||
| - If \`gate\` = "clarify": read \`state_ref\`, present the bundled question set to the user in one message, collect the answers as one JSON object, run \`npx krow submit-decisions <workflow_id> '<JSON>'\`, then run \`npx krow resume <workflow_id>\`. | ||
| - If \`gate\` = "clarify": read \`state_ref\` and \`task_status_ref\`, present the bundled question set to the user in one message, collect the answers as one JSON object, run \`npx krow submit-decisions <workflow_id> '<JSON>'\`, then run \`npx krow resume <workflow_id>\`. | ||
| - For any other gate, follow \`instructions\` exactly and only stop for real external input. | ||
@@ -155,3 +157,3 @@ | ||
| - Minify JSON before passing to commands (no newlines). | ||
| - State lives under \`.krow/state/workflows/<workflowId>.json\`. Read the referenced state file instead of guessing. | ||
| - State lives under \`.krow/state/workflows/<workflowId>.json\`. Task packets live under \`.krow/tasks/<workflowId>/\` and relays live under \`.krow/relays/<workflowId>/\`. Read the referenced files instead of guessing. | ||
| `; | ||
@@ -189,2 +191,3 @@ | ||
| - Read \`state_ref\` before acting. | ||
| - Read \`workflow_task_index_ref\`, \`task_packet_ref\`, and any \`relay_refs\` before acting. | ||
| - Use \`phase\`, \`prompt_ref\`, \`required_schema\`, and \`instructions\` as the contract. | ||
@@ -196,5 +199,5 @@ - Tool policy by phase: | ||
| - \`capture\`: use read_file, grep_search, list_directory, edit_file, write_file, and run_shell_command only for durable knowledge capture | ||
| - In \`clarify\`, gather evidence first and ask the full missing-information bundle only when still required. | ||
| - In \`execute\`, complete the work end to end. If the work cannot be parallelized in-host, process it step by step without skipping transitions. | ||
| - In \`verify\`, try to disprove the claimed result before accepting it. | ||
| - In \`clarify\`, gather evidence first, list the concrete evidence you used, and turn the success condition into explicit acceptance criteria for the unit before returning ready=true. | ||
| - In \`execute\`, complete only the current unit. If the workflow state shows other ready sibling units, use that graph metadata for host scheduling; if the host cannot parallelize them, process the ready units step by step without skipping transitions. | ||
| - In \`verify\`, try to disprove the claimed result before accepting it. Always include concrete checks, evidence, and any unverified claims. | ||
| - When the phase work is complete, collect your result as one JSON object matching \`required_schema\`. | ||
@@ -204,3 +207,3 @@ - Run via run_shell_command: \`npx krow submit-phase <workflow_id> <phase> '<JSON>'\`, replacing placeholders from the signal and minifying JSON first. | ||
| ### type = "gate" | ||
| - If \`gate\` = "clarify": read \`state_ref\`, present the bundled question set to the user in one message, collect the answers as one JSON object, run \`npx krow submit-decisions <workflow_id> '<JSON>'\` via run_shell_command, then run \`npx krow resume <workflow_id>\`. | ||
| - If \`gate\` = "clarify": read \`state_ref\` and \`task_status_ref\`, present the bundled question set to the user in one message, collect the answers as one JSON object, run \`npx krow submit-decisions <workflow_id> '<JSON>'\` via run_shell_command, then run \`npx krow resume <workflow_id>\`. | ||
| - For any other gate, follow \`instructions\` exactly and only stop for real external input. | ||
@@ -221,3 +224,3 @@ | ||
| - Minify JSON before passing to commands (no newlines). | ||
| - State lives under \`.krow/state/workflows/<workflowId>.json\`. Read the referenced state file instead of guessing. | ||
| - State lives under \`.krow/state/workflows/<workflowId>.json\`. Task packets live under \`.krow/tasks/<workflowId>/\` and relays live under \`.krow/relays/<workflowId>/\`. Read the referenced files instead of guessing. | ||
| """ | ||
@@ -224,0 +227,0 @@ `; |
+1
-1
| { | ||
| "name": "krow-cli", | ||
| "version": "0.2.3", | ||
| "version": "0.2.4", | ||
| "description": "A host-agnostic agent harness for coding work", | ||
@@ -5,0 +5,0 @@ "type": "module", |
+9
-3
@@ -53,4 +53,4 @@ # krow | ||
| - `route`: classify a message as chat or work without creating workflow state | ||
| - `intake`: extract anchors, missing evidence, and bundled clarification questions | ||
| - `start`: create workflow state and emit the first control signal | ||
| - `intake`: extract anchors, missing evidence, bundled clarification questions, and a proposed unit graph | ||
| - `start`: create workflow state, carve ready units when strong split signals exist, and emit the first control signal | ||
| - `status`, `next`, `resume`: inspect or continue persisted workflow state | ||
@@ -65,2 +65,8 @@ - `submit-phase`, `submit-decisions`, `stop`: advance or terminate local workflow state | ||
| The wrappers use `intake --intent work` first so agents gather evidence and bundled questions before a workflow starts. After start, the runtime advances through `clarify -> execute -> verify -> capture` and persists state under `.krow/state/workflows/<workflowId>.json`. | ||
| The wrappers use `intake --intent work` first so agents gather evidence, bundled questions, and a proposed unit graph before a workflow starts. After start, the runtime advances each unit through `clarify -> execute -> verify -> capture`, schedules the next ready unit from the dependency graph, and persists: | ||
| - workflow state under `.krow/state/workflows/<workflowId>.json` | ||
| - task packets under `.krow/tasks/<workflowId>/` | ||
| - relay and baton files under `.krow/relays/<workflowId>/` | ||
| The current contract is still host-assisted. `krow` does not spawn teammates itself, but it now gives the host richer scheduling metadata, durable task packets, and stricter clarify/verify payload contracts so parallel-capable hosts can behave more predictably. |
| // src/validators.ts | ||
// Canonical workflow lifecycle statuses accepted in persisted state files.
var validStatuses = [
  "phase_clarify",
  "clarify_pending",
  "phase_execute",
  "phase_verify",
  "phase_capture",
  "completed",
  "blocked",
  "stopped"
];
// Canonical phase names; used to validate state.phase and verify payloads.
var validPhases = ["clarify", "execute", "verify", "capture"];
// Score dimensions a verify payload may report; each must be a number 0-100.
var validScoreKeys = ["accuracy", "completeness", "consistency"];
/**
 * True when `value` is a plain object: non-null, not an array.
 * (Guards against `typeof null === "object"` and array values.)
 */
function isRecord(value) {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
/**
 * True when `value` is a string containing at least one
 * non-whitespace character.
 */
function isNonEmptyString(value) {
  if (typeof value !== "string") {
    return false;
  }
  return value.trim().length > 0;
}
/**
 * True when `value` is an array whose every entry is a non-empty,
 * non-whitespace-only string. An empty array passes vacuously.
 */
function isStringArray(value) {
  if (!Array.isArray(value)) {
    return false;
  }
  for (const entry of value) {
    // Same predicate as isNonEmptyString, inlined.
    if (typeof entry !== "string" || entry.trim().length === 0) {
      return false;
    }
  }
  return true;
}
/**
 * Validate an array of decision prompts. Each prompt needs a non-empty
 * `id`, `question`, and a non-empty `options` array; each option needs a
 * non-empty `id` and `label` and may carry an optional `description`.
 *
 * @param value - Candidate decisions array (any shape).
 * @param path2 - Path prefix used in issue messages, e.g. "decisions".
 * @returns Human-readable validation issues; empty when valid.
 */
function validateDecisionPrompts(value, path2) {
  if (!Array.isArray(value)) {
    return [`${path2} must be an array`];
  }
  const issues = [];
  for (const [index, item] of value.entries()) {
    const itemPath = `${path2}[${index}]`;
    if (!isRecord(item)) {
      issues.push(`${itemPath} must be an object`);
      continue;
    }
    if (!isNonEmptyString(item.id)) {
      issues.push(`${itemPath}.id must be a non-empty string`);
    }
    if (!isNonEmptyString(item.question)) {
      issues.push(`${itemPath}.question must be a non-empty string`);
    }
    // Without a usable options array the per-option checks are skipped.
    if (!Array.isArray(item.options) || item.options.length === 0) {
      issues.push(`${itemPath}.options must be a non-empty array`);
      continue;
    }
    for (const [optionIndex, option] of item.options.entries()) {
      const optionPath = `${itemPath}.options[${optionIndex}]`;
      if (!isRecord(option)) {
        issues.push(`${optionPath} must be an object`);
        continue;
      }
      if (!isNonEmptyString(option.id)) {
        issues.push(`${optionPath}.id must be a non-empty string`);
      }
      if (!isNonEmptyString(option.label)) {
        issues.push(`${optionPath}.label must be a non-empty string`);
      }
      // description is optional, but must be meaningful when supplied.
      if (option.description !== void 0 && !isNonEmptyString(option.description)) {
        issues.push(`${optionPath}.description must be a non-empty string when present`);
      }
    }
  }
  return issues;
}
/**
 * Validate verify-phase issue entries. Each issue must carry a severity of
 * 'error' or 'warning', a non-empty `category` and `description`, and an
 * optional non-empty `suggestion`.
 *
 * @param value - Candidate issues array (any shape).
 * @param path2 - Path prefix used in issue messages, e.g. "issues".
 * @returns Human-readable validation issues; empty when valid.
 */
function validateVerifyIssues(value, path2) {
  if (!Array.isArray(value)) {
    return [`${path2} must be an array`];
  }
  const issues = [];
  for (const [index, issue] of value.entries()) {
    const issuePath = `${path2}[${index}]`;
    if (!isRecord(issue)) {
      issues.push(`${issuePath} must be an object`);
      continue;
    }
    const severityOk = issue.severity === "error" || issue.severity === "warning";
    if (!severityOk) {
      issues.push(`${issuePath}.severity must be 'error' or 'warning'`);
    }
    if (!isNonEmptyString(issue.category)) {
      issues.push(`${issuePath}.category must be a non-empty string`);
    }
    if (!isNonEmptyString(issue.description)) {
      issues.push(`${issuePath}.description must be a non-empty string`);
    }
    // suggestion is optional, but must be meaningful when supplied.
    if (issue.suggestion !== void 0 && !isNonEmptyString(issue.suggestion)) {
      issues.push(`${issuePath}.suggestion must be a non-empty string when present`);
    }
  }
  return issues;
}
/**
 * Validate a clarify-phase payload: a boolean `ready` flag, a non-empty
 * `summary`, an `assumptions` string array, an optional `verifyFocus`
 * string array, and a `decisions` array of prompt objects.
 *
 * @returns `{ ok: true, issues: [], value }` on success, otherwise
 *          `{ ok: false, issues }` with every problem found.
 */
function validateClarifyOutput(value) {
  if (!isRecord(value)) {
    return { ok: false, issues: ["clarify output must be an object"] };
  }
  const issues = [];
  if (typeof value.ready !== "boolean") {
    issues.push("ready must be a boolean");
  }
  if (!isNonEmptyString(value.summary)) {
    issues.push("summary must be a non-empty string");
  }
  if (!isStringArray(value.assumptions)) {
    issues.push("assumptions must be an array of non-empty strings");
  }
  // verifyFocus is optional; validate shape only when supplied.
  if (value.verifyFocus !== void 0 && !isStringArray(value.verifyFocus)) {
    issues.push("verifyFocus must be an array of non-empty strings when present");
  }
  issues.push(...validateDecisionPrompts(value.decisions, "decisions"));
  if (issues.length > 0) {
    return { ok: false, issues };
  }
  return { ok: true, issues: [], value };
}
/**
 * Validate an array of decision answers. Each answer needs non-empty
 * `decisionId` and `selectedOptionId` strings and may carry an optional
 * non-empty `customInput`.
 *
 * @returns `{ ok, issues, value? }` result object.
 */
function validateDecisionAnswers(value) {
  if (!Array.isArray(value)) {
    return { ok: false, issues: ["decision answers must be an array"] };
  }
  const issues = [];
  for (const [index, item] of value.entries()) {
    const itemPath = `decisionAnswers[${index}]`;
    if (!isRecord(item)) {
      issues.push(`${itemPath} must be an object`);
      continue;
    }
    if (!isNonEmptyString(item.decisionId)) {
      issues.push(`${itemPath}.decisionId must be a non-empty string`);
    }
    if (!isNonEmptyString(item.selectedOptionId)) {
      issues.push(`${itemPath}.selectedOptionId must be a non-empty string`);
    }
    // customInput is optional, but must be meaningful when supplied.
    if (item.customInput !== void 0 && !isNonEmptyString(item.customInput)) {
      issues.push(`${itemPath}.customInput must be a non-empty string when present`);
    }
  }
  return issues.length > 0 ? { ok: false, issues } : { ok: true, issues: [], value };
}
/**
 * Validate an execute-phase payload: a non-empty `summary`, optional string
 * arrays `changedFiles`/`outputFiles`/`artifacts`/`notes`, and at least one
 * non-empty output list among changedFiles, outputFiles, and artifacts.
 *
 * @returns `{ ok, issues, value? }` result object.
 */
function validateExecuteOutput(value) {
  if (!isRecord(value)) {
    return { ok: false, issues: ["execute output must be an object"] };
  }
  const issues = [];
  if (!isNonEmptyString(value.summary)) {
    issues.push("summary must be a non-empty string");
  }
  // All list fields are optional, but must be string arrays when supplied.
  for (const field of ["changedFiles", "outputFiles", "artifacts", "notes"]) {
    if (value[field] !== void 0 && !isStringArray(value[field])) {
      issues.push(`${field} must be an array of non-empty strings when present`);
    }
  }
  // The payload must carry at least one concrete output list.
  const hasPayload = ["changedFiles", "outputFiles", "artifacts"].some(
    (field) => Array.isArray(value[field]) && value[field].length > 0
  );
  if (!hasPayload) {
    issues.push("execute output must include changedFiles, outputFiles, or artifacts");
  }
  return issues.length > 0 ? { ok: false, issues } : { ok: true, issues: [], value };
}
/**
 * Validate a verify-phase payload: a boolean `passed`, an `issues` array of
 * issue objects, a non-empty `summary`, plus optional `score` (accuracy /
 * completeness / consistency, each 0-100), `needsHuman`, `retryHint`, and
 * follow-up `decisions`.
 *
 * @returns `{ ok, issues, value? }` result object.
 */
function validateVerifyOutput(value) {
  if (!isRecord(value)) {
    return { ok: false, issues: ["verify output must be an object"] };
  }
  const issues = [];
  if (typeof value.passed !== "boolean") {
    issues.push("passed must be a boolean");
  }
  issues.push(...validateVerifyIssues(value.issues, "issues"));
  if (!isNonEmptyString(value.summary)) {
    issues.push("summary must be a non-empty string");
  }
  if (value.score !== void 0) {
    if (!isRecord(value.score)) {
      issues.push("score must be an object when present");
    } else {
      const scoreMap = value.score;
      for (const key of validScoreKeys) {
        const score = scoreMap[key];
        // Predicate kept exactly as before (note: NaN slips through the
        // range comparisons, matching the original behavior).
        if (typeof score !== "number" || score < 0 || score > 100) {
          issues.push(`score.${key} must be a number between 0 and 100`);
        }
      }
    }
  }
  if (value.needsHuman !== void 0 && typeof value.needsHuman !== "boolean") {
    issues.push("needsHuman must be a boolean when present");
  }
  if (value.retryHint !== void 0 && !isNonEmptyString(value.retryHint)) {
    issues.push("retryHint must be a non-empty string when present");
  }
  if (value.decisions !== void 0) {
    issues.push(...validateDecisionPrompts(value.decisions, "decisions"));
  }
  return issues.length > 0 ? { ok: false, issues } : { ok: true, issues: [], value };
}
/**
 * Validate a capture-phase payload: an `entries` array where each entry has
 * non-empty `filename`, `content`, and `reason` strings and an optional
 * `action` of 'create' or 'update'.
 *
 * @returns `{ ok, issues, value? }` result object.
 */
function validateCaptureOutput(value) {
  if (!isRecord(value)) {
    return { ok: false, issues: ["capture output must be an object"] };
  }
  const issues = [];
  if (!Array.isArray(value.entries)) {
    issues.push("entries must be an array");
  } else {
    for (const [index, entry] of value.entries.entries()) {
      const entryPath = `entries[${index}]`;
      if (!isRecord(entry)) {
        issues.push(`${entryPath} must be an object`);
        continue;
      }
      if (!isNonEmptyString(entry.filename)) {
        issues.push(`${entryPath}.filename must be a non-empty string`);
      }
      if (!isNonEmptyString(entry.content)) {
        issues.push(`${entryPath}.content must be a non-empty string`);
      }
      if (!isNonEmptyString(entry.reason)) {
        issues.push(`${entryPath}.reason must be a non-empty string`);
      }
      // action is optional; only two verbs are recognized when supplied.
      if (entry.action !== void 0 && entry.action !== "create" && entry.action !== "update") {
        issues.push(`${entryPath}.action must be 'create' or 'update' when present`);
      }
    }
  }
  return issues.length > 0 ? { ok: false, issues } : { ok: true, issues: [], value };
}
/**
 * Validate a complete persisted workflow state object.
 *
 * Checks required string fields, the units array, numeric counters,
 * pending/answered decisions, outputs, optional lastVerifyIssues, and that
 * status/phase come from the canonical lists. Accumulates every problem
 * rather than failing fast.
 *
 * @param value - Parsed candidate state (any shape).
 * @returns `{ ok: true, issues: [], value }` on success, otherwise
 *          `{ ok: false, issues }` listing every validation failure.
 */
function validateWorkflowState(value) {
  const issues = [];
  if (!isRecord(value)) {
    return { ok: false, issues: ["workflow state must be an object"] };
  }
  // Top-level fields that must all be non-empty strings.
  const stringFields = [
    "schemaVersion",
    "workflowId",
    "mode",
    "description",
    "status",
    "phase",
    "createdAt",
    "updatedAt"
  ];
  stringFields.forEach((field) => {
    if (!isNonEmptyString(value[field])) {
      issues.push(`${field} must be a non-empty string`);
    }
  });
  if (!Array.isArray(value.units) || value.units.length === 0) {
    issues.push("units must be a non-empty array");
  } else {
    value.units.forEach((unit, index) => {
      const path2 = `units[${index}]`;
      if (!isRecord(unit)) {
        issues.push(`${path2} must be an object`);
        return;
      }
      if (!isNonEmptyString(unit.id)) {
        issues.push(`${path2}.id must be a non-empty string`);
      }
      if (!isNonEmptyString(unit.title)) {
        issues.push(`${path2}.title must be a non-empty string`);
      }
    });
  }
  // The index check is only meaningful when units is actually an array.
  if (typeof value.currentUnitIndex !== "number" || value.currentUnitIndex < 0) {
    issues.push("currentUnitIndex must be a non-negative number");
  } else if (Array.isArray(value.units) && value.currentUnitIndex >= value.units.length) {
    issues.push("currentUnitIndex must point to an existing unit");
  }
  if (typeof value.captureEnabled !== "boolean") {
    issues.push("captureEnabled must be a boolean");
  }
  if (typeof value.maxVerifyAttempts !== "number" || value.maxVerifyAttempts < 1) {
    issues.push("maxVerifyAttempts must be a positive number");
  }
  if (typeof value.verifyAttempts !== "number" || value.verifyAttempts < 0) {
    issues.push("verifyAttempts must be a non-negative number");
  }
  if (!Array.isArray(value.pendingDecisions)) {
    issues.push("pendingDecisions must be an array");
  } else {
    issues.push(...validateDecisionPrompts(value.pendingDecisions, "pendingDecisions"));
  }
  if (!Array.isArray(value.decisionHistory)) {
    issues.push("decisionHistory must be an array");
  } else {
    value.decisionHistory.forEach((item, index) => {
      const path2 = `decisionHistory[${index}]`;
      if (!isRecord(item)) {
        issues.push(`${path2} must be an object`);
        return;
      }
      if (!isNonEmptyString(item.decisionId)) {
        issues.push(`${path2}.decisionId must be a non-empty string`);
      }
      if (!isNonEmptyString(item.selectedOptionId)) {
        issues.push(`${path2}.selectedOptionId must be a non-empty string`);
      }
      // customInput is optional, but must be meaningful when supplied.
      if (item.customInput !== void 0 && !isNonEmptyString(item.customInput)) {
        issues.push(`${path2}.customInput must be a non-empty string when present`);
      }
    });
  }
  if (!isRecord(value.outputs)) {
    issues.push("outputs must be an object");
  }
  // lastVerifyIssues is optional; validate shape only when supplied.
  if (value.lastVerifyIssues !== void 0) {
    issues.push(...validateVerifyIssues(value.lastVerifyIssues, "lastVerifyIssues"));
  }
  if (!validStatuses.includes(value.status)) {
    issues.push("status must be one of the canonical workflow statuses");
  }
  if (!validPhases.includes(value.phase)) {
    issues.push("phase must be one of the canonical workflow phases");
  }
  return issues.length === 0 ? { ok: true, issues: [], value } : { ok: false, issues };
}
/**
 * Validate a standalone decisions array using the shared prompt rules.
 * @returns `{ ok, issues, value? }` result object.
 */
function validateDecisionPromptArray(value) {
  const issues = validateDecisionPrompts(value, "decisions");
  if (issues.length > 0) {
    return { ok: false, issues };
  }
  return { ok: true, issues: [], value };
}
/**
 * Validate a standalone issues array using the shared verify-issue rules.
 * @returns `{ ok, issues, value? }` result object.
 */
function validateVerifyIssueArray(value) {
  const issues = validateVerifyIssues(value, "issues");
  if (issues.length > 0) {
    return { ok: false, issues };
  }
  return { ok: true, issues: [], value };
}
| // src/state-store.ts | ||
| import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from "fs"; | ||
| import path from "path"; | ||
// Repo-relative directory holding one JSON state file per workflow.
var STATE_DIR = ".krow/state/workflows";
/**
 * Resolve the workspace root to an absolute path.
 * @param rootDir - Workspace root; defaults to the current working directory.
 */
function absoluteRoot(rootDir = process.cwd()) {
  return path.resolve(rootDir);
}
/**
 * Repo-relative path of a workflow's persisted state file.
 */
function workflowStatePath(workflowId) {
  return `${STATE_DIR}/${workflowId}.json`;
}
/**
 * Absolute path of a workflow's persisted state file under `rootDir`.
 */
function absoluteWorkflowStatePath(workflowId, rootDir = process.cwd()) {
  const root = absoluteRoot(rootDir);
  return path.join(root, workflowStatePath(workflowId));
}
/**
 * Absolute path of a workflow's state file; thin delegation to
 * absoluteWorkflowStatePath.
 */
function statePath(workflowId, rootDir = process.cwd()) {
  const filePath = absoluteWorkflowStatePath(workflowId, rootDir);
  return filePath;
}
// Identity helper: returns the given timestamp unchanged. Kept as a named
// export point for callers that stamp `updatedAt` themselves.
function touchUpdatedAt(timestamp) {
  return timestamp;
}
/**
 * Persist workflow state as pretty-printed JSON at its canonical location,
 * creating parent directories as needed.
 * @returns The absolute file path that was written.
 */
function saveWorkflowState(state, rootDir = process.cwd()) {
  const filePath = absoluteWorkflowStatePath(state.workflowId, rootDir);
  const parentDir = path.dirname(filePath);
  mkdirSync(parentDir, { recursive: true });
  const serialized = JSON.stringify(state, null, 2);
  writeFileSync(filePath, serialized);
  return filePath;
}
/**
 * Read and parse a workflow state file. Throws if the file is missing or
 * contains invalid JSON (use loadState for the null-on-missing variant).
 */
function loadWorkflowState(workflowId, rootDir = process.cwd()) {
  const filePath = absoluteWorkflowStatePath(workflowId, rootDir);
  const raw = readFileSync(filePath, "utf8");
  return JSON.parse(raw);
}
/**
 * Stamp `updatedAt` with the current time and persist the state.
 * Note: mutates the passed-in state object.
 */
function saveState(state, rootDir = process.cwd()) {
  const timestamp = new Date().toISOString();
  state.updatedAt = timestamp;
  saveWorkflowState(state, rootDir);
}
/**
 * Load workflow state, returning null when no state file exists
 * (the throwing variant is loadWorkflowState).
 */
function loadState(workflowId, rootDir = process.cwd()) {
  const filePath = absoluteWorkflowStatePath(workflowId, rootDir);
  return existsSync(filePath) ? JSON.parse(readFileSync(filePath, "utf8")) : null;
}
/**
 * List all active workflow states (everything not completed/stopped) under
 * the state directory, newest first by `updatedAt`.
 *
 * Unparseable state files are skipped rather than failing the listing.
 * Fix: the previous comparator called `right.updatedAt.localeCompare(...)`
 * directly, so a state file without an `updatedAt` string crashed the whole
 * listing; such states now sort last instead of throwing.
 *
 * @param rootDir - Workspace root; defaults to the current working directory.
 * @returns Parsed state objects, most recently updated first.
 */
function listStates(rootDir = process.cwd()) {
  const dir = path.join(absoluteRoot(rootDir), STATE_DIR);
  if (!existsSync(dir)) {
    return [];
  }
  const states = [];
  for (const entry of readdirSync(dir)) {
    if (!entry.endsWith(".json")) {
      continue;
    }
    try {
      states.push(JSON.parse(readFileSync(path.join(dir, entry), "utf8")));
    } catch {
      // Skip corrupt or partially written state files.
    }
  }
  return states
    .filter((state) => state !== null)
    .filter((state) => state.status !== "completed" && state.status !== "stopped")
    .sort(
      (left, right) => String(right.updatedAt ?? "").localeCompare(String(left.updatedAt ?? ""))
    );
}
| // src/orchestrator.ts | ||
| function cloneState(value) { | ||
| return JSON.parse(JSON.stringify(value)); | ||
| } | ||
| function nowIso() { | ||
| return (/* @__PURE__ */ new Date()).toISOString(); | ||
| } | ||
| function createWorkflowId() { | ||
| return `wf-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`; | ||
| } | ||
| function currentUnit(state) { | ||
| return state.units[state.currentUnitIndex]; | ||
| } | ||
| function currentUnitOutputBucket(state) { | ||
| const unit = currentUnit(state); | ||
| if (!unit) { | ||
| return {}; | ||
| } | ||
| if (!state.outputs[unit.id]) { | ||
| state.outputs[unit.id] = {}; | ||
| } | ||
| return state.outputs[unit.id]; | ||
| } | ||
| function storePhaseOutput(state, phase, payload) { | ||
| currentUnitOutputBucket(state)[phase] = payload; | ||
| } | ||
| function schemaForPhase(phase) { | ||
| return `schemas/payloads/${phase}-output.schema.json`; | ||
| } | ||
| function promptForPhase(phase) { | ||
| switch (phase) { | ||
| case "clarify": | ||
| return "prompts/clarify.md"; | ||
| case "execute": | ||
| return "prompts/executor.md"; | ||
| case "verify": | ||
| return "prompts/verifier.md"; | ||
| case "capture": | ||
| return "prompts/capture.md"; | ||
| } | ||
| } | ||
| function toRuntimePhase(phase) { | ||
| switch (phase) { | ||
| case "clarify": | ||
| case "execute": | ||
| case "verify": | ||
| case "capture": | ||
| return phase; | ||
| default: | ||
| return void 0; | ||
| } | ||
| } | ||
| function runSignal(state, phase, instructions) { | ||
| return { | ||
| type: "run", | ||
| workflow_id: state.workflowId, | ||
| mode: state.mode, | ||
| unit_id: currentUnit(state)?.id, | ||
| phase, | ||
| prompt_ref: promptForPhase(phase), | ||
| required_schema: schemaForPhase(phase), | ||
| state_ref: workflowStatePath(state.workflowId), | ||
| on_complete: { | ||
| kind: "phase_output", | ||
| phase | ||
| }, | ||
| instructions | ||
| }; | ||
| } | ||
| function faultSignal(state, error, issues, recoverable) { | ||
| return { | ||
| type: "fault", | ||
| workflow_id: state?.workflowId, | ||
| mode: state?.mode, | ||
| unit_id: state ? currentUnit(state)?.id : void 0, | ||
| phase: state ? toRuntimePhase(state.phase) : void 0, | ||
| expected_schema: state && toRuntimePhase(state.phase) ? schemaForPhase(toRuntimePhase(state.phase)) : void 0, | ||
| issues, | ||
| error, | ||
| recoverable | ||
| }; | ||
| } | ||
| function terminalSignal(state) { | ||
| return { | ||
| type: "done", | ||
| workflow_id: state.workflowId, | ||
| mode: state.mode, | ||
| status: state.status === "blocked" ? "blocked" : state.status === "stopped" ? "stopped" : "completed", | ||
| state_ref: workflowStatePath(state.workflowId), | ||
| outputs: Object.keys(state.outputs), | ||
| message: state.status === "blocked" ? state.blockedReason || "workflow blocked" : state.status === "stopped" ? "workflow stopped" : "workflow completed" | ||
| }; | ||
| } | ||
| function withResponse(result) { | ||
| return { ...result, response: result.signal }; | ||
| } | ||
| function normalizeCreateWorkflowInput(input) { | ||
| if (typeof input !== "string") { | ||
| return input; | ||
| } | ||
| const title = input.trim() || "main"; | ||
| return { | ||
| mode: "work", | ||
| description: input, | ||
| units: [{ id: "main", title }] | ||
| }; | ||
| } | ||
| function setPhase(state, status, phase) { | ||
| state.status = status; | ||
| state.phase = phase; | ||
| state.updatedAt = nowIso(); | ||
| } | ||
| function advanceToNextUnit(state) { | ||
| state.currentUnitIndex += 1; | ||
| state.verifyAttempts = 0; | ||
| setPhase(state, "phase_clarify", "clarify"); | ||
| } | ||
| function validateState(state) { | ||
| const result = validateWorkflowState(state); | ||
| if (result.ok) { | ||
| return void 0; | ||
| } | ||
| return faultSignal(void 0, "invalid workflow state", result.issues, false); | ||
| } | ||
| function createWorkflow(input) { | ||
| const normalized = normalizeCreateWorkflowInput(input); | ||
| const timestamp = normalized.createdAt ?? nowIso(); | ||
| const state = { | ||
| schemaVersion: "1.0.0", | ||
| workflowId: normalized.workflowId ?? createWorkflowId(), | ||
| mode: normalized.mode, | ||
| description: normalized.description, | ||
| status: "phase_clarify", | ||
| phase: "clarify", | ||
| units: normalized.units, | ||
| currentUnitIndex: 0, | ||
| captureEnabled: normalized.captureEnabled ?? false, | ||
| maxVerifyAttempts: normalized.maxVerifyAttempts ?? 3, | ||
| verifyAttempts: 0, | ||
| pendingDecisions: [], | ||
| decisionHistory: [], | ||
| outputs: {}, | ||
| createdAt: timestamp, | ||
| updatedAt: timestamp | ||
| }; | ||
| const stateValidation = validateWorkflowState(state); | ||
| if (!stateValidation.ok) { | ||
| const signal2 = faultSignal(state, "new workflow failed validation", stateValidation.issues, false); | ||
| return { state, signal: signal2, response: signal2 }; | ||
| } | ||
| const signal = nextSignal(state); | ||
| return { state, signal, response: signal }; | ||
| } | ||
| function nextSignal(state) { | ||
| const stateValidation = validateWorkflowState(state); | ||
| if (!stateValidation.ok) { | ||
| return faultSignal(state, "invalid workflow state", stateValidation.issues, false); | ||
| } | ||
| switch (state.status) { | ||
| case "phase_clarify": | ||
| return runSignal(state, "clarify", "Tighten the current unit until execution is safe and verification is clear."); | ||
| case "clarify_pending": | ||
| return { | ||
| type: "gate", | ||
| gate: "clarify", | ||
| workflow_id: state.workflowId, | ||
| mode: state.mode, | ||
| unit_id: currentUnit(state)?.id, | ||
| options: state.pendingDecisions.map((decision) => decision.id), | ||
| state_ref: workflowStatePath(state.workflowId), | ||
| instructions: "Collect the pending external decisions, then submit decision answers and resume clarify." | ||
| }; | ||
| case "phase_execute": | ||
| return runSignal(state, "execute", "Perform only the clarified unit of work and leave evidence for verification."); | ||
| case "phase_verify": | ||
| return runSignal(state, "verify", "Try to disprove the claimed result and report recoverable issues precisely."); | ||
| case "phase_capture": | ||
| return runSignal(state, "capture", "Capture only durable, reusable patterns worth saving."); | ||
| case "completed": | ||
| case "blocked": | ||
| case "stopped": | ||
| return terminalSignal(state); | ||
| default: | ||
| return faultSignal(state, `unsupported status: ${String(state.status)}`, [], false); | ||
| } | ||
| } | ||
| function nextResponse(state) { | ||
| return nextSignal(state); | ||
| } | ||
| function stopWorkflow(state, reason = "workflow stopped") { | ||
| const next = cloneState(state); | ||
| next.status = "stopped"; | ||
| next.blockedReason = reason; | ||
| next.updatedAt = nowIso(); | ||
| const signal = nextSignal(next); | ||
| return { state: next, signal, response: signal }; | ||
| } | ||
| function applyDecisionAnswers(state, input) { | ||
| if (state.status !== "clarify_pending") { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "decision answers are only valid during clarify_pending", [], true) | ||
| }); | ||
| } | ||
| const validation = validateDecisionAnswers(input); | ||
| if (!validation.ok || !validation.value) { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "invalid decision answers", validation.issues, true) | ||
| }); | ||
| } | ||
| const next = cloneState(state); | ||
| next.decisionHistory.push(...validation.value); | ||
| next.pendingDecisions = []; | ||
| setPhase(next, "phase_clarify", "clarify"); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| function submitDecisions(state, input) { | ||
| return applyDecisionAnswers(state, input); | ||
| } | ||
| function applyPhaseOutput(state, phase, input) { | ||
| if (state.phase !== phase) { | ||
| return withResponse({ | ||
| signal: faultSignal( | ||
| state, | ||
| `phase output for ${phase} is invalid while workflow phase is ${state.phase}`, | ||
| [], | ||
| true | ||
| ) | ||
| }); | ||
| } | ||
| switch (phase) { | ||
| case "clarify": | ||
| return applyClarify(state, input); | ||
| case "execute": | ||
| return applyExecute(state, input); | ||
| case "verify": | ||
| return applyVerify(state, input); | ||
| case "capture": | ||
| return applyCapture(state, input); | ||
| default: | ||
| return withResponse({ signal: faultSignal(state, `unsupported phase: ${phase}`, [], false) }); | ||
| } | ||
| } | ||
| function submitPhaseOutput(state, phase, input) { | ||
| return applyPhaseOutput(state, phase, input); | ||
| } | ||
| function applyClarify(state, input) { | ||
| if (state.status !== "phase_clarify") { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "clarify output is only valid during phase_clarify", [], true) | ||
| }); | ||
| } | ||
| const validation = validateClarifyOutput(input); | ||
| if (!validation.ok || !validation.value) { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "invalid clarify output", validation.issues, true) | ||
| }); | ||
| } | ||
| const next = cloneState(state); | ||
| storePhaseOutput(next, "clarify", validation.value); | ||
| if (!validation.value.ready) { | ||
| if (validation.value.decisions.length === 0) { | ||
| return withResponse({ | ||
| signal: faultSignal( | ||
| state, | ||
| "clarify output is not ready but did not supply any external decisions", | ||
| [], | ||
| true | ||
| ) | ||
| }); | ||
| } | ||
| next.pendingDecisions = validation.value.decisions; | ||
| setPhase(next, "clarify_pending", "clarify"); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| next.pendingDecisions = []; | ||
| setPhase(next, "phase_execute", "execute"); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| function applyExecute(state, input) { | ||
| if (state.status !== "phase_execute") { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "execute output is only valid during phase_execute", [], true) | ||
| }); | ||
| } | ||
| const validation = validateExecuteOutput(input); | ||
| if (!validation.ok || !validation.value) { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "invalid execute output", validation.issues, true) | ||
| }); | ||
| } | ||
| const next = cloneState(state); | ||
| storePhaseOutput(next, "execute", validation.value); | ||
| setPhase(next, "phase_verify", "verify"); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| function applyVerify(state, input) { | ||
| if (state.status !== "phase_verify") { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "verify output is only valid during phase_verify", [], true) | ||
| }); | ||
| } | ||
| const validation = validateVerifyOutput(input); | ||
| if (!validation.ok || !validation.value) { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "invalid verify output", validation.issues, true) | ||
| }); | ||
| } | ||
| const next = cloneState(state); | ||
| storePhaseOutput(next, "verify", validation.value); | ||
| next.lastVerifyIssues = validation.value.issues; | ||
| if (validation.value.passed) { | ||
| const isLastUnit = next.currentUnitIndex >= next.units.length - 1; | ||
| if (!isLastUnit) { | ||
| advanceToNextUnit(next); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| if (next.captureEnabled) { | ||
| next.verifyAttempts = 0; | ||
| setPhase(next, "phase_capture", "capture"); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| next.verifyAttempts = 0; | ||
| next.status = "completed"; | ||
| next.updatedAt = nowIso(); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| next.verifyAttempts += 1; | ||
| if (validation.value.needsHuman) { | ||
| if (!validation.value.decisions || validation.value.decisions.length === 0) { | ||
| next.status = "blocked"; | ||
| next.blockedReason = validation.value.summary; | ||
| next.updatedAt = nowIso(); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| next.pendingDecisions = validation.value.decisions; | ||
| setPhase(next, "clarify_pending", "clarify"); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| if (next.verifyAttempts >= next.maxVerifyAttempts) { | ||
| next.status = "blocked"; | ||
| next.blockedReason = validation.value.retryHint || validation.value.summary; | ||
| next.updatedAt = nowIso(); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| setPhase(next, "phase_clarify", "clarify"); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| function applyCapture(state, input) { | ||
| if (state.status !== "phase_capture") { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "capture output is only valid during phase_capture", [], true) | ||
| }); | ||
| } | ||
| const validation = validateCaptureOutput(input); | ||
| if (!validation.ok || !validation.value) { | ||
| return withResponse({ | ||
| signal: faultSignal(state, "invalid capture output", validation.issues, true) | ||
| }); | ||
| } | ||
| const next = cloneState(state); | ||
| storePhaseOutput(next, "capture", validation.value); | ||
| next.status = "completed"; | ||
| next.updatedAt = nowIso(); | ||
| return withResponse({ state: next, signal: nextSignal(next) }); | ||
| } | ||
| export { | ||
| validateClarifyOutput, | ||
| validateDecisionAnswers, | ||
| validateExecuteOutput, | ||
| validateVerifyOutput, | ||
| validateCaptureOutput, | ||
| validateWorkflowState, | ||
| validateDecisionPromptArray, | ||
| validateVerifyIssueArray, | ||
| workflowStatePath, | ||
| absoluteWorkflowStatePath, | ||
| statePath, | ||
| touchUpdatedAt, | ||
| saveWorkflowState, | ||
| loadWorkflowState, | ||
| saveState, | ||
| loadState, | ||
| listStates, | ||
| validateState, | ||
| createWorkflow, | ||
| nextSignal, | ||
| nextResponse, | ||
| stopWorkflow, | ||
| applyDecisionAnswers, | ||
| submitDecisions, | ||
| applyPhaseOutput, | ||
| submitPhaseOutput | ||
| }; |
/*
 * NOTE(review): the text below is not JavaScript — it appears to be
 * package-scanner output (a supply-chain risk report, duplicated twice) plus
 * stray coverage figures accidentally pasted onto the end of the bundle.
 * Preserved here, deduplicated and wrapped in a comment so the file stays
 * parseable — confirm the source and remove.
 *
 * Shell access — supply-chain risk: this module accesses the system shell,
 *   which increases the risk of executing arbitrary code. Found 1 instance in 1 package.
 * Filesystem access — supply-chain risk: accesses the file system, and could
 *   potentially read sensitive data. Found 1 instance in 1 package.
 * Long strings — supply-chain risk: contains long string literals, which may
 *   be a sign of obfuscated or packed code. Found 1 instance in 1 package.
 * URL strings — supply-chain risk: contains fragments of external URLs or IP
 *   addresses, which the package may access at runtime. Found 1 instance in 1 package.
 *
 * Trailing figures (origin unknown): 152924 · 49.53% 3966 · 40.34% 71 · 9.23%
 */