@versatly/tasktime
Advanced tools
| export declare function formatDuration(ms: number): string; | ||
| export declare function sparkline(values: number[]): string; | ||
| export declare function barChart(data: { | ||
| label: string; | ||
| value: number; | ||
| }[], options?: { | ||
| width?: number; | ||
| color?: boolean; | ||
| }): string; | ||
| export declare function lineChart(data: { | ||
| label: string; | ||
| value: number; | ||
| }[], options?: { | ||
| height?: number; | ||
| width?: number; | ||
| }): string; | ||
| export declare function progressBar(current: number, total: number, width?: number): string; | ||
| export declare function heatmap(data: number[][], labels: { | ||
| rows: string[]; | ||
| cols: string[]; | ||
| }): string; | ||
| export declare function box(title: string, content: string): string; |
+110
| import chalk from 'chalk'; | ||
// Unicode building blocks for sparklines and bar charts.
const BLOCKS = ['▁', '▂', '▃', '▄', '▅', '▆', '▇', '█'];
const BAR_CHARS = { full: '█', half: '▌', empty: '░' };
/**
 * Format a millisecond duration as a compact human-readable string:
 * "500ms", "1.5s", "12.3m", "2h 5m".
 */
export function formatDuration(ms) {
    if (ms < 1000)
        return `${ms}ms`;
    if (ms < 60000)
        return `${(ms / 1000).toFixed(1)}s`;
    if (ms < 3600000)
        return `${(ms / 60000).toFixed(1)}m`;
    let hours = Math.floor(ms / 3600000);
    let mins = Math.round((ms % 3600000) / 60000);
    // Math.round can yield 60 minutes (e.g. 7199999ms); carry into hours
    // so we never print "1h 60m".
    if (mins === 60) {
        hours += 1;
        mins = 0;
    }
    return `${hours}h ${mins}m`;
}
/**
 * Render numbers as a one-line unicode sparkline, scaled to the largest
 * value. An empty array yields '', and a series with no positive value
 * renders as a flat baseline.
 */
export function sparkline(values) {
    if (values.length === 0)
        return '';
    const max = Math.max(...values);
    // No positive value: flat line (also avoids dividing by zero or
    // scaling by a negative maximum below).
    if (max <= 0)
        return BLOCKS[0].repeat(values.length);
    return values.map(v => {
        const idx = Math.round((v / max) * (BLOCKS.length - 1));
        // Clamp: negative or non-finite inputs previously indexed outside
        // BLOCKS and produced the literal string "undefined".
        const safe = Number.isFinite(idx) ? Math.min(BLOCKS.length - 1, Math.max(0, idx)) : 0;
        return BLOCKS[safe];
    }).join('');
}
/**
 * Horizontal bar chart: one row per entry, bars scaled to the largest
 * value, labels padded to a common width, durations appended at the end.
 */
export function barChart(data, options = {}) {
    const { width = 40, color = true } = options;
    if (data.length === 0)
        return 'No data';
    // Rotate through a fixed palette so adjacent rows are distinguishable.
    const palette = [chalk.cyan, chalk.green, chalk.yellow, chalk.magenta, chalk.blue, chalk.red];
    const peak = Math.max(...data.map(d => d.value));
    const labelWidth = Math.max(...data.map(d => d.label.length));
    return data
        .map(({ label, value }, idx) => {
            const filled = peak > 0 ? Math.round((value / peak) * width) : 0;
            const bar = BAR_CHARS.full.repeat(filled) + BAR_CHARS.empty.repeat(width - filled);
            const paint = color ? palette[idx % palette.length] : (s) => s;
            return `${label.padEnd(labelWidth)} ${paint(bar)} ${formatDuration(value)}`;
        })
        .join('\n');
}
// Render an ASCII line chart of data[].value with a duration-labeled
// y-axis. NOTE(review): entries beyond `width` are silently dropped, and
// each x-axis label is truncated to its last 2 characters — confirm
// callers only pass short labels (e.g. "MM/dd" dates).
export function lineChart(data, options = {}) {
    const { height = 8, width = 50 } = options;
    if (data.length === 0)
        return 'No data';
    const values = data.map(d => d.value);
    const max = Math.max(...values);
    const min = Math.min(...values);
    // Guard: a flat series would otherwise divide by zero below.
    const range = max - min || 1;
    const chart = Array(height).fill(null).map(() => Array(width).fill(' '));
    // Plot points
    for (let i = 0; i < data.length && i < width; i++) {
        const normalizedY = (values[i] - min) / range;
        // Row 0 is the top of the canvas, so invert the normalized value.
        const y = Math.round((1 - normalizedY) * (height - 1));
        chart[y][i] = chalk.cyan('●');
        // Fill below
        for (let j = y + 1; j < height; j++) {
            chart[j][i] = chalk.dim('│');
        }
    }
    // Build output with axis
    const lines = [];
    lines.push(chalk.dim(`${formatDuration(max).padStart(8)} ┤`));
    for (let i = 0; i < height; i++) {
        // Only the last row carries the min label and the axis corner.
        const axisLabel = i === height - 1 ? formatDuration(min).padStart(8) : ''.padStart(8);
        const axisChar = i === height - 1 ? '└' : '│';
        lines.push(chalk.dim(`${axisLabel} ${axisChar}`) + chart[i].join(''));
    }
    // X-axis labels
    const labels = data.slice(0, width).map(d => d.label.slice(-2));
    lines.push(chalk.dim(' ' + labels.join('')));
    return lines.join('\n');
}
/**
 * Render a colored progress bar like "████░░░░ 42.0%".
 * The ratio is clamped to [0, 1]: previously current > total produced a
 * negative repeat count for the empty segment and String.repeat threw a
 * RangeError; negative current similarly overflowed the bar.
 */
export function progressBar(current, total, width = 30) {
    const ratio = total > 0 ? Math.min(1, Math.max(0, current / total)) : 0;
    const filled = Math.round(ratio * width);
    const bar = chalk.green(BAR_CHARS.full.repeat(filled)) + chalk.dim(BAR_CHARS.empty.repeat(width - filled));
    return `${bar} ${(ratio * 100).toFixed(1)}%`;
}
// Render a 2D grid as a shaded/colored heatmap scaled to the global
// maximum. Assumes labels.rows/labels.cols match data's dimensions —
// TODO confirm with callers.
export function heatmap(data, labels) {
    const max = Math.max(...data.flat());
    // Five intensity buckets, each with its own glyph and color.
    const chars = [' ', '░', '▒', '▓', '█'];
    const colors = [chalk.dim, chalk.blue, chalk.cyan, chalk.yellow, chalk.red];
    const lines = [];
    // Header row: column labels truncated to 2 chars, 3 columns per cell.
    lines.push(' ' + labels.cols.map(c => c.slice(0, 2).padStart(3)).join(''));
    for (let i = 0; i < data.length; i++) {
        const row = data[i];
        const cells = row.map(v => {
            // max <= 0 collapses everything into the lowest bucket.
            const intensity = max > 0 ? Math.floor((v / max) * (chars.length - 1)) : 0;
            return colors[intensity](chars[intensity].repeat(3));
        }).join('');
        lines.push(`${labels.rows[i].padStart(4)} ${cells}`);
    }
    return lines.join('\n');
}
/**
 * Draw `content` inside a rounded unicode box with `title` embedded in
 * the top border. ANSI color escapes are stripped before measuring so
 * colored content still aligns.
 */
export function box(title, content) {
    const lines = content.split('\n');
    const maxLen = Math.max(title.length, ...lines.map(l => l.replace(/\x1b\[[0-9;]*m/g, '').length));
    const width = maxLen + 2;
    const top = `╭─${title}${'─'.repeat(width - title.length - 1)}╮`;
    // Top and middle rows are width + 2 glyphs wide; repeat(width) keeps
    // the bottom border the same width (the previous repeat(width + 1)
    // made it one glyph too wide).
    const bottom = `╰${'─'.repeat(width)}╯`;
    const middle = lines.map(l => {
        const plainLen = l.replace(/\x1b\[[0-9;]*m/g, '').length;
        return `│ ${l}${' '.repeat(maxLen - plainLen)} │`;
    }).join('\n');
    return `${top}\n${middle}\n${bottom}`;
}
| #!/usr/bin/env node | ||
| export {}; |
+26
| export interface TaskRecord { | ||
| id: number; | ||
| task: string; | ||
| category: string; | ||
| started_at: string; | ||
| ended_at: string | null; | ||
| duration_ms: number | null; | ||
| notes: string | null; | ||
| } | ||
| export declare function startTask(task: string, category?: string): TaskRecord; | ||
| export declare function stopTask(id: number, notes?: string): TaskRecord | null; | ||
| export declare function getActiveTask(): TaskRecord | null; | ||
| export declare function getTasks(limit?: number, category?: string): TaskRecord[]; | ||
| export declare function searchTasks(query: string): TaskRecord[]; | ||
| export declare function getStats(days?: number): { | ||
| category: string; | ||
| total_ms: number; | ||
| count: number; | ||
| }[]; | ||
| export declare function getDailyStats(days?: number): { | ||
| date: string; | ||
| total_ms: number; | ||
| count: number; | ||
| }[]; | ||
| export declare function getCategories(): string[]; | ||
| export declare function seedSampleData(): void; |
+158
| import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; | ||
| import { homedir } from 'os'; | ||
| import { join } from 'path'; | ||
| const DATA_DIR = join(homedir(), '.tasktime'); | ||
| const DB_PATH = join(DATA_DIR, 'tasks.json'); | ||
| let cache = null; | ||
// Create the ~/.tasktime data directory if it does not exist yet.
function ensureDir() {
    if (!existsSync(DATA_DIR)) {
        mkdirSync(DATA_DIR, { recursive: true });
    }
}
// Load the JSON task database from disk, memoized in the module-level
// `cache` for the life of the process. A missing file yields a fresh
// empty database.
function load() {
    if (cache)
        return cache;
    ensureDir();
    if (!existsSync(DB_PATH)) {
        cache = { nextId: 1, tasks: [] };
        return cache;
    }
    try {
        cache = JSON.parse(readFileSync(DB_PATH, 'utf-8'));
        return cache;
    }
    catch {
        // Unparseable file: start over with an empty database.
        // NOTE(review): this silently discards the corrupt file's contents
        // on the next save — confirm that is intended.
        cache = { nextId: 1, tasks: [] };
        return cache;
    }
}
// Persist the in-memory cache to disk as pretty-printed JSON.
function save() {
    ensureDir();
    writeFileSync(DB_PATH, JSON.stringify(cache, null, 2));
}
/**
 * Create a new open-ended task record (no end time, no duration),
 * persist it, and return it.
 */
export function startTask(task, category = 'general') {
    const db = load();
    const record = {
        id: db.nextId++,
        task,
        category,
        started_at: new Date().toISOString(),
        ended_at: null,
        duration_ms: null,
        notes: null
    };
    db.tasks.push(record);
    save();
    return record;
}
/**
 * Close the task with the given id: stamp its end time, compute the
 * elapsed duration, attach optional notes, and persist. Returns the
 * updated record, or null when no task has that id.
 */
export function stopTask(id, notes) {
    const db = load();
    const match = db.tasks.find(t => t.id === id);
    if (!match)
        return null;
    const endedAt = new Date().toISOString();
    match.ended_at = endedAt;
    match.duration_ms = new Date(endedAt).getTime() - new Date(match.started_at).getTime();
    match.notes = notes || null;
    save();
    return match;
}
/**
 * Return the most recently started task that has not been stopped yet,
 * or null when nothing is running.
 */
export function getActiveTask() {
    const open = load().tasks.filter(t => t.ended_at === null);
    return open.at(-1) ?? null;
}
/**
 * Return tasks most-recent-first, optionally filtered to one category,
 * capped at `limit` entries.
 */
export function getTasks(limit = 50, category) {
    const newestFirst = [...load().tasks].reverse();
    const matching = category
        ? newestFirst.filter(t => t.category === category)
        : newestFirst;
    return matching.slice(0, limit);
}
/**
 * Case-insensitive substring search across task name, category, and
 * notes (notes may be null and are skipped).
 */
export function searchTasks(query) {
    const needle = query.toLowerCase();
    const matches = (text) => text != null && text.toLowerCase().includes(needle);
    return load().tasks.filter(t => matches(t.task) || matches(t.category) || matches(t.notes));
}
/**
 * Aggregate completed tasks started in the last `days` days by category.
 * Returns [{ category, total_ms, count }] sorted by total time, longest
 * first. Tasks without a duration (still running) are excluded.
 */
export function getStats(days = 7) {
    const cutoff = Date.now() - days * 24 * 60 * 60 * 1000;
    const byCategory = new Map();
    for (const t of load().tasks) {
        if (t.duration_ms === null || new Date(t.started_at).getTime() < cutoff)
            continue;
        const bucket = byCategory.get(t.category) ?? { total_ms: 0, count: 0 };
        bucket.total_ms += t.duration_ms;
        bucket.count += 1;
        byCategory.set(t.category, bucket);
    }
    return [...byCategory.entries()]
        .map(([category, stats]) => ({ category, ...stats }))
        .sort((a, b) => b.total_ms - a.total_ms);
}
/**
 * Aggregate completed tasks started in the last `days` days by calendar
 * date (the date portion of the ISO started_at string). Returns
 * [{ date, total_ms, count }] in chronological order.
 */
export function getDailyStats(days = 7) {
    const cutoff = Date.now() - days * 24 * 60 * 60 * 1000;
    const byDate = new Map();
    for (const t of load().tasks) {
        if (t.duration_ms === null || new Date(t.started_at).getTime() < cutoff)
            continue;
        const day = t.started_at.split('T')[0];
        const bucket = byDate.get(day) ?? { total_ms: 0, count: 0 };
        bucket.total_ms += t.duration_ms;
        bucket.count += 1;
        byDate.set(day, bucket);
    }
    return [...byDate.entries()]
        .map(([date, stats]) => ({ date, ...stats }))
        .sort((a, b) => a.date.localeCompare(b.date));
}
/**
 * Return the distinct category names in alphabetical order.
 */
export function getCategories() {
    const seen = new Set();
    for (const t of load().tasks) {
        seen.add(t.category);
    }
    return Array.from(seen).sort();
}
// Seed sample data for demos
// Inserts 15 randomized completed tasks spread over the past week.
// No-op when the database already contains any tasks.
export function seedSampleData() {
    const db = load();
    if (db.tasks.length > 0)
        return; // Already has data
    const categories = ['coding', 'research', 'testing', 'docs', 'review'];
    const tasks = [
        'Build auth module',
        'Implement API endpoints',
        'Write unit tests',
        'Code review PR #42',
        'Research caching strategies',
        'Fix memory leak',
        'Update documentation',
        'Refactor database layer',
        'Add error handling',
        'Deploy to staging',
        'Debug WebSocket issue',
        'Optimize query performance',
    ];
    const now = Date.now();
    for (let i = 0; i < 15; i++) {
        // Random start: up to 7 days + 12 hours before now.
        const daysAgo = Math.floor(Math.random() * 7);
        const hoursAgo = Math.floor(Math.random() * 12);
        const start = now - (daysAgo * 24 * 60 * 60 * 1000) - (hoursAgo * 60 * 60 * 1000);
        const duration = (5 + Math.random() * 55) * 60 * 1000; // 5-60 minutes
        const record = {
            id: db.nextId++,
            task: tasks[Math.floor(Math.random() * tasks.length)],
            category: categories[Math.floor(Math.random() * categories.length)],
            started_at: new Date(start).toISOString(),
            ended_at: new Date(start + duration).toISOString(),
            duration_ms: Math.round(duration),
            // ~30% of seeded tasks get a note attached.
            notes: Math.random() > 0.7 ? 'Completed successfully' : null
        };
        db.tasks.push(record);
    }
    save();
}
| export * from './db.js'; | ||
| export * from './charts.js'; |
| // Export modules for programmatic use | ||
| export * from './db.js'; | ||
| export * from './charts.js'; |
+81
| # @versatly/tasktime ⏱️ | ||
| CLI task timer for AI agents — benchmark your learning progression with auto-save logs and visualizations. | ||
| ## Install | ||
| ```bash | ||
| npm install -g @versatly/tasktime | ||
| ``` | ||
| ## Usage | ||
| ```bash | ||
| # Start timing a task | ||
| tasktime start "Build auth module" --category coding | ||
| # Check what's running | ||
| tasktime status | ||
| # Stop when done | ||
| tasktime stop --notes "Implemented JWT + refresh tokens" | ||
| # View history | ||
| tasktime history | ||
| tasktime ls -n 20 | ||
| # Search tasks | ||
| tasktime search "auth" | ||
| # Get reports with charts | ||
| tasktime report | ||
| tasktime report --days 30 | ||
| # Pretty charts | ||
| tasktime chart --type bar | ||
| tasktime chart --type line | ||
| tasktime chart --type spark | ||
| # List categories | ||
| tasktime categories | ||
| # One-liner for prompts | ||
| tasktime now # ⏱️ Build auth module (1h 23m) | ||
| ``` | ||
| ## Alias | ||
| `tt` is a shortcut: | ||
| ```bash | ||
| tt start "Quick fix" | ||
| tt stop | ||
| tt report | ||
| ``` | ||
| ## Features | ||
| - ⏱️ **Simple timer** — start/stop with one command | ||
| - 📁 **Categories** — organize tasks by type | ||
| - 🔍 **Full-text search** — find any past task | ||
| - 📊 **ASCII charts** — bar, line, sparkline | ||
| - 💾 **JSON storage** — local, zero-dependency, human-readable | ||
| - 📝 **Notes** — attach context when stopping | ||
| ## Data | ||
| Stored in `~/.tasktime/tasks.json` (plain JSON). | ||
| ## Programmatic Use | ||
| ```typescript | ||
| import { startTask, stopTask, getTasks, getStats } from '@versatly/tasktime'; | ||
| const task = startTask('My task', 'coding'); | ||
| // ... do work ... | ||
| const completed = stopTask(task.id, 'Done!'); | ||
| ``` | ||
| ## License | ||
| MIT |
+236
-49
| #!/usr/bin/env node | ||
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| const commander_1 = require("commander"); | ||
| const compare_1 = require("./commands/compare"); | ||
| const estimate_1 = require("./commands/estimate"); | ||
| const export_1 = require("./commands/export"); | ||
| const graph_1 = require("./commands/graph"); | ||
| const list_1 = require("./commands/list"); | ||
| const measure_1 = require("./commands/measure"); | ||
| const pdf_1 = require("./commands/pdf"); | ||
| const report_1 = require("./commands/report"); | ||
| const run_1 = require("./commands/run"); | ||
| const search_1 = require("./commands/search"); | ||
| const start_1 = require("./commands/start"); | ||
| const status_1 = require("./commands/status"); | ||
| const stop_1 = require("./commands/stop"); | ||
| const storage_1 = require("./lib/storage"); | ||
| async function main() { | ||
| const program = new commander_1.Command(); | ||
| program | ||
| .name("tasktime") | ||
| .description("Benchmark your workflows and generate performance graphs") | ||
| .version("0.1.0") | ||
| .enablePositionalOptions() | ||
| .option("--storage", "Print storage root path") | ||
| .hook("preAction", (thisCommand) => { | ||
| if (thisCommand.opts().storage) { | ||
| console.log((0, storage_1.getStorageRoot)()); | ||
| process.exit(0); | ||
| } | ||
| import { Command } from 'commander'; | ||
| import chalk from 'chalk'; | ||
| import Table from 'cli-table3'; | ||
| import { format, formatDistanceToNow } from 'date-fns'; | ||
| import { startTask, stopTask, getActiveTask, getTasks, searchTasks, getStats, getDailyStats, getCategories, seedSampleData } from './db.js'; | ||
| import { barChart, sparkline, lineChart, formatDuration, box } from './charts.js'; | ||
| const program = new Command(); | ||
| program | ||
| .name('tasktime') | ||
| .alias('tt') | ||
| .description('⏱️ CLI task timer for AI agents - benchmark your learning progression') | ||
| .version('1.0.0'); | ||
| // Start a task | ||
| program | ||
| .command('start <task>') | ||
| .description('Start timing a task') | ||
| .option('-c, --category <category>', 'Task category', 'general') | ||
| .action((task, opts) => { | ||
| const active = getActiveTask(); | ||
| if (active) { | ||
| console.log(chalk.yellow(`⚠️ Already timing: "${active.task}"`)); | ||
| console.log(chalk.dim(` Started ${formatDistanceToNow(new Date(active.started_at))} ago`)); | ||
| console.log(chalk.dim(` Run 'tasktime stop' first, or 'tasktime stop && tasktime start "${task}"'`)); | ||
| process.exit(1); | ||
| } | ||
| const record = startTask(task, opts.category); | ||
| console.log(chalk.green(`⏱️ Started: ${chalk.bold(task)}`)); | ||
| console.log(chalk.dim(` Category: ${opts.category}`)); | ||
| console.log(chalk.dim(` ID: ${record.id}`)); | ||
| }); | ||
// Stop the current task
// Closes the single active task (if any) and prints its duration.
program
    .command('stop')
    .description('Stop the current task')
    .option('-n, --notes <notes>', 'Add notes to the task')
    .action((opts) => {
    const active = getActiveTask();
    if (!active) {
        console.log(chalk.yellow('⚠️ No active task'));
        process.exit(1);
    }
    const record = stopTask(active.id, opts.notes);
    if (!record) {
        console.log(chalk.red('❌ Failed to stop task'));
        process.exit(1);
    }
    console.log(chalk.green(`✅ Completed: ${chalk.bold(record.task)}`));
    console.log(chalk.cyan(` Duration: ${formatDuration(record.duration_ms)}`));
    if (opts.notes) {
        console.log(chalk.dim(` Notes: ${opts.notes}`));
    }
});
// Show current status
// Renders the active task in a box with elapsed wall-clock time.
program
    .command('status')
    .description('Show current task status')
    .action(() => {
    const active = getActiveTask();
    if (!active) {
        console.log(chalk.dim('No active task. Run `tasktime start "your task"` to begin.'));
        return;
    }
    const elapsed = Date.now() - new Date(active.started_at).getTime();
    console.log(box('⏱️ Active Task', [
        `${chalk.bold(active.task)}`,
        `${chalk.dim('Category:')} ${active.category}`,
        `${chalk.dim('Elapsed:')} ${chalk.cyan(formatDuration(elapsed))}`,
        `${chalk.dim('Started:')} ${format(new Date(active.started_at), 'HH:mm:ss')}`,
    ].join('\n')));
});
| // List history | ||
| program | ||
| .command('history') | ||
| .alias('ls') | ||
| .description('Show task history') | ||
| .option('-n, --limit <n>', 'Number of tasks', '10') | ||
| .option('-c, --category <category>', 'Filter by category') | ||
| .action((opts) => { | ||
| const tasks = getTasks(parseInt(opts.limit), opts.category); | ||
| if (tasks.length === 0) { | ||
| console.log(chalk.dim('No tasks recorded yet.')); | ||
| return; | ||
| } | ||
| const table = new Table({ | ||
| head: [ | ||
| chalk.dim('ID'), | ||
| chalk.dim('Task'), | ||
| chalk.dim('Category'), | ||
| chalk.dim('Duration'), | ||
| chalk.dim('When') | ||
| ], | ||
| style: { head: [], border: [] } | ||
| }); | ||
| (0, start_1.registerStartCommand)(program); | ||
| (0, stop_1.registerStopCommand)(program); | ||
| (0, run_1.registerRunCommand)(program); | ||
| (0, status_1.registerStatusCommand)(program); | ||
| (0, list_1.registerListCommand)(program); | ||
| (0, search_1.registerSearchCommand)(program); | ||
| (0, measure_1.registerMeasureCommand)(program); | ||
| (0, estimate_1.registerEstimateCommand)(program); | ||
| (0, export_1.registerExportCommand)(program); | ||
| (0, graph_1.registerGraphCommand)(program); | ||
| (0, compare_1.registerCompareCommand)(program); | ||
| (0, report_1.registerReportCommand)(program); | ||
| (0, pdf_1.registerPdfCommand)(program); | ||
| await program.parseAsync(process.argv); | ||
| } | ||
| main().catch((error) => { | ||
| const message = error instanceof Error ? error.message : String(error); | ||
| console.error(`tasktime error: ${message}`); | ||
| process.exitCode = 1; | ||
| for (const t of tasks) { | ||
| const when = formatDistanceToNow(new Date(t.started_at), { addSuffix: true }); | ||
| const dur = t.duration_ms ? formatDuration(t.duration_ms) : chalk.yellow('⏱️ running'); | ||
| table.push([ | ||
| chalk.dim(t.id.toString()), | ||
| t.task.slice(0, 40) + (t.task.length > 40 ? '…' : ''), | ||
| chalk.cyan(t.category), | ||
| dur, | ||
| chalk.dim(when) | ||
| ]); | ||
| } | ||
| console.log(table.toString()); | ||
| }); | ||
// Search tasks
// Case-insensitive substring search across task text, category, notes.
program
    .command('search <query>')
    .description('Search tasks (full-text)')
    .action((query) => {
    const results = searchTasks(query);
    if (results.length === 0) {
        console.log(chalk.dim(`No tasks matching "${query}"`));
        return;
    }
    console.log(chalk.bold(`Found ${results.length} tasks:\n`));
    for (const t of results) {
        // Tasks with no duration yet are still running.
        const dur = t.duration_ms ? formatDuration(t.duration_ms) : 'running';
        console.log(` ${chalk.cyan(t.id.toString().padStart(3))} ${t.task}`);
        console.log(chalk.dim(` ${t.category} • ${dur}`));
    }
});
// Show stats/report
// Summary totals, per-category bar chart, daily sparkline, recent tasks.
program
    .command('report')
    .alias('stats')
    .description('Show time stats and charts')
    .option('-d, --days <n>', 'Days to include', '7')
    .action((opts) => {
    const days = parseInt(opts.days);
    const stats = getStats(days);
    const daily = getDailyStats(days);
    if (stats.length === 0) {
        console.log(chalk.dim(`No tasks in the last ${days} days.`));
        return;
    }
    // Summary
    const totalMs = stats.reduce((sum, s) => sum + s.total_ms, 0);
    const totalCount = stats.reduce((sum, s) => sum + s.count, 0);
    console.log(chalk.bold(`\n📊 Task Report (last ${days} days)\n`));
    console.log(` Total time: ${chalk.cyan(formatDuration(totalMs))}`);
    console.log(` Total tasks: ${chalk.cyan(totalCount.toString())}`);
    console.log(` Categories: ${chalk.cyan(stats.length.toString())}`);
    // Category bar chart
    console.log(chalk.bold('\n📁 Time by Category:\n'));
    console.log(barChart(stats.map(s => ({ label: s.category, value: s.total_ms }))));
    // Daily sparkline
    if (daily.length > 1) {
        console.log(chalk.bold('\n📈 Daily Activity:\n'));
        const spark = sparkline(daily.map(d => d.total_ms));
        const labels = daily.map(d => format(new Date(d.date), 'MM/dd')).join(' ');
        console.log(` ${spark}`);
        console.log(chalk.dim(` ${labels}`));
    }
    // Top tasks table
    const tasks = getTasks(5);
    if (tasks.length > 0) {
        console.log(chalk.bold('\n🔥 Recent Tasks:\n'));
        for (const t of tasks) {
            const dur = t.duration_ms ? formatDuration(t.duration_ms) : chalk.yellow('running');
            console.log(` ${chalk.dim('•')} ${t.task.slice(0, 50)} ${chalk.dim('—')} ${chalk.cyan(dur)}`);
        }
    }
    console.log('');
});
// Chart command
// Daily totals as a bar (default), line, or sparkline chart.
program
    .command('chart')
    .description('Show detailed charts')
    .option('-t, --type <type>', 'Chart type: bar, line, spark', 'bar')
    .option('-d, --days <n>', 'Days to include', '7')
    .action((opts) => {
    const days = parseInt(opts.days);
    const daily = getDailyStats(days);
    if (daily.length === 0) {
        console.log(chalk.dim('No data to chart.'));
        return;
    }
    const data = daily.map(d => ({
        label: format(new Date(d.date), 'MM/dd'),
        value: d.total_ms
    }));
    console.log(chalk.bold(`\n📊 Daily Time (${opts.type} chart)\n`));
    switch (opts.type) {
        case 'line':
            console.log(lineChart(data));
            break;
        case 'spark':
            console.log(` ${sparkline(data.map(d => d.value))}`);
            console.log(chalk.dim(` ${data.map(d => d.label).join(' ')}`));
            break;
        default:
            console.log(barChart(data));
    }
    console.log('');
});
// Categories
// Lists every known category with its total over the last 30 days.
program
    .command('categories')
    .alias('cats')
    .description('List all categories')
    .action(() => {
    const cats = getCategories();
    if (cats.length === 0) {
        console.log(chalk.dim('No categories yet.'));
        return;
    }
    const stats = getStats(30);
    console.log(chalk.bold('\n📁 Categories:\n'));
    for (const cat of cats) {
        const stat = stats.find(s => s.category === cat);
        const time = stat ? formatDuration(stat.total_ms) : chalk.dim('0');
        console.log(` ${chalk.cyan('•')} ${cat} ${chalk.dim(`(${time} this month)`)}`);
    }
    console.log('');
});
// Quick single-line status
// Prints nothing and exits 0 when idle, so shells/prompts can embed it.
program
    .command('now')
    .description('One-line status (for prompts)')
    .action(() => {
    const active = getActiveTask();
    if (!active) {
        process.exit(0); // Silent if nothing active
    }
    const elapsed = Date.now() - new Date(active.started_at).getTime();
    console.log(`⏱️ ${active.task} (${formatDuration(elapsed)})`);
});
// Seed sample data for demos
program
    .command('seed')
    .description('Seed sample data for demo (only if empty)')
    .action(() => {
    seedSampleData();
    console.log(chalk.green('✅ Sample data seeded! Run `tasktime report` to see charts.'));
});
program.parse();
+35
-21
{
  "name": "@versatly/tasktime",
  "version": "1.0.0",
  "description": "CLI task timer for AI agents - benchmark learning progression with auto-save logs and visualizations",
  "license": "MIT",
  "type": "module",
  "main": "dist/index.js",
  "bin": {
    "tasktime": "./dist/cli.js",
    "tt": "./dist/cli.js"
  },
  "scripts": {
    "build": "tsc",
    "dev": "tsc -w",
    "start": "node dist/cli.js",
    "prepublishOnly": "npm run build"
  },
  "keywords": [
    "cli",
    "timer",
    "task",
    "ai-agent",
    "benchmark",
    "productivity",
    "time-tracking"
  ],
  "author": "Versatly <pedro@versatly.com>",
  "repository": {
    "type": "git",
    "url": "https://github.com/Versatly/tasktime"
  },
  "dependencies": {
    "chalk": "^5.3.0",
    "cli-table3": "^0.6.5",
    "commander": "^12.1.0",
    "date-fns": "^3.6.0"
  },
  "devDependencies": {
    "@types/node": "^22.0.0",
    "typescript": "^5.5.0"
  },
  "engines": {
    "node": ">=18"
  },
  "files": [
    "dist",
    "README.md"
  ]
}
| "use strict"; | ||
| var __importDefault = (this && this.__importDefault) || function (mod) { | ||
| return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
| }; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerCompareCommand = registerCompareCommand; | ||
| const node_path_1 = __importDefault(require("node:path")); | ||
| const graphs_1 = require("../lib/graphs"); | ||
| const storage_1 = require("../lib/storage"); | ||
// Register `tasktime compare <a> <b>`: renders a side-by-side PNG graph
// of the two workflows' recent runs.
function registerCompareCommand(program) {
    program
        .command("compare <workflowA> <workflowB>")
        .description("Generate side-by-side workflow comparison graph")
        .option("--last <count>", "Number of recent runs to include per workflow", "50")
        .option("--output <file>", "Output PNG file path")
        .option("--dark", "Use dark theme", false)
        .action(async (workflowA, workflowB, options) => {
        // Fall back to 50 runs when --last is missing or not numeric.
        const last = Number.parseInt(options.last ?? "50", 10);
        const workflows = await (0, storage_1.getCompareRuns)([workflowA, workflowB], Number.isNaN(last) ? 50 : last);
        // Default output name: "<a>-vs-<b>.png" in the working directory.
        const outputPath = node_path_1.default.resolve(process.cwd(), options.output ?? `${workflowA}-vs-${workflowB}.png`);
        await (0, graphs_1.generateCompareGraph)({
            workflows,
            output: outputPath,
            dark: Boolean(options.dark),
        }, __dirname);
        console.log(`Saved compare graph: ${outputPath}`);
    });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerEstimateCommand = registerEstimateCommand; | ||
| const format_1 = require("../lib/format"); | ||
| const stats_1 = require("../lib/stats"); | ||
| const storage_1 = require("../lib/storage"); | ||
// Linear-interpolation percentile of `values` at fraction q in [0, 1].
// An empty input yields 0. The input array is not mutated.
function percentile(values, q) {
    if (values.length === 0) {
        return 0;
    }
    const sorted = [...values].sort((a, b) => a - b);
    const rank = (sorted.length - 1) * q;
    const lo = Math.floor(rank);
    const hi = Math.ceil(rank);
    // Exact hit on an element: no interpolation needed.
    if (lo === hi) {
        return sorted[lo];
    }
    const frac = rank - lo;
    return sorted[lo] * (1 - frac) + sorted[hi] * frac;
}
// Register `tasktime estimate <workflow>`: summarizes historical run
// durations as mean +/- stdev plus the interquartile (p25-p75) range.
function registerEstimateCommand(program) {
    program
        .command("estimate <workflow>")
        .description("Estimate expected runtime from workflow history")
        .option("--json", "Output machine-readable JSON", false)
        .action(async (workflow, options) => {
        const history = await (0, storage_1.getWorkflowHistory)(workflow);
        const durations = history.map((run) => run.durationMs);
        const avgMs = (0, stats_1.average)(durations);
        const spreadMs = (0, stats_1.stdev)(durations);
        const p25 = percentile(durations, 0.25);
        const p75 = percentile(durations, 0.75);
        const payload = {
            // Prefer the canonical workflow name stored in history.
            workflow: history[0]?.workflow ?? workflow,
            runs: durations.length,
            expectedMs: Math.round(avgMs),
            spreadMs: Math.round(spreadMs),
            rangeMs: {
                p25: Math.round(p25),
                p75: Math.round(p75),
            },
        };
        if (options.json) {
            console.log(JSON.stringify(payload, null, 2));
            return;
        }
        console.log(`Based on ${payload.runs} runs, expect ${(0, format_1.formatDuration)(payload.expectedMs)} +/- ${(0, format_1.formatDuration)(payload.spreadMs)}`);
        console.log(`Typical range (middle 50%): ${(0, format_1.formatDuration)(payload.rangeMs.p25)} to ${(0, format_1.formatDuration)(payload.rangeMs.p75)}`);
    });
}
| "use strict"; | ||
| var __importDefault = (this && this.__importDefault) || function (mod) { | ||
| return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
| }; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerExportCommand = registerExportCommand; | ||
| const node_fs_1 = require("node:fs"); | ||
| const node_path_1 = __importDefault(require("node:path")); | ||
| const storage_1 = require("../lib/storage"); | ||
// Quote a string for a CSV field, doubling any embedded double quotes.
function escapeCsv(value) {
    return `"${value.split('"').join('""')}"`;
}
// Serialize run records to CSV with a fixed header row. String fields
// are quoted via escapeCsv; numeric fields are emitted bare.
function runsToCsv(runs) {
    const rows = [
        [
            "workflow",
            "category",
            "durationMs",
            "startedAt",
            "endedAt",
            "tags",
            "notes",
            "session",
            "command",
            "commandExitCode",
        ].join(","),
    ];
    for (const run of runs) {
        rows.push([
            escapeCsv(run.workflow),
            escapeCsv(run.category ?? ""),
            String(run.durationMs),
            escapeCsv(run.startedAt),
            escapeCsv(run.endedAt),
            // Tags are flattened into a single semicolon-separated field.
            escapeCsv(run.tags.join(";")),
            escapeCsv(run.notes ?? ""),
            escapeCsv(run.session ?? ""),
            escapeCsv(run.command ?? ""),
            // Exit code may legitimately be absent (run not command-driven).
            run.commandExitCode === undefined ? "" : String(run.commandExitCode),
        ].join(","));
    }
    // Trailing newline so the emitted file ends cleanly.
    return `${rows.join("\n")}\n`;
}
/**
 * Deliver export content: stream to stdout when no output path was given,
 * otherwise resolve the path against cwd, create the parent directory, and
 * write the file as UTF-8 (logging where it landed).
 */
async function writeOrPrint(content, outputPath) {
    if (outputPath) {
        const target = node_path_1.default.resolve(process.cwd(), outputPath);
        await node_fs_1.promises.mkdir(node_path_1.default.dirname(target), { recursive: true });
        await node_fs_1.promises.writeFile(target, content, "utf8");
        console.log(`Exported data: ${target}`);
    }
    else {
        process.stdout.write(content);
    }
}
/**
 * Registers `tasktime export <workflow>` — dumps a workflow's run history as
 * JSON (default) or CSV, to stdout or to --output <file>.
 */
function registerExportCommand(program) {
    program
        .command("export <workflow>")
        .description("Export workflow run history as JSON or CSV")
        .option("--format <format>", "Export format: json|csv", "json")
        .option("--output <file>", "Write output to file (defaults to stdout)")
        .action(async (workflow, options) => {
            // Validate the format before touching storage.
            const format = (options.format ?? "json").trim().toLowerCase();
            const isJson = format === "json";
            if (!isJson && format !== "csv") {
                throw new Error('Unsupported format. Use --format "json" or --format "csv"');
            }
            const runs = await (0, storage_1.getWorkflowHistory)(workflow);
            const content = isJson
                ? `${JSON.stringify({ workflow: runs[0]?.workflow ?? workflow, exportedAt: new Date().toISOString(), runs }, null, 2)}\n`
                : runsToCsv(runs);
            await writeOrPrint(content, options.output);
        });
}
| "use strict"; | ||
| var __importDefault = (this && this.__importDefault) || function (mod) { | ||
| return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
| }; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerGraphCommand = registerGraphCommand; | ||
| const node_path_1 = __importDefault(require("node:path")); | ||
| const graphs_1 = require("../lib/graphs"); | ||
| const storage_1 = require("../lib/storage"); | ||
/**
 * Registers `tasktime graph <workflow>` — renders a PNG trend graph of the
 * workflow's most recent runs.
 */
function registerGraphCommand(program) {
    program
        .command("graph <workflow>")
        .description("Generate trend graph for a workflow")
        .option("--last <count>", "Number of recent runs to include", "20")
        .option("--output <file>", "Output PNG file path")
        .option("--dark", "Use dark theme", false)
        .action(async (workflow, options) => {
            // Fall back to 20 when --last is missing, non-numeric, or < 1.
            // (Previously 0/negative counts were passed straight to storage;
            // this matches the clamping used by the pdf command.)
            const parsed = Number.parseInt(options.last ?? "20", 10);
            const last = Number.isNaN(parsed) || parsed < 1 ? 20 : parsed;
            const runs = await (0, storage_1.getWorkflowRuns)(workflow, last);
            const outputPath = node_path_1.default.resolve(process.cwd(), options.output ?? `${workflow}-trend.png`);
            await (0, graphs_1.generateTrendGraph)({
                workflow,
                output: outputPath,
                dark: Boolean(options.dark),
                runs,
            }, __dirname);
            console.log(`Saved trend graph: ${outputPath}`);
        });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerListCommand = registerListCommand; | ||
| const storage_1 = require("../lib/storage"); | ||
/**
 * Registers `tasktime list` — prints the most recent benchmark runs, one
 * formatted line per run.
 */
function registerListCommand(program) {
    program
        .command("list")
        .description("List recent benchmark runs")
        .option("--last <count>", "Number of runs to display", "10")
        .action(async (options) => {
            // Fall back to 10 when --last is missing, non-numeric, or < 1
            // (previously 0/negative counts were passed straight to storage).
            const parsed = Number.parseInt(options.last ?? "10", 10);
            const limit = Number.isNaN(parsed) || parsed < 1 ? 10 : parsed;
            const runs = await (0, storage_1.listRuns)(limit);
            if (runs.length === 0) {
                console.log("No benchmark runs recorded yet");
                return;
            }
            for (const run of runs) {
                console.log((0, storage_1.formatRunLine)(run));
            }
        });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerMeasureCommand = registerMeasureCommand; | ||
| const node_child_process_1 = require("node:child_process"); | ||
| const format_1 = require("../lib/format"); | ||
// Convert milliseconds to a seconds string with exactly two decimal places.
function toSeconds(durationMs) {
    const seconds = durationMs / 1000;
    return seconds.toFixed(2);
}
/**
 * Registers `tasktime measure -- <command>` — runs a command with inherited
 * stdio, reports its wall-clock duration (human-readable or --json), and
 * mirrors a failing exit code onto the CLI process. Nothing is stored.
 */
function registerMeasureCommand(program) {
    program
        .command("measure [command...]")
        .description("Run a command and print duration without storing benchmark data")
        .allowUnknownOption(true)
        .passThroughOptions()
        .option("--json", "Output machine-readable JSON", false)
        .action(async (command, options) => {
            // Drop the bare "--" separator commander leaves in the variadic args.
            const argv = command.filter((arg) => arg !== "--");
            if (argv.length === 0) {
                throw new Error('No command provided. Use: tasktime measure -- <command>');
            }
            const startedAt = new Date();
            const exitCode = await new Promise((resolve, reject) => {
                const child = (0, node_child_process_1.spawn)(argv[0], argv.slice(1), { stdio: "inherit" });
                child.on("error", (error) => reject(new Error(`Failed to run command "${argv[0]}": ${error.message}`)));
                // A null close code (signal-terminated child) is reported as 1.
                child.on("close", (code) => resolve(code ?? 1));
            });
            const endedAt = new Date();
            const durationMs = Math.max(0, endedAt.getTime() - startedAt.getTime());
            if (!options.json) {
                console.log(`Command completed in ${(0, format_1.formatDuration)(durationMs)} (${toSeconds(durationMs)}s)`);
            }
            else {
                const payload = {
                    command: argv.join(" "),
                    durationMs,
                    durationSeconds: Number(toSeconds(durationMs)),
                    startedAt: startedAt.toISOString(),
                    endedAt: endedAt.toISOString(),
                    exitCode,
                };
                console.log(JSON.stringify(payload, null, 2));
            }
            // Propagate a failing child exit code to the CLI itself.
            if (exitCode !== 0) {
                process.exitCode = exitCode;
            }
        });
}
| "use strict"; | ||
| var __importDefault = (this && this.__importDefault) || function (mod) { | ||
| return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
| }; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerPdfCommand = registerPdfCommand; | ||
| const node_fs_1 = require("node:fs"); | ||
| const node_os_1 = __importDefault(require("node:os")); | ||
| const node_path_1 = __importDefault(require("node:path")); | ||
| const graphs_1 = require("../lib/graphs"); | ||
| const stats_1 = require("../lib/stats"); | ||
| const storage_1 = require("../lib/storage"); | ||
// Resolve the report path against cwd, using the fallback file name when the
// user did not pass --output.
function resolveOutputPath(output, fallbackName) {
    const chosen = output ?? fallbackName;
    return node_path_1.default.resolve(process.cwd(), chosen);
}
// Map a trend value to its report label: the two known directional trends
// pass through unchanged; anything else reads as "stable".
function trendLabel(trend) {
    switch (trend) {
        case "improving":
        case "regressing":
            return trend;
        default:
            return "stable";
    }
}
/**
 * Walk the history in order and collect a key for every run that strictly
 * beat all earlier runs (a "personal best at the time"). Keys have the shape
 * "endedAt:durationMs" so history table rows can be matched back up.
 */
function buildPersonalBestKeys(history) {
    const keys = new Set();
    let fastest = Number.POSITIVE_INFINITY;
    for (const run of history) {
        if (run.durationMs >= fastest) {
            continue;
        }
        fastest = run.durationMs;
        keys.add(`${run.endedAt}:${run.durationMs}`);
    }
    return keys;
}
/**
 * Flatten notes for a one-line table cell: collapse all whitespace runs to
 * single spaces and cap the result at 72 characters (69 + "..." when
 * truncated). Missing notes become the empty string.
 */
function compactNotes(notes) {
    if (!notes) {
        return "";
    }
    const flat = notes.split(/\s+/).join(" ").trim();
    if (flat.length <= 72) {
        return flat;
    }
    return `${flat.slice(0, 69)}...`;
}
// Parse the --days option: default 7, reject non-numeric or < 1 values,
// and cap the window at 31 days.
function normalizedDays(input) {
    const value = Number.parseInt(input ?? "7", 10);
    if (!Number.isInteger(value) || value < 1) {
        return 7;
    }
    return value > 31 ? 31 : value;
}
// Parse the --last option: default 30, reject non-numeric or < 1 values,
// and cap the history size at 500 runs.
function normalizedLast(input) {
    const value = Number.parseInt(input ?? "30", 10);
    if (!Number.isInteger(value) || value < 1) {
        return 30;
    }
    return value > 500 ? 500 : value;
}
/**
 * Registers `tasktime pdf` — builds a PDF report in exactly one of three
 * mutually exclusive modes: a single workflow (positional argument), a
 * weekly rollup (--week), or a category rollup (--category <name>).
 *
 * Graph PNGs are first rendered into a temp directory, embedded into the PDF
 * by generatePdfReport, and the temp directory is removed in `finally`.
 */
function registerPdfCommand(program) {
    program
        .command("pdf [workflow]")
        .description("Generate PDF report for workflow, week, or category")
        .option("--week", "Build weekly report PDF")
        .option("--category <category>", "Build category report PDF")
        .option("--output <file>", "Output PDF file path")
        .option("--last <count>", "Recent runs to include in history tables", "30")
        .option("--days <count>", "Days for weekly report", "7")
        .option("--dark", "Use dark graph theme", false)
        .action(async (workflow, options) => {
        // Exactly one mode must be selected; counting Booleans keeps the check flat.
        const modeCount = Number(Boolean(workflow)) + Number(Boolean(options.week)) + Number(Boolean(options.category));
        if (modeCount !== 1) {
            throw new Error('Pick exactly one mode: workflow argument, "--week", or "--category <name>"');
        }
        // Scratch directory for the intermediate graph PNG; removed in `finally`.
        const tmpDir = await node_fs_1.promises.mkdtemp(node_path_1.default.join(node_os_1.default.tmpdir(), "tasktime-pdf-"));
        try {
            // Report byline: explicit agent name, then OS user, then a generic label.
            const agent = process.env.TASKTIME_AGENT_NAME ?? process.env.USER ?? "agent";
            const generatedAt = new Date().toISOString();
            const dark = Boolean(options.dark);
            const last = normalizedLast(options.last);
            if (workflow) {
                // --- Workflow mode ---
                const entry = await (0, storage_1.getWorkflowEntry)(workflow);
                const runs = await (0, storage_1.getWorkflowRuns)(workflow, last);
                const allRuns = entry.history;
                const pbKeys = buildPersonalBestKeys(allRuns);
                // NOTE(review): assumes entry.history is non-empty and ordered
                // oldest-first — confirm getWorkflowEntry guarantees both.
                const firstDuration = allRuns[0].durationMs;
                const latestDuration = allRuns[allRuns.length - 1].durationMs;
                // Positive changePct means the latest run is faster than the first.
                const changePct = firstDuration > 0 ? ((firstDuration - latestDuration) / firstDuration) * 100 : 0;
                const streak = (0, stats_1.calculateImprovementStreak)(allRuns);
                const trend = (0, stats_1.inferTrend)(allRuns);
                const graphPath = node_path_1.default.join(tmpDir, "workflow-trend.png");
                await (0, graphs_1.generateTrendGraph)({
                    workflow: allRuns[0]?.workflow ?? workflow,
                    output: graphPath,
                    dark,
                    runs,
                }, __dirname);
                const outputPath = resolveOutputPath(options.output, `${allRuns[0]?.workflow ?? workflow}-report.pdf`);
                await (0, graphs_1.generatePdfReport)({
                    mode: "workflow",
                    output: outputPath,
                    graphPath,
                    generatedAt,
                    agent,
                    workflow: allRuns[0]?.workflow ?? workflow,
                    dateRange: {
                        start: allRuns[0].endedAt,
                        end: allRuns[allRuns.length - 1].endedAt,
                    },
                    summary: {
                        totalRuns: entry.runs,
                        averageMs: Math.round(entry.avgMs),
                        bestMs: entry.bestMs,
                        worstMs: entry.worstMs,
                        trend: trendLabel(trend),
                    },
                    improvements: {
                        changePct: Number(changePct.toFixed(1)),
                        streak,
                    },
                    // History rows flag runs that were a personal best at the time.
                    history: runs.map((run) => ({
                        endedAt: run.endedAt,
                        durationMs: run.durationMs,
                        notes: compactNotes(run.notes),
                        personalBest: pbKeys.has(`${run.endedAt}:${run.durationMs}`),
                    })),
                }, __dirname);
                console.log(`Saved PDF report: ${outputPath}`);
                return;
            }
            if (options.week) {
                // --- Weekly mode ---
                const days = normalizedDays(options.days);
                const weekly = await (0, storage_1.getWeeklySummary)(days);
                const graphPath = node_path_1.default.join(tmpDir, "weekly-report.png");
                await (0, graphs_1.generateReportGraph)({
                    mode: "week",
                    output: graphPath,
                    dark,
                    weekly,
                }, __dirname);
                const outputPath = resolveOutputPath(options.output, "tasktime-weekly-report.pdf");
                const totalRuns = weekly.days.reduce((sum, day) => sum + day.runCount, 0);
                // Per-day category durations are in ms; fold each day's total into minutes.
                const totalMinutes = weekly.days.reduce((sum, day) => sum +
                    Object.values(day.categories).reduce((daySum, durationMs) => daySum + durationMs, 0) / 60000, 0);
                const totalImprovements = weekly.days.reduce((sum, day) => sum + day.improvements, 0);
                const totalRegressions = weekly.days.reduce((sum, day) => sum + day.regressions, 0);
                await (0, graphs_1.generatePdfReport)({
                    mode: "week",
                    output: outputPath,
                    graphPath,
                    generatedAt,
                    agent,
                    weekly,
                    summary: {
                        days,
                        totalRuns,
                        totalMinutes: Number(totalMinutes.toFixed(1)),
                        totalImprovements,
                        totalRegressions,
                    },
                }, __dirname);
                console.log(`Saved PDF report: ${outputPath}`);
                return;
            }
            // --- Category mode --- (modeCount check guarantees options.category here)
            const category = options.category.trim().toLowerCase();
            const summary = await (0, storage_1.getCategorySummary)(category);
            const runs = await (0, storage_1.getRunsByCategory)(category, last);
            const graphPath = node_path_1.default.join(tmpDir, "category-report.png");
            await (0, graphs_1.generateReportGraph)({
                mode: "category",
                output: graphPath,
                dark,
                category,
                summary,
            }, __dirname);
            const outputPath = resolveOutputPath(options.output, `tasktime-category-${category}.pdf`);
            await (0, graphs_1.generatePdfReport)({
                mode: "category",
                output: outputPath,
                graphPath,
                generatedAt,
                agent,
                category,
                summary: summary.map((item) => ({
                    workflow: item.workflow,
                    runs: item.runs,
                    averageMs: Math.round(item.averageMs),
                    stdevMs: Math.round(item.stdevMs),
                })),
                history: runs.map((run) => ({
                    workflow: run.workflow,
                    endedAt: run.endedAt,
                    durationMs: run.durationMs,
                    notes: compactNotes(run.notes),
                })),
            }, __dirname);
            console.log(`Saved PDF report: ${outputPath}`);
        }
        finally {
            // Always remove the scratch directory, even if report generation failed.
            await node_fs_1.promises.rm(tmpDir, { recursive: true, force: true });
        }
    });
}
| "use strict"; | ||
| var __importDefault = (this && this.__importDefault) || function (mod) { | ||
| return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
| }; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerReportCommand = registerReportCommand; | ||
| const node_path_1 = __importDefault(require("node:path")); | ||
| const graphs_1 = require("../lib/graphs"); | ||
| const storage_1 = require("../lib/storage"); | ||
/**
 * Registers `tasktime report` — renders a weekly or category summary PNG.
 * Exactly one of --week / --category must be supplied.
 */
function registerReportCommand(program) {
    program
        .command("report")
        .description("Generate weekly or category benchmark report")
        .option("--week", "Weekly summary report")
        .option("--category <category>", "Category summary report")
        .option("--output <file>", "Output PNG file path")
        .option("--dark", "Use dark theme", false)
        .action(async (options) => {
            if (!options.week && !options.category) {
                throw new Error("Use --week or --category <name>");
            }
            if (options.week && options.category) {
                throw new Error("Use either --week or --category, not both");
            }
            if (options.week) {
                const weekly = await (0, storage_1.getWeeklySummary)(7);
                const outputPath = node_path_1.default.resolve(process.cwd(), options.output ?? "tasktime-weekly.png");
                await (0, graphs_1.generateReportGraph)({
                    mode: "week",
                    output: outputPath,
                    dark: Boolean(options.dark),
                    weekly,
                }, __dirname);
                console.log(`Saved weekly report: ${outputPath}`);
                return;
            }
            // Normalize the category the same way the pdf command does, so
            // "--category Build" and "--category build" query the same summary
            // and produce the same default output file name.
            const category = options.category.trim().toLowerCase();
            const summary = await (0, storage_1.getCategorySummary)(category);
            const outputPath = node_path_1.default.resolve(process.cwd(), options.output ?? `tasktime-category-${category}.png`);
            await (0, graphs_1.generateReportGraph)({
                mode: "category",
                output: outputPath,
                dark: Boolean(options.dark),
                category,
                summary,
            }, __dirname);
            console.log(`Saved category report: ${outputPath}`);
        });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerRunCommand = registerRunCommand; | ||
| const node_child_process_1 = require("node:child_process"); | ||
| const format_1 = require("../lib/format"); | ||
| const storage_1 = require("../lib/storage"); | ||
| const timer_1 = require("../lib/timer"); | ||
/**
 * Spawn the user's command with inherited stdio and time it.
 * Returns { exitCode, startedAt, endedAt }; rejects when the command cannot
 * be spawned at all. `workflow` is only used in the error message.
 */
async function executeCommand(command, workflow) {
    // Strip the "--" separator commander may leave in the variadic args.
    const argv = command.filter((arg) => arg !== '--');
    if (argv.length === 0) {
        throw new Error('No command provided. Use: tasktime run "workflow" -- <command>');
    }
    const startedAt = new Date();
    const exitCode = await new Promise((resolve, reject) => {
        const child = (0, node_child_process_1.spawn)(argv[0], argv.slice(1), { stdio: "inherit" });
        child.on("error", (error) => {
            reject(new Error(`Failed running command for "${workflow}": ${error.message}`));
        });
        // A null close code (signal-terminated child) is reported as 1.
        child.on("close", (code) => resolve(code ?? 1));
    });
    return { exitCode, startedAt, endedAt: new Date() };
}
/**
 * Registers `tasktime run <workflow> -- <command>` — times the command,
 * stores the benchmark record, prints the completion summary, and mirrors a
 * failing child exit code onto the CLI process.
 */
function registerRunCommand(program) {
    program
        .command("run <workflow> [command...]")
        .description("Run a command and store timing benchmark")
        .option("--category <category>", "Workflow category")
        .option("--tags <tags>", "Comma-separated tags")
        .option("--notes <notes>", "Optional notes")
        .option("--session <session>", "Session identifier")
        .allowUnknownOption(true)
        .passThroughOptions()
        .action(async (workflow, command, options) => {
            // Time the child process first, then persist the benchmark record.
            const { exitCode, startedAt, endedAt } = await executeCommand(command, workflow);
            const result = await (0, timer_1.runTimedCommand)({
                workflow,
                category: options.category,
                tags: (0, format_1.parseTags)(options.tags),
                notes: options.notes,
                session: options.session,
                startedAt,
                endedAt,
                command: command.filter((arg) => arg !== '--').join(" "),
                exitCode,
            });
            const durations = await (0, storage_1.getWorkflowHistoryDurations)(result.run.workflow);
            console.log("");
            console.log((0, format_1.formatRunCompletion)(result, durations));
            if (exitCode !== 0) {
                process.exitCode = exitCode;
            }
        });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerSearchCommand = registerSearchCommand; | ||
| const storage_1 = require("../lib/storage"); | ||
/**
 * Registers `tasktime search <query>` — free-text search over benchmark
 * history, printing one formatted line per match.
 */
function registerSearchCommand(program) {
    program
        .command("search <query>")
        .description("Search benchmark history")
        .option("--last <count>", "Maximum matching runs to display", "25")
        .action(async (query, options) => {
            // Fall back to 25 when --last is missing, non-numeric, or < 1
            // (previously 0/negative counts were passed straight to storage).
            const parsed = Number.parseInt(options.last ?? "25", 10);
            const limit = Number.isNaN(parsed) || parsed < 1 ? 25 : parsed;
            const runs = await (0, storage_1.searchRuns)(query, limit);
            if (runs.length === 0) {
                console.log(`No matches found for "${query}"`);
                return;
            }
            for (const run of runs) {
                console.log((0, storage_1.formatRunLine)(run));
            }
        });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerStartCommand = registerStartCommand; | ||
| const format_1 = require("../lib/format"); | ||
| const timer_1 = require("../lib/timer"); | ||
/**
 * Registers `tasktime start <workflow>` — opens a benchmark timer and echoes
 * what was started (plus category/tags when present).
 */
function registerStartCommand(program) {
    program
        .command("start <workflow>")
        .description("Start a benchmark timer")
        .option("--category <category>", "Workflow category")
        .option("--tags <tags>", "Comma-separated tags")
        .option("--notes <notes>", "Optional notes")
        .option("--session <session>", "Session identifier")
        .action(async (workflow, options) => {
            const timer = await (0, timer_1.startTimer)({
                workflow,
                category: options.category,
                tags: (0, format_1.parseTags)(options.tags),
                notes: options.notes,
                session: options.session,
            });
            const startedClock = new Date(timer.startedAt).toLocaleTimeString();
            console.log(`Started "${timer.workflow}" at ${startedClock}`);
            if (timer.category) {
                console.log(`Category: ${timer.category}`);
            }
            if (timer.tags.length > 0) {
                const hashTags = timer.tags.map((tag) => `#${tag}`);
                console.log(`Tags: ${hashTags.join(" ")}`);
            }
        });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerStatusCommand = registerStatusCommand; | ||
| const format_1 = require("../lib/format"); | ||
| const storage_1 = require("../lib/storage"); | ||
/**
 * Registers `tasktime status` — shows the active timer (workflow, elapsed
 * time, start time, category, tags) or a "no active timer" notice.
 */
function registerStatusCommand(program) {
    program
        .command("status")
        .description("Show active timer status")
        .action(async () => {
            const status = await (0, storage_1.getStatus)();
            const timer = status.activeTimer;
            if (!timer || status.elapsedMs === undefined) {
                console.log("No active timer");
                return;
            }
            console.log(`Active: ${timer.workflow}`);
            console.log(`Elapsed: ${(0, format_1.formatDurationClock)(status.elapsedMs)}`);
            console.log(`Started: ${new Date(timer.startedAt).toLocaleString()}`);
            if (timer.category) {
                console.log(`Category: ${timer.category}`);
            }
            if (timer.tags.length > 0) {
                const hashTags = timer.tags.map((tag) => `#${tag}`);
                console.log(`Tags: ${hashTags.join(" ")}`);
            }
        });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.registerStopCommand = registerStopCommand; | ||
| const format_1 = require("../lib/format"); | ||
| const storage_1 = require("../lib/storage"); | ||
| const timer_1 = require("../lib/timer"); | ||
/**
 * Registers `tasktime stop` — closes the active timer (optionally appending
 * notes/tags) and prints the completion summary with history-based stats.
 */
function registerStopCommand(program) {
    program
        .command("stop")
        .description("Stop the active benchmark timer")
        .option("--notes <notes>", "Additional notes")
        .option("--tags <tags>", "Additional comma-separated tags")
        .action(async (options) => {
            const extraTags = (0, format_1.parseTags)(options.tags);
            const result = await (0, timer_1.stopTimer)({ notes: options.notes, tags: extraTags });
            const durations = await (0, storage_1.getWorkflowHistoryDurations)(result.run.workflow);
            console.log((0, format_1.formatRunCompletion)(result, durations));
        });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.normalizeWorkflowName = normalizeWorkflowName; | ||
| exports.parseTags = parseTags; | ||
| exports.formatDuration = formatDuration; | ||
| exports.formatDurationClock = formatDurationClock; | ||
| exports.formatPercentage = formatPercentage; | ||
| exports.formatRunCompletion = formatRunCompletion; | ||
// Character width of the mastery progress bar rendered by progressBar().
const BAR_WIDTH = 20;
// A run at or under this duration (3 minutes) counts toward mastery.
const MASTERY_TARGET_MS = 180000;
// Number of most-recent runs scored by the mastery bar in formatRunCompletion().
const MASTERY_WINDOW = 10;
/**
 * Slugify a workflow name: lowercase, every run of non-alphanumerics becomes
 * a single dash, and leading/trailing dashes are trimmed.
 */
function normalizeWorkflowName(name) {
    const slug = name.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-");
    return slug.replace(/^-+|-+$/g, "");
}
/**
 * Parse a comma-separated tag string into a de-duplicated array: each piece
 * is trimmed and lowercased, empties are dropped, first-seen order is kept.
 * Missing input yields an empty array.
 */
function parseTags(input) {
    if (!input) {
        return [];
    }
    const seen = new Set();
    for (const piece of input.split(",")) {
        const tag = piece.trim().toLowerCase();
        if (tag) {
            seen.add(tag);
        }
    }
    return [...seen];
}
/**
 * Human-friendly duration: "<n>ms" under one second, "<s>s" under a minute,
 * otherwise "<m>m <s>s" (minutes are not rolled into hours — 2h renders as
 * "120m 0s"; see formatDurationClock for HH:MM:SS).
 */
function formatDuration(durationMs) {
    if (durationMs < 1000) {
        return `${durationMs}ms`;
    }
    const totalSeconds = Math.floor(durationMs / 1000);
    const minutes = Math.floor(totalSeconds / 60);
    const seconds = totalSeconds % 60;
    return minutes > 0 ? `${minutes}m ${seconds}s` : `${seconds}s`;
}
// Render a duration as zero-padded HH:MM:SS (hours may exceed 24; sub-second
// remainder is discarded).
function formatDurationClock(durationMs) {
    const totalSeconds = Math.floor(durationMs / 1000);
    const parts = [
        Math.floor(totalSeconds / 3600),
        Math.floor((totalSeconds % 3600) / 60),
        totalSeconds % 60,
    ];
    return parts.map((part) => String(part).padStart(2, "0")).join(":");
}
// One-decimal percentage; strictly positive values get an explicit "+".
function formatPercentage(value) {
    if (value > 0) {
        return `+${value.toFixed(1)}%`;
    }
    return `${value.toFixed(1)}%`;
}
/**
 * Render a fixed-width text progress bar like "[████░░…]".
 * `current` is clamped into [0, total]; a zero total renders an empty bar.
 */
function progressBar(current, total) {
    const ratio = total === 0 ? 0 : Math.min(Math.max(current, 0), total) / total;
    const filledCells = Math.round(ratio * BAR_WIDTH);
    const bar = "█".repeat(filledCells) + "░".repeat(BAR_WIDTH - filledCells);
    return `[${bar}]`;
}
/**
 * Build the multi-line summary printed after a run completes: duration,
 * optional personal-best banner, delta vs the historical average, an
 * improvement streak line, and a "mastery" progress bar scored over the last
 * MASTERY_WINDOW runs (a run counts when it is at or under MASTERY_TARGET_MS).
 *
 * `workflowHistoryDurations` is a flat list of past durations in ms;
 * NOTE(review): assumed oldest-first so slice(-MASTERY_WINDOW) takes the most
 * recent runs — confirm against getWorkflowHistoryDurations.
 */
function formatRunCompletion(result, workflowHistoryDurations) {
    const lines = [];
    lines.push(`✅ ${result.run.workflow} completed in ${formatDuration(result.run.durationMs)}`);
    lines.push("");
    if (result.personalBest) {
        lines.push("🏆 NEW PERSONAL BEST!");
        // previousBestMs is absent on the very first run of a workflow.
        if (result.previousBestMs !== undefined) {
            lines.push(` Previous: ${formatDuration(result.previousBestMs)} -> Now: ${formatDuration(result.run.durationMs)}`);
        }
        lines.push("");
    }
    // A positive improvementVsAveragePct means this run took longer than average.
    const avgDirection = result.improvementVsAveragePct >= 0 ? "slower" : "faster";
    lines.push(`📈 ${Math.abs(result.improvementVsAveragePct).toFixed(1)}% ${avgDirection} than your average`);
    if (result.improvementStreak >= 2) {
        lines.push(`🔥 ${result.improvementStreak}-run improvement streak!`);
    }
    lines.push("");
    // Mastery: fraction of the most recent runs that beat the target duration.
    const recent = workflowHistoryDurations.slice(-MASTERY_WINDOW);
    const masteredRuns = recent.filter((ms) => ms <= MASTERY_TARGET_MS).length;
    const bar = progressBar(masteredRuns, MASTERY_WINDOW);
    const percent = Math.round((masteredRuns / MASTERY_WINDOW) * 100);
    lines.push("Progress to mastery:");
    lines.push(`${bar} ${percent}% (${masteredRuns}/${MASTERY_WINDOW} sub-${formatDuration(MASTERY_TARGET_MS)} runs)`);
    return lines.join("\n");
}
| "use strict"; | ||
| var __importDefault = (this && this.__importDefault) || function (mod) { | ||
| return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
| }; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.generateTrendGraph = generateTrendGraph; | ||
| exports.generateCompareGraph = generateCompareGraph; | ||
| exports.generateReportGraph = generateReportGraph; | ||
| exports.generatePdfReport = generatePdfReport; | ||
| const node_fs_1 = require("node:fs"); | ||
| const node_os_1 = __importDefault(require("node:os")); | ||
| const node_path_1 = __importDefault(require("node:path")); | ||
| const node_child_process_1 = require("node:child_process"); | ||
// The compiled command modules sit two directory levels below the package
// root, so walk up twice from the caller's __dirname.
function projectRootFromCurrentDir(currentDir) {
    const parent = node_path_1.default.resolve(currentDir, "..");
    return node_path_1.default.resolve(parent, "..");
}
/**
 * Write `payload` to a temp JSON file and execute scripts/<scriptName> as
 * `uv run --with <pkg>... python3 <script> <payload.json>` with inherited
 * stdio. Rejects when `uv` cannot be spawned or the script exits non-zero.
 * The temp directory is removed even on failure.
 *
 * NOTE(review): requires the `uv` binary on PATH at runtime — confirm this
 * prerequisite is documented for users.
 */
async function runPythonScript(scriptName, payload, currentDir, pythonPackages) {
    const root = projectRootFromCurrentDir(currentDir);
    const scriptPath = node_path_1.default.join(root, "scripts", scriptName);
    const tempDir = await node_fs_1.promises.mkdtemp(node_path_1.default.join(node_os_1.default.tmpdir(), "tasktime-"));
    const payloadPath = node_path_1.default.join(tempDir, "payload.json");
    try {
        await node_fs_1.promises.writeFile(payloadPath, JSON.stringify(payload, null, 2), "utf8");
        await new Promise((resolve, reject) => {
            // Each required package is passed to uv as an ephemeral dependency.
            const args = ["run"];
            for (const packageName of pythonPackages) {
                args.push("--with", packageName);
            }
            args.push("python3", scriptPath, payloadPath);
            const child = (0, node_child_process_1.spawn)("uv", args, { stdio: "inherit" });
            child.on("error", (error) => {
                reject(new Error(`Unable to execute python script "${scriptName}": ${error.message}`));
            });
            child.on("close", (code) => {
                if (code === 0) {
                    resolve();
                }
                else {
                    reject(new Error(`Python script "${scriptName}" exited with code ${code}`));
                }
            });
        });
    }
    finally {
        // Always clean up the payload temp directory.
        await node_fs_1.promises.rm(tempDir, { recursive: true, force: true });
    }
}
// Render a duration-trend PNG for one workflow via scripts/graph.py (matplotlib).
async function generateTrendGraph(payload, currentDir) {
    await runPythonScript("graph.py", payload, currentDir, ["matplotlib"]);
}
// Render a workflow-comparison PNG via scripts/compare.py (matplotlib).
async function generateCompareGraph(payload, currentDir) {
    await runPythonScript("compare.py", payload, currentDir, ["matplotlib"]);
}
// Render a weekly/category report PNG via scripts/report.py (matplotlib).
async function generateReportGraph(payload, currentDir) {
    await runPythonScript("report.py", payload, currentDir, ["matplotlib"]);
}
// Assemble the final PDF via scripts/pdf.py (reportlab).
async function generatePdfReport(payload, currentDir) {
    await runPythonScript("pdf.py", payload, currentDir, ["reportlab"]);
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||
| exports.average = average; | ||
| exports.stdev = stdev; | ||
| exports.improvementVsAveragePct = improvementVsAveragePct; | ||
| exports.calculateImprovementStreak = calculateImprovementStreak; | ||
| exports.inferTrend = inferTrend; | ||
| exports.linearRegression = linearRegression; | ||
// Arithmetic mean of a number list; an empty list yields 0 rather than NaN.
function average(values) {
    if (values.length === 0) {
        return 0;
    }
    let total = 0;
    for (const value of values) {
        total += value;
    }
    return total / values.length;
}
// Sample standard deviation (n - 1 denominator); fewer than two samples → 0.
function stdev(values) {
    const n = values.length;
    if (n <= 1) {
        return 0;
    }
    const mean = average(values);
    let squaredError = 0;
    for (const value of values) {
        squaredError += (value - mean) ** 2;
    }
    return Math.sqrt(squaredError / (n - 1));
}
// Percent delta of a run vs the historical average (positive = slower).
// A zero/negative average cannot be compared against, so return 0.
function improvementVsAveragePct(durationMs, avgMs) {
    if (avgMs <= 0) {
        return 0;
    }
    const delta = durationMs - avgMs;
    return (delta / avgMs) * 100;
}
/**
 * Length of the trailing run of strictly-decreasing durations, counted from
 * the newest entry backwards (the run that started the chain is included).
 * With zero or one runs the streak is simply the run count.
 */
function calculateImprovementStreak(history) {
    const n = history.length;
    if (n < 2) {
        return n;
    }
    let streak = 1;
    let idx = n - 1;
    while (idx > 0 && history[idx].durationMs < history[idx - 1].durationMs) {
        streak += 1;
        idx -= 1;
    }
    return streak;
}
/**
 * Classify a workflow's trend by comparing the mean duration of the first
 * half of the history against the second half: a swing of more than 5% in
 * either direction counts as a trend, otherwise "stable". Fewer than three
 * runs is always "stable".
 */
function inferTrend(history) {
    if (history.length < 3) {
        return "stable";
    }
    const mid = Math.floor(history.length / 2);
    const earlyAvg = average(history.slice(0, mid).map((run) => run.durationMs));
    const lateAvg = average(history.slice(mid).map((run) => run.durationMs));
    if (lateAvg < earlyAvg * 0.95) {
        return "improving";
    }
    return lateAvg > earlyAvg * 1.05 ? "regressing" : "stable";
}
/**
 * Ordinary least squares fit for y = slope * x + intercept, plus R².
 * Mismatched or empty inputs yield the degenerate zero fit; a degenerate x
 * distribution (all equal) yields a flat fit at the mean of y.
 */
function linearRegression(x, y) {
    if (x.length === 0 || x.length !== y.length) {
        return { slope: 0, intercept: 0, r2: 0 };
    }
    const n = x.length;
    let sumX = 0;
    let sumY = 0;
    let sumXY = 0;
    let sumXX = 0;
    for (let i = 0; i < n; i += 1) {
        sumX += x[i];
        sumY += y[i];
        sumXY += x[i] * y[i];
        sumXX += x[i] * x[i];
    }
    const denominator = n * sumXX - sumX * sumX;
    if (denominator === 0) {
        return { slope: 0, intercept: sumY / n, r2: 0 };
    }
    const slope = (n * sumXY - sumX * sumY) / denominator;
    const intercept = (sumY - slope * sumX) / n;
    const meanY = sumY / n;
    let ssTotal = 0;
    let ssResidual = 0;
    for (let i = 0; i < n; i += 1) {
        ssTotal += (y[i] - meanY) ** 2;
        ssResidual += (y[i] - (slope * x[i] + intercept)) ** 2;
    }
    // A constant y (ssTotal 0) makes R² meaningless; report 0.
    return { slope, intercept, r2: ssTotal === 0 ? 0 : 1 - ssResidual / ssTotal };
}
"use strict";
// CommonJS interop helper emitted by the TypeScript compiler.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
// Public API of the storage module: timer state, run persistence, and
// query helpers over the persisted index.
exports.getStorageRoot = getStorageRoot;
exports.getActiveTimer = getActiveTimer;
exports.startTimer = startTimer;
exports.recordRun = recordRun;
exports.stopTimer = stopTimer;
exports.getStatus = getStatus;
exports.listRuns = listRuns;
exports.searchRuns = searchRuns;
exports.getWorkflowRuns = getWorkflowRuns;
exports.getWorkflowHistory = getWorkflowHistory;
exports.getWorkflowEntry = getWorkflowEntry;
exports.getWorkflowHistoryDurations = getWorkflowHistoryDurations;
exports.getCompareRuns = getCompareRuns;
exports.getCategorySummary = getCategorySummary;
exports.getRunsByCategory = getRunsByCategory;
exports.getWeeklySummary = getWeeklySummary;
exports.formatRunLine = formatRunLine;
const node_os_1 = __importDefault(require("node:os"));
const node_path_1 = __importDefault(require("node:path"));
const node_fs_1 = require("node:fs");
const format_1 = require("./format");
const stats_1 = require("./stats");
// On-disk layout inside the storage root.
const INDEX_FILE = ".tasktime.json";
const WORKFLOWS_DIR = "workflows";
const CATEGORIES_DIR = "categories";
// Root directory for all data; overridable via TASKTIME_STORAGE_ROOT,
// otherwise a fixed path under the user's home directory.
const STORAGE_ROOT = process.env.TASKTIME_STORAGE_ROOT ?? node_path_1.default.join(node_os_1.default.homedir(), ".openclaw/workspace/memory/benchmarks");
// Shape of a brand-new, empty index (used when no index file exists).
function defaultIndex() {
    const emptyIndex = {
        workflows: {},
        categories: [],
        totalRuns: 0,
        activeTimer: null,
    };
    return emptyIndex;
}
// Absolute path of the JSON index file.
function getIndexPath() {
    return node_path_1.default.join(STORAGE_ROOT, INDEX_FILE);
}
// Markdown file holding one workflow's stats and history.
function getWorkflowPath(workflow) {
    return node_path_1.default.join(STORAGE_ROOT, WORKFLOWS_DIR, `${workflow}.md`);
}
// Markdown file aggregating every run of one category.
function getCategoryPath(category) {
    return node_path_1.default.join(STORAGE_ROOT, CATEGORIES_DIR, `${category}.md`);
}
// Daily log file named after the local date (YYYY-MM-DD.md).
function getDailyPath(date) {
    return node_path_1.default.join(STORAGE_ROOT, `${toLocalDateString(date)}.md`);
}
// Resolve true when `filePath` exists and is accessible, false on any
// access error.
async function pathExists(filePath) {
    return node_fs_1.promises.access(filePath).then(
        () => true,
        () => false,
    );
}
// Create the storage root plus its workflows/ and categories/
// subdirectories. Idempotent thanks to { recursive: true }.
async function ensureStorage() {
    await node_fs_1.promises.mkdir(STORAGE_ROOT, { recursive: true });
    await node_fs_1.promises.mkdir(node_path_1.default.join(STORAGE_ROOT, WORKFLOWS_DIR), { recursive: true });
    await node_fs_1.promises.mkdir(node_path_1.default.join(STORAGE_ROOT, CATEGORIES_DIR), { recursive: true });
}
// Serialize the in-memory index to disk using the persisted snake_case
// schema (best_ms, duration_ms, ...) that readIndex expects.
async function writeIndex(index) {
    await ensureStorage();
    const persisted = {
        workflows: Object.fromEntries(Object.entries(index.workflows).map(([workflow, entry]) => [
            workflow,
            {
                category: entry.category,
                runs: entry.runs,
                best_ms: entry.bestMs,
                avg_ms: entry.avgMs,
                worst_ms: entry.worstMs,
                last_run: entry.lastRun,
                history: entry.history.map((run) => ({
                    workflow: run.workflow,
                    category: run.category,
                    tags: run.tags,
                    notes: run.notes,
                    session: run.session,
                    duration_ms: run.durationMs,
                    started_at: run.startedAt,
                    ended_at: run.endedAt,
                    command: run.command,
                    command_exit_code: run.commandExitCode,
                })),
            },
        ])),
        categories: index.categories,
        total_runs: index.totalRuns,
        active_timer: index.activeTimer
            ? {
                workflow: index.activeTimer.workflow,
                category: index.activeTimer.category,
                tags: index.activeTimer.tags,
                notes: index.activeTimer.notes,
                session: index.activeTimer.session,
                started_at: index.activeTimer.startedAt,
            }
            : null,
    };
    // Pretty-printed so the index stays human-diffable.
    await node_fs_1.promises.writeFile(getIndexPath(), JSON.stringify(persisted, null, 2), "utf8");
}
// Load the index from disk, creating a fresh one when the file is
// missing. Field names are read tolerantly: the persisted snake_case
// schema is preferred, with legacy camelCase accepted as a fallback,
// and missing numeric/string fields defaulting to 0 / "".
async function readIndex() {
    await ensureStorage();
    const indexPath = getIndexPath();
    if (!(await pathExists(indexPath))) {
        const initial = defaultIndex();
        await writeIndex(initial);
        return initial;
    }
    const raw = await node_fs_1.promises.readFile(indexPath, "utf8");
    try {
        const parsed = JSON.parse(raw);
        const workflows = Object.fromEntries(Object.entries(parsed.workflows ?? {}).map(([workflow, entry]) => [
            workflow,
            {
                category: entry.category,
                runs: entry.runs,
                bestMs: entry.best_ms ?? entry.bestMs ?? 0,
                avgMs: entry.avg_ms ?? entry.avgMs ?? 0,
                worstMs: entry.worst_ms ?? entry.worstMs ?? 0,
                lastRun: entry.last_run ?? entry.lastRun ?? "",
                history: (entry.history ?? []).map((run) => ({
                    workflow: run.workflow,
                    category: run.category,
                    tags: run.tags ?? [],
                    notes: run.notes,
                    session: run.session,
                    durationMs: run.duration_ms ?? run.durationMs ?? 0,
                    startedAt: run.started_at ?? run.startedAt ?? "",
                    endedAt: run.ended_at ?? run.endedAt ?? "",
                    command: run.command,
                    commandExitCode: run.command_exit_code ??
                        run.commandExitCode,
                })),
            },
        ]));
        return {
            workflows,
            categories: parsed.categories ?? [],
            totalRuns: parsed.total_runs ?? parsed.totalRuns ?? 0,
            activeTimer: parsed.active_timer
                ? {
                    workflow: parsed.active_timer.workflow,
                    category: parsed.active_timer.category,
                    tags: parsed.active_timer.tags ?? [],
                    notes: parsed.active_timer.notes,
                    session: parsed.active_timer.session,
                    startedAt: parsed.active_timer.started_at,
                }
                : parsed.activeTimer ?? null,
        };
    }
    catch {
        // NOTE(review): a corrupt index file is silently replaced with an
        // empty index, discarding prior data — confirm this is intended.
        const initial = defaultIndex();
        await writeIndex(initial);
        return initial;
    }
}
// Local-time calendar date as "YYYY-MM-DD" (not UTC); used for daily
// file names and table rows.
function toLocalDateString(date) {
    const pad = (value) => String(value).padStart(2, "0");
    return [date.getFullYear(), pad(date.getMonth() + 1), pad(date.getDate())].join("-");
}
// Local wall-clock time as "HH:MM:SS" with zero padding.
function toLocalTimeString(date) {
    const pad = (value) => String(value).padStart(2, "0");
    return [date.getHours(), date.getMinutes(), date.getSeconds()].map(pad).join(":");
}
// Collect every run across all workflows, ordered oldest -> newest by
// end timestamp. Returns a fresh array; entry histories are untouched.
function flattenRuns(index) {
    const runs = [];
    for (const entry of Object.values(index.workflows)) {
        runs.push(...entry.history);
    }
    runs.sort((a, b) => Date.parse(a.endedAt) - Date.parse(b.endedAt));
    return runs;
}
// Make a value safe for a markdown table cell: escape pipes, flatten
// newlines to spaces, and trim the result.
function sanitizeMarkdownValue(value) {
    const withEscapedPipes = value.split("|").join("\\|");
    return withEscapedPipes.split("\n").join(" ").trim();
}
// Sample standard deviation (n - 1 denominator); 0 when there are
// fewer than two samples, where the statistic is undefined.
function computeStdev(values) {
    const n = values.length;
    if (n <= 1) {
        return 0;
    }
    const mean = values.reduce((total, value) => total + value, 0) / n;
    let squares = 0;
    for (const value of values) {
        squares += (value - mean) ** 2;
    }
    return Math.sqrt(squares / (n - 1));
}
// Append a run entry to the local-date daily markdown log, creating the
// file with a "# Benchmarks: <date>" header on first write for that day.
async function writeDailyLog(run, personalBest, previousBestMs) {
    const endDate = new Date(run.endedAt);
    const filePath = getDailyPath(endDate);
    const exists = await pathExists(filePath);
    if (!exists) {
        const header = `# Benchmarks: ${toLocalDateString(endDate)}\n\n`;
        await node_fs_1.promises.writeFile(filePath, header, "utf8");
    }
    const tags = run.tags.length > 0 ? run.tags.map((tag) => `#${tag}`).join(" ") : "(none)";
    const lines = [];
    lines.push(`## ${run.workflow} @ ${toLocalTimeString(endDate)}`);
    lines.push(`- Duration: ${(0, format_1.formatDuration)(run.durationMs)} (${run.durationMs}ms)`);
    lines.push(`- Category: ${run.category ?? "uncategorized"}`);
    lines.push(`- Tags: ${tags}`);
    if (run.notes) {
        lines.push(`- Notes: ${run.notes}`);
    }
    if (personalBest) {
        if (previousBestMs !== undefined) {
            // NOTE(review): previousBestMs === 0 would divide by zero here
            // (durations are rounded ms, so a 0ms best is possible) — verify.
            const pct = Math.round(((previousBestMs - run.durationMs) / previousBestMs) * 100);
            lines.push(`- Personal Best: ✅ (prev: ${(0, format_1.formatDuration)(previousBestMs)} -> ${pct}% faster)`);
        }
        else {
            lines.push("- Personal Best: ✅ (first run)");
        }
    }
    if (run.session) {
        lines.push(`- Session: ${run.session}`);
    }
    if (run.command) {
        lines.push(`- Command: \`${run.command}\``);
        lines.push(`- Exit Code: ${run.commandExitCode ?? 0}`);
    }
    const block = `${lines.join("\n")}\n\n`;
    await node_fs_1.promises.appendFile(filePath, block, "utf8");
}
// Rewrite the per-workflow markdown file: header, aggregate stats, and
// a newest-first history table with personal-best markers.
async function writeWorkflowFile(workflow, entry) {
    const filePath = getWorkflowPath(workflow);
    // Sort a copy oldest -> newest; entry.history is left untouched.
    const history = [...entry.history].sort((a, b) => new Date(a.endedAt).getTime() - new Date(b.endedAt).getTime());
    const created = history.length > 0 ? toLocalDateString(new Date(history[0].startedAt)) : toLocalDateString(new Date());
    // NOTE(review): when history is empty, history[0] is undefined, so
    // best/worst below are undefined and the stats lines would throw —
    // callers appear to always pass a non-empty history; confirm.
    const best = history.reduce((min, run) => (run.durationMs < min.durationMs ? run : min), history[0]);
    const worst = history.reduce((max, run) => (run.durationMs > max.durationMs ? run : max), history[0]);
    const trend = (0, stats_1.inferTrend)(history);
    const trendText = trend === "improving" ? "down improving" : trend === "regressing" ? "up regressing" : "flat stable";
    const lines = [];
    lines.push(`# Workflow: ${workflow}`);
    lines.push(`Category: ${entry.category ?? "uncategorized"}`);
    lines.push(`Created: ${created}`);
    lines.push(`Total Runs: ${entry.runs}`);
    lines.push("");
    lines.push("## Stats");
    lines.push(`- Average: ${(0, format_1.formatDuration)(entry.avgMs)}`);
    lines.push(`- Best: ${(0, format_1.formatDuration)(entry.bestMs)} (${toLocalDateString(new Date(best.endedAt))})`);
    lines.push(`- Worst: ${(0, format_1.formatDuration)(entry.worstMs)} (${toLocalDateString(new Date(worst.endedAt))})`);
    lines.push(`- Trend: ${trendText}`);
    lines.push("");
    lines.push("## History");
    lines.push("| Date | Duration | Notes | PB? |");
    lines.push("|------|----------|-------|-----|");
    // Forward pass: remember every run that set a new running best.
    const personalBestKeys = new Set();
    let runningBest = Number.POSITIVE_INFINITY;
    for (const run of history) {
        if (run.durationMs < runningBest) {
            runningBest = run.durationMs;
            personalBestKeys.add(`${run.endedAt}:${run.durationMs}`);
        }
    }
    // Emit the table rows newest-first.
    for (const run of [...history].reverse()) {
        const date = toLocalDateString(new Date(run.endedAt));
        const notes = sanitizeMarkdownValue(run.notes ?? "");
        const pb = personalBestKeys.has(`${run.endedAt}:${run.durationMs}`) ? "✅" : "";
        lines.push(`| ${date} | ${(0, format_1.formatDuration)(run.durationMs)} | ${notes} | ${pb} |`);
    }
    lines.push("");
    await node_fs_1.promises.writeFile(filePath, `${lines.join("\n")}`, "utf8");
}
// Rewrite a category's markdown file: summary header plus a
// newest-first table of every run in the category.
async function writeCategoryFile(category, runs) {
    const filePath = getCategoryPath(category);
    // Sort a copy newest -> oldest; caller's array is left untouched.
    const sorted = [...runs].sort((a, b) => new Date(b.endedAt).getTime() - new Date(a.endedAt).getTime());
    const durations = sorted.map((run) => run.durationMs);
    const lines = [];
    lines.push(`# Category: ${category}`);
    lines.push(`Total Runs: ${sorted.length}`);
    lines.push(`Average: ${(0, format_1.formatDuration)((0, stats_1.average)(durations))}`);
    lines.push("");
    lines.push("## History");
    lines.push("| Date | Workflow | Duration | Tags | Notes |");
    lines.push("|------|----------|----------|------|-------|");
    for (const run of sorted) {
        const date = toLocalDateString(new Date(run.endedAt));
        const tags = run.tags.map((tag) => `#${tag}`).join(" ");
        const notes = sanitizeMarkdownValue(run.notes ?? "");
        lines.push(`| ${date} | ${run.workflow} | ${(0, format_1.formatDuration)(run.durationMs)} | ${tags} | ${notes} |`);
    }
    lines.push("");
    await node_fs_1.promises.writeFile(filePath, lines.join("\n"), "utf8");
}
// Append `run` to an (optional) existing workflow entry and recompute
// the aggregate stats from the full history. The run's category wins
// over the previous one when both are set.
function createWorkflowEntry(existing, run) {
    const history = (existing?.history ?? []).concat(run);
    const durations = history.map((item) => item.durationMs);
    let best = durations[0];
    let worst = durations[0];
    let total = 0;
    for (const value of durations) {
        total += value;
        if (value < best) {
            best = value;
        }
        if (value > worst) {
            worst = value;
        }
    }
    return {
        category: run.category ?? existing?.category,
        runs: history.length,
        bestMs: best,
        avgMs: total / durations.length,
        worstMs: worst,
        lastRun: run.endedAt,
        history,
    };
}
// Rewrite every category markdown file from the current in-memory
// index. flattenRuns sorts the entire run set, so it is computed once
// up front instead of once per category (was accidentally O(categories
// * runs log runs)).
async function rebuildCategoryFiles(index) {
    const allRuns = flattenRuns(index);
    for (const category of index.categories) {
        const runs = allRuns.filter((run) => run.category === category);
        await writeCategoryFile(category, runs);
    }
}
// Absolute directory where all tasktime data lives.
function getStorageRoot() {
    return STORAGE_ROOT;
}
// Currently running timer from the persisted index, or null.
async function getActiveTimer() {
    const index = await readIndex();
    return index.activeTimer;
}
// Start a timer for `input.workflow` and persist it in the index.
// Only one timer may run at a time; throws when the workflow name
// normalizes to empty or another timer is already active.
async function startTimer(input) {
    const workflow = (0, format_1.normalizeWorkflowName)(input.workflow);
    if (!workflow) {
        throw new Error("Workflow name cannot be empty");
    }
    const index = await readIndex();
    if (index.activeTimer) {
        throw new Error(`Timer already running for "${index.activeTimer.workflow}"`);
    }
    const timer = {
        workflow,
        category: input.category?.trim(),
        tags: input.tags ?? [],
        notes: input.notes?.trim(),
        session: input.session?.trim(),
        startedAt: new Date().toISOString(),
    };
    index.activeTimer = timer;
    await writeIndex(index);
    return timer;
}
// Validate and persist a completed run: updates the workflow entry,
// the JSON index, the daily log, the workflow markdown file, and all
// category files. Clears any active timer as a side effect.
async function recordRun(input) {
    const workflow = (0, format_1.normalizeWorkflowName)(input.workflow);
    if (!workflow) {
        throw new Error("Workflow name cannot be empty");
    }
    if (!Number.isFinite(input.durationMs) || input.durationMs < 0) {
        throw new Error("Duration must be a non-negative number");
    }
    const index = await readIndex();
    const existing = index.workflows[workflow];
    // When timestamps are not supplied, back-date the start by the duration.
    const startedAt = input.startedAt ?? new Date(Date.now() - input.durationMs).toISOString();
    const endedAt = input.endedAt ?? new Date().toISOString();
    // Tags are trimmed, lower-cased, and de-duplicated.
    const tags = Array.from(new Set((input.tags ?? []).map((tag) => tag.trim().toLowerCase()).filter(Boolean)));
    // Fall back to the workflow's previous category when none is given.
    const category = input.category?.trim() || existing?.category;
    const notes = input.notes?.trim();
    const session = input.session?.trim();
    const run = {
        workflow,
        category,
        tags,
        notes,
        session,
        durationMs: Math.round(input.durationMs),
        startedAt,
        endedAt,
        command: input.command,
        commandExitCode: input.commandExitCode,
    };
    const previousBestMs = existing?.bestMs;
    const updated = createWorkflowEntry(existing, run);
    // The first run of a workflow always counts as a personal best.
    const personalBest = previousBestMs === undefined || run.durationMs < previousBestMs;
    const streak = (0, stats_1.calculateImprovementStreak)(updated.history);
    const improvement = (0, stats_1.improvementVsAveragePct)(run.durationMs, updated.avgMs);
    index.workflows[workflow] = updated;
    index.activeTimer = null;
    if (category && !index.categories.includes(category)) {
        index.categories.push(category);
        index.categories.sort();
    }
    index.totalRuns = flattenRuns(index).length;
    await writeIndex(index);
    await writeDailyLog(run, personalBest, previousBestMs);
    await writeWorkflowFile(workflow, updated);
    await rebuildCategoryFiles(index);
    return {
        run,
        personalBest,
        previousBestMs,
        averageMs: updated.avgMs,
        improvementVsAveragePct: improvement,
        improvementStreak: streak,
    };
}
// Stop the active timer and record the elapsed run via recordRun.
// Throws when no timer is running. Extra notes are appended with
// " | "; extra tags are unioned with the timer's tags.
async function stopTimer(input) {
    const index = await readIndex();
    if (!index.activeTimer) {
        throw new Error("No active timer to stop");
    }
    const active = index.activeTimer;
    const endedAt = new Date();
    const startedAt = new Date(active.startedAt);
    // Clamp to zero in case of clock skew.
    const durationMs = Math.max(0, endedAt.getTime() - startedAt.getTime());
    const notes = [active.notes, input?.notes].filter(Boolean).join(" | ") || undefined;
    const tags = Array.from(new Set([...(active.tags ?? []), ...(input?.tags ?? [])]));
    return recordRun({
        workflow: active.workflow,
        category: active.category,
        tags,
        notes,
        session: active.session,
        startedAt: startedAt.toISOString(),
        endedAt: endedAt.toISOString(),
        durationMs,
    });
}
// Report the active timer (if any) together with the elapsed
// milliseconds since it started, clamped at zero.
async function getStatus() {
    const activeTimer = await getActiveTimer();
    if (activeTimer === null) {
        return { activeTimer: null };
    }
    const startedMs = new Date(activeTimer.startedAt).getTime();
    const elapsedMs = Math.max(0, Date.now() - startedMs);
    return { activeTimer, elapsedMs };
}
// Most recent runs across all workflows, newest first; at least one
// run is always requested.
async function listRuns(last = 10) {
    const index = await readIndex();
    const limit = Math.max(1, last);
    return flattenRuns(index).slice(-limit).reverse();
}
// Case-insensitive substring search over workflow, category, tags,
// notes, session, and command, newest runs first. A blank query
// matches nothing.
async function searchRuns(query, last = 25) {
    const index = await readIndex();
    const needle = query.toLowerCase().trim();
    if (!needle) {
        return [];
    }
    const limit = Math.max(1, last);
    const matches = [];
    for (const run of flattenRuns(index).reverse()) {
        const haystack = [
            run.workflow,
            run.category ?? "",
            run.tags.join(" "),
            run.notes ?? "",
            run.session ?? "",
            run.command ?? "",
        ]
            .join(" ")
            .toLowerCase();
        if (haystack.includes(needle)) {
            matches.push(run);
            if (matches.length === limit) {
                break;
            }
        }
    }
    return matches;
}
// Last `last` runs of a workflow, oldest first (delegates validation
// to getWorkflowHistory).
async function getWorkflowRuns(workflowName, last = 20) {
    const limit = Math.max(1, last);
    const history = await getWorkflowHistory(workflowName);
    return history.slice(-limit);
}
// Full run history for a workflow (oldest first, defensive copy);
// throws when the workflow is unknown.
async function getWorkflowHistory(workflowName) {
    const workflow = (0, format_1.normalizeWorkflowName)(workflowName);
    const index = await readIndex();
    const entry = index.workflows[workflow];
    if (!entry) {
        throw new Error(`Workflow "${workflow}" not found`);
    }
    return [...entry.history];
}
// Aggregate entry for a workflow with a defensive copy of its history;
// throws when the workflow is unknown.
async function getWorkflowEntry(workflowName) {
    const workflow = (0, format_1.normalizeWorkflowName)(workflowName);
    const index = await readIndex();
    const entry = index.workflows[workflow];
    if (!entry) {
        throw new Error(`Workflow "${workflow}" not found`);
    }
    return {
        ...entry,
        history: [...entry.history],
    };
}
// Durations (ms) of a workflow's runs, oldest first.
// NOTE(review): returns [] for an unknown workflow instead of throwing
// like its siblings above — verify callers expect this asymmetry.
async function getWorkflowHistoryDurations(workflowName) {
    const workflow = (0, format_1.normalizeWorkflowName)(workflowName);
    const index = await readIndex();
    const entry = index.workflows[workflow];
    if (!entry) {
        return [];
    }
    return entry.history.map((run) => run.durationMs);
}
// Recent runs (up to `last`, oldest first) for each named workflow,
// for side-by-side comparison. Throws on the first unknown name.
async function getCompareRuns(workflows, last = 50) {
    const index = await readIndex();
    return workflows.map((workflowName) => {
        const workflow = (0, format_1.normalizeWorkflowName)(workflowName);
        const entry = index.workflows[workflow];
        if (!entry) {
            throw new Error(`Workflow "${workflow}" not found`);
        }
        return {
            name: workflow,
            runs: entry.history.slice(-Math.max(1, last)),
        };
    });
}
// Per-workflow aggregates (average, sample stdev, run count) for one
// category, sorted fastest average first. Category matching is
// case-insensitive. Throws when the category has no runs.
async function getCategorySummary(category) {
    const normalizedCategory = category.trim().toLowerCase();
    const index = await readIndex();
    const summaries = [];
    for (const [workflow, entry] of Object.entries(index.workflows)) {
        const runs = entry.history.filter((run) => (run.category ?? "").toLowerCase() === normalizedCategory);
        if (runs.length === 0) {
            continue;
        }
        const durations = runs.map((run) => run.durationMs);
        summaries.push({
            workflow,
            averageMs: (0, stats_1.average)(durations),
            stdevMs: computeStdev(durations),
            runs: runs.length,
        });
    }
    if (summaries.length === 0) {
        throw new Error(`No runs found for category "${normalizedCategory}"`);
    }
    return summaries.sort((a, b) => a.averageMs - b.averageMs);
}
// Up to `last` most recent runs of a category, case-insensitively
// matched. Throws when the category has no runs.
// NOTE(review): the slice(-n) keeps chronological (oldest-first) order,
// unlike listRuns which is newest-first — confirm this is intended.
async function getRunsByCategory(category, last = 50) {
    const normalizedCategory = category.trim().toLowerCase();
    const index = await readIndex();
    const runs = flattenRuns(index)
        .filter((run) => (run.category ?? "").toLowerCase() === normalizedCategory)
        .slice(-Math.max(1, last));
    if (runs.length === 0) {
        throw new Error(`No runs found for category "${normalizedCategory}"`);
    }
    return runs;
}
// Day-by-day summary for the trailing `days` window (local dates).
// Each day buckets total duration per category and counts runs plus
// improvements/regressions versus the previous run of the same workflow.
async function getWeeklySummary(days = 7) {
    const index = await readIndex();
    const allRuns = flattenRuns(index);
    const dayMap = new Map();
    const now = new Date();
    const startDate = new Date(now);
    startDate.setHours(0, 0, 0, 0);
    startDate.setDate(startDate.getDate() - (days - 1));
    // Seed one empty bucket per day so days without runs still appear.
    for (let offset = 0; offset < days; offset += 1) {
        const date = new Date(startDate);
        date.setDate(startDate.getDate() + offset);
        const key = toLocalDateString(date);
        dayMap.set(key, {
            date: key,
            categories: {},
            runCount: 0,
            improvements: 0,
            regressions: 0,
        });
    }
    // Tracks the previous duration per workflow across ALL runs, so the
    // first run inside the window compares against earlier history too.
    const previousByWorkflow = new Map();
    for (const run of allRuns) {
        const day = toLocalDateString(new Date(run.endedAt));
        const current = dayMap.get(day);
        const previousDuration = previousByWorkflow.get(run.workflow);
        if (current) {
            const category = run.category ?? "uncategorized";
            current.categories[category] = (current.categories[category] ?? 0) + run.durationMs;
            current.runCount += 1;
            if (previousDuration !== undefined) {
                if (run.durationMs < previousDuration) {
                    current.improvements += 1;
                }
                else if (run.durationMs > previousDuration) {
                    current.regressions += 1;
                }
            }
        }
        previousByWorkflow.set(run.workflow, run.durationMs);
    }
    // Every category that appears on at least one day, sorted for stable output.
    const categories = Array.from(new Set(Array.from(dayMap.values()).flatMap((day) => Object.keys(day.categories)))).sort();
    return {
        days: Array.from(dayMap.values()),
        categories,
    };
}
// One-line human summary of a run:
// "<date> <time> <workflow> <clock-duration> <category> [tags]".
function formatRunLine(run) {
    const endedAt = new Date(run.endedAt);
    const date = `${toLocalDateString(endedAt)} ${toLocalTimeString(endedAt)}`;
    const tags = run.tags.length > 0 ? `[${run.tags.join(",")}]` : "";
    return `${date} ${run.workflow} ${(0, format_1.formatDurationClock)(run.durationMs)} ${run.category ?? "uncategorized"} ${tags}`;
}
"use strict";
// Thin facade over the storage module: timer start/stop plus recording
// externally timed commands.
Object.defineProperty(exports, "__esModule", { value: true });
exports.startTimer = startTimer;
exports.stopTimer = stopTimer;
exports.runTimedCommand = runTimedCommand;
const storage_1 = require("./storage");
// Begin a timer for a workflow (delegates to storage).
async function startTimer(input) {
    return (0, storage_1.startTimer)(input);
}
// Stop the active timer and persist the run (delegates to storage).
async function stopTimer(input) {
    return (0, storage_1.stopTimer)(input);
}
// Record a run that was measured externally (e.g. a shelled-out
// command), deriving the duration from the supplied start/end Date
// instants, clamped at zero.
async function runTimedCommand(input) {
    return (0, storage_1.recordRun)({
        workflow: input.workflow,
        category: input.category,
        tags: input.tags,
        notes: input.notes,
        session: input.session,
        startedAt: input.startedAt.toISOString(),
        endedAt: input.endedAt.toISOString(),
        durationMs: Math.max(0, input.endedAt.getTime() - input.startedAt.getTime()),
        command: input.command,
        commandExitCode: input.exitCode,
    });
}
| "use strict"; | ||
| Object.defineProperty(exports, "__esModule", { value: true }); |
| #!/usr/bin/env python3 | ||
| import json | ||
| import sys | ||
| from pathlib import Path | ||
| try: | ||
| import matplotlib.pyplot as plt | ||
| except ImportError as exc: | ||
| raise SystemExit( | ||
| "matplotlib is required. Install it with: python3 -m pip install matplotlib" | ||
| ) from exc | ||
def read_payload() -> dict:
    """Parse the JSON payload file named by the first CLI argument."""
    if len(sys.argv) < 2:
        raise SystemExit("Usage: compare.py <payload.json>")
    payload_text = Path(sys.argv[1]).read_text(encoding="utf-8")
    return json.loads(payload_text)
def to_seconds(ms: int) -> float:
    """Convert a millisecond count to seconds."""
    return ms / 1000
def apply_theme(dark: bool):
    """Apply a dark or light matplotlib style sheet."""
    plt.style.use("dark_background" if dark else "seaborn-v0_8-whitegrid")
def main():
    """Render a box plot comparing run-duration distributions across workflows."""
    payload = read_payload()
    workflows = payload.get("workflows", [])
    output = payload.get("output")
    dark = bool(payload.get("dark", False))
    if not output:
        raise SystemExit("Payload must include output path")
    if len(workflows) < 2:
        raise SystemExit("Need at least two workflows to compare")
    labels = []
    distributions = []
    for workflow in workflows:
        runs = workflow.get("runs", [])
        if len(runs) == 0:
            continue  # skip workflows with no recorded runs
        labels.append(f"{workflow.get('name', 'workflow')}\n(n={len(runs)})")
        durations = [to_seconds(run["durationMs"]) for run in runs]
        distributions.append(durations)
    if len(distributions) < 2:
        raise SystemExit("Each compared workflow needs at least one run")
    apply_theme(dark)
    fig, ax = plt.subplots(figsize=(10, 6))
    # NOTE(review): the `labels=` kwarg was renamed `tick_labels=` in
    # matplotlib 3.9 — confirm the supported matplotlib version range.
    boxplot = ax.boxplot(
        distributions,
        labels=labels,
        patch_artist=True,
        showmeans=True,
        meanline=False,
    )
    # Cycle a fixed palette across the boxes.
    colors = ["#4e79a7", "#a0cbe8", "#59a14f", "#f28e2b"]
    for index, box in enumerate(boxplot["boxes"]):
        box.set_facecolor(colors[index % len(colors)])
        box.set_alpha(0.65)
    ax.set_title("TaskTime Workflow Comparison")
    ax.set_ylabel("Duration (seconds)")
    ax.grid(alpha=0.25)
    fig.tight_layout()
    output_path = Path(output)
    output_path.parent.mkdir(parents=True, exist_ok=True)
    fig.savefig(output_path, dpi=160)
    plt.close(fig)
    print(f"Saved compare graph: {output_path}")
if __name__ == "__main__":
    main()
-146
| #!/usr/bin/env python3 | ||
| import json | ||
| import math | ||
| import sys | ||
| from pathlib import Path | ||
| from datetime import datetime | ||
| try: | ||
| import matplotlib.pyplot as plt | ||
| except ImportError as exc: | ||
| raise SystemExit( | ||
| "matplotlib is required. Install it with: python3 -m pip install matplotlib" | ||
| ) from exc | ||
def read_payload() -> dict:
    """Load and decode the payload JSON passed on the command line."""
    arguments = sys.argv
    if len(arguments) < 2:
        raise SystemExit("Usage: graph.py <payload.json>")
    return json.loads(Path(arguments[1]).read_text(encoding="utf-8"))
def to_seconds(ms: int) -> float:
    """Milliseconds -> seconds as a float."""
    seconds = ms / 1000
    return seconds
def linear_regression(x_vals, y_vals):
    """Ordinary least-squares fit of y on x, returning (slope, intercept, r2).

    Empty input yields a zero fit; degenerate x spread (denominator 0)
    yields a flat line through the mean of y with r2 = 0.
    """
    n = len(x_vals)
    if n == 0:
        return 0.0, 0.0, 0.0
    sum_x, sum_y = sum(x_vals), sum(y_vals)
    sum_xy = sum(x * y for x, y in zip(x_vals, y_vals))
    sum_xx = sum(x * x for x in x_vals)
    denominator = n * sum_xx - sum_x * sum_x
    if denominator == 0:
        return 0.0, sum_y / n, 0.0
    slope = (n * sum_xy - sum_x * sum_y) / denominator
    intercept = (sum_y - slope * sum_x) / n
    mean_y = sum_y / n
    ss_total = sum((value - mean_y) ** 2 for value in y_vals)
    ss_res = sum((y - (slope * x + intercept)) ** 2 for x, y in zip(x_vals, y_vals))
    if ss_total == 0:
        return slope, intercept, 0.0
    return slope, intercept, 1 - (ss_res / ss_total)
def apply_theme(dark: bool):
    """Select the plot style sheet for dark or light output."""
    if not dark:
        plt.style.use("seaborn-v0_8-whitegrid")
        return
    plt.style.use("dark_background")
def build_binned_means(x_vals, y_vals):
    """Average consecutive chunks of (x, y) into ~sqrt(n) bins.

    Returns ([], []) when there are fewer than four points, since a
    smoothed overlay adds no signal there.
    """
    total = len(x_vals)
    if total < 4:
        return [], []
    bucket_count = max(2, int(math.sqrt(total)))
    step = max(1, math.ceil(total / bucket_count))
    means_x = []
    means_y = []
    for begin in range(0, total, step):
        xs = x_vals[begin : begin + step]
        ys = y_vals[begin : begin + step]
        if not xs:
            continue
        means_x.append(sum(xs) / len(xs))
        means_y.append(sum(ys) / len(ys))
    return means_x, means_y
def main():
    """Render a scatter + trend-line chart of run durations for one workflow."""
    payload = read_payload()
    runs = payload.get("runs", [])
    workflow = payload.get("workflow", "workflow")
    output = payload.get("output")
    dark = bool(payload.get("dark", False))
    if not output:
        raise SystemExit("Payload must include output path")
    if len(runs) == 0:
        raise SystemExit(f"No runs available to graph for '{workflow}'")
    apply_theme(dark)
    # X axis is simply the 1-based run number.
    x_vals = list(range(1, len(runs) + 1))
    y_vals = [to_seconds(run["durationMs"]) for run in runs]
    slope, intercept, r2 = linear_regression(x_vals, y_vals)
    trend_line = [slope * x + intercept for x in x_vals]
    best_index = min(range(len(y_vals)), key=lambda index: y_vals[index])
    bin_x, bin_y = build_binned_means(x_vals, y_vals)
    fig, ax = plt.subplots(figsize=(11, 6))
    ax.scatter(x_vals, y_vals, color="#4fa3ff", alpha=0.85, label=f"Runs (n={len(y_vals)})")
    ax.plot(x_vals, trend_line, color="#f28e2b", linewidth=2.0, label=f"Trend (r^2={r2:.2f})")
    if bin_x:
        ax.plot(
            bin_x,
            bin_y,
            color="#9c9ede",
            linestyle="--",
            linewidth=1.5,
            label="Binned means",
        )
    # Highlight the fastest run with a star marker drawn on top.
    ax.scatter(
        [x_vals[best_index]],
        [y_vals[best_index]],
        color="#e15759",
        s=90,
        marker="*",
        label=f"Best: {y_vals[best_index]:.1f}s",
        zorder=5,
    )
    ax.set_title(f"TaskTime Trend: {workflow}", fontsize=14, pad=12)
    ax.set_xlabel("Run number")
    ax.set_ylabel("Duration (seconds)")
    ax.legend(loc="upper right")
    ax.grid(alpha=0.25)
    # Add run date range for quick context.
    started = datetime.fromisoformat(runs[0]["endedAt"].replace("Z", "+00:00"))
    ended = datetime.fromisoformat(runs[-1]["endedAt"].replace("Z", "+00:00"))
    subtitle = f"Range: {started.date()} to {ended.date()}"
    fig.text(0.01, 0.01, subtitle, fontsize=9, alpha=0.8)
    output_path = Path(output)
    output_path.parent.mkdir(parents=True, exist_ok=True)
    fig.tight_layout()
    fig.savefig(output_path, dpi=160)
    plt.close(fig)
    print(f"Saved trend graph: {output_path}")
if __name__ == "__main__":
    main()
-335
| #!/usr/bin/env python3 | ||
| import json | ||
| import sys | ||
| from datetime import datetime | ||
| from pathlib import Path | ||
| try: | ||
| from reportlab.lib import colors | ||
| from reportlab.lib.pagesizes import letter | ||
| from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet | ||
| from reportlab.lib.units import inch | ||
| from reportlab.platypus import Image, Paragraph, SimpleDocTemplate, Spacer, Table, TableStyle | ||
| except ImportError as exc: | ||
| raise SystemExit( | ||
| "reportlab is required. Install it with: python3 -m pip install reportlab" | ||
| ) from exc | ||
def read_payload() -> dict:
    """Load and return the JSON payload whose path is the first CLI argument.

    Exits with a usage message when no path was supplied.
    """
    if len(sys.argv) < 2:
        raise SystemExit("Usage: pdf.py <payload.json>")
    return json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
def parse_iso(value: str) -> datetime:
    """Parse an ISO-8601 timestamp, tolerating a trailing 'Z' UTC suffix."""
    # fromisoformat (pre-3.11) rejects 'Z', so normalize it to an explicit offset.
    normalized = value.replace("Z", "+00:00")
    return datetime.fromisoformat(normalized)
def fmt_date(value: str) -> str:
    """Format an ISO-8601 timestamp (with optional 'Z') as 'YYYY-MM-DD HH:MM'."""
    moment = datetime.fromisoformat(value.replace("Z", "+00:00"))
    return moment.strftime("%Y-%m-%d %H:%M")
def fmt_duration(ms: int) -> str:
    """Render a millisecond duration as a compact human-readable string.

    Output forms: "500ms", "42s", "3m 7s", "1h 2m 5s". Sub-second
    remainders are dropped once the duration reaches one second.
    """
    if ms < 1000:
        return f"{ms}ms"
    total_seconds = ms // 1000
    minutes = total_seconds // 60
    seconds = total_seconds % 60
    if minutes == 0:
        return f"{seconds}s"
    # Fix: hour-long runs previously rendered as e.g. "180m 0s"; add an
    # hours component, mirroring the package's TS formatDuration helper.
    hours = minutes // 60
    if hours == 0:
        return f"{minutes}m {seconds}s"
    return f"{hours}h {minutes % 60}m {seconds}s"
def style_table(data, col_widths):
    """Build a reportlab Table with a dark header row, a light grid, and
    zebra striping on every other body row.

    `data` is a list of rows (header first); `col_widths` are reportlab widths.
    """
    header_bg = colors.HexColor("#1f4e79")
    grid_color = colors.HexColor("#d4d4d4")
    zebra_bg = colors.HexColor("#f7f9fc")
    commands = [
        ("BACKGROUND", (0, 0), (-1, 0), header_bg),
        ("TEXTCOLOR", (0, 0), (-1, 0), colors.whitesmoke),
        ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"),
        ("FONTSIZE", (0, 0), (-1, 0), 10),
        ("ALIGN", (0, 0), (-1, 0), "LEFT"),
        ("GRID", (0, 0), (-1, -1), 0.25, grid_color),
        ("FONTNAME", (0, 1), (-1, -1), "Helvetica"),
        ("FONTSIZE", (0, 1), (-1, -1), 9),
        ("VALIGN", (0, 0), (-1, -1), "TOP"),
        ("LEFTPADDING", (0, 0), (-1, -1), 6),
        ("RIGHTPADDING", (0, 0), (-1, -1), 6),
        ("TOPPADDING", (0, 0), (-1, -1), 5),
        ("BOTTOMPADDING", (0, 0), (-1, -1), 5),
    ]
    # Shade even-indexed body rows (the header at index 0 is excluded).
    commands.extend(
        ("BACKGROUND", (0, row), (-1, row), zebra_bg)
        for row in range(2, len(data), 2)
    )
    table = Table(data, colWidths=col_widths, repeatRows=1)
    table.setStyle(TableStyle(commands))
    return table
def add_header(story, styles, title: str, subtitle_lines):
    """Append the report title, its subtitle lines, and trailing spacing to `story`."""
    story.append(Paragraph(title, styles["report_title"]))
    story.extend(Paragraph(text, styles["meta"]) for text in subtitle_lines)
    story.append(Spacer(1, 0.18 * inch))
def add_graph(story, graph_path: str):
    """Append the pre-rendered chart image to `story` when the file exists.

    Silently does nothing for a missing or empty path — the graph is optional.
    """
    graph_file = Path(graph_path)
    if not graph_file.exists():
        return
    chart = Image(str(graph_file), width=6.8 * inch, height=3.4 * inch)
    chart.hAlign = "LEFT"
    story.extend([chart, Spacer(1, 0.15 * inch)])
def workflow_story(payload: dict, styles) -> list:
    """Build the reportlab flowable list for a single-workflow PDF report.

    Layout: header, summary metrics table, optional trend graph,
    improvement metrics table, then the full run history (newest first).
    Missing payload fields fall back to neutral defaults so a sparse
    payload still produces a valid document.
    """
    workflow = payload.get("workflow", "workflow")
    date_range = payload.get("dateRange", {})
    summary = payload.get("summary", {})
    improvements = payload.get("improvements", {})
    history = payload.get("history", [])
    agent = payload.get("agent", "agent")
    # generatedAt doubles as the fallback timestamp wherever a date is absent.
    generated_at = payload.get("generatedAt", datetime.utcnow().isoformat() + "Z")
    story = []
    add_header(
        story,
        styles,
        "TaskTime Workflow Report",
        [
            f"Workflow: {workflow}",
            f"Agent: {agent}",
            f"Range: {fmt_date(date_range.get('start', generated_at))} to {fmt_date(date_range.get('end', generated_at))}",
        ],
    )
    # Headline metrics as a two-column key/value table.
    summary_rows = [
        ["Metric", "Value"],
        ["Total runs", str(summary.get("totalRuns", 0))],
        ["Average", fmt_duration(int(summary.get("averageMs", 0)))],
        ["Best", fmt_duration(int(summary.get("bestMs", 0)))],
        ["Worst", fmt_duration(int(summary.get("worstMs", 0)))],
        ["Trend", str(summary.get("trend", "stable"))],
    ]
    story.append(style_table(summary_rows, [2.4 * inch, 4.2 * inch]))
    story.append(Spacer(1, 0.15 * inch))
    add_graph(story, payload.get("graphPath", ""))
    metrics_rows = [
        ["Improvement metric", "Value"],
        ["Change from first run", f"{float(improvements.get('changePct', 0.0)):.1f}%"],
        ["Current improvement streak", str(int(improvements.get("streak", 0)))],
    ]
    story.append(style_table(metrics_rows, [2.9 * inch, 3.7 * inch]))
    story.append(Spacer(1, 0.15 * inch))
    # reversed(): history is presumably stored oldest-first; print newest first.
    history_rows = [["Date", "Duration", "Notes", "PB"]]
    for run in reversed(history):
        history_rows.append(
            [
                fmt_date(run.get("endedAt", generated_at)),
                fmt_duration(int(run.get("durationMs", 0))),
                str(run.get("notes", "")),
                # "PB" column flags personal-best runs; blank otherwise.
                "yes" if bool(run.get("personalBest", False)) else "",
            ]
        )
    story.append(Paragraph("Run History", styles["section"]))
    story.append(Spacer(1, 0.05 * inch))
    story.append(style_table(history_rows, [1.5 * inch, 1.0 * inch, 3.7 * inch, 0.5 * inch]))
    return story
def weekly_story(payload: dict, styles) -> list:
    """Build the reportlab flowable list for the weekly summary PDF report.

    Layout: header, week-level summary table, optional graph, then a
    per-day breakdown table.
    """
    summary = payload.get("summary", {})
    weekly = payload.get("weekly", {})
    days = weekly.get("days", [])
    agent = payload.get("agent", "agent")
    # Days are assumed chronologically ordered; first/last bound the range.
    start_date = days[0]["date"] if days else "-"
    end_date = days[-1]["date"] if days else "-"
    story = []
    add_header(
        story,
        styles,
        "TaskTime Weekly Report",
        [
            f"Agent: {agent}",
            f"Range: {start_date} to {end_date}",
        ],
    )
    summary_rows = [
        ["Metric", "Value"],
        ["Days", str(summary.get("days", 0))],
        ["Total runs", str(summary.get("totalRuns", 0))],
        ["Total time", f"{summary.get('totalMinutes', 0)} minutes"],
        ["Improvements", str(summary.get("totalImprovements", 0))],
        ["Regressions", str(summary.get("totalRegressions", 0))],
    ]
    story.append(style_table(summary_rows, [2.4 * inch, 4.2 * inch]))
    story.append(Spacer(1, 0.15 * inch))
    add_graph(story, payload.get("graphPath", ""))
    day_rows = [["Date", "Runs", "Time (min)", "Improvements", "Regressions"]]
    for day in days:
        # Per-category values are milliseconds; convert the sum to minutes.
        total_minutes = sum(day.get("categories", {}).values()) / 60000.0
        day_rows.append(
            [
                day.get("date", ""),
                str(day.get("runCount", 0)),
                f"{total_minutes:.1f}",
                str(day.get("improvements", 0)),
                str(day.get("regressions", 0)),
            ]
        )
    story.append(Paragraph("Daily Breakdown", styles["section"]))
    story.append(Spacer(1, 0.05 * inch))
    story.append(style_table(day_rows, [1.6 * inch, 0.8 * inch, 1.1 * inch, 1.4 * inch, 1.4 * inch]))
    return story
def category_story(payload: dict, styles) -> list:
    """Build the reportlab flowable list for a per-category PDF report.

    Layout: header, per-workflow summary table (runs / average / stdev),
    optional graph, then the recent run history (newest first).
    """
    category = payload.get("category", "category")
    # Note: unlike the other modes, "summary" here is a LIST of workflow rows.
    summary = payload.get("summary", [])
    history = payload.get("history", [])
    agent = payload.get("agent", "agent")
    # Fallback timestamp for runs missing an "endedAt" field.
    generated_at = payload.get("generatedAt", datetime.utcnow().isoformat() + "Z")
    story = []
    add_header(
        story,
        styles,
        "TaskTime Category Report",
        [
            f"Category: {category}",
            f"Agent: {agent}",
        ],
    )
    summary_rows = [["Workflow", "Runs", "Average", "StdDev"]]
    for item in summary:
        summary_rows.append(
            [
                item.get("workflow", ""),
                str(item.get("runs", 0)),
                fmt_duration(int(item.get("averageMs", 0))),
                fmt_duration(int(item.get("stdevMs", 0))),
            ]
        )
    story.append(style_table(summary_rows, [2.8 * inch, 0.8 * inch, 1.2 * inch, 1.2 * inch]))
    story.append(Spacer(1, 0.15 * inch))
    add_graph(story, payload.get("graphPath", ""))
    # History table, most recent run first.
    history_rows = [["Date", "Workflow", "Duration", "Notes"]]
    for run in reversed(history):
        history_rows.append(
            [
                fmt_date(run.get("endedAt", generated_at)),
                str(run.get("workflow", "")),
                fmt_duration(int(run.get("durationMs", 0))),
                str(run.get("notes", "")),
            ]
        )
    story.append(Paragraph("Recent Run History", styles["section"]))
    story.append(Spacer(1, 0.05 * inch))
    story.append(style_table(history_rows, [1.4 * inch, 1.5 * inch, 0.9 * inch, 2.8 * inch]))
    return story
def build_styles():
    """Return the reportlab sample stylesheet extended with this report's
    custom paragraph styles: report_title, meta, and section."""
    sheet = getSampleStyleSheet()
    custom_specs = [
        dict(
            name="report_title",
            parent=sheet["Title"],
            fontName="Helvetica-Bold",
            fontSize=18,
            spaceAfter=4,
        ),
        dict(
            name="meta",
            parent=sheet["BodyText"],
            fontName="Helvetica",
            fontSize=9,
            textColor=colors.HexColor("#444444"),
            leading=11,
        ),
        dict(
            name="section",
            parent=sheet["Heading3"],
            fontName="Helvetica-Bold",
            fontSize=12,
            spaceAfter=4,
        ),
    ]
    for spec in custom_specs:
        sheet.add(ParagraphStyle(**spec))
    return sheet
def footer_callback(label: str):
    """Return a reportlab page-draw callback rendering `label` on the left
    and the current page number on the right of every page footer."""
    def draw_footer(canvas, doc):
        baseline = 0.35 * inch  # footer baseline, measured from the page bottom
        canvas.saveState()
        canvas.setFillColor(colors.HexColor("#777777"))
        canvas.setFont("Helvetica", 8)
        canvas.drawString(doc.leftMargin, baseline, label)
        page_label = f"Page {canvas.getPageNumber()}"
        canvas.drawRightString(letter[0] - doc.rightMargin, baseline, page_label)
        canvas.restoreState()
    return draw_footer
def main():
    """Build the requested PDF report from the JSON payload given on the CLI.

    The payload selects one of three modes ("workflow", "week", "category")
    and must include an "output" path for the generated PDF.
    """
    payload = read_payload()
    mode = payload.get("mode")
    output = payload.get("output")
    # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
    # consider datetime.now(timezone.utc) — but confirm the trailing "Z"
    # suffix handling first, since an aware isoformat already carries +00:00.
    generated_at = payload.get("generatedAt", datetime.utcnow().isoformat() + "Z")
    if not output:
        raise SystemExit("Payload must include output path")
    styles = build_styles()
    story = []
    # Each mode assembles a different flowable "story" for the PDF body.
    if mode == "workflow":
        story = workflow_story(payload, styles)
    elif mode == "week":
        story = weekly_story(payload, styles)
    elif mode == "category":
        story = category_story(payload, styles)
    else:
        raise SystemExit("Payload mode must be 'workflow', 'week', or 'category'")
    output_path = Path(output)
    output_path.parent.mkdir(parents=True, exist_ok=True)
    document = SimpleDocTemplate(
        str(output_path),
        pagesize=letter,
        leftMargin=0.65 * inch,
        rightMargin=0.65 * inch,
        topMargin=0.7 * inch,
        bottomMargin=0.6 * inch,
        title="TaskTime Report",
    )
    # Same footer (generation timestamp + page number) on every page.
    footer = footer_callback(f"Generated at {fmt_date(generated_at)}")
    document.build(story, onFirstPage=footer, onLaterPages=footer)
    print(f"Saved PDF report: {output_path}")
if __name__ == "__main__":
    # Script entry point: pdf.py <payload.json>
    main()
| #!/usr/bin/env python3 | ||
| import json | ||
| import sys | ||
| from pathlib import Path | ||
| try: | ||
| import matplotlib.pyplot as plt | ||
| except ImportError as exc: | ||
| raise SystemExit( | ||
| "matplotlib is required. Install it with: python3 -m pip install matplotlib" | ||
| ) from exc | ||
def read_payload() -> dict:
    """Load and return the JSON payload whose path is the first CLI argument.

    Exits with a usage message when no path was supplied.
    """
    if len(sys.argv) < 2:
        raise SystemExit("Usage: report.py <payload.json>")
    return json.loads(Path(sys.argv[1]).read_text(encoding="utf-8"))
def apply_theme(dark: bool):
    """Activate the matplotlib style sheet for dark or light chart output."""
    style_name = "dark_background" if dark else "seaborn-v0_8-whitegrid"
    plt.style.use(style_name)
def render_category_report(payload: dict):
    """Render a PNG bar chart of average duration per workflow for a category.

    Expects payload["summary"] as a list of rows with workflow/averageMs/
    stdevMs/runs, plus an "output" path; exits when either is missing.
    """
    summary = payload.get("summary", [])
    category = payload.get("category", "category")
    output = payload.get("output")
    dark = bool(payload.get("dark", False))
    if len(summary) == 0:
        raise SystemExit(f"No summary data available for category '{category}'")
    if not output:
        raise SystemExit("Payload must include output path")
    workflows = [item["workflow"] for item in summary]
    # Milliseconds -> seconds for plotting.
    averages = [item["averageMs"] / 1000.0 for item in summary]
    errors = [item["stdevMs"] / 1000.0 for item in summary]
    counts = [item["runs"] for item in summary]
    apply_theme(dark)
    fig, ax = plt.subplots(figsize=(11, 6))
    # Error bars show one standard deviation per workflow.
    bars = ax.bar(
        workflows,
        averages,
        yerr=errors,
        capsize=4,
        color="#4e79a7",
        alpha=0.8,
    )
    ax.set_title(f"Category Summary: {category}")
    ax.set_ylabel("Average Duration (seconds)")
    ax.set_xlabel("Workflow")
    ax.grid(axis="y", alpha=0.25)
    # Annotate each bar with its sample size.
    for bar, run_count in zip(bars, counts):
        ax.text(
            bar.get_x() + bar.get_width() / 2.0,
            bar.get_height(),
            f"n={run_count}",
            ha="center",
            va="bottom",
            fontsize=9,
        )
    fig.tight_layout()
    output_path = Path(output)
    output_path.parent.mkdir(parents=True, exist_ok=True)
    fig.savefig(output_path, dpi=160)
    plt.close(fig)
    print(f"Saved category report: {output_path}")
def render_weekly_report(payload: dict):
    """Render a PNG weekly chart: stacked per-category time bars (minutes)
    plus a run-count line on a secondary axis, annotated with +improvements/
    -regressions per day.

    Expects payload["weekly"] with "days" and "categories", and an "output"
    path; exits when either is missing.
    """
    weekly = payload.get("weekly", {})
    days = weekly.get("days", [])
    categories = weekly.get("categories", [])
    output = payload.get("output")
    dark = bool(payload.get("dark", False))
    if len(days) == 0:
        raise SystemExit("No weekly data available")
    if not output:
        raise SystemExit("Payload must include output path")
    apply_theme(dark)
    fig, ax = plt.subplots(figsize=(12, 7))
    x_vals = list(range(len(days)))
    day_labels = [day["date"] for day in days]
    # Per category: one value per day, converted from milliseconds to minutes.
    category_totals = {
        category: [day["categories"].get(category, 0) / 60000.0 for day in days]
        for category in categories
    }
    # Running tops of the stacked bars.
    bottom = [0.0] * len(days)
    palette = ["#4e79a7", "#76b7b2", "#59a14f", "#f28e2b", "#e15759", "#b07aa1"]
    for index, category in enumerate(categories):
        values = category_totals[category]
        ax.bar(
            x_vals,
            values,
            bottom=bottom,
            label=category,
            # Palette wraps around if there are more categories than colors.
            color=palette[index % len(palette)],
            alpha=0.85,
        )
        bottom = [b + v for b, v in zip(bottom, values)]
    ax.set_xticks(x_vals, day_labels, rotation=25, ha="right")
    ax.set_ylabel("Time spent (minutes)")
    ax.set_title("TaskTime Weekly Report")
    ax.legend(loc="upper left")
    ax.grid(axis="y", alpha=0.25)
    run_counts = [day["runCount"] for day in days]
    improvements = [day["improvements"] for day in days]
    regressions = [day["regressions"] for day in days]
    # Secondary axis: run counts as a line over the stacked bars.
    ax2 = ax.twinx()
    ax2.plot(x_vals, run_counts, color="#f28e2b", marker="o", linewidth=2, label="Run count")
    ax2.set_ylabel("Runs")
    ax2.set_ylim(bottom=0)
    # Label each day's point with "+improvements/-regressions".
    for index, (imp, reg) in enumerate(zip(improvements, regressions)):
        label = f"+{imp}/-{reg}"
        ax2.text(x_vals[index], run_counts[index] + 0.1, label, ha="center", va="bottom", fontsize=8)
    fig.tight_layout()
    output_path = Path(output)
    output_path.parent.mkdir(parents=True, exist_ok=True)
    fig.savefig(output_path, dpi=160)
    plt.close(fig)
    print(f"Saved weekly report: {output_path}")
def main():
    """Read the payload and dispatch to the renderer matching its "mode"."""
    payload = read_payload()
    renderers = {
        "week": render_weekly_report,
        "category": render_category_report,
    }
    renderer = renderers.get(payload.get("mode"))
    if renderer is None:
        raise SystemExit("Payload mode must be 'week' or 'category'")
    renderer(payload)
if __name__ == "__main__":
    # Script entry point: report.py <payload.json>
    main()
-26
| # TaskTime CLI | ||
| TaskTime is a CLI for benchmarking workflow performance and generating shareable charts. | ||
| ## Commands | ||
| - `tasktime start <workflow> [--category ... --tags ... --notes ... --session ...]` | ||
| - `tasktime stop [--notes ... --tags ...]` | ||
| - `tasktime run <workflow> -- <command ...>` | ||
| - `tasktime status` | ||
| - `tasktime list --last 10` | ||
| - `tasktime search "<query>"` | ||
| - `tasktime graph <workflow> --last 20 --output graph.png [--dark]` | ||
| - `tasktime compare <workflow-a> <workflow-b> --output compare.png [--dark]` | ||
| - `tasktime report --week --output weekly.png [--dark]` | ||
| - `tasktime report --category loom --output category.png [--dark]` | ||
| ## Storage | ||
| By default data is stored in: | ||
| `~/.openclaw/workspace/memory/benchmarks/` | ||
| Override during testing with: | ||
| `TASKTIME_STORAGE_ROOT=/custom/path tasktime ...` |
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
No README
QualityPackage does not have a README. This may indicate a failed publish or a low quality package.
Found 1 instance in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 3 instances in 1 package
No contributors or author data
MaintenancePackage does not specify a list of contributors or an author in package.json.
Found 1 instance in 1 package
No repository
Supply chain riskPackage does not have a linked source code repository. Without this field, a package will have no reference to the location of the source code use to generate the package.
Found 1 instance in 1 package
No v1
QualityPackage is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package
URL strings
Supply chain riskPackage contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
2
-33.33%1
-50%1
-50%0
-100%82
Infinity%1
-80%Yes
NaN22932
-73.49%4
300%10
-61.54%562
-73.74%1
Infinity%+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added