chore: initial backup of Claude Code configuration
Includes: CLAUDE.md, settings.json, agents, commands, rules, skills, hooks, contexts, evals, get-shit-done, plugin configs (installed list and marketplace sources). Excludes credentials, runtime caches, telemetry, session data, and plugin binary cache.
This commit is contained in:
722
get-shit-done/bin/gsd-tools.cjs
Normal file
722
get-shit-done/bin/gsd-tools.cjs
Normal file
@@ -0,0 +1,722 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* GSD Tools — CLI utility for GSD workflow operations
|
||||
*
|
||||
* Replaces repetitive inline bash patterns across ~50 GSD command/workflow/agent files.
|
||||
* Centralizes: config parsing, model resolution, phase lookup, git commits, summary verification.
|
||||
*
|
||||
* Usage: node gsd-tools.cjs <command> [args] [--raw]
|
||||
*
|
||||
* Atomic Commands:
|
||||
* state load Load project config + state
|
||||
* state json Output STATE.md frontmatter as JSON
|
||||
* state update <field> <value> Update a STATE.md field
|
||||
* state get [section] Get STATE.md content or section
|
||||
* state patch --field val ... Batch update STATE.md fields
|
||||
* state begin-phase --phase N --name S --plans C Update STATE.md for new phase start
|
||||
* state signal-waiting --type T --question Q --options "A|B" --phase P Write WAITING.json signal
|
||||
* state signal-resume Remove WAITING.json signal
|
||||
* resolve-model <agent-type> Get model for agent based on profile
|
||||
* find-phase <phase> Find phase directory by number
|
||||
* commit <message> [--files f1 f2] Commit planning docs
|
||||
* verify-summary <path> Verify a SUMMARY.md file
|
||||
* generate-slug <text> Convert text to URL-safe slug
|
||||
* current-timestamp [format] Get timestamp (full|date|filename)
|
||||
* list-todos [area] Count and enumerate pending todos
|
||||
* verify-path-exists <path> Check file/directory existence
|
||||
* config-ensure-section Initialize .planning/config.json
|
||||
* history-digest Aggregate all SUMMARY.md data
|
||||
* summary-extract <path> [--fields] Extract structured data from SUMMARY.md
|
||||
* state-snapshot Structured parse of STATE.md
|
||||
* phase-plan-index <phase> Index plans with waves and status
|
||||
* websearch <query> Search web via Brave API (if configured)
|
||||
* [--limit N] [--freshness day|week|month]
|
||||
*
|
||||
* Phase Operations:
|
||||
* phase next-decimal <phase> Calculate next decimal phase number
|
||||
* phase add <description> Append new phase to roadmap + create dir
|
||||
* phase insert <after> <description> Insert decimal phase after existing
|
||||
* phase remove <phase> [--force] Remove phase, renumber all subsequent
|
||||
* phase complete <phase> Mark phase done, update state + roadmap
|
||||
*
|
||||
* Roadmap Operations:
|
||||
* roadmap get-phase <phase> Extract phase section from ROADMAP.md
|
||||
* roadmap analyze Full roadmap parse with disk status
|
||||
* roadmap update-plan-progress <N> Update progress table row from disk (PLAN vs SUMMARY counts)
|
||||
*
|
||||
* Requirements Operations:
|
||||
* requirements mark-complete <ids> Mark requirement IDs as complete in REQUIREMENTS.md
|
||||
* Accepts: REQ-01,REQ-02 or REQ-01 REQ-02 or [REQ-01, REQ-02]
|
||||
*
|
||||
* Milestone Operations:
|
||||
* milestone complete <version> Archive milestone, create MILESTONES.md
|
||||
* [--name <name>]
|
||||
* [--archive-phases] Move phase dirs to milestones/vX.Y-phases/
|
||||
*
|
||||
* Validation:
|
||||
* validate consistency Check phase numbering, disk/roadmap sync
|
||||
* validate health [--repair] Check .planning/ integrity, optionally repair
|
||||
*
|
||||
* Progress:
|
||||
* progress [json|table|bar] Render progress in various formats
|
||||
*
|
||||
* Todos:
|
||||
* todo complete <filename> Move todo from pending to completed
|
||||
*
|
||||
* Scaffolding:
|
||||
* scaffold context --phase <N> Create CONTEXT.md template
|
||||
* scaffold uat --phase <N> Create UAT.md template
|
||||
* scaffold verification --phase <N> Create VERIFICATION.md template
|
||||
* scaffold phase-dir --phase <N> Create phase directory
|
||||
* --name <name>
|
||||
*
|
||||
* Frontmatter CRUD:
|
||||
* frontmatter get <file> [--field k] Extract frontmatter as JSON
|
||||
* frontmatter set <file> --field k Update single frontmatter field
|
||||
* --value jsonVal
|
||||
* frontmatter merge <file> Merge JSON into frontmatter
|
||||
* --data '{json}'
|
||||
* frontmatter validate <file> Validate required fields
|
||||
* --schema plan|summary|verification
|
||||
*
|
||||
* Verification Suite:
|
||||
* verify plan-structure <file> Check PLAN.md structure + tasks
|
||||
* verify phase-completeness <phase> Check all plans have summaries
|
||||
* verify references <file> Check @-refs + paths resolve
|
||||
* verify commits <h1> [h2] ... Batch verify commit hashes
|
||||
* verify artifacts <plan-file> Check must_haves.artifacts
|
||||
* verify key-links <plan-file> Check must_haves.key_links
|
||||
*
|
||||
* Template Fill:
|
||||
* template fill summary --phase N Create pre-filled SUMMARY.md
|
||||
* [--plan M] [--name "..."]
|
||||
* [--fields '{json}']
|
||||
* template fill plan --phase N Create pre-filled PLAN.md
|
||||
* [--plan M] [--type execute|tdd]
|
||||
* [--wave N] [--fields '{json}']
|
||||
* template fill verification Create pre-filled VERIFICATION.md
|
||||
* --phase N [--fields '{json}']
|
||||
*
|
||||
* State Progression:
|
||||
* state advance-plan Increment plan counter
|
||||
* state record-metric --phase N Record execution metrics
|
||||
* --plan M --duration Xmin
|
||||
* [--tasks N] [--files N]
|
||||
* state update-progress Recalculate progress bar
|
||||
* state add-decision --summary "..." Add decision to STATE.md
|
||||
* [--phase N] [--rationale "..."]
|
||||
* [--summary-file path] [--rationale-file path]
|
||||
* state add-blocker --text "..." Add blocker
|
||||
* [--text-file path]
|
||||
* state resolve-blocker --text "..." Remove blocker
|
||||
* state record-session Update session continuity
|
||||
* --stopped-at "..."
|
||||
* [--resume-file path]
|
||||
*
|
||||
* Compound Commands (workflow-specific initialization):
|
||||
* init execute-phase <phase> All context for execute-phase workflow
|
||||
* init plan-phase <phase> All context for plan-phase workflow
|
||||
* init new-project All context for new-project workflow
|
||||
* init new-milestone All context for new-milestone workflow
|
||||
* init quick <description> All context for quick workflow
|
||||
* init resume All context for resume-project workflow
|
||||
* init verify-work <phase> All context for verify-work workflow
|
||||
* init phase-op <phase> Generic phase operation context
|
||||
* init todos [area] All context for todo workflows
|
||||
* init milestone-op All context for milestone operations
|
||||
* init map-codebase All context for map-codebase workflow
|
||||
* init progress All context for progress workflow
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { error } = require('./lib/core.cjs');
|
||||
const state = require('./lib/state.cjs');
|
||||
const phase = require('./lib/phase.cjs');
|
||||
const roadmap = require('./lib/roadmap.cjs');
|
||||
const verify = require('./lib/verify.cjs');
|
||||
const config = require('./lib/config.cjs');
|
||||
const template = require('./lib/template.cjs');
|
||||
const milestone = require('./lib/milestone.cjs');
|
||||
const commands = require('./lib/commands.cjs');
|
||||
const init = require('./lib/init.cjs');
|
||||
const frontmatter = require('./lib/frontmatter.cjs');
|
||||
const profilePipeline = require('./lib/profile-pipeline.cjs');
|
||||
const profileOutput = require('./lib/profile-output.cjs');
|
||||
|
||||
// ─── CLI Router ───────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * CLI entry point: strips the global flags (`--cwd`, `--raw`) out of argv,
 * then dispatches the first positional argument to the matching command
 * module (state, phase, roadmap, milestone, verify, init, …).
 *
 * Conventions used throughout the switch:
 *  - Flag values are positional: `--flag value` is read as
 *    `args[args.indexOf('--flag') + 1]`; an absent flag falls back to null
 *    (or the documented default for that option).
 *  - `raw` toggles plain-text output instead of JSON in the command impls.
 *  - NOTE(review): control flow after `error(...)` assumes error() prints and
 *    terminates the process — confirm in lib/core.cjs.
 */
async function main() {
  const args = process.argv.slice(2);

  // Optional cwd override for sandboxed subagents running outside project root.
  // Supports both `--cwd=/path` and `--cwd /path`; the flag (and its value)
  // are spliced out of `args` so command parsing below never sees them.
  let cwd = process.cwd();
  const cwdEqArg = args.find(arg => arg.startsWith('--cwd='));
  const cwdIdx = args.indexOf('--cwd');
  if (cwdEqArg) {
    const value = cwdEqArg.slice('--cwd='.length).trim();
    if (!value) error('Missing value for --cwd');
    args.splice(args.indexOf(cwdEqArg), 1);
    cwd = path.resolve(value);
  } else if (cwdIdx !== -1) {
    const value = args[cwdIdx + 1];
    // A following `--flag` means the value was omitted, not that it is a path.
    if (!value || value.startsWith('--')) error('Missing value for --cwd');
    args.splice(cwdIdx, 2);
    cwd = path.resolve(value);
  }

  // Fail fast on a bad working directory before any command touches disk.
  if (!fs.existsSync(cwd) || !fs.statSync(cwd).isDirectory()) {
    error(`Invalid --cwd: ${cwd}`);
  }

  // Global output-mode flag; removed so it can appear anywhere in argv.
  const rawIndex = args.indexOf('--raw');
  const raw = rawIndex !== -1;
  if (rawIndex !== -1) args.splice(rawIndex, 1);

  const command = args[0];

  if (!command) {
    error('Usage: gsd-tools <command> [args] [--raw] [--cwd <path>]\nCommands: state, resolve-model, find-phase, commit, verify-summary, verify, frontmatter, template, generate-slug, current-timestamp, list-todos, verify-path-exists, config-ensure-section, init');
  }

  switch (command) {
    // STATE.md operations; bare `state` (no subcommand) loads config + state.
    case 'state': {
      const subcommand = args[1];
      if (subcommand === 'json') {
        state.cmdStateJson(cwd, raw);
      } else if (subcommand === 'update') {
        state.cmdStateUpdate(cwd, args[2], args[3]);
      } else if (subcommand === 'get') {
        state.cmdStateGet(cwd, args[2], raw);
      } else if (subcommand === 'patch') {
        // Pairs of `--field value` consumed two at a time; a field whose
        // value is missing (odd trailing arg) is silently dropped.
        const patches = {};
        for (let i = 2; i < args.length; i += 2) {
          const key = args[i].replace(/^--/, '');
          const value = args[i + 1];
          if (key && value !== undefined) {
            patches[key] = value;
          }
        }
        state.cmdStatePatch(cwd, patches, raw);
      } else if (subcommand === 'advance-plan') {
        state.cmdStateAdvancePlan(cwd, raw);
      } else if (subcommand === 'record-metric') {
        const phaseIdx = args.indexOf('--phase');
        const planIdx = args.indexOf('--plan');
        const durationIdx = args.indexOf('--duration');
        const tasksIdx = args.indexOf('--tasks');
        const filesIdx = args.indexOf('--files');
        state.cmdStateRecordMetric(cwd, {
          phase: phaseIdx !== -1 ? args[phaseIdx + 1] : null,
          plan: planIdx !== -1 ? args[planIdx + 1] : null,
          duration: durationIdx !== -1 ? args[durationIdx + 1] : null,
          tasks: tasksIdx !== -1 ? args[tasksIdx + 1] : null,
          files: filesIdx !== -1 ? args[filesIdx + 1] : null,
        }, raw);
      } else if (subcommand === 'update-progress') {
        state.cmdStateUpdateProgress(cwd, raw);
      } else if (subcommand === 'add-decision') {
        // `--summary-file` / `--rationale-file` let callers pass long text
        // via files instead of shell-quoted arguments.
        const phaseIdx = args.indexOf('--phase');
        const summaryIdx = args.indexOf('--summary');
        const summaryFileIdx = args.indexOf('--summary-file');
        const rationaleIdx = args.indexOf('--rationale');
        const rationaleFileIdx = args.indexOf('--rationale-file');
        state.cmdStateAddDecision(cwd, {
          phase: phaseIdx !== -1 ? args[phaseIdx + 1] : null,
          summary: summaryIdx !== -1 ? args[summaryIdx + 1] : null,
          summary_file: summaryFileIdx !== -1 ? args[summaryFileIdx + 1] : null,
          rationale: rationaleIdx !== -1 ? args[rationaleIdx + 1] : '',
          rationale_file: rationaleFileIdx !== -1 ? args[rationaleFileIdx + 1] : null,
        }, raw);
      } else if (subcommand === 'add-blocker') {
        const textIdx = args.indexOf('--text');
        const textFileIdx = args.indexOf('--text-file');
        state.cmdStateAddBlocker(cwd, {
          text: textIdx !== -1 ? args[textIdx + 1] : null,
          text_file: textFileIdx !== -1 ? args[textFileIdx + 1] : null,
        }, raw);
      } else if (subcommand === 'resolve-blocker') {
        const textIdx = args.indexOf('--text');
        state.cmdStateResolveBlocker(cwd, textIdx !== -1 ? args[textIdx + 1] : null, raw);
      } else if (subcommand === 'record-session') {
        const stoppedIdx = args.indexOf('--stopped-at');
        const resumeIdx = args.indexOf('--resume-file');
        state.cmdStateRecordSession(cwd, {
          stopped_at: stoppedIdx !== -1 ? args[stoppedIdx + 1] : null,
          resume_file: resumeIdx !== -1 ? args[resumeIdx + 1] : 'None',
        }, raw);
      } else if (subcommand === 'begin-phase') {
        const phaseIdx = args.indexOf('--phase');
        const nameIdx = args.indexOf('--name');
        const plansIdx = args.indexOf('--plans');
        state.cmdStateBeginPhase(
          cwd,
          phaseIdx !== -1 ? args[phaseIdx + 1] : null,
          nameIdx !== -1 ? args[nameIdx + 1] : null,
          plansIdx !== -1 ? parseInt(args[plansIdx + 1], 10) : null,
          raw
        );
      } else if (subcommand === 'signal-waiting') {
        const typeIdx = args.indexOf('--type');
        const qIdx = args.indexOf('--question');
        const optIdx = args.indexOf('--options');
        const phaseIdx = args.indexOf('--phase');
        state.cmdSignalWaiting(
          cwd,
          typeIdx !== -1 ? args[typeIdx + 1] : null,
          qIdx !== -1 ? args[qIdx + 1] : null,
          optIdx !== -1 ? args[optIdx + 1] : null,
          phaseIdx !== -1 ? args[phaseIdx + 1] : null,
          raw
        );
      } else if (subcommand === 'signal-resume') {
        state.cmdSignalResume(cwd, raw);
      } else {
        // Default / unknown subcommand: full config + state load.
        state.cmdStateLoad(cwd, raw);
      }
      break;
    }

    case 'resolve-model': {
      commands.cmdResolveModel(cwd, args[1], raw);
      break;
    }

    case 'find-phase': {
      phase.cmdFindPhase(cwd, args[1], raw);
      break;
    }

    case 'commit': {
      const amend = args.includes('--amend');
      const filesIndex = args.indexOf('--files');
      // Collect all positional args between command name and first flag,
      // then join them — handles both quoted ("multi word msg") and
      // unquoted (multi word msg) invocations from different shells
      const endIndex = filesIndex !== -1 ? filesIndex : args.length;
      const messageArgs = args.slice(1, endIndex).filter(a => !a.startsWith('--'));
      const message = messageArgs.join(' ') || undefined;
      const files = filesIndex !== -1 ? args.slice(filesIndex + 1).filter(a => !a.startsWith('--')) : [];
      commands.cmdCommit(cwd, message, files, raw, amend);
      break;
    }

    case 'verify-summary': {
      const summaryPath = args[1];
      const countIndex = args.indexOf('--check-count');
      // Default of 2 checks unless overridden via --check-count.
      const checkCount = countIndex !== -1 ? parseInt(args[countIndex + 1], 10) : 2;
      verify.cmdVerifySummary(cwd, summaryPath, checkCount, raw);
      break;
    }

    case 'template': {
      const subcommand = args[1];
      if (subcommand === 'select') {
        template.cmdTemplateSelect(cwd, args[2], raw);
      } else if (subcommand === 'fill') {
        const templateType = args[2];
        const phaseIdx = args.indexOf('--phase');
        const planIdx = args.indexOf('--plan');
        const nameIdx = args.indexOf('--name');
        const typeIdx = args.indexOf('--type');
        const waveIdx = args.indexOf('--wave');
        const fieldsIdx = args.indexOf('--fields');
        template.cmdTemplateFill(cwd, templateType, {
          phase: phaseIdx !== -1 ? args[phaseIdx + 1] : null,
          plan: planIdx !== -1 ? args[planIdx + 1] : null,
          name: nameIdx !== -1 ? args[nameIdx + 1] : null,
          type: typeIdx !== -1 ? args[typeIdx + 1] : 'execute',
          wave: waveIdx !== -1 ? args[waveIdx + 1] : '1',
          // --fields takes a JSON object; malformed JSON throws here.
          fields: fieldsIdx !== -1 ? JSON.parse(args[fieldsIdx + 1]) : {},
        }, raw);
      } else {
        error('Unknown template subcommand. Available: select, fill');
      }
      break;
    }

    case 'frontmatter': {
      const subcommand = args[1];
      const file = args[2];
      if (subcommand === 'get') {
        const fieldIdx = args.indexOf('--field');
        frontmatter.cmdFrontmatterGet(cwd, file, fieldIdx !== -1 ? args[fieldIdx + 1] : null, raw);
      } else if (subcommand === 'set') {
        const fieldIdx = args.indexOf('--field');
        const valueIdx = args.indexOf('--value');
        frontmatter.cmdFrontmatterSet(cwd, file, fieldIdx !== -1 ? args[fieldIdx + 1] : null, valueIdx !== -1 ? args[valueIdx + 1] : undefined, raw);
      } else if (subcommand === 'merge') {
        const dataIdx = args.indexOf('--data');
        frontmatter.cmdFrontmatterMerge(cwd, file, dataIdx !== -1 ? args[dataIdx + 1] : null, raw);
      } else if (subcommand === 'validate') {
        const schemaIdx = args.indexOf('--schema');
        frontmatter.cmdFrontmatterValidate(cwd, file, schemaIdx !== -1 ? args[schemaIdx + 1] : null, raw);
      } else {
        error('Unknown frontmatter subcommand. Available: get, set, merge, validate');
      }
      break;
    }

    case 'verify': {
      const subcommand = args[1];
      if (subcommand === 'plan-structure') {
        verify.cmdVerifyPlanStructure(cwd, args[2], raw);
      } else if (subcommand === 'phase-completeness') {
        verify.cmdVerifyPhaseCompleteness(cwd, args[2], raw);
      } else if (subcommand === 'references') {
        verify.cmdVerifyReferences(cwd, args[2], raw);
      } else if (subcommand === 'commits') {
        // Remaining positionals are commit hashes, verified as a batch.
        verify.cmdVerifyCommits(cwd, args.slice(2), raw);
      } else if (subcommand === 'artifacts') {
        verify.cmdVerifyArtifacts(cwd, args[2], raw);
      } else if (subcommand === 'key-links') {
        verify.cmdVerifyKeyLinks(cwd, args[2], raw);
      } else {
        error('Unknown verify subcommand. Available: plan-structure, phase-completeness, references, commits, artifacts, key-links');
      }
      break;
    }

    case 'generate-slug': {
      commands.cmdGenerateSlug(args[1], raw);
      break;
    }

    case 'current-timestamp': {
      commands.cmdCurrentTimestamp(args[1] || 'full', raw);
      break;
    }

    case 'list-todos': {
      commands.cmdListTodos(cwd, args[1], raw);
      break;
    }

    case 'verify-path-exists': {
      commands.cmdVerifyPathExists(cwd, args[1], raw);
      break;
    }

    case 'config-ensure-section': {
      config.cmdConfigEnsureSection(cwd, raw);
      break;
    }

    case 'config-set': {
      config.cmdConfigSet(cwd, args[1], args[2], raw);
      break;
    }

    case "config-set-model-profile": {
      config.cmdConfigSetModelProfile(cwd, args[1], raw);
      break;
    }

    case 'config-get': {
      config.cmdConfigGet(cwd, args[1], raw);
      break;
    }

    case 'history-digest': {
      commands.cmdHistoryDigest(cwd, raw);
      break;
    }

    case 'phases': {
      const subcommand = args[1];
      if (subcommand === 'list') {
        const typeIndex = args.indexOf('--type');
        const phaseIndex = args.indexOf('--phase');
        const options = {
          type: typeIndex !== -1 ? args[typeIndex + 1] : null,
          phase: phaseIndex !== -1 ? args[phaseIndex + 1] : null,
          includeArchived: args.includes('--include-archived'),
        };
        phase.cmdPhasesList(cwd, options, raw);
      } else {
        error('Unknown phases subcommand. Available: list');
      }
      break;
    }

    case 'roadmap': {
      const subcommand = args[1];
      if (subcommand === 'get-phase') {
        roadmap.cmdRoadmapGetPhase(cwd, args[2], raw);
      } else if (subcommand === 'analyze') {
        roadmap.cmdRoadmapAnalyze(cwd, raw);
      } else if (subcommand === 'update-plan-progress') {
        roadmap.cmdRoadmapUpdatePlanProgress(cwd, args[2], raw);
      } else {
        error('Unknown roadmap subcommand. Available: get-phase, analyze, update-plan-progress');
      }
      break;
    }

    case 'requirements': {
      const subcommand = args[1];
      if (subcommand === 'mark-complete') {
        // IDs accepted in several shapes (comma/space separated, bracketed);
        // normalization happens inside the milestone module.
        milestone.cmdRequirementsMarkComplete(cwd, args.slice(2), raw);
      } else {
        error('Unknown requirements subcommand. Available: mark-complete');
      }
      break;
    }

    case 'phase': {
      const subcommand = args[1];
      if (subcommand === 'next-decimal') {
        phase.cmdPhaseNextDecimal(cwd, args[2], raw);
      } else if (subcommand === 'add') {
        // Description may span multiple unquoted args; rejoin them.
        phase.cmdPhaseAdd(cwd, args.slice(2).join(' '), raw);
      } else if (subcommand === 'insert') {
        phase.cmdPhaseInsert(cwd, args[2], args.slice(3).join(' '), raw);
      } else if (subcommand === 'remove') {
        const forceFlag = args.includes('--force');
        phase.cmdPhaseRemove(cwd, args[2], { force: forceFlag }, raw);
      } else if (subcommand === 'complete') {
        phase.cmdPhaseComplete(cwd, args[2], raw);
      } else {
        error('Unknown phase subcommand. Available: next-decimal, add, insert, remove, complete');
      }
      break;
    }

    case 'milestone': {
      const subcommand = args[1];
      if (subcommand === 'complete') {
        const nameIndex = args.indexOf('--name');
        const archivePhases = args.includes('--archive-phases');
        // Collect --name value (everything after --name until next flag or end)
        let milestoneName = null;
        if (nameIndex !== -1) {
          const nameArgs = [];
          for (let i = nameIndex + 1; i < args.length; i++) {
            if (args[i].startsWith('--')) break;
            nameArgs.push(args[i]);
          }
          milestoneName = nameArgs.join(' ') || null;
        }
        milestone.cmdMilestoneComplete(cwd, args[2], { name: milestoneName, archivePhases }, raw);
      } else {
        error('Unknown milestone subcommand. Available: complete');
      }
      break;
    }

    case 'validate': {
      const subcommand = args[1];
      if (subcommand === 'consistency') {
        verify.cmdValidateConsistency(cwd, raw);
      } else if (subcommand === 'health') {
        const repairFlag = args.includes('--repair');
        verify.cmdValidateHealth(cwd, { repair: repairFlag }, raw);
      } else {
        error('Unknown validate subcommand. Available: consistency, health');
      }
      break;
    }

    case 'progress': {
      // Render format defaults to JSON when no subcommand given.
      const subcommand = args[1] || 'json';
      commands.cmdProgressRender(cwd, subcommand, raw);
      break;
    }

    case 'stats': {
      const subcommand = args[1] || 'json';
      commands.cmdStats(cwd, subcommand, raw);
      break;
    }

    case 'todo': {
      const subcommand = args[1];
      if (subcommand === 'complete') {
        commands.cmdTodoComplete(cwd, args[2], raw);
      } else {
        error('Unknown todo subcommand. Available: complete');
      }
      break;
    }

    case 'scaffold': {
      const scaffoldType = args[1];
      const phaseIndex = args.indexOf('--phase');
      const nameIndex = args.indexOf('--name');
      const scaffoldOptions = {
        phase: phaseIndex !== -1 ? args[phaseIndex + 1] : null,
        // --name greedily consumes the rest of argv (multi-word names).
        name: nameIndex !== -1 ? args.slice(nameIndex + 1).join(' ') : null,
      };
      commands.cmdScaffold(cwd, scaffoldType, scaffoldOptions, raw);
      break;
    }

    // Compound workflow initializers: each gathers all context a given
    // GSD workflow needs in one call.
    case 'init': {
      const workflow = args[1];
      switch (workflow) {
        case 'execute-phase':
          init.cmdInitExecutePhase(cwd, args[2], raw);
          break;
        case 'plan-phase':
          init.cmdInitPlanPhase(cwd, args[2], raw);
          break;
        case 'new-project':
          init.cmdInitNewProject(cwd, raw);
          break;
        case 'new-milestone':
          init.cmdInitNewMilestone(cwd, raw);
          break;
        case 'quick':
          init.cmdInitQuick(cwd, args.slice(2).join(' '), raw);
          break;
        case 'resume':
          init.cmdInitResume(cwd, raw);
          break;
        case 'verify-work':
          init.cmdInitVerifyWork(cwd, args[2], raw);
          break;
        case 'phase-op':
          init.cmdInitPhaseOp(cwd, args[2], raw);
          break;
        case 'todos':
          init.cmdInitTodos(cwd, args[2], raw);
          break;
        case 'milestone-op':
          init.cmdInitMilestoneOp(cwd, raw);
          break;
        case 'map-codebase':
          init.cmdInitMapCodebase(cwd, raw);
          break;
        case 'progress':
          init.cmdInitProgress(cwd, raw);
          break;
        default:
          error(`Unknown init workflow: ${workflow}\nAvailable: execute-phase, plan-phase, new-project, new-milestone, quick, resume, verify-work, phase-op, todos, milestone-op, map-codebase, progress`);
      }
      break;
    }

    case 'phase-plan-index': {
      phase.cmdPhasePlanIndex(cwd, args[1], raw);
      break;
    }

    case 'state-snapshot': {
      state.cmdStateSnapshot(cwd, raw);
      break;
    }

    case 'summary-extract': {
      const summaryPath = args[1];
      const fieldsIndex = args.indexOf('--fields');
      // --fields is a comma-separated whitelist; null means "all fields".
      const fields = fieldsIndex !== -1 ? args[fieldsIndex + 1].split(',') : null;
      commands.cmdSummaryExtract(cwd, summaryPath, fields, raw);
      break;
    }

    case 'websearch': {
      const query = args[1];
      const limitIdx = args.indexOf('--limit');
      const freshnessIdx = args.indexOf('--freshness');
      await commands.cmdWebsearch(query, {
        limit: limitIdx !== -1 ? parseInt(args[limitIdx + 1], 10) : 10,
        freshness: freshnessIdx !== -1 ? args[freshnessIdx + 1] : null,
      }, raw);
      break;
    }

    // ─── Profiling Pipeline ────────────────────────────────────────────────

    case 'scan-sessions': {
      const pathIdx = args.indexOf('--path');
      const sessionsPath = pathIdx !== -1 ? args[pathIdx + 1] : null;
      const verboseFlag = args.includes('--verbose');
      const jsonFlag = args.includes('--json');
      await profilePipeline.cmdScanSessions(sessionsPath, { verbose: verboseFlag, json: jsonFlag }, raw);
      break;
    }

    case 'extract-messages': {
      const sessionIdx = args.indexOf('--session');
      const sessionId = sessionIdx !== -1 ? args[sessionIdx + 1] : null;
      const limitIdx = args.indexOf('--limit');
      const limit = limitIdx !== -1 ? parseInt(args[limitIdx + 1], 10) : null;
      const pathIdx = args.indexOf('--path');
      const sessionsPath = pathIdx !== -1 ? args[pathIdx + 1] : null;
      // Project is a required positional; a leading `--` means it was omitted.
      const projectArg = args[1];
      if (!projectArg || projectArg.startsWith('--')) {
        error('Usage: gsd-tools extract-messages <project> [--session <id>] [--limit N] [--path <dir>]\nRun scan-sessions first to see available projects.');
      }
      await profilePipeline.cmdExtractMessages(projectArg, { sessionId, limit }, raw, sessionsPath);
      break;
    }

    case 'profile-sample': {
      const pathIdx = args.indexOf('--path');
      const sessionsPath = pathIdx !== -1 ? args[pathIdx + 1] : null;
      const limitIdx = args.indexOf('--limit');
      const limit = limitIdx !== -1 ? parseInt(args[limitIdx + 1], 10) : 150;
      const maxPerIdx = args.indexOf('--max-per-project');
      const maxPerProject = maxPerIdx !== -1 ? parseInt(args[maxPerIdx + 1], 10) : null;
      const maxCharsIdx = args.indexOf('--max-chars');
      const maxChars = maxCharsIdx !== -1 ? parseInt(args[maxCharsIdx + 1], 10) : 500;
      await profilePipeline.cmdProfileSample(sessionsPath, { limit, maxPerProject, maxChars }, raw);
      break;
    }

    // ─── Profile Output ──────────────────────────────────────────────────

    case 'write-profile': {
      const inputIdx = args.indexOf('--input');
      const inputPath = inputIdx !== -1 ? args[inputIdx + 1] : null;
      if (!inputPath) error('--input <analysis-json-path> is required');
      const outputIdx = args.indexOf('--output');
      const outputPath = outputIdx !== -1 ? args[outputIdx + 1] : null;
      profileOutput.cmdWriteProfile(cwd, { input: inputPath, output: outputPath }, raw);
      break;
    }

    case 'profile-questionnaire': {
      const answersIdx = args.indexOf('--answers');
      const answers = answersIdx !== -1 ? args[answersIdx + 1] : null;
      profileOutput.cmdProfileQuestionnaire({ answers }, raw);
      break;
    }

    case 'generate-dev-preferences': {
      const analysisIdx = args.indexOf('--analysis');
      const analysisPath = analysisIdx !== -1 ? args[analysisIdx + 1] : null;
      const outputIdx = args.indexOf('--output');
      const outputPath = outputIdx !== -1 ? args[outputIdx + 1] : null;
      const stackIdx = args.indexOf('--stack');
      const stack = stackIdx !== -1 ? args[stackIdx + 1] : null;
      profileOutput.cmdGenerateDevPreferences(cwd, { analysis: analysisPath, output: outputPath, stack }, raw);
      break;
    }

    case 'generate-claude-profile': {
      const analysisIdx = args.indexOf('--analysis');
      const analysisPath = analysisIdx !== -1 ? args[analysisIdx + 1] : null;
      const outputIdx = args.indexOf('--output');
      const outputPath = outputIdx !== -1 ? args[outputIdx + 1] : null;
      const globalFlag = args.includes('--global');
      profileOutput.cmdGenerateClaudeProfile(cwd, { analysis: analysisPath, output: outputPath, global: globalFlag }, raw);
      break;
    }

    case 'generate-claude-md': {
      const outputIdx = args.indexOf('--output');
      const outputPath = outputIdx !== -1 ? args[outputIdx + 1] : null;
      const autoFlag = args.includes('--auto');
      const forceFlag = args.includes('--force');
      profileOutput.cmdGenerateClaudeMd(cwd, { output: outputPath, auto: autoFlag, force: forceFlag }, raw);
      break;
    }

    default:
      error(`Unknown command: ${command}`);
  }
}
|
||||
|
||||
// Entry point. main() is async, so a bare `main();` would leave a floating
// promise: any rejection from an awaited command (e.g. websearch) dies as an
// unhandled rejection. Catch it and exit with a clean non-zero status.
main().catch((err) => {
  console.error(err && err.message ? err.message : err);
  process.exit(1);
});
|
||||
709
get-shit-done/bin/lib/commands.cjs
Normal file
709
get-shit-done/bin/lib/commands.cjs
Normal file
@@ -0,0 +1,709 @@
|
||||
/**
|
||||
* Commands — Standalone utility commands
|
||||
*/
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { execSync } = require('child_process');
|
||||
const { safeReadFile, loadConfig, isGitIgnored, execGit, normalizePhaseName, comparePhaseNum, getArchivedPhaseDirs, generateSlugInternal, getMilestoneInfo, getMilestonePhaseFilter, resolveModelInternal, stripShippedMilestones, extractCurrentMilestone, toPosixPath, output, error, findPhaseInternal } = require('./core.cjs');
|
||||
const { extractFrontmatter } = require('./frontmatter.cjs');
|
||||
const { MODEL_PROFILES } = require('./model-profiles.cjs');
|
||||
|
||||
/**
 * Convert arbitrary text into a URL-safe slug: lowercase, collapse every run
 * of non-alphanumeric characters into a single hyphen, and trim hyphens from
 * both ends. Emits `{ slug }` (or the bare slug string in raw mode).
 */
function cmdGenerateSlug(text, raw) {
  if (!text) {
    error('text required for slug generation');
  }

  let slug = text.toLowerCase();
  slug = slug.replace(/[^a-z0-9]+/g, '-');
  slug = slug.replace(/^-+|-+$/g, '');

  output({ slug }, raw, slug);
}
|
||||
|
||||
/**
 * Emit the current time as `{ timestamp }` in one of three shapes:
 *  - 'date'     → ISO date only (YYYY-MM-DD)
 *  - 'filename' → ISO datetime made filesystem-safe (colons → hyphens,
 *                 fractional seconds stripped)
 *  - 'full' / anything else → full ISO-8601 string
 */
function cmdCurrentTimestamp(format, raw) {
  const iso = new Date().toISOString();
  let result;

  if (format === 'date') {
    result = iso.split('T')[0];
  } else if (format === 'filename') {
    result = iso.replace(/:/g, '-').replace(/\..+/, '');
  } else {
    // 'full' and unknown formats both yield the complete ISO string.
    result = iso;
  }

  output({ timestamp: result }, raw, result);
}
|
||||
|
||||
/**
 * Enumerate pending todos under `.planning/todos/pending/`, optionally
 * filtered by `area` (frontmatter `area:` line, defaulting to 'general').
 * Emits `{ count, todos }` where each todo carries file, created, title,
 * area, and a POSIX-style relative path. Listing is best-effort: unreadable
 * files and a missing pending directory are silently skipped.
 */
function cmdListTodos(cwd, area, raw) {
  const pendingDir = path.join(cwd, '.planning', 'todos', 'pending');

  const todos = [];
  let count = 0;

  try {
    const entries = fs.readdirSync(pendingDir).filter((name) => name.endsWith('.md'));

    for (const file of entries) {
      try {
        const content = fs.readFileSync(path.join(pendingDir, file), 'utf-8');

        const areaMatch = content.match(/^area:\s*(.+)$/m);
        const todoArea = areaMatch ? areaMatch[1].trim() : 'general';

        // Apply area filter if specified
        if (area && todoArea !== area) continue;

        const createdMatch = content.match(/^created:\s*(.+)$/m);
        const titleMatch = content.match(/^title:\s*(.+)$/m);

        count += 1;
        todos.push({
          file,
          created: createdMatch ? createdMatch[1].trim() : 'unknown',
          title: titleMatch ? titleMatch[1].trim() : 'Untitled',
          area: todoArea,
          path: toPosixPath(path.join('.planning', 'todos', 'pending', file)),
        });
      } catch {
        // Unreadable todo file — skip it (deliberate best-effort).
      }
    }
  } catch {
    // Pending directory missing or unreadable — report zero todos.
  }

  output({ count, todos }, raw, count.toString());
}
|
||||
|
||||
/**
 * `verify-path-exists` command: reports whether a path exists and whether
 * it is a 'file', 'directory', or 'other'. Relative paths resolve against
 * `cwd`. Never throws — a failed stat is reported as nonexistent.
 */
function cmdVerifyPathExists(cwd, targetPath, raw) {
  if (!targetPath) {
    error('path required for verification');
  }

  const resolved = path.isAbsolute(targetPath)
    ? targetPath
    : path.join(cwd, targetPath);

  let stats = null;
  try {
    stats = fs.statSync(resolved);
  } catch {
    // stat failed (missing path, broken symlink, permissions) — nonexistent.
  }

  if (!stats) {
    output({ exists: false, type: null }, raw, 'false');
    return;
  }

  let type = 'other';
  if (stats.isDirectory()) {
    type = 'directory';
  } else if (stats.isFile()) {
    type = 'file';
  }
  output({ exists: true, type }, raw, 'true');
}
|
||||
|
||||
/**
 * `history-digest` command: aggregates frontmatter from every SUMMARY.md
 * across archived and current phase directories into one digest:
 * per-phase provides/affects/patterns, a flat list of key decisions,
 * and the combined tech stack. Malformed summaries are skipped silently.
 */
function cmdHistoryDigest(cwd, raw) {
  const phasesDir = path.join(cwd, '.planning', 'phases');
  // Sets dedupe while merging; they are converted to arrays before output.
  const digest = { phases: {}, decisions: [], tech_stack: new Set() };

  // Collect all phase directories: archived + current
  const allPhaseDirs = [];

  // Add archived phases first (oldest milestones first)
  const archived = getArchivedPhaseDirs(cwd);
  for (const a of archived) {
    allPhaseDirs.push({ name: a.name, fullPath: a.fullPath, milestone: a.milestone });
  }

  // Add current phases
  if (fs.existsSync(phasesDir)) {
    try {
      const currentDirs = fs.readdirSync(phasesDir, { withFileTypes: true })
        .filter(e => e.isDirectory())
        .map(e => e.name)
        .sort();
      for (const dir of currentDirs) {
        allPhaseDirs.push({ name: dir, fullPath: path.join(phasesDir, dir), milestone: null });
      }
    } catch {}
  }

  if (allPhaseDirs.length === 0) {
    // Nothing to digest — emit an empty digest (tech_stack as array, not Set).
    digest.tech_stack = [];
    output(digest, raw);
    return;
  }

  try {
    for (const { name: dir, fullPath: dirPath } of allPhaseDirs) {
      // Accept both "NN-SUMMARY.md" and bare "SUMMARY.md" naming.
      const summaries = fs.readdirSync(dirPath).filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');

      for (const summary of summaries) {
        try {
          const content = fs.readFileSync(path.join(dirPath, summary), 'utf-8');
          const fm = extractFrontmatter(content);

          // Phase number comes from frontmatter, else the directory's "NN-" prefix.
          const phaseNum = fm.phase || dir.split('-')[0];

          if (!digest.phases[phaseNum]) {
            digest.phases[phaseNum] = {
              name: fm.name || dir.split('-').slice(1).join(' ') || 'Unknown',
              provides: new Set(),
              affects: new Set(),
              patterns: new Set(),
            };
          }

          // Merge provides — the nested 'dependency-graph' layout takes
          // precedence over the legacy top-level 'provides' key.
          if (fm['dependency-graph'] && fm['dependency-graph'].provides) {
            fm['dependency-graph'].provides.forEach(p => digest.phases[phaseNum].provides.add(p));
          } else if (fm.provides) {
            fm.provides.forEach(p => digest.phases[phaseNum].provides.add(p));
          }

          // Merge affects
          if (fm['dependency-graph'] && fm['dependency-graph'].affects) {
            fm['dependency-graph'].affects.forEach(a => digest.phases[phaseNum].affects.add(a));
          }

          // Merge patterns
          if (fm['patterns-established']) {
            fm['patterns-established'].forEach(p => digest.phases[phaseNum].patterns.add(p));
          }

          // Merge decisions (kept flat, tagged with their phase number)
          if (fm['key-decisions']) {
            fm['key-decisions'].forEach(d => {
              digest.decisions.push({ phase: phaseNum, decision: d });
            });
          }

          // Merge tech stack — entries may be plain strings or { name } objects.
          if (fm['tech-stack'] && fm['tech-stack'].added) {
            fm['tech-stack'].added.forEach(t => digest.tech_stack.add(typeof t === 'string' ? t : t.name));
          }

        } catch (e) {
          // Skip malformed summaries
        }
      }
    }

    // Convert Sets to Arrays for JSON output
    Object.keys(digest.phases).forEach(p => {
      digest.phases[p].provides = [...digest.phases[p].provides];
      digest.phases[p].affects = [...digest.phases[p].affects];
      digest.phases[p].patterns = [...digest.phases[p].patterns];
    });
    digest.tech_stack = [...digest.tech_stack];

    output(digest, raw);
  } catch (e) {
    error('Failed to generate history digest: ' + e.message);
  }
}
|
||||
|
||||
/**
 * `resolve-model` command: resolves the model for an agent type based on
 * the configured model profile. Agent types missing from MODEL_PROFILES
 * are flagged with `unknown_agent: true` in the result.
 */
function cmdResolveModel(cwd, agentType, raw) {
  if (!agentType) {
    error('agent-type required');
  }

  const config = loadConfig(cwd);
  const profile = config.model_profile || 'balanced';
  const model = resolveModelInternal(cwd, agentType);

  const result = { model, profile };
  if (!MODEL_PROFILES[agentType]) {
    result.unknown_agent = true; // agent type not in the profile table
  }
  output(result, raw, model);
}
|
||||
|
||||
/**
 * `commit` command: stages and commits planning docs.
 *
 * Skips (without committing) when the `commit_docs` config flag is false
 * or when `.planning` is gitignored. Stages either the provided `files`
 * or the whole `.planning/` directory, then commits with `message` (or
 * amends the previous commit when `amend` is truthy).
 *
 * Result shape: { committed, hash, reason, [error] }.
 *
 * Fix: a commit failure that is NOT "nothing to commit" was previously
 * reported with reason 'nothing_to_commit' (copy-paste of the branch
 * above) and raw output 'nothing', masking real git errors. It is now
 * reported as 'commit_failed' with git's stderr attached.
 */
function cmdCommit(cwd, message, files, raw, amend) {
  if (!message && !amend) {
    error('commit message required');
  }

  const config = loadConfig(cwd);

  // Check commit_docs config
  if (!config.commit_docs) {
    const result = { committed: false, hash: null, reason: 'skipped_commit_docs_false' };
    output(result, raw, 'skipped');
    return;
  }

  // Check if .planning is gitignored
  if (isGitIgnored(cwd, '.planning')) {
    const result = { committed: false, hash: null, reason: 'skipped_gitignored' };
    output(result, raw, 'skipped');
    return;
  }

  // Stage files (default: everything under .planning/)
  const filesToStage = files && files.length > 0 ? files : ['.planning/'];
  for (const file of filesToStage) {
    execGit(cwd, ['add', file]);
  }

  // Commit (or amend the previous commit)
  const commitArgs = amend ? ['commit', '--amend', '--no-edit'] : ['commit', '-m', message];
  const commitResult = execGit(cwd, commitArgs);
  if (commitResult.exitCode !== 0) {
    if (commitResult.stdout.includes('nothing to commit') || commitResult.stderr.includes('nothing to commit')) {
      const result = { committed: false, hash: null, reason: 'nothing_to_commit' };
      output(result, raw, 'nothing');
      return;
    }
    // Real failure: surface git's stderr instead of mislabeling it.
    const result = { committed: false, hash: null, reason: 'commit_failed', error: commitResult.stderr };
    output(result, raw, 'failed');
    return;
  }

  // Get short hash of the new commit
  const hashResult = execGit(cwd, ['rev-parse', '--short', 'HEAD']);
  const hash = hashResult.exitCode === 0 ? hashResult.stdout : null;
  const result = { committed: true, hash, reason: 'committed' };
  output(result, raw, hash || 'committed');
}
|
||||
|
||||
/**
 * `summary-extract` command: reads a SUMMARY.md file and emits selected
 * frontmatter fields in a normalized shape. With a `fields` list, only
 * those fields (plus `path`) are emitted. A missing file yields an
 * { error: 'File not found', ... } result rather than an exit.
 */
function cmdSummaryExtract(cwd, summaryPath, fields, raw) {
  if (!summaryPath) {
    error('summary-path required for summary-extract');
  }

  const fullPath = path.join(cwd, summaryPath);
  if (!fs.existsSync(fullPath)) {
    output({ error: 'File not found', path: summaryPath }, raw);
    return;
  }

  const fm = extractFrontmatter(fs.readFileSync(fullPath, 'utf-8'));

  // Split "Summary: rationale" decision strings into structured entries.
  const toDecision = (entry) => {
    const colonIdx = entry.indexOf(':');
    if (colonIdx > 0) {
      return {
        summary: entry.substring(0, colonIdx).trim(),
        rationale: entry.substring(colonIdx + 1).trim(),
      };
    }
    return { summary: entry, rationale: null };
  };
  const rawDecisions = fm['key-decisions'];
  const decisions = Array.isArray(rawDecisions) ? rawDecisions.map(toDecision) : [];

  // Normalized full result; every field has a stable default.
  const fullResult = {
    path: summaryPath,
    one_liner: fm['one-liner'] || null,
    key_files: fm['key-files'] || [],
    tech_added: (fm['tech-stack'] && fm['tech-stack'].added) || [],
    patterns: fm['patterns-established'] || [],
    decisions,
    requirements_completed: fm['requirements-completed'] || [],
  };

  // When an explicit field list is given, emit only those (path always kept).
  if (fields && fields.length > 0) {
    const filtered = { path: summaryPath };
    for (const field of fields) {
      if (fullResult[field] !== undefined) {
        filtered[field] = fullResult[field];
      }
    }
    output(filtered, raw);
    return;
  }

  output(fullResult, raw);
}
|
||||
|
||||
/**
 * `websearch` command: queries the Brave Search API when BRAVE_API_KEY is
 * set; otherwise reports { available: false } so the caller can fall back
 * to its built-in WebSearch. All failure modes (missing key, missing
 * query, HTTP error, network error) degrade to { available: false, ... }
 * instead of throwing.
 */
async function cmdWebsearch(query, options, raw) {
  const apiKey = process.env.BRAVE_API_KEY;

  if (!apiKey) {
    // No key = silent skip, agent falls back to built-in WebSearch
    output({ available: false, reason: 'BRAVE_API_KEY not set' }, raw, '');
    return;
  }

  if (!query) {
    output({ available: false, error: 'Query required' }, raw, '');
    return;
  }

  // Build the query string safely via URLSearchParams.
  const params = new URLSearchParams({
    q: query,
    count: String(options.limit || 10),
    country: 'us',
    search_lang: 'en',
    text_decorations: 'false'
  });
  if (options.freshness) {
    params.set('freshness', options.freshness);
  }

  try {
    const res = await fetch(
      `https://api.search.brave.com/res/v1/web/search?${params}`,
      {
        headers: {
          'Accept': 'application/json',
          'X-Subscription-Token': apiKey
        }
      }
    );

    if (!res.ok) {
      output({ available: false, error: `API error: ${res.status}` }, raw, '');
      return;
    }

    const body = await res.json();

    // Keep only the fields agents need from each hit.
    const results = (body.web?.results || []).map((r) => ({
      title: r.title,
      url: r.url,
      description: r.description,
      age: r.age || null
    }));

    const plainText = results
      .map((r) => `${r.title}\n${r.url}\n${r.description}`)
      .join('\n\n');
    output({ available: true, query, count: results.length, results }, raw, plainText);
  } catch (err) {
    output({ available: false, error: err.message }, raw, '');
  }
}
|
||||
|
||||
/**
 * `progress` command: renders milestone progress from plan/summary file
 * counts under .planning/phases. `format` selects 'table' (markdown table
 * with a 10-char bar), 'bar' (a 20-char bar line), or default JSON.
 * NOTE(review): `roadmapPath` is computed but never read here — confirm
 * whether it is vestigial.
 */
function cmdProgressRender(cwd, format, raw) {
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  const milestone = getMilestoneInfo(cwd);

  const phases = [];
  let totalPlans = 0;
  let totalSummaries = 0;

  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort((a, b) => comparePhaseNum(a, b));

    for (const dir of dirs) {
      // Split "NN-some-name" into phase number and human-readable name.
      const dm = dir.match(/^(\d+(?:\.\d+)*)-?(.*)/);
      const phaseNum = dm ? dm[1] : dir;
      const phaseName = dm && dm[2] ? dm[2].replace(/-/g, ' ') : '';
      const phaseFiles = fs.readdirSync(path.join(phasesDir, dir));
      const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md').length;
      const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md').length;

      totalPlans += plans;
      totalSummaries += summaries;

      // Status is derived purely from plan/summary counts.
      let status;
      if (plans === 0) status = 'Pending';
      else if (summaries >= plans) status = 'Complete';
      else if (summaries > 0) status = 'In Progress';
      else status = 'Planned';

      phases.push({ number: phaseNum, name: phaseName, plans, summaries, status });
    }
  } catch {} // missing phases dir → empty progress

  const percent = totalPlans > 0 ? Math.min(100, Math.round((totalSummaries / totalPlans) * 100)) : 0;

  if (format === 'table') {
    // Render markdown table
    const barWidth = 10;
    const filled = Math.round((percent / 100) * barWidth);
    const bar = '\u2588'.repeat(filled) + '\u2591'.repeat(barWidth - filled); // █/░ progress bar
    let out = `# ${milestone.version} ${milestone.name}\n\n`;
    out += `**Progress:** [${bar}] ${totalSummaries}/${totalPlans} plans (${percent}%)\n\n`;
    out += `| Phase | Name | Plans | Status |\n`;
    out += `|-------|------|-------|--------|\n`;
    for (const p of phases) {
      out += `| ${p.number} | ${p.name} | ${p.summaries}/${p.plans} | ${p.status} |\n`;
    }
    output({ rendered: out }, raw, out);
  } else if (format === 'bar') {
    const barWidth = 20;
    const filled = Math.round((percent / 100) * barWidth);
    const bar = '\u2588'.repeat(filled) + '\u2591'.repeat(barWidth - filled);
    const text = `[${bar}] ${totalSummaries}/${totalPlans} plans (${percent}%)`;
    output({ bar: text, percent, completed: totalSummaries, total: totalPlans }, raw, text);
  } else {
    // JSON format
    output({
      milestone_version: milestone.version,
      milestone_name: milestone.name,
      phases,
      total_plans: totalPlans,
      total_summaries: totalSummaries,
      percent,
    }, raw);
  }
}
|
||||
|
||||
/**
 * `todo complete` command: moves a pending todo file from
 * .planning/todos/pending to .planning/todos/completed, prepending a
 * `completed: YYYY-MM-DD` line to the file content.
 *
 * Fix: the not-found error previously printed the literal text
 * `$(unknown)` (shell-style substitution, never expanded in JS) instead
 * of the filename; it now interpolates `${filename}`.
 *
 * NOTE(review): the completion line is prepended before the file's first
 * line; if todos carry `---` YAML frontmatter this lands outside the
 * frontmatter block — confirm downstream readers accept that.
 */
function cmdTodoComplete(cwd, filename, raw) {
  if (!filename) {
    error('filename required for todo complete');
  }

  const pendingDir = path.join(cwd, '.planning', 'todos', 'pending');
  const completedDir = path.join(cwd, '.planning', 'todos', 'completed');
  const sourcePath = path.join(pendingDir, filename);

  if (!fs.existsSync(sourcePath)) {
    error(`Todo not found: ${filename}`);
  }

  // Ensure completed directory exists
  fs.mkdirSync(completedDir, { recursive: true });

  // Read, add completion timestamp, move
  let content = fs.readFileSync(sourcePath, 'utf-8');
  const today = new Date().toISOString().split('T')[0];
  content = `completed: ${today}\n` + content;

  fs.writeFileSync(path.join(completedDir, filename), content, 'utf-8');
  fs.unlinkSync(sourcePath);

  output({ completed: true, file: filename, date: today }, raw, 'completed');
}
|
||||
|
||||
/**
 * `scaffold` command: creates a planning artifact from a template.
 * Supported types: 'context', 'uat', 'verification' (files inside an
 * existing phase directory) and 'phase-dir' (a new phase directory).
 * Existing files are never overwritten — reports { created: false,
 * reason: 'already_exists' } instead.
 */
function cmdScaffold(cwd, type, options, raw) {
  const { phase, name } = options;
  const padded = phase ? normalizePhaseName(phase) : '00';
  const today = new Date().toISOString().split('T')[0];

  // Find phase directory (not required for 'phase-dir', which creates one)
  const phaseInfo = phase ? findPhaseInternal(cwd, phase) : null;
  const phaseDir = phaseInfo ? path.join(cwd, phaseInfo.directory) : null;

  if (phase && !phaseDir && type !== 'phase-dir') {
    error(`Phase ${phase} directory not found`);
  }

  let filePath, content;

  switch (type) {
    case 'context': {
      // Decision/discretion capture doc for /gsd:discuss-phase.
      filePath = path.join(phaseDir, `${padded}-CONTEXT.md`);
      content = `---\nphase: "${padded}"\nname: "${name || phaseInfo?.phase_name || 'Unnamed'}"\ncreated: ${today}\n---\n\n# Phase ${phase}: ${name || phaseInfo?.phase_name || 'Unnamed'} — Context\n\n## Decisions\n\n_Decisions will be captured during /gsd:discuss-phase ${phase}_\n\n## Discretion Areas\n\n_Areas where the executor can use judgment_\n\n## Deferred Ideas\n\n_Ideas to consider later_\n`;
      break;
    }
    case 'uat': {
      // User-acceptance-testing results table, starts in 'pending' status.
      filePath = path.join(phaseDir, `${padded}-UAT.md`);
      content = `---\nphase: "${padded}"\nname: "${name || phaseInfo?.phase_name || 'Unnamed'}"\ncreated: ${today}\nstatus: pending\n---\n\n# Phase ${phase}: ${name || phaseInfo?.phase_name || 'Unnamed'} — User Acceptance Testing\n\n## Test Results\n\n| # | Test | Status | Notes |\n|---|------|--------|-------|\n\n## Summary\n\n_Pending UAT_\n`;
      break;
    }
    case 'verification': {
      // Goal-backward verification checklist, starts in 'pending' status.
      filePath = path.join(phaseDir, `${padded}-VERIFICATION.md`);
      content = `---\nphase: "${padded}"\nname: "${name || phaseInfo?.phase_name || 'Unnamed'}"\ncreated: ${today}\nstatus: pending\n---\n\n# Phase ${phase}: ${name || phaseInfo?.phase_name || 'Unnamed'} — Verification\n\n## Goal-Backward Verification\n\n**Phase Goal:** [From ROADMAP.md]\n\n## Checks\n\n| # | Requirement | Status | Evidence |\n|---|------------|--------|----------|\n\n## Result\n\n_Pending verification_\n`;
      break;
    }
    case 'phase-dir': {
      // Creates .planning/phases/NN-slug and returns immediately.
      if (!phase || !name) {
        error('phase and name required for phase-dir scaffold');
      }
      const slug = generateSlugInternal(name);
      const dirName = `${padded}-${slug}`;
      const phasesParent = path.join(cwd, '.planning', 'phases');
      fs.mkdirSync(phasesParent, { recursive: true });
      const dirPath = path.join(phasesParent, dirName);
      fs.mkdirSync(dirPath, { recursive: true });
      output({ created: true, directory: `.planning/phases/${dirName}`, path: dirPath }, raw, dirPath);
      return;
    }
    default:
      error(`Unknown scaffold type: ${type}. Available: context, uat, verification, phase-dir`);
  }

  // Never clobber an existing artifact.
  if (fs.existsSync(filePath)) {
    output({ created: false, reason: 'already_exists', path: filePath }, raw, 'exists');
    return;
  }

  fs.writeFileSync(filePath, content, 'utf-8');
  const relPath = toPosixPath(path.relative(cwd, filePath));
  output({ created: true, path: relPath }, raw, relPath);
}
|
||||
|
||||
/**
 * `stats` command: aggregates milestone statistics — phases (merged from
 * ROADMAP.md headings and on-disk phase directories), plan/summary
 * counts, requirements-checkbox completion, last activity from STATE.md,
 * and basic git history stats. `format === 'table'` renders markdown;
 * anything else emits the JSON result.
 */
function cmdStats(cwd, format, raw) {
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  const reqPath = path.join(cwd, '.planning', 'REQUIREMENTS.md');
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  const milestone = getMilestoneInfo(cwd);
  const isDirInMilestone = getMilestonePhaseFilter(cwd);

  // Phase & plan stats (reuse progress pattern)
  const phasesByNumber = new Map();
  let totalPlans = 0;
  let totalSummaries = 0;

  // Seed phases from ROADMAP.md headings (## / ### / #### "Phase N: Name"),
  // so roadmap-only phases appear as 'Not Started'.
  try {
    const roadmapContent = extractCurrentMilestone(fs.readFileSync(roadmapPath, 'utf-8'), cwd);
    const headingPattern = /#{2,4}\s*Phase\s+(\d+[A-Z]?(?:\.\d+)*)\s*:\s*([^\n]+)/gi;
    let match;
    while ((match = headingPattern.exec(roadmapContent)) !== null) {
      phasesByNumber.set(match[1], {
        number: match[1],
        name: match[2].replace(/\(INSERTED\)/i, '').trim(),
        plans: 0,
        summaries: 0,
        status: 'Not Started',
      });
    }
  } catch {}

  // Overlay on-disk phase directories (filtered to the current milestone);
  // disk data overwrites the roadmap seed but keeps the roadmap name.
  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries
      .filter(e => e.isDirectory())
      .map(e => e.name)
      .filter(isDirInMilestone)
      .sort((a, b) => comparePhaseNum(a, b));

    for (const dir of dirs) {
      const dm = dir.match(/^(\d+[A-Z]?(?:\.\d+)*)-?(.*)/i);
      const phaseNum = dm ? dm[1] : dir;
      const phaseName = dm && dm[2] ? dm[2].replace(/-/g, ' ') : '';
      const phaseFiles = fs.readdirSync(path.join(phasesDir, dir));
      const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md').length;
      const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md').length;

      totalPlans += plans;
      totalSummaries += summaries;

      // Status derived purely from plan/summary counts.
      let status;
      if (plans === 0) status = 'Not Started';
      else if (summaries >= plans) status = 'Complete';
      else if (summaries > 0) status = 'In Progress';
      else status = 'Planned';

      const existing = phasesByNumber.get(phaseNum);
      phasesByNumber.set(phaseNum, {
        number: phaseNum,
        name: existing?.name || phaseName,
        plans,
        summaries,
        status,
      });
    }
  } catch {}

  const phases = [...phasesByNumber.values()].sort((a, b) => comparePhaseNum(a.number, b.number));
  const completedPhases = phases.filter(p => p.status === 'Complete').length;
  const planPercent = totalPlans > 0 ? Math.min(100, Math.round((totalSummaries / totalPlans) * 100)) : 0;
  const percent = phases.length > 0 ? Math.min(100, Math.round((completedPhases / phases.length) * 100)) : 0;

  // Requirements stats — counts "- [x] **" / "- [ ] **" checkbox lines.
  let requirementsTotal = 0;
  let requirementsComplete = 0;
  try {
    if (fs.existsSync(reqPath)) {
      const reqContent = fs.readFileSync(reqPath, 'utf-8');
      const checked = reqContent.match(/^- \[x\] \*\*/gm);
      const unchecked = reqContent.match(/^- \[ \] \*\*/gm);
      requirementsComplete = checked ? checked.length : 0;
      requirementsTotal = requirementsComplete + (unchecked ? unchecked.length : 0);
    }
  } catch {}

  // Last activity from STATE.md — several historical field spellings are tried.
  let lastActivity = null;
  try {
    if (fs.existsSync(statePath)) {
      const stateContent = fs.readFileSync(statePath, 'utf-8');
      const activityMatch = stateContent.match(/^last_activity:\s*(.+)$/im)
        || stateContent.match(/\*\*Last Activity:\*\*\s*(.+)/i)
        || stateContent.match(/^Last Activity:\s*(.+)$/im)
        || stateContent.match(/^Last activity:\s*(.+)$/im);
      if (activityMatch) lastActivity = activityMatch[1].trim();
    }
  } catch {}

  // Git stats: total commit count and date of the first (root) commit.
  let gitCommits = 0;
  let gitFirstCommitDate = null;
  const commitCount = execGit(cwd, ['rev-list', '--count', 'HEAD']);
  if (commitCount.exitCode === 0) {
    gitCommits = parseInt(commitCount.stdout, 10) || 0;
  }
  const rootHash = execGit(cwd, ['rev-list', '--max-parents=0', 'HEAD']);
  if (rootHash.exitCode === 0 && rootHash.stdout) {
    const firstCommit = rootHash.stdout.split('\n')[0].trim();
    const firstDate = execGit(cwd, ['show', '-s', '--format=%as', firstCommit]);
    if (firstDate.exitCode === 0) {
      gitFirstCommitDate = firstDate.stdout || null;
    }
  }

  const result = {
    milestone_version: milestone.version,
    milestone_name: milestone.name,
    phases,
    phases_completed: completedPhases,
    phases_total: phases.length,
    total_plans: totalPlans,
    total_summaries: totalSummaries,
    percent,
    plan_percent: planPercent,
    requirements_total: requirementsTotal,
    requirements_complete: requirementsComplete,
    git_commits: gitCommits,
    git_first_commit_date: gitFirstCommitDate,
    last_activity: lastActivity,
  };

  if (format === 'table') {
    // Markdown rendering with a 10-character █/░ progress bar.
    const barWidth = 10;
    const filled = Math.round((percent / 100) * barWidth);
    const bar = '\u2588'.repeat(filled) + '\u2591'.repeat(barWidth - filled);
    let out = `# ${milestone.version} ${milestone.name} \u2014 Statistics\n\n`;
    out += `**Progress:** [${bar}] ${completedPhases}/${phases.length} phases (${percent}%)\n`;
    if (totalPlans > 0) {
      out += `**Plans:** ${totalSummaries}/${totalPlans} complete (${planPercent}%)\n`;
    }
    out += `**Phases:** ${completedPhases}/${phases.length} complete\n`;
    if (requirementsTotal > 0) {
      out += `**Requirements:** ${requirementsComplete}/${requirementsTotal} complete\n`;
    }
    out += '\n';
    out += `| Phase | Name | Plans | Completed | Status |\n`;
    out += `|-------|------|-------|-----------|--------|\n`;
    for (const p of phases) {
      out += `| ${p.number} | ${p.name} | ${p.plans} | ${p.summaries} | ${p.status} |\n`;
    }
    if (gitCommits > 0) {
      out += `\n**Git:** ${gitCommits} commits`;
      if (gitFirstCommitDate) out += ` (since ${gitFirstCommitDate})`;
      out += '\n';
    }
    if (lastActivity) out += `**Last activity:** ${lastActivity}\n`;
    output({ rendered: out }, raw, out);
  } else {
    output(result, raw);
  }
}
|
||||
|
||||
// Public command surface consumed by the gsd-tools.cjs CLI dispatcher.
module.exports = {
  cmdGenerateSlug,
  cmdCurrentTimestamp,
  cmdListTodos,
  cmdVerifyPathExists,
  cmdHistoryDigest,
  cmdResolveModel,
  cmdCommit,
  cmdSummaryExtract,
  cmdWebsearch,
  cmdProgressRender,
  cmdTodoComplete,
  cmdScaffold,
  cmdStats,
};
|
||||
307
get-shit-done/bin/lib/config.cjs
Normal file
307
get-shit-done/bin/lib/config.cjs
Normal file
@@ -0,0 +1,307 @@
|
||||
/**
|
||||
* Config — Planning config CRUD operations
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { output, error } = require('./core.cjs');
|
||||
const {
|
||||
VALID_PROFILES,
|
||||
getAgentToModelMapForProfile,
|
||||
formatAgentToModelMapAsTable,
|
||||
} = require('./model-profiles.cjs');
|
||||
|
||||
// Whitelist of key paths accepted by `config-set` (dot notation for
// nested keys, e.g. 'workflow.research'). Anything outside this set is
// rejected with an error listing the valid keys.
const VALID_CONFIG_KEYS = new Set([
  'mode', 'granularity', 'parallelization', 'commit_docs', 'model_profile',
  'search_gitignored', 'brave_search',
  'workflow.research', 'workflow.plan_check', 'workflow.verifier',
  'workflow.nyquist_validation', 'workflow.ui_phase', 'workflow.ui_safety_gate',
  'workflow._auto_chain_active',
  'git.branching_strategy', 'git.phase_branch_template', 'git.milestone_branch_template',
  'planning.commit_docs', 'planning.search_gitignored',
]);
|
||||
|
||||
// Known misremembered/deprecated key paths mapped to the canonical key;
// used by validateKnownConfigKeyPath to produce "Did you mean …?" errors.
const CONFIG_KEY_SUGGESTIONS = {
  'workflow.nyquist_validation_enabled': 'workflow.nyquist_validation',
  'agents.nyquist_validation_enabled': 'workflow.nyquist_validation',
  'nyquist.validation_enabled': 'workflow.nyquist_validation',
};
|
||||
|
||||
/**
 * Rejects (via error(), which exits the process) key paths matching a
 * known misremembered spelling, pointing at the canonical key. Keys with
 * no suggestion pass through untouched — the caller checks
 * VALID_CONFIG_KEYS separately.
 */
function validateKnownConfigKeyPath(keyPath) {
  const replacement = CONFIG_KEY_SUGGESTIONS[keyPath];
  if (!replacement) return;
  error(`Unknown config key: ${keyPath}. Did you mean ${replacement}?`);
}
|
||||
|
||||
/**
|
||||
* Ensures the config file exists (creates it if needed).
|
||||
*
|
||||
* Does not call `output()`, so can be used as one step in a command without triggering `exit(0)` in
|
||||
* the happy path. But note that `error()` will still `exit(1)` out of the process.
|
||||
*/
|
||||
function ensureConfigFile(cwd) {
  const configPath = path.join(cwd, '.planning', 'config.json');
  const planningDir = path.join(cwd, '.planning');

  // Ensure .planning directory exists
  try {
    if (!fs.existsSync(planningDir)) {
      fs.mkdirSync(planningDir, { recursive: true });
    }
  } catch (err) {
    error('Failed to create .planning directory: ' + err.message);
  }

  // Check if config already exists — never overwrite an existing config.
  if (fs.existsSync(configPath)) {
    return { created: false, reason: 'already_exists' };
  }

  // Detect Brave Search API key availability (env var or ~/.gsd key file)
  const homedir = require('os').homedir();
  const braveKeyFile = path.join(homedir, '.gsd', 'brave_api_key');
  const hasBraveSearch = !!(process.env.BRAVE_API_KEY || fs.existsSync(braveKeyFile));

  // Load user-level defaults from ~/.gsd/defaults.json if available
  const globalDefaultsPath = path.join(homedir, '.gsd', 'defaults.json');
  let userDefaults = {};
  try {
    if (fs.existsSync(globalDefaultsPath)) {
      userDefaults = JSON.parse(fs.readFileSync(globalDefaultsPath, 'utf-8'));
      // Migrate deprecated "depth" key to "granularity"
      if ('depth' in userDefaults && !('granularity' in userDefaults)) {
        const depthToGranularity = { quick: 'coarse', standard: 'standard', comprehensive: 'fine' };
        // Unrecognized depth values are carried over verbatim.
        userDefaults.granularity = depthToGranularity[userDefaults.depth] || userDefaults.depth;
        delete userDefaults.depth;
        // Persist the migration back to the user's defaults file (best-effort).
        try {
          fs.writeFileSync(globalDefaultsPath, JSON.stringify(userDefaults, null, 2), 'utf-8');
        } catch {}
      }
    }
  } catch (err) {
    // Ignore malformed global defaults, fall back to hardcoded
  }

  // Create default config (user-level defaults override hardcoded defaults)
  const hardcoded = {
    model_profile: 'balanced',
    commit_docs: true,
    search_gitignored: false,
    branching_strategy: 'none',
    phase_branch_template: 'gsd/phase-{phase}-{slug}',
    milestone_branch_template: 'gsd/{milestone}-{slug}',
    workflow: {
      research: true,
      plan_check: true,
      verifier: true,
      nyquist_validation: true,
    },
    parallelization: true,
    brave_search: hasBraveSearch,
  };
  // Shallow merge, except `workflow` which is merged one level deep so a
  // partial user workflow block does not wipe the other workflow flags.
  const defaults = {
    ...hardcoded,
    ...userDefaults,
    workflow: { ...hardcoded.workflow, ...(userDefaults.workflow || {}) },
  };

  try {
    fs.writeFileSync(configPath, JSON.stringify(defaults, null, 2), 'utf-8');
    return { created: true, path: '.planning/config.json' };
  } catch (err) {
    error('Failed to create config.json: ' + err.message);
  }
}
|
||||
|
||||
/**
|
||||
* Command to ensure the config file exists (creates it if needed).
|
||||
*
|
||||
* Note that this exits the process (via `output()`) even in the happy path; use
|
||||
* `ensureConfigFile()` directly if you need to avoid this.
|
||||
*/
|
||||
/**
 * Command wrapper around ensureConfigFile(): creates .planning/config.json
 * when missing and reports 'created' or 'exists'. Exits via output().
 */
function cmdConfigEnsureSection(cwd, raw) {
  const result = ensureConfigFile(cwd);
  output(result, raw, result.created ? 'created' : 'exists');
}
|
||||
|
||||
/**
|
||||
* Sets a value in the config file, allowing nested values via dot notation (e.g.,
|
||||
* "workflow.research").
|
||||
*
|
||||
* Does not call `output()`, so can be used as one step in a command without triggering `exit(0)` in
|
||||
* the happy path. But note that `error()` will still `exit(1)` out of the process.
|
||||
*/
|
||||
/**
 * Sets a value in .planning/config.json, creating intermediate objects
 * along a dot-notation path (e.g. "workflow.research").
 *
 * Returns { updated, key, value, previousValue }; read/write failures go
 * through error(), which exits the process.
 *
 * Fix: `typeof null === 'object'`, so a null intermediate value (e.g.
 * {"workflow": null}) previously survived the object check and the next
 * loop iteration crashed with a TypeError; null intermediates are now
 * replaced with a fresh object like any other non-object.
 */
function setConfigValue(cwd, keyPath, parsedValue) {
  const configPath = path.join(cwd, '.planning', 'config.json');

  // Load existing config or start with empty object
  let config = {};
  try {
    if (fs.existsSync(configPath)) {
      config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
    }
  } catch (err) {
    error('Failed to read config.json: ' + err.message);
  }

  // Walk/create intermediate objects along the dot-notation path.
  const keys = keyPath.split('.');
  let current = config;
  for (let i = 0; i < keys.length - 1; i++) {
    const key = keys[i];
    if (current[key] === null || current[key] === undefined || typeof current[key] !== 'object') {
      current[key] = {};
    }
    current = current[key];
  }
  const leaf = keys[keys.length - 1];
  const previousValue = current[leaf]; // Capture previous value before overwriting
  current[leaf] = parsedValue;

  // Write back
  try {
    fs.writeFileSync(configPath, JSON.stringify(config, null, 2), 'utf-8');
    return { updated: true, key: keyPath, value: parsedValue, previousValue };
  } catch (err) {
    error('Failed to write config.json: ' + err.message);
  }
}
|
||||
|
||||
/**
 * Command to set a value in the config file, allowing nested values via dot notation (e.g.,
 * "workflow.research").
 *
 * Note that this exits the process (via `output()`) even in the happy path; use `setConfigValue()`
 * directly if you need to avoid this.
 */
function cmdConfigSet(cwd, keyPath, value, raw) {
  if (!keyPath) {
    error('Usage: config-set <key.path> <value>');
  }
  // A missing value would previously be written as `undefined`, which JSON.stringify
  // silently drops — the command reported success while changing nothing.
  if (value === undefined) {
    error('Usage: config-set <key.path> <value>');
  }

  validateKnownConfigKeyPath(keyPath);

  // NOTE(review): this looks redundant with validateKnownConfigKeyPath() above —
  // confirm whether both checks are needed; keeping both is harmless.
  if (!VALID_CONFIG_KEYS.has(keyPath)) {
    error(`Unknown config key: "${keyPath}". Valid keys: ${[...VALID_CONFIG_KEYS].sort().join(', ')}`);
  }

  // Parse value (handle booleans and numbers). Number.isNaN(Number(...)) replaces the
  // coercing global isNaN for an explicit, equivalent string->number check.
  let parsedValue = value;
  if (value === 'true') parsedValue = true;
  else if (value === 'false') parsedValue = false;
  else if (value !== '' && !Number.isNaN(Number(value))) parsedValue = Number(value);

  const setConfigValueResult = setConfigValue(cwd, keyPath, parsedValue);
  output(setConfigValueResult, raw, `${keyPath}=${parsedValue}`);
}
|
||||
|
||||
/**
 * Command to read a (possibly nested, dot-notation) value from .planning/config.json
 * and print it via output(). Exits 1 when the file or key is missing.
 */
function cmdConfigGet(cwd, keyPath, raw) {
  const configPath = path.join(cwd, '.planning', 'config.json');

  if (!keyPath) {
    error('Usage: config-get <key.path>');
  }

  // Check existence before the try block. error() exits the process rather than
  // throwing, so the old in-try error('No config.json...') plus
  // "if (err.message.startsWith('No config.json')) throw err" in the catch was
  // dead code that could never fire.
  if (!fs.existsSync(configPath)) {
    error('No config.json found at ' + configPath);
  }

  let config = {};
  try {
    config = JSON.parse(fs.readFileSync(configPath, 'utf-8'));
  } catch (err) {
    error('Failed to read config.json: ' + err.message);
  }

  // Traverse dot-notation path (e.g., "workflow.auto_advance")
  const keys = keyPath.split('.');
  let current = config;
  for (const key of keys) {
    // Can't descend into a primitive/null — the requested path doesn't exist.
    if (current === undefined || current === null || typeof current !== 'object') {
      error(`Key not found: ${keyPath}`);
    }
    current = current[key];
  }

  if (current === undefined) {
    error(`Key not found: ${keyPath}`);
  }

  output(current, raw, String(current));
}
|
||||
|
||||
/**
 * Command to set the model profile in the config file.
 *
 * Note that this exits the process (via `output()`) even in the happy path.
 */
function cmdConfigSetModelProfile(cwd, profile, raw) {
  if (!profile) {
    error(`Usage: config-set-model-profile <${VALID_PROFILES.join('|')}>`);
  }

  const normalizedProfile = profile.toLowerCase().trim();
  if (!VALID_PROFILES.includes(normalizedProfile)) {
    error(`Invalid profile '${profile}'. Valid profiles: ${VALID_PROFILES.join(', ')}`);
  }

  // Ensure config exists (create if needed)
  ensureConfigFile(cwd);

  // Set the model profile in the config.
  // Fix: setConfigValue takes (cwd, keyPath, value) — the old call passed a stray
  // 4th `raw` argument that was silently ignored.
  const { previousValue } = setConfigValue(cwd, 'model_profile', normalizedProfile);
  const previousProfile = previousValue || 'balanced';

  // Build result value / message and return
  const agentToModelMap = getAgentToModelMapForProfile(normalizedProfile);
  const result = {
    updated: true,
    profile: normalizedProfile,
    previousProfile,
    agentToModelMap,
  };
  const rawValue = getCmdConfigSetModelProfileResultMessage(
    normalizedProfile,
    previousProfile,
    agentToModelMap
  );
  output(result, raw, rawValue);
}
|
||||
|
||||
/**
 * Returns the message to display for the result of the `config-set-model-profile` command when
 * displaying raw output.
 */
function getCmdConfigSetModelProfileResultMessage(
  normalizedProfile,
  previousProfile,
  agentToModelMap
) {
  const table = formatAgentToModelMapAsTable(agentToModelMap);

  // Changed vs. no-op set produce slightly different phrasing.
  if (previousProfile !== normalizedProfile) {
    return [
      `✓ Model profile set to: ${normalizedProfile} (was: ${previousProfile})`,
      'Agents will now use:',
      table,
      'Next spawned agents will use the new profile.',
    ].join('\n\n');
  }

  return [
    `✓ Model profile is already set to: ${normalizedProfile}`,
    'Agents are using:',
    table,
  ].join('\n\n');
}
|
||||
|
||||
// Config subcommand entry points consumed by the CLI dispatcher.
module.exports = {
  cmdConfigEnsureSection,
  cmdConfigSet,
  cmdConfigGet,
  cmdConfigSetModelProfile,
};
|
||||
712
get-shit-done/bin/lib/core.cjs
Normal file
712
get-shit-done/bin/lib/core.cjs
Normal file
@@ -0,0 +1,712 @@
|
||||
/**
|
||||
* Core — Shared utilities, constants, and internal helpers
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { execSync, spawnSync } = require('child_process');
|
||||
const { MODEL_PROFILES } = require('./model-profiles.cjs');
|
||||
|
||||
// ─── Path helpers ────────────────────────────────────────────────────────────
|
||||
|
||||
/** Normalize a relative path to always use forward slashes (cross-platform). */
function toPosixPath(p) {
  // path.sep is '\' on Windows and '/' elsewhere; replace every occurrence.
  return p.replaceAll(path.sep, '/');
}
|
||||
|
||||
// ─── Output helpers ───────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Emit a command result and exit 0. In raw mode the plain value is printed;
 * otherwise pretty-printed JSON (spilled to a tmpfile when it would exceed
 * Claude Code's Bash tool buffer of ~50KB, signalled via an "@file:" prefix).
 */
function output(result, raw, rawValue) {
  if (raw && rawValue !== undefined) {
    process.stdout.write(String(rawValue));
    process.exit(0);
  }

  const json = JSON.stringify(result, null, 2);
  if (json.length > 50000) {
    // Large payloads exceed Claude Code's Bash tool buffer (~50KB).
    // Write to tmpfile and output the path prefixed with @file: so callers can detect it.
    const tmpPath = path.join(require('os').tmpdir(), `gsd-${Date.now()}.json`);
    fs.writeFileSync(tmpPath, json, 'utf-8');
    process.stdout.write('@file:' + tmpPath);
  } else {
    process.stdout.write(json);
  }
  process.exit(0);
}
|
||||
|
||||
/** Report a fatal CLI error on stderr and terminate with exit code 1. */
function error(message) {
  process.stderr.write(`Error: ${message}\n`);
  process.exit(1);
}
|
||||
|
||||
// ─── File & Config utilities ──────────────────────────────────────────────────
|
||||
|
||||
/** Read a file as UTF-8, returning null instead of throwing when it is unreadable. */
function safeReadFile(filePath) {
  let content = null;
  try {
    content = fs.readFileSync(filePath, 'utf-8');
  } catch {
    // Missing/unreadable files are an expected case; callers handle null.
  }
  return content;
}
|
||||
|
||||
/**
 * Load .planning/config.json and normalize it into a flat, fully-defaulted
 * settings object.
 *
 * Never throws: a missing, unreadable, or invalid config file returns `defaults`.
 * Each key may live at the top level or under a nested section
 * (planning/git/workflow); the top-level value wins when both are present.
 */
function loadConfig(cwd) {
  const configPath = path.join(cwd, '.planning', 'config.json');
  const defaults = {
    model_profile: 'balanced',
    commit_docs: true,
    search_gitignored: false,
    branching_strategy: 'none',
    phase_branch_template: 'gsd/phase-{phase}-{slug}',
    milestone_branch_template: 'gsd/{milestone}-{slug}',
    research: true,
    plan_checker: true,
    verifier: true,
    nyquist_validation: true,
    parallelization: true,
    brave_search: false,
    resolve_model_ids: false, // when true, resolve aliases (opus/sonnet/haiku) to full model IDs
  };

  try {
    const raw = fs.readFileSync(configPath, 'utf-8');
    const parsed = JSON.parse(raw);

    // Migrate deprecated "depth" key to "granularity" with value mapping.
    // Note: rewrites config.json on disk as a best-effort side effect.
    if ('depth' in parsed && !('granularity' in parsed)) {
      const depthToGranularity = { quick: 'coarse', standard: 'standard', comprehensive: 'fine' };
      parsed.granularity = depthToGranularity[parsed.depth] || parsed.depth;
      delete parsed.depth;
      try { fs.writeFileSync(configPath, JSON.stringify(parsed, null, 2), 'utf-8'); } catch {}
    }

    // Look up a key at the top level first, then under its legacy nested section.
    const get = (key, nested) => {
      if (parsed[key] !== undefined) return parsed[key];
      if (nested && parsed[nested.section] && parsed[nested.section][nested.field] !== undefined) {
        return parsed[nested.section][nested.field];
      }
      return undefined;
    };

    // parallelization accepts either a bare boolean or an { enabled: bool } object.
    const parallelization = (() => {
      const val = get('parallelization');
      if (typeof val === 'boolean') return val;
      if (typeof val === 'object' && val !== null && 'enabled' in val) return val.enabled;
      return defaults.parallelization;
    })();

    return {
      model_profile: get('model_profile') ?? defaults.model_profile,
      commit_docs: get('commit_docs', { section: 'planning', field: 'commit_docs' }) ?? defaults.commit_docs,
      search_gitignored: get('search_gitignored', { section: 'planning', field: 'search_gitignored' }) ?? defaults.search_gitignored,
      branching_strategy: get('branching_strategy', { section: 'git', field: 'branching_strategy' }) ?? defaults.branching_strategy,
      phase_branch_template: get('phase_branch_template', { section: 'git', field: 'phase_branch_template' }) ?? defaults.phase_branch_template,
      milestone_branch_template: get('milestone_branch_template', { section: 'git', field: 'milestone_branch_template' }) ?? defaults.milestone_branch_template,
      research: get('research', { section: 'workflow', field: 'research' }) ?? defaults.research,
      // NOTE(review): nested field is "plan_check" while the flat key is "plan_checker" —
      // looks like an intentional legacy name, but confirm before "fixing".
      plan_checker: get('plan_checker', { section: 'workflow', field: 'plan_check' }) ?? defaults.plan_checker,
      verifier: get('verifier', { section: 'workflow', field: 'verifier' }) ?? defaults.verifier,
      nyquist_validation: get('nyquist_validation', { section: 'workflow', field: 'nyquist_validation' }) ?? defaults.nyquist_validation,
      parallelization,
      brave_search: get('brave_search') ?? defaults.brave_search,
      resolve_model_ids: get('resolve_model_ids') ?? defaults.resolve_model_ids,
      model_overrides: parsed.model_overrides || null,
    };
  } catch {
    // Missing or malformed config — run entirely on defaults.
    return defaults;
  }
}
|
||||
|
||||
// ─── Git utilities ────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * True when `targetPath` is ignored per .gitignore rules under `cwd`.
 *
 * Uses `git check-ignore --no-index`: --no-index checks .gitignore rules regardless
 * of whether the file is tracked. Without it, git check-ignore returns "not ignored"
 * for tracked files even when .gitignore explicitly lists them — a common source of
 * confusion when .planning/ was committed before being added to .gitignore.
 *
 * Fix: the previous implementation built a shell command string and "sanitized" the
 * path by deleting characters (spaces, quotes, etc.), which silently made git check
 * a DIFFERENT path than the one asked about. spawnSync with an argv array passes
 * the path verbatim and needs no shell escaping at all.
 */
function isGitIgnored(cwd, targetPath) {
  const result = spawnSync('git', ['check-ignore', '-q', '--no-index', '--', targetPath], {
    cwd,
    stdio: 'pipe',
  });
  // Exit 0 = ignored; 1 = not ignored; anything else (not a repo, git missing,
  // spawn failure with status null) is treated as "not ignored".
  return result.status === 0;
}
|
||||
|
||||
// ─── Markdown normalization ─────────────────────────────────────────────────
|
||||
|
||||
/**
 * Normalize markdown to fix common markdownlint violations.
 * Applied at write points so GSD-generated .planning/ files are IDE-friendly.
 *
 * Rules enforced:
 *   MD022 — Blank lines around headings
 *   MD031 — Blank lines around fenced code blocks
 *   MD032 — Blank lines around lists
 *   MD012 — No multiple consecutive blank lines (collapsed to 2 max)
 *   MD047 — Files end with a single newline
 *
 * Perf fix: fence state is tracked with a running parity flag instead of
 * re-scanning every prior line per ``` line (the old isInsideFencedBlock /
 * isClosingFence calls made this O(n^2) on fence-heavy documents).
 */
function normalizeMd(content) {
  if (!content || typeof content !== 'string') return content;

  // Normalize line endings to LF for consistent processing
  let text = content.replace(/\r\n/g, '\n');

  const lines = text.split('\n');
  const result = [];
  let inFence = false; // parity of ``` fences seen on PREVIOUS lines

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const prev = i > 0 ? lines[i - 1] : '';
    const prevTrimmed = prev.trimEnd();
    const trimmed = line.trimEnd();
    const isFenceLine = /^```/.test(trimmed);
    const isListLine = /^(\s*[-*+]\s|\s*\d+\.\s)/.test(line);

    // MD022: Blank line before headings (skip first line and frontmatter delimiters)
    if (/^#{1,6}\s/.test(trimmed) && i > 0 && prevTrimmed !== '' && prevTrimmed !== '---') {
      result.push('');
    }

    // MD031: Blank line before opening fences (a ``` while a block is open is a closer)
    if (isFenceLine && i > 0 && prevTrimmed !== '' && !inFence) {
      result.push('');
    }

    // MD032: Blank line before lists (- item, * item, N. item, - [ ] item)
    if (isListLine && i > 0 &&
        prevTrimmed !== '' && !/^(\s*[-*+]\s|\s*\d+\.\s)/.test(prev) &&
        prevTrimmed !== '---') {
      result.push('');
    }

    result.push(line);

    // Advance fence parity AFTER the before-checks; keep the pre-line state for
    // the closing-fence check below.
    const wasInFence = inFence;
    if (isFenceLine) inFence = !inFence;

    // MD022: Blank line after headings
    if (/^#{1,6}\s/.test(trimmed) && i < lines.length - 1) {
      const next = lines[i + 1];
      if (next !== undefined && next.trimEnd() !== '') {
        result.push('');
      }
    }

    // MD031: Blank line after closing fenced code blocks
    if (/^```\s*$/.test(trimmed) && wasInFence && i < lines.length - 1) {
      const next = lines[i + 1];
      if (next !== undefined && next.trimEnd() !== '') {
        result.push('');
      }
    }

    // MD032: Blank line after last list item in a block
    if (isListLine && i < lines.length - 1) {
      const next = lines[i + 1];
      if (next !== undefined && next.trimEnd() !== '' &&
          !/^(\s*[-*+]\s|\s*\d+\.\s)/.test(next) &&
          !/^\s/.test(next)) {
        // Only add blank line if next line is not a continuation/indented line
        result.push('');
      }
    }
  }

  text = result.join('\n');

  // MD012: Collapse 3+ consecutive blank lines to 2
  text = text.replace(/\n{3,}/g, '\n\n');

  // MD047: Ensure file ends with exactly one newline
  text = text.replace(/\n*$/, '\n');

  return text;
}
|
||||
|
||||
/** Check if line index i is inside an already-open fenced code block. */
function isInsideFencedBlock(lines, i) {
  // An odd number of ``` fences strictly before line i means a fence is open.
  const fencesBefore = lines.slice(0, i).filter(l => /^```/.test(l.trimEnd())).length;
  return fencesBefore % 2 === 1;
}
|
||||
|
||||
/**
 * Check if a ``` line closes a fenced block: the count of fences up to and
 * including this line is even (every opener has found its closer).
 */
function isClosingFence(lines, i) {
  const fencesThroughHere = lines.slice(0, i + 1).filter(l => /^```/.test(l.trimEnd())).length;
  return fencesThroughHere % 2 === 0;
}
|
||||
|
||||
/** Run a git command, capturing exit code and trimmed stdout/stderr without throwing. */
function execGit(cwd, args) {
  const proc = spawnSync('git', args, { cwd, stdio: 'pipe', encoding: 'utf-8' });
  return {
    // status is null when git itself could not be spawned — report failure (1).
    exitCode: proc.status ?? 1,
    stdout: String(proc.stdout ?? '').trim(),
    stderr: String(proc.stderr ?? '').trim(),
  };
}
|
||||
|
||||
// ─── Phase utilities ──────────────────────────────────────────────────────────
|
||||
|
||||
/** Escape regex metacharacters so `value` can be embedded literally in a RegExp. */
function escapeRegex(value) {
  const text = String(value);
  return text.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}
|
||||
|
||||
/**
 * Canonicalize a phase identifier: zero-pad the integer part to 2 digits,
 * uppercase the optional letter suffix, keep decimal sub-phase segments
 * (e.g. "3a.1" -> "03A.1"). Unparseable input is returned unchanged.
 */
function normalizePhaseName(phase) {
  const parts = String(phase).match(/^(\d+)([A-Z])?((?:\.\d+)*)/i);
  if (!parts) return phase;
  const [, num, letter, decimals] = parts;
  return num.padStart(2, '0') + (letter ? letter.toUpperCase() : '') + (decimals || '');
}
|
||||
|
||||
/**
 * Ordering comparator for phase identifiers: integer part first, then letter
 * suffix (bare number before lettered: 12 < 12A < 12B), then decimal segments
 * compared numerically one by one (12A < 12A.1 < 12A.1.2 < 12A.2).
 * Unparseable values fall back to plain string comparison.
 */
function comparePhaseNum(a, b) {
  const phaseRe = /^(\d+)([A-Z])?((?:\.\d+)*)/i;
  const ma = String(a).match(phaseRe);
  const mb = String(b).match(phaseRe);
  if (!ma || !mb) return String(a).localeCompare(String(b));

  const numDiff = parseInt(ma[1], 10) - parseInt(mb[1], 10);
  if (numDiff !== 0) return numDiff;

  // Absent letter sorts first: 12 < 12A < 12B
  const letterA = (ma[2] || '').toUpperCase();
  const letterB = (mb[2] || '').toUpperCase();
  if (letterA !== letterB) {
    if (!letterA) return -1;
    if (!letterB) return 1;
    return letterA < letterB ? -1 : 1;
  }

  // Segment-by-segment decimal comparison; a shorter (or absent) tail sorts first.
  const decA = ma[3] ? ma[3].slice(1).split('.').map(s => parseInt(s, 10)) : [];
  const decB = mb[3] ? mb[3].slice(1).split('.').map(s => parseInt(s, 10)) : [];
  if (decA.length === 0 && decB.length > 0) return -1;
  if (decB.length === 0 && decA.length > 0) return 1;
  for (let i = 0; i < Math.max(decA.length, decB.length); i++) {
    const va = Number.isFinite(decA[i]) ? decA[i] : 0;
    const vb = Number.isFinite(decB[i]) ? decB[i] : 0;
    if (va !== vb) return va - vb;
  }
  return 0;
}
|
||||
|
||||
/**
 * Scan `baseDir` for a phase directory whose name starts with the normalized
 * phase number, and summarize its contents (plans, summaries, and presence of
 * research/context/verification docs). Returns null when the phase — or the
 * directory itself — is absent or unreadable.
 *
 * @param {string} baseDir - Absolute directory containing phase subdirectories.
 * @param {string} relBase - Repo-relative form of baseDir, used in the returned path.
 * @param {string} normalized - Phase number already run through normalizePhaseName().
 */
function searchPhaseInDir(baseDir, relBase, normalized) {
  try {
    const entries = fs.readdirSync(baseDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort((a, b) => comparePhaseNum(a, b));
    // NOTE(review): prefix match means "01" can also hit "01A-…" when "01-…" is
    // absent; the comparePhaseNum sort makes the bare number win when both exist —
    // confirm the fuzzy fallback is intended.
    const match = dirs.find(d => d.startsWith(normalized));
    if (!match) return null;

    // Split e.g. "03A-slug-name" into the phase number and its human-readable slug.
    const dirMatch = match.match(/^(\d+[A-Z]?(?:\.\d+)*)-?(.*)/i);
    const phaseNumber = dirMatch ? dirMatch[1] : normalized;
    const phaseName = dirMatch && dirMatch[2] ? dirMatch[2] : null;
    const phaseDir = path.join(baseDir, match);
    const phaseFiles = fs.readdirSync(phaseDir);

    // Both "NN-PLAN.md" (multi-plan phases) and bare "PLAN.md" layouts are supported.
    const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md').sort();
    const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md').sort();
    const hasResearch = phaseFiles.some(f => f.endsWith('-RESEARCH.md') || f === 'RESEARCH.md');
    const hasContext = phaseFiles.some(f => f.endsWith('-CONTEXT.md') || f === 'CONTEXT.md');
    const hasVerification = phaseFiles.some(f => f.endsWith('-VERIFICATION.md') || f === 'VERIFICATION.md');

    // A plan counts as complete when a SUMMARY with the same id prefix exists.
    const completedPlanIds = new Set(
      summaries.map(s => s.replace('-SUMMARY.md', '').replace('SUMMARY.md', ''))
    );
    const incompletePlans = plans.filter(p => {
      const planId = p.replace('-PLAN.md', '').replace('PLAN.md', '');
      return !completedPlanIds.has(planId);
    });

    return {
      found: true,
      directory: toPosixPath(path.join(relBase, match)),
      phase_number: phaseNumber,
      phase_name: phaseName,
      phase_slug: phaseName ? phaseName.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '') : null,
      plans,
      summaries,
      incomplete_plans: incompletePlans,
      has_research: hasResearch,
      has_context: hasContext,
      has_verification: hasVerification,
    };
  } catch {
    // Unreadable/missing directory — treated the same as "phase not found".
    return null;
  }
}
|
||||
|
||||
/**
 * Locate a phase directory by number, searching active phases first and then
 * archived milestone phases (newest archive first). Returns the metadata object
 * produced by searchPhaseInDir — with `archived` set to the milestone version
 * when found inside an archive — or null when the phase cannot be found.
 */
function findPhaseInternal(cwd, phase) {
  if (!phase) return null;

  const normalized = normalizePhaseName(phase);

  // Active phases take priority over any archive.
  const active = searchPhaseInDir(
    path.join(cwd, '.planning', 'phases'),
    '.planning/phases',
    normalized
  );
  if (active) return active;

  const milestonesDir = path.join(cwd, '.planning', 'milestones');
  if (!fs.existsSync(milestonesDir)) return null;

  try {
    // Archives are named "vX.Y-phases"; lexicographic sort + reverse ≈ newest first.
    const archives = fs.readdirSync(milestonesDir, { withFileTypes: true })
      .filter(e => e.isDirectory() && /^v[\d.]+-phases$/.test(e.name))
      .map(e => e.name)
      .sort()
      .reverse();

    for (const archiveName of archives) {
      const version = archiveName.match(/^(v[\d.]+)-phases$/)[1];
      const hit = searchPhaseInDir(
        path.join(milestonesDir, archiveName),
        '.planning/milestones/' + archiveName,
        normalized
      );
      if (hit) {
        hit.archived = version;
        return hit;
      }
    }
  } catch {
    // Unreadable archive layout — fall through to "not found".
  }

  return null;
}
|
||||
|
||||
/**
 * Enumerate every phase directory inside archived milestone folders
 * (.planning/milestones/v*-phases/), newest milestone first and phases in
 * comparePhaseNum order within each. Returns [] when nothing is archived.
 */
function getArchivedPhaseDirs(cwd) {
  const milestonesDir = path.join(cwd, '.planning', 'milestones');
  const results = [];

  if (!fs.existsSync(milestonesDir)) return results;

  try {
    // Archives are named "vX.Y-phases"; lexicographic sort + reverse ≈ newest first.
    const archives = fs.readdirSync(milestonesDir, { withFileTypes: true })
      .filter(e => e.isDirectory() && /^v[\d.]+-phases$/.test(e.name))
      .map(e => e.name)
      .sort()
      .reverse();

    for (const archiveName of archives) {
      const version = archiveName.match(/^(v[\d.]+)-phases$/)[1];
      const archivePath = path.join(milestonesDir, archiveName);
      const phaseDirs = fs.readdirSync(archivePath, { withFileTypes: true })
        .filter(e => e.isDirectory())
        .map(e => e.name)
        .sort((a, b) => comparePhaseNum(a, b));

      for (const name of phaseDirs) {
        results.push({
          name,
          milestone: version,
          basePath: path.join('.planning', 'milestones', archiveName),
          fullPath: path.join(archivePath, name),
        });
      }
    }
  } catch {
    // Partially-readable archive tree — return whatever was collected so far.
  }

  return results;
}
|
||||
|
||||
// ─── Roadmap milestone scoping ───────────────────────────────────────────────
|
||||
|
||||
/**
 * Strip shipped milestone content wrapped in <details> blocks.
 * Used to isolate current milestone phases when searching ROADMAP.md
 * for phase headings or checkboxes — prevents matching archived milestone
 * phases that share the same numbers as current milestone phases.
 */
function stripShippedMilestones(content) {
  // Non-greedy so each <details>…</details> pair is removed individually.
  const shippedBlock = /<details>[\s\S]*?<\/details>/gi;
  return content.replace(shippedBlock, '');
}
|
||||
|
||||
/**
 * Extract the current milestone section from ROADMAP.md by positive lookup.
 *
 * Instead of stripping <details> blocks (negative heuristic that breaks if
 * agents wrap the current milestone in <details>), this finds the section
 * matching the current milestone version and returns only that content.
 *
 * Falls back to stripShippedMilestones() if:
 * - cwd is not provided
 * - STATE.md doesn't exist or has no milestone field
 * - Version can't be found in ROADMAP.md
 *
 * @param {string} content - Full ROADMAP.md content
 * @param {string} [cwd] - Working directory for reading STATE.md
 * @returns {string} Content scoped to current milestone
 */
function extractCurrentMilestone(content, cwd) {
  if (!cwd) return stripShippedMilestones(content);

  // 1. Get current milestone version from STATE.md frontmatter
  let version = null;
  try {
    const statePath = path.join(cwd, '.planning', 'STATE.md');
    if (fs.existsSync(statePath)) {
      const stateRaw = fs.readFileSync(statePath, 'utf-8');
      const milestoneMatch = stateRaw.match(/^milestone:\s*(.+)/m);
      if (milestoneMatch) {
        version = milestoneMatch[1].trim();
      }
    }
  } catch {} // best-effort: unreadable STATE.md just means "no version known"

  // 2. Fallback: derive version from getMilestoneInfo pattern in ROADMAP.md itself
  if (!version) {
    // Check for 🚧 in-progress marker
    const inProgressMatch = content.match(/🚧\s*\*\*v(\d+\.\d+)\s/);
    if (inProgressMatch) {
      version = 'v' + inProgressMatch[1];
    }
  }

  if (!version) return stripShippedMilestones(content);

  // 3. Find the section matching this version
  // Match headings like: ## Roadmap v3.0: Name, ## v3.0 Name, etc.
  const escapedVersion = escapeRegex(version);
  const sectionPattern = new RegExp(
    `(^#{1,3}\\s+.*${escapedVersion}[^\\n]*)`,
    'mi'
  );
  const sectionMatch = content.match(sectionPattern);

  if (!sectionMatch) return stripShippedMilestones(content);

  const sectionStart = sectionMatch.index;

  // Find the end: next milestone heading at same or higher level, or EOF
  // Milestone headings look like: ## v2.0, ## Roadmap v2.0, ## ✅ v1.0, etc.
  const headingLevel = sectionMatch[1].match(/^(#{1,3})\s/)[1].length;
  const restContent = content.slice(sectionStart + sectionMatch[0].length);
  const nextMilestonePattern = new RegExp(
    `^#{1,${headingLevel}}\\s+(?:.*v\\d+\\.\\d+|✅|📋|🚧)`,
    'mi'
  );
  const nextMatch = restContent.match(nextMilestonePattern);

  // nextMatch.index is relative to restContent, so offset by the heading's end.
  let sectionEnd;
  if (nextMatch) {
    sectionEnd = sectionStart + sectionMatch[0].length + nextMatch.index;
  } else {
    sectionEnd = content.length;
  }

  // Return everything before the current milestone section (non-milestone content
  // like title, overview) plus the current milestone section
  const beforeMilestones = content.slice(0, sectionStart);
  const currentSection = content.slice(sectionStart, sectionEnd);

  // Also include any content before the first milestone heading (title, overview, etc.)
  // but strip any <details> blocks in it (these are definitely shipped)
  const preamble = beforeMilestones.replace(/<details>[\s\S]*?<\/details>/gi, '');

  return preamble + currentSection;
}
|
||||
|
||||
/**
 * Replace a pattern only in the current milestone section of ROADMAP.md
 * (everything after the last </details> close tag). Used for write operations
 * that must not accidentally modify archived milestone checkboxes/tables.
 */
function replaceInCurrentMilestone(content, pattern, replacement) {
  const marker = '</details>';
  const lastClose = content.lastIndexOf(marker);

  // No archived sections at all — safe to replace anywhere in the document.
  if (lastClose === -1) return content.replace(pattern, replacement);

  const splitAt = lastClose + marker.length;
  const archivedPart = content.slice(0, splitAt);
  const currentPart = content.slice(splitAt);
  return archivedPart + currentPart.replace(pattern, replacement);
}
|
||||
|
||||
// ─── Roadmap & model utilities ────────────────────────────────────────────────
|
||||
|
||||
/**
 * Look up a phase heading ("## Phase N: Name") in the current milestone section
 * of ROADMAP.md and return its name, goal line, and full section text.
 * Returns null when the roadmap or phase is missing, or on any read error.
 */
function getRoadmapPhaseInternal(cwd, phaseNum) {
  if (!phaseNum) return null;
  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  if (!fs.existsSync(roadmapPath)) return null;

  try {
    const content = extractCurrentMilestone(fs.readFileSync(roadmapPath, 'utf-8'), cwd);

    // Match the phase heading at any of the levels GSD uses (## through ####).
    const headerMatch = content.match(
      new RegExp(`#{2,4}\\s*Phase\\s+${escapeRegex(phaseNum.toString())}:\\s*([^\\n]+)`, 'i')
    );
    if (!headerMatch) return null;

    const phaseName = headerMatch[1].trim();
    const headerIndex = headerMatch.index;

    // The section runs from this heading until the next "Phase N" heading (or EOF).
    const nextHeaderMatch = content.slice(headerIndex).match(/\n#{2,4}\s+Phase\s+\d/i);
    const sectionEnd = nextHeaderMatch ? headerIndex + nextHeaderMatch.index : content.length;
    const section = content.slice(headerIndex, sectionEnd).trim();

    // "**Goal:** ..." line, tolerating a couple of bold-marker placements.
    const goalMatch = section.match(/\*\*Goal(?:\*\*:|\*?\*?:\*\*)\s*([^\n]+)/i);

    return {
      found: true,
      phase_number: phaseNum.toString(),
      phase_name: phaseName,
      goal: goalMatch ? goalMatch[1].trim() : null,
      section,
    };
  } catch {
    return null;
  }
}
|
||||
|
||||
// ─── Model alias resolution ───────────────────────────────────────────────────
|
||||
|
||||
/**
 * Map short model aliases to full model IDs.
 * Updated each release to match current model versions.
 * Users can override with model_overrides in config.json for custom/latest models.
 * Within this module it is consulted by resolveModelInternal() only when
 * config.resolve_model_ids is true.
 */
const MODEL_ALIAS_MAP = {
  'opus': 'claude-opus-4-0',
  'sonnet': 'claude-sonnet-4-5',
  'haiku': 'claude-haiku-3-5',
};
|
||||
|
||||
/**
 * Resolve which model a given agent type should use, in priority order:
 *   1. config.model_overrides[agentType] — explicit per-agent override
 *   2. MODEL_PROFILES[agentType][profile] — profile lookup ('inherit' passes through)
 *   3. 'sonnet' as the final fallback
 * When config.resolve_model_ids is set, short aliases are expanded to full model IDs.
 */
function resolveModelInternal(cwd, agentType) {
  const config = loadConfig(cwd);

  // Per-agent override beats everything else.
  const override = config.model_overrides?.[agentType];
  if (override) return override;

  const agentModels = MODEL_PROFILES[agentType];
  if (!agentModels) return 'sonnet';

  const profile = String(config.model_profile || 'balanced').toLowerCase();
  if (profile === 'inherit') return 'inherit';

  const alias = agentModels[profile] || agentModels['balanced'] || 'sonnet';

  // Expanding aliases prevents 404s when the Task tool passes them directly to the API.
  return config.resolve_model_ids ? (MODEL_ALIAS_MAP[alias] || alias) : alias;
}
|
||||
|
||||
// ─── Misc utilities ───────────────────────────────────────────────────────────
|
||||
|
||||
/** True when `targetPath` (absolute, or relative to cwd) exists on disk. */
function pathExistsInternal(cwd, targetPath) {
  const resolved = path.isAbsolute(targetPath) ? targetPath : path.join(cwd, targetPath);
  try {
    fs.statSync(resolved);
  } catch {
    return false;
  }
  return true;
}
|
||||
|
||||
/** Convert text to a URL-safe slug (lowercase, hyphen-separated); null for empty input. */
function generateSlugInternal(text) {
  if (!text) return null;
  const hyphenated = text.toLowerCase().replace(/[^a-z0-9]+/g, '-');
  // Trim leading/trailing hyphens left by punctuation at the edges.
  return hyphenated.replace(/^-+|-+$/g, '');
}
|
||||
|
||||
/**
 * Determine the current milestone {version, name} from ROADMAP.md.
 * Tries, in order: the 🚧 in-progress list marker, the first non-shipped
 * "## … vX.Y: Name" heading, then any bare vX.Y occurrence. Falls back to
 * { version: 'v1.0', name: 'milestone' } when the roadmap is missing/unreadable.
 */
function getMilestoneInfo(cwd) {
  try {
    const roadmap = fs.readFileSync(path.join(cwd, '.planning', 'ROADMAP.md'), 'utf-8');

    // First: check for list-format roadmaps using 🚧 (in-progress) marker
    // e.g. "- 🚧 **v2.1 Belgium** — Phases 24-28 (in progress)"
    const inProgressMatch = roadmap.match(/🚧\s*\*\*v(\d+\.\d+)\s+([^*]+)\*\*/);
    if (inProgressMatch) {
      return {
        version: 'v' + inProgressMatch[1],
        name: inProgressMatch[2].trim(),
      };
    }

    // Second: heading-format roadmaps — strip shipped milestones in <details> blocks
    const cleaned = stripShippedMilestones(roadmap);
    // Extract version and name from the same ## heading for consistency
    const headingMatch = cleaned.match(/## .*v(\d+\.\d+)[:\s]+([^\n(]+)/);
    if (headingMatch) {
      return {
        version: 'v' + headingMatch[1],
        name: headingMatch[2].trim(),
      };
    }
    // Fallback: try bare version match
    const versionMatch = cleaned.match(/v(\d+\.\d+)/);
    return {
      version: versionMatch ? versionMatch[0] : 'v1.0',
      name: 'milestone',
    };
  } catch {
    // Missing/unreadable ROADMAP.md — assume a fresh project.
    return { version: 'v1.0', name: 'milestone' };
  }
}
|
||||
|
||||
/**
 * Returns a filter function that checks whether a phase directory belongs
 * to the current milestone based on ROADMAP.md phase headings.
 * If no ROADMAP exists or no phases are listed, returns a pass-all filter.
 * The returned function carries a `phaseCount` property with the number of
 * phases found in the roadmap (0 for the pass-all filter).
 */
function getMilestonePhaseFilter(cwd) {
  const phaseNums = new Set();
  try {
    const roadmap = extractCurrentMilestone(
      fs.readFileSync(path.join(cwd, '.planning', 'ROADMAP.md'), 'utf-8'),
      cwd
    );
    const headingRe = /#{2,4}\s*Phase\s+(\d+[A-Z]?(?:\.\d+)*)\s*:/gi;
    for (let m = headingRe.exec(roadmap); m !== null; m = headingRe.exec(roadmap)) {
      phaseNums.add(m[1]);
    }
  } catch {
    // No roadmap (or unreadable) — fall through to the pass-all filter below.
  }

  if (phaseNums.size === 0) {
    const passAll = () => true;
    passAll.phaseCount = 0;
    return passAll;
  }

  // Compare with leading zeros stripped and case folded so "01A" matches "1a".
  const normalized = new Set(
    [...phaseNums].map(n => (n.replace(/^0+/, '') || '0').toLowerCase())
  );

  function isDirInMilestone(dirName) {
    const m = dirName.match(/^0*(\d+[A-Za-z]?(?:\.\d+)*)/);
    return m ? normalized.has(m[1].toLowerCase()) : false;
  }
  isDirInMilestone.phaseCount = phaseNums.size;
  return isDirInMilestone;
}
|
||||
|
||||
// Shared utilities consumed by the command modules (config, state, phase, etc.).
module.exports = {
  output,
  error,
  safeReadFile,
  loadConfig,
  isGitIgnored,
  execGit,
  normalizeMd,
  escapeRegex,
  normalizePhaseName,
  comparePhaseNum,
  searchPhaseInDir,
  findPhaseInternal,
  getArchivedPhaseDirs,
  getRoadmapPhaseInternal,
  resolveModelInternal,
  pathExistsInternal,
  generateSlugInternal,
  getMilestoneInfo,
  getMilestonePhaseFilter,
  stripShippedMilestones,
  extractCurrentMilestone,
  replaceInCurrentMilestone,
  toPosixPath,
  MODEL_ALIAS_MAP,
};
|
||||
299
get-shit-done/bin/lib/frontmatter.cjs
Normal file
299
get-shit-done/bin/lib/frontmatter.cjs
Normal file
@@ -0,0 +1,299 @@
|
||||
/**
|
||||
* Frontmatter — YAML frontmatter parsing, serialization, and CRUD commands
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { safeReadFile, normalizeMd, output, error } = require('./core.cjs');
|
||||
|
||||
// ─── Parsing engine ───────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Parse the leading YAML frontmatter block of a markdown document into a
 * plain object. Supports scalar values, inline arrays (`[a, b]`), dash-list
 * arrays, and nested mappings tracked by indentation. Returns {} when no
 * frontmatter block is present.
 */
function extractFrontmatter(content) {
  const result = {};
  const fmMatch = content.match(/^---\r?\n([\s\S]+?)\r?\n---/);
  if (!fmMatch) return result;

  // Context stack for indentation-based nesting:
  // obj = container being filled, key = pending array key, indent = level.
  const contexts = [{ obj: result, key: null, indent: -1 }];

  for (const rawLine of fmMatch[1].split(/\r?\n/)) {
    if (rawLine.trim() === '') continue;

    const leading = rawLine.match(/^(\s*)/);
    const indent = leading ? leading[1].length : 0;

    // Dedent: unwind contexts until this line fits inside the top one.
    while (contexts.length > 1 && indent <= contexts[contexts.length - 1].indent) {
      contexts.pop();
    }
    const top = contexts[contexts.length - 1];

    const kv = rawLine.match(/^(\s*)([a-zA-Z0-9_-]+):\s*(.*)/);
    if (kv) {
      const key = kv[2];
      const value = kv[3].trim();
      if (value === '' || value === '[') {
        // Bare key (or opening bracket): placeholder for nested content;
        // empty objects may later be converted to arrays by dash items.
        top.obj[key] = value === '[' ? [] : {};
        top.key = null;
        contexts.push({ obj: top.obj[key], key: null, indent });
      } else if (value.startsWith('[') && value.endsWith(']')) {
        // Inline array: key: [a, b, c]
        top.obj[key] = value
          .slice(1, -1)
          .split(',')
          .map((part) => part.trim().replace(/^["']|["']$/g, ''))
          .filter(Boolean);
        top.key = null;
      } else {
        // Simple scalar; surrounding quotes are stripped.
        top.obj[key] = value.replace(/^["']|["']$/g, '');
        top.key = null;
      }
      continue;
    }

    if (!rawLine.trim().startsWith('- ')) continue;

    // Dash-list item.
    const item = rawLine.trim().slice(2).replace(/^["']|["']$/g, '');
    if (Array.isArray(top.obj)) {
      top.obj.push(item);
    } else if (typeof top.obj === 'object' && Object.keys(top.obj).length === 0) {
      // The placeholder {} turns out to be an array: rewire the parent's
      // reference and keep collecting into the new array.
      const parent = contexts.length > 1 ? contexts[contexts.length - 2] : null;
      if (parent) {
        for (const k of Object.keys(parent.obj)) {
          if (parent.obj[k] === top.obj) {
            parent.obj[k] = [item];
            top.obj = parent.obj[k];
            break;
          }
        }
      }
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Serialize a plain object back into YAML frontmatter text (without the ---
 * delimiters). Handles up to three nesting levels, mirroring what
 * extractFrontmatter produces. NOTE: quoting rules deliberately differ by
 * depth — level-1 scalars are quoted when they contain ':' or '#' or start
 * with '[' / '{'; level-2 scalars when they contain ':' or '#'; level-3
 * values are emitted verbatim. Short all-string arrays (<=3 items, joined
 * length < 60) are inlined as `[a, b]`; null/undefined values are dropped.
 */
function reconstructFrontmatter(obj) {
  const lines = [];
  for (const [key, value] of Object.entries(obj)) {
    if (value === null || value === undefined) continue;
    if (Array.isArray(value)) {
      if (value.length === 0) {
        lines.push(`${key}: []`);
      } else if (value.every(v => typeof v === 'string') && value.length <= 3 && value.join(', ').length < 60) {
        // Short, all-string array → inline form.
        lines.push(`${key}: [${value.join(', ')}]`);
      } else {
        // Block form; items with YAML-significant chars get quoted.
        lines.push(`${key}:`);
        for (const item of value) {
          lines.push(`  - ${typeof item === 'string' && (item.includes(':') || item.includes('#')) ? `"${item}"` : item}`);
        }
      }
    } else if (typeof value === 'object') {
      // Level-2 mapping.
      lines.push(`${key}:`);
      for (const [subkey, subval] of Object.entries(value)) {
        if (subval === null || subval === undefined) continue;
        if (Array.isArray(subval)) {
          if (subval.length === 0) {
            lines.push(`  ${subkey}: []`);
          } else if (subval.every(v => typeof v === 'string') && subval.length <= 3 && subval.join(', ').length < 60) {
            lines.push(`  ${subkey}: [${subval.join(', ')}]`);
          } else {
            lines.push(`  ${subkey}:`);
            for (const item of subval) {
              lines.push(`    - ${typeof item === 'string' && (item.includes(':') || item.includes('#')) ? `"${item}"` : item}`);
            }
          }
        } else if (typeof subval === 'object') {
          // Level-3 mapping — deepest supported level; no inline-array form
          // and no quoting down here.
          lines.push(`  ${subkey}:`);
          for (const [subsubkey, subsubval] of Object.entries(subval)) {
            if (subsubval === null || subsubval === undefined) continue;
            if (Array.isArray(subsubval)) {
              if (subsubval.length === 0) {
                lines.push(`    ${subsubkey}: []`);
              } else {
                lines.push(`    ${subsubkey}:`);
                for (const item of subsubval) {
                  lines.push(`      - ${item}`);
                }
              }
            } else {
              lines.push(`    ${subsubkey}: ${subsubval}`);
            }
          }
        } else {
          // Level-2 scalar.
          const sv = String(subval);
          lines.push(`  ${subkey}: ${sv.includes(':') || sv.includes('#') ? `"${sv}"` : sv}`);
        }
      }
    } else {
      // Level-1 scalar; extra quoting for values that would parse as flow
      // collections ('[', '{').
      const sv = String(value);
      if (sv.includes(':') || sv.includes('#') || sv.startsWith('[') || sv.startsWith('{')) {
        lines.push(`${key}: "${sv}"`);
      } else {
        lines.push(`${key}: ${sv}`);
      }
    }
  }
  return lines.join('\n');
}
|
||||
|
||||
/**
 * Replace (or prepend) a markdown document's YAML frontmatter with the
 * serialized form of `newObj`. Existing frontmatter is swapped in place;
 * documents without one get a new block prepended, separated by a blank line.
 */
function spliceFrontmatter(content, newObj) {
  const block = `---\n${reconstructFrontmatter(newObj)}\n---`;
  const existing = content.match(/^---\r?\n[\s\S]+?\r?\n---/);
  if (existing) {
    return block + content.slice(existing[0].length);
  }
  return `${block}\n\n` + content;
}
|
||||
|
||||
/**
 * Extract a specific block from `must_haves` in raw frontmatter YAML.
 * Handles 3-level nesting: must_haves > artifacts/key_links > [{path, provides, ...}].
 *
 * Indentation contract (spaces): the named block header at 4, list items at
 * 6 ("- "), item key/values at 8+, arrays nested under an item key at 10+.
 * Returns an array of items: plain strings for scalar entries, objects for
 * mapping entries. Returns [] when there is no frontmatter or no such block.
 */
function parseMustHavesBlock(content, blockName) {
  // Extract a specific block from must_haves in raw frontmatter YAML
  // Handles 3-level nesting: must_haves > artifacts/key_links > [{path, provides, ...}]
  const fmMatch = content.match(/^---\r?\n([\s\S]+?)\r?\n---/);
  if (!fmMatch) return [];

  const yaml = fmMatch[1];
  // Find the block (e.g., "truths:", "artifacts:", "key_links:")
  const blockPattern = new RegExp(`^\\s{4}${blockName}:\\s*$`, 'm');
  const blockStart = yaml.search(blockPattern);
  if (blockStart === -1) return [];

  const afterBlock = yaml.slice(blockStart);
  const blockLines = afterBlock.split(/\r?\n/).slice(1); // skip the header line

  const items = [];
  // `current` is a string for scalar items, an object for mapping items.
  let current = null;

  for (const line of blockLines) {
    // Stop at same or lower indent level (non-continuation)
    if (line.trim() === '') continue;
    const indent = line.match(/^(\s*)/)[1].length;
    if (indent <= 4 && line.trim() !== '') break; // back to must_haves level or higher

    if (line.match(/^\s{6}-\s+/)) {
      // New list item at 6-space indent — flush the previous item first.
      if (current) items.push(current);
      current = {};
      // Check if it's a simple string item
      const simpleMatch = line.match(/^\s{6}-\s+"?([^"]+)"?\s*$/);
      if (simpleMatch && !line.includes(':')) {
        current = simpleMatch[1];
      } else {
        // Key-value on same line as dash: "- path: value"
        const kvMatch = line.match(/^\s{6}-\s+(\w+):\s*"?([^"]*)"?\s*$/);
        if (kvMatch) {
          current = {};
          current[kvMatch[1]] = kvMatch[2];
        }
      }
    } else if (current && typeof current === 'object') {
      // Continuation key-value at 8+ space indent
      const kvMatch = line.match(/^\s{8,}(\w+):\s*"?([^"]*)"?\s*$/);
      if (kvMatch) {
        const val = kvMatch[2];
        // Try to parse as number
        current[kvMatch[1]] = /^\d+$/.test(val) ? parseInt(val, 10) : val;
      }
      // Array items under a key
      const arrMatch = line.match(/^\s{10,}-\s+"?([^"]+)"?\s*$/);
      if (arrMatch) {
        // Find the last key added and convert to array
        const keys = Object.keys(current);
        const lastKey = keys[keys.length - 1];
        if (lastKey && !Array.isArray(current[lastKey])) {
          current[lastKey] = current[lastKey] ? [current[lastKey]] : [];
        }
        if (lastKey) current[lastKey].push(arrMatch[1]);
      }
    }
  }
  // Flush the trailing item (note: a dash line matching neither pattern
  // leaves an empty {} here — presumably never produced by the writer side;
  // TODO confirm).
  if (current) items.push(current);

  return items;
}
|
||||
|
||||
// ─── Frontmatter CRUD commands ────────────────────────────────────────────────
|
||||
|
||||
// Required-field schemas for `frontmatter validate`. Keys are the schema
// names accepted on the CLI; `required` lists frontmatter fields that must
// be present for a document of that kind (PLAN.md, SUMMARY.md, VERIFICATION.md).
const FRONTMATTER_SCHEMAS = {
  plan: { required: ['phase', 'plan', 'type', 'wave', 'depends_on', 'files_modified', 'autonomous', 'must_haves'] },
  summary: { required: ['phase', 'plan', 'subsystem', 'tags', 'duration', 'completed'] },
  verification: { required: ['phase', 'verified', 'status', 'score'] },
};
|
||||
|
||||
/**
 * `frontmatter get` — print a file's parsed frontmatter, or a single field
 * of it. Missing file/field is reported via an error object in the output
 * stream rather than a thrown error.
 */
function cmdFrontmatterGet(cwd, filePath, field, raw) {
  if (!filePath) { error('file path required'); }
  const resolved = path.isAbsolute(filePath) ? filePath : path.join(cwd, filePath);
  const content = safeReadFile(resolved);
  if (!content) {
    output({ error: 'File not found', path: filePath }, raw);
    return;
  }
  const fm = extractFrontmatter(content);
  if (!field) {
    output(fm, raw);
    return;
  }
  const value = fm[field];
  if (value === undefined) {
    output({ error: 'Field not found', field }, raw);
    return;
  }
  output({ [field]: value }, raw, JSON.stringify(value));
}
|
||||
|
||||
/**
 * `frontmatter set` — write a single frontmatter field and save the file.
 * The value is JSON-decoded when possible (numbers, arrays, objects,
 * booleans); anything that fails to parse is stored as the literal string.
 */
function cmdFrontmatterSet(cwd, filePath, field, value, raw) {
  if (!filePath || !field || value === undefined) { error('file, field, and value required'); }
  const resolved = path.isAbsolute(filePath) ? filePath : path.join(cwd, filePath);
  if (!fs.existsSync(resolved)) {
    output({ error: 'File not found', path: filePath }, raw);
    return;
  }
  const original = fs.readFileSync(resolved, 'utf-8');
  const fm = extractFrontmatter(original);
  let parsed;
  try {
    parsed = JSON.parse(value);
  } catch {
    parsed = value;
  }
  fm[field] = parsed;
  fs.writeFileSync(resolved, normalizeMd(spliceFrontmatter(original, fm)), 'utf-8');
  output({ updated: true, field, value: parsed }, raw, 'true');
}
|
||||
|
||||
/**
 * `frontmatter merge` — shallow-merge a JSON object into a file's
 * frontmatter and save. Incoming keys overwrite existing ones wholesale;
 * invalid JSON in --data is a hard error.
 */
function cmdFrontmatterMerge(cwd, filePath, data, raw) {
  if (!filePath || !data) { error('file and data required'); }
  const resolved = path.isAbsolute(filePath) ? filePath : path.join(cwd, filePath);
  if (!fs.existsSync(resolved)) {
    output({ error: 'File not found', path: filePath }, raw);
    return;
  }
  const original = fs.readFileSync(resolved, 'utf-8');
  const fm = extractFrontmatter(original);
  let mergeData;
  try {
    mergeData = JSON.parse(data);
  } catch {
    error('Invalid JSON for --data');
    return;
  }
  Object.assign(fm, mergeData);
  fs.writeFileSync(resolved, normalizeMd(spliceFrontmatter(original, fm)), 'utf-8');
  output({ merged: true, fields: Object.keys(mergeData) }, raw, 'true');
}
|
||||
|
||||
/**
 * `frontmatter validate` — check a file's frontmatter against a named
 * schema from FRONTMATTER_SCHEMAS, reporting missing and present required
 * fields. Unknown schema names are a hard error.
 */
function cmdFrontmatterValidate(cwd, filePath, schemaName, raw) {
  if (!filePath || !schemaName) { error('file and schema required'); }
  const schema = FRONTMATTER_SCHEMAS[schemaName];
  if (!schema) { error(`Unknown schema: ${schemaName}. Available: ${Object.keys(FRONTMATTER_SCHEMAS).join(', ')}`); }
  const resolved = path.isAbsolute(filePath) ? filePath : path.join(cwd, filePath);
  const content = safeReadFile(resolved);
  if (!content) { output({ error: 'File not found', path: filePath }, raw); return; }
  const fm = extractFrontmatter(content);
  // Partition required fields into missing/present in a single pass.
  const missing = [];
  const present = [];
  for (const fieldName of schema.required) {
    (fm[fieldName] === undefined ? missing : present).push(fieldName);
  }
  output({ valid: missing.length === 0, missing, present, schema: schemaName }, raw, missing.length === 0 ? 'valid' : 'invalid');
}
|
||||
|
||||
// Public API of the frontmatter module: the parsing/serialization engine
// (extract/reconstruct/splice/parseMustHavesBlock), the validation schemas,
// and the four CLI command handlers (get/set/merge/validate).
module.exports = {
  extractFrontmatter,
  reconstructFrontmatter,
  spliceFrontmatter,
  parseMustHavesBlock,
  FRONTMATTER_SCHEMAS,
  cmdFrontmatterGet,
  cmdFrontmatterSet,
  cmdFrontmatterMerge,
  cmdFrontmatterValidate,
};
|
||||
782
get-shit-done/bin/lib/init.cjs
Normal file
782
get-shit-done/bin/lib/init.cjs
Normal file
@@ -0,0 +1,782 @@
|
||||
/**
|
||||
* Init — Compound init commands for workflow bootstrapping
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { execSync } = require('child_process');
|
||||
const { loadConfig, resolveModelInternal, findPhaseInternal, getRoadmapPhaseInternal, pathExistsInternal, generateSlugInternal, getMilestoneInfo, getMilestonePhaseFilter, stripShippedMilestones, extractCurrentMilestone, normalizePhaseName, toPosixPath, output, error } = require('./core.cjs');
|
||||
|
||||
/**
 * `init execute-phase` — gather everything the execute-phase workflow needs
 * in one shot: resolved models, config flags, phase/plan inventory, the
 * pre-computed branch name, milestone info, and key file existence.
 */
function cmdInitExecutePhase(cwd, phase, raw) {
  if (!phase) {
    error('phase required for init execute-phase');
  }

  const cfg = loadConfig(cwd);
  const info = findPhaseInternal(cwd, phase);
  const milestone = getMilestoneInfo(cwd);

  // Extract "**Requirements**: [R1, R2]" from the phase's ROADMAP section.
  const roadmapPhase = getRoadmapPhaseInternal(cwd, phase);
  const reqLine = roadmapPhase?.section?.match(/^\*\*Requirements\*\*:[^\S\n]*([^\n]*)$/m);
  let phase_req_ids = null;
  if (reqLine) {
    const ids = reqLine[1].replace(/[\[\]]/g, '').split(',').map(s => s.trim()).filter(Boolean).join(', ');
    if (ids && ids !== 'TBD') phase_req_ids = ids;
  }

  // Pre-compute the working branch for the configured branching strategy.
  let branch_name = null;
  if (cfg.branching_strategy === 'phase' && info) {
    branch_name = cfg.phase_branch_template
      .replace('{phase}', info.phase_number)
      .replace('{slug}', info.phase_slug || 'phase');
  } else if (cfg.branching_strategy === 'milestone') {
    branch_name = cfg.milestone_branch_template
      .replace('{milestone}', milestone.version)
      .replace('{slug}', generateSlugInternal(milestone.name) || 'milestone');
  }

  output({
    // Models
    executor_model: resolveModelInternal(cwd, 'gsd-executor'),
    verifier_model: resolveModelInternal(cwd, 'gsd-verifier'),

    // Config flags
    commit_docs: cfg.commit_docs,
    parallelization: cfg.parallelization,
    branching_strategy: cfg.branching_strategy,
    phase_branch_template: cfg.phase_branch_template,
    milestone_branch_template: cfg.milestone_branch_template,
    verifier_enabled: cfg.verifier,

    // Phase info
    phase_found: !!info,
    phase_dir: info?.directory || null,
    phase_number: info?.phase_number || null,
    phase_name: info?.phase_name || null,
    phase_slug: info?.phase_slug || null,
    phase_req_ids,

    // Plan inventory
    plans: info?.plans || [],
    summaries: info?.summaries || [],
    incomplete_plans: info?.incomplete_plans || [],
    plan_count: info?.plans?.length || 0,
    incomplete_count: info?.incomplete_plans?.length || 0,

    // Branch name (pre-computed above)
    branch_name,

    // Milestone info
    milestone_version: milestone.version,
    milestone_name: milestone.name,
    milestone_slug: generateSlugInternal(milestone.name),

    // File existence
    state_exists: pathExistsInternal(cwd, '.planning/STATE.md'),
    roadmap_exists: pathExistsInternal(cwd, '.planning/ROADMAP.md'),
    config_exists: pathExistsInternal(cwd, '.planning/config.json'),
    // File paths
    state_path: '.planning/STATE.md',
    roadmap_path: '.planning/ROADMAP.md',
    config_path: '.planning/config.json',
  }, raw);
}
|
||||
|
||||
/**
 * `init plan-phase` — bootstrap data for the plan-phase workflow: resolved
 * models, workflow flags, phase/artifact inventory, and file paths. When a
 * phase directory exists on disk, well-known artifact paths (CONTEXT,
 * RESEARCH, VERIFICATION, UAT) are attached.
 */
function cmdInitPlanPhase(cwd, phase, raw) {
  if (!phase) {
    error('phase required for init plan-phase');
  }

  const cfg = loadConfig(cwd);
  const info = findPhaseInternal(cwd, phase);

  // Extract "**Requirements**: [R1, R2]" from the phase's ROADMAP section.
  const roadmapPhase = getRoadmapPhaseInternal(cwd, phase);
  const reqLine = roadmapPhase?.section?.match(/^\*\*Requirements\*\*:[^\S\n]*([^\n]*)$/m);
  let phase_req_ids = null;
  if (reqLine) {
    const ids = reqLine[1].replace(/[\[\]]/g, '').split(',').map(s => s.trim()).filter(Boolean).join(', ');
    if (ids && ids !== 'TBD') phase_req_ids = ids;
  }

  const result = {
    // Models
    researcher_model: resolveModelInternal(cwd, 'gsd-phase-researcher'),
    planner_model: resolveModelInternal(cwd, 'gsd-planner'),
    checker_model: resolveModelInternal(cwd, 'gsd-plan-checker'),

    // Workflow flags
    research_enabled: cfg.research,
    plan_checker_enabled: cfg.plan_checker,
    nyquist_validation_enabled: cfg.nyquist_validation,
    commit_docs: cfg.commit_docs,

    // Phase info
    phase_found: !!info,
    phase_dir: info?.directory || null,
    phase_number: info?.phase_number || null,
    phase_name: info?.phase_name || null,
    phase_slug: info?.phase_slug || null,
    padded_phase: info?.phase_number ? normalizePhaseName(info.phase_number) : null,
    phase_req_ids,

    // Existing artifacts
    has_research: info?.has_research || false,
    has_context: info?.has_context || false,
    has_plans: (info?.plans?.length || 0) > 0,
    plan_count: info?.plans?.length || 0,

    // Environment
    planning_exists: pathExistsInternal(cwd, '.planning'),
    roadmap_exists: pathExistsInternal(cwd, '.planning/ROADMAP.md'),

    // File paths
    state_path: '.planning/STATE.md',
    roadmap_path: '.planning/ROADMAP.md',
    requirements_path: '.planning/REQUIREMENTS.md',
  };

  if (info?.directory) {
    // Attach well-known artifacts, matching either "NN-KIND.md" or "KIND.md".
    try {
      const entries = fs.readdirSync(path.join(cwd, info.directory));
      for (const [kind, key] of [
        ['CONTEXT', 'context_path'],
        ['RESEARCH', 'research_path'],
        ['VERIFICATION', 'verification_path'],
        ['UAT', 'uat_path'],
      ]) {
        const hit = entries.find(f => f.endsWith(`-${kind}.md`) || f === `${kind}.md`);
        if (hit) result[key] = toPosixPath(path.join(info.directory, hit));
      }
    } catch {
      // Phase directory unreadable — leave artifact paths unset.
    }
  }

  output(result, raw);
}
|
||||
|
||||
/**
 * `init new-project` — bootstrap data for the new-project workflow: resolved
 * models, existing planning state, brownfield detection (source files or a
 * package manifest), git presence, and Brave Search availability.
 */
function cmdInitNewProject(cwd, raw) {
  const cfg = loadConfig(cwd);

  // Brave Search is enabled via env var or a key file under ~/.gsd.
  const homedir = require('os').homedir();
  const braveKeyFile = path.join(homedir, '.gsd', 'brave_api_key');
  const hasBraveSearch = !!(process.env.BRAVE_API_KEY || fs.existsSync(braveKeyFile));

  // Brownfield signal #1: any source files near the project root.
  let hasCode = false;
  try {
    const found = execSync('find . -maxdepth 3 \\( -name "*.ts" -o -name "*.js" -o -name "*.py" -o -name "*.go" -o -name "*.rs" -o -name "*.swift" -o -name "*.java" \\) 2>/dev/null | grep -v node_modules | grep -v .git | head -5', {
      cwd,
      encoding: 'utf-8',
      stdio: ['pipe', 'pipe', 'pipe'],
    });
    hasCode = found.trim().length > 0;
  } catch {
    // find/grep unavailable or no matches — treat as no code.
  }

  // Brownfield signal #2: a package manifest for any supported ecosystem.
  const manifests = ['package.json', 'requirements.txt', 'Cargo.toml', 'go.mod', 'Package.swift'];
  const hasPackageFile = manifests.some(name => pathExistsInternal(cwd, name));

  output({
    // Models
    researcher_model: resolveModelInternal(cwd, 'gsd-project-researcher'),
    synthesizer_model: resolveModelInternal(cwd, 'gsd-research-synthesizer'),
    roadmapper_model: resolveModelInternal(cwd, 'gsd-roadmapper'),

    // Config
    commit_docs: cfg.commit_docs,

    // Existing state
    project_exists: pathExistsInternal(cwd, '.planning/PROJECT.md'),
    has_codebase_map: pathExistsInternal(cwd, '.planning/codebase'),
    planning_exists: pathExistsInternal(cwd, '.planning'),

    // Brownfield detection
    has_existing_code: hasCode,
    has_package_file: hasPackageFile,
    is_brownfield: hasCode || hasPackageFile,
    needs_codebase_map: (hasCode || hasPackageFile) && !pathExistsInternal(cwd, '.planning/codebase'),

    // Git state
    has_git: pathExistsInternal(cwd, '.git'),

    // Enhanced search
    brave_search_available: hasBraveSearch,

    // File paths
    project_path: '.planning/PROJECT.md',
  }, raw);
}
|
||||
|
||||
/**
 * `init new-milestone` — bootstrap data for starting a new milestone:
 * resolved models, config flags, the current milestone's identity, and
 * existence/paths for the core planning documents.
 */
function cmdInitNewMilestone(cwd, raw) {
  const cfg = loadConfig(cwd);
  const current = getMilestoneInfo(cwd);

  output({
    // Models
    researcher_model: resolveModelInternal(cwd, 'gsd-project-researcher'),
    synthesizer_model: resolveModelInternal(cwd, 'gsd-research-synthesizer'),
    roadmapper_model: resolveModelInternal(cwd, 'gsd-roadmapper'),

    // Config
    commit_docs: cfg.commit_docs,
    research_enabled: cfg.research,

    // Current milestone
    current_milestone: current.version,
    current_milestone_name: current.name,

    // File existence
    project_exists: pathExistsInternal(cwd, '.planning/PROJECT.md'),
    roadmap_exists: pathExistsInternal(cwd, '.planning/ROADMAP.md'),
    state_exists: pathExistsInternal(cwd, '.planning/STATE.md'),

    // File paths
    project_path: '.planning/PROJECT.md',
    roadmap_path: '.planning/ROADMAP.md',
    state_path: '.planning/STATE.md',
  }, raw);
}
|
||||
|
||||
/**
 * `init quick` — bootstrap data for a quick task: resolved models, a
 * collision-resistant task ID, a slug derived from the description, and the
 * target directory under .planning/quick.
 */
function cmdInitQuick(cwd, description, raw) {
  const cfg = loadConfig(cwd);
  const now = new Date();
  const slug = description ? generateSlugInternal(description)?.substring(0, 40) : null;

  // Quick task ID: YYMMDD-xxx, where xxx encodes the 2-second block since
  // midnight as 3-char Base36 (000 at 00:00:00 .. xbz at 23:59:58). A ~2s
  // uniqueness window per user — practically collision-free across a team.
  const yymmdd = [
    String(now.getFullYear()).slice(-2),
    String(now.getMonth() + 1).padStart(2, '0'),
    String(now.getDate()).padStart(2, '0'),
  ].join('');
  const secondsSinceMidnight = now.getHours() * 3600 + now.getMinutes() * 60 + now.getSeconds();
  const quickId = `${yymmdd}-${Math.floor(secondsSinceMidnight / 2).toString(36).padStart(3, '0')}`;

  output({
    // Models
    planner_model: resolveModelInternal(cwd, 'gsd-planner'),
    executor_model: resolveModelInternal(cwd, 'gsd-executor'),
    checker_model: resolveModelInternal(cwd, 'gsd-plan-checker'),
    verifier_model: resolveModelInternal(cwd, 'gsd-verifier'),

    // Config
    commit_docs: cfg.commit_docs,

    // Quick task info
    quick_id: quickId,
    slug: slug,
    description: description || null,

    // Timestamps
    date: now.toISOString().split('T')[0],
    timestamp: now.toISOString(),

    // Paths
    quick_dir: '.planning/quick',
    task_dir: slug ? `.planning/quick/${quickId}-${slug}` : null,

    // File existence
    roadmap_exists: pathExistsInternal(cwd, '.planning/ROADMAP.md'),
    planning_exists: pathExistsInternal(cwd, '.planning'),
  }, raw);
}
|
||||
|
||||
/**
 * `init resume` — bootstrap data for resuming work: planning-file existence,
 * core file paths, and whether a previous agent run was interrupted
 * (signalled by a marker file left under .planning/).
 */
function cmdInitResume(cwd, raw) {
  const cfg = loadConfig(cwd);

  let interruptedAgentId = null;
  try {
    const marker = path.join(cwd, '.planning', 'current-agent-id.txt');
    interruptedAgentId = fs.readFileSync(marker, 'utf-8').trim();
  } catch {
    // No marker file — nothing was interrupted.
  }

  output({
    // File existence
    state_exists: pathExistsInternal(cwd, '.planning/STATE.md'),
    roadmap_exists: pathExistsInternal(cwd, '.planning/ROADMAP.md'),
    project_exists: pathExistsInternal(cwd, '.planning/PROJECT.md'),
    planning_exists: pathExistsInternal(cwd, '.planning'),

    // File paths
    state_path: '.planning/STATE.md',
    roadmap_path: '.planning/ROADMAP.md',
    project_path: '.planning/PROJECT.md',

    // Agent state
    has_interrupted_agent: !!interruptedAgentId,
    interrupted_agent_id: interruptedAgentId,

    // Config
    commit_docs: cfg.commit_docs,
  }, raw);
}
|
||||
|
||||
/**
 * `init verify-work` — bootstrap data for the verify-work workflow:
 * resolved models, commit flag, and the target phase's identity plus
 * whether a VERIFICATION artifact already exists.
 */
function cmdInitVerifyWork(cwd, phase, raw) {
  if (!phase) {
    error('phase required for init verify-work');
  }

  const cfg = loadConfig(cwd);
  const info = findPhaseInternal(cwd, phase);

  output({
    // Models
    planner_model: resolveModelInternal(cwd, 'gsd-planner'),
    checker_model: resolveModelInternal(cwd, 'gsd-plan-checker'),

    // Config
    commit_docs: cfg.commit_docs,

    // Phase info
    phase_found: !!info,
    phase_dir: info?.directory || null,
    phase_number: info?.phase_number || null,
    phase_name: info?.phase_name || null,

    // Existing artifacts
    has_verification: info?.has_verification || false,
  }, raw);
}
|
||||
|
||||
/**
 * `init phase-op` — bootstrap data for generic phase operations
 * (discuss-phase and similar flows): config flags, phase identity, artifact
 * inventory, and key file paths.
 *
 * Phase resolution order:
 *   1. findPhaseInternal (a directory on disk);
 *   2. if that match is archived, prefer the current milestone's ROADMAP
 *      entry so we don't attach to shipped work reusing the phase number;
 *   3. if nothing is on disk at all, fall back to ROADMAP (e.g. Plans: TBD).
 */
function cmdInitPhaseOp(cwd, phase, raw) {
  const config = loadConfig(cwd);

  // Build a directory-less phase descriptor from the current milestone's
  // ROADMAP entry. Returns null when ROADMAP has no such phase.
  // (Previously this literal was duplicated for both fallback cases.)
  const phaseInfoFromRoadmap = () => {
    const roadmapPhase = getRoadmapPhaseInternal(cwd, phase);
    if (!roadmapPhase?.found) return null;
    const phaseName = roadmapPhase.phase_name;
    return {
      found: true,
      directory: null,
      phase_number: roadmapPhase.phase_number,
      phase_name: phaseName,
      phase_slug: phaseName ? phaseName.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '') : null,
      plans: [],
      summaries: [],
      incomplete_plans: [],
      has_research: false,
      has_context: false,
      has_verification: false,
    };
  };

  let phaseInfo = findPhaseInternal(cwd, phase);

  // If the only disk match comes from an archived milestone, prefer the
  // current milestone's ROADMAP entry so discuss-phase and similar flows
  // don't attach to shipped work that reused the same phase number.
  if (phaseInfo?.archived) {
    phaseInfo = phaseInfoFromRoadmap() || phaseInfo;
  }

  // Fallback to ROADMAP.md if no directory exists (e.g., Plans: TBD)
  if (!phaseInfo) {
    phaseInfo = phaseInfoFromRoadmap();
  }

  const result = {
    // Config
    commit_docs: config.commit_docs,
    brave_search: config.brave_search,

    // Phase info
    phase_found: !!phaseInfo,
    phase_dir: phaseInfo?.directory || null,
    phase_number: phaseInfo?.phase_number || null,
    phase_name: phaseInfo?.phase_name || null,
    phase_slug: phaseInfo?.phase_slug || null,
    padded_phase: phaseInfo?.phase_number ? normalizePhaseName(phaseInfo.phase_number) : null,

    // Existing artifacts
    has_research: phaseInfo?.has_research || false,
    has_context: phaseInfo?.has_context || false,
    has_plans: (phaseInfo?.plans?.length || 0) > 0,
    has_verification: phaseInfo?.has_verification || false,
    plan_count: phaseInfo?.plans?.length || 0,

    // File existence
    roadmap_exists: pathExistsInternal(cwd, '.planning/ROADMAP.md'),
    planning_exists: pathExistsInternal(cwd, '.planning'),

    // File paths
    state_path: '.planning/STATE.md',
    roadmap_path: '.planning/ROADMAP.md',
    requirements_path: '.planning/REQUIREMENTS.md',
  };

  if (phaseInfo?.directory) {
    // Attach well-known artifacts, matching either "NN-KIND.md" or "KIND.md".
    try {
      const files = fs.readdirSync(path.join(cwd, phaseInfo.directory));
      for (const [kind, key] of [
        ['CONTEXT', 'context_path'],
        ['RESEARCH', 'research_path'],
        ['VERIFICATION', 'verification_path'],
        ['UAT', 'uat_path'],
      ]) {
        const hit = files.find(f => f.endsWith(`-${kind}.md`) || f === `${kind}.md`);
        if (hit) result[key] = toPosixPath(path.join(phaseInfo.directory, hit));
      }
    } catch {
      // Phase directory unreadable — leave artifact paths unset.
    }
  }

  output(result, raw);
}
|
||||
|
||||
/**
 * `init todos` — enumerate pending todo files (best-effort), optionally
 * filtered by area, plus the config/timestamps/paths the todos workflow
 * needs. Unreadable files and a missing pending directory are tolerated.
 */
function cmdInitTodos(cwd, area, raw) {
  const cfg = loadConfig(cwd);
  const now = new Date();

  const pendingDir = path.join(cwd, '.planning', 'todos', 'pending');
  const todos = [];

  try {
    for (const file of fs.readdirSync(pendingDir)) {
      if (!file.endsWith('.md')) continue;
      try {
        const body = fs.readFileSync(path.join(pendingDir, file), 'utf-8');
        const created = body.match(/^created:\s*(.+)$/m);
        const title = body.match(/^title:\s*(.+)$/m);
        const areaLine = body.match(/^area:\s*(.+)$/m);
        const todoArea = areaLine ? areaLine[1].trim() : 'general';

        if (area && todoArea !== area) continue;

        todos.push({
          file,
          created: created ? created[1].trim() : 'unknown',
          title: title ? title[1].trim() : 'Untitled',
          area: todoArea,
          path: '.planning/todos/pending/' + file,
        });
      } catch {
        // Unreadable todo file — skip it (best-effort listing).
      }
    }
  } catch {
    // Pending directory missing — report zero todos.
  }

  output({
    // Config
    commit_docs: cfg.commit_docs,

    // Timestamps
    date: now.toISOString().split('T')[0],
    timestamp: now.toISOString(),

    // Todo inventory
    todo_count: todos.length,
    todos,
    area_filter: area || null,

    // Paths
    pending_dir: '.planning/todos/pending',
    completed_dir: '.planning/todos/completed',

    // File existence
    planning_exists: pathExistsInternal(cwd, '.planning'),
    todos_dir_exists: pathExistsInternal(cwd, '.planning/todos'),
    pending_dir_exists: pathExistsInternal(cwd, '.planning/todos/pending'),
  }, raw);
}
|
||||
|
||||
/**
 * Build the init payload for milestone operations: current milestone info,
 * phase completion counts, archived milestones, and planning-file existence.
 *
 * A phase directory counts as "completed" when it contains any SUMMARY file
 * (`*-SUMMARY.md` or `SUMMARY.md`).
 *
 * @param {string} cwd - Project root containing `.planning/`
 * @param {boolean} raw - Output mode passed through to `output()`
 */
function cmdInitMilestoneOp(cwd, raw) {
  const config = loadConfig(cwd);
  const milestone = getMilestoneInfo(cwd);

  // Phase inventory: total directory count plus how many contain a summary
  const phasesDir = path.join(cwd, '.planning', 'phases');
  let phaseCount = 0;
  let completedPhases = 0;
  try {
    const phaseDirs = fs
      .readdirSync(phasesDir, { withFileTypes: true })
      .filter((entry) => entry.isDirectory())
      .map((entry) => entry.name);
    phaseCount = phaseDirs.length;

    for (const name of phaseDirs) {
      try {
        const contents = fs.readdirSync(path.join(phasesDir, name));
        if (contents.some((f) => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md')) {
          completedPhases++;
        }
      } catch {
        // Unreadable phase directory — treat as not completed
      }
    }
  } catch {
    // No phases directory yet
  }

  // Previously archived milestones (one directory per milestone)
  let archivedMilestones = [];
  try {
    archivedMilestones = fs
      .readdirSync(path.join(cwd, '.planning', 'archive'), { withFileTypes: true })
      .filter((entry) => entry.isDirectory())
      .map((entry) => entry.name);
  } catch {
    // No archive directory yet
  }

  const result = {
    // Config
    commit_docs: config.commit_docs,

    // Current milestone
    milestone_version: milestone.version,
    milestone_name: milestone.name,
    milestone_slug: generateSlugInternal(milestone.name),

    // Phase counts
    phase_count: phaseCount,
    completed_phases: completedPhases,
    all_phases_complete: phaseCount > 0 && phaseCount === completedPhases,

    // Archive
    archived_milestones: archivedMilestones,
    archive_count: archivedMilestones.length,

    // File existence
    project_exists: pathExistsInternal(cwd, '.planning/PROJECT.md'),
    roadmap_exists: pathExistsInternal(cwd, '.planning/ROADMAP.md'),
    state_exists: pathExistsInternal(cwd, '.planning/STATE.md'),
    archive_exists: pathExistsInternal(cwd, '.planning/archive'),
    phases_dir_exists: pathExistsInternal(cwd, '.planning/phases'),
  };

  output(result, raw);
}
|
||||
|
||||
/**
 * Build the init payload for the map-codebase workflow: resolved mapper model,
 * relevant config flags, and any previously generated codebase maps.
 *
 * @param {string} cwd - Project root containing `.planning/`
 * @param {boolean} raw - Output mode passed through to `output()`
 */
function cmdInitMapCodebase(cwd, raw) {
  const config = loadConfig(cwd);

  // Discover previously generated maps (markdown files under .planning/codebase)
  let existingMaps = [];
  try {
    existingMaps = fs
      .readdirSync(path.join(cwd, '.planning', 'codebase'))
      .filter((name) => name.endsWith('.md'));
  } catch {
    // Codebase directory absent — no maps yet
  }

  const result = {
    // Models
    mapper_model: resolveModelInternal(cwd, 'gsd-codebase-mapper'),

    // Config
    commit_docs: config.commit_docs,
    search_gitignored: config.search_gitignored,
    parallelization: config.parallelization,

    // Paths
    codebase_dir: '.planning/codebase',

    // Existing maps
    existing_maps: existingMaps,
    has_maps: existingMaps.length > 0,

    // File existence
    planning_exists: pathExistsInternal(cwd, '.planning'),
    codebase_dir_exists: pathExistsInternal(cwd, '.planning/codebase'),
  };

  output(result, raw);
}
|
||||
|
||||
/**
 * Build the init payload for the progress workflow: per-phase status for the
 * current milestone (merging on-disk phase directories with phases that exist
 * only as ROADMAP headings), current/next phase selection, paused-work marker,
 * resolved models, and planning-file existence.
 *
 * Status per phase: 'complete' (every plan has a summary), 'in_progress'
 * (plans exist, summaries lag), 'researched' (research only), 'pending'
 * (directory exists, nothing in it), 'not_started' (ROADMAP heading only).
 *
 * @param {string} cwd - Project root containing `.planning/`
 * @param {boolean} raw - Output mode passed through to `output()`
 */
function cmdInitProgress(cwd, raw) {
  const config = loadConfig(cwd);
  const milestone = getMilestoneInfo(cwd);

  // Analyze phases — filter to current milestone and include ROADMAP-only phases
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const phases = [];
  let currentPhase = null;
  let nextPhase = null;

  // Build set of phases defined in ROADMAP for the current milestone.
  // Headings look like "## Phase 3: Some Name" (2-4 hashes, optional
  // letter/decimal suffix on the number).
  const roadmapPhaseNums = new Set();
  const roadmapPhaseNames = new Map();
  try {
    const roadmapContent = extractCurrentMilestone(
      fs.readFileSync(path.join(cwd, '.planning', 'ROADMAP.md'), 'utf-8'), cwd
    );
    const headingPattern = /#{2,4}\s*Phase\s+(\d+[A-Z]?(?:\.\d+)*)\s*:\s*([^\n]+)/gi;
    let hm;
    while ((hm = headingPattern.exec(roadmapContent)) !== null) {
      roadmapPhaseNums.add(hm[1]);
      // Strip the "(INSERTED)" marker from displayed names
      roadmapPhaseNames.set(hm[1], hm[2].replace(/\(INSERTED\)/i, '').trim());
    }
  } catch {} // No ROADMAP.md — proceed with disk-only phases

  const isDirInMilestone = getMilestonePhaseFilter(cwd);
  const seenPhaseNums = new Set();

  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name)
      .filter(isDirInMilestone)
      .sort((a, b) => {
        const pa = a.match(/^(\d+[A-Z]?(?:\.\d+)*)/i);
        const pb = b.match(/^(\d+[A-Z]?(?:\.\d+)*)/i);
        if (!pa || !pb) return a.localeCompare(b);
        // NOTE(review): parseInt truncates decimal/letter suffixes
        // ("2.1" -> 2), so same-base phases compare equal here and keep
        // their readdir order — confirm this is acceptable.
        return parseInt(pa[1], 10) - parseInt(pb[1], 10);
      });

    for (const dir of dirs) {
      // Split "NN-name" into number and slug; either part may be absent
      const match = dir.match(/^(\d+[A-Z]?(?:\.\d+)*)-?(.*)/i);
      const phaseNumber = match ? match[1] : dir;
      const phaseName = match && match[2] ? match[2] : null;
      // Track seen numbers with leading zeros stripped ("03" -> "3") so
      // ROADMAP numbers can be compared against directory numbers below
      seenPhaseNums.add(phaseNumber.replace(/^0+/, '') || '0');

      const phasePath = path.join(phasesDir, dir);
      const phaseFiles = fs.readdirSync(phasePath);

      const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md');
      const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');
      const hasResearch = phaseFiles.some(f => f.endsWith('-RESEARCH.md') || f === 'RESEARCH.md');

      // Complete only when every plan is summarized AND at least one plan exists
      const status = summaries.length >= plans.length && plans.length > 0 ? 'complete' :
        plans.length > 0 ? 'in_progress' :
        hasResearch ? 'researched' : 'pending';

      const phaseInfo = {
        number: phaseNumber,
        name: phaseName,
        directory: '.planning/phases/' + dir,
        status,
        plan_count: plans.length,
        summary_count: summaries.length,
        has_research: hasResearch,
      };

      phases.push(phaseInfo);

      // Find current (first incomplete with plans) and next (first pending)
      if (!currentPhase && (status === 'in_progress' || status === 'researched')) {
        currentPhase = phaseInfo;
      }
      if (!nextPhase && status === 'pending') {
        nextPhase = phaseInfo;
      }
    }
  } catch {} // No phases directory yet

  // Add phases defined in ROADMAP but not yet scaffolded to disk
  for (const [num, name] of roadmapPhaseNames) {
    const stripped = num.replace(/^0+/, '') || '0';
    if (!seenPhaseNums.has(stripped)) {
      const phaseInfo = {
        number: num,
        // Slugify the heading text for the name field
        name: name.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, ''),
        directory: null,
        status: 'not_started',
        plan_count: 0,
        summary_count: 0,
        has_research: false,
      };
      phases.push(phaseInfo);
      // Only promote a ROADMAP-only phase to "next" when nothing on disk
      // already claimed current or next
      if (!nextPhase && !currentPhase) {
        nextPhase = phaseInfo;
      }
    }
  }

  // Re-sort phases by number after adding ROADMAP-only phases
  // NOTE(review): same parseInt truncation caveat as the sort above.
  phases.sort((a, b) => parseInt(a.number, 10) - parseInt(b.number, 10));

  // Check for paused work (a "**Paused At:**" marker in STATE.md)
  let pausedAt = null;
  try {
    const state = fs.readFileSync(path.join(cwd, '.planning', 'STATE.md'), 'utf-8');
    const pauseMatch = state.match(/\*\*Paused At:\*\*\s*(.+)/);
    if (pauseMatch) pausedAt = pauseMatch[1].trim();
  } catch {} // No STATE.md — nothing paused

  const result = {
    // Models
    executor_model: resolveModelInternal(cwd, 'gsd-executor'),
    planner_model: resolveModelInternal(cwd, 'gsd-planner'),

    // Config
    commit_docs: config.commit_docs,

    // Milestone
    milestone_version: milestone.version,
    milestone_name: milestone.name,

    // Phase overview
    phases,
    phase_count: phases.length,
    completed_count: phases.filter(p => p.status === 'complete').length,
    in_progress_count: phases.filter(p => p.status === 'in_progress').length,

    // Current state
    current_phase: currentPhase,
    next_phase: nextPhase,
    paused_at: pausedAt,
    has_work_in_progress: !!currentPhase,

    // File existence
    project_exists: pathExistsInternal(cwd, '.planning/PROJECT.md'),
    roadmap_exists: pathExistsInternal(cwd, '.planning/ROADMAP.md'),
    state_exists: pathExistsInternal(cwd, '.planning/STATE.md'),
    // File paths
    state_path: '.planning/STATE.md',
    roadmap_path: '.planning/ROADMAP.md',
    project_path: '.planning/PROJECT.md',
    config_path: '.planning/config.json',
  };

  output(result, raw);
}
|
||||
|
||||
// Public API: one cmdInit* entry point per GSD workflow command.
module.exports = {
  cmdInitExecutePhase,
  cmdInitPlanPhase,
  cmdInitNewProject,
  cmdInitNewMilestone,
  cmdInitQuick,
  cmdInitResume,
  cmdInitVerifyWork,
  cmdInitPhaseOp,
  cmdInitTodos,
  cmdInitMilestoneOp,
  cmdInitMapCodebase,
  cmdInitProgress,
};
|
||||
250
get-shit-done/bin/lib/milestone.cjs
Normal file
250
get-shit-done/bin/lib/milestone.cjs
Normal file
@@ -0,0 +1,250 @@
|
||||
/**
|
||||
* Milestone — Milestone and requirements lifecycle operations
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { escapeRegex, getMilestonePhaseFilter, normalizeMd, output, error } = require('./core.cjs');
|
||||
const { extractFrontmatter } = require('./frontmatter.cjs');
|
||||
const { writeStateMd } = require('./state.cjs');
|
||||
|
||||
/**
 * Mark one or more requirements complete in `.planning/REQUIREMENTS.md`.
 *
 * For each requirement ID this updates both representations when present:
 * the checkbox form (`- [ ] **REQ-ID**` -> `- [x] **REQ-ID**`) and the
 * traceability-table cell (`| REQ-ID | ... | Pending |` -> `... Complete |`).
 * IDs that match neither pattern are classified as already-complete (if a
 * completed form is found) or not-found.
 *
 * @param {string} cwd - Project root containing `.planning/`
 * @param {string[]} reqIdsRaw - CLI args; comma/space-separated or
 *   bracket-wrapped requirement IDs
 * @param {boolean} raw - Output mode passed through to `output()`
 */
function cmdRequirementsMarkComplete(cwd, reqIdsRaw, raw) {
  if (!reqIdsRaw || reqIdsRaw.length === 0) {
    error('requirement IDs required. Usage: requirements mark-complete REQ-01,REQ-02 or REQ-01 REQ-02');
  }

  // Accept comma-separated, space-separated, or bracket-wrapped: [REQ-01, REQ-02]
  const reqIds = reqIdsRaw
    .join(' ')
    .replace(/[\[\]]/g, '')
    .split(/[,\s]+/)
    .map(r => r.trim())
    .filter(Boolean);

  if (reqIds.length === 0) {
    error('no valid requirement IDs found');
  }

  const reqPath = path.join(cwd, '.planning', 'REQUIREMENTS.md');
  if (!fs.existsSync(reqPath)) {
    output({ updated: false, reason: 'REQUIREMENTS.md not found', ids: reqIds }, raw, 'no requirements file');
    return;
  }

  let reqContent = fs.readFileSync(reqPath, 'utf-8');
  const updated = [];
  const alreadyComplete = [];
  const notFound = [];

  for (const reqId of reqIds) {
    let found = false;
    // IDs come from user input — escape before embedding in a RegExp
    const reqEscaped = escapeRegex(reqId);

    // Update checkbox: - [ ] **REQ-ID** → - [x] **REQ-ID**
    // (replace() resets lastIndex for /g regexes, so reusing the object
    // after test() is safe here)
    const checkboxPattern = new RegExp(`(-\\s*\\[)[ ](\\]\\s*\\*\\*${reqEscaped}\\*\\*)`, 'gi');
    if (checkboxPattern.test(reqContent)) {
      reqContent = reqContent.replace(checkboxPattern, '$1x$2');
      found = true;
    }

    // Update traceability table: | REQ-ID | Phase N | Pending | → | REQ-ID | Phase N | Complete |
    const tablePattern = new RegExp(`(\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|)\\s*Pending\\s*(\\|)`, 'gi');
    if (tablePattern.test(reqContent)) {
      // Re-read since test() advances lastIndex for global regex
      reqContent = reqContent.replace(
        new RegExp(`(\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|)\\s*Pending\\s*(\\|)`, 'gi'),
        '$1 Complete $2'
      );
      found = true;
    }

    if (found) {
      updated.push(reqId);
    } else {
      // Check if already complete before declaring not_found
      const doneCheckbox = new RegExp(`-\\s*\\[x\\]\\s*\\*\\*${reqEscaped}\\*\\*`, 'gi');
      const doneTable = new RegExp(`\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|\\s*Complete\\s*\\|`, 'gi');
      if (doneCheckbox.test(reqContent) || doneTable.test(reqContent)) {
        alreadyComplete.push(reqId);
      } else {
        notFound.push(reqId);
      }
    }
  }

  // Only touch the file when at least one requirement actually changed
  if (updated.length > 0) {
    fs.writeFileSync(reqPath, reqContent, 'utf-8');
  }

  output({
    updated: updated.length > 0,
    marked_complete: updated,
    already_complete: alreadyComplete,
    not_found: notFound,
    total: reqIds.length,
  }, raw, `${updated.length}/${reqIds.length} requirements marked complete`);
}
|
||||
|
||||
/**
 * Complete and archive a milestone.
 *
 * Side effects, in order:
 *  1. Gathers stats (phase/plan/task counts, summary one-liners) from phase
 *     directories belonging to the current milestone.
 *  2. Copies ROADMAP.md and REQUIREMENTS.md into `.planning/milestones/` with
 *     versioned names; moves the audit file there if present.
 *  3. Prepends a shipped-milestone entry to MILESTONES.md (newest first).
 *  4. Rewrites the Status / Last Activity lines in STATE.md.
 *  5. Optionally moves the milestone's phase directories into the archive
 *     (`options.archivePhases`).
 *
 * @param {string} cwd - Project root containing `.planning/`
 * @param {string} version - Milestone version label, e.g. "v1.0" (required)
 * @param {{name?: string, archivePhases?: boolean}} options
 * @param {boolean} raw - Output mode passed through to `output()`
 */
function cmdMilestoneComplete(cwd, version, options, raw) {
  if (!version) {
    error('version required for milestone complete (e.g., v1.0)');
  }

  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  const reqPath = path.join(cwd, '.planning', 'REQUIREMENTS.md');
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  const milestonesPath = path.join(cwd, '.planning', 'MILESTONES.md');
  const archiveDir = path.join(cwd, '.planning', 'milestones');
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const today = new Date().toISOString().split('T')[0];
  const milestoneName = options.name || version;

  // Ensure archive directory exists
  fs.mkdirSync(archiveDir, { recursive: true });

  // Scope stats and accomplishments to only the phases belonging to the
  // current milestone's ROADMAP. Uses the shared filter from core.cjs
  // (same logic used by cmdPhasesList and other callers).
  const isDirInMilestone = getMilestonePhaseFilter(cwd);

  // Gather stats from phases (scoped to current milestone only)
  let phaseCount = 0;
  let totalPlans = 0;
  let totalTasks = 0;
  const accomplishments = [];

  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort();

    for (const dir of dirs) {
      if (!isDirInMilestone(dir)) continue;

      phaseCount++;
      const phaseFiles = fs.readdirSync(path.join(phasesDir, dir));
      const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md');
      const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');
      totalPlans += plans.length;

      // Extract one-liners from summaries
      for (const s of summaries) {
        try {
          const content = fs.readFileSync(path.join(phasesDir, dir, s), 'utf-8');
          const fm = extractFrontmatter(content);
          if (fm['one-liner']) {
            accomplishments.push(fm['one-liner']);
          }
          // Count tasks — note tasks are counted from SUMMARY files here,
          // not from plans
          const taskMatches = content.match(/##\s*Task\s*\d+/gi) || [];
          totalTasks += taskMatches.length;
        } catch {} // unreadable summary — skip it
      }
    }
  } catch {} // no phases directory — stats stay zero

  // Archive ROADMAP.md (copy, not move — the live file is left in place)
  if (fs.existsSync(roadmapPath)) {
    const roadmapContent = fs.readFileSync(roadmapPath, 'utf-8');
    fs.writeFileSync(path.join(archiveDir, `${version}-ROADMAP.md`), roadmapContent, 'utf-8');
  }

  // Archive REQUIREMENTS.md with a banner pointing back to the live file
  if (fs.existsSync(reqPath)) {
    const reqContent = fs.readFileSync(reqPath, 'utf-8');
    const archiveHeader = `# Requirements Archive: ${version} ${milestoneName}\n\n**Archived:** ${today}\n**Status:** SHIPPED\n\nFor current requirements, see \`.planning/REQUIREMENTS.md\`.\n\n---\n\n`;
    fs.writeFileSync(path.join(archiveDir, `${version}-REQUIREMENTS.md`), archiveHeader + reqContent, 'utf-8');
  }

  // Archive audit file if exists (moved, not copied)
  const auditFile = path.join(cwd, '.planning', `${version}-MILESTONE-AUDIT.md`);
  if (fs.existsSync(auditFile)) {
    fs.renameSync(auditFile, path.join(archiveDir, `${version}-MILESTONE-AUDIT.md`));
  }

  // Create/append MILESTONES.md entry
  const accomplishmentsList = accomplishments.map(a => `- ${a}`).join('\n');
  const milestoneEntry = `## ${version} ${milestoneName} (Shipped: ${today})\n\n**Phases completed:** ${phaseCount} phases, ${totalPlans} plans, ${totalTasks} tasks\n\n**Key accomplishments:**\n${accomplishmentsList || '- (none recorded)'}\n\n---\n\n`;

  if (fs.existsSync(milestonesPath)) {
    const existing = fs.readFileSync(milestonesPath, 'utf-8');
    if (!existing.trim()) {
      // Empty file — treat like new
      fs.writeFileSync(milestonesPath, normalizeMd(`# Milestones\n\n${milestoneEntry}`), 'utf-8');
    } else {
      // Insert after the header line(s) for reverse chronological order (newest first)
      const headerMatch = existing.match(/^(#{1,3}\s+[^\n]*\n\n?)/);
      if (headerMatch) {
        const header = headerMatch[1];
        const rest = existing.slice(header.length);
        fs.writeFileSync(milestonesPath, normalizeMd(header + milestoneEntry + rest), 'utf-8');
      } else {
        // No recognizable header — prepend the entry
        fs.writeFileSync(milestonesPath, normalizeMd(milestoneEntry + existing), 'utf-8');
      }
    }
  } else {
    fs.writeFileSync(milestonesPath, normalizeMd(`# Milestones\n\n${milestoneEntry}`), 'utf-8');
  }

  // Update STATE.md: rewrite the value after each bold field label in place
  if (fs.existsSync(statePath)) {
    let stateContent = fs.readFileSync(statePath, 'utf-8');
    stateContent = stateContent.replace(
      /(\*\*Status:\*\*\s*).*/,
      `$1${version} milestone complete`
    );
    stateContent = stateContent.replace(
      /(\*\*Last Activity:\*\*\s*).*/,
      `$1${today}`
    );
    stateContent = stateContent.replace(
      /(\*\*Last Activity Description:\*\*\s*).*/,
      `$1${version} milestone completed and archived`
    );
    writeStateMd(statePath, stateContent, cwd);
  }

  // Archive phase directories if requested (moves them out of .planning/phases)
  let phasesArchived = false;
  if (options.archivePhases) {
    try {
      const phaseArchiveDir = path.join(archiveDir, `${version}-phases`);
      fs.mkdirSync(phaseArchiveDir, { recursive: true });

      const phaseEntries = fs.readdirSync(phasesDir, { withFileTypes: true });
      const phaseDirNames = phaseEntries.filter(e => e.isDirectory()).map(e => e.name);
      let archivedCount = 0;
      for (const dir of phaseDirNames) {
        if (!isDirInMilestone(dir)) continue;
        fs.renameSync(path.join(phasesDir, dir), path.join(phaseArchiveDir, dir));
        archivedCount++;
      }
      phasesArchived = archivedCount > 0;
    } catch {} // best-effort — a failed move leaves phasesArchived false
  }

  const result = {
    version,
    name: milestoneName,
    date: today,
    phases: phaseCount,
    plans: totalPlans,
    tasks: totalTasks,
    accomplishments,
    archived: {
      roadmap: fs.existsSync(path.join(archiveDir, `${version}-ROADMAP.md`)),
      requirements: fs.existsSync(path.join(archiveDir, `${version}-REQUIREMENTS.md`)),
      audit: fs.existsSync(path.join(archiveDir, `${version}-MILESTONE-AUDIT.md`)),
      phases: phasesArchived,
    },
    milestones_updated: true,
    state_updated: fs.existsSync(statePath),
  };

  output(result, raw);
}
|
||||
|
||||
// Public API of the milestone module.
module.exports = {
  cmdRequirementsMarkComplete,
  cmdMilestoneComplete,
};
|
||||
68
get-shit-done/bin/lib/model-profiles.cjs
Normal file
68
get-shit-done/bin/lib/model-profiles.cjs
Normal file
@@ -0,0 +1,68 @@
|
||||
/**
 * Mapping of GSD agent to model for each profile.
 *
 * Should be in sync with the profiles table in `get-shit-done/references/model-profiles.md`. But
 * possibly worth making this the single source of truth at some point, and removing the markdown
 * reference table in favor of programmatically determining the model to use for an agent (which
 * would be faster, use fewer tokens, and be less error-prone).
 */
const MODEL_PROFILES = {
  'gsd-planner': { quality: 'opus', balanced: 'opus', budget: 'sonnet' },
  'gsd-roadmapper': { quality: 'opus', balanced: 'sonnet', budget: 'sonnet' },
  'gsd-executor': { quality: 'opus', balanced: 'sonnet', budget: 'sonnet' },
  'gsd-phase-researcher': { quality: 'opus', balanced: 'sonnet', budget: 'haiku' },
  'gsd-project-researcher': { quality: 'opus', balanced: 'sonnet', budget: 'haiku' },
  'gsd-research-synthesizer': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
  'gsd-debugger': { quality: 'opus', balanced: 'sonnet', budget: 'sonnet' },
  'gsd-codebase-mapper': { quality: 'sonnet', balanced: 'haiku', budget: 'haiku' },
  'gsd-verifier': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
  'gsd-plan-checker': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
  'gsd-integration-checker': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
  'gsd-nyquist-auditor': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
  'gsd-ui-researcher': { quality: 'opus', balanced: 'sonnet', budget: 'haiku' },
  'gsd-ui-checker': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
  'gsd-ui-auditor': { quality: 'sonnet', balanced: 'sonnet', budget: 'haiku' },
};
// Valid profile names ('quality' | 'balanced' | 'budget'), derived from one
// representative agent entry — assumes every agent defines the same profile keys.
const VALID_PROFILES = Object.keys(MODEL_PROFILES['gsd-planner']);
|
||||
|
||||
/**
 * Formats the agent-to-model mapping as a human-readable table (in string format).
 *
 * Columns are padded to the widest agent/model name; the header row is
 * separated from the body by a box-drawing divider.
 *
 * @param {Object<string, string>} agentToModelMap - A mapping from agent to model
 * @returns {string} A formatted table string (ends with a trailing newline)
 */
function formatAgentToModelMapAsTable(agentToModelMap) {
  const entries = Object.entries(agentToModelMap);
  const agentWidth = Math.max('Agent'.length, ...entries.map(([agent]) => agent.length));
  const modelWidth = Math.max('Model'.length, ...entries.map(([, model]) => model.length));

  // One padded table row: " <agent> │ <model>"
  const row = (left, right) => ' ' + left.padEnd(agentWidth) + ' │ ' + right.padEnd(modelWidth);
  const divider = '─'.repeat(agentWidth + 2) + '┼' + '─'.repeat(modelWidth + 2);

  const lines = [
    row('Agent', 'Model'),
    divider,
    ...entries.map(([agent, model]) => row(agent, model)),
  ];
  return lines.join('\n') + '\n';
}
|
||||
|
||||
/**
 * Returns a mapping from agent to model for the given model profile.
 *
 * @param {string} normalizedProfile - The normalized (lowercase and trimmed) profile name
 * @returns {Object<string, string>} A mapping from agent to model for the given profile
 */
function getAgentToModelMapForProfile(normalizedProfile) {
  // Project each agent's per-profile table down to the single requested profile.
  return Object.fromEntries(
    Object.entries(MODEL_PROFILES).map(([agent, modelsByProfile]) => [
      agent,
      modelsByProfile[normalizedProfile],
    ])
  );
}
|
||||
|
||||
// Public API of the model-profiles module.
module.exports = {
  MODEL_PROFILES,
  VALID_PROFILES,
  formatAgentToModelMapAsTable,
  getAgentToModelMapForProfile,
};
|
||||
939
get-shit-done/bin/lib/phase.cjs
Normal file
939
get-shit-done/bin/lib/phase.cjs
Normal file
@@ -0,0 +1,939 @@
|
||||
/**
|
||||
* Phase — Phase CRUD, query, and lifecycle operations
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { escapeRegex, normalizePhaseName, comparePhaseNum, findPhaseInternal, getArchivedPhaseDirs, generateSlugInternal, getMilestonePhaseFilter, stripShippedMilestones, extractCurrentMilestone, replaceInCurrentMilestone, toPosixPath, output, error } = require('./core.cjs');
|
||||
const { extractFrontmatter } = require('./frontmatter.cjs');
|
||||
const { writeStateMd } = require('./state.cjs');
|
||||
|
||||
/**
 * List phase directories, or files of a given type within them.
 *
 * Without `options.type`, outputs `{ directories, count }`. With `type`
 * ('plans' | 'summaries' | anything else = all files), outputs
 * `{ files, count, phase_dir }`. `options.phase` restricts the listing to the
 * first directory whose name starts with the normalized phase number;
 * `options.includeArchived` appends archived phases as "name [milestone]".
 *
 * @param {string} cwd - Project root containing `.planning/`
 * @param {{type?: string, phase?: string, includeArchived?: boolean}} options
 * @param {boolean} raw - Output mode passed through to `output()`
 */
function cmdPhasesList(cwd, options, raw) {
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const { type, phase, includeArchived } = options;

  // If no phases directory, return empty
  if (!fs.existsSync(phasesDir)) {
    if (type) {
      output({ files: [], count: 0 }, raw, '');
    } else {
      output({ directories: [], count: 0 }, raw, '');
    }
    return;
  }

  try {
    // Get all phase directories
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    let dirs = entries.filter(e => e.isDirectory()).map(e => e.name);

    // Include archived phases if requested
    if (includeArchived) {
      const archived = getArchivedPhaseDirs(cwd);
      for (const a of archived) {
        dirs.push(`${a.name} [${a.milestone}]`);
      }
    }

    // Sort numerically (handles integers, decimals, letter-suffix, hybrids)
    dirs.sort((a, b) => comparePhaseNum(a, b));

    // If filtering by phase number — keep only the first prefix match
    if (phase) {
      const normalized = normalizePhaseName(phase);
      const match = dirs.find(d => d.startsWith(normalized));
      if (!match) {
        output({ files: [], count: 0, phase_dir: null, error: 'Phase not found' }, raw, '');
        return;
      }
      dirs = [match];
    }

    // If listing files of a specific type
    if (type) {
      const files = [];
      for (const dir of dirs) {
        const dirPath = path.join(phasesDir, dir);
        const dirFiles = fs.readdirSync(dirPath);

        let filtered;
        if (type === 'plans') {
          filtered = dirFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md');
        } else if (type === 'summaries') {
          filtered = dirFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');
        } else {
          // Any other type value lists every file in the directory
          filtered = dirFiles;
        }

        // Sorted within each directory; directories themselves keep phase order
        files.push(...filtered.sort());
      }

      const result = {
        files,
        count: files.length,
        // phase_dir is the directory name with its numeric prefix stripped
        phase_dir: phase ? dirs[0].replace(/^\d+(?:\.\d+)*-?/, '') : null,
      };
      output(result, raw, files.join('\n'));
      return;
    }

    // Default: list directories
    output({ directories: dirs, count: dirs.length }, raw, dirs.join('\n'));
  } catch (e) {
    error('Failed to list phases: ' + e.message);
  }
}
|
||||
|
||||
/**
 * Compute the next decimal sub-phase number for a base phase, e.g. given
 * base "02" with "02.1" and "02.2" on disk, the next is "02.3". When no
 * decimals (or no phases directory) exist yet, the next is "<base>.1".
 *
 * @param {string} cwd - Project root containing `.planning/`
 * @param {string} basePhase - Base phase identifier (normalized internally)
 * @param {boolean} raw - Output mode passed through to `output()`
 */
function cmdPhaseNextDecimal(cwd, basePhase, raw) {
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const base = normalizePhaseName(basePhase);

  // Single exit point: JSON payload plus the bare "next" string for raw mode
  const emit = (found, next, existing) =>
    output({ found, base_phase: base, next, existing }, raw, next);

  // No phases directory at all — first decimal of the base
  if (!fs.existsSync(phasesDir)) {
    emit(false, `${base}.1`, []);
    return;
  }

  try {
    const dirNames = fs
      .readdirSync(phasesDir, { withFileTypes: true })
      .filter((entry) => entry.isDirectory())
      .map((entry) => entry.name);

    // Base phase exists either as an exact directory name or "<base>-slug"
    const baseExists = dirNames.some((d) => d === base || d.startsWith(`${base}-`));

    // Collect existing "<base>.N" decimals, sorted numerically
    const decimalPattern = new RegExp(`^${base}\\.(\\d+)`);
    const existingDecimals = dirNames
      .map((d) => d.match(decimalPattern))
      .filter(Boolean)
      .map((m) => `${base}.${m[1]}`)
      .sort((a, b) => comparePhaseNum(a, b));

    let next;
    if (existingDecimals.length === 0) {
      next = `${base}.1`;
    } else {
      // Increment the highest existing decimal
      const highest = existingDecimals[existingDecimals.length - 1];
      next = `${base}.${parseInt(highest.split('.')[1], 10) + 1}`;
    }

    emit(baseExists, next, existingDecimals);
  } catch (e) {
    error('Failed to calculate next decimal phase: ' + e.message);
  }
}
|
||||
|
||||
/**
 * Locate a phase directory by its (normalized) number and report its plans
 * and summaries. Outputs a not-found payload when the phases directory is
 * missing or no directory name starts with the normalized phase.
 *
 * @param {string} cwd - Project root containing `.planning/`
 * @param {string} phase - Phase identifier (required)
 * @param {boolean} raw - Output mode passed through to `output()`
 */
function cmdFindPhase(cwd, phase, raw) {
  if (!phase) {
    error('phase identifier required');
  }

  const phasesDir = path.join(cwd, '.planning', 'phases');
  const normalized = normalizePhaseName(phase);

  const notFound = { found: false, directory: null, phase_number: null, phase_name: null, plans: [], summaries: [] };

  try {
    const dirNames = fs
      .readdirSync(phasesDir, { withFileTypes: true })
      .filter((entry) => entry.isDirectory())
      .map((entry) => entry.name)
      .sort((a, b) => comparePhaseNum(a, b));

    // First directory (in phase order) whose name starts with the phase number
    const matched = dirNames.find((name) => name.startsWith(normalized));
    if (!matched) {
      output(notFound, raw, '');
      return;
    }

    // Split "NN-name" into number and slug parts
    const parts = matched.match(/^(\d+[A-Z]?(?:\.\d+)*)-?(.*)/i);
    const contents = fs.readdirSync(path.join(phasesDir, matched));

    const result = {
      found: true,
      directory: toPosixPath(path.join('.planning', 'phases', matched)),
      phase_number: parts ? parts[1] : normalized,
      phase_name: parts && parts[2] ? parts[2] : null,
      plans: contents.filter((f) => f.endsWith('-PLAN.md') || f === 'PLAN.md').sort(),
      summaries: contents.filter((f) => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md').sort(),
    };

    output(result, raw, result.directory);
  } catch {
    // Phases directory unreadable/missing — same payload as no match
    output(notFound, raw, '');
  }
}
|
||||
|
||||
/**
 * Pull the first line of text following an `<objective>` tag out of a plan
 * document, or null when no such tag is present.
 *
 * @param {string} content - Plan file contents
 * @returns {string|null} The trimmed objective text, or null
 */
function extractObjective(content) {
  const match = /<objective>\s*\n?\s*(.+)/.exec(content);
  return match === null ? null : match[1].trim();
}
|
||||
|
||||
/**
 * Build an execution index for one phase: every plan file, grouped into
 * waves, with completion status derived from the presence of matching
 * SUMMARY files in the same directory.
 *
 * Output shape: { phase, plans[], waves{waveNum: [planId]}, incomplete[],
 * has_checkpoints } — `has_checkpoints` is true when any plan declares
 * `autonomous: false` in its frontmatter.
 *
 * @param {string} cwd   - Project root containing `.planning/`.
 * @param {string} phase - Phase identifier; normalized before directory lookup.
 * @param {boolean} raw  - Raw-output flag forwarded to output().
 */
function cmdPhasePlanIndex(cwd, phase, raw) {
  if (!phase) {
    error('phase required for phase-plan-index');
  }

  const phasesDir = path.join(cwd, '.planning', 'phases');
  const normalized = normalizePhaseName(phase);

  // Find phase directory: first dir (in phase-number order) whose name
  // starts with the normalized phase prefix.
  let phaseDir = null;
  let phaseDirName = null;
  // NOTE(review): phaseDirName is assigned but never read below — candidate for removal.
  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort((a, b) => comparePhaseNum(a, b));
    const match = dirs.find(d => d.startsWith(normalized));
    if (match) {
      phaseDir = path.join(phasesDir, match);
      phaseDirName = match;
    }
  } catch {
    // phases dir doesn't exist
  }

  if (!phaseDir) {
    // Not found is reported as data, not an error, so callers can branch on it.
    output({ phase: normalized, error: 'Phase not found', plans: [], waves: {}, incomplete: [], has_checkpoints: false }, raw);
    return;
  }

  // Get all files in phase directory. Bare PLAN.md / SUMMARY.md (legacy
  // single-plan phases) are accepted alongside prefixed `NN-PLAN.md` names.
  const phaseFiles = fs.readdirSync(phaseDir);
  const planFiles = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md').sort();
  const summaryFiles = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');

  // Build set of plan IDs with summaries (bare SUMMARY.md maps to the
  // empty-string ID, matching the empty ID a bare PLAN.md produces).
  const completedPlanIds = new Set(
    summaryFiles.map(s => s.replace('-SUMMARY.md', '').replace('SUMMARY.md', ''))
  );

  const plans = [];
  const waves = {};
  const incomplete = [];
  let hasCheckpoints = false;

  for (const planFile of planFiles) {
    const planId = planFile.replace('-PLAN.md', '').replace('PLAN.md', '');
    const planPath = path.join(phaseDir, planFile);
    const content = fs.readFileSync(planPath, 'utf-8');
    const fm = extractFrontmatter(content);

    // Count tasks: XML <task> tags (canonical) or ## Task N markdown (legacy).
    // XML count wins whenever any XML task exists.
    const xmlTasks = content.match(/<task[\s>]/gi) || [];
    const mdTasks = content.match(/##\s*Task\s*\d+/gi) || [];
    const taskCount = xmlTasks.length || mdTasks.length;

    // Parse wave as integer; missing/unparseable frontmatter falls back to wave 1.
    const wave = parseInt(fm.wave, 10) || 1;

    // Parse autonomous (default true if not specified). Frontmatter may
    // deliver it as the string 'true' or a real boolean.
    let autonomous = true;
    if (fm.autonomous !== undefined) {
      autonomous = fm.autonomous === 'true' || fm.autonomous === true;
    }

    // Any non-autonomous plan means the phase has human checkpoints.
    if (!autonomous) {
      hasCheckpoints = true;
    }

    // Parse files_modified (underscore is canonical; also accept hyphenated for compat)
    let filesModified = [];
    const fmFiles = fm['files_modified'] || fm['files-modified'];
    if (fmFiles) {
      filesModified = Array.isArray(fmFiles) ? fmFiles : [fmFiles];
    }

    const hasSummary = completedPlanIds.has(planId);
    if (!hasSummary) {
      incomplete.push(planId);
    }

    const plan = {
      id: planId,
      wave,
      autonomous,
      // Prefer the <objective> tag body; fall back to frontmatter, then null.
      objective: extractObjective(content) || fm.objective || null,
      files_modified: filesModified,
      task_count: taskCount,
      has_summary: hasSummary,
    };

    plans.push(plan);

    // Group by wave (object keys are strings, so coerce explicitly).
    const waveKey = String(wave);
    if (!waves[waveKey]) {
      waves[waveKey] = [];
    }
    waves[waveKey].push(planId);
  }

  const result = {
    phase: normalized,
    plans,
    waves,
    incomplete,
    has_checkpoints: hasCheckpoints,
  };

  output(result, raw);
}
|
||||
|
||||
/**
 * Append a new integer phase to the end of the current milestone's roadmap.
 *
 * Finds the highest "Phase N:" heading in the current milestone, creates a
 * `.planning/phases/NN-<slug>/` directory (with .gitkeep so git tracks it),
 * and inserts a stub roadmap entry before the trailing "---" separator, or
 * at end of file when no separator exists.
 *
 * @param {string} cwd         - Project root containing `.planning/`.
 * @param {string} description - Human-readable phase name; also slugified.
 * @param {boolean} raw        - Raw-output flag forwarded to output().
 */
function cmdPhaseAdd(cwd, description, raw) {
  if (!description) {
    error('description required for phase add');
  }

  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  if (!fs.existsSync(roadmapPath)) {
    error('ROADMAP.md not found');
  }

  const roadmapText = fs.readFileSync(roadmapPath, 'utf-8');
  const milestoneText = extractCurrentMilestone(roadmapText, cwd);
  const slug = generateSlugInternal(description);

  // Highest integer phase number, scoped to the current milestone only.
  const phaseNumbers = [...milestoneText.matchAll(/#{2,4}\s*Phase\s+(\d+)[A-Z]?(?:\.\d+)*:/gi)]
    .map((hit) => parseInt(hit[1], 10));
  const maxPhase = phaseNumbers.length > 0 ? Math.max(...phaseNumbers) : 0;

  const newPhaseNum = maxPhase + 1;
  const paddedNum = String(newPhaseNum).padStart(2, '0');
  const dirName = `${paddedNum}-${slug}`;
  const dirPath = path.join(cwd, '.planning', 'phases', dirName);

  // .gitkeep makes git track the otherwise-empty phase directory.
  fs.mkdirSync(dirPath, { recursive: true });
  fs.writeFileSync(path.join(dirPath, '.gitkeep'), '');

  // Stub roadmap entry for the new phase.
  const phaseEntry = `\n### Phase ${newPhaseNum}: ${description}\n\n**Goal:** [To be planned]\n**Requirements**: TBD\n**Depends on:** Phase ${maxPhase}\n**Plans:** 0 plans\n\nPlans:\n- [ ] TBD (run /gsd:plan-phase ${newPhaseNum} to break down)\n`;

  // Insert before the last "---" separator when present, otherwise append.
  const separatorIdx = roadmapText.lastIndexOf('\n---');
  const updatedRoadmap = separatorIdx > 0
    ? roadmapText.slice(0, separatorIdx) + phaseEntry + roadmapText.slice(separatorIdx)
    : roadmapText + phaseEntry;

  fs.writeFileSync(roadmapPath, updatedRoadmap, 'utf-8');

  output({
    phase_number: newPhaseNum,
    padded: paddedNum,
    name: description,
    slug,
    directory: `.planning/phases/${dirName}`,
  }, raw, paddedNum);
}
|
||||
|
||||
/**
 * Insert an urgent decimal phase (e.g. 06.1) immediately after an existing
 * phase in ROADMAP.md, and scaffold its directory on disk.
 *
 * The decimal suffix is one greater than the highest existing sibling
 * decimal under the same base phase. The roadmap entry is placed after the
 * target phase's section (before the next "Phase N" heading, or at EOF).
 *
 * @param {string} cwd         - Project root containing `.planning/`.
 * @param {string} afterPhase  - Existing phase to insert after (e.g. "6" or "06").
 * @param {string} description - Human-readable phase name; also slugified.
 * @param {boolean} raw        - Raw-output flag forwarded to output().
 */
function cmdPhaseInsert(cwd, afterPhase, description, raw) {
  if (!afterPhase || !description) {
    error('after-phase and description required for phase insert');
  }

  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  if (!fs.existsSync(roadmapPath)) {
    error('ROADMAP.md not found');
  }

  const rawContent = fs.readFileSync(roadmapPath, 'utf-8');
  const content = extractCurrentMilestone(rawContent, cwd);
  const slug = generateSlugInternal(description);

  // Normalize input then strip leading zeros for flexible matching:
  // the `0*` in the pattern lets "6" match both "Phase 6" and "Phase 06".
  const normalizedAfter = normalizePhaseName(afterPhase);
  const unpadded = normalizedAfter.replace(/^0+/, '');
  const afterPhaseEscaped = unpadded.replace(/\./g, '\\.');
  const targetPattern = new RegExp(`#{2,4}\\s*Phase\\s+0*${afterPhaseEscaped}:`, 'i');
  if (!targetPattern.test(content)) {
    error(`Phase ${afterPhase} not found in ROADMAP.md`);
  }

  // Calculate next decimal: scan existing phase dirs for `<base>.<n>-...`
  // siblings and take max(n) + 1 (or 1 when none exist).
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const normalizedBase = normalizePhaseName(afterPhase);
  let existingDecimals = [];

  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name);
    const decimalPattern = new RegExp(`^${normalizedBase}\\.(\\d+)`);
    for (const dir of dirs) {
      const dm = dir.match(decimalPattern);
      if (dm) existingDecimals.push(parseInt(dm[1], 10));
    }
  } catch {}
  // Missing phases dir is tolerated: first decimal becomes .1 below.

  const nextDecimal = existingDecimals.length === 0 ? 1 : Math.max(...existingDecimals) + 1;
  const decimalPhase = `${normalizedBase}.${nextDecimal}`;
  const dirName = `${decimalPhase}-${slug}`;
  const dirPath = path.join(cwd, '.planning', 'phases', dirName);

  // Create directory with .gitkeep so git tracks empty folders
  fs.mkdirSync(dirPath, { recursive: true });
  fs.writeFileSync(path.join(dirPath, '.gitkeep'), '');

  // Build phase entry (tagged INSERTED to distinguish from planned phases).
  const phaseEntry = `\n### Phase ${decimalPhase}: ${description} (INSERTED)\n\n**Goal:** [Urgent work - to be planned]\n**Requirements**: TBD\n**Depends on:** Phase ${afterPhase}\n**Plans:** 0 plans\n\nPlans:\n- [ ] TBD (run /gsd:plan-phase ${decimalPhase} to break down)\n`;

  // Insert after the target phase section: locate the target header in the
  // FULL roadmap text (not just the milestone slice), then find the next
  // phase heading after it.
  const headerPattern = new RegExp(`(#{2,4}\\s*Phase\\s+0*${afterPhaseEscaped}:[^\\n]*\\n)`, 'i');
  const headerMatch = rawContent.match(headerPattern);
  if (!headerMatch) {
    error(`Could not find Phase ${afterPhase} header`);
  }

  const headerIdx = rawContent.indexOf(headerMatch[0]);
  const afterHeader = rawContent.slice(headerIdx + headerMatch[0].length);
  const nextPhaseMatch = afterHeader.match(/\n#{2,4}\s+Phase\s+\d/i);

  // Insert just before the next phase heading, or at EOF when the target
  // phase is the last section in the file.
  let insertIdx;
  if (nextPhaseMatch) {
    insertIdx = headerIdx + headerMatch[0].length + nextPhaseMatch.index;
  } else {
    insertIdx = rawContent.length;
  }

  const updatedContent = rawContent.slice(0, insertIdx) + phaseEntry + rawContent.slice(insertIdx);
  fs.writeFileSync(roadmapPath, updatedContent, 'utf-8');

  const result = {
    phase_number: decimalPhase,
    after_phase: afterPhase,
    name: description,
    slug,
    directory: `.planning/phases/${dirName}`,
  };

  output(result, raw, decimalPhase);
}
|
||||
|
||||
/**
 * Remove a phase: delete its directory, renumber all subsequent phases
 * (directories, the files inside them, and ROADMAP.md text), and decrement
 * the phase totals tracked in STATE.md.
 *
 * Decimal targets (e.g. "6.2") only shift sibling decimals down; integer
 * targets shift every later integer phase (and its letter/decimal variants)
 * down by one. Refuses to remove a phase with executed plans (SUMMARY.md
 * files present) unless `options.force` is set.
 *
 * @param {string} cwd         - Project root containing `.planning/`.
 * @param {string} targetPhase - Phase to remove (e.g. "6" or "6.2").
 * @param {{force?: boolean}} options - `force` bypasses the executed-work guard.
 * @param {boolean} raw        - Raw-output flag forwarded to output().
 */
function cmdPhaseRemove(cwd, targetPhase, options, raw) {
  if (!targetPhase) {
    error('phase number required for phase remove');
  }

  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const force = options.force || false;

  if (!fs.existsSync(roadmapPath)) {
    error('ROADMAP.md not found');
  }

  // Normalize the target; a "." in the raw input marks a decimal phase.
  const normalized = normalizePhaseName(targetPhase);
  const isDecimal = targetPhase.includes('.');

  // Find and validate target directory (matched by normalized prefix).
  let targetDir = null;
  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort((a, b) => comparePhaseNum(a, b));
    targetDir = dirs.find(d => d.startsWith(normalized + '-') || d === normalized);
  } catch {}

  // Check for executed work (SUMMARY.md files) — guard against losing history.
  if (targetDir && !force) {
    const targetPath = path.join(phasesDir, targetDir);
    const files = fs.readdirSync(targetPath);
    const summaries = files.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');
    if (summaries.length > 0) {
      error(`Phase ${targetPhase} has ${summaries.length} executed plan(s). Use --force to remove anyway.`);
    }
  }

  // Delete target directory
  if (targetDir) {
    fs.rmSync(path.join(phasesDir, targetDir), { recursive: true, force: true });
  }

  // Renumber subsequent phases
  const renamedDirs = [];
  const renamedFiles = [];

  if (isDecimal) {
    // Decimal removal: renumber sibling decimals (e.g., removing 06.2 → 06.3 becomes 06.2)
    const baseParts = normalized.split('.');
    const baseInt = baseParts[0];
    const removedDecimal = parseInt(baseParts[1], 10);

    try {
      const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
      const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort((a, b) => comparePhaseNum(a, b));

      // Find sibling decimals with higher numbers
      const decPattern = new RegExp(`^${baseInt}\\.(\\d+)-(.+)$`);
      const toRename = [];
      for (const dir of dirs) {
        const dm = dir.match(decPattern);
        if (dm && parseInt(dm[1], 10) > removedDecimal) {
          toRename.push({ dir, oldDecimal: parseInt(dm[1], 10), slug: dm[2] });
        }
      }

      // Sort descending so a rename never lands on a name still in use.
      toRename.sort((a, b) => b.oldDecimal - a.oldDecimal);

      for (const item of toRename) {
        const newDecimal = item.oldDecimal - 1;
        const oldPhaseId = `${baseInt}.${item.oldDecimal}`;
        const newPhaseId = `${baseInt}.${newDecimal}`;
        const newDirName = `${baseInt}.${newDecimal}-${item.slug}`;

        // Rename directory
        fs.renameSync(path.join(phasesDir, item.dir), path.join(phasesDir, newDirName));
        renamedDirs.push({ from: item.dir, to: newDirName });

        // Rename files inside (they may carry a phase prefix like "06.2-01-PLAN.md")
        const dirFiles = fs.readdirSync(path.join(phasesDir, newDirName));
        for (const f of dirFiles) {
          if (f.includes(oldPhaseId)) {
            const newFileName = f.replace(oldPhaseId, newPhaseId);
            fs.renameSync(
              path.join(phasesDir, newDirName, f),
              path.join(phasesDir, newDirName, newFileName)
            );
            renamedFiles.push({ from: f, to: newFileName });
          }
        }
      }
    } catch {}

  } else {
    // Integer removal: renumber all subsequent integer phases
    const removedInt = parseInt(normalized, 10);

    try {
      const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
      const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort((a, b) => comparePhaseNum(a, b));

      // Collect directories that need renumbering (integer phases > removed, and their decimals/letters)
      const toRename = [];
      for (const dir of dirs) {
        const dm = dir.match(/^(\d+)([A-Z])?(?:\.(\d+))?-(.+)$/i);
        if (!dm) continue;
        const dirInt = parseInt(dm[1], 10);
        if (dirInt > removedInt) {
          toRename.push({
            dir,
            oldInt: dirInt,
            letter: dm[2] ? dm[2].toUpperCase() : '',
            decimal: dm[3] ? parseInt(dm[3], 10) : null,
            slug: dm[4],
          });
        }
      }

      // Sort descending so a rename never lands on a name still in use.
      toRename.sort((a, b) => {
        if (a.oldInt !== b.oldInt) return b.oldInt - a.oldInt;
        return (b.decimal || 0) - (a.decimal || 0);
      });

      for (const item of toRename) {
        const newInt = item.oldInt - 1;
        const newPadded = String(newInt).padStart(2, '0');
        const oldPadded = String(item.oldInt).padStart(2, '0');
        const letterSuffix = item.letter || '';
        const decimalSuffix = item.decimal !== null ? `.${item.decimal}` : '';
        const oldPrefix = `${oldPadded}${letterSuffix}${decimalSuffix}`;
        const newPrefix = `${newPadded}${letterSuffix}${decimalSuffix}`;
        const newDirName = `${newPrefix}-${item.slug}`;

        // Rename directory
        fs.renameSync(path.join(phasesDir, item.dir), path.join(phasesDir, newDirName));
        renamedDirs.push({ from: item.dir, to: newDirName });

        // Rename files inside
        const dirFiles = fs.readdirSync(path.join(phasesDir, newDirName));
        for (const f of dirFiles) {
          if (f.startsWith(oldPrefix)) {
            const newFileName = newPrefix + f.slice(oldPrefix.length);
            fs.renameSync(
              path.join(phasesDir, newDirName, f),
              path.join(phasesDir, newDirName, newFileName)
            );
            renamedFiles.push({ from: f, to: newFileName });
          }
        }
      }
    } catch {}
  }

  // Update ROADMAP.md
  let roadmapContent = fs.readFileSync(roadmapPath, 'utf-8');

  // Remove the target phase section (heading through the next phase heading or EOF).
  const targetEscaped = escapeRegex(targetPhase);
  const sectionPattern = new RegExp(
    `\\n?#{2,4}\\s*Phase\\s+${targetEscaped}\\s*:[\\s\\S]*?(?=\\n#{2,4}\\s+Phase\\s+\\d|$)`,
    'i'
  );
  roadmapContent = roadmapContent.replace(sectionPattern, '');

  // Remove from phase list (checkbox)
  const checkboxPattern = new RegExp(`\\n?-\\s*\\[[ x]\\]\\s*.*Phase\\s+${targetEscaped}[:\\s][^\\n]*`, 'gi');
  roadmapContent = roadmapContent.replace(checkboxPattern, '');

  // Remove from progress table
  const tableRowPattern = new RegExp(`\\n?\\|\\s*${targetEscaped}\\.?\\s[^|]*\\|[^\\n]*`, 'gi');
  roadmapContent = roadmapContent.replace(tableRowPattern, '');

  // Renumber references in ROADMAP for subsequent phases
  if (!isDecimal) {
    const removedInt = parseInt(normalized, 10);

    // BUGFIX: iterate ASCENDING (N+1 → N, then N+2 → N+1, ...). The previous
    // descending loop cascaded: after rewriting "Phase 18" → "Phase 17", the
    // later pass for 17 matched the freshly renamed text and decremented it
    // again, collapsing every phase above the removed one onto the same
    // number. Ascending is safe because each pass writes a number the loop
    // has already moved past and will never match again.
    const maxPhase = 99; // reasonable upper bound
    for (let oldNum = removedInt + 1; oldNum <= maxPhase; oldNum++) {
      const newNum = oldNum - 1;
      const oldStr = String(oldNum);
      const newStr = String(newNum);
      const oldPad = oldStr.padStart(2, '0');
      const newPad = newStr.padStart(2, '0');

      // Phase headings: ## Phase 18: or ### Phase 18: → ## Phase 17: or ### Phase 17:
      roadmapContent = roadmapContent.replace(
        new RegExp(`(#{2,4}\\s*Phase\\s+)${oldStr}(\\s*:)`, 'gi'),
        `$1${newStr}$2`
      );

      // Checkbox items: - [ ] **Phase 18:** → - [ ] **Phase 17:**
      roadmapContent = roadmapContent.replace(
        new RegExp(`(Phase\\s+)${oldStr}([:\\s])`, 'g'),
        `$1${newStr}$2`
      );

      // Plan references: 18-01 → 17-01
      roadmapContent = roadmapContent.replace(
        new RegExp(`${oldPad}-(\\d{2})`, 'g'),
        `${newPad}-$1`
      );

      // Table rows: | 18. → | 17.
      roadmapContent = roadmapContent.replace(
        new RegExp(`(\\|\\s*)${oldStr}\\.\\s`, 'g'),
        `$1${newStr}. `
      );

      // Depends on references
      roadmapContent = roadmapContent.replace(
        new RegExp(`(Depends on:\\*\\*\\s*Phase\\s+)${oldStr}\\b`, 'gi'),
        `$1${newStr}`
      );
    }
  }

  fs.writeFileSync(roadmapPath, roadmapContent, 'utf-8');

  // Update STATE.md phase count
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (fs.existsSync(statePath)) {
    let stateContent = fs.readFileSync(statePath, 'utf-8');
    // Update "Total Phases" field
    const totalPattern = /(\*\*Total Phases:\*\*\s*)(\d+)/;
    const totalMatch = stateContent.match(totalPattern);
    if (totalMatch) {
      const oldTotal = parseInt(totalMatch[2], 10);
      stateContent = stateContent.replace(totalPattern, `$1${oldTotal - 1}`);
    }
    // Update "Phase: X of Y" pattern
    const ofPattern = /(\bof\s+)(\d+)(\s*(?:\(|phases?))/i;
    const ofMatch = stateContent.match(ofPattern);
    if (ofMatch) {
      const oldTotal = parseInt(ofMatch[2], 10);
      stateContent = stateContent.replace(ofPattern, `$1${oldTotal - 1}$3`);
    }
    writeStateMd(statePath, stateContent, cwd);
  }

  const result = {
    removed: targetPhase,
    directory_deleted: targetDir || null,
    renamed_directories: renamedDirs,
    renamed_files: renamedFiles,
    roadmap_updated: true,
    state_updated: fs.existsSync(statePath),
  };

  output(result, raw);
}
|
||||
|
||||
/**
 * Mark a phase complete: tick its roadmap checkbox and table row, update
 * REQUIREMENTS.md traceability, determine the next phase (from disk or
 * roadmap), and advance STATE.md (current phase, status, progress counters).
 *
 * @param {string} cwd      - Project root containing `.planning/`.
 * @param {string} phaseNum - Phase being completed (e.g. "6" or "6.1").
 * @param {boolean} raw     - Raw-output flag forwarded to output().
 */
function cmdPhaseComplete(cwd, phaseNum, raw) {
  if (!phaseNum) {
    error('phase number required for phase complete');
  }

  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const normalized = normalizePhaseName(phaseNum);
  // ISO date only (YYYY-MM-DD), used as the completion stamp everywhere below.
  const today = new Date().toISOString().split('T')[0];

  // Verify phase info (plans/summaries counts come from this lookup).
  const phaseInfo = findPhaseInternal(cwd, phaseNum);
  if (!phaseInfo) {
    error(`Phase ${phaseNum} not found`);
  }

  const planCount = phaseInfo.plans.length;
  const summaryCount = phaseInfo.summaries.length;
  let requirementsUpdated = false;

  // Update ROADMAP.md: mark phase complete
  if (fs.existsSync(roadmapPath)) {
    let roadmapContent = fs.readFileSync(roadmapPath, 'utf-8');

    // Checkbox: - [ ] Phase N: → - [x] Phase N: (...completed DATE)
    const checkboxPattern = new RegExp(
      `(-\\s*\\[)[ ](\\]\\s*.*Phase\\s+${escapeRegex(phaseNum)}[:\\s][^\\n]*)`,
      'i'
    );
    roadmapContent = replaceInCurrentMilestone(roadmapContent, checkboxPattern, `$1x$2 (completed ${today})`);

    // Progress table: update Status to Complete, add date
    const phaseEscaped = escapeRegex(phaseNum);
    const tablePattern = new RegExp(
      `(\\|\\s*${phaseEscaped}\\.?\\s[^|]*\\|[^|]*\\|)\\s*[^|]*(\\|)\\s*[^|]*(\\|)`,
      'i'
    );
    roadmapContent = replaceInCurrentMilestone(
      roadmapContent, tablePattern,
      `$1 Complete $2 ${today} $3`
    );

    // Update plan count in phase section
    const planCountPattern = new RegExp(
      `(#{2,4}\\s*Phase\\s+${phaseEscaped}[\\s\\S]*?\\*\\*Plans:\\*\\*\\s*)[^\\n]+`,
      'i'
    );
    roadmapContent = replaceInCurrentMilestone(
      roadmapContent, planCountPattern,
      `$1${summaryCount}/${planCount} plans complete`
    );

    fs.writeFileSync(roadmapPath, roadmapContent, 'utf-8');

    // Update REQUIREMENTS.md traceability for this phase's requirements
    const reqPath = path.join(cwd, '.planning', 'REQUIREMENTS.md');
    if (fs.existsSync(reqPath)) {
      // Extract the current phase section from roadmap (scoped to avoid cross-phase matching)
      const phaseEsc = escapeRegex(phaseNum);
      const currentMilestoneRoadmap = extractCurrentMilestone(roadmapContent, cwd);
      const phaseSectionMatch = currentMilestoneRoadmap.match(
        new RegExp(`(#{2,4}\\s*Phase\\s+${phaseEsc}[:\\s][\\s\\S]*?)(?=#{2,4}\\s*Phase\\s+|$)`, 'i')
      );

      const sectionText = phaseSectionMatch ? phaseSectionMatch[1] : '';
      // NOTE(review): this expects `**Requirements:** ...` (colon inside the
      // bold), but the stub entries written by cmdPhaseAdd/cmdPhaseInsert use
      // `**Requirements**: TBD` (colon outside) — entries in that format will
      // never match here, so their requirements stay unmarked. Confirm which
      // roadmap format is canonical.
      const reqMatch = sectionText.match(/\*\*Requirements:\*\*\s*([^\n]+)/i);

      if (reqMatch) {
        // Requirement IDs may be bracketed and comma- or space-separated.
        const reqIds = reqMatch[1].replace(/[\[\]]/g, '').split(/[,\s]+/).map(r => r.trim()).filter(Boolean);
        let reqContent = fs.readFileSync(reqPath, 'utf-8');

        for (const reqId of reqIds) {
          const reqEscaped = escapeRegex(reqId);
          // Update checkbox: - [ ] **REQ-ID** → - [x] **REQ-ID**
          reqContent = reqContent.replace(
            new RegExp(`(-\\s*\\[)[ ](\\]\\s*\\*\\*${reqEscaped}\\*\\*)`, 'gi'),
            '$1x$2'
          );
          // Update traceability table: | REQ-ID | Phase N | Pending/In Progress | → | REQ-ID | Phase N | Complete |
          reqContent = reqContent.replace(
            new RegExp(`(\\|\\s*${reqEscaped}\\s*\\|[^|]+\\|)\\s*(?:Pending|In Progress)\\s*(\\|)`, 'gi'),
            '$1 Complete $2'
          );
        }

        fs.writeFileSync(reqPath, reqContent, 'utf-8');
        requirementsUpdated = true;
      }
    }
  }

  // Find next phase — check both filesystem AND roadmap
  // Phases may be defined in ROADMAP.md but not yet scaffolded to disk,
  // so a filesystem-only scan would incorrectly report is_last_phase:true
  let nextPhaseNum = null;
  let nextPhaseName = null;
  let isLastPhase = true;

  try {
    const isDirInMilestone = getMilestonePhaseFilter(cwd);
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name)
      .filter(isDirInMilestone)
      .sort((a, b) => comparePhaseNum(a, b));

    // Find the next phase directory after current (dirs are phase-sorted,
    // so the first one comparing greater is the successor).
    for (const dir of dirs) {
      const dm = dir.match(/^(\d+[A-Z]?(?:\.\d+)*)-?(.*)/i);
      if (dm) {
        if (comparePhaseNum(dm[1], phaseNum) > 0) {
          nextPhaseNum = dm[1];
          nextPhaseName = dm[2] || null;
          isLastPhase = false;
          break;
        }
      }
    }
  } catch {}

  // Fallback: if filesystem found no next phase, check ROADMAP.md
  // for phases that are defined but not yet planned (no directory on disk)
  if (isLastPhase && fs.existsSync(roadmapPath)) {
    try {
      const roadmapForPhases = extractCurrentMilestone(fs.readFileSync(roadmapPath, 'utf-8'), cwd);
      const phasePattern = /#{2,4}\s*Phase\s+(\d+[A-Z]?(?:\.\d+)*)\s*:\s*([^\n]+)/gi;
      let pm;
      while ((pm = phasePattern.exec(roadmapForPhases)) !== null) {
        if (comparePhaseNum(pm[1], phaseNum) > 0) {
          nextPhaseNum = pm[1];
          // Slugify the roadmap title, dropping any "(INSERTED)" marker.
          nextPhaseName = pm[2].replace(/\(INSERTED\)/i, '').trim().toLowerCase().replace(/\s+/g, '-');
          isLastPhase = false;
          break;
        }
      }
    } catch {}
  }

  // Update STATE.md — field-by-field regex rewrites of the bolded headers.
  if (fs.existsSync(statePath)) {
    let stateContent = fs.readFileSync(statePath, 'utf-8');

    // Update Current Phase (stays on the completed phase when it was the last one).
    stateContent = stateContent.replace(
      /(\*\*Current Phase:\*\*\s*).*/,
      `$1${nextPhaseNum || phaseNum}`
    );

    // Update Current Phase Name (slug is de-hyphenated for display).
    if (nextPhaseName) {
      stateContent = stateContent.replace(
        /(\*\*Current Phase Name:\*\*\s*).*/,
        `$1${nextPhaseName.replace(/-/g, ' ')}`
      );
    }

    // Update Status
    stateContent = stateContent.replace(
      /(\*\*Status:\*\*\s*).*/,
      `$1${isLastPhase ? 'Milestone complete' : 'Ready to plan'}`
    );

    // Update Current Plan
    stateContent = stateContent.replace(
      /(\*\*Current Plan:\*\*\s*).*/,
      `$1Not started`
    );

    // Update Last Activity
    stateContent = stateContent.replace(
      /(\*\*Last Activity:\*\*\s*).*/,
      `$1${today}`
    );

    // Update Last Activity Description
    stateContent = stateContent.replace(
      /(\*\*Last Activity Description:\*\*\s*).*/,
      `$1Phase ${phaseNum} complete${nextPhaseNum ? `, transitioned to Phase ${nextPhaseNum}` : ''}`
    );

    // Increment Completed Phases counter (#956)
    const completedMatch = stateContent.match(/\*\*Completed Phases:\*\*\s*(\d+)/);
    if (completedMatch) {
      const newCompleted = parseInt(completedMatch[1], 10) + 1;
      stateContent = stateContent.replace(
        /(\*\*Completed Phases:\*\*\s*)\d+/,
        `$1${newCompleted}`
      );

      // Recalculate percent based on completed / total (#956)
      const totalMatch = stateContent.match(/\*\*Total Phases:\*\*\s*(\d+)/);
      if (totalMatch) {
        const totalPhases = parseInt(totalMatch[1], 10);
        if (totalPhases > 0) {
          const newPercent = Math.round((newCompleted / totalPhases) * 100);
          stateContent = stateContent.replace(
            /(\*\*Progress:\*\*\s*)\d+%/,
            `$1${newPercent}%`
          );
          // Also update percent field if it exists separately
          stateContent = stateContent.replace(
            /(percent:\s*)\d+/,
            `$1${newPercent}`
          );
        }
      }
    }

    writeStateMd(statePath, stateContent, cwd);
  }

  const result = {
    completed_phase: phaseNum,
    phase_name: phaseInfo.phase_name,
    plans_executed: `${summaryCount}/${planCount}`,
    next_phase: nextPhaseNum,
    next_phase_name: nextPhaseName,
    is_last_phase: isLastPhase,
    date: today,
    roadmap_updated: fs.existsSync(roadmapPath),
    state_updated: fs.existsSync(statePath),
    requirements_updated: requirementsUpdated,
  };

  output(result, raw);
}
|
||||
|
||||
// Public command surface for phase operations, consumed by the CLI dispatcher.
module.exports = {
  cmdPhasesList,
  cmdPhaseNextDecimal,
  cmdFindPhase,
  cmdPhasePlanIndex,
  cmdPhaseAdd,
  cmdPhaseInsert,
  cmdPhaseRemove,
  cmdPhaseComplete,
};
|
||||
931
get-shit-done/bin/lib/profile-output.cjs
Normal file
931
get-shit-done/bin/lib/profile-output.cjs
Normal file
@@ -0,0 +1,931 @@
|
||||
/**
|
||||
* Profile Output — profile rendering, questionnaire, and artifact generation
|
||||
*
|
||||
* Renders profiling analysis into user-facing artifacts:
|
||||
* - write-profile: USER-PROFILE.md from analysis JSON
|
||||
* - profile-questionnaire: fallback when no sessions available
|
||||
* - generate-dev-preferences: dev-preferences.md command artifact
|
||||
* - generate-claude-profile: Developer Profile section in CLAUDE.md
|
||||
* - generate-claude-md: full CLAUDE.md with managed sections
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const { output, error, safeReadFile } = require('./core.cjs');
|
||||
|
||||
// ─── Constants ────────────────────────────────────────────────────────────────
|
||||
|
||||
// The eight developer-profiling dimensions. Each entry of PROFILING_QUESTIONS
// targets exactly one of these keys via its `dimension` field.
const DIMENSION_KEYS = [
  'communication_style', 'decision_speed', 'explanation_depth',
  'debugging_approach', 'ux_philosophy', 'vendor_philosophy',
  'frustration_triggers', 'learning_style'
];
|
||||
|
||||
// Fallback questionnaire used when no session history is available to infer a
// profile. One question per profiling dimension; each option maps a letter
// answer (`value`) to the dimension rating (`rating`) that the selected
// CLAUDE_INSTRUCTIONS text is keyed on.
const PROFILING_QUESTIONS = [
  {
    dimension: 'communication_style',
    header: 'Communication Style',
    context: 'Think about the last few times you asked Claude to build or change something. How did you frame the request?',
    question: 'When you ask Claude to build something, how much context do you typically provide?',
    options: [
      { label: 'Minimal -- "fix the bug", "add dark mode", just say what\'s needed', value: 'a', rating: 'terse-direct' },
      { label: 'Some context -- explain what and why in a paragraph or two', value: 'b', rating: 'conversational' },
      { label: 'Detailed specs -- headers, numbered lists, problem analysis, constraints', value: 'c', rating: 'detailed-structured' },
      { label: 'It depends on the task -- simple tasks get short prompts, complex ones get detailed specs', value: 'd', rating: 'mixed' },
    ],
  },
  {
    dimension: 'decision_speed',
    header: 'Decision Making',
    context: 'Think about times when Claude presented you with multiple options -- like choosing a library, picking an architecture, or selecting an approach.',
    question: 'When Claude presents you with options, how do you typically decide?',
    options: [
      { label: 'Pick quickly based on gut feeling or past experience', value: 'a', rating: 'fast-intuitive' },
      { label: 'Ask for a comparison table or pros/cons, then decide', value: 'b', rating: 'deliberate-informed' },
      { label: 'Research independently (read docs, check GitHub stars) before deciding', value: 'c', rating: 'research-first' },
      { label: 'Let Claude recommend -- I generally trust the suggestion', value: 'd', rating: 'delegator' },
    ],
  },
  {
    dimension: 'explanation_depth',
    header: 'Explanation Preferences',
    context: 'Think about when Claude explains code it wrote or an approach it took. How much detail feels right?',
    question: 'When Claude explains something, how much detail do you want?',
    options: [
      { label: 'Just the code -- I\'ll read it and figure it out myself', value: 'a', rating: 'code-only' },
      { label: 'Brief explanation with the code -- a sentence or two about the approach', value: 'b', rating: 'concise' },
      { label: 'Detailed walkthrough -- explain the approach, trade-offs, and code structure', value: 'c', rating: 'detailed' },
      { label: 'Deep dive -- teach me the concepts behind it so I understand the fundamentals', value: 'd', rating: 'educational' },
    ],
  },
  {
    dimension: 'debugging_approach',
    header: 'Debugging Style',
    context: 'Think about the last few times something broke in your code. How did you approach it with Claude?',
    question: 'When something breaks, how do you typically approach debugging with Claude?',
    options: [
      { label: 'Paste the error and say "fix it" -- get it working fast', value: 'a', rating: 'fix-first' },
      { label: 'Share the error plus context, ask Claude to diagnose what went wrong', value: 'b', rating: 'diagnostic' },
      { label: 'Investigate myself first, then ask Claude about my specific theories', value: 'c', rating: 'hypothesis-driven' },
      { label: 'Walk through the code together step by step to understand the issue', value: 'd', rating: 'collaborative' },
    ],
  },
  {
    dimension: 'ux_philosophy',
    header: 'UX Philosophy',
    context: 'Think about user-facing features you have built recently. How did you balance functionality with design?',
    question: 'When building user-facing features, what do you prioritize?',
    options: [
      { label: 'Get it working first, polish the UI later (or never)', value: 'a', rating: 'function-first' },
      { label: 'Basic usability from the start -- nothing ugly, but no pixel-perfection', value: 'b', rating: 'pragmatic' },
      { label: 'Design and UX are as important as functionality -- I care about the experience', value: 'c', rating: 'design-conscious' },
      { label: 'I mostly build backend, CLI, or infrastructure -- UX is minimal', value: 'd', rating: 'backend-focused' },
    ],
  },
  {
    dimension: 'vendor_philosophy',
    header: 'Library & Vendor Choices',
    context: 'Think about the last time you needed a library or service for a project. How did you go about choosing it?',
    question: 'When choosing libraries or services, what is your typical approach?',
    options: [
      { label: 'Use whatever Claude suggests -- speed matters more than the perfect choice', value: 'a', rating: 'pragmatic-fast' },
      { label: 'Prefer well-known, battle-tested options (React, PostgreSQL, Express)', value: 'b', rating: 'conservative' },
      { label: 'Research alternatives, read docs, compare benchmarks before committing', value: 'c', rating: 'thorough-evaluator' },
      { label: 'Strong opinions -- I already know what I like and I stick with it', value: 'd', rating: 'opinionated' },
    ],
  },
  {
    dimension: 'frustration_triggers',
    header: 'Frustration Triggers',
    context: 'Think about moments when working with AI coding assistants that made you frustrated or annoyed.',
    question: 'What frustrates you most when working with AI coding assistants?',
    options: [
      { label: 'Doing things I didn\'t ask for -- adding features, refactoring code, scope creep', value: 'a', rating: 'scope-creep' },
      { label: 'Not following instructions precisely -- ignoring constraints or requirements I stated', value: 'b', rating: 'instruction-adherence' },
      { label: 'Over-explaining or being too verbose -- just give me the code and move on', value: 'c', rating: 'verbosity' },
      { label: 'Breaking working code while fixing something else -- regressions', value: 'd', rating: 'regression' },
    ],
  },
  {
    dimension: 'learning_style',
    header: 'Learning Preferences',
    context: 'Think about encountering something new -- an unfamiliar library, a codebase you inherited, a concept you hadn\'t used before.',
    question: 'When you encounter something new in your codebase, how do you prefer to learn about it?',
    options: [
      { label: 'Read the code directly -- I figure things out by reading and experimenting', value: 'a', rating: 'self-directed' },
      { label: 'Ask Claude to explain the relevant parts to me', value: 'b', rating: 'guided' },
      { label: 'Read official docs and tutorials first, then try things', value: 'c', rating: 'documentation-first' },
      { label: 'See a working example, then modify it to understand how it works', value: 'd', rating: 'example-driven' },
    ],
  },
];
|
||||
|
||||
// Canonical lookup of ready-made CLAUDE.md directive sentences, keyed by
// profiling dimension and then by rating. Consumed by
// generateClaudeInstruction() and as a fallback in the generate-* commands;
// ratings missing from this table fall back to a generic "adapt" sentence.
const CLAUDE_INSTRUCTIONS = {
  communication_style: {
    'terse-direct': 'Keep responses concise and action-oriented. Skip lengthy preambles. Match this developer\'s direct style.',
    'conversational': 'Use a natural conversational tone. Explain reasoning briefly alongside code. Engage with the developer\'s questions.',
    'detailed-structured': 'Match this developer\'s structured communication: use headers for sections, numbered lists for steps, and acknowledge provided context before responding.',
    'mixed': 'Adapt response detail to match the complexity of each request. Brief for simple tasks, detailed for complex ones.',
  },
  decision_speed: {
    'fast-intuitive': 'Present a single strong recommendation with brief justification. Skip lengthy comparisons unless asked.',
    'deliberate-informed': 'Present options in a structured comparison table with pros/cons. Let the developer make the final call.',
    'research-first': 'Include links to docs, GitHub repos, or benchmarks when recommending tools. Support the developer\'s research process.',
    'delegator': 'Make clear recommendations with confidence. Explain your reasoning briefly, but own the suggestion.',
  },
  explanation_depth: {
    'code-only': 'Prioritize code output. Add comments inline rather than prose explanations. Skip walkthroughs unless asked.',
    'concise': 'Pair code with a brief explanation (1-2 sentences) of the approach. Keep prose minimal.',
    'detailed': 'Explain the approach, key trade-offs, and code structure alongside the implementation. Use headers to organize.',
    'educational': 'Teach the underlying concepts and principles, not just the implementation. Relate new patterns to fundamentals.',
  },
  debugging_approach: {
    'fix-first': 'Prioritize the fix. Show the corrected code first, then optionally explain what was wrong. Minimize diagnostic preamble.',
    'diagnostic': 'Diagnose the root cause before presenting the fix. Explain what went wrong and why the fix addresses it.',
    'hypothesis-driven': 'Engage with the developer\'s theories. Validate or refine their hypotheses before jumping to solutions.',
    'collaborative': 'Walk through the debugging process step by step. Explain the investigation approach, not just the conclusion.',
  },
  ux_philosophy: {
    'function-first': 'Focus on functionality and correctness. Keep UI minimal and functional. Skip design polish unless requested.',
    'pragmatic': 'Build clean, usable interfaces without over-engineering. Apply basic design principles (spacing, alignment, contrast).',
    'design-conscious': 'Invest in UX quality: thoughtful spacing, smooth transitions, responsive layouts. Treat design as a first-class concern.',
    'backend-focused': 'Optimize for developer experience (clear APIs, good error messages, helpful CLI output) over visual design.',
  },
  vendor_philosophy: {
    'pragmatic-fast': 'Suggest libraries quickly based on popularity and reliability. Don\'t over-analyze choices for non-critical dependencies.',
    'conservative': 'Recommend well-established, widely-adopted tools with strong community support. Avoid bleeding-edge options.',
    'thorough-evaluator': 'Compare alternatives with specific metrics (bundle size, GitHub stars, maintenance activity). Support informed decisions.',
    'opinionated': 'Respect the developer\'s existing tool preferences. Ask before suggesting alternatives to their preferred stack.',
  },
  frustration_triggers: {
    'scope-creep': 'Do exactly what is asked -- nothing more. Never add unrequested features, refactoring, or "improvements". Ask before expanding scope.',
    'instruction-adherence': 'Follow instructions precisely. Re-read constraints before responding. If requirements conflict, flag the conflict rather than silently choosing.',
    'verbosity': 'Be concise. Lead with code, follow with brief explanation only if needed. Avoid restating the problem or unnecessary context.',
    'regression': 'Before modifying working code, verify the change is safe. Run existing tests mentally. Flag potential regression risks explicitly.',
  },
  learning_style: {
    'self-directed': 'Point to relevant code sections and let the developer explore. Add signposts (file paths, function names) rather than full explanations.',
    'guided': 'Explain concepts in context of the developer\'s codebase. Use their actual code as examples when teaching.',
    'documentation-first': 'Link to official documentation and relevant sections. Structure explanations like reference material.',
    'example-driven': 'Lead with working code examples. Show a minimal example first, then explain how to extend or modify it.',
  },
};
|
||||
|
||||
// Placeholder text written into a managed CLAUDE.md section when its source
// document under .planning/ does not exist yet. Keys match the managed
// section names used by the generate-claude-md section generators.
const CLAUDE_MD_FALLBACKS = {
  project: 'Project not yet initialized. Run /gsd:new-project to set up.',
  stack: 'Technology stack not yet documented. Will populate after codebase mapping or first phase.',
  conventions: 'Conventions not yet established. Will populate as patterns emerge during development.',
  architecture: 'Architecture not yet mapped. Follow existing patterns found in the codebase.',
};
|
||||
|
||||
// Marker-delimited stub for the "Developer Profile" section of a freshly
// created CLAUDE.md. The GSD:profile-start/end markers let the
// generate-claude-profile command later replace this block in place.
const CLAUDE_MD_PROFILE_PLACEHOLDER = [
  '<!-- GSD:profile-start -->',
  '## Developer Profile',
  '',
  '> Profile not yet configured. Run `/gsd:profile-user` to generate your developer profile.',
  '> This section is managed by `generate-claude-profile` -- do not edit manually.',
  '<!-- GSD:profile-end -->',
].join('\n');
|
||||
|
||||
// ─── Helper Functions ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Decide whether a questionnaire answer is too ambiguous to score with
 * MEDIUM confidence. An answer is ambiguous when its option carries the
 * 'mixed' rating (plus one hard-coded case, see below).
 *
 * @param {string} dimension - Profiling dimension key (e.g. 'learning_style').
 * @param {string} value - The answer letter ('a'..'d').
 * @returns {boolean} true when the answer should be scored LOW confidence.
 */
function isAmbiguousAnswer(dimension, value) {
  // Special case: communication_style answer "d" is always treated as mixed.
  if (dimension === 'communication_style' && value === 'd') return true;

  let rating = null;
  for (const entry of PROFILING_QUESTIONS) {
    if (entry.dimension !== dimension) continue;
    for (const opt of entry.options) {
      if (opt.value === value) rating = opt.rating;
    }
    break; // dimensions are unique; stop after the matching question
  }
  // Unknown dimension or unknown answer value → not ambiguous (false).
  return rating === 'mixed';
}
|
||||
|
||||
/**
 * Look up the canned CLAUDE.md directive for a dimension/rating pair,
 * falling back to a generic "adapt to this preference" sentence when the
 * pair is not present in CLAUDE_INSTRUCTIONS.
 *
 * @param {string} dimension - Profiling dimension key.
 * @param {string} rating - Rating string produced by analysis.
 * @returns {string} A single-sentence instruction for Claude.
 */
function generateClaudeInstruction(dimension, rating) {
  const byRating = CLAUDE_INSTRUCTIONS[dimension];
  const canned = byRating ? byRating[rating] : undefined;
  if (canned) return canned;
  const readable = dimension.replace(/_/g, ' ');
  return `Adapt to this developer's ${readable} preference: ${rating}.`;
}
|
||||
|
||||
/**
 * Pull the raw text between a GSD section's start and end markers.
 * The start marker is matched as a prefix so `source:` attributes in the
 * opening tag are tolerated; content begins after the tag's closing `-->`.
 *
 * @param {string} fileContent - Whole file text to search.
 * @param {string} sectionName - Section key (e.g. 'stack', 'profile').
 * @returns {string|null} Inner text (whitespace preserved), or null when
 *   either marker (or the opening tag's `-->`) is absent.
 */
function extractSectionContent(fileContent, sectionName) {
  const openPrefix = `<!-- GSD:${sectionName}-start`;
  const closeTag = `<!-- GSD:${sectionName}-end -->`;

  const openAt = fileContent.indexOf(openPrefix);
  const closeAt = fileContent.indexOf(closeTag);
  if (openAt === -1 || closeAt === -1) return null;

  // The opening tag may carry attributes; skip forward to its `-->`.
  const openTagClose = fileContent.indexOf('-->', openAt);
  if (openTagClose === -1) return null;

  return fileContent.substring(openTagClose + 3, closeAt);
}
|
||||
|
||||
/**
 * Wrap section content in GSD start/end marker comments. The opening tag
 * records which source document the content was generated from.
 *
 * @param {string} sectionName - Section key used in the marker names.
 * @param {string} sourceFile - Provenance label (e.g. 'PROJECT.md').
 * @param {string} content - Body text placed between the markers.
 * @returns {string} Marker-delimited section text.
 */
function buildSection(sectionName, sourceFile, content) {
  const opener = `<!-- GSD:${sectionName}-start source:${sourceFile} -->`;
  const closer = `<!-- GSD:${sectionName}-end -->`;
  return `${opener}\n${content}\n${closer}`;
}
|
||||
|
||||
/**
 * Splice new section content into a file: replace the existing marker-
 * delimited span when both markers are present, otherwise append the
 * content (with a blank-line separator) at the end of the file.
 *
 * @param {string} fileContent - Current file text.
 * @param {string} sectionName - Section key used in the marker names.
 * @param {string} newContent - Full replacement section, markers included.
 * @returns {{content: string, action: 'replaced'|'appended'}}
 */
function updateSection(fileContent, sectionName, newContent) {
  const openPrefix = `<!-- GSD:${sectionName}-start`;
  const closeTag = `<!-- GSD:${sectionName}-end -->`;

  const openAt = fileContent.indexOf(openPrefix);
  const closeAt = fileContent.indexOf(closeTag);

  // No existing section → append with a separating blank line.
  if (openAt === -1 || closeAt === -1) {
    const appended = `${fileContent.trimEnd()}\n\n${newContent}\n`;
    return { content: appended, action: 'appended' };
  }

  const head = fileContent.substring(0, openAt);
  const tail = fileContent.substring(closeAt + closeTag.length);
  return { content: head + newContent + tail, action: 'replaced' };
}
|
||||
|
||||
/**
 * Report whether a managed section's current content differs from what the
 * generator would produce — i.e. whether a human edited it by hand.
 * Comparison is whitespace-tolerant: surrounding blanks are trimmed and
 * runs of 3+ newlines collapse to a single blank line.
 *
 * @param {string} fileContent - Whole file text.
 * @param {string} sectionName - Section key used in the marker names.
 * @param {string} expectedContent - What the generator would emit.
 * @returns {boolean} true when the section exists and was manually edited.
 */
function detectManualEdit(fileContent, sectionName, expectedContent) {
  const actual = extractSectionContent(fileContent, sectionName);
  if (actual === null) return false; // absent section can't be hand-edited
  const canonical = (text) => text.trim().replace(/\n{3,}/g, '\n\n');
  return canonical(actual) !== canonical(expectedContent);
}
|
||||
|
||||
/**
 * Extract a single `## <sectionName>` section from markdown, heading line
 * included, up to (but not including) the next `## ` heading.
 *
 * Fix: the section name is now regex-escaped before being interpolated into
 * the heading pattern. Previously a name containing regex metacharacters
 * (e.g. 'v1.0', 'A (B)') could match the wrong heading or throw a
 * SyntaxError; names are always intended as literal text.
 *
 * @param {string|null} content - Markdown text (null/empty → null).
 * @param {string} sectionName - Exact heading text after '## ', literal.
 * @returns {string|null} Trimmed section text, or null when not found.
 */
function extractMarkdownSection(content, sectionName) {
  if (!content) return null;
  const lines = content.split('\n');
  let capturing = false;
  const result = [];
  // Escape regex metacharacters so the heading is matched literally.
  const escaped = sectionName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  const headingPattern = new RegExp(`^## ${escaped}\\s*$`);
  for (const line of lines) {
    if (headingPattern.test(line)) {
      capturing = true;
      result.push(line);
      continue;
    }
    // Any subsequent H2 ends the section.
    if (capturing && /^## /.test(line)) break;
    if (capturing) result.push(line);
  }
  return result.length > 0 ? result.join('\n').trim() : null;
}
|
||||
|
||||
// ─── CLAUDE.md Section Generators ─────────────────────────────────────────────
|
||||
|
||||
/**
 * Build the "Project" summary for CLAUDE.md from .planning/PROJECT.md:
 * the H1 title (bolded), the "What This Is" body, the "Core Value" body,
 * and the "Constraints" body, joined with blank lines. Falls back to the
 * canned placeholder when the file is missing or yields nothing.
 *
 * @param {string} cwd - Project root containing the .planning directory.
 * @returns {{content: string, source: string, hasFallback: boolean}}
 */
function generateProjectSection(cwd) {
  const content = safeReadFile(path.join(cwd, '.planning', 'PROJECT.md'));
  const fallback = { content: CLAUDE_MD_FALLBACKS.project, source: 'PROJECT.md', hasFallback: true };
  if (!content) return fallback;

  const pieces = [];

  // Lead with the document's H1 title, bolded.
  const titleMatch = content.match(/^# (.+)$/m);
  if (titleMatch) pieces.push(`**${titleMatch[1]}**`);

  // Each spec: which H2 to pull, and how to render its body.
  const sectionSpecs = [
    { heading: 'What This Is', render: (body) => body },
    { heading: 'Core Value', render: (body) => `**Core Value:** ${body}` },
    { heading: 'Constraints', render: (body) => `### Constraints\n\n${body}` },
  ];

  for (const spec of sectionSpecs) {
    const section = extractMarkdownSection(content, spec.heading);
    if (!section) continue;
    // Strip the heading line itself; keep only the body.
    const stripPattern = new RegExp(`^## ${spec.heading}\\s*`, 'i');
    const body = section.replace(stripPattern, '').trim();
    if (body) pieces.push(spec.render(body));
  }

  if (pieces.length === 0) return fallback;
  return { content: pieces.join('\n\n'), source: 'PROJECT.md', hasFallback: false };
}
|
||||
|
||||
/**
 * Build the "Technology Stack" summary for CLAUDE.md. Prefers the
 * codebase-mapped STACK.md and falls back to the research copy, then to the
 * canned placeholder. The summary keeps only structural lines: headings
 * (the leading H1 is dropped unless content precedes it), table rows, and
 * bullet items; if nothing structural is found the whole file is used.
 *
 * Fix: removed the `inTable` local — it was assigned and reset but never
 * gated any output, so it was dead state.
 *
 * @param {string} cwd - Project root containing the .planning directory.
 * @returns {{content: string, source: string, hasFallback: boolean}}
 */
function generateStackSection(cwd) {
  const codebasePath = path.join(cwd, '.planning', 'codebase', 'STACK.md');
  const researchPath = path.join(cwd, '.planning', 'research', 'STACK.md');

  let content = safeReadFile(codebasePath);
  let source = 'codebase/STACK.md';
  if (!content) {
    content = safeReadFile(researchPath);
    source = 'research/STACK.md';
  }
  if (!content) {
    return { content: CLAUDE_MD_FALLBACKS.stack, source: 'STACK.md', hasFallback: true };
  }

  const summaryLines = [];
  for (const line of content.split('\n')) {
    if (line.startsWith('#')) {
      // Skip a leading H1; keep all other headings (and an H1 that appears
      // after content has already been collected).
      if (!line.startsWith('# ') || summaryLines.length > 0) summaryLines.push(line);
      continue;
    }
    // Table rows and bullets carry the stack facts; prose is dropped.
    if (line.startsWith('|') || line.startsWith('- ') || line.startsWith('* ')) {
      summaryLines.push(line);
    }
  }

  const summary = summaryLines.length > 0 ? summaryLines.join('\n') : content.trim();
  return { content: summary, source, hasFallback: false };
}
|
||||
|
||||
/**
 * Build the "Conventions" summary for CLAUDE.md from
 * .planning/codebase/CONVENTIONS.md. Keeps headings (except the top-level
 * H1), bullets, and table rows; falls back to the whole trimmed file when
 * nothing structural is found, or to the canned placeholder when missing.
 *
 * @param {string} cwd - Project root containing the .planning directory.
 * @returns {{content: string, source: string, hasFallback: boolean}}
 */
function generateConventionsSection(cwd) {
  const conventionsPath = path.join(cwd, '.planning', 'codebase', 'CONVENTIONS.md');
  const content = safeReadFile(conventionsPath);
  if (!content) {
    return { content: CLAUDE_MD_FALLBACKS.conventions, source: 'CONVENTIONS.md', hasFallback: true };
  }

  const kept = [];
  for (const line of content.split('\n')) {
    if (line.startsWith('#')) {
      if (!line.startsWith('# ')) kept.push(line); // drop the top-level H1
      continue;
    }
    if (line.startsWith('- ') || line.startsWith('* ') || line.startsWith('|')) kept.push(line);
  }

  return {
    content: kept.length > 0 ? kept.join('\n') : content.trim(),
    source: 'CONVENTIONS.md',
    hasFallback: false,
  };
}
|
||||
|
||||
/**
 * Build the "Architecture" summary for CLAUDE.md from
 * .planning/codebase/ARCHITECTURE.md. Same shape as the conventions
 * generator, but code-fence delimiters (```) are also kept so diagrams
 * survive the summary. Falls back to the whole trimmed file, then to the
 * canned placeholder when the document is missing.
 *
 * @param {string} cwd - Project root containing the .planning directory.
 * @returns {{content: string, source: string, hasFallback: boolean}}
 */
function generateArchitectureSection(cwd) {
  const architecturePath = path.join(cwd, '.planning', 'codebase', 'ARCHITECTURE.md');
  const content = safeReadFile(architecturePath);
  if (!content) {
    return { content: CLAUDE_MD_FALLBACKS.architecture, source: 'ARCHITECTURE.md', hasFallback: true };
  }

  const isStructural = (line) =>
    line.startsWith('- ') || line.startsWith('* ') || line.startsWith('|') || line.startsWith('```');

  const kept = [];
  for (const line of content.split('\n')) {
    if (line.startsWith('#')) {
      if (!line.startsWith('# ')) kept.push(line); // drop the top-level H1
    } else if (isStructural(line)) {
      kept.push(line);
    }
  }

  return {
    content: kept.length > 0 ? kept.join('\n') : content.trim(),
    source: 'ARCHITECTURE.md',
    hasFallback: false,
  };
}
|
||||
|
||||
// ─── Commands ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * `write-profile` command: render USER-PROFILE.md from an analysis JSON.
 *
 * Pipeline: validate --input → parse and shape-check the analysis →
 * redact sensitive strings from evidence (in place) → fill the
 * user-profile template → write the profile file → emit a result summary.
 *
 * NOTE(review): error() is assumed to terminate the process — the code
 * falls through after every call without returning; confirm against its
 * definition elsewhere in this file.
 *
 * @param {string} cwd - Base directory for resolving relative paths.
 * @param {object} options - Parsed CLI flags ({ input, output }).
 * @param {boolean} raw - Passed through to output() to select raw vs JSON.
 */
function cmdWriteProfile(cwd, options, raw) {
  if (!options.input) {
    error('--input <analysis-json-path> is required');
  }

  // Resolve --input relative to cwd when not absolute.
  let analysisPath = options.input;
  if (!path.isAbsolute(analysisPath)) analysisPath = path.join(cwd, analysisPath);
  if (!fs.existsSync(analysisPath)) error(`Analysis file not found: ${analysisPath}`);

  let analysis;
  try {
    analysis = JSON.parse(fs.readFileSync(analysisPath, 'utf-8'));
  } catch (err) {
    error(`Failed to parse analysis JSON: ${err.message}`);
  }

  // Minimal shape validation: dimensions map + schema version must exist.
  if (!analysis.dimensions || typeof analysis.dimensions !== 'object') {
    error('Analysis JSON must contain a "dimensions" object');
  }
  if (!analysis.profile_version) {
    error('Analysis JSON must contain "profile_version"');
  }

  // Patterns for secret-shaped and identifying strings that must not land
  // in the written profile. All are global (/g) so every occurrence in a
  // quote is replaced.
  const SENSITIVE_PATTERNS = [
    /sk-[a-zA-Z0-9]{20,}/g,                 // "sk-"-prefixed API-key-shaped strings
    /Bearer\s+[a-zA-Z0-9._-]+/gi,           // Authorization bearer tokens
    /password\s*[:=]\s*\S+/gi,              // password=/: assignments
    /secret\s*[:=]\s*\S+/gi,
    /token\s*[:=]\s*\S+/gi,
    /api[_-]?key\s*[:=]\s*\S+/gi,
    /\/Users\/[a-zA-Z0-9._-]+\//g,          // macOS home paths (leak usernames)
    /\/home\/[a-zA-Z0-9._-]+\//g,           // Linux home paths
    /ghp_[a-zA-Z0-9]{36}/g,                 // GitHub personal access tokens
    /gho_[a-zA-Z0-9]{36}/g,                 // GitHub OAuth tokens
    /xoxb-[a-zA-Z0-9-]+/g,                  // Slack bot tokens
  ];

  // Running total across all evidence fields; reported to stderr and in
  // the template's {{sensitive_excluded_summary}}.
  let redactedCount = 0;

  // Replace every sensitive match in `text` with '[REDACTED]'.
  // Non-strings pass through untouched.
  function redactSensitive(text) {
    if (typeof text !== 'string') return text;
    let result = text;
    for (const pattern of SENSITIVE_PATTERNS) {
      // The /g regexes are stateful (lastIndex); reset before reuse.
      pattern.lastIndex = 0;
      const matches = result.match(pattern);
      if (matches) {
        redactedCount += matches.length;
        result = result.replace(pattern, '[REDACTED]');
      }
    }
    return result;
  }

  // Redact evidence quotes in place, mutating the parsed analysis object.
  for (const dimKey of Object.keys(analysis.dimensions)) {
    const dim = analysis.dimensions[dimKey];
    if (dim.evidence && Array.isArray(dim.evidence)) {
      for (let i = 0; i < dim.evidence.length; i++) {
        const ev = dim.evidence[i];
        if (ev.quote) ev.quote = redactSensitive(ev.quote);
        if (ev.example) ev.example = redactSensitive(ev.example);
        if (ev.signal) ev.signal = redactSensitive(ev.signal);
      }
    }
  }

  // Report redactions on stderr so stdout stays machine-parseable.
  if (redactedCount > 0) {
    process.stderr.write(`Sensitive content redacted: ${redactedCount} pattern(s) removed from evidence quotes\n`);
  }

  const templatePath = path.join(__dirname, '..', '..', 'templates', 'user-profile.md');
  if (!fs.existsSync(templatePath)) error(`Template not found: ${templatePath}`);
  let template = fs.readFileSync(templatePath, 'utf-8');

  // Human-readable labels for the summary bullet list.
  const dimensionLabels = {
    communication_style: 'Communication',
    decision_speed: 'Decisions',
    explanation_depth: 'Explanations',
    debugging_approach: 'Debugging',
    ux_philosophy: 'UX Philosophy',
    vendor_philosophy: 'Vendor Philosophy',
    frustration_triggers: 'Frustration Triggers',
    learning_style: 'Learning Style',
  };

  // Tally confidence levels; only HIGH/MEDIUM dimensions contribute bullet
  // lines to the summary block.
  const summaryLines = [];
  let highCount = 0, mediumCount = 0, lowCount = 0, dimensionsScored = 0;

  for (const dimKey of DIMENSION_KEYS) {
    const dim = analysis.dimensions[dimKey];
    if (!dim) continue;
    const conf = (dim.confidence || '').toUpperCase();
    if (conf === 'HIGH' || conf === 'MEDIUM' || conf === 'LOW') dimensionsScored++;
    if (conf === 'HIGH') {
      highCount++;
      if (dim.claude_instruction) summaryLines.push(`- **${dimensionLabels[dimKey] || dimKey}:** ${dim.claude_instruction} (HIGH)`);
    } else if (conf === 'MEDIUM') {
      mediumCount++;
      if (dim.claude_instruction) summaryLines.push(`- **${dimensionLabels[dimKey] || dimKey}:** ${dim.claude_instruction} (MEDIUM)`);
    } else if (conf === 'LOW') {
      lowCount++;
    }
  }

  const summaryInstructions = summaryLines.length > 0
    ? summaryLines.join('\n')
    : '- No high or medium confidence dimensions scored yet.';

  // Fill the global {{placeholder}} slots.
  // NOTE(review): String.replace treats '$' sequences in the replacement
  // specially ($&, $', …); a quote containing them would be mangled —
  // consider a replacer function if that ever matters.
  template = template.replace(/\{\{generated_at\}\}/g, new Date().toISOString());
  template = template.replace(/\{\{data_source\}\}/g, analysis.data_source || 'session_analysis');
  template = template.replace(/\{\{projects_list\}\}/g, (analysis.projects_list || analysis.projects_analyzed || []).join(', '));
  template = template.replace(/\{\{message_count\}\}/g, String(analysis.message_count || analysis.messages_analyzed || 0));
  template = template.replace(/\{\{summary_instructions\}\}/g, summaryInstructions);
  template = template.replace(/\{\{profile_version\}\}/g, analysis.profile_version);
  template = template.replace(/\{\{projects_count\}\}/g, String((analysis.projects_list || analysis.projects_analyzed || []).length));
  template = template.replace(/\{\{dimensions_scored\}\}/g, String(dimensionsScored));
  template = template.replace(/\{\{high_confidence_count\}\}/g, String(highCount));
  template = template.replace(/\{\{medium_confidence_count\}\}/g, String(mediumCount));
  template = template.replace(/\{\{low_confidence_count\}\}/g, String(lowCount));
  template = template.replace(/\{\{sensitive_excluded_summary\}\}/g,
    redactedCount > 0 ? `${redactedCount} pattern(s) redacted` : 'None detected');

  // Fill per-dimension slots ({{dim.rating}}, {{dim.evidence}}, …).
  // Missing dimensions render as UNSCORED with a generic instruction.
  for (const dimKey of DIMENSION_KEYS) {
    const dim = analysis.dimensions[dimKey] || {};
    const rating = dim.rating || 'UNSCORED';
    const confidence = dim.confidence || 'UNSCORED';
    const instruction = dim.claude_instruction || 'No strong preference detected. Ask the developer when this dimension is relevant.';
    const summary = dim.summary || '';

    let evidenceBlock = '';
    // Accept both the newer `evidence_quotes` and the older `evidence` key.
    const evidenceArr = dim.evidence_quotes || dim.evidence;
    if (evidenceArr && Array.isArray(evidenceArr) && evidenceArr.length > 0) {
      const evidenceLines = evidenceArr.map(ev => {
        const signal = ev.signal || ev.pattern || '';
        const quote = ev.quote || ev.example || '';
        const project = ev.project || 'unknown';
        return `- **Signal:** ${signal} / **Example:** "${quote}" -- project: ${project}`;
      });
      evidenceBlock = evidenceLines.join('\n');
    } else {
      evidenceBlock = '- No evidence collected for this dimension.';
    }

    template = template.replace(new RegExp(`\\{\\{${dimKey}\\.rating\\}\\}`, 'g'), rating);
    template = template.replace(new RegExp(`\\{\\{${dimKey}\\.confidence\\}\\}`, 'g'), confidence);
    template = template.replace(new RegExp(`\\{\\{${dimKey}\\.claude_instruction\\}\\}`, 'g'), instruction);
    template = template.replace(new RegExp(`\\{\\{${dimKey}\\.summary\\}\\}`, 'g'), summary);
    template = template.replace(new RegExp(`\\{\\{${dimKey}\\.evidence\\}\\}`, 'g'), evidenceBlock);
  }

  // Default output: ~/.claude/get-shit-done/USER-PROFILE.md.
  let outputPath = options.output;
  if (!outputPath) {
    outputPath = path.join(os.homedir(), '.claude', 'get-shit-done', 'USER-PROFILE.md');
  } else if (!path.isAbsolute(outputPath)) {
    outputPath = path.join(cwd, outputPath);
  }

  fs.mkdirSync(path.dirname(outputPath), { recursive: true });
  fs.writeFileSync(outputPath, template, 'utf-8');

  const result = {
    profile_path: outputPath,
    dimensions_scored: dimensionsScored,
    high_confidence: highCount,
    medium_confidence: mediumCount,
    low_confidence: lowCount,
    sensitive_redacted: redactedCount,
    source: analysis.data_source || 'session_analysis',
  };

  output(result, raw);
}
|
||||
|
||||
/**
 * `profile-questionnaire` command. Two modes:
 *  - without --answers: emit the question set (mode 'interactive') so the
 *    caller can present it to the developer;
 *  - with --answers "a,b,...": score each answer and emit an analysis
 *    object shaped like the session-analysis output (data_source
 *    'questionnaire'), with one evidence entry per dimension.
 *
 * Ambiguous answers (see isAmbiguousAnswer) are scored LOW confidence;
 * all others MEDIUM — self-reporting never reaches HIGH.
 *
 * @param {object} options - Parsed CLI flags ({ answers }).
 * @param {boolean} raw - Passed through to output().
 */
function cmdProfileQuestionnaire(options, raw) {
  // Mode 1: no answers supplied → publish the questions themselves.
  if (!options.answers) {
    const interactive = {
      mode: 'interactive',
      questions: PROFILING_QUESTIONS.map((q) => ({
        dimension: q.dimension,
        header: q.header,
        context: q.context,
        question: q.question,
        options: q.options.map((o) => ({ label: o.label, value: o.value })),
      })),
    };
    output(interactive, raw);
    return;
  }

  // Mode 2: score the comma-separated answer letters.
  const answers = options.answers.split(',').map((piece) => piece.trim());
  if (answers.length !== PROFILING_QUESTIONS.length) {
    error(`Expected ${PROFILING_QUESTIONS.length} answers (comma-separated), got ${answers.length}`);
  }

  const analysis = {
    profile_version: '1.0',
    analyzed_at: new Date().toISOString(),
    data_source: 'questionnaire',
    projects_analyzed: [],
    messages_analyzed: 0,
    message_threshold: 'questionnaire',
    sensitive_excluded: [],
    dimensions: {},
  };

  PROFILING_QUESTIONS.forEach((question, idx) => {
    const picked = answers[idx];
    const chosen = question.options.find((candidate) => candidate.value === picked);

    if (!chosen) {
      error(`Invalid answer "${picked}" for ${question.dimension}. Valid values: ${question.options.map(o => o.value).join(', ')}`);
    }

    const lowConfidence = isAmbiguousAnswer(question.dimension, picked);

    analysis.dimensions[question.dimension] = {
      rating: chosen.rating,
      confidence: lowConfidence ? 'LOW' : 'MEDIUM',
      evidence_count: 1,
      cross_project_consistent: null,
      evidence: [{
        signal: 'Self-reported via questionnaire',
        quote: chosen.label,
        project: 'N/A (questionnaire)',
      }],
      summary: `Developer self-reported as ${chosen.rating} for ${question.header.toLowerCase()}.`,
      claude_instruction: generateClaudeInstruction(question.dimension, chosen.rating),
    };
  });

  output(analysis, raw);
}
|
||||
|
||||
/**
 * `generate-dev-preferences` command: render the /gsd:dev-preferences
 * command file from an analysis JSON.
 *
 * Builds one "### <label>" directive per scored dimension, fills the
 * dev-preferences template, and writes it (default:
 * ~/.claude/commands/gsd/dev-preferences.md).
 *
 * NOTE(review): error() is assumed to terminate the process — the code
 * falls through after every call without returning; confirm against its
 * definition elsewhere in this file.
 *
 * @param {string} cwd - Base directory for resolving relative paths.
 * @param {object} options - Parsed CLI flags ({ analysis, output, stack }).
 * @param {boolean} raw - Passed through to output().
 */
function cmdGenerateDevPreferences(cwd, options, raw) {
  if (!options.analysis) error('--analysis <path> is required');

  // Resolve --analysis relative to cwd when not absolute.
  let analysisPath = options.analysis;
  if (!path.isAbsolute(analysisPath)) analysisPath = path.join(cwd, analysisPath);
  if (!fs.existsSync(analysisPath)) error(`Analysis file not found: ${analysisPath}`);

  let analysis;
  try {
    analysis = JSON.parse(fs.readFileSync(analysisPath, 'utf-8'));
  } catch (err) {
    error(`Failed to parse analysis JSON: ${err.message}`);
  }

  if (!analysis.dimensions || typeof analysis.dimensions !== 'object') {
    error('Analysis JSON must contain a "dimensions" object');
  }

  // Section headings used in the generated command file (distinct from the
  // labels used by write-profile / generate-claude-profile).
  const devPrefLabels = {
    communication_style: 'Communication',
    decision_speed: 'Decision Support',
    explanation_depth: 'Explanations',
    debugging_approach: 'Debugging',
    ux_philosophy: 'UX Approach',
    vendor_philosophy: 'Library & Tool Choices',
    frustration_triggers: 'Boundaries',
    learning_style: 'Learning Support',
  };

  const templatePath = path.join(__dirname, '..', '..', 'templates', 'dev-preferences.md');
  if (!fs.existsSync(templatePath)) error(`Template not found: ${templatePath}`);
  let template = fs.readFileSync(templatePath, 'utf-8');

  const directiveLines = [];
  const dimensionsIncluded = [];

  // One directive section per dimension present in the analysis.
  for (const dimKey of DIMENSION_KEYS) {
    const dim = analysis.dimensions[dimKey];
    if (!dim) continue;
    const label = devPrefLabels[dimKey] || dimKey;
    const confidence = dim.confidence || 'UNSCORED';
    // Instruction precedence: analysis-provided → canned lookup → generic.
    let instruction = dim.claude_instruction;
    if (!instruction) {
      const lookup = CLAUDE_INSTRUCTIONS[dimKey];
      if (lookup && dim.rating && lookup[dim.rating]) {
        instruction = lookup[dim.rating];
      } else {
        instruction = `Adapt to this developer's ${dimKey.replace(/_/g, ' ')} preference.`;
      }
    }
    directiveLines.push(`### ${label}\n${instruction} (${confidence} confidence)\n`);
    dimensionsIncluded.push(dimKey);
  }

  const directivesBlock = directiveLines.join('\n').trim();
  template = template.replace(/\{\{behavioral_directives\}\}/g, directivesBlock);
  template = template.replace(/\{\{generated_at\}\}/g, new Date().toISOString());
  template = template.replace(/\{\{data_source\}\}/g, analysis.data_source || 'session_analysis');

  // Stack preferences: questionnaire-only profiles have none; otherwise use
  // the caller-provided --stack text or a pending-population note.
  let stackBlock;
  if (analysis.data_source === 'questionnaire') {
    stackBlock = 'Stack preferences not available (questionnaire-only profile). Run `/gsd:profile-user --refresh` with session data to populate.';
  } else if (options.stack) {
    stackBlock = options.stack;
  } else {
    stackBlock = 'Stack preferences will be populated from session analysis.';
  }
  template = template.replace(/\{\{stack_preferences\}\}/g, stackBlock);

  // Default output: ~/.claude/commands/gsd/dev-preferences.md.
  let outputPath = options.output;
  if (!outputPath) {
    outputPath = path.join(os.homedir(), '.claude', 'commands', 'gsd', 'dev-preferences.md');
  } else if (!path.isAbsolute(outputPath)) {
    outputPath = path.join(cwd, outputPath);
  }

  fs.mkdirSync(path.dirname(outputPath), { recursive: true });
  fs.writeFileSync(outputPath, template, 'utf-8');

  const result = {
    command_path: outputPath,
    command_name: '/gsd:dev-preferences',
    dimensions_included: dimensionsIncluded,
    source: analysis.data_source || 'session_analysis',
  };

  output(result, raw);
}
|
||||
|
||||
/**
 * `generate-claude-profile` command: inject a "Developer Profile" section
 * (rating/confidence table + directive bullets) into CLAUDE.md, delimited
 * by GSD:profile-start/end markers.
 *
 * Target: --global → ~/.claude/CLAUDE.md; --output → as given (relative
 * paths resolved against cwd); otherwise ./CLAUDE.md. An existing marker
 * span is replaced in place; otherwise the section is appended, or the
 * file is created.
 *
 * Fix (consistency): the marker replace/append logic previously duplicated
 * the file's updateSection() helper inline; it now calls the helper, which
 * produces byte-identical output for both paths.
 *
 * @param {string} cwd - Base directory for resolving relative paths.
 * @param {object} options - Parsed CLI flags ({ analysis, output, global }).
 * @param {boolean} raw - Passed through to output().
 */
function cmdGenerateClaudeProfile(cwd, options, raw) {
  if (!options.analysis) error('--analysis <path> is required');

  // Resolve and parse the analysis JSON.
  let analysisPath = options.analysis;
  if (!path.isAbsolute(analysisPath)) analysisPath = path.join(cwd, analysisPath);
  if (!fs.existsSync(analysisPath)) error(`Analysis file not found: ${analysisPath}`);

  let analysis;
  try {
    analysis = JSON.parse(fs.readFileSync(analysisPath, 'utf-8'));
  } catch (err) {
    error(`Failed to parse analysis JSON: ${err.message}`);
  }

  if (!analysis.dimensions || typeof analysis.dimensions !== 'object') {
    error('Analysis JSON must contain a "dimensions" object');
  }

  // Short display labels for the table rows and directive bullets.
  const profileLabels = {
    communication_style: 'Communication',
    decision_speed: 'Decisions',
    explanation_depth: 'Explanations',
    debugging_approach: 'Debugging',
    ux_philosophy: 'UX Philosophy',
    vendor_philosophy: 'Vendor Choices',
    frustration_triggers: 'Frustrations',
    learning_style: 'Learning',
  };

  const dataSource = analysis.data_source || 'session_analysis';
  const tableRows = [];
  const directiveLines = [];
  const dimensionsIncluded = [];

  for (const dimKey of DIMENSION_KEYS) {
    const dim = analysis.dimensions[dimKey];
    if (!dim) continue; // unscored dimensions are omitted entirely
    const label = profileLabels[dimKey] || dimKey;
    const rating = dim.rating || 'UNSCORED';
    const confidence = dim.confidence || 'UNSCORED';
    tableRows.push(`| ${label} | ${rating} | ${confidence} |`);

    // Instruction precedence: analysis-provided → canned lookup → generic.
    let instruction = dim.claude_instruction;
    if (!instruction) {
      const lookup = CLAUDE_INSTRUCTIONS[dimKey];
      if (lookup && dim.rating && lookup[dim.rating]) {
        instruction = lookup[dim.rating];
      } else {
        instruction = `Adapt to this developer's ${dimKey.replace(/_/g, ' ')} preference.`;
      }
    }
    directiveLines.push(`- **${label}:** ${instruction}`);
    dimensionsIncluded.push(dimKey);
  }

  const sectionContent = [
    '<!-- GSD:profile-start -->',
    '## Developer Profile',
    '',
    `> Generated by GSD from ${dataSource}. Run \`/gsd:profile-user --refresh\` to update.`,
    '',
    '| Dimension | Rating | Confidence |',
    '|-----------|--------|------------|',
    ...tableRows,
    '',
    '**Directives:**',
    ...directiveLines,
    '<!-- GSD:profile-end -->',
  ].join('\n');

  // Resolve the target CLAUDE.md.
  let targetPath;
  if (options.global) {
    targetPath = path.join(os.homedir(), '.claude', 'CLAUDE.md');
  } else if (options.output) {
    targetPath = path.isAbsolute(options.output) ? options.output : path.join(cwd, options.output);
  } else {
    targetPath = path.join(cwd, 'CLAUDE.md');
  }

  let action;
  if (fs.existsSync(targetPath)) {
    // Splice via the shared marker helper instead of duplicating its logic.
    const existingContent = fs.readFileSync(targetPath, 'utf-8');
    const updated = updateSection(existingContent, 'profile', sectionContent);
    action = updated.action === 'replaced' ? 'updated' : 'appended';
    fs.writeFileSync(targetPath, updated.content, 'utf-8');
  } else {
    fs.mkdirSync(path.dirname(targetPath), { recursive: true });
    fs.writeFileSync(targetPath, sectionContent + '\n', 'utf-8');
    action = 'created';
  }

  const result = {
    claude_md_path: targetPath,
    action,
    dimensions_included: dimensionsIncluded,
    is_global: !!options.global,
  };

  output(result, raw);
}
|
||||
|
||||
/**
 * Generate or refresh the GSD-managed sections of a CLAUDE.md file.
 *
 * Four managed sections (project, stack, conventions, architecture) are
 * produced by their generator functions and written between
 * `<!-- GSD:<name>-start/end -->` markers. A missing file is created from
 * scratch (all sections plus the Developer Profile placeholder); an existing
 * file has each managed section replaced in place.
 *
 * @param {string} cwd - Project root used to resolve relative paths.
 * @param {object} options - `{ output?: string, auto?: boolean }`. With
 *   `auto`, manually-edited marker sections are preserved (skipped) and the
 *   profile placeholder is never appended.
 * @param {boolean} raw - Pass-through flag for output().
 */
function cmdGenerateClaudeMd(cwd, options, raw) {
  const MANAGED_SECTIONS = ['project', 'stack', 'conventions', 'architecture'];
  const generators = {
    project: generateProjectSection,
    stack: generateStackSection,
    conventions: generateConventionsSection,
    architecture: generateArchitectureSection,
  };
  const sectionHeadings = {
    project: '## Project',
    stack: '## Technology Stack',
    conventions: '## Conventions',
    architecture: '## Architecture',
  };

  // Run every generator up front; track which produced real content vs
  // fallback placeholder content.
  const generated = {};
  const sectionsGenerated = [];
  const sectionsFallback = [];
  const sectionsSkipped = [];

  for (const name of MANAGED_SECTIONS) {
    const gen = generators[name](cwd);
    generated[name] = gen;
    if (gen.hasFallback) {
      sectionsFallback.push(name);
    } else {
      sectionsGenerated.push(name);
    }
  }

  // Resolve the target path (default: <cwd>/CLAUDE.md).
  let outputPath = options.output;
  if (!outputPath) {
    outputPath = path.join(cwd, 'CLAUDE.md');
  } else if (!path.isAbsolute(outputPath)) {
    outputPath = path.join(cwd, outputPath);
  }

  let existingContent = safeReadFile(outputPath);
  let action;

  if (existingContent === null) {
    // No existing file: build it from scratch, all managed sections followed
    // by the Developer Profile placeholder.
    const sections = [];
    for (const name of MANAGED_SECTIONS) {
      const gen = generated[name];
      const body = `${sectionHeadings[name]}\n\n${gen.content}`;
      sections.push(buildSection(name, gen.source, body));
    }
    sections.push('');
    sections.push(CLAUDE_MD_PROFILE_PLACEHOLDER);
    existingContent = sections.join('\n\n') + '\n';
    action = 'created';
    fs.mkdirSync(path.dirname(outputPath), { recursive: true });
    fs.writeFileSync(outputPath, existingContent, 'utf-8');
  } else {
    // Existing file: replace each managed section in place.
    action = 'updated';
    let fileContent = existingContent;

    for (const name of MANAGED_SECTIONS) {
      const gen = generated[name];
      const body = `${sectionHeadings[name]}\n\n${gen.content}`;
      const fullSection = buildSection(name, gen.source, body);
      const hasMarkers = fileContent.indexOf(`<!-- GSD:${name}-start`) !== -1;

      // In --auto mode, never clobber a marker section the user edited by
      // hand; drop it from the generated/fallback tallies instead.
      if (hasMarkers && options.auto && detectManualEdit(fileContent, name, body)) {
        sectionsSkipped.push(name);
        const genIdx = sectionsGenerated.indexOf(name);
        if (genIdx !== -1) sectionsGenerated.splice(genIdx, 1);
        const fbIdx = sectionsFallback.indexOf(name);
        if (fbIdx !== -1) sectionsFallback.splice(fbIdx, 1);
        continue;
      }

      // updateSection handles both cases the same way (previously two
      // identical branches): replacing an existing marker section or adding
      // a missing one.
      fileContent = updateSection(fileContent, name, fullSection).content;
    }

    // Interactive (non-auto) runs also ensure the profile placeholder exists.
    if (!options.auto && fileContent.indexOf('<!-- GSD:profile-start') === -1) {
      fileContent = fileContent.trimEnd() + '\n\n' + CLAUDE_MD_PROFILE_PLACEHOLDER + '\n';
    }

    fs.writeFileSync(outputPath, fileContent, 'utf-8');
  }

  // Re-read the file to report the profile-section status.
  const finalContent = safeReadFile(outputPath);
  let profileStatus;
  if (finalContent && finalContent.indexOf('<!-- GSD:profile-start') !== -1) {
    if (action === 'created' || existingContent.indexOf('<!-- GSD:profile-start') === -1) {
      profileStatus = 'placeholder_added';
    } else {
      profileStatus = 'exists';
    }
  } else {
    // NOTE(review): this branch means the marker is ABSENT from the final
    // file (e.g. an --auto run on a file without one). The label
    // 'already_present' reads misleadingly, but is kept byte-for-byte for
    // backward compatibility with consumers of `profile_status`.
    profileStatus = 'already_present';
  }

  const genCount = sectionsGenerated.length;
  const totalManaged = MANAGED_SECTIONS.length;
  let message = `Generated ${genCount}/${totalManaged} sections.`;
  if (sectionsFallback.length > 0) message += ` Fallback: ${sectionsFallback.join(', ')}.`;
  if (sectionsSkipped.length > 0) message += ` Skipped (manually edited): ${sectionsSkipped.join(', ')}.`;
  if (profileStatus === 'placeholder_added') message += ' Run /gsd:profile-user to unlock Developer Profile.';

  const result = {
    claude_md_path: outputPath,
    action,
    sections_generated: sectionsGenerated,
    sections_fallback: sectionsFallback,
    sections_skipped: sectionsSkipped,
    sections_total: totalManaged,
    profile_status: profileStatus,
    message,
  };

  output(result, raw);
}
|
||||
|
||||
// Public surface of the profile/CLAUDE.md generation commands, consumed by
// the gsd-tools CLI dispatcher. PROFILING_QUESTIONS and CLAUDE_INSTRUCTIONS
// are re-exported data constants defined earlier in this module.
module.exports = {
  cmdWriteProfile,
  cmdProfileQuestionnaire,
  cmdGenerateDevPreferences,
  cmdGenerateClaudeProfile,
  cmdGenerateClaudeMd,
  PROFILING_QUESTIONS,
  CLAUDE_INSTRUCTIONS,
};
|
||||
537
get-shit-done/bin/lib/profile-pipeline.cjs
Normal file
537
get-shit-done/bin/lib/profile-pipeline.cjs
Normal file
@@ -0,0 +1,537 @@
|
||||
/**
|
||||
* Profile Pipeline — session scanning, message extraction, and sampling
|
||||
*
|
||||
* Reads Claude Code session history (read-only) to extract user messages
|
||||
* for behavioral profiling. Three commands:
|
||||
* - scan-sessions: list all projects and sessions
|
||||
* - extract-messages: extract user messages from a specific project
|
||||
* - profile-sample: multi-project sampling with recency weighting
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const readline = require('readline');
|
||||
const { output, error, safeReadFile } = require('./core.cjs');
|
||||
|
||||
// ─── Session I/O Helpers ──────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Resolve the Claude Code sessions directory.
 *
 * @param {string} [overridePath] - Explicit directory to use instead of the
 *   default `~/.claude/projects`.
 * @returns {string|null} The directory path if it exists, otherwise null.
 */
function getSessionsDir(overridePath) {
  const candidate = overridePath || path.join(os.homedir(), '.claude', 'projects');
  return fs.existsSync(candidate) ? candidate : null;
}
|
||||
|
||||
/**
 * List all `.jsonl` session files in a project directory, newest first.
 *
 * @param {string} projectDirPath - Absolute path to one project's session dir.
 * @returns {{sessionId: string, filePath: string, size: number, modified: Date}[]}
 *   Session descriptors sorted by modification time, most recent first.
 */
function scanProjectDir(projectDirPath) {
  const sessions = fs
    .readdirSync(projectDirPath)
    .filter((entry) => entry.endsWith('.jsonl'))
    .map((entry) => {
      const filePath = path.join(projectDirPath, entry);
      const stat = fs.statSync(filePath);
      return {
        // First-occurrence replace, matching the historical behavior.
        sessionId: entry.replace('.jsonl', ''),
        filePath,
        size: stat.size,
        modified: stat.mtime,
      };
    });

  // Newest session first.
  sessions.sort((a, b) => b.modified - a.modified);
  return sessions;
}
|
||||
|
||||
/**
 * Read a project's optional `sessions-index.json`.
 *
 * @param {string} projectDirPath - Project session directory.
 * @returns {{originalPath: string|null, entries: Map<string, object>}}
 *   Parsed index keyed by sessionId; an empty result when the file is
 *   missing or unparseable (best-effort by design).
 */
function readSessionIndex(projectDirPath) {
  try {
    const indexPath = path.join(projectDirPath, 'sessions-index.json');
    const parsed = JSON.parse(fs.readFileSync(indexPath, 'utf-8'));
    const entries = new Map();
    (parsed.entries || [])
      .filter((entry) => entry.sessionId)
      .forEach((entry) => entries.set(entry.sessionId, entry));
    return { originalPath: parsed.originalPath || null, entries };
  } catch {
    // Missing or corrupt index is not an error; callers fall back to a scan.
    return { originalPath: null, entries: new Map() };
  }
}
|
||||
|
||||
/**
 * Derive a human-friendly project name.
 *
 * Preference order: basename of the index's originalPath, then basename of
 * the first record's cwd, then the raw (mangled) directory name.
 *
 * @param {string} projectDirName - Raw session-directory name (fallback).
 * @param {{originalPath: string|null}|null} indexData - Parsed session index.
 * @param {string} [firstRecordCwd] - cwd from the first session record, if any.
 * @returns {string} Display name for the project.
 */
function getProjectName(projectDirName, indexData, firstRecordCwd) {
  if (indexData?.originalPath) {
    return path.basename(indexData.originalPath);
  }
  return firstRecordCwd ? path.basename(firstRecordCwd) : projectDirName;
}
|
||||
|
||||
/**
 * Render a byte count as a human-readable size string (B / KB / MB / GB),
 * with one decimal place above the byte range.
 *
 * @param {number} bytes - Non-negative byte count.
 * @returns {string} e.g. "512 B", "1.5 KB", "2.0 MB".
 */
function formatBytes(bytes) {
  const KB = 1024;
  const MB = 1048576;
  const GB = 1073741824;
  if (bytes < KB) return `${bytes} B`;
  if (bytes < MB) return `${(bytes / KB).toFixed(1)} KB`;
  if (bytes < GB) return `${(bytes / MB).toFixed(1)} MB`;
  return `${(bytes / GB).toFixed(1)} GB`;
}
|
||||
|
||||
/**
 * Format project summaries as a fixed-width text table.
 *
 * Columns: Project (35), Sessions (10), Size (10), Last Active. Names longer
 * than 33 characters are truncated to 30 plus an ellipsis.
 *
 * @param {{name: string, sessionCount: number, totalSizeHuman: string, lastActive: string}[]} projects
 * @returns {string} Table text ending in a newline per row.
 */
function formatProjectTable(projects) {
  const header = 'Project'.padEnd(35) + 'Sessions'.padEnd(10) + 'Size'.padEnd(10) + 'Last Active\n';
  const divider = '-'.repeat(75) + '\n';
  const rows = projects.map((p) => {
    const name = p.name.length > 33 ? `${p.name.substring(0, 30)}...` : p.name;
    return (
      name.padEnd(35) +
      String(p.sessionCount).padEnd(10) +
      p.totalSizeHuman.padEnd(10) +
      p.lastActive +
      '\n'
    );
  });
  return header + divider + rows.join('');
}
|
||||
|
||||
/**
 * Format per-session rows as an indented fixed-width text table.
 *
 * Columns: Session ID (40, truncated at 38 chars), Size (10), Modified
 * (ISO date-time, "YYYY-MM-DD HH:MM:SS").
 *
 * @param {{sessionId: string, size: number, modified: string|Date}[]} sessions
 * @returns {string} Table text ending in a newline per row.
 */
function formatSessionTable(sessions) {
  const header = ' Session ID'.padEnd(42) + 'Size'.padEnd(10) + 'Modified\n';
  const divider = ' ' + '-'.repeat(70) + '\n';
  const rows = sessions.map((s) => {
    const id = s.sessionId.length > 38 ? `${s.sessionId.substring(0, 35)}...` : s.sessionId;
    const when = new Date(s.modified).toISOString().replace('T', ' ').substring(0, 19);
    return ' ' + id.padEnd(40) + formatBytes(s.size).padEnd(10) + when + '\n';
  });
  return header + divider + rows.join('');
}
|
||||
|
||||
// ─── Message Extraction Helpers ───────────────────────────────────────────────
|
||||
|
||||
/**
 * Decide whether a session-log record is a genuine, human-typed user message.
 *
 * Filters out: non-user records, non-external userType, meta records,
 * sidechain records, non-string/empty content, and synthetic CLI payloads
 * whose content starts with `<local-command`, `<command-`, or
 * `<task-notification`.
 *
 * Fix: removed the unreachable `startsWith('<local-command-stdout')` check —
 * it was already covered by the `'<local-command'` prefix check above it.
 *
 * @param {object} record - One parsed JSONL record from a session file.
 * @returns {boolean} True when the record is a real user-authored message.
 */
function isGenuineUserMessage(record) {
  if (record.type !== 'user') return false;
  if (record.userType !== 'external') return false;
  if (record.isMeta === true) return false;
  if (record.isSidechain === true) return false;
  const content = record.message?.content;
  if (typeof content !== 'string') return false;
  if (content.length === 0) return false;
  // Synthetic payloads injected by the CLI, not typed by the user.
  if (content.startsWith('<local-command')) return false;
  if (content.startsWith('<command-')) return false;
  if (content.startsWith('<task-notification')) return false;
  return true;
}
|
||||
|
||||
/**
 * Cap a string at `maxLen` characters, appending a truncation marker when
 * the cap is exceeded. (The marker itself is not counted against the cap.)
 *
 * @param {string} content - Text to cap.
 * @param {number} [maxLen=2000] - Maximum kept characters.
 * @returns {string} Original string, or its prefix plus "... [truncated]".
 */
function truncateContent(content, maxLen = 2000) {
  return content.length > maxLen
    ? `${content.substring(0, maxLen)}... [truncated]`
    : content;
}
|
||||
|
||||
/**
 * Stream a session JSONL file line-by-line and collect records accepted by
 * `filterFn`, up to `maxMessages`.
 *
 * Lines that fail to JSON-parse are silently skipped. Each kept message
 * carries the sessionId (from the filename), the record's cwd/timestamp when
 * present, and its content capped by truncateContent().
 *
 * @param {string} filePath - Path to a `.jsonl` session file.
 * @param {(record: object) => boolean} filterFn - Record predicate.
 * @param {number} [maxMessages=300] - Stop after this many accepted messages.
 * @returns {Promise<{sessionId: string, projectPath: string|null, timestamp: string|null, content: string}[]>}
 */
async function streamExtractMessages(filePath, filterFn, maxMessages = 300) {
  const reader = readline.createInterface({
    input: fs.createReadStream(filePath),
    crlfDelay: Infinity,
    terminal: false,
  });

  const collected = [];
  const sessionId = path.basename(filePath, '.jsonl');

  for await (const rawLine of reader) {
    if (collected.length >= maxMessages) break;

    let parsed;
    try {
      parsed = JSON.parse(rawLine);
    } catch {
      continue; // corrupt line — skip, keep streaming
    }
    if (!filterFn(parsed)) continue;

    collected.push({
      sessionId,
      projectPath: parsed.cwd || null,
      timestamp: parsed.timestamp || null,
      content: truncateContent(parsed.message.content),
    });
  }

  return collected;
}
|
||||
|
||||
// ─── Commands ─────────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * `scan-sessions` command: enumerate all Claude Code projects and their
 * session files under the sessions directory.
 *
 * Output: with `options.json` or `raw`, emits structured JSON via output();
 * otherwise prints a fixed-width table to stdout and exits with code 0.
 * Errors (missing directory, unreadable directory) go through error(),
 * which is expected to terminate the process (it does not return here).
 *
 * @param {string} [overridePath] - Sessions dir override (default ~/.claude/projects).
 * @param {object} options - { json?: boolean, verbose?: boolean }.
 * @param {boolean} raw - Pass-through flag for output().
 */
async function cmdScanSessions(overridePath, options, raw) {
  const sessionsDir = getSessionsDir(overridePath);
  if (!sessionsDir) {
    const searchedPath = overridePath || '~/.claude/projects';
    error(`No Claude Code sessions found at ${searchedPath}.${overridePath ? '' : ' Is Claude Code installed?'}`);
  }

  // Progress/privacy notice goes to stderr so stdout stays machine-readable.
  process.stderr.write('Reading your session history (read-only, nothing is modified or sent anywhere)...\n');

  // Keep only subdirectories; per-entry stat failures are treated as "not a directory".
  let projectDirs;
  try {
    projectDirs = fs.readdirSync(sessionsDir).filter(entry => {
      const fullPath = path.join(sessionsDir, entry);
      try {
        return fs.statSync(fullPath).isDirectory();
      } catch {
        return false;
      }
    });
  } catch (err) {
    error(`Cannot read sessions directory: ${err.message}`);
  }

  const projects = [];

  for (const dirName of projectDirs) {
    const projectPath = path.join(sessionsDir, dirName);
    const sessions = scanProjectDir(projectPath);
    if (sessions.length === 0) continue; // skip projects with no .jsonl files

    const indexData = readSessionIndex(projectPath);
    const projectName = getProjectName(dirName, indexData);

    if (indexData.entries.size === 0 && !options.json) {
      process.stderr.write(`Index not found for ${projectName}, scanning directory...\n`);
    }

    // sessions is sorted newest-first by scanProjectDir, so [0] is the most
    // recent and the last element the oldest.
    const totalSize = sessions.reduce((sum, s) => sum + s.size, 0);
    const lastActive = sessions[0].modified.toISOString();
    const oldest = sessions[sessions.length - 1].modified.toISOString();
    const newest = sessions[0].modified.toISOString();

    const project = {
      name: projectName,
      directory: dirName,
      sessionCount: sessions.length,
      totalSize,
      totalSizeHuman: formatBytes(totalSize),
      // "YYYY-MM-DD HH:MM:SS" form of the newest mtime.
      lastActive: lastActive.replace('T', ' ').substring(0, 19),
      dateRange: { first: oldest, last: newest },
    };

    // --verbose: attach per-session detail, enriched from the index if present.
    if (options.verbose) {
      project.sessions = sessions.map(s => {
        const indexed = indexData.entries.get(s.sessionId);
        const session = {
          sessionId: s.sessionId,
          size: s.size,
          sizeHuman: formatBytes(s.size),
          modified: s.modified.toISOString(),
        };
        if (indexed) {
          if (indexed.summary) session.summary = indexed.summary;
          if (indexed.messageCount !== undefined) session.messageCount = indexed.messageCount;
          if (indexed.created) session.created = indexed.created;
        }
        return session;
      });
    }

    projects.push(project);
  }

  // Most recently active project first (ISO strings compare lexicographically).
  projects.sort((a, b) => b.dateRange.last.localeCompare(a.dateRange.last));

  if (options.json || raw) {
    output(projects, raw);
  } else {
    process.stdout.write('\n' + formatProjectTable(projects));
    if (options.verbose) {
      for (const p of projects) {
        process.stdout.write(`\n ${p.name} (${p.sessionCount} sessions):\n`);
        if (p.sessions) {
          process.stdout.write(formatSessionTable(p.sessions));
        }
      }
    }
    process.stdout.write(`\nTotal: ${projects.length} projects\n`);
    process.exit(0);
  }
}
|
||||
|
||||
/**
 * `extract-messages` command: dump genuine user messages from one project's
 * sessions into a temp JSONL file.
 *
 * Project resolution, in order: exact directory-name match, unique
 * case-insensitive substring match, then (among multiple substring matches)
 * a unique exact match on the display name. Ambiguity or no match aborts
 * via error().
 *
 * Exit codes: 2 when some sessions were skipped but others processed,
 * 1 when every session failed, otherwise results go through output().
 *
 * @param {string} projectArg - Project directory name or name fragment.
 * @param {object} options - { sessionId?: string, limit?: number }.
 * @param {boolean} raw - Pass-through flag for output().
 * @param {string} [overridePath] - Sessions dir override.
 */
async function cmdExtractMessages(projectArg, options, raw, overridePath) {
  const sessionsDir = getSessionsDir(overridePath);
  if (!sessionsDir) {
    const searchedPath = overridePath || '~/.claude/projects';
    error(`No Claude Code sessions found at ${searchedPath}.${overridePath ? '' : ' Is Claude Code installed?'}`);
  }

  // Keep only subdirectories; per-entry stat failures count as "not a directory".
  let projectDirs;
  try {
    projectDirs = fs.readdirSync(sessionsDir).filter(entry => {
      const fullPath = path.join(sessionsDir, entry);
      try {
        return fs.statSync(fullPath).isDirectory();
      } catch {
        return false;
      }
    });
  } catch (err) {
    error(`Cannot read sessions directory: ${err.message}`);
  }

  let matchedDir = null;
  let matchedName = null;

  // Pass 1: exact directory-name match.
  for (const dirName of projectDirs) {
    if (dirName === projectArg) {
      matchedDir = dirName;
      break;
    }
  }

  // Pass 2: case-insensitive substring match; disambiguate multiple hits by
  // exact display-name match, otherwise abort with the candidate list.
  if (!matchedDir) {
    const lowerArg = projectArg.toLowerCase();
    const matches = projectDirs.filter(d => d.toLowerCase().includes(lowerArg));
    if (matches.length === 1) {
      matchedDir = matches[0];
    } else if (matches.length > 1) {
      const exactNameMatches = [];
      for (const dirName of matches) {
        const indexData = readSessionIndex(path.join(sessionsDir, dirName));
        const pName = getProjectName(dirName, indexData);
        if (pName.toLowerCase() === lowerArg) {
          exactNameMatches.push({ dirName, name: pName });
        }
      }
      if (exactNameMatches.length === 1) {
        matchedDir = exactNameMatches[0].dirName;
        matchedName = exactNameMatches[0].name;
      } else {
        const names = matches.map(d => {
          const idx = readSessionIndex(path.join(sessionsDir, d));
          return ` - ${getProjectName(d, idx)} (${d})`;
        });
        error(`Multiple projects match "${projectArg}":\n${names.join('\n')}\nBe more specific.`);
      }
    }
  }

  // No match at all: list what exists and abort.
  if (!matchedDir) {
    const available = projectDirs.map(d => {
      const idx = readSessionIndex(path.join(sessionsDir, d));
      return ` - ${getProjectName(d, idx)}`;
    });
    error(`No project matching "${projectArg}". Available projects:\n${available.join('\n')}`);
  }

  const projectPath = path.join(sessionsDir, matchedDir);
  const indexData = readSessionIndex(projectPath);
  const projectName = matchedName || getProjectName(matchedDir, indexData);

  // Progress/privacy notice on stderr; stdout stays machine-readable.
  process.stderr.write('Reading your session history (read-only, nothing is modified or sent anywhere)...\n');

  let sessions = scanProjectDir(projectPath);

  // Optional narrowing: a single session id, and/or the N most recent sessions.
  if (options.sessionId) {
    sessions = sessions.filter(s => s.sessionId === options.sessionId);
    if (sessions.length === 0) {
      error(`Session "${options.sessionId}" not found in project "${projectName}".`);
    }
  }

  if (options.limit && options.limit > 0) {
    sessions = sessions.slice(0, options.limit);
  }

  // Results are appended to a fresh temp file, one JSON object per line.
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gsd-pipeline-'));
  const outputPath = path.join(tmpDir, 'extracted-messages.jsonl');

  let sessionsProcessed = 0;
  let sessionsSkipped = 0;
  let messagesExtracted = 0;
  let messagesTruncated = 0;
  const total = sessions.length;
  // Hard cap on messages across all sessions in one run.
  const batchLimit = 300;

  for (let i = 0; i < sessions.length; i++) {
    if (messagesExtracted >= batchLimit) break;

    const session = sessions[i];
    process.stderr.write(`\rProcessing session ${i + 1}/${total}...`);

    try {
      const remaining = batchLimit - messagesExtracted;
      const msgs = await streamExtractMessages(session.filePath, isGenuineUserMessage, remaining);
      for (const msg of msgs) {
        fs.appendFileSync(outputPath, JSON.stringify(msg) + '\n');
        messagesExtracted++;
        // streamExtractMessages appends this marker when it caps a message.
        if (msg.content.endsWith('... [truncated]')) {
          messagesTruncated++;
        }
      }
      sessionsProcessed++;
    } catch (err) {
      // A bad session file is skipped with a warning; the run continues.
      sessionsSkipped++;
      process.stderr.write(`\nWarning: Skipped session ${session.sessionId}: ${err.message}\n`);
    }
  }

  // Clear the \r progress line.
  process.stderr.write('\r' + ' '.repeat(60) + '\r');

  const result = {
    output_file: outputPath,
    project: projectName,
    sessions_processed: sessionsProcessed,
    sessions_skipped: sessionsSkipped,
    messages_extracted: messagesExtracted,
    messages_truncated: messagesTruncated,
  };

  // Partial failure -> exit 2; total failure -> exit 1; success -> output().
  if (sessionsSkipped > 0 && sessionsProcessed > 0) {
    process.stdout.write(JSON.stringify(result, null, 2));
    process.exit(2);
  } else if (sessionsProcessed === 0 && sessionsSkipped > 0) {
    process.stdout.write(JSON.stringify(result, null, 2));
    process.exit(1);
  } else {
    output(result, raw);
  }
}
|
||||
|
||||
/**
 * `profile-sample` command: sample user messages across ALL projects with
 * recency weighting, for behavioral profiling.
 *
 * Sampling rules: projects are visited most-recently-active first; each
 * project contributes at most `maxPerProject` sessions (default:
 * max(5, limit/projectCount)); sessions modified in the last 30 days yield
 * up to 10 messages each, older ones up to 3. Continuation-context dumps and
 * messages that are >80% log-like lines are skipped. The sample is written
 * to a temp JSONL file and a summary is emitted via output().
 *
 * @param {string} [overridePath] - Sessions dir override.
 * @param {object} options - { limit?: number, maxChars?: number, maxPerProject?: number }.
 * @param {boolean} raw - Pass-through flag for output().
 */
async function cmdProfileSample(overridePath, options, raw) {
  const sessionsDir = getSessionsDir(overridePath);
  if (!sessionsDir) {
    const searchedPath = overridePath || '~/.claude/projects';
    error(`No Claude Code sessions found at ${searchedPath}.${overridePath ? '' : ' Is Claude Code installed?'}`);
  }

  process.stderr.write('Reading your session history (read-only, nothing is modified or sent anywhere)...\n');

  // NOTE: || means an explicit 0 falls back to the default — presumably
  // intentional (0 would be a useless limit), but worth confirming.
  const limit = options.limit || 150;
  const maxChars = options.maxChars || 500;

  // Keep only subdirectories; per-entry stat failures count as "not a directory".
  let projectDirs;
  try {
    projectDirs = fs.readdirSync(sessionsDir).filter(entry => {
      const fullPath = path.join(sessionsDir, entry);
      try {
        return fs.statSync(fullPath).isDirectory();
      } catch {
        return false;
      }
    });
  } catch (err) {
    error(`Cannot read sessions directory: ${err.message}`);
  }

  if (projectDirs.length === 0) {
    error('No project directories found in sessions directory.');
  }

  // Collect per-project metadata (sessions sorted newest-first by scanProjectDir).
  const projectMeta = [];
  for (const dirName of projectDirs) {
    const projectPath = path.join(sessionsDir, dirName);
    const sessions = scanProjectDir(projectPath);
    if (sessions.length === 0) continue;
    const indexData = readSessionIndex(projectPath);
    const projectName = getProjectName(dirName, indexData);
    const lastActive = sessions[0].modified;
    projectMeta.push({ dirName, projectPath, sessions, projectName, lastActive });
  }

  // Most recently active project first.
  projectMeta.sort((a, b) => b.lastActive - a.lastActive);

  const projectCount = projectMeta.length;
  if (projectCount === 0) {
    error('No projects with sessions found.');
  }

  // Spread the budget across projects, but never below 5 sessions per project.
  const perProjectCap = options.maxPerProject || Math.max(5, Math.floor(limit / projectCount));

  // Sessions modified within the last 30 days count as "recent".
  const recencyThreshold = Date.now() - 30 * 24 * 60 * 60 * 1000;
  const allMessages = [];
  let skippedContextDumps = 0;
  const projectBreakdown = [];

  for (const proj of projectMeta) {
    if (allMessages.length >= limit) break;

    const cappedSessions = proj.sessions.slice(0, perProjectCap);

    let projectMessages = 0;
    let projectSessionsUsed = 0;

    for (const session of cappedSessions) {
      if (allMessages.length >= limit) break;

      // Recency weighting: recent sessions contribute up to 10 messages, old ones 3.
      const isRecent = session.modified.getTime() >= recencyThreshold;
      const perSessionMax = isRecent ? 10 : 3;

      const remaining = Math.min(perSessionMax, limit - allMessages.length);

      try {
        const msgs = await streamExtractMessages(session.filePath, isGenuineUserMessage, remaining);
        let sessionUsed = false;

        for (const msg of msgs) {
          if (allMessages.length >= limit) break;

          const content = msg.content || '';
          // Skip auto-generated continuation-context dumps.
          if (content.startsWith('This session is being continued')) {
            skippedContextDumps++;
            continue;
          }

          // Skip pasted log dumps: >3 non-empty lines where more than 80%
          // look like log-level or timestamp-prefixed lines.
          const lines = content.split('\n').filter(l => l.trim().length > 0);
          if (lines.length > 3) {
            const logPattern = /^\[?(DEBUG|INFO|WARN|ERROR|LOG)\]?/i;
            const timestampPattern = /^\d{4}-\d{2}-\d{2}/;
            const logLines = lines.filter(l => logPattern.test(l.trim()) || timestampPattern.test(l.trim()));
            if (logLines.length / lines.length > 0.8) {
              skippedContextDumps++;
              continue;
            }
          }

          // Re-truncate to the tighter profiling budget (maxChars, default 500).
          const truncated = truncateContent(content, maxChars);

          allMessages.push({
            sessionId: msg.sessionId,
            projectName: proj.projectName,
            projectPath: msg.projectPath,
            timestamp: msg.timestamp,
            content: truncated,
          });

          projectMessages++;
          sessionUsed = true;
        }
        if (sessionUsed) projectSessionsUsed++;
      } catch {
        // Unreadable session files are silently skipped (best-effort sampling).
        continue;
      }
    }

    if (projectMessages > 0) {
      projectBreakdown.push({
        project: proj.projectName,
        messages: projectMessages,
        sessions: projectSessionsUsed,
      });
    }
  }

  // Write the sample to a fresh temp file, one JSON object per line.
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gsd-profile-'));
  const outputPath = path.join(tmpDir, 'profile-sample.jsonl');
  for (const msg of allMessages) {
    fs.appendFileSync(outputPath, JSON.stringify(msg) + '\n');
  }

  const result = {
    output_file: outputPath,
    projects_sampled: projectBreakdown.length,
    messages_sampled: allMessages.length,
    per_project_cap: perProjectCap,
    message_char_limit: maxChars,
    skipped_context_dumps: skippedContextDumps,
    project_breakdown: projectBreakdown,
  };

  output(result, raw);
}
|
||||
|
||||
// Public surface of the profile pipeline: session scanning, message
// extraction, and multi-project sampling commands.
module.exports = {
  cmdScanSessions,
  cmdExtractMessages,
  cmdProfileSample,
};
|
||||
306
get-shit-done/bin/lib/roadmap.cjs
Normal file
306
get-shit-done/bin/lib/roadmap.cjs
Normal file
@@ -0,0 +1,306 @@
|
||||
/**
|
||||
* Roadmap — Roadmap parsing and update operations
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { escapeRegex, normalizePhaseName, output, error, findPhaseInternal, stripShippedMilestones, extractCurrentMilestone, replaceInCurrentMilestone } = require('./core.cjs');
|
||||
|
||||
/**
 * `roadmap get-phase` command: locate one phase's section in
 * `.planning/ROADMAP.md` (scoped to the current milestone) and emit its
 * name, goal, success criteria, and full section text.
 *
 * Emits `{ found: false, ... }` when the roadmap or the phase is missing,
 * and a `malformed_roadmap` error when the phase appears only in the
 * checklist summary without a detail heading.
 *
 * @param {string} cwd - Project root.
 * @param {string} phaseNum - Phase identifier, e.g. "3" or "2.1".
 * @param {boolean} raw - Pass-through flag for output().
 */
function cmdRoadmapGetPhase(cwd, phaseNum, raw) {
  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');

  if (!fs.existsSync(roadmapPath)) {
    output({ found: false, error: 'ROADMAP.md not found' }, raw, '');
    return;
  }

  try {
    // Only search within the current milestone's slice of the roadmap.
    const content = extractCurrentMilestone(fs.readFileSync(roadmapPath, 'utf-8'), cwd);

    // Escape special regex chars in phase number, handle decimal
    const escapedPhase = escapeRegex(phaseNum);

    // Match "## Phase X:", "### Phase X:", or "#### Phase X:" with optional name
    const phasePattern = new RegExp(
      `#{2,4}\\s*Phase\\s+${escapedPhase}:\\s*([^\\n]+)`,
      'i'
    );
    const headerMatch = content.match(phasePattern);

    if (!headerMatch) {
      // Fallback: check if phase exists in summary list but missing detail section
      const checklistPattern = new RegExp(
        `-\\s*\\[[ x]\\]\\s*\\*\\*Phase\\s+${escapedPhase}:\\s*([^*]+)\\*\\*`,
        'i'
      );
      const checklistMatch = content.match(checklistPattern);

      if (checklistMatch) {
        // Phase exists in summary but missing detail section - malformed ROADMAP
        output({
          found: false,
          phase_number: phaseNum,
          phase_name: checklistMatch[1].trim(),
          error: 'malformed_roadmap',
          message: `Phase ${phaseNum} exists in summary list but missing "### Phase ${phaseNum}:" detail section. ROADMAP.md needs both formats.`
        }, raw, '');
        return;
      }

      // Phase not present at all.
      output({ found: false, phase_number: phaseNum }, raw, '');
      return;
    }

    const phaseName = headerMatch[1].trim();
    const headerIndex = headerMatch.index;

    // Find the end of this section (next ## or ### phase header, or end of file)
    const restOfContent = content.slice(headerIndex);
    const nextHeaderMatch = restOfContent.match(/\n#{2,4}\s+Phase\s+\d/i);
    const sectionEnd = nextHeaderMatch
      ? headerIndex + nextHeaderMatch.index
      : content.length;

    const section = content.slice(headerIndex, sectionEnd).trim();

    // Extract goal if present (supports both **Goal:** and **Goal**: formats)
    const goalMatch = section.match(/\*\*Goal(?::\*\*|\*\*:)\s*([^\n]+)/i);
    const goal = goalMatch ? goalMatch[1].trim() : null;

    // Extract success criteria as structured array (numbered-list items
    // under a "**Success Criteria**" label, numbering stripped).
    const criteriaMatch = section.match(/\*\*Success Criteria\*\*[^\n]*:\s*\n((?:\s*\d+\.\s*[^\n]+\n?)+)/i);
    const success_criteria = criteriaMatch
      ? criteriaMatch[1].trim().split('\n').map(line => line.replace(/^\s*\d+\.\s*/, '').trim()).filter(Boolean)
      : [];

    output(
      {
        found: true,
        phase_number: phaseNum,
        phase_name: phaseName,
        goal,
        success_criteria,
        section,
      },
      raw,
      section
    );
  } catch (e) {
    error('Failed to read ROADMAP.md: ' + e.message);
  }
}
|
||||
|
||||
/**
 * `roadmap analyze` command: parse `.planning/ROADMAP.md` (current milestone
 * only) and cross-reference each phase against the `.planning/phases/`
 * directory to compute per-phase status, milestone list, progress stats, and
 * the current/next phase.
 *
 * Per-phase `disk_status` values: complete, partial, planned, researched,
 * discussed, empty, no_directory. A checked roadmap checkbox overrides disk
 * state to "complete".
 *
 * @param {string} cwd - Project root.
 * @param {boolean} raw - Pass-through flag for output().
 */
function cmdRoadmapAnalyze(cwd, raw) {
  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');

  if (!fs.existsSync(roadmapPath)) {
    output({ error: 'ROADMAP.md not found', milestones: [], phases: [], current_phase: null }, raw);
    return;
  }

  const rawContent = fs.readFileSync(roadmapPath, 'utf-8');
  const content = extractCurrentMilestone(rawContent, cwd);
  const phasesDir = path.join(cwd, '.planning', 'phases');

  // Extract all phase headings: ## Phase N: Name or ### Phase N: Name
  const phasePattern = /#{2,4}\s*Phase\s+(\d+[A-Z]?(?:\.\d+)*)\s*:\s*([^\n]+)/gi;
  const phases = [];
  let match;

  while ((match = phasePattern.exec(content)) !== null) {
    const phaseNum = match[1];
    const phaseName = match[2].replace(/\(INSERTED\)/i, '').trim();

    // Extract goal from the section
    const sectionStart = match.index;
    const restOfContent = content.slice(sectionStart);
    const nextHeader = restOfContent.match(/\n#{2,4}\s+Phase\s+\d/i);
    const sectionEnd = nextHeader ? sectionStart + nextHeader.index : content.length;
    const section = content.slice(sectionStart, sectionEnd);

    const goalMatch = section.match(/\*\*Goal(?::\*\*|\*\*:)\s*([^\n]+)/i);
    const goal = goalMatch ? goalMatch[1].trim() : null;

    const dependsMatch = section.match(/\*\*Depends on(?::\*\*|\*\*:)\s*([^\n]+)/i);
    const depends_on = dependsMatch ? dependsMatch[1].trim() : null;

    // Check completion on disk
    const normalized = normalizePhaseName(phaseNum);
    let diskStatus = 'no_directory';
    let planCount = 0;
    let summaryCount = 0;
    let hasContext = false;
    let hasResearch = false;

    try {
      const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
      const dirs = entries.filter(e => e.isDirectory()).map(e => e.name);
      // Phase dir is "<normalized>" or "<normalized>-<slug>".
      const dirMatch = dirs.find(d => d.startsWith(normalized + '-') || d === normalized);

      if (dirMatch) {
        const phaseFiles = fs.readdirSync(path.join(phasesDir, dirMatch));
        planCount = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md').length;
        summaryCount = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md').length;
        hasContext = phaseFiles.some(f => f.endsWith('-CONTEXT.md') || f === 'CONTEXT.md');
        hasResearch = phaseFiles.some(f => f.endsWith('-RESEARCH.md') || f === 'RESEARCH.md');

        // Status ladder, most advanced first.
        if (summaryCount >= planCount && planCount > 0) diskStatus = 'complete';
        else if (summaryCount > 0) diskStatus = 'partial';
        else if (planCount > 0) diskStatus = 'planned';
        else if (hasResearch) diskStatus = 'researched';
        else if (hasContext) diskStatus = 'discussed';
        else diskStatus = 'empty';
      }
    } catch {} // phases dir missing/unreadable -> keep 'no_directory'

    // Check ROADMAP checkbox status
    const checkboxPattern = new RegExp(`-\\s*\\[(x| )\\]\\s*.*Phase\\s+${escapeRegex(phaseNum)}[:\\s]`, 'i');
    const checkboxMatch = content.match(checkboxPattern);
    const roadmapComplete = checkboxMatch ? checkboxMatch[1] === 'x' : false;

    // If roadmap marks phase complete, trust that over disk file structure.
    // Phases completed before GSD tracking (or via external tools) may lack
    // the standard PLAN/SUMMARY pairs but are still done.
    if (roadmapComplete && diskStatus !== 'complete') {
      diskStatus = 'complete';
    }

    phases.push({
      number: phaseNum,
      name: phaseName,
      goal,
      depends_on,
      plan_count: planCount,
      summary_count: summaryCount,
      has_context: hasContext,
      has_research: hasResearch,
      disk_status: diskStatus,
      roadmap_complete: roadmapComplete,
    });
  }

  // Extract milestone info
  const milestones = [];
  const milestonePattern = /##\s*(.*v(\d+\.\d+)[^(\n]*)/gi;
  let mMatch;
  while ((mMatch = milestonePattern.exec(content)) !== null) {
    milestones.push({
      heading: mMatch[1].trim(),
      version: 'v' + mMatch[2],
    });
  }

  // Find current and next phase
  // current = first phase with work in flight; next = first untouched phase.
  const currentPhase = phases.find(p => p.disk_status === 'planned' || p.disk_status === 'partial') || null;
  const nextPhase = phases.find(p => p.disk_status === 'empty' || p.disk_status === 'no_directory' || p.disk_status === 'discussed' || p.disk_status === 'researched') || null;

  // Aggregated stats
  const totalPlans = phases.reduce((sum, p) => sum + p.plan_count, 0);
  const totalSummaries = phases.reduce((sum, p) => sum + p.summary_count, 0);
  const completedPhases = phases.filter(p => p.disk_status === 'complete').length;

  // Detect phases in summary list without detail sections (malformed ROADMAP)
  const checklistPattern = /-\s*\[[ x]\]\s*\*\*Phase\s+(\d+[A-Z]?(?:\.\d+)*)/gi;
  const checklistPhases = new Set();
  let checklistMatch;
  while ((checklistMatch = checklistPattern.exec(content)) !== null) {
    checklistPhases.add(checklistMatch[1]);
  }
  const detailPhases = new Set(phases.map(p => p.number));
  const missingDetails = [...checklistPhases].filter(p => !detailPhases.has(p));

  const result = {
    milestones,
    phases,
    phase_count: phases.length,
    completed_phases: completedPhases,
    total_plans: totalPlans,
    total_summaries: totalSummaries,
    // Summaries over plans, capped at 100%.
    progress_percent: totalPlans > 0 ? Math.min(100, Math.round((totalSummaries / totalPlans) * 100)) : 0,
    current_phase: currentPhase ? currentPhase.number : null,
    next_phase: nextPhase ? nextPhase.number : null,
    missing_phase_details: missingDetails.length > 0 ? missingDetails : null,
  };

  output(result, raw);
}
|
||||
|
||||
/**
 * `roadmap update-plan-progress` — sync a phase's plan/summary counts into ROADMAP.md.
 *
 * Counts PLAN/SUMMARY files on disk for the phase, then rewrites up to three
 * spots inside the current milestone's section of ROADMAP.md:
 *   1. the progress-table row (Plans column as "summaries/plans", Status column,
 *      plus the date column when complete),
 *   2. the "**Plans:**" line in the phase detail section,
 *   3. the phase checkbox (checked + completion date) once all plans have summaries.
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {string} phaseNum - Phase number (e.g. "3" or "2.1").
 * @param {boolean} raw - When true, emit terse text instead of JSON.
 */
function cmdRoadmapUpdatePlanProgress(cwd, phaseNum, raw) {
  if (!phaseNum) {
    error('phase number required for roadmap update-plan-progress');
  }

  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');

  const phaseInfo = findPhaseInternal(cwd, phaseNum);
  if (!phaseInfo) {
    error(`Phase ${phaseNum} not found`);
  }

  const planCount = phaseInfo.plans.length;
  const summaryCount = phaseInfo.summaries.length;

  // Nothing to report until at least one plan exists on disk.
  if (planCount === 0) {
    output({ updated: false, reason: 'No plans found', plan_count: 0, summary_count: 0 }, raw, 'no plans');
    return;
  }

  // A phase is complete when every plan has a matching summary.
  const isComplete = summaryCount >= planCount;
  const status = isComplete ? 'Complete' : summaryCount > 0 ? 'In Progress' : 'Planned';
  const today = new Date().toISOString().split('T')[0];

  if (!fs.existsSync(roadmapPath)) {
    output({ updated: false, reason: 'ROADMAP.md not found', plan_count: planCount, summary_count: summaryCount }, raw, 'no roadmap');
    return;
  }

  let roadmapContent = fs.readFileSync(roadmapPath, 'utf-8');
  const phaseEscaped = escapeRegex(phaseNum);

  // Progress table row: update Plans column (summaries/plans) and Status column.
  // Captures the phase-name cell and the three following column separators.
  const tablePattern = new RegExp(
    `(\\|\\s*${phaseEscaped}\\.?\\s[^|]*\\|)[^|]*(\\|)\\s*[^|]*(\\|)\\s*[^|]*(\\|)`,
    'i'
  );
  // Date column only gets filled in once the phase is complete.
  const dateField = isComplete ? ` ${today} ` : ' ';
  roadmapContent = replaceInCurrentMilestone(
    roadmapContent, tablePattern,
    `$1 ${summaryCount}/${planCount} $2 ${status.padEnd(11)}$3${dateField}$4`
  );

  // Update plan count in phase detail section ("**Plans:** N/M plans ...").
  const planCountPattern = new RegExp(
    `(#{2,4}\\s*Phase\\s+${phaseEscaped}[\\s\\S]*?\\*\\*Plans:\\*\\*\\s*)[^\\n]+`,
    'i'
  );
  const planCountText = isComplete
    ? `${summaryCount}/${planCount} plans complete`
    : `${summaryCount}/${planCount} plans executed`;
  roadmapContent = replaceInCurrentMilestone(roadmapContent, planCountPattern, `$1${planCountText}`);

  // If complete: check checkbox and append the completion date.
  if (isComplete) {
    const checkboxPattern = new RegExp(
      `(-\\s*\\[)[ ](\\]\\s*.*Phase\\s+${phaseEscaped}[:\\s][^\\n]*)`,
      'i'
    );
    roadmapContent = replaceInCurrentMilestone(roadmapContent, checkboxPattern, `$1x$2 (completed ${today})`);
  }

  fs.writeFileSync(roadmapPath, roadmapContent, 'utf-8');

  output({
    updated: true,
    phase: phaseNum,
    plan_count: planCount,
    summary_count: summaryCount,
    status,
    complete: isComplete,
  }, raw, `${summaryCount}/${planCount} ${status}`);
}
|
||||
|
||||
// Public API of the roadmap command group (wired up by the CLI dispatcher).
module.exports = {
  cmdRoadmapGetPhase,
  cmdRoadmapAnalyze,
  cmdRoadmapUpdatePlanProgress,
};
|
||||
848
get-shit-done/bin/lib/state.cjs
Normal file
848
get-shit-done/bin/lib/state.cjs
Normal file
@@ -0,0 +1,848 @@
|
||||
/**
|
||||
* State — STATE.md operations and progression engine
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { escapeRegex, loadConfig, getMilestoneInfo, getMilestonePhaseFilter, normalizeMd, output, error } = require('./core.cjs');
|
||||
const { extractFrontmatter, reconstructFrontmatter } = require('./frontmatter.cjs');
|
||||
|
||||
// Shared helper: extract a field value from STATE.md content.
|
||||
// Supports both **Field:** bold and plain Field: format.
|
||||
// NOTE(review): a second `stateExtractField` declaration appears later in this
// file; because function declarations are hoisted, the later one wins and this
// copy is dead code — consider deleting one of the two.
function stateExtractField(content, fieldName) {
  const escaped = escapeRegex(fieldName);
  // Bold "**Field:** value" takes precedence over plain "Field: value".
  const boldPattern = new RegExp(`\\*\\*${escaped}:\\*\\*\\s*(.+)`, 'i');
  const boldMatch = content.match(boldPattern);
  if (boldMatch) return boldMatch[1].trim();
  // Plain form is anchored to line start (multiline flag).
  const plainPattern = new RegExp(`^${escaped}:\\s*(.+)`, 'im');
  const plainMatch = content.match(plainPattern);
  return plainMatch ? plainMatch[1].trim() : null;
}
|
||||
|
||||
/**
 * `state load` — load project config plus STATE.md existence/content in one call.
 *
 * JSON mode returns { config, state_raw, state_exists, roadmap_exists,
 * config_exists }. In --raw mode it prints a condensed key=value listing of
 * config flags and exits the process directly.
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {boolean} raw - When true, emit key=value lines instead of JSON.
 */
function cmdStateLoad(cwd, raw) {
  const config = loadConfig(cwd);
  const planningDir = path.join(cwd, '.planning');

  // A missing STATE.md is not an error here; callers check state_exists.
  let stateRaw = '';
  try {
    stateRaw = fs.readFileSync(path.join(planningDir, 'STATE.md'), 'utf-8');
  } catch {}

  const configExists = fs.existsSync(path.join(planningDir, 'config.json'));
  const roadmapExists = fs.existsSync(path.join(planningDir, 'ROADMAP.md'));
  const stateExists = stateRaw.length > 0;

  const result = {
    config,
    state_raw: stateRaw,
    state_exists: stateExists,
    roadmap_exists: roadmapExists,
    config_exists: configExists,
  };

  // For --raw, output a condensed key=value format
  if (raw) {
    const c = config;
    const lines = [
      `model_profile=${c.model_profile}`,
      `commit_docs=${c.commit_docs}`,
      `branching_strategy=${c.branching_strategy}`,
      `phase_branch_template=${c.phase_branch_template}`,
      `milestone_branch_template=${c.milestone_branch_template}`,
      `parallelization=${c.parallelization}`,
      `research=${c.research}`,
      `plan_checker=${c.plan_checker}`,
      `verifier=${c.verifier}`,
      `config_exists=${configExists}`,
      `roadmap_exists=${roadmapExists}`,
      `state_exists=${stateExists}`,
    ];
    process.stdout.write(lines.join('\n'));
    // Exit here so the JSON path below never runs in raw mode.
    process.exit(0);
  }

  output(result);
}
|
||||
|
||||
/**
 * `state get [section]` — print STATE.md, or a single field/section of it.
 *
 * Lookup order for `section`: bold "**Field:** value", then plain
 * "Field: value", then a "## Section" heading (body up to the next "##").
 * Errors out if STATE.md is unreadable.
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {string|undefined} section - Field or heading name; omit for full file.
 * @param {boolean} raw - When true, emit plain text instead of JSON.
 */
function cmdStateGet(cwd, section, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  try {
    const content = fs.readFileSync(statePath, 'utf-8');

    // No section requested: dump the whole file.
    if (!section) {
      output({ content }, raw, content);
      return;
    }

    // Try to find markdown section or field
    const fieldEscaped = section.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

    // Check for **field:** value (bold format)
    const boldPattern = new RegExp(`\\*\\*${fieldEscaped}:\\*\\*\\s*(.*)`, 'i');
    const boldMatch = content.match(boldPattern);
    if (boldMatch) {
      output({ [section]: boldMatch[1].trim() }, raw, boldMatch[1].trim());
      return;
    }

    // Check for field: value (plain format)
    const plainPattern = new RegExp(`^${fieldEscaped}:\\s*(.*)`, 'im');
    const plainMatch = content.match(plainPattern);
    if (plainMatch) {
      output({ [section]: plainMatch[1].trim() }, raw, plainMatch[1].trim());
      return;
    }

    // Check for ## Section
    const sectionPattern = new RegExp(`##\\s*${fieldEscaped}\\s*\n([\\s\\S]*?)(?=\\n##|$)`, 'i');
    const sectionMatch = content.match(sectionPattern);
    if (sectionMatch) {
      output({ [section]: sectionMatch[1].trim() }, raw, sectionMatch[1].trim());
      return;
    }

    output({ error: `Section or field "${section}" not found` }, raw, '');
  } catch {
    error('STATE.md not found');
  }
}
|
||||
|
||||
/**
 * Resolve a text argument that may be supplied inline or via a file.
 *
 * When `filePath` is absent, the inline `value` is returned untouched.
 * Otherwise the file (absolute, or relative to `cwd`) is read as UTF-8 and
 * returned with trailing whitespace stripped.
 *
 * @param {string} cwd - Base directory for relative paths.
 * @param {string} value - Inline text (used only when filePath is falsy).
 * @param {string|undefined} filePath - Optional path to read text from.
 * @param {string} label - Name used in the error message.
 * @returns {string} The resolved text.
 * @throws {Error} "<label> file not found: <filePath>" when the read fails.
 */
function readTextArgOrFile(cwd, value, filePath, label) {
  // No file given: the inline value wins as-is.
  if (!filePath) {
    return value;
  }

  let target = filePath;
  if (!path.isAbsolute(target)) {
    target = path.join(cwd, target);
  }

  try {
    const text = fs.readFileSync(target, 'utf-8');
    return text.trimEnd();
  } catch {
    throw new Error(`${label} file not found: ${filePath}`);
  }
}
|
||||
|
||||
/**
 * `state patch` — batch-update multiple STATE.md fields in one write.
 *
 * Each field is matched in bold "**Field:**" form first, then plain "Field:"
 * form; fields matching neither are collected in `failed`. The file is
 * rewritten (with frontmatter resync) only if at least one field updated.
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {Object<string,string>} patches - Map of field name → new value.
 * @param {boolean} raw - When true, emit 'true'/'false' instead of JSON.
 */
function cmdStatePatch(cwd, patches, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  try {
    let content = fs.readFileSync(statePath, 'utf-8');
    const results = { updated: [], failed: [] };

    for (const [field, value] of Object.entries(patches)) {
      const fieldEscaped = field.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
      // Try **Field:** bold format first, then plain Field: format
      const boldPattern = new RegExp(`(\\*\\*${fieldEscaped}:\\*\\*\\s*)(.*)`, 'i');
      const plainPattern = new RegExp(`(^${fieldEscaped}:\\s*)(.*)`, 'im');

      if (boldPattern.test(content)) {
        // Replacer callback sidesteps '$'-substitution patterns in values.
        content = content.replace(boldPattern, (_match, prefix) => `${prefix}${value}`);
        results.updated.push(field);
      } else if (plainPattern.test(content)) {
        content = content.replace(plainPattern, (_match, prefix) => `${prefix}${value}`);
        results.updated.push(field);
      } else {
        results.failed.push(field);
      }
    }

    // Only touch disk when something actually changed.
    if (results.updated.length > 0) {
      writeStateMd(statePath, content, cwd);
    }

    output(results, raw, results.updated.length > 0 ? 'true' : 'false');
  } catch {
    error('STATE.md not found');
  }
}
|
||||
|
||||
/**
 * `state update <field> <value>` — set a single STATE.md field.
 *
 * Tries bold "**Field:**" form first, then plain "Field:". Reports
 * { updated: false, reason } instead of erroring when the field or
 * STATE.md itself is missing (only a missing argument is fatal).
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {string} field - Field name to set.
 * @param {string} value - New value.
 */
function cmdStateUpdate(cwd, field, value) {
  if (!field || value === undefined) {
    error('field and value required for state update');
  }

  const statePath = path.join(cwd, '.planning', 'STATE.md');
  try {
    let content = fs.readFileSync(statePath, 'utf-8');
    const fieldEscaped = field.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    // Try **Field:** bold format first, then plain Field: format
    const boldPattern = new RegExp(`(\\*\\*${fieldEscaped}:\\*\\*\\s*)(.*)`, 'i');
    const plainPattern = new RegExp(`(^${fieldEscaped}:\\s*)(.*)`, 'im');
    if (boldPattern.test(content)) {
      // Replacer callback sidesteps '$'-substitution patterns in the value.
      content = content.replace(boldPattern, (_match, prefix) => `${prefix}${value}`);
      writeStateMd(statePath, content, cwd);
      output({ updated: true });
    } else if (plainPattern.test(content)) {
      content = content.replace(plainPattern, (_match, prefix) => `${prefix}${value}`);
      writeStateMd(statePath, content, cwd);
      output({ updated: true });
    } else {
      output({ updated: false, reason: `Field "${field}" not found in STATE.md` });
    }
  } catch {
    output({ updated: false, reason: 'STATE.md not found' });
  }
}
|
||||
|
||||
// ─── State Progression Engine ────────────────────────────────────────────────
|
||||
|
||||
/**
 * Read a field value out of STATE.md content.
 *
 * Bold "**Field:** value" is checked before the plain, line-anchored
 * "Field: value" form; field names are regex-escaped first.
 *
 * @param {string} content - STATE.md text.
 * @param {string} fieldName - Field to look up (literal, case-insensitive).
 * @returns {string|null} Trimmed value, or null when the field is absent.
 */
function stateExtractField(content, fieldName) {
  const safe = fieldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  // Ordered: bold form wins over the plain form.
  const patterns = [
    new RegExp(`\\*\\*${safe}:\\*\\*\\s*(.+)`, 'i'),
    new RegExp(`^${safe}:\\s*(.+)`, 'im'),
  ];
  for (const pattern of patterns) {
    const hit = content.match(pattern);
    if (hit) {
      return hit[1].trim();
    }
  }
  return null;
}
|
||||
|
||||
/**
 * Replace a field's value in STATE.md content, preserving the field label.
 *
 * Bold "**Field:**" form is tried before the plain, line-anchored "Field:"
 * form. The replacement uses a callback so '$' sequences in `newValue` are
 * inserted literally.
 *
 * @param {string} content - STATE.md text.
 * @param {string} fieldName - Field to rewrite (literal, case-insensitive).
 * @param {string} newValue - Replacement value.
 * @returns {string|null} Updated content, or null when the field is absent.
 */
function stateReplaceField(content, fieldName, newValue) {
  const safe = fieldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  // Ordered: bold form wins over the plain form.
  const candidates = [
    new RegExp(`(\\*\\*${safe}:\\*\\*\\s*)(.*)`, 'i'),
    new RegExp(`(^${safe}:\\s*)(.*)`, 'im'),
  ];
  for (const pattern of candidates) {
    if (pattern.test(content)) {
      return content.replace(pattern, (_hit, prefix) => `${prefix}${newValue}`);
    }
  }
  return null;
}
|
||||
|
||||
/**
 * `state advance-plan` — progression-engine step run after a plan completes.
 *
 * If the current plan is the last one in the phase, sets Status to
 * "Phase complete — ready for verification"; otherwise increments Current Plan
 * and resets Status to "Ready to execute". Either way stamps Last Activity
 * with today's date (UTC, from toISOString).
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {boolean} raw - When true, emit 'true'/'false' instead of JSON.
 */
function cmdStateAdvancePlan(cwd, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (!fs.existsSync(statePath)) { output({ error: 'STATE.md not found' }, raw); return; }

  let content = fs.readFileSync(statePath, 'utf-8');
  // parseInt(null) is NaN, so a missing field falls into the error branch below.
  const currentPlan = parseInt(stateExtractField(content, 'Current Plan'), 10);
  const totalPlans = parseInt(stateExtractField(content, 'Total Plans in Phase'), 10);
  const today = new Date().toISOString().split('T')[0];

  if (isNaN(currentPlan) || isNaN(totalPlans)) {
    output({ error: 'Cannot parse Current Plan or Total Plans in Phase from STATE.md' }, raw);
    return;
  }

  if (currentPlan >= totalPlans) {
    // Last plan: phase moves to verification instead of advancing.
    content = stateReplaceField(content, 'Status', 'Phase complete — ready for verification') || content;
    content = stateReplaceField(content, 'Last Activity', today) || content;
    writeStateMd(statePath, content, cwd);
    output({ advanced: false, reason: 'last_plan', current_plan: currentPlan, total_plans: totalPlans, status: 'ready_for_verification' }, raw, 'false');
  } else {
    const newPlan = currentPlan + 1;
    content = stateReplaceField(content, 'Current Plan', String(newPlan)) || content;
    content = stateReplaceField(content, 'Status', 'Ready to execute') || content;
    content = stateReplaceField(content, 'Last Activity', today) || content;
    writeStateMd(statePath, content, cwd);
    output({ advanced: true, previous_plan: currentPlan, current_plan: newPlan, total_plans: totalPlans }, raw, 'true');
  }
}
|
||||
|
||||
/**
 * `state record-metric` — append a row to STATE.md's Performance Metrics table.
 *
 * Requires `phase`, `plan`, and `duration` in options; `tasks`/`files`
 * default to "-". A "None yet" placeholder body is replaced on first insert.
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {{phase:string, plan:string, duration:string, tasks?:string, files?:string}} options
 * @param {boolean} raw - When true, emit 'true'/'false' instead of JSON.
 */
function cmdStateRecordMetric(cwd, options, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (!fs.existsSync(statePath)) { output({ error: 'STATE.md not found' }, raw); return; }

  let content = fs.readFileSync(statePath, 'utf-8');
  const { phase, plan, duration, tasks, files } = options;

  if (!phase || !plan || !duration) {
    output({ error: 'phase, plan, and duration required' }, raw);
    return;
  }

  // Find Performance Metrics section and its table
  // (captures: heading + header row + separator row, then the table body).
  const metricsPattern = /(##\s*Performance Metrics[\s\S]*?\n\|[^\n]+\n\|[-|\s]+\n)([\s\S]*?)(?=\n##|\n$|$)/i;
  const metricsMatch = content.match(metricsPattern);

  if (metricsMatch) {
    let tableBody = metricsMatch[2].trimEnd();
    const newRow = `| Phase ${phase} P${plan} | ${duration} | ${tasks || '-'} tasks | ${files || '-'} files |`;

    // Replace an empty/"None yet" body outright; otherwise append.
    if (tableBody.trim() === '' || tableBody.includes('None yet')) {
      tableBody = newRow;
    } else {
      tableBody = tableBody + '\n' + newRow;
    }

    content = content.replace(metricsPattern, (_match, header) => `${header}${tableBody}\n`);
    writeStateMd(statePath, content, cwd);
    output({ recorded: true, phase, plan, duration }, raw, 'true');
  } else {
    output({ recorded: false, reason: 'Performance Metrics section not found in STATE.md' }, raw, 'false');
  }
}
|
||||
|
||||
/**
 * `state update-progress` — recompute STATE.md's Progress bar from disk.
 *
 * Counts *-PLAN.md vs *-SUMMARY.md files across the current milestone's phase
 * directories, renders a 10-character block bar plus a percent (capped at
 * 100), and rewrites the Progress field (bold or plain form).
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {boolean} raw - When true, emit the bar string / 'false' instead of JSON.
 */
function cmdStateUpdateProgress(cwd, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (!fs.existsSync(statePath)) { output({ error: 'STATE.md not found' }, raw); return; }

  let content = fs.readFileSync(statePath, 'utf-8');

  // Count summaries across current milestone phases only
  const phasesDir = path.join(cwd, '.planning', 'phases');
  let totalPlans = 0;
  let totalSummaries = 0;

  if (fs.existsSync(phasesDir)) {
    // Filter restricts the directory list to the active milestone's phases.
    const isDirInMilestone = getMilestonePhaseFilter(cwd);
    const phaseDirs = fs.readdirSync(phasesDir, { withFileTypes: true })
      .filter(e => e.isDirectory()).map(e => e.name)
      .filter(isDirInMilestone);
    for (const dir of phaseDirs) {
      const files = fs.readdirSync(path.join(phasesDir, dir));
      totalPlans += files.filter(f => f.match(/-PLAN\.md$/i)).length;
      totalSummaries += files.filter(f => f.match(/-SUMMARY\.md$/i)).length;
    }
  }

  const percent = totalPlans > 0 ? Math.min(100, Math.round(totalSummaries / totalPlans * 100)) : 0;
  const barWidth = 10;
  const filled = Math.round(percent / 100 * barWidth);
  // \u2588 = full block, \u2591 = light shade.
  const bar = '\u2588'.repeat(filled) + '\u2591'.repeat(barWidth - filled);
  const progressStr = `[${bar}] ${percent}%`;

  // Try **Progress:** bold format first, then plain Progress: format
  const boldProgressPattern = /(\*\*Progress:\*\*\s*).*/i;
  const plainProgressPattern = /^(Progress:\s*).*/im;
  if (boldProgressPattern.test(content)) {
    content = content.replace(boldProgressPattern, (_match, prefix) => `${prefix}${progressStr}`);
    writeStateMd(statePath, content, cwd);
    output({ updated: true, percent, completed: totalSummaries, total: totalPlans, bar: progressStr }, raw, progressStr);
  } else if (plainProgressPattern.test(content)) {
    content = content.replace(plainProgressPattern, (_match, prefix) => `${prefix}${progressStr}`);
    writeStateMd(statePath, content, cwd);
    output({ updated: true, percent, completed: totalSummaries, total: totalPlans, bar: progressStr }, raw, progressStr);
  } else {
    output({ updated: false, reason: 'Progress field not found in STATE.md' }, raw, 'false');
  }
}
|
||||
|
||||
/**
 * `state add-decision` — append a decision entry to STATE.md's Decisions section.
 *
 * Summary and rationale may be given inline or via file paths (summary is
 * required). The entry is appended as a bullet under the first heading
 * matching "Decisions" / "Decisions Made" / "Accumulated ... Decisions",
 * after stripping any "None yet"-style placeholder.
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {{phase?:string, summary?:string, summary_file?:string, rationale?:string, rationale_file?:string}} options
 * @param {boolean} raw - When true, emit 'true'/'false' instead of JSON.
 */
function cmdStateAddDecision(cwd, options, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (!fs.existsSync(statePath)) { output({ error: 'STATE.md not found' }, raw); return; }

  const { phase, summary, summary_file, rationale, rationale_file } = options;
  let summaryText = null;
  let rationaleText = '';

  try {
    summaryText = readTextArgOrFile(cwd, summary, summary_file, 'summary');
    rationaleText = readTextArgOrFile(cwd, rationale || '', rationale_file, 'rationale');
  } catch (err) {
    // Unreadable --*-file paths are reported, not thrown.
    output({ added: false, reason: err.message }, raw, 'false');
    return;
  }

  if (!summaryText) { output({ error: 'summary required' }, raw); return; }

  let content = fs.readFileSync(statePath, 'utf-8');
  const entry = `- [Phase ${phase || '?'}]: ${summaryText}${rationaleText ? ` — ${rationaleText}` : ''}`;

  // Find Decisions section (various heading patterns)
  const sectionPattern = /(###?\s*(?:Decisions|Decisions Made|Accumulated.*Decisions)\s*\n)([\s\S]*?)(?=\n###?|\n##[^#]|$)/i;
  const match = content.match(sectionPattern);

  if (match) {
    let sectionBody = match[2];
    // Remove placeholders
    sectionBody = sectionBody.replace(/None yet\.?\s*\n?/gi, '').replace(/No decisions yet\.?\s*\n?/gi, '');
    sectionBody = sectionBody.trimEnd() + '\n' + entry + '\n';
    content = content.replace(sectionPattern, (_match, header) => `${header}${sectionBody}`);
    writeStateMd(statePath, content, cwd);
    output({ added: true, decision: entry }, raw, 'true');
  } else {
    output({ added: false, reason: 'Decisions section not found in STATE.md' }, raw, 'false');
  }
}
|
||||
|
||||
/**
 * `state add-blocker` — append a blocker bullet to STATE.md's Blockers section.
 *
 * Accepts either a plain string or an options object ({ text, text_file });
 * the text may come inline or from a file. "None"-style placeholders are
 * stripped before the entry is appended.
 *
 * @param {string|{text?:string, text_file?:string}} text - Blocker text or options.
 * @param {boolean} raw - When true, emit 'true'/'false' instead of JSON.
 */
function cmdStateAddBlocker(cwd, text, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (!fs.existsSync(statePath)) { output({ error: 'STATE.md not found' }, raw); return; }
  // Normalize the string form into the options-object form.
  const blockerOptions = typeof text === 'object' && text !== null ? text : { text };
  let blockerText = null;

  try {
    blockerText = readTextArgOrFile(cwd, blockerOptions.text, blockerOptions.text_file, 'blocker');
  } catch (err) {
    output({ added: false, reason: err.message }, raw, 'false');
    return;
  }

  if (!blockerText) { output({ error: 'text required' }, raw); return; }

  let content = fs.readFileSync(statePath, 'utf-8');
  const entry = `- ${blockerText}`;

  // Section heading may be "Blockers", "Blockers/Concerns", or "Concerns".
  const sectionPattern = /(###?\s*(?:Blockers|Blockers\/Concerns|Concerns)\s*\n)([\s\S]*?)(?=\n###?|\n##[^#]|$)/i;
  const match = content.match(sectionPattern);

  if (match) {
    let sectionBody = match[2];
    sectionBody = sectionBody.replace(/None\.?\s*\n?/gi, '').replace(/None yet\.?\s*\n?/gi, '');
    sectionBody = sectionBody.trimEnd() + '\n' + entry + '\n';
    content = content.replace(sectionPattern, (_match, header) => `${header}${sectionBody}`);
    writeStateMd(statePath, content, cwd);
    output({ added: true, blocker: blockerText }, raw, 'true');
  } else {
    output({ added: false, reason: 'Blockers section not found in STATE.md' }, raw, 'false');
  }
}
|
||||
|
||||
/**
 * `state resolve-blocker` — remove matching bullets from the Blockers section.
 *
 * Any "- " bullet whose text contains `text` (case-insensitive substring
 * match) is dropped; if no bullets remain, the section body is replaced with
 * a "None" placeholder.
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {string} text - Substring identifying the blocker(s) to remove.
 * @param {boolean} raw - When true, emit 'true'/'false' instead of JSON.
 */
function cmdStateResolveBlocker(cwd, text, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (!fs.existsSync(statePath)) { output({ error: 'STATE.md not found' }, raw); return; }
  if (!text) { output({ error: 'text required' }, raw); return; }

  let content = fs.readFileSync(statePath, 'utf-8');

  // Section heading may be "Blockers", "Blockers/Concerns", or "Concerns".
  const sectionPattern = /(###?\s*(?:Blockers|Blockers\/Concerns|Concerns)\s*\n)([\s\S]*?)(?=\n###?|\n##[^#]|$)/i;
  const match = content.match(sectionPattern);

  if (match) {
    const sectionBody = match[2];
    const lines = sectionBody.split('\n');
    const filtered = lines.filter(line => {
      // Non-bullet lines are always kept.
      if (!line.startsWith('- ')) return true;
      return !line.toLowerCase().includes(text.toLowerCase());
    });

    let newBody = filtered.join('\n');
    // If section is now empty, add placeholder
    if (!newBody.trim() || !newBody.includes('- ')) {
      newBody = 'None\n';
    }

    content = content.replace(sectionPattern, (_match, header) => `${header}${newBody}`);
    writeStateMd(statePath, content, cwd);
    output({ resolved: true, blocker: text }, raw, 'true');
  } else {
    output({ resolved: false, reason: 'Blockers section not found in STATE.md' }, raw, 'false');
  }
}
|
||||
|
||||
/**
 * `state record-session` — stamp session bookkeeping fields in STATE.md.
 *
 * Sets "Last session"/"Last Date" to the current ISO timestamp, optionally
 * records "Stopped At" (options.stopped_at), and sets "Resume File" to
 * options.resume_file or "None". Each field is tried in both capitalization
 * variants; the file is written only when at least one field matched.
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {{stopped_at?:string, resume_file?:string}} options
 * @param {boolean} raw - When true, emit 'true'/'false' instead of JSON.
 */
function cmdStateRecordSession(cwd, options, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (!fs.existsSync(statePath)) { output({ error: 'STATE.md not found' }, raw); return; }

  let content = fs.readFileSync(statePath, 'utf-8');
  const now = new Date().toISOString();
  const updated = [];

  // Update Last session / Last Date
  let result = stateReplaceField(content, 'Last session', now);
  if (result) { content = result; updated.push('Last session'); }
  result = stateReplaceField(content, 'Last Date', now);
  if (result) { content = result; updated.push('Last Date'); }

  // Update Stopped at (both capitalizations tried)
  if (options.stopped_at) {
    result = stateReplaceField(content, 'Stopped At', options.stopped_at);
    if (!result) result = stateReplaceField(content, 'Stopped at', options.stopped_at);
    if (result) { content = result; updated.push('Stopped At'); }
  }

  // Update Resume file (defaults to the "None" placeholder)
  const resumeFile = options.resume_file || 'None';
  result = stateReplaceField(content, 'Resume File', resumeFile);
  if (!result) result = stateReplaceField(content, 'Resume file', resumeFile);
  if (result) { content = result; updated.push('Resume File'); }

  if (updated.length > 0) {
    writeStateMd(statePath, content, cwd);
    output({ recorded: true, updated }, raw, 'true');
  } else {
    output({ recorded: false, reason: 'No session fields found in STATE.md' }, raw, 'false');
  }
}
|
||||
|
||||
/**
 * `state snapshot` — parse STATE.md into one structured snapshot object:
 * position (phase/plan), status, progress, decisions, blockers, and
 * session bookkeeping.
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {boolean} raw - Forwarded to output().
 */
function cmdStateSnapshot(cwd, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');

  if (!fs.existsSync(statePath)) {
    output({ error: 'STATE.md not found' }, raw);
    return;
  }

  const content = fs.readFileSync(statePath, 'utf-8');

  // Extract basic fields (bold **Field:** or plain Field: formats)
  const currentPhase = stateExtractField(content, 'Current Phase');
  const currentPhaseName = stateExtractField(content, 'Current Phase Name');
  const totalPhasesRaw = stateExtractField(content, 'Total Phases');
  const currentPlan = stateExtractField(content, 'Current Plan');
  const totalPlansRaw = stateExtractField(content, 'Total Plans in Phase');
  const status = stateExtractField(content, 'Status');
  const progressRaw = stateExtractField(content, 'Progress');
  const lastActivity = stateExtractField(content, 'Last Activity');
  const lastActivityDesc = stateExtractField(content, 'Last Activity Description');
  const pausedAt = stateExtractField(content, 'Paused At');

  // Parse numeric fields
  const totalPhases = totalPhasesRaw ? parseInt(totalPhasesRaw, 10) : null;
  const totalPlansInPhase = totalPlansRaw ? parseInt(totalPlansRaw, 10) : null;

  // FIX: Progress is rendered as a bar by `state update-progress`
  // ("[██████░░░░] 60%"), so parseInt on the raw string started at '[' and
  // returned NaN. Extract the percent with a regex — matching how
  // buildStateFrontmatter parses the same field — and fall back to a plain
  // numeric parse; report null (not NaN) when unparseable.
  let progressPercent = null;
  if (progressRaw) {
    const pctMatch = progressRaw.match(/(\d+)%/);
    if (pctMatch) {
      progressPercent = parseInt(pctMatch[1], 10);
    } else {
      const parsed = parseInt(progressRaw, 10);
      progressPercent = Number.isNaN(parsed) ? null : parsed;
    }
  }

  // Extract decisions table (rows of "| phase | summary | rationale |").
  const decisions = [];
  const decisionsMatch = content.match(/##\s*Decisions Made[\s\S]*?\n\|[^\n]+\n\|[-|\s]+\n([\s\S]*?)(?=\n##|\n$|$)/i);
  if (decisionsMatch) {
    const tableBody = decisionsMatch[1];
    const rows = tableBody.trim().split('\n').filter(r => r.includes('|'));
    for (const row of rows) {
      const cells = row.split('|').map(c => c.trim()).filter(Boolean);
      // Require the three expected columns; skip malformed rows.
      if (cells.length >= 3) {
        decisions.push({
          phase: cells[0],
          summary: cells[1],
          rationale: cells[2],
        });
      }
    }
  }

  // Extract blockers list ("- item" bullets under "## Blockers").
  const blockers = [];
  const blockersMatch = content.match(/##\s*Blockers\s*\n([\s\S]*?)(?=\n##|$)/i);
  if (blockersMatch) {
    const blockersSection = blockersMatch[1];
    const items = blockersSection.match(/^-\s+(.+)$/gm) || [];
    for (const item of items) {
      blockers.push(item.replace(/^-\s+/, '').trim());
    }
  }

  // Extract session info from the "## Session" section; each field may be in
  // bold or plain form.
  const session = {
    last_date: null,
    stopped_at: null,
    resume_file: null,
  };

  const sessionMatch = content.match(/##\s*Session\s*\n([\s\S]*?)(?=\n##|$)/i);
  if (sessionMatch) {
    const sessionSection = sessionMatch[1];
    const lastDateMatch = sessionSection.match(/\*\*Last Date:\*\*\s*(.+)/i)
      || sessionSection.match(/^Last Date:\s*(.+)/im);
    const stoppedAtMatch = sessionSection.match(/\*\*Stopped At:\*\*\s*(.+)/i)
      || sessionSection.match(/^Stopped At:\s*(.+)/im);
    const resumeFileMatch = sessionSection.match(/\*\*Resume File:\*\*\s*(.+)/i)
      || sessionSection.match(/^Resume File:\s*(.+)/im);

    if (lastDateMatch) session.last_date = lastDateMatch[1].trim();
    if (stoppedAtMatch) session.stopped_at = stoppedAtMatch[1].trim();
    if (resumeFileMatch) session.resume_file = resumeFileMatch[1].trim();
  }

  const result = {
    current_phase: currentPhase,
    current_phase_name: currentPhaseName,
    total_phases: totalPhases,
    current_plan: currentPlan,
    total_plans_in_phase: totalPlansInPhase,
    status,
    progress_percent: progressPercent,
    last_activity: lastActivity,
    last_activity_desc: lastActivityDesc,
    decisions,
    blockers,
    paused_at: pausedAt,
    session,
  };

  output(result, raw);
}
|
||||
|
||||
// ─── State Frontmatter Sync ──────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Extract machine-readable fields from STATE.md markdown body and build
|
||||
* a YAML frontmatter object. Allows hooks and scripts to read state
|
||||
* reliably via `state json` instead of fragile regex parsing.
|
||||
*/
|
||||
function buildStateFrontmatter(bodyContent, cwd) {
  // Scalar fields parsed from the markdown body (bold or plain form).
  const currentPhase = stateExtractField(bodyContent, 'Current Phase');
  const currentPhaseName = stateExtractField(bodyContent, 'Current Phase Name');
  const currentPlan = stateExtractField(bodyContent, 'Current Plan');
  const totalPhasesRaw = stateExtractField(bodyContent, 'Total Phases');
  const totalPlansRaw = stateExtractField(bodyContent, 'Total Plans in Phase');
  const status = stateExtractField(bodyContent, 'Status');
  const progressRaw = stateExtractField(bodyContent, 'Progress');
  const lastActivity = stateExtractField(bodyContent, 'Last Activity');
  const stoppedAt = stateExtractField(bodyContent, 'Stopped At') || stateExtractField(bodyContent, 'Stopped at');
  const pausedAt = stateExtractField(bodyContent, 'Paused At');

  // Milestone lookup is best-effort: a missing/unreadable ROADMAP or config
  // must never block a STATE.md write.
  let milestone = null;
  let milestoneName = null;
  if (cwd) {
    try {
      const info = getMilestoneInfo(cwd);
      milestone = info.version;
      milestoneName = info.name;
    } catch {}
  }

  // Start from the markdown's own numbers, then prefer disk truth
  // (PLAN/SUMMARY file counts) when the phases directory is readable.
  let totalPhases = totalPhasesRaw ? parseInt(totalPhasesRaw, 10) : null;
  let completedPhases = null;
  let totalPlans = totalPlansRaw ? parseInt(totalPlansRaw, 10) : null;
  let completedPlans = null;

  if (cwd) {
    try {
      const phasesDir = path.join(cwd, '.planning', 'phases');
      if (fs.existsSync(phasesDir)) {
        const isDirInMilestone = getMilestonePhaseFilter(cwd);
        const phaseDirs = fs.readdirSync(phasesDir, { withFileTypes: true })
          .filter(e => e.isDirectory()).map(e => e.name)
          .filter(isDirInMilestone);
        let diskTotalPlans = 0;
        let diskTotalSummaries = 0;
        let diskCompletedPhases = 0;

        for (const dir of phaseDirs) {
          const files = fs.readdirSync(path.join(phasesDir, dir));
          const plans = files.filter(f => f.match(/-PLAN\.md$/i)).length;
          const summaries = files.filter(f => f.match(/-SUMMARY\.md$/i)).length;
          diskTotalPlans += plans;
          diskTotalSummaries += summaries;
          // A phase counts as complete once every plan has a summary.
          if (plans > 0 && summaries >= plans) diskCompletedPhases++;
        }
        // NOTE(review): relies on getMilestonePhaseFilter attaching a
        // `phaseCount` property to the returned filter function — confirm
        // against its definition in core.cjs.
        totalPhases = isDirInMilestone.phaseCount > 0
          ? Math.max(phaseDirs.length, isDirInMilestone.phaseCount)
          : phaseDirs.length;
        completedPhases = diskCompletedPhases;
        totalPlans = diskTotalPlans;
        completedPlans = diskTotalSummaries;
      }
    } catch {}
  }

  // Progress may be rendered as a bar ("[██████░░░░] 60%"); pull out just
  // the percent digits.
  let progressPercent = null;
  if (progressRaw) {
    const pctMatch = progressRaw.match(/(\d+)%/);
    if (pctMatch) progressPercent = parseInt(pctMatch[1], 10);
  }

  // Normalize status to one of: planning, discussing, executing, verifying, paused, completed, unknown
  let normalizedStatus = status || 'unknown';
  const statusLower = (status || '').toLowerCase();
  if (statusLower.includes('paused') || statusLower.includes('stopped') || pausedAt) {
    normalizedStatus = 'paused';
  } else if (statusLower.includes('executing') || statusLower.includes('in progress')) {
    normalizedStatus = 'executing';
  } else if (statusLower.includes('planning') || statusLower.includes('ready to plan')) {
    normalizedStatus = 'planning';
  } else if (statusLower.includes('discussing')) {
    normalizedStatus = 'discussing';
  } else if (statusLower.includes('verif')) {
    normalizedStatus = 'verifying';
  } else if (statusLower.includes('complete') || statusLower.includes('done')) {
    normalizedStatus = 'completed';
  } else if (statusLower.includes('ready to execute')) {
    normalizedStatus = 'executing';
  }

  // Assemble the frontmatter object; fields we could not determine are omitted.
  const fm = { gsd_state_version: '1.0' };

  if (milestone) fm.milestone = milestone;
  if (milestoneName) fm.milestone_name = milestoneName;
  if (currentPhase) fm.current_phase = currentPhase;
  if (currentPhaseName) fm.current_phase_name = currentPhaseName;
  if (currentPlan) fm.current_plan = currentPlan;
  fm.status = normalizedStatus;
  if (stoppedAt) fm.stopped_at = stoppedAt;
  if (pausedAt) fm.paused_at = pausedAt;
  fm.last_updated = new Date().toISOString();
  if (lastActivity) fm.last_activity = lastActivity;

  const progress = {};
  if (totalPhases !== null) progress.total_phases = totalPhases;
  if (completedPhases !== null) progress.completed_phases = completedPhases;
  if (totalPlans !== null) progress.total_plans = totalPlans;
  if (completedPlans !== null) progress.completed_plans = completedPlans;
  if (progressPercent !== null) progress.percent = progressPercent;
  if (Object.keys(progress).length > 0) fm.progress = progress;

  return fm;
}
|
||||
|
||||
/**
 * Remove a leading YAML frontmatter block (`---` ... `---`) from markdown text.
 * Content without a frontmatter header is returned unchanged.
 *
 * @param {string} content - Full markdown file content.
 * @returns {string} Content with any leading frontmatter (and trailing blank lines after it) removed.
 */
function stripFrontmatter(content) {
  const frontmatterHeader = /^---\n[\s\S]*?\n---\n*/;
  const bodyOnly = content.replace(frontmatterHeader, '');
  return bodyOnly;
}
|
||||
|
||||
/**
 * Rebuild STATE.md content with frontmatter derived from its body text.
 * Any existing YAML header is discarded, a fresh one is computed from the
 * body, and the result is the new header followed by the untouched body.
 *
 * @param {string} content - Current STATE.md content (header optional).
 * @param {string} cwd - Project root, used when deriving frontmatter fields.
 * @returns {string} STATE.md content with a regenerated YAML header.
 */
function syncStateFrontmatter(content, cwd) {
  const bodyText = stripFrontmatter(content);
  const frontmatter = buildStateFrontmatter(bodyText, cwd);
  const yamlHeader = reconstructFrontmatter(frontmatter);
  return ['---', yamlHeader, '---', '', bodyText].join('\n');
}
|
||||
|
||||
/**
 * Persist STATE.md to disk with its YAML frontmatter regenerated from the body.
 * This is the single write path for STATE.md — all STATE.md writes should use
 * this instead of raw writeFileSync so the header never drifts out of sync.
 *
 * @param {string} statePath - Destination path of STATE.md.
 * @param {string} content - New STATE.md content (header optional; it is rebuilt).
 * @param {string} cwd - Project root used for frontmatter derivation.
 */
function writeStateMd(statePath, content, cwd) {
  const syncedContent = syncStateFrontmatter(content, cwd);
  fs.writeFileSync(statePath, normalizeMd(syncedContent), 'utf-8');
}
|
||||
|
||||
/**
 * `state json` command: emit STATE.md's YAML frontmatter as JSON.
 * When the file carries no (or empty) frontmatter, one is synthesized from
 * the body text instead, so callers always get a usable object.
 *
 * @param {string} cwd - Project root containing .planning/STATE.md.
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdStateJson(cwd, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (!fs.existsSync(statePath)) {
    output({ error: 'STATE.md not found' }, raw, 'STATE.md not found');
    return;
  }

  const content = fs.readFileSync(statePath, 'utf-8');
  const parsed = extractFrontmatter(content);
  const hasFrontmatter = parsed && Object.keys(parsed).length > 0;

  // Missing header: derive frontmatter from the body rather than returning nothing.
  const fm = hasFrontmatter
    ? parsed
    : buildStateFrontmatter(stripFrontmatter(content), cwd);

  output(fm, raw, JSON.stringify(fm, null, 2));
}
|
||||
|
||||
/**
 * Update STATE.md when a new phase begins execution.
 * Updates body text fields (Current focus, Status, Last Activity, Current Position)
 * and synchronizes frontmatter via writeStateMd.
 * Fixes: #1102 (plan counts), #1103 (status/last_activity), #1104 (body text).
 *
 * @param {string} cwd - Project root containing .planning/STATE.md.
 * @param {string|number} phaseNumber - Phase that is starting.
 * @param {string} [phaseName] - Human-readable phase name (optional).
 * @param {string|number} [planCount] - Number of plans in the phase (optional).
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdStateBeginPhase(cwd, phaseNumber, phaseName, planCount, raw) {
  const statePath = path.join(cwd, '.planning', 'STATE.md');
  if (!fs.existsSync(statePath)) {
    output({ error: 'STATE.md not found' }, raw);
    return;
  }

  let content = fs.readFileSync(statePath, 'utf-8');
  const today = new Date().toISOString().split('T')[0];
  const updated = [];

  // Apply one field replacement; records the field name when it took effect.
  // (Replaces seven copies of the same result/if/push pattern.)
  const applyField = (field, value) => {
    const result = stateReplaceField(content, field, value);
    if (result) { content = result; updated.push(field); }
  };

  applyField('Status', `Executing Phase ${phaseNumber}`);
  applyField('Last Activity', today);
  applyField('Last Activity Description', `Phase ${phaseNumber} execution started`);
  applyField('Current Phase', String(phaseNumber));
  if (phaseName) {
    applyField('Current Phase Name', phaseName);
  }
  // Execution always starts from the first plan.
  applyField('Current Plan', '1');
  if (planCount) {
    applyField('Total Plans in Phase', String(planCount));
  }

  // Update **Current focus:** body text line (#1104).
  const focusLabel = phaseName ? `Phase ${phaseNumber} — ${phaseName}` : `Phase ${phaseNumber}`;
  const focusPattern = /(\*\*Current focus:\*\*\s*).*/i;
  if (focusPattern.test(content)) {
    // Replacement callback avoids `$`-sequence interpretation inside focusLabel.
    content = content.replace(focusPattern, (_match, prefix) => `${prefix}${focusLabel}`);
    updated.push('Current focus');
  }

  // Rewrite the ## Current Position section body (#1104).
  const positionPattern = /(##\s*Current Position\s*\n)([\s\S]*?)(?=\n##|$)/i;
  const positionMatch = content.match(positionPattern);
  if (positionMatch) {
    const newPosition = `Phase: ${phaseNumber}${phaseName ? ` (${phaseName})` : ''} — EXECUTING\nPlan: 1 of ${planCount || '?'}\n`;
    content = content.replace(positionPattern, (_match, header) => `${header}${newPosition}`);
    updated.push('Current Position');
  }

  // Only rewrite the file (and resync frontmatter) when something changed.
  if (updated.length > 0) {
    writeStateMd(statePath, content, cwd);
  }

  output({ updated, phase: phaseNumber, phase_name: phaseName || null, plan_count: planCount || null }, raw, updated.length > 0 ? 'true' : 'false');
}
|
||||
|
||||
/**
 * Write a WAITING.json signal file when GSD hits a decision point.
 * External watchers (fswatch, polling, orchestrators) can detect this.
 * File is written to .planning/WAITING.json (or .gsd/WAITING.json if .gsd exists).
 * Fixes #1034.
 *
 * @param {string} cwd - Project root.
 * @param {string} [type] - Signal type; defaults to 'decision_point'.
 * @param {string} [question] - Question being asked of the user.
 * @param {string} [options] - Pipe-separated answer options ("A|B|C").
 * @param {string} [phase] - Phase identifier, if any.
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdSignalWaiting(cwd, type, question, options, phase, raw) {
  // Prefer .gsd/ when it exists; otherwise fall back to the standard .planning/ dir.
  const hasGsdDir = fs.existsSync(path.join(cwd, '.gsd'));
  const gsdDir = path.join(cwd, hasGsdDir ? '.gsd' : '.planning');
  const waitingPath = path.join(gsdDir, 'WAITING.json');

  // Pipe-separated options string becomes a trimmed array.
  const parsedOptions = options ? options.split('|').map((opt) => opt.trim()) : [];

  const signal = {
    status: 'waiting',
    type: type || 'decision_point',
    question: question || null,
    options: parsedOptions,
    since: new Date().toISOString(),
    phase: phase || null,
  };

  try {
    fs.mkdirSync(gsdDir, { recursive: true });
    fs.writeFileSync(waitingPath, JSON.stringify(signal, null, 2), 'utf-8');
    output({ signaled: true, path: waitingPath }, raw, 'true');
  } catch (e) {
    output({ signaled: false, error: e.message }, raw, 'false');
  }
}
|
||||
|
||||
/**
 * Remove the WAITING.json signal file when user answers and agent resumes.
 * Both candidate locations (.gsd/ and .planning/) are cleaned up.
 *
 * @param {string} cwd - Project root.
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdSignalResume(cwd, raw) {
  const candidates = ['.gsd', '.planning'].map((dir) => path.join(cwd, dir, 'WAITING.json'));

  let removed = false;
  for (const candidate of candidates) {
    if (!fs.existsSync(candidate)) continue;
    try {
      fs.unlinkSync(candidate);
      removed = true;
    } catch {
      // Best-effort cleanup: a failed unlink (e.g. permissions) is not fatal.
    }
  }

  output({ resumed: true, removed }, raw, removed ? 'true' : 'false');
}
|
||||
|
||||
// Public surface of the state module: STATE.md field helpers
// (stateExtractField/stateReplaceField), the frontmatter-synchronized write
// path (writeStateMd), and the `state ...` / `signal-*` CLI command
// implementations consumed by gsd-tools.cjs.
module.exports = {
  stateExtractField,
  stateReplaceField,
  writeStateMd,
  cmdStateLoad,
  cmdStateGet,
  cmdStatePatch,
  cmdStateUpdate,
  cmdStateAdvancePlan,
  cmdStateRecordMetric,
  cmdStateUpdateProgress,
  cmdStateAddDecision,
  cmdStateAddBlocker,
  cmdStateResolveBlocker,
  cmdStateRecordSession,
  cmdStateSnapshot,
  cmdStateJson,
  cmdStateBeginPhase,
  cmdSignalWaiting,
  cmdSignalResume,
};
|
||||
222
get-shit-done/bin/lib/template.cjs
Normal file
222
get-shit-done/bin/lib/template.cjs
Normal file
@@ -0,0 +1,222 @@
|
||||
/**
|
||||
* Template — Template selection and fill operations
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { normalizePhaseName, findPhaseInternal, generateSlugInternal, normalizeMd, toPosixPath, output, error } = require('./core.cjs');
|
||||
const { reconstructFrontmatter } = require('./frontmatter.cjs');
|
||||
|
||||
/**
 * `template select` command: pick a SUMMARY template tier for a plan file.
 * Heuristics on the plan content (task heading count, mentions of "decision",
 * number of distinct backticked file paths) choose between the minimal,
 * standard, and complex templates. Any read error falls back to standard.
 *
 * @param {string} cwd - Project root.
 * @param {string} planPath - Plan file path (relative to cwd, or absolute).
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdTemplateSelect(cwd, planPath, raw) {
  if (!planPath) {
    error('plan-path required');
  }

  try {
    // Honor absolute paths too, consistent with the verify-* commands.
    const fullPath = path.isAbsolute(planPath) ? planPath : path.join(cwd, planPath);
    const content = fs.readFileSync(fullPath, 'utf-8');

    // Heuristic 1: number of "### Task N" headings.
    const taskMatch = content.match(/###\s*Task\s*\d+/g) || [];
    const taskCount = taskMatch.length;

    // Heuristic 2: whether the plan mentions decisions at all.
    const decisionMatch = content.match(/decision/gi) || [];
    const hasDecisions = decisionMatch.length > 0;

    // Heuristic 3: count distinct backticked file paths (must contain '/', not URLs).
    const fileMentions = new Set();
    const filePattern = /`([^`]+\.[a-zA-Z]+)`/g;
    let m;
    while ((m = filePattern.exec(content)) !== null) {
      if (m[1].includes('/') && !m[1].startsWith('http')) {
        fileMentions.add(m[1]);
      }
    }
    const fileCount = fileMentions.size;

    let template = 'templates/summary-standard.md';
    let type = 'standard';

    if (taskCount <= 2 && fileCount <= 3 && !hasDecisions) {
      template = 'templates/summary-minimal.md';
      type = 'minimal';
    } else if (hasDecisions || fileCount > 6 || taskCount > 5) {
      template = 'templates/summary-complex.md';
      type = 'complex';
    }

    const result = { template, type, taskCount, fileCount, hasDecisions };
    output(result, raw, template);
  } catch (e) {
    // Unreadable/missing plan: fall back to the standard template.
    output({ template: 'templates/summary-standard.md', type: 'standard', error: e.message }, raw, 'templates/summary-standard.md');
  }
}
|
||||
|
||||
/**
 * `template fill` command: scaffold a SUMMARY, PLAN, or VERIFICATION file
 * inside a phase directory with pre-filled frontmatter and a skeleton body.
 * Refuses to overwrite an existing file.
 *
 * @param {string} cwd - Project root.
 * @param {string} templateType - One of 'summary', 'plan', 'verification'.
 * @param {object} options - { phase (required), name, plan, type, wave, fields }.
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdTemplateFill(cwd, templateType, options, raw) {
  if (!templateType) { error('template type required: summary, plan, or verification'); }
  if (!options.phase) { error('--phase required'); }

  const phaseInfo = findPhaseInternal(cwd, options.phase);
  if (!phaseInfo || !phaseInfo.found) { output({ error: 'Phase not found', phase: options.phase }, raw); return; }

  const padded = normalizePhaseName(options.phase);
  const today = new Date().toISOString().split('T')[0];
  const phaseName = options.name || phaseInfo.phase_name || 'Unnamed';
  const phaseSlug = phaseInfo.phase_slug || generateSlugInternal(phaseName);
  const phaseId = `${padded}-${phaseSlug}`;
  const planNum = (options.plan || '01').padStart(2, '0');
  // Caller-supplied frontmatter overrides, spread last so they win.
  const fields = options.fields || {};

  let frontmatter, body, fileName;

  switch (templateType) {
    case 'summary': {
      frontmatter = {
        phase: phaseId,
        plan: planNum,
        subsystem: '[primary category]',
        tags: [],
        provides: [],
        affects: [],
        'tech-stack': { added: [], patterns: [] },
        'key-files': { created: [], modified: [] },
        'key-decisions': [],
        'patterns-established': [],
        duration: '[X]min',
        completed: today,
        ...fields,
      };
      body = [
        `# Phase ${options.phase}: ${phaseName} Summary`,
        '',
        '**[Substantive one-liner describing outcome]**',
        '',
        '## Performance',
        '- **Duration:** [time]',
        '- **Tasks:** [count completed]',
        '- **Files modified:** [count]',
        '',
        '## Accomplishments',
        '- [Key outcome 1]',
        '- [Key outcome 2]',
        '',
        '## Task Commits',
        '1. **Task 1: [task name]** - `hash`',
        '',
        '## Files Created/Modified',
        '- `path/to/file.ts` - What it does',
        '',
        '## Decisions & Deviations',
        '[Key decisions or "None - followed plan as specified"]',
        '',
        '## Next Phase Readiness',
        '[What\'s ready for next phase]',
      ].join('\n');
      fileName = `${padded}-${planNum}-SUMMARY.md`;
      break;
    }
    case 'plan': {
      const planType = options.type || 'execute';
      // Radix 10 — parseInt without a radix is a latent bug.
      const wave = parseInt(options.wave, 10) || 1;
      frontmatter = {
        phase: phaseId,
        plan: planNum,
        type: planType,
        wave,
        depends_on: [],
        files_modified: [],
        autonomous: true,
        user_setup: [],
        must_haves: { truths: [], artifacts: [], key_links: [] },
        ...fields,
      };
      body = [
        `# Phase ${options.phase} Plan ${planNum}: [Title]`,
        '',
        '## Objective',
        '- **What:** [What this plan builds]',
        '- **Why:** [Why it matters for the phase goal]',
        '- **Output:** [Concrete deliverable]',
        '',
        '## Context',
        '@.planning/PROJECT.md',
        '@.planning/ROADMAP.md',
        '@.planning/STATE.md',
        '',
        '## Tasks',
        '',
        '<task type="code">',
        ' <name>[Task name]</name>',
        ' <files>[file paths]</files>',
        ' <action>[What to do]</action>',
        ' <verify>[How to verify]</verify>',
        ' <done>[Definition of done]</done>',
        '</task>',
        '',
        '## Verification',
        '[How to verify this plan achieved its objective]',
        '',
        '## Success Criteria',
        '- [ ] [Criterion 1]',
        '- [ ] [Criterion 2]',
      ].join('\n');
      fileName = `${padded}-${planNum}-PLAN.md`;
      break;
    }
    case 'verification': {
      frontmatter = {
        phase: phaseId,
        verified: new Date().toISOString(),
        status: 'pending',
        score: '0/0 must-haves verified',
        ...fields,
      };
      body = [
        `# Phase ${options.phase}: ${phaseName} — Verification`,
        '',
        '## Observable Truths',
        '| # | Truth | Status | Evidence |',
        '|---|-------|--------|----------|',
        '| 1 | [Truth] | pending | |',
        '',
        '## Required Artifacts',
        '| Artifact | Expected | Status | Details |',
        '|----------|----------|--------|---------|',
        '| [path] | [what] | pending | |',
        '',
        '## Key Link Verification',
        '| From | To | Via | Status | Details |',
        '|------|----|----|--------|---------|',
        '| [source] | [target] | [connection] | pending | |',
        '',
        '## Requirements Coverage',
        '| Requirement | Status | Blocking Issue |',
        '|-------------|--------|----------------|',
        '| [req] | pending | |',
        '',
        '## Result',
        '[Pending verification]',
      ].join('\n');
      fileName = `${padded}-VERIFICATION.md`;
      break;
    }
    default:
      error(`Unknown template type: ${templateType}. Available: summary, plan, verification`);
      return;
  }

  const fullContent = `---\n${reconstructFrontmatter(frontmatter)}\n---\n\n${body}\n`;
  const outPath = path.join(cwd, phaseInfo.directory, fileName);

  // Never clobber an existing document.
  if (fs.existsSync(outPath)) {
    output({ error: 'File already exists', path: toPosixPath(path.relative(cwd, outPath)) }, raw);
    return;
  }

  fs.writeFileSync(outPath, normalizeMd(fullContent), 'utf-8');
  const relPath = toPosixPath(path.relative(cwd, outPath));
  output({ created: true, path: relPath, template: templateType }, raw, relPath);
}
|
||||
|
||||
// Public API of the template module: summary-template tier selection and
// SUMMARY/PLAN/VERIFICATION file scaffolding.
module.exports = { cmdTemplateSelect, cmdTemplateFill };
|
||||
842
get-shit-done/bin/lib/verify.cjs
Normal file
842
get-shit-done/bin/lib/verify.cjs
Normal file
@@ -0,0 +1,842 @@
|
||||
/**
|
||||
* Verify — Verification suite, consistency, and health validation
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const { safeReadFile, normalizePhaseName, execGit, findPhaseInternal, getMilestoneInfo, stripShippedMilestones, extractCurrentMilestone, output, error } = require('./core.cjs');
|
||||
const { extractFrontmatter, parseMustHavesBlock } = require('./frontmatter.cjs');
|
||||
const { writeStateMd } = require('./state.cjs');
|
||||
|
||||
/**
 * `verify-summary` command: sanity-check a SUMMARY.md produced after a plan.
 * Four checks: (1) file exists; (2) spot-check up to checkFileCount files it
 * mentions actually exist on disk; (3) at least one referenced commit hash
 * resolves to a real commit; (4) verdict of any self-check section.
 *
 * @param {string} cwd - Project root.
 * @param {string} summaryPath - SUMMARY.md path relative to cwd.
 * @param {number} [checkFileCount=2] - How many mentioned files to spot-check.
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdVerifySummary(cwd, summaryPath, checkFileCount, raw) {
  if (!summaryPath) {
    error('summary-path required');
  }

  const fullPath = path.join(cwd, summaryPath);
  const checkCount = checkFileCount || 2;

  // Check 1: Summary exists
  if (!fs.existsSync(fullPath)) {
    const result = {
      passed: false,
      checks: {
        summary_exists: false,
        files_created: { checked: 0, found: 0, missing: [] },
        commits_exist: false,
        self_check: 'not_found',
      },
      errors: ['SUMMARY.md not found'],
    };
    output(result, raw, 'failed');
    return;
  }

  const content = fs.readFileSync(fullPath, 'utf-8');
  const errors = [];

  // Check 2: Spot-check files mentioned in summary (backticked paths and
  // "Created:/Modified:/..." prefixed paths; URLs and bare names excluded).
  const mentionedFiles = new Set();
  const patterns = [
    /`([^`]+\.[a-zA-Z]+)`/g,
    /(?:Created|Modified|Added|Updated|Edited):\s*`?([^\s`]+\.[a-zA-Z]+)`?/gi,
  ];

  for (const pattern of patterns) {
    let m;
    while ((m = pattern.exec(content)) !== null) {
      const filePath = m[1];
      if (filePath && !filePath.startsWith('http') && filePath.includes('/')) {
        mentionedFiles.add(filePath);
      }
    }
  }

  const filesToCheck = Array.from(mentionedFiles).slice(0, checkCount);
  const missing = [];
  for (const file of filesToCheck) {
    if (!fs.existsSync(path.join(cwd, file))) {
      missing.push(file);
    }
  }

  // Check 3: Commits exist — probe up to 3 hash-looking tokens via git.
  const commitHashPattern = /\b[0-9a-f]{7,40}\b/g;
  const hashes = content.match(commitHashPattern) || [];
  let commitsExist = false;
  if (hashes.length > 0) {
    for (const hash of hashes.slice(0, 3)) {
      const result = execGit(cwd, ['cat-file', '-t', hash]);
      // Trim to tolerate a trailing newline in git's output — matches the
      // comparison in cmdVerifyCommits (previously compared untrimmed).
      if (result.exitCode === 0 && result.stdout.trim() === 'commit') {
        commitsExist = true;
        break;
      }
    }
  }

  // Check 4: Self-check section verdict (fail markers win over pass markers).
  let selfCheck = 'not_found';
  const selfCheckPattern = /##\s*(?:Self[- ]?Check|Verification|Quality Check)/i;
  if (selfCheckPattern.test(content)) {
    const passPattern = /(?:all\s+)?(?:pass|✓|✅|complete|succeeded)/i;
    const failPattern = /(?:fail|✗|❌|incomplete|blocked)/i;
    const checkSection = content.slice(content.search(selfCheckPattern));
    if (failPattern.test(checkSection)) {
      selfCheck = 'failed';
    } else if (passPattern.test(checkSection)) {
      selfCheck = 'passed';
    }
  }

  if (missing.length > 0) errors.push('Missing files: ' + missing.join(', '));
  if (!commitsExist && hashes.length > 0) errors.push('Referenced commit hashes not found in git history');
  if (selfCheck === 'failed') errors.push('Self-check section indicates failure');

  const checks = {
    summary_exists: true,
    files_created: { checked: filesToCheck.length, found: filesToCheck.length - missing.length, missing },
    commits_exist: commitsExist,
    self_check: selfCheck,
  };

  // Note: unresolved commit hashes are reported in errors but do not fail
  // the overall check — preserved from the original behavior.
  const passed = missing.length === 0 && selfCheck !== 'failed';
  const result = { passed, checks, errors };
  output(result, raw, passed ? 'passed' : 'failed');
}
|
||||
|
||||
/**
 * `verify plan-structure` command: validate a PLAN.md file's shape.
 * Checks required frontmatter fields, the presence and completeness of
 * <task> elements, wave/depends_on consistency, and that plans containing
 * checkpoint tasks are not marked autonomous.
 *
 * @param {string} cwd - Project root.
 * @param {string} filePath - Plan file path (relative to cwd, or absolute).
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdVerifyPlanStructure(cwd, filePath, raw) {
  if (!filePath) { error('file path required'); }
  const fullPath = path.isAbsolute(filePath) ? filePath : path.join(cwd, filePath);
  const content = safeReadFile(fullPath);
  if (!content) { output({ error: 'File not found', path: filePath }, raw); return; }

  // Guard against a falsy return for files with no frontmatter (cmdStateJson
  // checks `!fm` for the same helper) — report missing fields, don't crash.
  const fm = extractFrontmatter(content) || {};
  const errors = [];
  const warnings = [];

  // Check required frontmatter fields
  const required = ['phase', 'plan', 'type', 'wave', 'depends_on', 'files_modified', 'autonomous', 'must_haves'];
  for (const field of required) {
    if (fm[field] === undefined) errors.push(`Missing required frontmatter field: ${field}`);
  }

  // Parse and check task elements: <name>/<action> are required (errors),
  // <files>/<verify>/<done> are recommended (warnings).
  const taskPattern = /<task[^>]*>([\s\S]*?)<\/task>/g;
  const tasks = [];
  let taskMatch;
  while ((taskMatch = taskPattern.exec(content)) !== null) {
    const taskContent = taskMatch[1];
    const nameMatch = taskContent.match(/<name>([\s\S]*?)<\/name>/);
    const taskName = nameMatch ? nameMatch[1].trim() : 'unnamed';
    const hasFiles = /<files>/.test(taskContent);
    const hasAction = /<action>/.test(taskContent);
    const hasVerify = /<verify>/.test(taskContent);
    const hasDone = /<done>/.test(taskContent);

    if (!nameMatch) errors.push('Task missing <name> element');
    if (!hasAction) errors.push(`Task '${taskName}' missing <action>`);
    if (!hasVerify) warnings.push(`Task '${taskName}' missing <verify>`);
    if (!hasDone) warnings.push(`Task '${taskName}' missing <done>`);
    if (!hasFiles) warnings.push(`Task '${taskName}' missing <files>`);

    tasks.push({ name: taskName, hasFiles, hasAction, hasVerify, hasDone });
  }

  if (tasks.length === 0) warnings.push('No <task> elements found');

  // Wave/depends_on consistency: later waves should declare dependencies.
  if (fm.wave && parseInt(fm.wave, 10) > 1 && (!fm.depends_on || (Array.isArray(fm.depends_on) && fm.depends_on.length === 0))) {
    warnings.push('Wave > 1 but depends_on is empty');
  }

  // Autonomous/checkpoint consistency: checkpoints require a human, so the
  // plan cannot claim to be autonomous.
  const hasCheckpoints = /<task\s+type=["']?checkpoint/.test(content);
  if (hasCheckpoints && fm.autonomous !== 'false' && fm.autonomous !== false) {
    errors.push('Has checkpoint tasks but autonomous is not false');
  }

  output({
    valid: errors.length === 0,
    errors,
    warnings,
    task_count: tasks.length,
    tasks,
    frontmatter_fields: Object.keys(fm),
  }, raw, errors.length === 0 ? 'valid' : 'invalid');
}
|
||||
|
||||
/**
 * `verify phase-completeness` command: every *-PLAN.md in a phase directory
 * should have a matching *-SUMMARY.md (same ID prefix). Plans lacking a
 * summary are errors; summaries lacking a plan are warnings.
 *
 * @param {string} cwd - Project root.
 * @param {string|number} phase - Phase identifier to look up.
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdVerifyPhaseCompleteness(cwd, phase, raw) {
  if (!phase) { error('phase required'); }
  const phaseInfo = findPhaseInternal(cwd, phase);
  if (!phaseInfo || !phaseInfo.found) {
    output({ error: 'Phase not found', phase }, raw);
    return;
  }

  const phaseDir = path.join(cwd, phaseInfo.directory);
  let entries;
  try {
    entries = fs.readdirSync(phaseDir);
  } catch {
    output({ error: 'Cannot read phase directory' }, raw);
    return;
  }

  const planSuffix = /-PLAN\.md$/i;
  const summarySuffix = /-SUMMARY\.md$/i;
  const planFiles = entries.filter((name) => planSuffix.test(name));
  const summaryFiles = entries.filter((name) => summarySuffix.test(name));

  // A plan and its summary share the same ID prefix (text before the suffix).
  const planIds = new Set(planFiles.map((name) => name.replace(planSuffix, '')));
  const summaryIds = new Set(summaryFiles.map((name) => name.replace(summarySuffix, '')));

  const incompletePlans = Array.from(planIds).filter((id) => !summaryIds.has(id));
  const orphanSummaries = Array.from(summaryIds).filter((id) => !planIds.has(id));

  const errors = [];
  const warnings = [];
  if (incompletePlans.length > 0) {
    errors.push(`Plans without summaries: ${incompletePlans.join(', ')}`);
  }
  if (orphanSummaries.length > 0) {
    warnings.push(`Summaries without plans: ${orphanSummaries.join(', ')}`);
  }

  const isComplete = errors.length === 0;
  output({
    complete: isComplete,
    phase: phaseInfo.phase_number,
    plan_count: planFiles.length,
    summary_count: summaryFiles.length,
    incomplete_plans: incompletePlans,
    orphan_summaries: orphanSummaries,
    errors,
    warnings,
  }, raw, isComplete ? 'complete' : 'incomplete');
}
|
||||
|
||||
/**
 * `verify references` command: check that file paths referenced in a document
 * actually exist. Two reference styles are scanned: @-references (@path/to/x)
 * and backticked paths with an extension; URLs and template placeholders are
 * ignored.
 *
 * @param {string} cwd - Project root used to resolve relative references.
 * @param {string} filePath - Document path (relative to cwd, or absolute).
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdVerifyReferences(cwd, filePath, raw) {
  if (!filePath) { error('file path required'); }
  const fullPath = path.isAbsolute(filePath) ? filePath : path.join(cwd, filePath);
  const content = safeReadFile(fullPath);
  if (!content) { output({ error: 'File not found', path: filePath }, raw); return; }

  const found = [];
  const missing = [];

  // Find @-references: @path/to/file (must contain / to be a file path)
  const atRefs = content.match(/@([^\s\n,)]+\/[^\s\n,)]+)/g) || [];
  for (const ref of atRefs) {
    const cleanRef = ref.slice(1); // remove @
    // os.homedir() instead of process.env.HOME: HOME is unset on Windows,
    // where the user profile lives in USERPROFILE.
    const resolved = cleanRef.startsWith('~/')
      ? path.join(os.homedir(), cleanRef.slice(2))
      : path.join(cwd, cleanRef);
    if (fs.existsSync(resolved)) {
      found.push(cleanRef);
    } else {
      missing.push(cleanRef);
    }
  }

  // Find backtick file paths that look like real paths (contain / and have extension)
  const backtickRefs = content.match(/`([^`]+\/[^`]+\.[a-zA-Z]{1,10})`/g) || [];
  for (const ref of backtickRefs) {
    const cleanRef = ref.slice(1, -1); // remove backticks
    // Skip URLs and template placeholders (${...}, {{...}}).
    if (cleanRef.startsWith('http') || cleanRef.includes('${') || cleanRef.includes('{{')) continue;
    if (found.includes(cleanRef) || missing.includes(cleanRef)) continue; // dedup
    const resolved = path.join(cwd, cleanRef);
    if (fs.existsSync(resolved)) {
      found.push(cleanRef);
    } else {
      missing.push(cleanRef);
    }
  }

  output({
    valid: missing.length === 0,
    found: found.length,
    missing,
    total: found.length + missing.length,
  }, raw, missing.length === 0 ? 'valid' : 'invalid');
}
|
||||
|
||||
/**
 * `verify commits` command: confirm each supplied hash names a real commit
 * object in the repository (probed with `git cat-file -t`).
 *
 * @param {string} cwd - Repository root.
 * @param {string[]} hashes - Candidate commit hashes (at least one required).
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdVerifyCommits(cwd, hashes, raw) {
  if (!hashes || hashes.length === 0) { error('At least one commit hash required'); }

  const valid = [];
  const invalid = [];
  for (const hash of hashes) {
    const probe = execGit(cwd, ['cat-file', '-t', hash]);
    const isCommit = probe.exitCode === 0 && probe.stdout.trim() === 'commit';
    (isCommit ? valid : invalid).push(hash);
  }

  const allValid = invalid.length === 0;
  output({
    all_valid: allValid,
    valid,
    invalid,
    total: hashes.length,
  }, raw, allValid ? 'valid' : 'invalid');
}
|
||||
|
||||
/**
 * `verify artifacts` command: check each must_haves.artifacts entry from a
 * plan's frontmatter — the file must exist, meet min_lines, contain any
 * required pattern, and include all listed exports.
 *
 * @param {string} cwd - Project root.
 * @param {string} planFilePath - Plan file path (relative to cwd, or absolute).
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdVerifyArtifacts(cwd, planFilePath, raw) {
  if (!planFilePath) { error('plan file path required'); }
  const fullPath = path.isAbsolute(planFilePath) ? planFilePath : path.join(cwd, planFilePath);
  const content = safeReadFile(fullPath);
  if (!content) { output({ error: 'File not found', path: planFilePath }, raw); return; }

  const artifacts = parseMustHavesBlock(content, 'artifacts');
  if (artifacts.length === 0) {
    output({ error: 'No must_haves.artifacts found in frontmatter', path: planFilePath }, raw);
    return;
  }

  const results = [];
  for (const artifact of artifacts) {
    // Only structured entries ({ path, ... }) are checkable; skip bare strings
    // and entries without a path.
    if (typeof artifact === 'string' || !artifact.path) continue;

    const artifactFullPath = path.join(cwd, artifact.path);
    const exists = fs.existsSync(artifactFullPath);
    const check = { path: artifact.path, exists, issues: [], passed: false };

    if (!exists) {
      check.issues.push('File not found');
    } else {
      const fileContent = safeReadFile(artifactFullPath) || '';
      const lineCount = fileContent.split('\n').length;

      if (artifact.min_lines && lineCount < artifact.min_lines) {
        check.issues.push(`Only ${lineCount} lines, need ${artifact.min_lines}`);
      }
      if (artifact.contains && !fileContent.includes(artifact.contains)) {
        check.issues.push(`Missing pattern: ${artifact.contains}`);
      }
      if (artifact.exports) {
        // Accept a single export name or a list of them.
        const requiredExports = Array.isArray(artifact.exports) ? artifact.exports : [artifact.exports];
        for (const exportName of requiredExports) {
          if (!fileContent.includes(exportName)) check.issues.push(`Missing export: ${exportName}`);
        }
      }
      check.passed = check.issues.length === 0;
    }

    results.push(check);
  }

  const passedCount = results.filter((r) => r.passed).length;
  output({
    all_passed: passedCount === results.length,
    passed: passedCount,
    total: results.length,
    artifacts: results,
  }, raw, passedCount === results.length ? 'valid' : 'invalid');
}
|
||||
|
||||
/**
 * `verify key-links` command: verify must_haves.key_links wiring from a
 * plan's frontmatter. A link with a `pattern` passes when the regex matches
 * the source file (or, failing that, the target file); without a pattern the
 * source merely has to mention the target path.
 *
 * @param {string} cwd - Project root.
 * @param {string} planFilePath - Plan file path (relative to cwd, or absolute).
 * @param {boolean} raw - Plain-text vs JSON output mode.
 */
function cmdVerifyKeyLinks(cwd, planFilePath, raw) {
  if (!planFilePath) { error('plan file path required'); }
  const fullPath = path.isAbsolute(planFilePath) ? planFilePath : path.join(cwd, planFilePath);
  const content = safeReadFile(fullPath);
  if (!content) { output({ error: 'File not found', path: planFilePath }, raw); return; }

  const keyLinks = parseMustHavesBlock(content, 'key_links');
  if (keyLinks.length === 0) {
    output({ error: 'No must_haves.key_links found in frontmatter', path: planFilePath }, raw);
    return;
  }

  // Decide one link's outcome: returns { verified, detail }.
  const evaluateLink = (link) => {
    const sourceContent = safeReadFile(path.join(cwd, link.from || ''));
    if (!sourceContent) {
      return { verified: false, detail: 'Source file not found' };
    }

    if (!link.pattern) {
      // No pattern: the source just has to reference the target path.
      return sourceContent.includes(link.to || '')
        ? { verified: true, detail: 'Target referenced in source' }
        : { verified: false, detail: 'Target not referenced in source' };
    }

    let regex;
    try {
      regex = new RegExp(link.pattern);
    } catch {
      return { verified: false, detail: `Invalid regex pattern: ${link.pattern}` };
    }

    if (regex.test(sourceContent)) {
      return { verified: true, detail: 'Pattern found in source' };
    }
    const targetContent = safeReadFile(path.join(cwd, link.to || ''));
    if (targetContent && regex.test(targetContent)) {
      return { verified: true, detail: 'Pattern found in target' };
    }
    return { verified: false, detail: `Pattern "${link.pattern}" not found in source or target` };
  };

  const results = [];
  for (const link of keyLinks) {
    if (typeof link === 'string') continue; // only structured entries are checkable
    const { verified, detail } = evaluateLink(link);
    results.push({ from: link.from, to: link.to, via: link.via || '', verified, detail });
  }

  const verifiedCount = results.filter((r) => r.verified).length;
  output({
    all_verified: verifiedCount === results.length,
    verified: verifiedCount,
    total: results.length,
    links: results,
  }, raw, verifiedCount === results.length ? 'valid' : 'invalid');
}
|
||||
|
||||
/**
 * Validate consistency between ROADMAP.md and the phase directories on disk.
 *
 * Hard error only when ROADMAP.md is missing; everything else is a warning:
 *  - phases listed in ROADMAP.md with no matching directory, and vice versa
 *  - gaps in integer phase numbering and in per-phase plan numbering
 *  - SUMMARY.md files with no matching PLAN.md
 *  - plan files missing the 'wave' frontmatter field
 *
 * @param {string} cwd - Project root containing .planning/.
 * @param {boolean} raw - Output-format flag forwarded to output().
 */
function cmdValidateConsistency(cwd, raw) {
  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  const phasesDir = path.join(cwd, '.planning', 'phases');
  const errors = [];
  const warnings = [];

  // Check for ROADMAP — without it there is nothing to compare against.
  if (!fs.existsSync(roadmapPath)) {
    errors.push('ROADMAP.md not found');
    output({ passed: false, errors, warnings }, raw, 'failed');
    return;
  }

  const roadmapContentRaw = fs.readFileSync(roadmapPath, 'utf-8');
  const roadmapContent = extractCurrentMilestone(roadmapContentRaw, cwd);

  // Extract phases from ROADMAP (archived milestones already stripped).
  // Matches headings like "## Phase 3:", "### Phase 04A:", "#### Phase 2.1:".
  const roadmapPhases = new Set();
  const phasePattern = /#{2,4}\s*Phase\s+(\d+[A-Z]?(?:\.\d+)*)\s*:/gi;
  let m;
  while ((m = phasePattern.exec(roadmapContent)) !== null) {
    roadmapPhases.add(m[1]);
  }

  // Get phases on disk: leading number (e.g. "01", "2A", "3.1") of each
  // directory name under .planning/phases/.
  const diskPhases = new Set();
  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name);
    for (const dir of dirs) {
      const dm = dir.match(/^(\d+[A-Z]?(?:\.\d+)*)/i);
      if (dm) diskPhases.add(dm[1]);
    }
  } catch {} // best-effort: phases/ may not exist yet

  // Check: phases in ROADMAP but not on disk
  for (const p of roadmapPhases) {
    if (!diskPhases.has(p) && !diskPhases.has(normalizePhaseName(p))) {
      warnings.push(`Phase ${p} in ROADMAP.md but no directory on disk`);
    }
  }

  // Check: phases on disk but not in ROADMAP
  for (const p of diskPhases) {
    // Also compare the unpadded form, since ROADMAP may say "Phase 1"
    // for a directory named "01-...".
    const unpadded = String(parseInt(p, 10));
    if (!roadmapPhases.has(p) && !roadmapPhases.has(unpadded)) {
      warnings.push(`Phase ${p} exists on disk but not in ROADMAP.md`);
    }
  }

  // Check: sequential phase numbers (integers only; decimal sub-phases
  // like "3.1" are excluded from the gap check)
  const integerPhases = [...diskPhases]
    .filter(p => !p.includes('.'))
    .map(p => parseInt(p, 10))
    .sort((a, b) => a - b);

  for (let i = 1; i < integerPhases.length; i++) {
    if (integerPhases[i] !== integerPhases[i - 1] + 1) {
      warnings.push(`Gap in phase numbering: ${integerPhases[i - 1]} → ${integerPhases[i]}`);
    }
  }

  // Check: plan numbering within phases
  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort();

    for (const dir of dirs) {
      const phaseFiles = fs.readdirSync(path.join(phasesDir, dir));
      const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md')).sort();

      // Extract plan numbers (the two-digit segment before -PLAN.md);
      // lexical sort above equals numeric order because they're zero-padded.
      const planNums = plans.map(p => {
        const pm = p.match(/-(\d{2})-PLAN\.md$/);
        return pm ? parseInt(pm[1], 10) : null;
      }).filter(n => n !== null);

      for (let i = 1; i < planNums.length; i++) {
        if (planNums[i] !== planNums[i - 1] + 1) {
          warnings.push(`Gap in plan numbering in ${dir}: plan ${planNums[i - 1]} → ${planNums[i]}`);
        }
      }

      // Check: plans without summaries (completed plans)
      const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md'));
      const planIds = new Set(plans.map(p => p.replace('-PLAN.md', '')));
      const summaryIds = new Set(summaries.map(s => s.replace('-SUMMARY.md', '')));

      // Summary without matching plan is suspicious
      for (const sid of summaryIds) {
        if (!planIds.has(sid)) {
          warnings.push(`Summary ${sid}-SUMMARY.md in ${dir} has no matching PLAN.md`);
        }
      }
    }
  } catch {} // best-effort: phases/ may not exist

  // Check: frontmatter in plans has required fields
  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name);

    for (const dir of dirs) {
      const phaseFiles = fs.readdirSync(path.join(phasesDir, dir));
      const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md'));

      for (const plan of plans) {
        const content = fs.readFileSync(path.join(phasesDir, dir, plan), 'utf-8');
        const fm = extractFrontmatter(content);

        if (!fm.wave) {
          warnings.push(`${dir}/${plan}: missing 'wave' in frontmatter`);
        }
      }
    }
  } catch {} // best-effort: phases/ may not exist

  // Warnings alone do not fail the check — only hard errors do.
  const passed = errors.length === 0;
  output({ passed, errors, warnings, warning_count: warnings.length }, raw, passed ? 'passed' : 'failed');
}
|
||||
|
||||
/**
 * Comprehensive health check of the .planning/ workspace, with optional
 * automatic repair.
 *
 * Checks, in order: CWD-is-home guard; .planning/ exists; PROJECT.md and its
 * required sections; ROADMAP.md exists; STATE.md exists and references real
 * phases; config.json parses and has a valid model_profile; the
 * workflow.nyquist_validation key is present; phase directory naming;
 * orphaned PLANs (no SUMMARY); RESEARCH.md with a Validation Architecture
 * section but no VALIDATION.md; and ROADMAP-vs-disk phase consistency.
 *
 * Fix applied vs. the previous version: the local variable holding the raw
 * config.json text was named `raw`, shadowing the output-format parameter
 * `raw` — it is now `configText`.
 *
 * @param {string} cwd - Project root to inspect (must contain .planning/).
 * @param {{repair?: boolean}} options - When `repair` is true, repairable
 *   issues (missing/invalid config.json, missing STATE.md or stale phase
 *   references, absent nyquist_validation key) are fixed in place.
 * @param {boolean} raw - Output-format flag forwarded to output().
 */
function cmdValidateHealth(cwd, options, raw) {
  // Guard: detect if CWD is the home directory (likely accidental)
  const resolved = path.resolve(cwd);
  if (resolved === os.homedir()) {
    output({
      status: 'error',
      errors: [{ code: 'E010', message: `CWD is home directory (${resolved}) — health check would read the wrong .planning/ directory. Run from your project root instead.`, fix: 'cd into your project directory and retry' }],
      warnings: [],
      info: [{ code: 'I010', message: `Resolved CWD: ${resolved}` }],
      repairable_count: 0,
    }, raw);
    return;
  }

  const planningDir = path.join(cwd, '.planning');
  const projectPath = path.join(planningDir, 'PROJECT.md');
  const roadmapPath = path.join(planningDir, 'ROADMAP.md');
  const statePath = path.join(planningDir, 'STATE.md');
  const configPath = path.join(planningDir, 'config.json');
  const phasesDir = path.join(planningDir, 'phases');

  const errors = [];
  const warnings = [];
  const info = [];
  const repairs = [];

  // Helper to add an issue to the right severity bucket.
  const addIssue = (severity, code, message, fix, repairable = false) => {
    const issue = { code, message, fix, repairable };
    if (severity === 'error') errors.push(issue);
    else if (severity === 'warning') warnings.push(issue);
    else info.push(issue);
  };

  // ─── Check 1: .planning/ exists ───────────────────────────────────────────
  if (!fs.existsSync(planningDir)) {
    addIssue('error', 'E001', '.planning/ directory not found', 'Run /gsd:new-project to initialize');
    // Nothing else can be checked without the workspace — report and stop.
    output({
      status: 'broken',
      errors,
      warnings,
      info,
      repairable_count: 0,
    }, raw);
    return;
  }

  // ─── Check 2: PROJECT.md exists and has required sections ─────────────────
  if (!fs.existsSync(projectPath)) {
    addIssue('error', 'E002', 'PROJECT.md not found', 'Run /gsd:new-project to create');
  } else {
    const content = fs.readFileSync(projectPath, 'utf-8');
    const requiredSections = ['## What This Is', '## Core Value', '## Requirements'];
    for (const section of requiredSections) {
      if (!content.includes(section)) {
        addIssue('warning', 'W001', `PROJECT.md missing section: ${section}`, 'Add section manually');
      }
    }
  }

  // ─── Check 3: ROADMAP.md exists ───────────────────────────────────────────
  if (!fs.existsSync(roadmapPath)) {
    addIssue('error', 'E003', 'ROADMAP.md not found', 'Run /gsd:new-milestone to create roadmap');
  }

  // ─── Check 4: STATE.md exists and references valid phases ─────────────────
  if (!fs.existsSync(statePath)) {
    addIssue('error', 'E004', 'STATE.md not found', 'Run /gsd:health --repair to regenerate', true);
    repairs.push('regenerateState');
  } else {
    const stateContent = fs.readFileSync(statePath, 'utf-8');
    // Extract phase references from STATE.md ("Phase 3", "phase 2.1", ...)
    const phaseRefs = [...stateContent.matchAll(/[Pp]hase\s+(\d+(?:\.\d+)*)/g)].map(m => m[1]);
    // Get disk phases
    const diskPhases = new Set();
    try {
      const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
      for (const e of entries) {
        if (e.isDirectory()) {
          const m = e.name.match(/^(\d+(?:\.\d+)*)/);
          if (m) diskPhases.add(m[1]);
        }
      }
    } catch {} // best-effort: phases/ may not exist
    // Check for invalid references — compare as-is, zero-padded, and unpadded.
    for (const ref of phaseRefs) {
      const normalizedRef = String(parseInt(ref, 10)).padStart(2, '0');
      if (!diskPhases.has(ref) && !diskPhases.has(normalizedRef) && !diskPhases.has(String(parseInt(ref, 10)))) {
        // Only warn if phases dir has any content (not just an empty project)
        if (diskPhases.size > 0) {
          addIssue('warning', 'W002', `STATE.md references phase ${ref}, but only phases ${[...diskPhases].sort().join(', ')} exist`, 'Run /gsd:health --repair to regenerate STATE.md', true);
          if (!repairs.includes('regenerateState')) repairs.push('regenerateState');
        }
      }
    }
  }

  // ─── Check 5: config.json valid JSON + valid schema ───────────────────────
  if (!fs.existsSync(configPath)) {
    addIssue('warning', 'W003', 'config.json not found', 'Run /gsd:health --repair to create with defaults', true);
    repairs.push('createConfig');
  } else {
    try {
      // Named configText (not `raw`) so the output-format parameter `raw`
      // is not shadowed inside this block.
      const configText = fs.readFileSync(configPath, 'utf-8');
      const parsed = JSON.parse(configText);
      // Validate known fields
      const validProfiles = ['quality', 'balanced', 'budget', 'inherit'];
      if (parsed.model_profile && !validProfiles.includes(parsed.model_profile)) {
        addIssue('warning', 'W004', `config.json: invalid model_profile "${parsed.model_profile}"`, `Valid values: ${validProfiles.join(', ')}`);
      }
    } catch (err) {
      addIssue('error', 'E005', `config.json: JSON parse error - ${err.message}`, 'Run /gsd:health --repair to reset to defaults', true);
      repairs.push('resetConfig');
    }
  }

  // ─── Check 5b: Nyquist validation key presence ──────────────────────────
  if (fs.existsSync(configPath)) {
    try {
      const configRaw = fs.readFileSync(configPath, 'utf-8');
      const configParsed = JSON.parse(configRaw);
      if (configParsed.workflow && configParsed.workflow.nyquist_validation === undefined) {
        addIssue('warning', 'W008', 'config.json: workflow.nyquist_validation absent (defaults to enabled but agents may skip)', 'Run /gsd:health --repair to add key', true);
        if (!repairs.includes('addNyquistKey')) repairs.push('addNyquistKey');
      }
    } catch {} // parse failure already reported as E005 above
  }

  // ─── Check 6: Phase directory naming (NN-name format) ─────────────────────
  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    for (const e of entries) {
      if (e.isDirectory() && !e.name.match(/^\d{2}(?:\.\d+)*-[\w-]+$/)) {
        addIssue('warning', 'W005', `Phase directory "${e.name}" doesn't follow NN-name format`, 'Rename to match pattern (e.g., 01-setup)');
      }
    }
  } catch {} // best-effort: phases/ may not exist

  // ─── Check 7: Orphaned plans (PLAN without SUMMARY) ───────────────────────
  try {
    const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
    for (const e of entries) {
      if (!e.isDirectory()) continue;
      const phaseFiles = fs.readdirSync(path.join(phasesDir, e.name));
      const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md');
      const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md');
      const summaryBases = new Set(summaries.map(s => s.replace('-SUMMARY.md', '').replace('SUMMARY.md', '')));

      for (const plan of plans) {
        const planBase = plan.replace('-PLAN.md', '').replace('PLAN.md', '');
        if (!summaryBases.has(planBase)) {
          // Informational only: an unsummarized plan may simply be in flight.
          addIssue('info', 'I001', `${e.name}/${plan} has no SUMMARY.md`, 'May be in progress');
        }
      }
    }
  } catch {} // best-effort: phases/ may not exist

  // ─── Check 7b: Nyquist VALIDATION.md consistency ────────────────────────
  try {
    const phaseEntries = fs.readdirSync(phasesDir, { withFileTypes: true });
    for (const e of phaseEntries) {
      if (!e.isDirectory()) continue;
      const phaseFiles = fs.readdirSync(path.join(phasesDir, e.name));
      const hasResearch = phaseFiles.some(f => f.endsWith('-RESEARCH.md'));
      const hasValidation = phaseFiles.some(f => f.endsWith('-VALIDATION.md'));
      if (hasResearch && !hasValidation) {
        const researchFile = phaseFiles.find(f => f.endsWith('-RESEARCH.md'));
        const researchContent = fs.readFileSync(path.join(phasesDir, e.name, researchFile), 'utf-8');
        if (researchContent.includes('## Validation Architecture')) {
          addIssue('warning', 'W009', `Phase ${e.name}: has Validation Architecture in RESEARCH.md but no VALIDATION.md`, 'Re-run /gsd:plan-phase with --research to regenerate');
        }
      }
    }
  } catch {} // best-effort: phases/ may not exist

  // ─── Check 8: Run existing consistency checks ─────────────────────────────
  // Inline subset of cmdValidateConsistency
  if (fs.existsSync(roadmapPath)) {
    const roadmapContentRaw = fs.readFileSync(roadmapPath, 'utf-8');
    const roadmapContent = extractCurrentMilestone(roadmapContentRaw, cwd);
    const roadmapPhases = new Set();
    const phasePattern = /#{2,4}\s*Phase\s+(\d+[A-Z]?(?:\.\d+)*)\s*:/gi;
    let m;
    while ((m = phasePattern.exec(roadmapContent)) !== null) {
      roadmapPhases.add(m[1]);
    }

    const diskPhases = new Set();
    try {
      const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
      for (const e of entries) {
        if (e.isDirectory()) {
          const dm = e.name.match(/^(\d+[A-Z]?(?:\.\d+)*)/i);
          if (dm) diskPhases.add(dm[1]);
        }
      }
    } catch {} // best-effort: phases/ may not exist

    // Phases in ROADMAP but not on disk
    for (const p of roadmapPhases) {
      const padded = String(parseInt(p, 10)).padStart(2, '0');
      if (!diskPhases.has(p) && !diskPhases.has(padded)) {
        addIssue('warning', 'W006', `Phase ${p} in ROADMAP.md but no directory on disk`, 'Create phase directory or remove from roadmap');
      }
    }

    // Phases on disk but not in ROADMAP
    for (const p of diskPhases) {
      const unpadded = String(parseInt(p, 10));
      if (!roadmapPhases.has(p) && !roadmapPhases.has(unpadded)) {
        addIssue('warning', 'W007', `Phase ${p} exists on disk but not in ROADMAP.md`, 'Add to roadmap or remove directory');
      }
    }
  }

  // ─── Perform repairs if requested ─────────────────────────────────────────
  const repairActions = [];
  if (options.repair && repairs.length > 0) {
    for (const repair of repairs) {
      try {
        switch (repair) {
          case 'createConfig':
          case 'resetConfig': {
            const defaults = {
              model_profile: 'balanced',
              commit_docs: true,
              search_gitignored: false,
              branching_strategy: 'none',
              phase_branch_template: 'gsd/phase-{phase}-{slug}',
              milestone_branch_template: 'gsd/{milestone}-{slug}',
              workflow: {
                research: true,
                plan_check: true,
                verifier: true,
                nyquist_validation: true,
              },
              parallelization: true,
              brave_search: false,
            };
            fs.writeFileSync(configPath, JSON.stringify(defaults, null, 2), 'utf-8');
            repairActions.push({ action: repair, success: true, path: 'config.json' });
            break;
          }
          case 'regenerateState': {
            // Create timestamped backup before overwriting
            if (fs.existsSync(statePath)) {
              const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
              const backupPath = `${statePath}.bak-${timestamp}`;
              fs.copyFileSync(statePath, backupPath);
              repairActions.push({ action: 'backupState', success: true, path: backupPath });
            }
            // Generate minimal STATE.md from ROADMAP.md structure
            const milestone = getMilestoneInfo(cwd);
            let stateContent = `# Session State\n\n`;
            stateContent += `## Project Reference\n\n`;
            stateContent += `See: .planning/PROJECT.md\n\n`;
            stateContent += `## Position\n\n`;
            stateContent += `**Milestone:** ${milestone.version} ${milestone.name}\n`;
            stateContent += `**Current phase:** (determining...)\n`;
            stateContent += `**Status:** Resuming\n\n`;
            stateContent += `## Session Log\n\n`;
            stateContent += `- ${new Date().toISOString().split('T')[0]}: STATE.md regenerated by /gsd:health --repair\n`;
            writeStateMd(statePath, stateContent, cwd);
            repairActions.push({ action: repair, success: true, path: 'STATE.md' });
            break;
          }
          case 'addNyquistKey': {
            if (fs.existsSync(configPath)) {
              try {
                const configRaw = fs.readFileSync(configPath, 'utf-8');
                const configParsed = JSON.parse(configRaw);
                if (!configParsed.workflow) configParsed.workflow = {};
                if (configParsed.workflow.nyquist_validation === undefined) {
                  configParsed.workflow.nyquist_validation = true;
                  fs.writeFileSync(configPath, JSON.stringify(configParsed, null, 2), 'utf-8');
                }
                repairActions.push({ action: repair, success: true, path: 'config.json' });
              } catch (err) {
                repairActions.push({ action: repair, success: false, error: err.message });
              }
            }
            break;
          }
        }
      } catch (err) {
        // A failed repair is recorded, never thrown — the report must complete.
        repairActions.push({ action: repair, success: false, error: err.message });
      }
    }
  }

  // ─── Determine overall status ─────────────────────────────────────────────
  let status;
  if (errors.length > 0) {
    status = 'broken';
  } else if (warnings.length > 0) {
    status = 'degraded';
  } else {
    status = 'healthy';
  }

  const repairableCount = errors.filter(e => e.repairable).length +
    warnings.filter(w => w.repairable).length;

  output({
    status,
    errors,
    warnings,
    info,
    repairable_count: repairableCount,
    repairs_performed: repairActions.length > 0 ? repairActions : undefined,
  }, raw);
}
|
||||
|
||||
// Public API: verification/validation subcommand handlers, exported for the
// CLI dispatcher and for direct use in tests.
module.exports = {
  cmdVerifySummary,
  cmdVerifyPlanStructure,
  cmdVerifyPhaseCompleteness,
  cmdVerifyReferences,
  cmdVerifyCommits,
  cmdVerifyArtifacts,
  cmdVerifyKeyLinks,
  cmdValidateConsistency,
  cmdValidateHealth,
};
|
||||
Reference in New Issue
Block a user