chore: initial backup of Claude Code configuration
Includes: CLAUDE.md, settings.json, agents, commands, rules, skills, hooks, contexts, evals, get-shit-done, plugin configs (installed list and marketplace sources). Excludes credentials, runtime caches, telemetry, session data, and plugin binary cache.
This commit is contained in:
712
get-shit-done/bin/lib/core.cjs
Normal file
712
get-shit-done/bin/lib/core.cjs
Normal file
@@ -0,0 +1,712 @@
|
||||
/**
|
||||
* Core — Shared utilities, constants, and internal helpers
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { execSync, spawnSync } = require('child_process');
|
||||
const { MODEL_PROFILES } = require('./model-profiles.cjs');
|
||||
|
||||
// ─── Path helpers ────────────────────────────────────────────────────────────
|
||||
|
||||
/** Convert a relative path to forward-slash form so output is stable across OSes. */
function toPosixPath(p) {
  return p.replaceAll(path.sep, '/');
}
|
||||
|
||||
// ─── Output helpers ───────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Emit a command result on stdout and terminate the process with exit code 0.
 *
 * When `raw` is set and `rawValue` is defined, the value is printed verbatim.
 * Otherwise `result` is pretty-printed as JSON; payloads over 50KB (Claude
 * Code's Bash tool buffer limit) are spilled to a temp file and the path is
 * printed as `@file:<path>` so callers can detect and read it from disk.
 */
function output(result, raw, rawValue) {
  if (raw && rawValue !== undefined) {
    process.stdout.write(String(rawValue));
    process.exit(0);
  }
  const json = JSON.stringify(result, null, 2);
  if (json.length > 50000) {
    // Too large for the Bash tool's ~50KB stdout buffer — spill to a tmpfile.
    const tmpPath = path.join(require('os').tmpdir(), `gsd-${Date.now()}.json`);
    fs.writeFileSync(tmpPath, json, 'utf-8');
    process.stdout.write('@file:' + tmpPath);
  } else {
    process.stdout.write(json);
  }
  process.exit(0);
}
|
||||
|
||||
/** Print `message` to stderr (prefixed "Error: ") and exit with status 1. */
function error(message) {
  process.stderr.write(`Error: ${message}\n`);
  process.exit(1);
}
|
||||
|
||||
// ─── File & Config utilities ──────────────────────────────────────────────────
|
||||
|
||||
/**
 * Read a file as UTF-8, returning its contents, or null when the file is
 * missing or unreadable. Callers use the null to mean "treat as absent".
 */
function safeReadFile(filePath) {
  let contents = null;
  try {
    contents = fs.readFileSync(filePath, 'utf-8');
  } catch {
    // Missing/unreadable file is an expected case, not an error.
  }
  return contents;
}
|
||||
|
||||
/**
 * Load .planning/config.json from `cwd`, merged over built-in defaults.
 *
 * Supports both flat keys (e.g. "commit_docs") and legacy nested sections
 * ("planning", "git", "workflow"); when both are present the flat key wins.
 * Also migrates the deprecated "depth" key to "granularity" (rewriting the
 * config file in place, best-effort). Any read/parse failure silently
 * returns the defaults — this function never throws.
 *
 * @param {string} cwd - Project root containing the .planning directory.
 * @returns {object} Fully-populated config object.
 */
function loadConfig(cwd) {
  const configPath = path.join(cwd, '.planning', 'config.json');
  const defaults = {
    model_profile: 'balanced',
    commit_docs: true,
    search_gitignored: false,
    branching_strategy: 'none',
    phase_branch_template: 'gsd/phase-{phase}-{slug}',
    milestone_branch_template: 'gsd/{milestone}-{slug}',
    research: true,
    plan_checker: true,
    verifier: true,
    nyquist_validation: true,
    parallelization: true,
    brave_search: false,
    resolve_model_ids: false, // when true, resolve aliases (opus/sonnet/haiku) to full model IDs
  };

  try {
    const raw = fs.readFileSync(configPath, 'utf-8');
    const parsed = JSON.parse(raw);

    // Migrate deprecated "depth" key to "granularity" with value mapping.
    // The rewrite of config.json is best-effort: a read-only file is fine.
    if ('depth' in parsed && !('granularity' in parsed)) {
      const depthToGranularity = { quick: 'coarse', standard: 'standard', comprehensive: 'fine' };
      parsed.granularity = depthToGranularity[parsed.depth] || parsed.depth;
      delete parsed.depth;
      try { fs.writeFileSync(configPath, JSON.stringify(parsed, null, 2), 'utf-8'); } catch {}
    }

    // Read a key with flat-over-nested precedence: a top-level key wins over
    // the legacy nested form (e.g. "commit_docs" beats planning.commit_docs).
    const get = (key, nested) => {
      if (parsed[key] !== undefined) return parsed[key];
      if (nested && parsed[nested.section] && parsed[nested.section][nested.field] !== undefined) {
        return parsed[nested.section][nested.field];
      }
      return undefined;
    };

    // "parallelization" accepts either a boolean or an { enabled: bool } object.
    const parallelization = (() => {
      const val = get('parallelization');
      if (typeof val === 'boolean') return val;
      if (typeof val === 'object' && val !== null && 'enabled' in val) return val.enabled;
      return defaults.parallelization;
    })();

    return {
      model_profile: get('model_profile') ?? defaults.model_profile,
      commit_docs: get('commit_docs', { section: 'planning', field: 'commit_docs' }) ?? defaults.commit_docs,
      search_gitignored: get('search_gitignored', { section: 'planning', field: 'search_gitignored' }) ?? defaults.search_gitignored,
      branching_strategy: get('branching_strategy', { section: 'git', field: 'branching_strategy' }) ?? defaults.branching_strategy,
      phase_branch_template: get('phase_branch_template', { section: 'git', field: 'phase_branch_template' }) ?? defaults.phase_branch_template,
      milestone_branch_template: get('milestone_branch_template', { section: 'git', field: 'milestone_branch_template' }) ?? defaults.milestone_branch_template,
      research: get('research', { section: 'workflow', field: 'research' }) ?? defaults.research,
      // NOTE: the legacy nested field is "plan_check", not "plan_checker".
      plan_checker: get('plan_checker', { section: 'workflow', field: 'plan_check' }) ?? defaults.plan_checker,
      verifier: get('verifier', { section: 'workflow', field: 'verifier' }) ?? defaults.verifier,
      nyquist_validation: get('nyquist_validation', { section: 'workflow', field: 'nyquist_validation' }) ?? defaults.nyquist_validation,
      parallelization,
      brave_search: get('brave_search') ?? defaults.brave_search,
      resolve_model_ids: get('resolve_model_ids') ?? defaults.resolve_model_ids,
      model_overrides: parsed.model_overrides || null,
    };
  } catch {
    // Missing or malformed config.json → defaults.
    return defaults;
  }
}
|
||||
|
||||
// ─── Git utilities ────────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Check whether `targetPath` is ignored by .gitignore rules in `cwd`.
 *
 * Uses `git check-ignore --no-index`: --no-index evaluates .gitignore rules
 * regardless of whether the file is tracked. Without it, git reports tracked
 * files as "not ignored" even when .gitignore explicitly lists them — a common
 * source of confusion when .planning/ was committed before being added to
 * .gitignore.
 *
 * The path is passed as a separate argv element (no shell involved), so paths
 * containing spaces or other special characters are checked verbatim. The
 * previous implementation built a shell string and stripped "unsafe"
 * characters from the path, which silently checked a different path than the
 * one requested.
 *
 * @param {string} cwd - Repository working directory.
 * @param {string} targetPath - Path (relative to cwd or absolute) to test.
 * @returns {boolean} true when git reports the path as ignored; false when it
 *   is not ignored, git is unavailable, or `cwd` is not a repository.
 */
function isGitIgnored(cwd, targetPath) {
  const result = spawnSync('git', ['check-ignore', '-q', '--no-index', '--', targetPath], {
    cwd,
    stdio: 'pipe',
  });
  // Exit 0 = ignored; 1 = not ignored; 128 / spawn failure = treat as not ignored.
  return result.status === 0;
}
|
||||
|
||||
// ─── Markdown normalization ─────────────────────────────────────────────────
|
||||
|
||||
/**
 * Normalize markdown to fix common markdownlint violations.
 * Applied at write points so GSD-generated .planning/ files are IDE-friendly.
 *
 * Rules enforced:
 *   MD022 — Blank lines around headings
 *   MD031 — Blank lines around fenced code blocks
 *   MD032 — Blank lines around lists
 *   MD012 — No multiple consecutive blank lines (collapsed to a single blank line)
 *   MD047 — Files end with a single newline
 *
 * Lines inside fenced code blocks are left untouched by the MD022/MD032
 * passes: a `# comment` or `- flag` inside a shell sample is code, not a
 * heading or list item. (NOTE(review): the final MD012 regex collapse is
 * still applied globally, including inside fences — preserved from the
 * original behavior; confirm whether fenced blank runs should be exempt.)
 *
 * @param {string} content - Markdown text (any line-ending style).
 * @returns {string} Normalized markdown; non-string or empty input is
 *   returned unchanged.
 */
function normalizeMd(content) {
  if (!content || typeof content !== 'string') return content;

  // Normalize line endings to LF for consistent processing.
  let text = content.replace(/\r\n/g, '\n');

  const lines = text.split('\n');
  const result = [];
  const HEADING = /^#{1,6}\s/;
  const LIST_ITEM = /^(\s*[-*+]\s|\s*\d+\.\s)/;

  // Fence parity is tracked in a single pass. The previous implementation
  // rescanned all prior lines per ``` line (O(n²)) and — worse — applied the
  // MD022/MD032 fixes to lines *inside* fenced blocks, injecting blank lines
  // into code samples containing '#' or '-' lines.
  let fenceCount = 0;

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    const prev = i > 0 ? lines[i - 1] : '';
    const prevTrimmed = prev.trimEnd();
    const trimmed = line.trimEnd();
    const isFence = /^```/.test(trimmed);
    const insideFence = fenceCount % 2 === 1; // state *before* this line

    // MD031: blank line before an opening fence.
    if (isFence && !insideFence && i > 0 && prevTrimmed !== '') {
      result.push('');
    }

    if (!insideFence && !isFence) {
      // MD022: blank line before headings (skip first line and frontmatter '---').
      if (HEADING.test(trimmed) && i > 0 && prevTrimmed !== '' && prevTrimmed !== '---') {
        result.push('');
      }
      // MD032: blank line before the first item of a list block.
      if (LIST_ITEM.test(line) && i > 0 &&
          prevTrimmed !== '' && !LIST_ITEM.test(prev) &&
          prevTrimmed !== '---') {
        result.push('');
      }
    }

    result.push(line);
    if (isFence) fenceCount++;

    // MD022: blank line after headings.
    if (!insideFence && !isFence && HEADING.test(trimmed) && i < lines.length - 1) {
      const next = lines[i + 1];
      if (next !== undefined && next.trimEnd() !== '') {
        result.push('');
      }
    }

    // MD031: blank line after a closing fence (bare ``` only).
    if (isFence && insideFence && trimmed === '```' && i < lines.length - 1) {
      const next = lines[i + 1];
      if (next !== undefined && next.trimEnd() !== '') {
        result.push('');
      }
    }

    // MD032: blank line after the last item of a list block, unless the next
    // line is another item or an indented continuation line.
    if (!insideFence && !isFence && LIST_ITEM.test(line) && i < lines.length - 1) {
      const next = lines[i + 1];
      if (next !== undefined && next.trimEnd() !== '' &&
          !LIST_ITEM.test(next) &&
          !/^\s/.test(next)) {
        result.push('');
      }
    }
  }

  text = result.join('\n');

  // MD012: collapse 3+ consecutive newlines (2+ blank lines) to one blank line.
  text = text.replace(/\n{3,}/g, '\n\n');

  // MD047: ensure file ends with exactly one newline.
  text = text.replace(/\n*$/, '\n');

  return text;
}
|
||||
|
||||
/**
 * True when line index `i` falls inside an open fenced code block, i.e. an
 * odd number of ``` fence lines appear strictly before it.
 */
function isInsideFencedBlock(lines, i) {
  const fencesBefore = lines
    .slice(0, i)
    .filter((l) => /^```/.test(l.trimEnd()))
    .length;
  return fencesBefore % 2 === 1;
}
|
||||
|
||||
/**
 * True when the ``` line at index `i` closes a block: counting fence lines up
 * to and including `i` yields an even total.
 */
function isClosingFence(lines, i) {
  let total = 0;
  for (const l of lines.slice(0, i + 1)) {
    if (/^```/.test(l.trimEnd())) total += 1;
  }
  return total % 2 === 0;
}
|
||||
|
||||
/**
 * Run a git subcommand via spawnSync and return a plain result object.
 * Never throws: a failed spawn surfaces as exitCode 1 with empty output.
 *
 * @param {string} cwd - Working directory for the git invocation.
 * @param {string[]} args - Argument vector passed straight to git.
 * @returns {{exitCode: number, stdout: string, stderr: string}}
 */
function execGit(cwd, args) {
  const proc = spawnSync('git', args, { cwd, stdio: 'pipe', encoding: 'utf-8' });
  const clean = (stream) => (stream ?? '').toString().trim();
  return {
    exitCode: proc.status ?? 1,
    stdout: clean(proc.stdout),
    stderr: clean(proc.stderr),
  };
}
|
||||
|
||||
// ─── Phase utilities ──────────────────────────────────────────────────────────
|
||||
|
||||
/** Escape regex metacharacters so `value` can be embedded in a RegExp literally. */
function escapeRegex(value) {
  const REGEX_SPECIALS = /[.*+?^${}()|[\]\\]/g;
  return String(value).replace(REGEX_SPECIALS, '\\$&');
}
|
||||
|
||||
/**
 * Canonicalize a phase identifier: zero-pad the integer part to two digits,
 * uppercase an optional letter suffix, and keep any dotted decimal segments.
 * e.g. '3a.1' → '03A.1'. Unparseable input is returned unchanged.
 */
function normalizePhaseName(phase) {
  const parsed = String(phase).match(/^(\d+)([A-Z])?((?:\.\d+)*)/i);
  if (!parsed) return phase;
  const [, num, letter, decimals] = parsed;
  return num.padStart(2, '0') + (letter ? letter.toUpperCase() : '') + (decimals || '');
}
|
||||
|
||||
/**
 * Comparator for phase identifiers: orders by integer part, then letter
 * suffix (no letter first: 12 < 12A < 12B), then dotted decimal segments
 * compared piecewise (12A < 12A.1 < 12A.1.2 < 12A.2). Inputs that do not
 * parse fall back to locale string comparison.
 */
function comparePhaseNum(a, b) {
  const PHASE = /^(\d+)([A-Z])?((?:\.\d+)*)/i;
  const ma = String(a).match(PHASE);
  const mb = String(b).match(PHASE);
  if (!ma || !mb) return String(a).localeCompare(String(b));

  const byInt = parseInt(ma[1], 10) - parseInt(mb[1], 10);
  if (byInt !== 0) return byInt;

  const letterA = (ma[2] || '').toUpperCase();
  const letterB = (mb[2] || '').toUpperCase();
  if (letterA !== letterB) {
    if (letterA === '') return -1;
    if (letterB === '') return 1;
    return letterA < letterB ? -1 : 1;
  }

  const segsA = ma[3] ? ma[3].slice(1).split('.').map((s) => parseInt(s, 10)) : [];
  const segsB = mb[3] ? mb[3].slice(1).split('.').map((s) => parseInt(s, 10)) : [];
  // A bare phase sorts before any of its decimal sub-phases.
  if (segsA.length === 0 && segsB.length > 0) return -1;
  if (segsB.length === 0 && segsA.length > 0) return 1;
  for (let k = 0; k < Math.max(segsA.length, segsB.length); k++) {
    const va = Number.isFinite(segsA[k]) ? segsA[k] : 0;
    const vb = Number.isFinite(segsB[k]) ? segsB[k] : 0;
    if (va !== vb) return va - vb;
  }
  return 0;
}
|
||||
|
||||
/**
 * Look up a phase directory inside `baseDir` whose name starts with the
 * normalized phase number (e.g. '03' matches '03-auth') and summarize its
 * contents.
 *
 * File conventions inside a phase dir: *-PLAN.md / PLAN.md, *-SUMMARY.md /
 * SUMMARY.md, plus optional RESEARCH / CONTEXT / VERIFICATION markers.
 * A plan counts as complete when a summary with the same id prefix exists.
 *
 * @param {string} baseDir - Absolute directory to scan (e.g. .planning/phases).
 * @param {string} relBase - Same directory as a repo-relative path, used for
 *   the `directory` field of the result (emitted POSIX-style).
 * @param {string} normalized - Phase number from normalizePhaseName().
 * @returns {object|null} Phase summary object, or null when the directory is
 *   unreadable or no phase matches.
 */
function searchPhaseInDir(baseDir, relBase, normalized) {
  try {
    const entries = fs.readdirSync(baseDir, { withFileTypes: true });
    // Sort numerically (comparePhaseNum) so the lowest matching phase wins.
    const dirs = entries.filter(e => e.isDirectory()).map(e => e.name).sort((a, b) => comparePhaseNum(a, b));
    const match = dirs.find(d => d.startsWith(normalized));
    if (!match) return null;

    // Split "03-auth" into number ("03") and name ("auth"); name may be absent.
    const dirMatch = match.match(/^(\d+[A-Z]?(?:\.\d+)*)-?(.*)/i);
    const phaseNumber = dirMatch ? dirMatch[1] : normalized;
    const phaseName = dirMatch && dirMatch[2] ? dirMatch[2] : null;
    const phaseDir = path.join(baseDir, match);
    const phaseFiles = fs.readdirSync(phaseDir);

    const plans = phaseFiles.filter(f => f.endsWith('-PLAN.md') || f === 'PLAN.md').sort();
    const summaries = phaseFiles.filter(f => f.endsWith('-SUMMARY.md') || f === 'SUMMARY.md').sort();
    const hasResearch = phaseFiles.some(f => f.endsWith('-RESEARCH.md') || f === 'RESEARCH.md');
    const hasContext = phaseFiles.some(f => f.endsWith('-CONTEXT.md') || f === 'CONTEXT.md');
    const hasVerification = phaseFiles.some(f => f.endsWith('-VERIFICATION.md') || f === 'VERIFICATION.md');

    // "NN-foo-PLAN.md" pairs with "NN-foo-SUMMARY.md": both strip to the id
    // "NN-foo". Bare PLAN.md pairs with SUMMARY.md (both strip to '').
    const completedPlanIds = new Set(
      summaries.map(s => s.replace('-SUMMARY.md', '').replace('SUMMARY.md', ''))
    );
    const incompletePlans = plans.filter(p => {
      const planId = p.replace('-PLAN.md', '').replace('PLAN.md', '');
      return !completedPlanIds.has(planId);
    });

    return {
      found: true,
      directory: toPosixPath(path.join(relBase, match)),
      phase_number: phaseNumber,
      phase_name: phaseName,
      phase_slug: phaseName ? phaseName.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '') : null,
      plans,
      summaries,
      incomplete_plans: incompletePlans,
      has_research: hasResearch,
      has_context: hasContext,
      has_verification: hasVerification,
    };
  } catch {
    // Unreadable baseDir/phaseDir → treat as "not found".
    return null;
  }
}
|
||||
|
||||
/**
 * Locate a phase directory by identifier, searching .planning/phases first
 * and then archived milestone phases (.planning/milestones/v*-phases, newest
 * version first). Archived hits carry an `archived: <version>` marker.
 * Returns the searchPhaseInDir() result object, or null when not found.
 */
function findPhaseInternal(cwd, phase) {
  if (!phase) return null;

  const normalized = normalizePhaseName(phase);

  // 1. Current phases.
  const current = searchPhaseInDir(path.join(cwd, '.planning', 'phases'), '.planning/phases', normalized);
  if (current) return current;

  // 2. Archived milestone phases, newest version first.
  const milestonesDir = path.join(cwd, '.planning', 'milestones');
  if (!fs.existsSync(milestonesDir)) return null;

  try {
    const archives = fs.readdirSync(milestonesDir, { withFileTypes: true })
      .filter((e) => e.isDirectory() && /^v[\d.]+-phases$/.test(e.name))
      .map((e) => e.name)
      .sort()
      .reverse();

    for (const archiveName of archives) {
      const hit = searchPhaseInDir(
        path.join(milestonesDir, archiveName),
        '.planning/milestones/' + archiveName,
        normalized
      );
      if (hit) {
        // Mark the result with the milestone version it came from.
        hit.archived = archiveName.match(/^(v[\d.]+)-phases$/)[1];
        return hit;
      }
    }
  } catch {
    // Unreadable milestones tree → treat as "phase not found".
  }

  return null;
}
|
||||
|
||||
/**
 * Enumerate phase directories archived under .planning/milestones/v*-phases.
 * Archives are visited newest-version first; phases within an archive are
 * ordered by comparePhaseNum. Each entry carries the phase dir name, its
 * milestone version, and both relative (basePath) and absolute (fullPath)
 * locations. Returns [] when nothing is archived or the tree is unreadable.
 */
function getArchivedPhaseDirs(cwd) {
  const milestonesDir = path.join(cwd, '.planning', 'milestones');
  const results = [];
  if (!fs.existsSync(milestonesDir)) return results;

  try {
    // v*-phases archive directories, newest version first.
    const archiveNames = fs.readdirSync(milestonesDir, { withFileTypes: true })
      .filter((e) => e.isDirectory() && /^v[\d.]+-phases$/.test(e.name))
      .map((e) => e.name)
      .sort()
      .reverse();

    for (const archiveName of archiveNames) {
      const version = archiveName.match(/^(v[\d.]+)-phases$/)[1];
      const archivePath = path.join(milestonesDir, archiveName);
      const phaseNames = fs.readdirSync(archivePath, { withFileTypes: true })
        .filter((e) => e.isDirectory())
        .map((e) => e.name)
        .sort((a, b) => comparePhaseNum(a, b));

      for (const name of phaseNames) {
        results.push({
          name,
          milestone: version,
          basePath: path.join('.planning', 'milestones', archiveName),
          fullPath: path.join(archivePath, name),
        });
      }
    }
  } catch {
    // An unreadable archive aborts the walk but keeps what was collected.
  }

  return results;
}
|
||||
|
||||
// ─── Roadmap milestone scoping ───────────────────────────────────────────────
|
||||
|
||||
/**
 * Remove every <details>…</details> block from ROADMAP.md content.
 * Shipped milestones are wrapped in <details>, so stripping them scopes
 * later phase-heading/checkbox searches to the current milestone and avoids
 * matching archived phases that reuse the same numbers.
 */
function stripShippedMilestones(content) {
  const shippedBlock = /<details>[\s\S]*?<\/details>/gi;
  return content.replace(shippedBlock, '');
}
|
||||
|
||||
/**
 * Extract the current milestone section from ROADMAP.md by positive lookup.
 *
 * Instead of stripping <details> blocks (negative heuristic that breaks if
 * agents wrap the current milestone in <details>), this finds the section
 * matching the current milestone version and returns only that content.
 *
 * Falls back to stripShippedMilestones() if:
 *  - cwd is not provided
 *  - STATE.md doesn't exist or has no milestone field
 *  - Version can't be found in ROADMAP.md
 *
 * @param {string} content - Full ROADMAP.md content
 * @param {string} [cwd] - Working directory for reading STATE.md
 * @returns {string} Content scoped to current milestone
 */
function extractCurrentMilestone(content, cwd) {
  if (!cwd) return stripShippedMilestones(content);

  // 1. Get current milestone version from STATE.md frontmatter
  let version = null;
  try {
    const statePath = path.join(cwd, '.planning', 'STATE.md');
    if (fs.existsSync(statePath)) {
      const stateRaw = fs.readFileSync(statePath, 'utf-8');
      const milestoneMatch = stateRaw.match(/^milestone:\s*(.+)/m);
      if (milestoneMatch) {
        version = milestoneMatch[1].trim();
      }
    }
  } catch {} // best-effort: unreadable STATE.md falls through to ROADMAP heuristics

  // 2. Fallback: derive version from getMilestoneInfo pattern in ROADMAP.md itself
  if (!version) {
    // Check for 🚧 in-progress marker
    const inProgressMatch = content.match(/🚧\s*\*\*v(\d+\.\d+)\s/);
    if (inProgressMatch) {
      version = 'v' + inProgressMatch[1];
    }
  }

  if (!version) return stripShippedMilestones(content);

  // 3. Find the section matching this version
  // Match headings like: ## Roadmap v3.0: Name, ## v3.0 Name, etc.
  const escapedVersion = escapeRegex(version);
  const sectionPattern = new RegExp(
    `(^#{1,3}\\s+.*${escapedVersion}[^\\n]*)`,
    'mi'
  );
  const sectionMatch = content.match(sectionPattern);

  if (!sectionMatch) return stripShippedMilestones(content);

  const sectionStart = sectionMatch.index;

  // Find the end: next milestone heading at same or higher level, or EOF
  // Milestone headings look like: ## v2.0, ## Roadmap v2.0, ## ✅ v1.0, etc.
  // (sectionPattern guarantees the heading-level match below succeeds.)
  const headingLevel = sectionMatch[1].match(/^(#{1,3})\s/)[1].length;
  const restContent = content.slice(sectionStart + sectionMatch[0].length);
  const nextMilestonePattern = new RegExp(
    `^#{1,${headingLevel}}\\s+(?:.*v\\d+\\.\\d+|✅|📋|🚧)`,
    'mi'
  );
  const nextMatch = restContent.match(nextMilestonePattern);

  let sectionEnd;
  if (nextMatch) {
    // nextMatch.index is relative to restContent, so re-offset into content.
    sectionEnd = sectionStart + sectionMatch[0].length + nextMatch.index;
  } else {
    sectionEnd = content.length;
  }

  // Return everything before the current milestone section (non-milestone content
  // like title, overview) plus the current milestone section
  const beforeMilestones = content.slice(0, sectionStart);
  const currentSection = content.slice(sectionStart, sectionEnd);

  // Also include any content before the first milestone heading (title, overview, etc.)
  // but strip any <details> blocks in it (these are definitely shipped)
  const preamble = beforeMilestones.replace(/<details>[\s\S]*?<\/details>/gi, '');

  return preamble + currentSection;
}
|
||||
|
||||
/**
 * Apply `content.replace(pattern, replacement)` scoped to the current
 * milestone section — the text after the last </details> close tag — so
 * write operations cannot accidentally modify archived milestone
 * checkboxes/tables. Falls back to a whole-document replace when no
 * <details> block exists.
 */
function replaceInCurrentMilestone(content, pattern, replacement) {
  const CLOSE_TAG = '</details>';
  const lastClose = content.lastIndexOf(CLOSE_TAG);
  if (lastClose === -1) return content.replace(pattern, replacement);

  const splitAt = lastClose + CLOSE_TAG.length;
  const archived = content.slice(0, splitAt);
  const current = content.slice(splitAt);
  return archived + current.replace(pattern, replacement);
}
|
||||
|
||||
// ─── Roadmap & model utilities ────────────────────────────────────────────────
|
||||
|
||||
/**
 * Find a phase's entry in ROADMAP.md (scoped to the current milestone) and
 * return its heading name, **Goal** line, and full section text.
 *
 * @param {string} cwd - Project root containing .planning/ROADMAP.md.
 * @param {string|number} phaseNum - Phase identifier as written in the
 *   "## Phase N:" heading.
 * @returns {object|null} { found, phase_number, phase_name, goal, section },
 *   or null when the roadmap is missing/unreadable or the phase has no entry.
 */
function getRoadmapPhaseInternal(cwd, phaseNum) {
  if (!phaseNum) return null;
  const roadmapPath = path.join(cwd, '.planning', 'ROADMAP.md');
  if (!fs.existsSync(roadmapPath)) return null;

  try {
    // Scope the search to the current milestone so archived phases that
    // reuse the same numbers cannot match.
    const content = extractCurrentMilestone(fs.readFileSync(roadmapPath, 'utf-8'), cwd);
    const escapedPhase = escapeRegex(phaseNum.toString());
    const phasePattern = new RegExp(`#{2,4}\\s*Phase\\s+${escapedPhase}:\\s*([^\\n]+)`, 'i');
    const headerMatch = content.match(phasePattern);
    if (!headerMatch) return null;

    const phaseName = headerMatch[1].trim();
    const headerIndex = headerMatch.index;
    // Section runs from this heading to the next "Phase N" heading (or EOF).
    const restOfContent = content.slice(headerIndex);
    const nextHeaderMatch = restOfContent.match(/\n#{2,4}\s+Phase\s+\d/i);
    const sectionEnd = nextHeaderMatch ? headerIndex + nextHeaderMatch.index : content.length;
    const section = content.slice(headerIndex, sectionEnd).trim();

    // Accept both "**Goal**: …" and "**Goal:** …" bolding styles.
    const goalMatch = section.match(/\*\*Goal(?:\*\*:|\*?\*?:\*\*)\s*([^\n]+)/i);
    const goal = goalMatch ? goalMatch[1].trim() : null;

    return {
      found: true,
      phase_number: phaseNum.toString(),
      phase_name: phaseName,
      goal,
      section,
    };
  } catch {
    // Unreadable roadmap → treat as "no entry".
    return null;
  }
}
|
||||
|
||||
// ─── Model alias resolution ───────────────────────────────────────────────────
|
||||
|
||||
/**
 * Map short model aliases to full model IDs.
 * Updated each release to match current model versions.
 * Users can override with model_overrides in config.json for custom/latest models.
 *
 * NOTE(review): these IDs go stale between releases — verify against the
 * current Anthropic model listing when bumping versions.
 */
const MODEL_ALIAS_MAP = {
  'opus': 'claude-opus-4-0',
  'sonnet': 'claude-sonnet-4-5',
  'haiku': 'claude-haiku-3-5',
};
|
||||
|
||||
/**
 * Resolve which model an agent type should use.
 *
 * Precedence: explicit config.model_overrides[agentType] → the agent's entry
 * in MODEL_PROFILES for the configured profile (profile 'inherit' short-
 * circuits to 'inherit') → the 'balanced' column → 'sonnet'. When
 * config.resolve_model_ids is set, short aliases are expanded to full model
 * IDs via MODEL_ALIAS_MAP so the Task tool doesn't 404 on a bare alias.
 */
function resolveModelInternal(cwd, agentType) {
  const config = loadConfig(cwd);

  // Per-agent override beats everything else.
  const explicit = config.model_overrides?.[agentType];
  if (explicit) return explicit;

  const agentModels = MODEL_PROFILES[agentType];
  if (!agentModels) return 'sonnet';

  const profile = String(config.model_profile || 'balanced').toLowerCase();
  if (profile === 'inherit') return 'inherit';

  const alias = agentModels[profile] || agentModels['balanced'] || 'sonnet';
  return config.resolve_model_ids ? (MODEL_ALIAS_MAP[alias] || alias) : alias;
}
|
||||
|
||||
// ─── Misc utilities ───────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Check whether a path exists (file or directory). Relative paths are
 * resolved against `cwd`; absolute paths are used as-is.
 *
 * @param {string} cwd - Base directory for relative paths.
 * @param {string} targetPath - Absolute or cwd-relative path to test.
 * @returns {boolean} true when the path can be stat'ed.
 */
function pathExistsInternal(cwd, targetPath) {
  const fullPath = path.isAbsolute(targetPath) ? targetPath : path.join(cwd, targetPath);
  // fs.existsSync is the idiomatic form of the old statSync+try/catch probe;
  // it returns false on any stat failure, matching the previous behavior.
  return fs.existsSync(fullPath);
}
|
||||
|
||||
/**
 * Build a URL-safe slug: lowercase, runs of non-alphanumerics collapsed to
 * single hyphens, leading/trailing hyphens trimmed. Empty or nullish input
 * yields null.
 */
function generateSlugInternal(text) {
  if (!text) return null;
  const lowered = text.toLowerCase();
  return lowered
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '');
}
|
||||
|
||||
/**
 * Derive the current milestone { version, name } from .planning/ROADMAP.md.
 *
 * Resolution order:
 *   1. List-format roadmaps: the 🚧 in-progress marker line,
 *      e.g. "- 🚧 **v2.1 Belgium** — Phases 24-28 (in progress)".
 *   2. Heading-format roadmaps: a "## … vX.Y: Name" heading, after stripping
 *      shipped (<details>-wrapped) milestones.
 *   3. Any bare "vX.Y" token in the stripped content, with name "milestone".
 *
 * Falls back to { version: 'v1.0', name: 'milestone' } when ROADMAP.md is
 * missing/unreadable or contains no version at all.
 */
function getMilestoneInfo(cwd) {
  try {
    const roadmap = fs.readFileSync(path.join(cwd, '.planning', 'ROADMAP.md'), 'utf-8');

    // First: check for list-format roadmaps using 🚧 (in-progress) marker
    // e.g. "- 🚧 **v2.1 Belgium** — Phases 24-28 (in progress)"
    const inProgressMatch = roadmap.match(/🚧\s*\*\*v(\d+\.\d+)\s+([^*]+)\*\*/);
    if (inProgressMatch) {
      return {
        version: 'v' + inProgressMatch[1],
        name: inProgressMatch[2].trim(),
      };
    }

    // Second: heading-format roadmaps — strip shipped milestones in <details> blocks
    const cleaned = stripShippedMilestones(roadmap);
    // Extract version and name from the same ## heading for consistency
    const headingMatch = cleaned.match(/## .*v(\d+\.\d+)[:\s]+([^\n(]+)/);
    if (headingMatch) {
      return {
        version: 'v' + headingMatch[1],
        name: headingMatch[2].trim(),
      };
    }
    // Fallback: try bare version match
    const versionMatch = cleaned.match(/v(\d+\.\d+)/);
    return {
      version: versionMatch ? versionMatch[0] : 'v1.0',
      name: 'milestone',
    };
  } catch {
    // No readable ROADMAP.md → default milestone identity.
    return { version: 'v1.0', name: 'milestone' };
  }
}
|
||||
|
||||
/**
 * Build a predicate that tests whether a phase directory name belongs to the
 * current milestone, based on "Phase N:" headings in ROADMAP.md. The returned
 * function carries a `phaseCount` property (number of phases found; 0 means
 * "no roadmap data", in which case the predicate passes everything).
 */
function getMilestonePhaseFilter(cwd) {
  const phaseNums = new Set();
  try {
    const roadmap = extractCurrentMilestone(fs.readFileSync(path.join(cwd, '.planning', 'ROADMAP.md'), 'utf-8'), cwd);
    const phasePattern = /#{2,4}\s*Phase\s+(\d+[A-Z]?(?:\.\d+)*)\s*:/gi;
    for (const hit of roadmap.matchAll(phasePattern)) {
      phaseNums.add(hit[1]);
    }
  } catch {
    // No readable ROADMAP.md → fall through to the pass-all filter.
  }

  if (phaseNums.size === 0) {
    const passAll = () => true;
    passAll.phaseCount = 0;
    return passAll;
  }

  // Compare with leading zeros stripped and case folded so directory "04-auth"
  // matches roadmap heading "Phase 4:".
  const canonical = new Set(
    [...phaseNums].map((n) => (n.replace(/^0+/, '') || '0').toLowerCase())
  );

  const isDirInMilestone = (dirName) => {
    const m = dirName.match(/^0*(\d+[A-Za-z]?(?:\.\d+)*)/);
    return m ? canonical.has(m[1].toLowerCase()) : false;
  };
  isDirInMilestone.phaseCount = phaseNums.size;
  return isDirInMilestone;
}
|
||||
|
||||
// Public API surface consumed by the gsd command modules.
module.exports = {
  // Process output & termination
  output,
  error,
  // File & config utilities
  safeReadFile,
  loadConfig,
  // Git utilities
  isGitIgnored,
  execGit,
  // Markdown normalization
  normalizeMd,
  // Regex & phase utilities
  escapeRegex,
  normalizePhaseName,
  comparePhaseNum,
  searchPhaseInDir,
  findPhaseInternal,
  getArchivedPhaseDirs,
  getRoadmapPhaseInternal,
  // Model resolution
  resolveModelInternal,
  // Misc utilities
  pathExistsInternal,
  generateSlugInternal,
  getMilestoneInfo,
  getMilestonePhaseFilter,
  // Roadmap milestone scoping
  stripShippedMilestones,
  extractCurrentMilestone,
  replaceInCurrentMilestone,
  toPosixPath,
  MODEL_ALIAS_MAP,
};
|
||||
Reference in New Issue
Block a user