scripts/generate-docs-index.mjs walks docs/**/*.md, parses YAML front-matter (falling back to the first H1) plus a Nygard-style ## Status section, and writes docs/INDEX.md with grouped tables for ADRs, Specs, Plans (with an archive subsection), Patterns, and Other. Pure Node 20 (no external dependencies). Idempotent: rerunning it on the same inputs produces byte-identical output, apart from the generation date embedded in the header. It excludes adr/templates/, the ADR README, INDEX.md itself, and any `_`-prefixed sidecar file. Wire-up: package.json gains docs:index → node scripts/generate-docs-index.mjs. The initial run indexed 35 docs across the existing structure; the generated INDEX.md is committed so the table is reviewable in the PR before generation is hooked into a pre-commit step.
277 lines · 8.5 KiB · JavaScript
#!/usr/bin/env node
// Generate docs/INDEX.md from the front-matter and headings of every
// .md file under docs/. Pure Node 20 — no external dependencies.
//
// Usage: `npm run docs:index` (or `node scripts/generate-docs-index.mjs`).
//
// Idempotent: rewriting INDEX.md from the same inputs produces identical
// output (apart from the generation date in the header), so the script
// is safe to run repeatedly and in pre-commit hooks.

|
import { readdir, readFile, writeFile } from 'node:fs/promises';
|
|
import { join, relative, basename, sep } from 'node:path';
|
|
import { fileURLToPath } from 'node:url';
|
|
|
|
// Resolve every path relative to this script's own location, so the
// generator behaves the same regardless of the current working directory.
const SCRIPT_DIR = fileURLToPath(new URL('.', import.meta.url));
const REPO_ROOT = join(SCRIPT_DIR, '..');
const DOCS_DIR = join(REPO_ROOT, 'docs');
// The generated file; also listed in EXCLUDE_PATTERNS so it never indexes itself.
const INDEX_PATH = join(DOCS_DIR, 'INDEX.md');
|
|
|
|
// Paths (relative to repo root, forward-slashed) that the index should
// skip entirely: ADR templates, the ADR README, `_`-prefixed sidecar
// files (personal Obsidian scratch), and the generated INDEX.md itself.
// NOTE: archived plans are deliberately NOT excluded — they surface in
// the "Archive" subsection of the Plans table.
const EXCLUDE_PATTERNS = [
  /^docs\/adr\/templates\//,
  /^docs\/adr\/README\.md$/,
  /\/_[^/]+\.md$/,
  /^docs\/INDEX\.md$/,
];
|
|
|
|
// Recursively collect the absolute path of every `.md` file under `dir`.
// Traversal order is whatever the OS yields; callers sort the results
// before rendering, so the index output stays deterministic.
async function walk(dir) {
  const collected = [];
  for (const entry of await readdir(dir, { withFileTypes: true })) {
    const path = join(dir, entry.name);
    if (entry.isDirectory()) {
      const nested = await walk(path);
      collected.push(...nested);
    } else if (entry.isFile() && entry.name.endsWith('.md')) {
      collected.push(path);
    }
  }
  return collected;
}
|
|
|
|
// Minimal YAML front-matter parser. Front-matter in this repo is restricted
// to flat `key: value` pairs, so a hand-rolled parser is enough — and
// keeps the script dependency-free.
//
// Returns `{ data, body }`: `data` is the flat key/value map (empty when
// no front-matter is present) and `body` is the remaining Markdown text.
//
// Fixes over the previous version:
//  - a closing `---` fence at end-of-file (no trailing newline) is now
//    recognized; previously such a file lost its front-matter entirely.
//  - a value that is a single quote character (`'` or `"`) is no longer
//    stripped to the empty string.
function parseFrontMatter(content) {
  if (!content.startsWith('---\n')) return { data: {}, body: content };

  // Locate the closing fence: either a full `\n---\n` delimiter line or a
  // bare `\n---` terminating the file.
  let end = content.indexOf('\n---\n', 4);
  let bodyStart;
  if (end !== -1) {
    bodyStart = end + 5;
  } else if (content.endsWith('\n---')) {
    end = content.length - 4;
    bodyStart = content.length;
  } else {
    return { data: {}, body: content };
  }

  const data = {};
  for (const raw of content.slice(4, end).split('\n')) {
    const line = raw.trim();
    if (!line || line.startsWith('#')) continue; // blank or YAML comment
    const m = line.match(/^([A-Za-z][\w-]*)\s*:\s*(.*?)\s*$/);
    if (!m) continue; // skip lines that aren't simple `key: value`
    let val = m[2];
    // Strip one matching pair of surrounding quotes (if both ends agree).
    if (
      val.length >= 2 &&
      ((val.startsWith('"') && val.endsWith('"')) ||
        (val.startsWith("'") && val.endsWith("'")))
    ) {
      val = val.slice(1, -1);
    }
    data[m[1]] = val;
  }
  return { data, body: content.slice(bodyStart) };
}
|
|
|
|
// First ATX H1 (`# Title`) found anywhere in `text`, or null when there
// is none. Used as the title fallback for docs without front-matter.
function extractFirstH1(text) {
  const match = text.match(/^#\s+(.+?)\s*$/m);
  if (!match) return null;
  return match[1];
}
|
|
|
|
// Nygard-style ADRs record their status as the first non-empty line under
// a `## Status` heading instead of YAML front-matter; pull that line so
// the index can still show a status for them. Returns null when absent.
function extractStatusSection(text) {
  const match = text.match(/^##\s+Status\s*\n+([^\n#].*?)(?:\n|$)/m);
  return match === null ? null : match[1].trim();
}
|
|
|
|
// True when `relPath` (repo-root-relative, forward-slashed) matches any
// exclusion pattern and should be left out of the index.
function isExcluded(relPath) {
  for (const pattern of EXCLUDE_PATTERNS) {
    if (pattern.test(relPath)) return true;
  }
  return false;
}
|
|
|
|
// Map a path under docs/ to one of the four named sections, or "Other".
// Folder-based first; root-level docs fall back to a name-prefix rule
// so legacy `scrum4me-*.md` files still surface under Specs until the
// docs-restructure migrates them into `docs/specs/`.
function categorize(relPath) {
  const segments = relPath.split('/');
  if (segments[0] !== 'docs') return 'Other';
  if (segments.length === 2) {
    // Root-level file directly under docs/.
    return /^scrum4me-/.test(segments[1]) ? 'Specs' : 'Other';
  }
  switch (segments[1]) {
    case 'adr':
      return 'ADRs';
    case 'specs':
      return 'Specs';
    case 'plans':
      return 'Plans';
    case 'patterns':
      return 'Patterns';
    default:
      return 'Other';
  }
}
|
|
|
|
// Leading four-digit ADR sequence number from a filename such as
// `0042-use-postgres.md`; null when the name carries no number.
function adrNumber(filename) {
  const match = /^(\d{4})-/.exec(filename);
  return match ? Number.parseInt(match[1], 10) : null;
}
|
|
|
|
// Escape literal `|` characters so cell text cannot break a Markdown table.
// Coerces non-strings (e.g. numbers) before escaping.
function escapePipe(s) {
  return String(s).split('|').join('\\|');
}
|
|
|
|
// Walk docs/, parse each file's metadata, and return one record per
// indexable doc. Skips anything matched by EXCLUDE_PATTERNS.
async function collectDocs() {
  const docs = [];
  for (const full of await walk(DOCS_DIR)) {
    // Normalize to forward slashes so the regex patterns and generated
    // links are identical on Windows and POSIX.
    const rel = relative(REPO_ROOT, full).split(sep).join('/');
    if (isExcluded(rel)) continue;

    const content = await readFile(full, 'utf8');
    const { data, body } = parseFrontMatter(content);

    docs.push({
      rel,
      // Precedence: explicit front-matter title, first H1, filename stem.
      title: data.title || extractFirstH1(body) || basename(full, '.md'),
      // Front-matter `status` wins over a Nygard `## Status` section.
      status: data.status || extractStatusSection(body) || '',
      date: data.date || data.last_updated || '',
      // Links in INDEX.md are relative to docs/ itself.
      linkPath: './' + rel.replace(/^docs\//, ''),
      category: categorize(rel),
      basename: basename(full),
    });
  }
  return docs;
}

// Bucket docs by section and apply each section's stable sort order.
function groupDocs(docs) {
  const groups = { ADRs: [], Specs: [], Plans: [], Patterns: [], Other: [] };
  for (const d of docs) {
    if (groups[d.category]) groups[d.category].push(d);
  }
  // ADRs: numeric order; unnumbered files sink to the end, name-sorted.
  groups.ADRs.sort((a, b) => {
    const na = adrNumber(a.basename) ?? 9999;
    const nb = adrNumber(b.basename) ?? 9999;
    if (na !== nb) return na - nb;
    return a.basename.localeCompare(b.basename);
  });
  for (const k of ['Specs', 'Plans', 'Patterns', 'Other']) {
    groups[k].sort((a, b) => a.rel.localeCompare(b.rel));
  }
  return groups;
}

// Append the shared `| Title | Status | Updated |` table (header + one
// row per doc + trailing blank line). Used by Specs, active Plans, and
// Patterns, which previously triplicated this code inline.
function pushStandardTable(lines, docs) {
  lines.push('| Title | Status | Updated |');
  lines.push('|---|---|---|');
  for (const d of docs) {
    lines.push(
      `| [${escapePipe(d.title)}](${d.linkPath}) | ${escapePipe(d.status || '—')} | ${escapePipe(d.date || '—')} |`
    );
  }
  lines.push('');
}

// Append a full section: heading, then either the empty-state placeholder
// or the standard table.
function pushStandardSection(lines, heading, docs, emptyMessage) {
  lines.push(heading);
  lines.push('');
  if (docs.length === 0) {
    lines.push(emptyMessage);
    lines.push('');
  } else {
    pushStandardTable(lines, docs);
  }
}

// Render the entire INDEX.md content from the grouped docs.
function renderIndex(groups) {
  const lines = [];
  lines.push(
    '<!-- Generated by scripts/generate-docs-index.mjs. Do not edit by hand. Run `npm run docs:index`. -->'
  );
  lines.push('');
  lines.push('# Documentation Index');
  lines.push('');
  // NOTE(review): this date means a rerun on a different day rewrites the
  // file even when no docs changed — confirm that's acceptable before
  // wiring the script into a pre-commit hook.
  lines.push(
    `Auto-generated on ${new Date().toISOString().slice(0, 10)} from front-matter and headings.`
  );
  lines.push('');

  // --- ADRs (numbered, no date column) ---
  lines.push('## Architecture Decision Records');
  lines.push('');
  if (groups.ADRs.length === 0) {
    lines.push('_No ADRs yet._');
    lines.push('');
  } else {
    lines.push('| # | Title | Status |');
    lines.push('|---|---|---|');
    for (const d of groups.ADRs) {
      const n = adrNumber(d.basename);
      const num = n !== null ? String(n).padStart(4, '0') : '—';
      lines.push(
        `| ${num} | [${escapePipe(d.title)}](${d.linkPath}) | ${escapePipe(d.status || '—')} |`
      );
    }
    lines.push('');
  }

  // --- Specs ---
  pushStandardSection(lines, '## Specifications', groups.Specs, '_No specs yet._');

  // --- Plans: active table plus an Archive subsection ---
  lines.push('## Plans');
  lines.push('');
  const plansActive = groups.Plans.filter((d) => !d.rel.includes('/archive/'));
  const plansArchive = groups.Plans.filter((d) => d.rel.includes('/archive/'));
  if (plansActive.length === 0) {
    lines.push('_No active plans._');
    lines.push('');
  } else {
    pushStandardTable(lines, plansActive);
  }
  if (plansArchive.length > 0) {
    lines.push('### Archive');
    lines.push('');
    lines.push('| Title | Updated |');
    lines.push('|---|---|');
    for (const d of plansArchive) {
      lines.push(
        `| [${escapePipe(d.title)}](${d.linkPath}) | ${escapePipe(d.date || '—')} |`
      );
    }
    lines.push('');
  }

  // --- Patterns ---
  pushStandardSection(lines, '## Patterns', groups.Patterns, '_No patterns yet._');

  // --- Other (catches design/, api/, runbooks/, etc. until they get
  // dedicated sections after the docs-restructure); omitted when empty ---
  if (groups.Other.length > 0) {
    lines.push('## Other Docs');
    lines.push('');
    lines.push('| Title | Path | Status | Updated |');
    lines.push('|---|---|---|---|');
    for (const d of groups.Other) {
      lines.push(
        `| [${escapePipe(d.title)}](${d.linkPath}) | \`${d.rel.replace(/^docs\//, '')}\` | ${escapePipe(d.status || '—')} | ${escapePipe(d.date || '—')} |`
      );
    }
    lines.push('');
  }

  return lines.join('\n');
}

// Entry point: collect, group, render, write, and report.
async function main() {
  const docs = await collectDocs();
  const groups = groupDocs(docs);
  const out = renderIndex(groups);
  await writeFile(INDEX_PATH, out, 'utf8');
  console.log(`Wrote ${relative(REPO_ROOT, INDEX_PATH)} (${docs.length} docs indexed)`);
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});