* feat(PBI-76): one-shot localStorage→user-settings migration helper Reads all legacy keys (sprint_pb_*, pbi_*, story_sort, debug-mode, and dynamic *_filter_kind/*_filter_status for jobs columns) and returns a typed UserSettings patch plus the keys to clear. Idempotent via scrum4me:settings_migrated=v1 marker. Skips invalid values silently so existing corrupt entries do not block migration. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> * feat(PBI-76): bridge runs one-shot localStorage migration After hydrate, scans legacy localStorage keys via buildMigrationPatch and, if any data is found, pushes one bulk patch to the server, applies it locally, then removes the legacy keys. Demo accounts skip the migration entirely. Cancellable on unmount to avoid setState on unmounted component. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> * feat(PBI-76): migrate sprint-backlog to user-settings store Replaces six useState+useEffect+localStorage flows with selectors from useUserSettingsStore. Defaults are applied at the selector level (filterStatus 'OPEN', sort 'code', etc) so the component matches its previous behaviour. The collapsed Set is derived from the persisted array, falling back to auto-collapse-DONE when no preference exists yet. setPref calls are fire-and-forget — the optimistic flow handles the local state update. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> * feat(PBI-76): migrate pbi-list to user-settings store Same pattern as sprint-backlog: replaces local useState + localStorage hydration/persist with selectors from useUserSettingsStore. filterPopoverOpen stays local — it was never persisted in pbi-list. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> * feat(PBI-76): migrate story-panel sort to user-settings store Single pref (sortMode) — replaces sync localStorage useState initializer with a selector. Default 'priority' applied at the read site. 
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> * feat(PBI-76): migrate jobs-column to user-settings store Per-instance filter state (kinds + statuses) now lives under views.jobsColumns[storageKeyPrefix] in user-settings. Removes the local CSV-encoding helpers — store keeps arrays natively. A single persist() call writes both fields together so the two arrays cannot drift in optimistic mid-flight updates. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> * feat(PBI-76): migrate debug-mode to user-settings store DebugToggle reads debugMode from user-settings.devTools and toggles via setPref. Removes the standalone stores/debug-store.ts (no consumers left). Body classlist update only fires after the store is hydrated to avoid a flash on initial paint. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> * chore(PBI-76): remove unused readLocalStoragePref helper No consumers left after migrating sprint-backlog, pbi-list, story-panel, jobs-column, and debug-store to user-settings. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> * test(PBI-76): mock user-settings action in backlog integration test PbiList now imports the user-settings store, which transitively loads actions/user-settings.ts → lib/prisma. The vitest jsdom environment has no DATABASE_URL, so we add a mock alongside the existing action mocks. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> * fix(docs): allow balanced parens in markdown link URLs Previously the link-checker regex stopped at the first ')', breaking on Next.js route-group paths like `app/(app)/...`. The new regex matches one level of balanced parens inside the URL. Caught by CI on PR #188 — pre-existing breakage from PBI-78 plan doc that was already merged on main. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> --------- Co-authored-by: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
117 lines
3.1 KiB
JavaScript
117 lines
3.1 KiB
JavaScript
#!/usr/bin/env node
|
|
/**
|
|
* Doc-link checker: walks docs/ (and README.md, CLAUDE.md, AGENTS.md),
|
|
* extracts relative markdown links, and verifies that every target file
|
|
* (and optional #anchor) actually exists.
|
|
*
|
|
* Exits 0 if all links are valid, 1 if any are broken.
|
|
*/
|
|
|
|
import { readFileSync, existsSync, readdirSync, statSync } from 'fs';
|
|
import { resolve, dirname, extname } from 'path';
|
|
import { fileURLToPath } from 'url';
|
|
|
|
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
const ROOT = resolve(__dirname, '..');
|
|
|
|
// Collect all .md files under a directory recursively
|
|
function collectMd(dir) {
|
|
const results = [];
|
|
for (const entry of readdirSync(dir)) {
|
|
const full = resolve(dir, entry);
|
|
const stat = statSync(full);
|
|
if (stat.isDirectory()) {
|
|
results.push(...collectMd(full));
|
|
} else if (extname(entry) === '.md') {
|
|
results.push(full);
|
|
}
|
|
}
|
|
return results;
|
|
}
|
|
|
|
// Convert a heading text to a GitHub-style anchor slug
|
|
function toSlug(text) {
|
|
return text
|
|
.toLowerCase()
|
|
.replace(/[^\w\s-]/g, '')
|
|
.trim()
|
|
.replace(/\s+/g, '-');
|
|
}
|
|
|
|
// Extract all heading slugs from a markdown file
|
|
function headingSlugs(filePath) {
|
|
const content = readFileSync(filePath, 'utf8');
|
|
const slugs = new Set();
|
|
for (const line of content.split('\n')) {
|
|
const m = line.match(/^#{1,6}\s+(.+)/);
|
|
if (m) slugs.add(toSlug(m[1]));
|
|
}
|
|
return slugs;
|
|
}
|
|
|
|
// Match `[label](url)` where url may contain one level of balanced parens
|
|
// (e.g. Next.js route groups like `app/(app)/...`).
|
|
const LINK_RE = /\[(?:[^\]]*)\]\(((?:[^()]+|\([^()]*\))+)\)/g;
|
|
|
|
function checkFile(filePath) {
|
|
const content = readFileSync(filePath, 'utf8');
|
|
const failures = [];
|
|
let m;
|
|
while ((m = LINK_RE.exec(content)) !== null) {
|
|
const raw = m[1];
|
|
// Skip external links and anchors-only
|
|
if (/^https?:\/\//.test(raw) || /^mailto:/.test(raw) || raw.startsWith('#')) continue;
|
|
|
|
const [pathPart, anchor] = raw.split('#');
|
|
const target = resolve(dirname(filePath), pathPart);
|
|
|
|
if (!existsSync(target)) {
|
|
failures.push({ file: filePath, link: raw, reason: 'file not found' });
|
|
continue;
|
|
}
|
|
|
|
if (anchor) {
|
|
const slugs = headingSlugs(target);
|
|
if (!slugs.has(anchor)) {
|
|
failures.push({ file: filePath, link: raw, reason: `anchor #${anchor} not found` });
|
|
}
|
|
}
|
|
}
|
|
return failures;
|
|
}
|
|
|
|
const roots = [
|
|
resolve(ROOT, 'docs'),
|
|
resolve(ROOT, 'README.md'),
|
|
resolve(ROOT, 'CLAUDE.md'),
|
|
resolve(ROOT, 'AGENTS.md'),
|
|
];
|
|
|
|
const files = [];
|
|
for (const r of roots) {
|
|
if (!existsSync(r)) continue;
|
|
const stat = statSync(r);
|
|
if (stat.isDirectory()) {
|
|
files.push(...collectMd(r));
|
|
} else {
|
|
files.push(r);
|
|
}
|
|
}
|
|
|
|
const allFailures = [];
|
|
for (const f of files) {
|
|
allFailures.push(...checkFile(f));
|
|
}
|
|
|
|
if (allFailures.length === 0) {
|
|
console.log(`✓ All doc links valid (${files.length} files checked)`);
|
|
process.exit(0);
|
|
} else {
|
|
console.error(`\n✗ Broken doc links (${allFailures.length}):\n`);
|
|
for (const { file, link, reason } of allFailures) {
|
|
const rel = file.replace(ROOT + '/', '');
|
|
console.error(` ${rel}\n → ${link} (${reason})`);
|
|
}
|
|
console.error('');
|
|
process.exit(1);
|
|
}
|