feat(ST-004): add backlog markdown parser module
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
3a7861df44
commit
7ff1213a53
1 changed files with 186 additions and 0 deletions
186
prisma/seed-data/parse-backlog.ts
Normal file
186
prisma/seed-data/parse-backlog.ts
Normal file
|
|
@ -0,0 +1,186 @@
|
|||
import { readFile } from 'node:fs/promises'
import path from 'node:path'
import { fileURLToPath, pathToFileURL } from 'node:url'
/**
 * One story parsed from the backlog markdown: a `- [x] **ST-###** Title`
 * task bullet plus its indented sub-bullets.
 */
export type ParsedStory = {
  // Story reference, e.g. 'ST-012' (second capture of TASK_BULLET).
  ref: string
  // '<ref>: <title text>' as assembled by the parser.
  title: string
  // Sub-bullet lines joined with '\n' (every bullet except 'Done when:').
  description: string
  // Text following 'Done when:' in that sub-bullet; '' when absent.
  acceptance_criteria: string
  // 'DONE' when the markdown checkbox is '[x]', otherwise 'OPEN'.
  status: 'DONE' | 'OPEN'
  // 1-based position of the story within its milestone.
  sort_order: number
}
/**
 * One milestone parsed from a `### M<key>: Title` heading, carrying the
 * seed metadata looked up from the MILESTONE_* tables plus its stories.
 */
export type ParsedMilestone = {
  // Milestone key from the heading, e.g. 'M0' or 'M3.5'.
  key: string
  // Heading text after the colon.
  title: string
  // Goal text from MILESTONE_GOAL for this key.
  goal: string
  // Seed priority from MILESTONE_PRIORITY (1 = highest).
  priority: 1 | 2 | 3 | 4
  // Sprint status from MILESTONE_SPRINT_STATUS.
  sprint_status: 'ACTIVE' | 'COMPLETED'
  // 1-based document order of the milestone.
  sort_order: number
  // Stories found under this heading, in document order.
  stories: ParsedStory[]
}
// Milestone heading, e.g. '### M3.5: Persoonlijk bord'; captures key and title.
const MILESTONE_HEADER = /^### (M[\d.]+):\s*(.+?)\s*$/
// Story task bullet, e.g. '- [x] **ST-012** Title'; captures checkbox state,
// story ref, and title text.
const TASK_BULLET = /^- \[(x| )\] \*\*(ST-\d+)\*\*\s+(.+?)\s*$/
// Two-space-indented sub-bullet under a story; captures the bullet text.
const SUB_BULLET = /^ {2}- (.+?)\s*$/
// Any non-blank line indented 4+ spaces: continuation of the previous sub-bullet.
const NESTED_LINE = /^ {4,}\S/
// Horizontal rule ('---'): closes the story currently being accumulated.
const SECTION_BREAK = /^---\s*$/
// Seed priority per milestone key (1 = highest). The keys of this table also
// define the set of recognized milestones (see KNOWN_KEYS below).
const MILESTONE_PRIORITY: Record<string, 1 | 2 | 3 | 4> = {
  M0: 1,
  M1: 1,
  M2: 1,
  M3: 1,
  'M3.5': 2,
  M4: 2,
  M5: 3,
  M6: 4,
}
// One-line goal per milestone (Dutch copy, seeded verbatim into the database).
const MILESTONE_GOAL: Record<string, string> = {
  M0: 'Project, database, auth, navigatieshell',
  M1: "Producten, PBI's, gesplitst scherm",
  M2: 'Stories als blokken, dnd-kit, Zustand',
  M3: 'Sprint aanmaken, stories slepen, taken',
  'M3.5': 'Story-claim, persoonlijk Kanban-bord per product',
  M4: 'Alle endpoints, tokenbeheer',
  M5: 'Todo CRUD, promotie naar PBI/story; Data Table + detail-kaart',
  M6: 'Foutafhandeling, toegankelijkheid, CI/CD, beveiliging',
}
// Sprint status seeded per milestone; only M3.5 is marked as the active sprint.
const MILESTONE_SPRINT_STATUS: Record<string, ParsedMilestone['sprint_status']> = {
  M0: 'COMPLETED',
  M1: 'COMPLETED',
  M2: 'COMPLETED',
  M3: 'COMPLETED',
  'M3.5': 'ACTIVE',
  M4: 'COMPLETED',
  M5: 'COMPLETED',
  M6: 'COMPLETED',
}

// Canonical set of milestone keys the parser accepts, derived from the
// priority table so the two can never disagree.
const KNOWN_KEYS = Object.keys(MILESTONE_PRIORITY)
export async function loadBacklog(repoRoot: string): Promise<ParsedMilestone[]> {
|
||||
const file = path.join(repoRoot, 'docs/scrum4me-backlog.md')
|
||||
const md = await readFile(file, 'utf8')
|
||||
|
||||
const milestones: ParsedMilestone[] = []
|
||||
let current: ParsedMilestone | null = null
|
||||
let pending: { story: ParsedStory; bodyLines: string[] } | null = null
|
||||
|
||||
const flushPending = () => {
|
||||
if (!pending) return
|
||||
pending.story.description = pending.bodyLines.join('\n').trim()
|
||||
pending = null
|
||||
}
|
||||
|
||||
for (const raw of md.split('\n')) {
|
||||
const headerMatch = raw.match(MILESTONE_HEADER)
|
||||
if (headerMatch && KNOWN_KEYS.includes(headerMatch[1])) {
|
||||
flushPending()
|
||||
const key = headerMatch[1]
|
||||
current = {
|
||||
key,
|
||||
title: headerMatch[2],
|
||||
goal: MILESTONE_GOAL[key],
|
||||
priority: MILESTONE_PRIORITY[key],
|
||||
sprint_status: MILESTONE_SPRINT_STATUS[key],
|
||||
sort_order: milestones.length + 1,
|
||||
stories: [],
|
||||
}
|
||||
milestones.push(current)
|
||||
continue
|
||||
}
|
||||
|
||||
if (SECTION_BREAK.test(raw)) {
|
||||
flushPending()
|
||||
continue
|
||||
}
|
||||
|
||||
if (!current) continue
|
||||
|
||||
const taskMatch = raw.match(TASK_BULLET)
|
||||
if (taskMatch) {
|
||||
flushPending()
|
||||
const story: ParsedStory = {
|
||||
ref: taskMatch[2],
|
||||
title: `${taskMatch[2]}: ${taskMatch[3]}`,
|
||||
description: '',
|
||||
acceptance_criteria: '',
|
||||
status: taskMatch[1] === 'x' ? 'DONE' : 'OPEN',
|
||||
sort_order: current.stories.length + 1,
|
||||
}
|
||||
current.stories.push(story)
|
||||
pending = { story, bodyLines: [] }
|
||||
continue
|
||||
}
|
||||
|
||||
if (!pending) continue
|
||||
|
||||
const subMatch = raw.match(SUB_BULLET)
|
||||
if (subMatch) {
|
||||
const content = subMatch[1]
|
||||
if (/^Done when:/i.test(content)) {
|
||||
pending.story.acceptance_criteria = content.replace(/^Done when:\s*/i, '').trim()
|
||||
} else {
|
||||
pending.bodyLines.push(content)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (NESTED_LINE.test(raw)) {
|
||||
const tail = raw.trim()
|
||||
if (pending.bodyLines.length > 0) {
|
||||
pending.bodyLines[pending.bodyLines.length - 1] += '\n' + tail
|
||||
} else {
|
||||
pending.bodyLines.push(tail)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (raw.trim() === '') continue
|
||||
|
||||
flushPending()
|
||||
}
|
||||
|
||||
flushPending()
|
||||
|
||||
if (milestones.length < 8) {
|
||||
throw new Error(
|
||||
`Backlog parser found only ${milestones.length} milestones (expected 8). Format may have drifted in ${file}.`,
|
||||
)
|
||||
}
|
||||
|
||||
const totalStories = milestones.reduce((acc, m) => acc + m.stories.length, 0)
|
||||
if (totalStories < 60) {
|
||||
throw new Error(
|
||||
`Backlog parser found only ${totalStories} stories (expected ≥ 60). Format may have drifted in ${file}.`,
|
||||
)
|
||||
}
|
||||
|
||||
return milestones
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
const repoRoot = path.resolve(path.dirname(new URL(import.meta.url).pathname), '..', '..')
|
||||
loadBacklog(repoRoot)
|
||||
.then((milestones) => {
|
||||
const total = milestones.reduce((acc, m) => acc + m.stories.length, 0)
|
||||
const done = milestones.reduce(
|
||||
(acc, m) => acc + m.stories.filter((s) => s.status === 'DONE').length,
|
||||
0,
|
||||
)
|
||||
const open = total - done
|
||||
console.log(`Parsed ${milestones.length} milestones, ${total} stories (${done} DONE, ${open} OPEN)`)
|
||||
for (const m of milestones) {
|
||||
console.log(
|
||||
` ${m.key.padEnd(5)} ${m.title.padEnd(36)} priority=${m.priority} sprint=${m.sprint_status} stories=${m.stories.length}`,
|
||||
)
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue