feat(PBI-100/T-1100): generator-script voor product-docs migratie

Leest docs/{adr,architecture,patterns,plans,runbooks,specs,api,manual}/*.md
en produceert prisma/migrations/<ts>_seed_scrum4me_product_docs/migration.sql
met dollar-quoted INSERTs + ON CONFLICT DO NOTHING. Hergebruikt
parseProductDocMd en setProductDocFrontmatterFields voor frontmatter-
normalisatie. ID-generatie via crypto.randomUUID() (UUID v4, geen extra dep nodig).
This commit is contained in:
Janpeter Visser 2026-05-16 16:20:39 +02:00
parent a71ea6f380
commit 4010e8c296
2 changed files with 175 additions and 0 deletions

View file

@ -17,6 +17,7 @@
"postinstall": "prisma generate --generator client",
"db:insert-milestone": "tsx scripts/insert-milestone.ts",
"db:sync-model-prices": "tsx scripts/sync-model-prices.ts",
"db:gen-product-docs-migration": "tsx scripts/generate-product-docs-migration.ts",
"create-admin": "tsx scripts/create-admin.ts",
"seed": "prisma db seed",
"docs:index": "node scripts/generate-docs-index.mjs",

View file

@ -0,0 +1,174 @@
// Generator voor één-shot Prisma data-migratie die alle in-scope markdown-
// docs uit `docs/{adr,architecture,patterns,plans,runbooks,specs,api,manual}/`
// als ProductDoc-rijen insert voor het Scrum4Me-product (code='SCRUM4ME').
//
// Geen DB-access — alleen file-IO + SQL-string-output. Idempotent via
// `ON CONFLICT (product_id, folder, slug) DO NOTHING` in de gegenereerde SQL.
//
// Gebruik:
// npm run db:gen-product-docs-migration
//
// Output: `prisma/migrations/<14digits>_seed_scrum4me_product_docs/migration.sql`
//
// Fail-fast bij: ontbrekende frontmatter, ongeldige slug, >100K chars.
// Zie docs/plans/PBI-100-seed-scrum4me-docs.md voor de context.
import * as crypto from 'node:crypto'
import * as fs from 'node:fs/promises'
import * as path from 'node:path'
import { parseProductDocMd } from '../lib/product-doc-parser'
import {
setProductDocFrontmatterFields,
todayIsoDate,
} from '../lib/product-doc-frontmatter'
// Product whose ProductDoc rows are seeded by the generated migration.
const PRODUCT_CODE = 'SCRUM4ME'
// Repo-relative root containing the per-folder markdown sources.
const DOC_ROOT = 'docs'
// Hard upper bound on markdown size; larger files abort generation (fail-fast).
const MAX_CONTENT_LEN = 100_000
// Slug contract: lowercase alphanumeric first char, then up to 79 of [a-z0-9-].
const SLUG_RE = /^[a-z0-9][a-z0-9-]{0,79}$/
// API-folder to DB-enum mapping (hardcoded; mirrors lib/product-doc-folder.ts).
// NOTE: key insertion order determines processing order (via FOLDERS below),
// and therefore the order of INSERTs in the generated migration.sql.
const FOLDER_API_TO_DB = {
  adr: 'ADR',
  architecture: 'ARCHITECTURE',
  patterns: 'PATTERNS',
  plans: 'PLANS',
  runbooks: 'RUNBOOKS',
  specs: 'SPECS',
  manual: 'MANUAL',
  api: 'API',
} as const
// Typed key list so the scan loop narrows `folder` to the mapping's keys.
const FOLDERS = Object.keys(FOLDER_API_TO_DB) as (keyof typeof FOLDER_API_TO_DB)[]
// Renders a value as a single-quoted Postgres string literal. Embedded single
// quotes are doubled (''), the escape that is safe regardless of the server's
// standard_conforming_strings setting.
function pgString(s: string): string {
  const escaped = s.split("'").join("''")
  return "'" + escaped + "'"
}
// Finds a dollar-quote tag (`doc_<8 hex chars>`) whose delimiter `$tag$` does
// not occur anywhere in `content`, so the content can be embedded verbatim in a
// dollar-quoted SQL string. Gives up after 32 random draws (astronomically
// unlikely for real markdown).
function pickUniqueDollarTag(content: string): string {
  let attempt = 0
  while (attempt < 32) {
    const candidate = `doc_${crypto.randomBytes(4).toString('hex')}`
    const delimiter = `$${candidate}$`
    if (!content.includes(delimiter)) {
      return candidate
    }
    attempt += 1
  }
  throw new Error('Kon na 32 pogingen geen vrije dollar-quote tag vinden')
}
async function main() {
const repoRoot = process.cwd()
const inserts: string[] = []
const errors: string[] = []
const perFolder: Record<string, number> = {}
let total = 0
for (const folder of FOLDERS) {
const dir = path.join(repoRoot, DOC_ROOT, folder)
let entries: string[]
try {
entries = await fs.readdir(dir)
} catch (err) {
errors.push(`${folder}: kan map niet lezen (${(err as Error).message})`)
continue
}
const files = entries
.filter((f) => f.endsWith('.md') && f.toLowerCase() !== 'readme.md')
.sort()
perFolder[folder] = 0
for (const file of files) {
const filePath = path.join(dir, file)
const slug = file.replace(/\.md$/, '')
if (!SLUG_RE.test(slug)) {
errors.push(`${folder}/${file}: ongeldige slug "${slug}"`)
continue
}
const raw = await fs.readFile(filePath, 'utf8')
if (raw.length > MAX_CONTENT_LEN) {
errors.push(`${folder}/${file}: ${raw.length} chars > limiet ${MAX_CONTENT_LEN}`)
continue
}
const parsed = parseProductDocMd(raw)
if (!parsed.ok) {
errors.push(`${folder}/${file}: ${JSON.stringify(parsed.errors)}`)
continue
}
const normalized = setProductDocFrontmatterFields(raw, {
last_updated: todayIsoDate(),
})
const tag = pickUniqueDollarTag(normalized)
const id = crypto.randomUUID()
const folderEnum = FOLDER_API_TO_DB[folder]
inserts.push(
`INSERT INTO product_docs (id, product_id, folder, slug, title, content_md, status, created_by, created_at, updated_at)
SELECT
${pgString(id)},
p.id,
${pgString(folderEnum)}::"ProductDocFolder",
${pgString(slug)},
${pgString(parsed.frontmatter.title)},
$${tag}$${normalized}$${tag}$,
${pgString(parsed.frontmatter.status)},
p.user_id,
NOW(),
NOW()
FROM products p WHERE p.code = ${pgString(PRODUCT_CODE)}
ON CONFLICT (product_id, folder, slug) DO NOTHING;`,
)
perFolder[folder]++
total++
}
}
if (errors.length > 0) {
console.error('Generator-fouten:')
for (const e of errors) console.error(` - ${e}`)
process.exit(1)
}
const ts = new Date()
.toISOString()
.replace(/[-:T.Z]/g, '')
.slice(0, 14)
const migDirName = `${ts}_seed_scrum4me_product_docs`
const migDir = path.join(repoRoot, 'prisma', 'migrations', migDirName)
await fs.mkdir(migDir, { recursive: true })
const folderCounts = Object.entries(perFolder)
.map(([f, n]) => ` -- ${f}: ${n}`)
.join('\n')
const header = `-- Generated by scripts/generate-product-docs-migration.ts on ${todayIsoDate()}.
-- Seeds ${total} ProductDoc rows for product code='${PRODUCT_CODE}'.
-- Idempotent via ON CONFLICT (product_id, folder, slug) DO NOTHING.
--
-- Per-folder counts:
${folderCounts}
UPDATE products
SET enabled_doc_folders = ARRAY['ADR','ARCHITECTURE','PATTERNS','PLANS','RUNBOOKS','SPECS','MANUAL','API']::"ProductDocFolder"[]
WHERE code = ${pgString(PRODUCT_CODE)};
`
const outPath = path.join(migDir, 'migration.sql')
await fs.writeFile(outPath, header + inserts.join('\n\n') + '\n', 'utf8')
console.log(`Wrote ${total} INSERTs to ${path.relative(repoRoot, outPath)}`)
for (const [f, n] of Object.entries(perFolder)) {
console.log(` ${f}: ${n}`)
}
}
// Script entry point: surface any failure on stderr and exit non-zero.
void main().catch((err: unknown) => {
  console.error(err)
  process.exit(1)
})