Compare commits
28 commits
fix/agent-
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
27cba872a8 | ||
|
|
7e049ebdef | ||
|
|
84b3afbefa | ||
|
|
2b746af1a3 | ||
|
|
5c1f047259 | ||
|
|
68c4d037cf | ||
|
|
e0c2536a8c | ||
|
|
8b72a00127 | ||
|
|
97420b93cf | ||
|
|
7d5a7576bf | ||
|
|
87f554083d | ||
|
|
93b50254e5 | ||
|
|
0514810115 | ||
|
|
7c6781e47f | ||
|
|
fda7be3214 | ||
|
|
84d7bb0add | ||
|
|
44e9280de1 | ||
|
|
6581a9ef33 | ||
|
|
eea3c4b993 | ||
|
|
2b11b999c0 | ||
|
|
e5423de319 | ||
|
|
f6d0807a81 | ||
|
|
08d4b48190 | ||
|
|
faa1463cd7 | ||
|
|
ae63876f21 | ||
|
|
b604a828a1 | ||
|
|
f7821c05be | ||
|
|
199ff06a88 |
39 changed files with 3469 additions and 61 deletions
|
|
@ -7,3 +7,5 @@ OPS_AGENT_URL="http://127.0.0.1:3099"
|
|||
REPO_PATHS="/srv/scrum4me/repos/scrum4me,/srv/ops/repos/ops-dashboard"
|
||||
# Comma-separated list of systemd unit names to show on the /systemd page (must match commands.yml allowed list)
|
||||
SYSTEMD_UNITS="scrum4me-web,ops-agent"
|
||||
# Worker run-logs directory inside the container (read-only bind mount; see docker-compose.yml)
|
||||
WORKER_LOGS_DIR="/var/worker-logs/idea"
|
||||
|
|
|
|||
69
app/_components/AuditWidget.tsx
Normal file
69
app/_components/AuditWidget.tsx
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import Link from 'next/link'
|
||||
import { apiFetch } from '@/lib/csrf'
|
||||
import { relativeTime } from '@/lib/utils'
|
||||
|
||||
type LatestRun = { id: string; flow_key: string; status: string; started_at: string }
|
||||
export type AuditInitial =
|
||||
| { data: LatestRun | null; error: null }
|
||||
| { data: null; error: string }
|
||||
|
||||
const STATUS_STYLES: Record<string, string> = {
|
||||
pending: 'bg-zinc-100 text-zinc-600 dark:bg-zinc-800 dark:text-zinc-400',
|
||||
running: 'bg-amber-100 text-amber-700 dark:bg-amber-900/30 dark:text-amber-400',
|
||||
success: 'bg-green-100 text-green-800 dark:bg-green-900/30 dark:text-green-400',
|
||||
failed: 'bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400',
|
||||
cancelled: 'bg-zinc-100 text-zinc-600 dark:bg-zinc-800 dark:text-zinc-400',
|
||||
}
|
||||
|
||||
async function fetchLatestRun(): Promise<LatestRun | null> {
|
||||
const res = await apiFetch('/api/audit/latest')
|
||||
if (!res.ok) throw new Error(`${res.status}`)
|
||||
const json = (await res.json()) as { run: LatestRun | null }
|
||||
return json.run
|
||||
}
|
||||
|
||||
export default function AuditWidget({ initial }: { initial: AuditInitial }) {
|
||||
const [data, setData] = useState<LatestRun | null>(initial.data)
|
||||
const [error, setError] = useState<string | null>(initial.error)
|
||||
|
||||
const refresh = useCallback(async () => {
|
||||
try {
|
||||
const run = await fetchLatestRun()
|
||||
setData(run)
|
||||
setError(null)
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'refresh failed')
|
||||
}
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
const id = setInterval(refresh, 30_000)
|
||||
return () => clearInterval(id)
|
||||
}, [refresh])
|
||||
|
||||
return (
|
||||
<Link href="/audit" className="block rounded-lg border bg-card p-5 transition-colors hover:bg-accent">
|
||||
<h2 className="text-sm font-medium text-muted-foreground">Audit</h2>
|
||||
{error ? (
|
||||
<p className="mt-2 text-sm text-destructive truncate">{error}</p>
|
||||
) : data ? (
|
||||
<div className="mt-2 space-y-1">
|
||||
<div className="flex items-center gap-2">
|
||||
<span
|
||||
className={`inline-flex items-center rounded-full px-2 py-0.5 text-xs font-medium ${STATUS_STYLES[data.status] ?? ''}`}
|
||||
>
|
||||
{data.status}
|
||||
</span>
|
||||
<span className="text-xs text-muted-foreground">{relativeTime(new Date(data.started_at))}</span>
|
||||
</div>
|
||||
<p className="font-mono text-xs text-muted-foreground truncate">{data.flow_key}</p>
|
||||
</div>
|
||||
) : (
|
||||
<p className="mt-2 text-sm text-muted-foreground">geen runs</p>
|
||||
)}
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
76
app/_components/CaddyWidget.tsx
Normal file
76
app/_components/CaddyWidget.tsx
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import Link from 'next/link'
|
||||
import { parseCertList } from '@/lib/parse-caddy'
|
||||
import { fetchAgentOutput } from '@/lib/agent-fetch'
|
||||
|
||||
type CaddyData = { soonestExpiryMs: number | null; count: number; expiringWarning: boolean }
|
||||
export type CaddyInitial = { data: CaddyData; error: null } | { data: null; error: string }
|
||||
|
||||
async function refreshCaddy(): Promise<CaddyData> {
|
||||
const output = await fetchAgentOutput('caddy_list_certs')
|
||||
const certs = parseCertList(output)
|
||||
const expiryTimes = certs
|
||||
.filter((c) => c.notAfter)
|
||||
.map((c) => new Date(c.notAfter).getTime())
|
||||
const soonestExpiryMs = expiryTimes.length > 0 ? Math.min(...expiryTimes) : null
|
||||
const expiringWarning = certs.some((c) => c.expiringWarning)
|
||||
return { soonestExpiryMs, count: certs.length, expiringWarning }
|
||||
}
|
||||
|
||||
function daysUntil(ms: number): number {
|
||||
return Math.floor((ms - Date.now()) / (1000 * 60 * 60 * 24))
|
||||
}
|
||||
|
||||
export default function CaddyWidget({ initial }: { initial: CaddyInitial }) {
|
||||
const [data, setData] = useState<CaddyData | null>(initial.data)
|
||||
const [error, setError] = useState<string | null>(initial.error)
|
||||
|
||||
const refresh = useCallback(async () => {
|
||||
try {
|
||||
const d = await refreshCaddy()
|
||||
setData(d)
|
||||
setError(null)
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'refresh failed')
|
||||
}
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
const id = setInterval(refresh, 30_000)
|
||||
return () => clearInterval(id)
|
||||
}, [refresh])
|
||||
|
||||
return (
|
||||
<Link href="/caddy" className="block rounded-lg border bg-card p-5 transition-colors hover:bg-accent">
|
||||
<h2 className="text-sm font-medium text-muted-foreground">Caddy / TLS</h2>
|
||||
{error ? (
|
||||
<p className="mt-2 text-sm text-destructive truncate">{error}</p>
|
||||
) : data ? (
|
||||
<div className="mt-2">
|
||||
{data.soonestExpiryMs !== null ? (
|
||||
<div className="flex items-center gap-2">
|
||||
<p className="text-2xl font-semibold">
|
||||
{daysUntil(data.soonestExpiryMs)}
|
||||
<span className="text-sm font-normal text-muted-foreground"> dagen tot expiry</span>
|
||||
</p>
|
||||
{data.expiringWarning && (
|
||||
<span className="inline-flex items-center rounded-full px-2 py-0.5 text-xs font-medium bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400">
|
||||
<30d
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<p className="text-sm text-muted-foreground">no certs</p>
|
||||
)}
|
||||
<p className="mt-1 text-xs text-muted-foreground">
|
||||
{data.count} cert{data.count !== 1 ? 's' : ''}
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
<p className="mt-2 text-sm text-muted-foreground">—</p>
|
||||
)}
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
54
app/_components/DockerWidget.tsx
Normal file
54
app/_components/DockerWidget.tsx
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import Link from 'next/link'
|
||||
import { parseDockerPs } from '@/lib/parse-docker'
|
||||
import { fetchAgentOutput } from '@/lib/agent-fetch'
|
||||
|
||||
type DockerData = { running: number; total: number }
|
||||
export type DockerInitial = { data: DockerData; error: null } | { data: null; error: string }
|
||||
|
||||
async function refreshDocker(): Promise<DockerData> {
|
||||
const output = await fetchAgentOutput('docker_ps')
|
||||
const containers = parseDockerPs(output)
|
||||
return {
|
||||
running: containers.filter((c) => c.status.toLowerCase().startsWith('up')).length,
|
||||
total: containers.length,
|
||||
}
|
||||
}
|
||||
|
||||
export default function DockerWidget({ initial }: { initial: DockerInitial }) {
|
||||
const [data, setData] = useState<DockerData | null>(initial.data)
|
||||
const [error, setError] = useState<string | null>(initial.error)
|
||||
|
||||
const refresh = useCallback(async () => {
|
||||
try {
|
||||
const d = await refreshDocker()
|
||||
setData(d)
|
||||
setError(null)
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'refresh failed')
|
||||
}
|
||||
}, [])
|
||||
|
||||
useEffect(() => {
|
||||
const id = setInterval(refresh, 30_000)
|
||||
return () => clearInterval(id)
|
||||
}, [refresh])
|
||||
|
||||
return (
|
||||
<Link href="/docker" className="block rounded-lg border bg-card p-5 transition-colors hover:bg-accent">
|
||||
<h2 className="text-sm font-medium text-muted-foreground">Docker</h2>
|
||||
{error ? (
|
||||
<p className="mt-2 text-sm text-destructive truncate">{error}</p>
|
||||
) : data ? (
|
||||
<p className="mt-2 text-2xl font-semibold">
|
||||
{data.running}
|
||||
<span className="text-sm font-normal text-muted-foreground"> / {data.total} running</span>
|
||||
</p>
|
||||
) : (
|
||||
<p className="mt-2 text-sm text-muted-foreground">—</p>
|
||||
)}
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
77
app/_components/GitWidget.tsx
Normal file
77
app/_components/GitWidget.tsx
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import Link from 'next/link'
|
||||
import { parseGitStatus } from '@/lib/parse-git'
|
||||
import { fetchAgentOutput } from '@/lib/agent-fetch'
|
||||
|
||||
type GitData = { dirty: number; total: number }
|
||||
export type GitInitial =
|
||||
| { configured: false }
|
||||
| { data: GitData; error: null }
|
||||
| { data: null; error: string }
|
||||
|
||||
async function refreshGit(repos: string[]): Promise<GitData> {
|
||||
const results = await Promise.allSettled(
|
||||
repos.map(async (path) => {
|
||||
const output = await fetchAgentOutput('git_status', [path])
|
||||
return parseGitStatus(output)
|
||||
}),
|
||||
)
|
||||
const dirty = results.filter(
|
||||
(r) => r.status === 'fulfilled' && r.value.dirty,
|
||||
).length
|
||||
return { dirty, total: repos.length }
|
||||
}
|
||||
|
||||
export default function GitWidget({ initial, repos }: { initial: GitInitial; repos: string[] }) {
|
||||
const notConfigured = 'configured' in initial && initial.configured === false
|
||||
const [data, setData] = useState<GitData | null>(
|
||||
!notConfigured && 'data' in initial ? initial.data : null,
|
||||
)
|
||||
const [error, setError] = useState<string | null>(
|
||||
!notConfigured && 'error' in initial ? initial.error : null,
|
||||
)
|
||||
|
||||
const refresh = useCallback(async () => {
|
||||
if (notConfigured || repos.length === 0) return
|
||||
try {
|
||||
const d = await refreshGit(repos)
|
||||
setData(d)
|
||||
setError(null)
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'refresh failed')
|
||||
}
|
||||
}, [notConfigured, repos])
|
||||
|
||||
useEffect(() => {
|
||||
if (notConfigured) return
|
||||
const id = setInterval(refresh, 30_000)
|
||||
return () => clearInterval(id)
|
||||
}, [refresh, notConfigured])
|
||||
|
||||
return (
|
||||
<Link href="/git" className="block rounded-lg border bg-card p-5 transition-colors hover:bg-accent">
|
||||
<h2 className="text-sm font-medium text-muted-foreground">Git</h2>
|
||||
{notConfigured ? (
|
||||
<p className="mt-2 text-sm text-muted-foreground">niet geconfigureerd</p>
|
||||
) : error ? (
|
||||
<p className="mt-2 text-sm text-destructive truncate">{error}</p>
|
||||
) : data ? (
|
||||
<p
|
||||
className={[
|
||||
'mt-2 text-2xl font-semibold',
|
||||
data.dirty === 0 ? 'text-green-600' : 'text-orange-500',
|
||||
].join(' ')}
|
||||
>
|
||||
{data.dirty}/{data.total}
|
||||
<span className="text-sm font-normal text-muted-foreground">
|
||||
{' '}repos uncommitted
|
||||
</span>
|
||||
</p>
|
||||
) : (
|
||||
<p className="mt-2 text-sm text-muted-foreground">—</p>
|
||||
)}
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
79
app/_components/SystemdWidget.tsx
Normal file
79
app/_components/SystemdWidget.tsx
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import Link from 'next/link'
|
||||
import { parseSystemctlStatus } from '@/lib/parse-systemd'
|
||||
import { fetchAgentOutput } from '@/lib/agent-fetch'
|
||||
|
||||
type SystemdData = { healthy: number; total: number }
|
||||
export type SystemdInitial =
|
||||
| { configured: false }
|
||||
| { data: SystemdData; error: null }
|
||||
| { data: null; error: string }
|
||||
|
||||
async function refreshSystemd(units: string[]): Promise<SystemdData> {
|
||||
const results = await Promise.allSettled(
|
||||
units.map(async (unit) => {
|
||||
const output = await fetchAgentOutput('systemctl_status', [unit])
|
||||
return parseSystemctlStatus(output, unit)
|
||||
}),
|
||||
)
|
||||
const healthy = results.filter(
|
||||
(r) => r.status === 'fulfilled' && r.value.activeState === 'active',
|
||||
).length
|
||||
return { healthy, total: units.length }
|
||||
}
|
||||
|
||||
export default function SystemdWidget({ initial, units }: { initial: SystemdInitial; units: string[] }) {
|
||||
const notConfigured = 'configured' in initial && initial.configured === false
|
||||
const [data, setData] = useState<SystemdData | null>(
|
||||
!notConfigured && 'data' in initial ? initial.data : null,
|
||||
)
|
||||
const [error, setError] = useState<string | null>(
|
||||
!notConfigured && 'error' in initial ? initial.error : null,
|
||||
)
|
||||
|
||||
const refresh = useCallback(async () => {
|
||||
if (notConfigured || units.length === 0) return
|
||||
try {
|
||||
const d = await refreshSystemd(units)
|
||||
setData(d)
|
||||
setError(null)
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'refresh failed')
|
||||
}
|
||||
}, [notConfigured, units])
|
||||
|
||||
useEffect(() => {
|
||||
if (notConfigured) return
|
||||
const id = setInterval(refresh, 30_000)
|
||||
return () => clearInterval(id)
|
||||
}, [refresh, notConfigured])
|
||||
|
||||
return (
|
||||
<Link href="/systemd" className="block rounded-lg border bg-card p-5 transition-colors hover:bg-accent">
|
||||
<h2 className="text-sm font-medium text-muted-foreground">systemd</h2>
|
||||
{notConfigured ? (
|
||||
<p className="mt-2 text-sm text-muted-foreground">niet geconfigureerd</p>
|
||||
) : error ? (
|
||||
<p className="mt-2 text-sm text-destructive truncate">{error}</p>
|
||||
) : data ? (
|
||||
<p
|
||||
className={[
|
||||
'mt-2 text-2xl font-semibold',
|
||||
data.total > 0 && data.healthy === data.total
|
||||
? 'text-green-600'
|
||||
: data.healthy > 0
|
||||
? 'text-orange-500'
|
||||
: 'text-destructive',
|
||||
].join(' ')}
|
||||
>
|
||||
{data.healthy}/{data.total}
|
||||
<span className="text-sm font-normal text-muted-foreground"> healthy</span>
|
||||
</p>
|
||||
) : (
|
||||
<p className="mt-2 text-sm text-muted-foreground">—</p>
|
||||
)}
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
20
app/api/audit/latest/route.ts
Normal file
20
app/api/audit/latest/route.ts
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
import { NextResponse } from 'next/server'
|
||||
import { getCurrentUser } from '@/lib/session'
|
||||
import { prisma } from '@/lib/prisma'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function GET() {
|
||||
const user = await getCurrentUser()
|
||||
if (!user) return NextResponse.json({ error: 'unauthorized' }, { status: 401 })
|
||||
|
||||
const run = await prisma.flowRun.findFirst({
|
||||
where: { user_id: user.id },
|
||||
orderBy: { started_at: 'desc' },
|
||||
select: { id: true, flow_key: true, status: true, started_at: true },
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
run: run ? { ...run, started_at: run.started_at.toISOString() } : null,
|
||||
})
|
||||
}
|
||||
32
app/api/worker-logs/[name]/route.ts
Normal file
32
app/api/worker-logs/[name]/route.ts
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
import { NextRequest } from 'next/server'
|
||||
import { getCurrentUser } from '@/lib/session'
|
||||
import { readRunLog, WorkerLogError } from '@/lib/worker-logs'
|
||||
import { parseRunLog } from '@/lib/parse-worker-log'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
// GET /api/worker-logs/<file>.log — full parsed timeline for one run-log.
|
||||
export async function GET(
|
||||
_request: NextRequest,
|
||||
{ params }: { params: Promise<{ name: string }> },
|
||||
) {
|
||||
const user = await getCurrentUser()
|
||||
if (!user) {
|
||||
return Response.json({ error: 'unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { name: rawName } = await params
|
||||
const name = decodeURIComponent(rawName)
|
||||
|
||||
try {
|
||||
const raw = await readRunLog(name)
|
||||
return Response.json(parseRunLog(raw, name))
|
||||
} catch (err) {
|
||||
if (err instanceof WorkerLogError) {
|
||||
const status = err.code === 'invalid' ? 400 : err.code === 'not-found' ? 404 : 500
|
||||
return Response.json({ error: err.message }, { status })
|
||||
}
|
||||
const message = err instanceof Error ? err.message : 'failed to read worker log'
|
||||
return Response.json({ error: message }, { status: 500 })
|
||||
}
|
||||
}
|
||||
25
app/api/worker-logs/route.ts
Normal file
25
app/api/worker-logs/route.ts
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
import { NextRequest } from 'next/server'
|
||||
import { getCurrentUser } from '@/lib/session'
|
||||
import { listRunLogs } from '@/lib/worker-logs'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
// GET /api/worker-logs?limit=10 — newest-first run-log summaries for the table.
|
||||
export async function GET(request: NextRequest) {
|
||||
const user = await getCurrentUser()
|
||||
if (!user) {
|
||||
return Response.json({ error: 'unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const limitParam = request.nextUrl.searchParams.get('limit')
|
||||
const limit = limitParam ? Number(limitParam) : 10
|
||||
|
||||
try {
|
||||
const logs = await listRunLogs(limit)
|
||||
return Response.json({ logs })
|
||||
} catch (err) {
|
||||
// Surfaces a missing bind mount legibly (e.g. WORKER_LOGS_DIR not mounted).
|
||||
const message = err instanceof Error ? err.message : 'failed to list worker logs'
|
||||
return Response.json({ error: message }, { status: 500 })
|
||||
}
|
||||
}
|
||||
25
app/caddy/_components/caddy-codemirror.tsx
Normal file
25
app/caddy/_components/caddy-codemirror.tsx
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
'use client'
|
||||
import CodeMirror from '@uiw/react-codemirror'
|
||||
import { caddyfileLanguage } from '@/lib/codemirror/caddyfile-mode'
|
||||
import { EditorView } from '@codemirror/view'
|
||||
|
||||
type Props = {
|
||||
value: string
|
||||
onChange: (next: string) => void
|
||||
readOnly?: boolean
|
||||
}
|
||||
|
||||
export default function CaddyCodeMirror({ value, onChange, readOnly }: Props) {
|
||||
return (
|
||||
<CodeMirror
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
readOnly={readOnly}
|
||||
extensions={[caddyfileLanguage, EditorView.lineWrapping]}
|
||||
theme="dark"
|
||||
height="480px"
|
||||
basicSetup={{ lineNumbers: true, foldGutter: false, highlightActiveLine: !readOnly }}
|
||||
className="rounded-lg border border-border overflow-hidden text-xs"
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
|
@ -1,11 +1,21 @@
|
|||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import dynamic from 'next/dynamic'
|
||||
import Link from 'next/link'
|
||||
import { useFlowRun } from '@/hooks/useFlowRun'
|
||||
import ConfirmDialog from '@/components/ConfirmDialog'
|
||||
import StreamingTerminal from '@/components/StreamingTerminal'
|
||||
|
||||
const CaddyCodeMirror = dynamic(() => import('./caddy-codemirror'), {
|
||||
ssr: false,
|
||||
loading: () => (
|
||||
<div className="h-[480px] rounded-lg border border-border bg-zinc-950 p-4 text-xs text-zinc-500">
|
||||
Loading editor…
|
||||
</div>
|
||||
),
|
||||
})
|
||||
|
||||
type Phase = 'edit' | 'writing' | 'validating' | 'validated' | 'saving' | 'saved'
|
||||
|
||||
type DialogPending = 'validate' | 'save' | null
|
||||
|
|
@ -106,17 +116,13 @@ export default function CaddyEditor({ initialContent, initialError }: Props) {
|
|||
</span>
|
||||
)}
|
||||
</div>
|
||||
<textarea
|
||||
<CaddyCodeMirror
|
||||
value={content}
|
||||
onChange={(e) => {
|
||||
setContent(e.target.value)
|
||||
// Reset validated state if user edits after validation
|
||||
onChange={(next) => {
|
||||
setContent(next)
|
||||
if (phase === 'validated' || phase === 'saved') setPhase('edit')
|
||||
}}
|
||||
readOnly={isActive}
|
||||
rows={24}
|
||||
spellCheck={false}
|
||||
className="w-full rounded-lg border border-border bg-zinc-950 p-4 font-mono text-xs text-zinc-100 focus:outline-none focus:ring-1 focus:ring-ring resize-y disabled:opacity-50"
|
||||
/>
|
||||
</div>
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import { redirect } from 'next/navigation'
|
||||
import Link from 'next/link'
|
||||
import { codeToHtml } from 'shiki'
|
||||
import { createHighlighter, type Highlighter } from 'shiki'
|
||||
import caddyfileGrammar from '@/lib/grammars/caddyfile.json'
|
||||
import { getCurrentUser } from '@/lib/session'
|
||||
import { execAgent } from '@/lib/agent-client'
|
||||
import { parseCertList, type CertInfo } from '@/lib/parse-caddy'
|
||||
|
|
@ -8,6 +9,18 @@ import CaddyView from './_components/caddy-view'
|
|||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
let highlighterPromise: Promise<Highlighter> | null = null
|
||||
function getHighlighter() {
|
||||
if (!highlighterPromise) {
|
||||
highlighterPromise = createHighlighter({
|
||||
themes: ['github-dark'],
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
langs: [caddyfileGrammar as any],
|
||||
})
|
||||
}
|
||||
return highlighterPromise
|
||||
}
|
||||
|
||||
export default async function CaddyPage() {
|
||||
const user = await getCurrentUser()
|
||||
if (!user) redirect('/login')
|
||||
|
|
@ -16,7 +29,8 @@ export default async function CaddyPage() {
|
|||
let configError: string | null = null
|
||||
try {
|
||||
const raw = await execAgent('caddy_show_config')
|
||||
configHtml = await codeToHtml(raw || '# (empty)', {
|
||||
const highlighter = await getHighlighter()
|
||||
configHtml = highlighter.codeToHtml(raw || '# (empty)', {
|
||||
lang: 'caddyfile',
|
||||
theme: 'github-dark',
|
||||
})
|
||||
|
|
|
|||
|
|
@ -5,6 +5,11 @@ import { getCurrentUser } from '@/lib/session'
|
|||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const FLOWS = [
|
||||
{
|
||||
href: '/flows/redeploy-all',
|
||||
title: 'Redeploy All',
|
||||
desc: 'Volledige stack-redeploy: scrum4me-web + MCP-worker (cache-busted)',
|
||||
},
|
||||
{
|
||||
href: '/flows/update-scrum4me-web',
|
||||
title: 'Update Scrum4Me website',
|
||||
|
|
|
|||
135
app/flows/redeploy-all/_components/flow-panel.tsx
Normal file
135
app/flows/redeploy-all/_components/flow-panel.tsx
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
'use client'
|
||||
|
||||
import { useState, useCallback } from 'react'
|
||||
import Link from 'next/link'
|
||||
import { useFlowRun } from '@/hooks/useFlowRun'
|
||||
import StreamingTerminal from '@/components/StreamingTerminal'
|
||||
import ConfirmDialog from '@/components/ConfirmDialog'
|
||||
|
||||
const FLOW_KEY = 'redeploy_all'
|
||||
|
||||
const STEPS = [
|
||||
'git status Scrum4Me (show current state)',
|
||||
'git fetch Scrum4Me (fetch remote refs)',
|
||||
'git log (commits ahead of upstream)',
|
||||
'git pull --ff-only Scrum4Me (aborts if dirty)',
|
||||
'npm ci (install dependencies)',
|
||||
'prisma migrate deploy (apply migrations)',
|
||||
'npm run build (build application)',
|
||||
'systemctl restart scrum4me-web',
|
||||
'smoke test: curl /api/products (expect 200 or 401)',
|
||||
'git status scrum4me-docker (show current state)',
|
||||
'git fetch scrum4me-docker (fetch remote refs)',
|
||||
'git pull --ff-only scrum4me-docker (aborts if dirty)',
|
||||
'git pull --ff-only scrum4me-mcp (lokale sync)',
|
||||
'rebuild worker image — cache-busted MCP clone',
|
||||
'docker compose up -d --force-recreate worker-idea',
|
||||
'wait for worker pre-flight to pass',
|
||||
]
|
||||
|
||||
export default function FlowPanel() {
|
||||
const [pendingDryRun, setPendingDryRun] = useState<boolean | null>(null)
|
||||
const [completedFlowRunId, setCompletedFlowRunId] = useState<string | null>(null)
|
||||
|
||||
const handleComplete = useCallback((flowRunId: string) => {
|
||||
setCompletedFlowRunId(flowRunId)
|
||||
}, [])
|
||||
|
||||
const flowRun = useFlowRun(handleComplete)
|
||||
|
||||
const handleConfirm = useCallback(() => {
|
||||
if (pendingDryRun === null) return
|
||||
const dryRun = pendingDryRun
|
||||
setPendingDryRun(null)
|
||||
setCompletedFlowRunId(null)
|
||||
flowRun.startFlow(FLOW_KEY, dryRun)
|
||||
}, [pendingDryRun, flowRun])
|
||||
|
||||
const handleReset = useCallback(() => {
|
||||
flowRun.reset()
|
||||
setCompletedFlowRunId(null)
|
||||
}, [flowRun])
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
<div className="rounded-lg border border-border p-5 space-y-4">
|
||||
<div>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Volledige stack-redeploy: eerst de hoofd-app (scrum4me-web — pull,
|
||||
migrate, build, restart), dan de MCP-worker (cache-busted image
|
||||
rebuild zodat de nieuwe scrum4me-mcp code wordt opgepikt).
|
||||
</p>
|
||||
<p className="mt-1 text-xs text-muted-foreground font-mono">
|
||||
repos: Scrum4Me · scrum4me-docker · scrum4me-mcp
|
||||
</p>
|
||||
</div>
|
||||
<ol className="space-y-1">
|
||||
{STEPS.map((step, i) => (
|
||||
<li key={i} className="flex gap-2 text-xs font-mono text-muted-foreground">
|
||||
<span className="text-border min-w-[1.5rem]">{i + 1}.</span>
|
||||
<span>{step}</span>
|
||||
</li>
|
||||
))}
|
||||
</ol>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-3">
|
||||
<button
|
||||
onClick={() => setPendingDryRun(false)}
|
||||
disabled={flowRun.status === 'running'}
|
||||
className="rounded-lg bg-foreground text-background px-4 py-2 text-sm font-medium hover:opacity-90 disabled:opacity-50 transition-opacity"
|
||||
>
|
||||
Run
|
||||
</button>
|
||||
<button
|
||||
onClick={() => setPendingDryRun(true)}
|
||||
disabled={flowRun.status === 'running'}
|
||||
className="rounded-lg border border-border px-4 py-2 text-sm hover:bg-muted/50 disabled:opacity-50 transition-colors"
|
||||
>
|
||||
Dry Run
|
||||
</button>
|
||||
{flowRun.status !== 'idle' && flowRun.status !== 'running' && (
|
||||
<button
|
||||
onClick={handleReset}
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
>
|
||||
Reset
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{flowRun.status !== 'idle' && (
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm font-medium">Output</span>
|
||||
{completedFlowRunId && (
|
||||
<Link
|
||||
href={`/audit/${completedFlowRunId}`}
|
||||
className="text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
>
|
||||
View in audit log →
|
||||
</Link>
|
||||
)}
|
||||
</div>
|
||||
<StreamingTerminal
|
||||
lines={flowRun.lines}
|
||||
status={flowRun.status}
|
||||
error={flowRun.error}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<ConfirmDialog
|
||||
open={pendingDryRun !== null}
|
||||
title={pendingDryRun ? 'Dry Run: Redeploy All' : 'Run: Redeploy All'}
|
||||
commandPreview={
|
||||
pendingDryRun
|
||||
? `[DRY RUN] flow: ${FLOW_KEY}\n\nAll steps will be shown without executing.`
|
||||
: `flow: ${FLOW_KEY}\n\nSteps:\n${STEPS.map((s, i) => ` ${i + 1}. ${s}`).join('\n')}`
|
||||
}
|
||||
onConfirm={handleConfirm}
|
||||
onCancel={() => setPendingDryRun(null)}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
27
app/flows/redeploy-all/page.tsx
Normal file
27
app/flows/redeploy-all/page.tsx
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
import Link from 'next/link'
|
||||
import { redirect } from 'next/navigation'
|
||||
import { getCurrentUser } from '@/lib/session'
|
||||
import FlowPanel from './_components/flow-panel'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export default async function RedeployAllPage() {
|
||||
const user = await getCurrentUser()
|
||||
if (!user) redirect('/login')
|
||||
|
||||
return (
|
||||
<div className="min-h-screen bg-background p-6">
|
||||
<div className="mx-auto max-w-4xl space-y-6">
|
||||
<div className="flex items-center gap-3">
|
||||
<Link href="/" className="text-sm text-muted-foreground hover:text-foreground">
|
||||
← Home
|
||||
</Link>
|
||||
<span className="text-muted-foreground">/</span>
|
||||
<h1 className="text-2xl font-semibold tracking-tight">Redeploy All</h1>
|
||||
</div>
|
||||
|
||||
<FlowPanel />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
|
@ -1,6 +1,7 @@
|
|||
import type { Metadata } from "next";
|
||||
import { Geist, Geist_Mono } from "next/font/google";
|
||||
import "./globals.css";
|
||||
import AppNav from "@/components/AppNav";
|
||||
|
||||
const geistSans = Geist({
|
||||
variable: "--font-geist-sans",
|
||||
|
|
@ -13,8 +14,8 @@ const geistMono = Geist_Mono({
|
|||
});
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Create Next App",
|
||||
description: "Generated by create next app",
|
||||
title: "Ops Dashboard",
|
||||
description: "Live overzicht en bediening van Docker, systemd, Caddy en deploys.",
|
||||
};
|
||||
|
||||
export default function RootLayout({
|
||||
|
|
@ -27,7 +28,10 @@ export default function RootLayout({
|
|||
lang="en"
|
||||
className={`${geistSans.variable} ${geistMono.variable} h-full antialiased`}
|
||||
>
|
||||
<body className="min-h-full flex flex-col">{children}</body>
|
||||
<body className="min-h-full flex flex-col">
|
||||
<AppNav />
|
||||
<main className="flex-1">{children}</main>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
143
app/page.tsx
143
app/page.tsx
|
|
@ -1,23 +1,127 @@
|
|||
import Link from 'next/link'
|
||||
import { redirect } from 'next/navigation'
|
||||
import { getCurrentUser } from '@/lib/session'
|
||||
import { execAgent } from '@/lib/agent-client'
|
||||
import { prisma } from '@/lib/prisma'
|
||||
import { parseDockerPs } from '@/lib/parse-docker'
|
||||
import { parseCertList } from '@/lib/parse-caddy'
|
||||
import { parseSystemctlStatus } from '@/lib/parse-systemd'
|
||||
import { parseGitStatus } from '@/lib/parse-git'
|
||||
import DockerWidget, { type DockerInitial } from './_components/DockerWidget'
|
||||
import CaddyWidget, { type CaddyInitial } from './_components/CaddyWidget'
|
||||
import SystemdWidget, { type SystemdInitial } from './_components/SystemdWidget'
|
||||
import GitWidget, { type GitInitial } from './_components/GitWidget'
|
||||
import AuditWidget, { type AuditInitial } from './_components/AuditWidget'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const SECTIONS = [
|
||||
{ href: '/docker', title: 'Docker', desc: 'Containers en status' },
|
||||
{ href: '/git', title: 'Git', desc: 'Repo checkouts en diffs' },
|
||||
{ href: '/systemd', title: 'systemd', desc: 'Services en journals' },
|
||||
{ href: '/caddy', title: 'Caddy', desc: 'Config en certs' },
|
||||
{ href: '/flows', title: 'Flows', desc: 'Multi-step deployments' },
|
||||
{ href: '/audit', title: 'Audit', desc: 'Command-log en runs' },
|
||||
{ href: '/settings', title: 'Settings', desc: 'Backups en config' },
|
||||
]
|
||||
|
||||
export default async function Home() {
|
||||
const user = await getCurrentUser()
|
||||
if (!user) redirect('/login')
|
||||
|
||||
const systemdUnits = (process.env.SYSTEMD_UNITS ?? '')
|
||||
.split(',')
|
||||
.map((u) => u.trim())
|
||||
.filter(Boolean)
|
||||
|
||||
const repoPaths = (process.env.REPO_PATHS ?? '')
|
||||
.split(',')
|
||||
.map((p) => p.trim())
|
||||
.filter(Boolean)
|
||||
|
||||
const [[dockerResult, caddyResult, auditResult], unitResults, repoResults] = await Promise.all([
|
||||
Promise.allSettled([
|
||||
execAgent('docker_ps'),
|
||||
execAgent('caddy_list_certs'),
|
||||
prisma.flowRun.findFirst({
|
||||
where: { user_id: user.id },
|
||||
orderBy: { started_at: 'desc' },
|
||||
select: { id: true, flow_key: true, status: true, started_at: true },
|
||||
}),
|
||||
]),
|
||||
Promise.allSettled(systemdUnits.map((unit) => execAgent('systemctl_status', [unit]))),
|
||||
Promise.allSettled(repoPaths.map((path) => execAgent('git_status', [path]))),
|
||||
])
|
||||
|
||||
// Docker widget initial state
|
||||
const dockerInitial: DockerInitial =
|
||||
dockerResult.status === 'rejected'
|
||||
? { data: null, error: dockerResult.reason instanceof Error ? dockerResult.reason.message : 'failed' }
|
||||
: (() => {
|
||||
const containers = parseDockerPs(dockerResult.value)
|
||||
return {
|
||||
data: {
|
||||
running: containers.filter((c) => c.status.toLowerCase().startsWith('up')).length,
|
||||
total: containers.length,
|
||||
},
|
||||
error: null,
|
||||
}
|
||||
})()
|
||||
|
||||
// Caddy widget initial state
|
||||
const caddyInitial: CaddyInitial =
|
||||
caddyResult.status === 'rejected'
|
||||
? { data: null, error: caddyResult.reason instanceof Error ? caddyResult.reason.message : 'failed' }
|
||||
: (() => {
|
||||
const certs = parseCertList(caddyResult.value)
|
||||
const expiryTimes = certs
|
||||
.filter((c) => c.notAfter)
|
||||
.map((c) => new Date(c.notAfter).getTime())
|
||||
return {
|
||||
data: {
|
||||
soonestExpiryMs: expiryTimes.length > 0 ? Math.min(...expiryTimes) : null,
|
||||
count: certs.length,
|
||||
expiringWarning: certs.some((c) => c.expiringWarning),
|
||||
},
|
||||
error: null,
|
||||
}
|
||||
})()
|
||||
|
||||
// Systemd widget initial state
|
||||
let systemdInitial: SystemdInitial
|
||||
if (systemdUnits.length === 0) {
|
||||
systemdInitial = { configured: false }
|
||||
} else if (unitResults.every((r) => r.status === 'rejected')) {
|
||||
const first = unitResults[0]
|
||||
systemdInitial = {
|
||||
data: null,
|
||||
error: first.status === 'rejected' && first.reason instanceof Error ? first.reason.message : 'all units failed',
|
||||
}
|
||||
} else {
|
||||
const healthy = unitResults.reduce((count, r, i) => {
|
||||
if (r.status !== 'fulfilled') return count
|
||||
return parseSystemctlStatus(r.value, systemdUnits[i]).activeState === 'active' ? count + 1 : count
|
||||
}, 0)
|
||||
systemdInitial = { data: { healthy, total: systemdUnits.length }, error: null }
|
||||
}
|
||||
|
||||
// Git widget initial state
|
||||
let gitInitial: GitInitial
|
||||
if (repoPaths.length === 0) {
|
||||
gitInitial = { configured: false }
|
||||
} else if (repoResults.every((r) => r.status === 'rejected')) {
|
||||
const first = repoResults[0]
|
||||
gitInitial = {
|
||||
data: null,
|
||||
error: first.status === 'rejected' && first.reason instanceof Error ? first.reason.message : 'all repos failed',
|
||||
}
|
||||
} else {
|
||||
const dirty = repoResults.filter(
|
||||
(r) => r.status === 'fulfilled' && parseGitStatus(r.value).dirty,
|
||||
).length
|
||||
gitInitial = { data: { dirty, total: repoPaths.length }, error: null }
|
||||
}
|
||||
|
||||
// Audit widget initial state
|
||||
const auditInitial: AuditInitial =
|
||||
auditResult.status === 'rejected'
|
||||
? { data: null, error: auditResult.reason instanceof Error ? auditResult.reason.message : 'failed' }
|
||||
: {
|
||||
data: auditResult.value
|
||||
? { ...auditResult.value, started_at: auditResult.value.started_at.toISOString() }
|
||||
: null,
|
||||
error: null,
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="min-h-screen bg-background p-6">
|
||||
<div className="mx-auto max-w-6xl space-y-6">
|
||||
|
|
@ -25,17 +129,12 @@ export default async function Home() {
|
|||
<h1 className="text-2xl font-semibold tracking-tight">Ops Dashboard</h1>
|
||||
<p className="text-sm text-muted-foreground">Welkom {user.email}</p>
|
||||
</div>
|
||||
<div className="grid grid-cols-1 gap-4 sm:grid-cols-2 md:grid-cols-3">
|
||||
{SECTIONS.map((s) => (
|
||||
<Link
|
||||
key={s.href}
|
||||
href={s.href}
|
||||
className="block rounded-lg border bg-card p-5 transition-colors hover:bg-accent"
|
||||
>
|
||||
<h2 className="text-lg font-medium">{s.title}</h2>
|
||||
<p className="mt-1 text-sm text-muted-foreground">{s.desc}</p>
|
||||
</Link>
|
||||
))}
|
||||
<div className="grid grid-cols-1 gap-4 sm:grid-cols-2 lg:grid-cols-3">
|
||||
<DockerWidget initial={dockerInitial} />
|
||||
<CaddyWidget initial={caddyInitial} />
|
||||
<SystemdWidget initial={systemdInitial} units={systemdUnits} />
|
||||
<GitWidget initial={gitInitial} repos={repoPaths} />
|
||||
<AuditWidget initial={auditInitial} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
291
app/worker-logs/_components/run-log-detail.tsx
Normal file
291
app/worker-logs/_components/run-log-detail.tsx
Normal file
|
|
@ -0,0 +1,291 @@
|
|||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState, type ReactElement } from 'react'
|
||||
import type { LogEvent, MetaTag, ParsedRunLog } from '@/lib/parse-worker-log'
|
||||
import { cn, formatDuration } from '@/lib/utils'
|
||||
|
||||
async function fetchDetail(fileName: string): Promise<ParsedRunLog> {
|
||||
const res = await fetch(`/api/worker-logs/${encodeURIComponent(fileName)}`, { cache: 'no-store' })
|
||||
const body = await res.json().catch(() => ({}))
|
||||
if (!res.ok) throw new Error(body?.error ?? `request failed (${res.status})`)
|
||||
return body as ParsedRunLog
|
||||
}
|
||||
|
||||
const META_TAG_STYLES: Record<MetaTag, string> = {
|
||||
claim: 'text-muted-foreground',
|
||||
auth: 'text-muted-foreground',
|
||||
quota: 'text-muted-foreground',
|
||||
'no-job': 'text-muted-foreground',
|
||||
claimed: 'text-blue-600 dark:text-blue-400',
|
||||
worktree: 'text-muted-foreground',
|
||||
config: 'text-blue-600 dark:text-blue-400',
|
||||
payload: 'text-muted-foreground',
|
||||
spawn: 'text-blue-600 dark:text-blue-400',
|
||||
'claude-done': 'text-blue-600 dark:text-blue-400',
|
||||
cleanup: 'text-muted-foreground',
|
||||
exit: 'text-muted-foreground',
|
||||
error: 'text-destructive',
|
||||
'token-expired': 'text-destructive',
|
||||
timeout: 'text-muted-foreground',
|
||||
other: 'text-muted-foreground',
|
||||
}
|
||||
|
||||
function timeOnly(ts: string | null): string {
|
||||
if (!ts) return ''
|
||||
const d = new Date(ts)
|
||||
return isNaN(d.getTime()) ? '' : d.toLocaleTimeString()
|
||||
}
|
||||
|
||||
function inputPreview(input: string): string {
|
||||
const oneLine = input.replace(/\s+/g, ' ').trim()
|
||||
return oneLine.length > 100 ? `${oneLine.slice(0, 100)}…` : oneLine
|
||||
}
|
||||
|
||||
function TruncNote({ chars }: { chars?: number }) {
|
||||
return (
|
||||
<div className="mt-0.5 text-[11px] italic text-muted-foreground">
|
||||
— afgekapt{chars != null ? ` (${chars} chars totaal)` : ''}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
function EventBlock({ event }: { event: LogEvent }): ReactElement {
|
||||
switch (event.kind) {
|
||||
case 'meta':
|
||||
return (
|
||||
<div className="flex gap-2 py-0.5 font-mono text-[11px] leading-relaxed">
|
||||
<span className="shrink-0 text-muted-foreground/60">{timeOnly(event.ts)}</span>
|
||||
<span className={cn('shrink-0 uppercase tracking-wide', META_TAG_STYLES[event.tag])}>
|
||||
{event.tag}
|
||||
</span>
|
||||
<span className="break-all text-muted-foreground">{event.text}</span>
|
||||
</div>
|
||||
)
|
||||
|
||||
case 'system-init':
|
||||
return (
|
||||
<div className="my-2 rounded-lg border border-border bg-card p-3 text-xs">
|
||||
<div className="mb-1 font-medium text-foreground">Sessie gestart</div>
|
||||
<div className="grid grid-cols-2 gap-x-4 gap-y-1 text-muted-foreground sm:grid-cols-3">
|
||||
<div>
|
||||
<span className="text-foreground/70">model</span> {event.model}
|
||||
</div>
|
||||
<div>
|
||||
<span className="text-foreground/70">permission</span> {event.permissionMode}
|
||||
</div>
|
||||
<div>
|
||||
<span className="text-foreground/70">claude</span> v{event.version || '?'}
|
||||
</div>
|
||||
</div>
|
||||
{event.cwd && (
|
||||
<div className="mt-1 break-all font-mono text-[11px] text-muted-foreground">
|
||||
cwd: {event.cwd}
|
||||
</div>
|
||||
)}
|
||||
{(event.tools.length > 0 || event.mcpServers.length > 0) && (
|
||||
<details className="mt-2">
|
||||
<summary className="cursor-pointer text-muted-foreground hover:text-foreground">
|
||||
{event.tools.length} tools · {event.mcpServers.length} MCP-server(s)
|
||||
</summary>
|
||||
<div className="mt-1 font-mono text-[11px] text-muted-foreground">
|
||||
{event.mcpServers.length > 0 && <div>mcp: {event.mcpServers.join(', ')}</div>}
|
||||
<div className="break-words">{event.tools.join(', ')}</div>
|
||||
</div>
|
||||
</details>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
|
||||
case 'assistant-text':
|
||||
return (
|
||||
<div className="my-1.5 border-l-2 border-blue-300 pl-3 dark:border-blue-700">
|
||||
<div className="whitespace-pre-wrap text-sm text-foreground">{event.text}</div>
|
||||
{event.truncated && <TruncNote />}
|
||||
</div>
|
||||
)
|
||||
|
||||
case 'thinking':
|
||||
return (
|
||||
<details className="my-1 pl-3">
|
||||
<summary className="cursor-pointer text-xs italic text-muted-foreground hover:text-foreground">
|
||||
thinking…
|
||||
</summary>
|
||||
<div className="mt-1 whitespace-pre-wrap border-l-2 border-border pl-3 text-xs italic text-muted-foreground">
|
||||
{event.text}
|
||||
{event.truncated && <TruncNote />}
|
||||
</div>
|
||||
</details>
|
||||
)
|
||||
|
||||
case 'tool-call':
|
||||
return (
|
||||
<details open className="my-1">
|
||||
<summary className="cursor-pointer list-none">
|
||||
<span className="inline-flex max-w-full items-center gap-2 rounded-md bg-muted px-2 py-1 text-xs">
|
||||
<span className="shrink-0 font-medium text-foreground">▸ {event.name}</span>
|
||||
<span className="truncate font-mono text-[11px] text-muted-foreground">
|
||||
{inputPreview(event.input)}
|
||||
</span>
|
||||
</span>
|
||||
</summary>
|
||||
<pre className="ml-2 mt-1 overflow-x-auto rounded-md border border-border bg-muted/30 p-2 font-mono text-[11px] leading-relaxed">
|
||||
{event.input}
|
||||
</pre>
|
||||
{event.truncated && <TruncNote />}
|
||||
</details>
|
||||
)
|
||||
|
||||
case 'tool-result':
|
||||
return (
|
||||
<details className="my-1">
|
||||
<summary className="cursor-pointer list-none">
|
||||
<span
|
||||
className={cn(
|
||||
'inline-flex items-center gap-2 rounded-md px-2 py-1 text-xs',
|
||||
event.isError
|
||||
? 'bg-destructive/10 text-destructive'
|
||||
: 'bg-muted text-muted-foreground',
|
||||
)}
|
||||
>
|
||||
<span>{event.isError ? '✕ result (error)' : '◂ result'}</span>
|
||||
<span className="text-[11px] opacity-70">{event.fullLength} chars</span>
|
||||
</span>
|
||||
</summary>
|
||||
<pre
|
||||
className={cn(
|
||||
'ml-2 mt-1 max-h-80 overflow-auto whitespace-pre-wrap break-all rounded-md border p-2 font-mono text-[11px] leading-relaxed',
|
||||
event.isError ? 'border-destructive/30 bg-destructive/5' : 'border-border bg-muted/30',
|
||||
)}
|
||||
>
|
||||
{event.body || '(body weggelaten — timeline ingekort)'}
|
||||
</pre>
|
||||
{event.truncated && <TruncNote chars={event.fullLength} />}
|
||||
</details>
|
||||
)
|
||||
|
||||
case 'rate-limit':
|
||||
return (
|
||||
<div className="my-1 text-xs">
|
||||
<span className="rounded-md bg-amber-100 px-2 py-0.5 text-amber-800 dark:bg-amber-900/30 dark:text-amber-400">
|
||||
rate limit: {event.status}
|
||||
</span>
|
||||
</div>
|
||||
)
|
||||
|
||||
case 'result':
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'my-2 rounded-lg border p-3',
|
||||
event.isError
|
||||
? 'border-destructive/30 bg-destructive/10'
|
||||
: 'border-green-300 bg-green-50 dark:border-green-800 dark:bg-green-900/20',
|
||||
)}
|
||||
>
|
||||
<div className="flex flex-wrap items-center gap-3 text-xs">
|
||||
<span className="font-medium text-foreground">Resultaat: {event.subtype}</span>
|
||||
{event.durationMs != null && (
|
||||
<span className="text-muted-foreground">{formatDuration(event.durationMs)}</span>
|
||||
)}
|
||||
{event.numTurns != null && (
|
||||
<span className="text-muted-foreground">{event.numTurns} turns</span>
|
||||
)}
|
||||
{event.totalCostUsd != null && (
|
||||
<span className="text-muted-foreground">${event.totalCostUsd.toFixed(2)}</span>
|
||||
)}
|
||||
</div>
|
||||
{event.resultText && (
|
||||
<div className="mt-2 whitespace-pre-wrap text-sm text-foreground">
|
||||
{event.resultText}
|
||||
</div>
|
||||
)}
|
||||
{event.resultTruncated && <TruncNote />}
|
||||
</div>
|
||||
)
|
||||
|
||||
case 'raw':
|
||||
return (
|
||||
<div className="break-all py-0.5 font-mono text-[11px] text-muted-foreground/70">
|
||||
{event.text}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export default function RunLogDetail({ fileName }: { fileName: string }) {
|
||||
const [data, setData] = useState<ParsedRunLog | null>(null)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const [loading, setLoading] = useState(true)
|
||||
|
||||
const load = useCallback(async () => {
|
||||
try {
|
||||
const d = await fetchDetail(fileName)
|
||||
setData(d)
|
||||
setError(null)
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'kon log niet laden')
|
||||
} finally {
|
||||
setLoading(false)
|
||||
}
|
||||
}, [fileName])
|
||||
|
||||
useEffect(() => {
|
||||
setLoading(true)
|
||||
setData(null)
|
||||
setError(null)
|
||||
load()
|
||||
}, [load])
|
||||
|
||||
// Keep refreshing while the run is still in progress.
|
||||
useEffect(() => {
|
||||
if (!data?.inProgress) return
|
||||
const id = setInterval(load, 5000)
|
||||
return () => clearInterval(id)
|
||||
}, [data?.inProgress, load])
|
||||
|
||||
if (loading) {
|
||||
return <div className="animate-pulse text-xs text-muted-foreground">log laden…</div>
|
||||
}
|
||||
if (error) {
|
||||
return (
|
||||
<div className="rounded-md border border-destructive/30 bg-destructive/10 px-3 py-2 text-xs text-destructive">
|
||||
{error}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
if (!data) return null
|
||||
|
||||
const { summary, events } = data
|
||||
|
||||
return (
|
||||
<div className="space-y-2">
|
||||
<div className="flex flex-wrap items-center gap-3 text-xs text-muted-foreground">
|
||||
<span className="font-mono text-foreground">{summary.fileName}</span>
|
||||
{summary.jobId && <span className="font-mono">job {summary.jobId}</span>}
|
||||
{summary.model && <span>{summary.model}</span>}
|
||||
{summary.permissionMode && <span>{summary.permissionMode}</span>}
|
||||
{summary.durationMs != null && <span>{formatDuration(summary.durationMs)}</span>}
|
||||
{data.inProgress && (
|
||||
<span className="animate-pulse text-amber-600 dark:text-amber-400">● running…</span>
|
||||
)}
|
||||
{data.responseTruncated && (
|
||||
<span className="italic">timeline ingekort (zeer grote log)</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{summary.errorSummary && (
|
||||
<div className="rounded-md border border-destructive/30 bg-destructive/10 px-3 py-2 text-xs text-destructive">
|
||||
{summary.errorSummary}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="rounded-lg border border-border bg-background p-3">
|
||||
{events.length === 0 ? (
|
||||
<div className="text-xs text-muted-foreground">geen events</div>
|
||||
) : (
|
||||
events.map((event, i) => <EventBlock key={i} event={event} />)
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
202
app/worker-logs/_components/worker-logs-view.tsx
Normal file
202
app/worker-logs/_components/worker-logs-view.tsx
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
'use client'
|
||||
|
||||
import { Fragment, useCallback, useEffect, useState } from 'react'
|
||||
import type { RunLogSummary, RunStatus } from '@/lib/parse-worker-log'
|
||||
import { cn, formatDuration, relativeTime } from '@/lib/utils'
|
||||
import RunLogDetail from './run-log-detail'
|
||||
|
||||
const LIMIT_OPTIONS = [10, 25, 50, 100]
|
||||
const COLUMN_COUNT = 7
|
||||
|
||||
const STATUS_STYLES: Record<RunStatus, { badge: string; dot: string }> = {
|
||||
idle: {
|
||||
badge: 'bg-zinc-100 text-zinc-600 dark:bg-zinc-800 dark:text-zinc-400',
|
||||
dot: 'bg-zinc-400 dark:bg-zinc-500',
|
||||
},
|
||||
running: {
|
||||
badge: 'bg-amber-100 text-amber-700 dark:bg-amber-900/30 dark:text-amber-400',
|
||||
dot: 'bg-amber-500 dark:bg-amber-400',
|
||||
},
|
||||
success: {
|
||||
badge: 'bg-green-100 text-green-800 dark:bg-green-900/30 dark:text-green-400',
|
||||
dot: 'bg-green-500 dark:bg-green-400',
|
||||
},
|
||||
error: {
|
||||
badge: 'bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400',
|
||||
dot: 'bg-red-500 dark:bg-red-400',
|
||||
},
|
||||
'token-expired': {
|
||||
badge: 'bg-red-100 text-red-700 dark:bg-red-900/30 dark:text-red-400',
|
||||
dot: 'bg-red-500 dark:bg-red-400',
|
||||
},
|
||||
unknown: {
|
||||
badge: 'bg-zinc-100 text-zinc-600 dark:bg-zinc-800 dark:text-zinc-400',
|
||||
dot: 'bg-zinc-400 dark:bg-zinc-500',
|
||||
},
|
||||
}
|
||||
|
||||
export function StatusBadge({ status }: { status: RunStatus }) {
|
||||
const s = STATUS_STYLES[status]
|
||||
return (
|
||||
<span
|
||||
className={cn(
|
||||
'inline-flex items-center gap-1.5 rounded-full px-2 py-0.5 text-xs font-medium',
|
||||
s.badge,
|
||||
)}
|
||||
>
|
||||
<span className={cn('size-1.5 rounded-full', s.dot)} />
|
||||
{status}
|
||||
</span>
|
||||
)
|
||||
}
|
||||
|
||||
async function fetchLogs(limit: number): Promise<RunLogSummary[]> {
|
||||
const res = await fetch(`/api/worker-logs?limit=${limit}`, { cache: 'no-store' })
|
||||
const body = await res.json().catch(() => ({}))
|
||||
if (!res.ok) throw new Error(body?.error ?? `request failed (${res.status})`)
|
||||
return (body.logs ?? []) as RunLogSummary[]
|
||||
}
|
||||
|
||||
type Props = {
|
||||
initialLogs: RunLogSummary[]
|
||||
initialError: string | null
|
||||
}
|
||||
|
||||
export default function WorkerLogsView({ initialLogs, initialError }: Props) {
|
||||
const [logs, setLogs] = useState<RunLogSummary[]>(initialLogs)
|
||||
const [limit, setLimit] = useState(10)
|
||||
const [selected, setSelected] = useState<string | null>(null)
|
||||
const [error, setError] = useState<string | null>(initialError)
|
||||
const [refreshing, setRefreshing] = useState(false)
|
||||
const [lastUpdated, setLastUpdated] = useState<Date>(new Date())
|
||||
|
||||
const refresh = useCallback(async () => {
|
||||
setRefreshing(true)
|
||||
try {
|
||||
const data = await fetchLogs(limit)
|
||||
setLogs(data)
|
||||
setError(null)
|
||||
setLastUpdated(new Date())
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'refresh failed')
|
||||
} finally {
|
||||
setRefreshing(false)
|
||||
}
|
||||
}, [limit])
|
||||
|
||||
useEffect(() => {
|
||||
refresh()
|
||||
const id = setInterval(refresh, 10000)
|
||||
return () => clearInterval(id)
|
||||
}, [refresh])
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-xs text-muted-foreground">toon</span>
|
||||
{LIMIT_OPTIONS.map((opt) => (
|
||||
<button
|
||||
key={opt}
|
||||
onClick={() => setLimit(opt)}
|
||||
className={cn(
|
||||
'rounded-md border px-2 py-1 text-xs transition-colors',
|
||||
limit === opt
|
||||
? 'border-foreground/30 bg-muted font-medium text-foreground'
|
||||
: 'border-border text-muted-foreground hover:bg-muted/50',
|
||||
)}
|
||||
>
|
||||
{opt}
|
||||
</button>
|
||||
))}
|
||||
{refreshing && (
|
||||
<span className="text-xs text-muted-foreground animate-pulse">refreshing…</span>
|
||||
)}
|
||||
</div>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
updated {lastUpdated.toLocaleTimeString()} · auto-refreshes every 10s
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="rounded-lg border border-destructive/30 bg-destructive/10 px-4 py-3 text-sm text-destructive">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="overflow-x-auto rounded-lg border border-border">
|
||||
<table className="w-full text-sm">
|
||||
<thead>
|
||||
<tr className="border-b border-border bg-muted/50">
|
||||
<th className="px-4 py-3 text-left font-medium text-muted-foreground">Started</th>
|
||||
<th className="px-4 py-3 text-left font-medium text-muted-foreground">Status</th>
|
||||
<th className="px-4 py-3 text-left font-medium text-muted-foreground">Job</th>
|
||||
<th className="px-4 py-3 text-left font-medium text-muted-foreground">Model</th>
|
||||
<th className="px-4 py-3 text-left font-medium text-muted-foreground">Turns</th>
|
||||
<th className="px-4 py-3 text-left font-medium text-muted-foreground">Duration</th>
|
||||
<th className="px-4 py-3 text-left font-medium text-muted-foreground">Cost</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{logs.length === 0 && !error ? (
|
||||
<tr>
|
||||
<td colSpan={COLUMN_COUNT} className="px-4 py-8 text-center text-muted-foreground">
|
||||
No worker runs found
|
||||
</td>
|
||||
</tr>
|
||||
) : (
|
||||
logs.map((log) => {
|
||||
const isSelected = selected === log.fileName
|
||||
return (
|
||||
<Fragment key={log.fileName}>
|
||||
<tr
|
||||
onClick={() => setSelected(isSelected ? null : log.fileName)}
|
||||
title={log.errorSummary ?? undefined}
|
||||
className={cn(
|
||||
'cursor-pointer border-b border-border transition-colors',
|
||||
isSelected ? 'bg-muted/50' : 'hover:bg-muted/30',
|
||||
)}
|
||||
>
|
||||
<td className="px-4 py-3 text-xs">
|
||||
{log.startedAt ? (
|
||||
<span title={new Date(log.startedAt).toLocaleString()}>
|
||||
{relativeTime(new Date(log.startedAt))}
|
||||
</span>
|
||||
) : (
|
||||
<span className="font-mono">{log.runId}</span>
|
||||
)}
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<StatusBadge status={log.status} />
|
||||
</td>
|
||||
<td className="px-4 py-3 font-mono text-xs text-muted-foreground">
|
||||
{log.jobId ? `…${log.jobId.slice(-8)}` : '—'}
|
||||
</td>
|
||||
<td className="px-4 py-3 text-xs text-muted-foreground">{log.model ?? '—'}</td>
|
||||
<td className="px-4 py-3 text-xs text-muted-foreground">
|
||||
{log.numTurns ?? '—'}
|
||||
</td>
|
||||
<td className="px-4 py-3 text-xs text-muted-foreground">
|
||||
{log.durationMs != null ? formatDuration(log.durationMs) : '—'}
|
||||
</td>
|
||||
<td className="px-4 py-3 text-xs text-muted-foreground">
|
||||
{log.totalCostUsd != null ? `$${log.totalCostUsd.toFixed(2)}` : '—'}
|
||||
</td>
|
||||
</tr>
|
||||
{isSelected && (
|
||||
<tr className="border-b border-border bg-muted/20">
|
||||
<td colSpan={COLUMN_COUNT} className="px-4 py-4">
|
||||
<RunLogDetail fileName={log.fileName} />
|
||||
</td>
|
||||
</tr>
|
||||
)}
|
||||
</Fragment>
|
||||
)
|
||||
})
|
||||
)}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
34
app/worker-logs/page.tsx
Normal file
34
app/worker-logs/page.tsx
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
import { redirect } from 'next/navigation'
|
||||
import { getCurrentUser } from '@/lib/session'
|
||||
import { listRunLogs } from '@/lib/worker-logs'
|
||||
import type { RunLogSummary } from '@/lib/parse-worker-log'
|
||||
import WorkerLogsView from './_components/worker-logs-view'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export default async function WorkerLogsPage() {
|
||||
const user = await getCurrentUser()
|
||||
if (!user) redirect('/login')
|
||||
|
||||
let initialLogs: RunLogSummary[] = []
|
||||
let initialError: string | null = null
|
||||
try {
|
||||
initialLogs = await listRunLogs(10)
|
||||
} catch (err) {
|
||||
initialError = err instanceof Error ? err.message : 'Failed to read worker logs'
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="min-h-screen bg-background p-6">
|
||||
<div className="mx-auto max-w-6xl space-y-6">
|
||||
<div>
|
||||
<h1 className="text-2xl font-semibold tracking-tight">Worker Logs</h1>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Recente runs van de Scrum4Me-worker — klik een rij voor de uitgewerkte timeline
|
||||
</p>
|
||||
</div>
|
||||
<WorkerLogsView initialLogs={initialLogs} initialError={initialError} />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
49
components/AppNav.tsx
Normal file
49
components/AppNav.tsx
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
'use client'
|
||||
|
||||
import Link from 'next/link'
|
||||
import { usePathname } from 'next/navigation'
|
||||
import { cn } from '@/lib/utils'
|
||||
|
||||
const NAV_ITEMS = [
|
||||
{ href: '/', label: 'Dashboard' },
|
||||
{ href: '/docker', label: 'Docker' },
|
||||
{ href: '/git', label: 'Git' },
|
||||
{ href: '/systemd', label: 'systemd' },
|
||||
{ href: '/caddy', label: 'Caddy' },
|
||||
{ href: '/flows', label: 'Flows' },
|
||||
{ href: '/audit', label: 'Audit' },
|
||||
{ href: '/worker-logs', label: 'Worker Logs' },
|
||||
{ href: '/settings', label: 'Settings' },
|
||||
]
|
||||
|
||||
export default function AppNav() {
|
||||
const pathname = usePathname()
|
||||
|
||||
return (
|
||||
<nav className="sticky top-0 z-10 border-b border-border bg-background/95 backdrop-blur">
|
||||
<div className="mx-auto max-w-6xl px-6 py-3 flex items-center gap-6">
|
||||
<Link href="/" className="mr-2 text-sm font-semibold tracking-tight shrink-0">
|
||||
Ops Dashboard
|
||||
</Link>
|
||||
{NAV_ITEMS.map((item) => {
|
||||
const isActive =
|
||||
item.href === '/' ? pathname === '/' : pathname.startsWith(item.href)
|
||||
return (
|
||||
<Link
|
||||
key={item.href}
|
||||
href={item.href}
|
||||
className={cn(
|
||||
'text-sm transition-colors',
|
||||
isActive
|
||||
? 'text-foreground font-medium'
|
||||
: 'text-muted-foreground hover:text-foreground',
|
||||
)}
|
||||
>
|
||||
{item.label}
|
||||
</Link>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
</nav>
|
||||
)
|
||||
}
|
||||
|
|
@ -1,7 +1,10 @@
|
|||
# Block to add to /srv/scrum4me/caddy/Caddyfile
|
||||
# After adding, restart Caddy (not reload — see deploy notes):
|
||||
# docker compose restart caddy
|
||||
# Pre-condition: the Caddy container must share the Docker network of the ops-dashboard compose stack
|
||||
# so that Docker service-name resolution works.
|
||||
|
||||
ops.jp-visser.nl {
|
||||
reverse_proxy 172.18.0.1:3001
|
||||
# Use Docker service-name; Caddy must share a network with ops-dashboard.
|
||||
reverse_proxy ops-dashboard:3000
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
# Add the ops-dashboard service under the `services:` key.
|
||||
#
|
||||
# Build the image first:
|
||||
# docker build -t ops-dashboard /srv/ops/ops-dashboard
|
||||
# docker build -t ops-dashboard /srv/ops/repos/ops-dashboard
|
||||
#
|
||||
# Then bring the service up:
|
||||
# docker compose -f /srv/scrum4me/compose/docker-compose.yml up -d ops-dashboard
|
||||
|
|
@ -10,7 +10,7 @@
|
|||
services:
|
||||
ops-dashboard:
|
||||
build:
|
||||
context: /srv/ops/ops-dashboard
|
||||
context: /srv/ops/repos/ops-dashboard
|
||||
env_file: /srv/ops/ops-dashboard.env
|
||||
ports:
|
||||
- "127.0.0.1:3001:3000"
|
||||
|
|
|
|||
128
docs/handleiding.md
Normal file
128
docs/handleiding.md
Normal file
|
|
@ -0,0 +1,128 @@
|
|||
# Handleiding — Ops Dashboard
|
||||
|
||||
Voor de dagelijkse beheerder van een single-host server-stack (Docker, systemd, Git, Caddy, Postgres). Deze handleiding beschrijft *hoe* je de app gebruikt, niet *hoe* hij werkt — voor dat laatste zie [`specs/technical.md`](./specs/technical.md).
|
||||
|
||||
## Eerste inlog
|
||||
|
||||
1. Open `https://<jouw-host>/` (bv. `https://ops.jp-visser.nl`).
|
||||
2. Je wordt doorgestuurd naar `/login`. Vul de admin-credentials in die je tijdens deploy in `SEED_USER_EMAIL` / `SEED_USER_PASSWORD` hebt gezet.
|
||||
3. Na succesvolle login zit je 24 uur ingelogd via een HttpOnly-cookie. Daarna opnieuw inloggen.
|
||||
|
||||
Wachtwoord vergeten? Geen reset-flow in de app — los op met een SQL-update:
|
||||
|
||||
```bash
|
||||
# Genereer een nieuwe bcrypt-hash voor je nieuwe wachtwoord
|
||||
docker run --rm -e PW='<nieuw-wachtwoord>' node:22-alpine sh -c '
|
||||
cd /tmp && npm init -y >/dev/null 2>&1 && npm install --silent bcryptjs >/dev/null 2>&1
|
||||
node -e "console.log(require(\"bcryptjs\").hashSync(process.env.PW, 12))"
|
||||
'
|
||||
# Plak in psql:
|
||||
docker exec -it scrum4me-postgres psql -U scrum4me -d ops_dashboard \
|
||||
-c "UPDATE \"User\" SET pwd_hash = '<hash>' WHERE email = '<jouw-email>';"
|
||||
```
|
||||
|
||||
## Dashboard (home)
|
||||
|
||||
Vijf live status-widgets, auto-refresh ~5 sec:
|
||||
|
||||
| Widget | Toont |
|
||||
|---|---|
|
||||
| **Docker** | Aantal draaiende containers + lijst |
|
||||
| **Git** | Branch en uncommitted-status per geconfigureerd repo-pad |
|
||||
| **systemd** | Service-status (active/inactive/failed) per geconfigureerde unit |
|
||||
| **Caddy** | TLS-certs met dichtstbijzijnde expiratiedatum (geel = <30 dagen) |
|
||||
| **Audit** | Laatste flow-run met timestamp en exit-status |
|
||||
|
||||
Klik een widget aan om naar de detail-pagina te gaan.
|
||||
|
||||
## Modules
|
||||
|
||||
### `/docker` — Containers
|
||||
|
||||
Tabel van `docker ps` met auto-refresh. Klik op een container-naam voor detail (logs, image, ports, status).
|
||||
|
||||
Read-only — geen start/stop/restart vanuit de UI. Voor wijzigingen: een **Flow** (zie hieronder).
|
||||
|
||||
### `/git` — Repositories
|
||||
|
||||
Per geconfigureerd pad in `REPO_PATHS` (env var): branch, uncommitted-files (M/A/D/??), laatste 3 commits. Klik door voor diff-viewer.
|
||||
|
||||
Read-only — pulls/commits gaan via een Flow.
|
||||
|
||||
### `/systemd` — Services
|
||||
|
||||
Lijst van services uit `SYSTEMD_UNITS` (env var). Toont status, laatste log-regels. Klik door voor full journal-tail van die unit.
|
||||
|
||||
Restart-knop: alleen voor units die in `commands.yml` zijn whitelisted én in `sudoers.d/ops-agent` met `NOPASSWD` staan.
|
||||
|
||||
### `/caddy` — Reverse-proxy & TLS
|
||||
|
||||
Toont de actieve Caddyfile (syntax-highlighted) plus alle TLS-certs (subject + expiry). "Edit"-knop opent een editor — opslaan **valideert** de Caddyfile via `caddy validate` voordat het wordt geschreven.
|
||||
|
||||
### `/flows` — Multi-step deployments
|
||||
|
||||
Twee voor-gedefinieerde flows:
|
||||
|
||||
- **Update Scrum4Me website** — pull main, build container, restart, smoke-test
|
||||
- **Update Caddy config** — schrijf nieuwe Caddyfile, valideer, restart Caddy, verifieer dat alle hostnames nog reageren
|
||||
|
||||
Een flow draait stap-voor-stap met **dry-run** als standaard. Na dry-run zie je per stap wat het gaat doen. Klik "Run" om echt uit te voeren. Tijdens executie zie je live stdout/stderr per stap.
|
||||
|
||||
### `/audit` — Flow-runs
|
||||
|
||||
Chronologische lijst van alle gestarte flows: starttijd, duur, exit-status, wie 'm startte. Klik door voor de volledige output (stdout/stderr per stap).
|
||||
|
||||
### `/settings/backups` — Backups
|
||||
|
||||
Postgres backup-management:
|
||||
|
||||
- Lijst van bestaande dump-bestanden in `/srv/scrum4me/backups`
|
||||
- "Backup now"-knop maakt een dump met timestamp-naam
|
||||
- Restore-runbook (handmatige stappen — geen automatische restore vanuit UI om de blast-radius klein te houden)
|
||||
|
||||
## Veelvoorkomende taken
|
||||
|
||||
### Container hangt — wat nu?
|
||||
|
||||
1. `/docker` → klik container-naam → bekijk logs
|
||||
2. Diagnose? Open een SSH-sessie en gebruik `docker logs`, `docker exec` etc. (Niet vanuit de UI — dat is buiten scope.)
|
||||
3. Restart nodig? Voeg de container toe aan `commands.yml` whitelist (op de host) + run via `/flows`
|
||||
|
||||
### Caddy-config wijzigen
|
||||
|
||||
1. `/caddy` → "Edit"
|
||||
2. Pas Caddyfile aan in de editor
|
||||
3. Save → app draait `caddy validate` → bij succes wordt het geschreven en Caddy herstart
|
||||
4. Verifieer in `/caddy` dat het cert-overzicht klopt
|
||||
|
||||
> Voor breaking changes (verkeerde syntax of niet-bestaande site): de validate-stap blokkeert. Bij twijfel: maak eerst een backup van `/srv/scrum4me/caddy/Caddyfile`.
|
||||
|
||||
### Sprint mergen via flow
|
||||
|
||||
`/flows/update-scrum4me-web` — kies branch (default `main`), klik dry-run, lees wat het doet, klik "Run". Stap-output stream live. Na success: smoke-test verifieert dat de homepage 200 geeft.
|
||||
|
||||
## Wat kan **niet** vanuit de UI
|
||||
|
||||
- SSH-toegang of arbitrary shell-commando's (alleen whitelisted commands.yml-keys)
|
||||
- User-management (één admin via seed; multi-user is buiten scope)
|
||||
- Container starten met andere image of args (alleen restart van bestaande)
|
||||
- Wachtwoord reset (SQL-update vereist)
|
||||
- Cert handmatig forceren (Caddy doet auto-ACME)
|
||||
|
||||
## Logs voor incident-response
|
||||
|
||||
| Component | Log-locatie |
|
||||
|---|---|
|
||||
| Dashboard app | `docker logs scrum4me-ops-dashboard` |
|
||||
| ops-agent | `journalctl -u ops-agent -f` |
|
||||
| Caddy | `docker logs scrum4me-caddy` |
|
||||
| Postgres | `docker logs scrum4me-postgres` |
|
||||
|
||||
Audit-trail van wat-doet-wie-wanneer: tabel `FlowRun` + `FlowStep` in de `ops_dashboard` database, of via `/audit` in de UI.
|
||||
|
||||
## Veiligheidsadvies
|
||||
|
||||
- Houd port 3099 (ops-agent) **niet** open naar de buitenwereld. UFW-regel scoped op `172.18.0.0/16`. Zie [`runbooks/post-install.md`](./runbooks/post-install.md).
|
||||
- Roteer `OPS_AGENT_SECRET` jaarlijks: nieuw secret in `.env` én `/etc/ops-agent/secret`, dan beide herstarten.
|
||||
- Voeg geen wildcards toe in `sudoers.d/ops-agent` — elke `systemctl`-actie moet een expliciete service-naam zijn.
|
||||
- `commands.yml` is single source of truth voor wat de agent mag — alles wat niet in de whitelist staat, kan een aanvaller niet uitvoeren ook al heeft hij het secret.
|
||||
---

**Nieuw bestand:** `docs/runbooks/tailscale-setup.md` (+446 regels)
|
|||
# Ubuntu-omgeving (Postgres + app) via Tailscale bereikbaar maken vanaf de Mac
|
||||
|
||||
## Context
|
||||
|
||||
Er zijn twee Scrum4Me-omgevingen:
|
||||
- **Omgeving 1** — productie: Vercel + Neon (managed Postgres).
|
||||
- **Omgeving 2** — nieuw: een eigen Ubuntu-server (`scrum4me-srv`) die de
|
||||
**volledige Scrum4Me-app (Next.js achter een reverse proxy) + zelf-gehoste
|
||||
Postgres** gaat draaien.
|
||||
|
||||
Het doel: vanaf de Mac (`janpeters-macbook-pro`) omgeving 2 kunnen gebruiken —
|
||||
voor (1) een DB-client (psql/GUI), (2) de `scrum4me-docker` runner lokaal in
|
||||
Docker, en (3) lokale dev van de hoofd-Scrum4Me-app.
|
||||
|
||||
Tailscale is al geïnstalleerd en verbonden op beide machines:
|
||||
- `janpeters-macbook-pro` → `100.73.234.116`
|
||||
- `scrum4me-srv` → `100.118.195.120` (Linux, SSH aan)
|
||||
|
||||
Wat nog ontbreekt: zowel Postgres als de Next.js-app op de Ubuntu-server
|
||||
luisteren standaard alleen op `localhost` en zijn nog niet bereikbaar over de
|
||||
Tailscale-interface. De database zelf is **al volledig ingericht** (schema +
|
||||
data) — er is geen migratie- of seed-werk nodig, alleen netwerk-, auth- en
|
||||
connectie-configuratie.
|
||||
|
||||
**Beslissingen (van de gebruiker):**
|
||||
- App-deploy op Ubuntu: **reverse proxy** (nginx/Caddy) vóór Next.js.
|
||||
- DB-toegang: **hele tailnet** mag erbij (`100.64.0.0/10`) — bewuste keuze;
|
||||
later eventueel te versmallen via Tailscale ACLs/groups.
|
||||
- Postgres-rol: **nog onzeker** — het plan voegt een controle toe en adviseert
|
||||
een dedicated rol.
|
||||
|
||||
**Canonieke `SCRUM4ME_BASE_URL`:** `http://100.118.195.120` (reverse proxy op
|
||||
poort 80 op de Tailscale-interface, **plain HTTP**). Tailscale (WireGuard)
|
||||
verzorgt de transportencryptie binnen de tailnet, dus een tweede TLS-laag is
|
||||
hier niet nodig. Dit raw-IP-adres resolvet ook vanuit een Docker-container
|
||||
(geen MagicDNS-afhankelijkheid). HTTPS op de proxy is optionele hardening — zie
|
||||
de noot onderaan; kies je daarvoor, gebruik dan een hostnaam die óók vanuit
|
||||
Docker oplost en pas álle URL's hieronder consistent aan.
|
||||
|
||||
**Bevinding uit de codebase:** in `scrum4me-docker` is de DB-koppeling puur
|
||||
config. Zowel `bin/run-one-job.ts` (regel 30, 115) als de MCP-server
|
||||
(`mcp-config.json` regel 9-10) lezen `DATABASE_URL` / `DIRECT_URL` uit de
|
||||
omgeving. `bin/check-tokens.sh` (regel 35-38) doet bovendien een harde
|
||||
`curl ${SCRUM4ME_BASE_URL}/api/products` — onbereikbaarheid is fataal
|
||||
(regel 52-57). Er zijn **geen code-wijzigingen** nodig — alleen `.env`.
|
||||
|
||||
## Voorwaarden (aantoonbaar voldaan vóór uitvoering)
|
||||
|
||||
- [ ] Tailscale actief op beide machines (`tailscale status` toont beide nodes)
|
||||
- [ ] SSH naar scrum4me-srv werkt (`ssh scrum4me-srv echo ok`)
|
||||
- [ ] DB-schema aanwezig (tabellen + data) — géén migratie nodig
|
||||
|
||||
## Voorwaarden (input van de gebruiker nodig)
|
||||
|
||||
- Postgres-rol + wachtwoord + databasenaam op de Ubuntu-server (de "USER",
|
||||
"PASS", "DBNAME" hieronder). Niet in de chat delen — alleen lokaal invullen.
|
||||
- De reverse proxy biedt de app aan op `http://100.118.195.120` (poort 80).
|
||||
Wijkt dit af, pas dan overal de canonieke URL consistent aan.
|
||||
|
||||
---
|
||||
|
||||
## Deel A — Ubuntu: Postgres openstellen op de Tailscale-interface
|
||||
|
||||
Uit te voeren op `scrum4me-srv` (via `ssh scrum4me-srv` of `tailscale ssh`).
|
||||
|
||||
1. **Tailscale-IP bevestigen**
|
||||
```bash
|
||||
tailscale status
|
||||
tailscale ip -4 # verwacht: 100.118.195.120
|
||||
```
|
||||
|
||||
2. **`listen_addresses` uitbreiden** — Postgres bindt standaard alleen aan
|
||||
localhost. Vind het configbestand en pas aan:
|
||||
```bash
|
||||
sudo -u postgres psql -c 'SHOW config_file;' # bv. /etc/postgresql/16/main/postgresql.conf
|
||||
```
|
||||
Zet in dat bestand:
|
||||
```
|
||||
listen_addresses = 'localhost,100.118.195.120'
|
||||
```
|
||||
Bewust **niet** `'*'` — zo bindt Postgres alleen aan localhost + het
|
||||
Tailscale-adres, nooit aan de publieke interface.
|
||||
|
||||
> ⚠️ **Boot-order:** door aan `100.118.195.120` te binden moet `tailscale0`
|
||||
> al bestaan bij boot. Stap A6 maakt de systemd-ordering verplicht — sla A6
|
||||
> niet over, anders faalt Postgres na een reboot.
|
||||
|
||||
3. **Rol, auth-methode en grants controleren/instellen** (voorkomt een
|
||||
login- of permission-fout ná goede netwerkconfig). De rol is nog onzeker,
|
||||
dus eerst inventariseren:
|
||||
```bash
|
||||
sudo -u postgres psql -c '\du'
|
||||
sudo -u postgres psql -c 'SHOW password_encryption;'
|
||||
```
|
||||
**Advies:** gebruik (of maak) een **dedicated runtime-rol** die alleen de
|
||||
rechten heeft die de app/runner nodig heeft — geen superuser:
|
||||
```sql
|
||||
CREATE ROLE scrum4me_app LOGIN PASSWORD 'lokaal-wachtwoord';
|
||||
GRANT CONNECT ON DATABASE DBNAME TO scrum4me_app;
|
||||
|
||||
-- runtime-rechten op het bestaande (gevulde) public-schema:
|
||||
GRANT USAGE ON SCHEMA public TO scrum4me_app;
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO scrum4me_app;
|
||||
GRANT USAGE, SELECT, UPDATE ON ALL SEQUENCES IN SCHEMA public TO scrum4me_app;
|
||||
|
||||
-- zodat ook later toegevoegde tabellen/sequences werken:
|
||||
ALTER DEFAULT PRIVILEGES IN SCHEMA public
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO scrum4me_app;
|
||||
ALTER DEFAULT PRIVILEGES IN SCHEMA public
|
||||
GRANT USAGE, SELECT, UPDATE ON SEQUENCES TO scrum4me_app;
|
||||
```
|
||||
**Migraties:** deze runtime-rol krijgt bewust géén DDL-rechten. De DB is al
|
||||
ingericht, dus in normale operatie draaien er geen migraties via deze rol.
|
||||
Moet je later vanaf de Mac toch een Prisma-migratie draaien, gebruik dan de
|
||||
DB-owner-rol (apart wachtwoord), niet `scrum4me_app`.
|
||||
|
||||
**SCRAM-verifier:** stap A4 kiest `scram-sha-256`. Een rol waarvan het
|
||||
wachtwoord nog als **md5** is opgeslagen kan dan niet inloggen. Forceer een
|
||||
SCRAM-verifier door het wachtwoord opnieuw te zetten (alleen lokaal op de
|
||||
server, niet in chat/docs delen):
|
||||
```sql
|
||||
ALTER ROLE scrum4me_app WITH PASSWORD 'lokaal-wachtwoord';
|
||||
```
|
||||
|
||||
4. **`pg_hba.conf` — toegang vanaf de tailnet toestaan**
|
||||
```bash
|
||||
sudo -u postgres psql -c 'SHOW hba_file;'
|
||||
```
|
||||
Voeg een regel toe (boven de bestaande `host`-regels). De gebruiker koos
|
||||
bewust voor toegang vanaf de **hele tailnet**; maak rol en database wel
|
||||
expliciet i.p.v. `all all`:
|
||||
```
|
||||
# Scrum4Me clients via Tailscale
|
||||
host DBNAME scrum4me_app 100.64.0.0/10 scram-sha-256
|
||||
```
|
||||
Let op: hiermee mag elke tailnet-node mét geldige credentials verbinden.
|
||||
Wil je dat later inperken, doe dat via Tailscale ACLs/groups of versmal
|
||||
het CIDR naar specifieke node-IP's.
|
||||
|
||||
5. **Firewall (defense-in-depth)** — alleen relevant als `ufw` actief is:
|
||||
```bash
|
||||
sudo ufw status
|
||||
sudo ufw allow in on tailscale0 to any port 5432 proto tcp
|
||||
```
|
||||
Open 5432 **nooit** generiek (`sudo ufw allow 5432` zonder interface) —
|
||||
dat zou de DB internet-breed openstellen.
|
||||
|
||||
6. **Boot-order — VERPLICHT.** Postgres bindt aan `100.118.195.120`, een adres
|
||||
dat pas bestaat nadat `tailscaled` `tailscale0` heeft opgezet.
|
||||
**Zonder deze override faalt Postgres bij reboot** ("cannot assign requested
|
||||
address"). Voeg een systemd-override toe:
|
||||
```bash
|
||||
sudo systemctl edit postgresql # of postgresql@<versie>-main
|
||||
```
|
||||
```ini
|
||||
[Unit]
|
||||
After=tailscaled.service
|
||||
Requires=tailscaled.service
|
||||
```
|
||||
|
||||
7. **Postgres herstarten en verifiëren**
|
||||
```bash
|
||||
sudo systemctl restart postgresql
|
||||
sudo ss -tlnp | grep 5432 # moet 127.0.0.1:5432 én 100.118.195.120:5432 tonen
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Deel B — Ubuntu: de Scrum4Me-app (reverse proxy) bereikbaar maken op Tailscale
|
||||
|
||||
De runner-tokencheck (`check-tokens.sh`) cURL't `${SCRUM4ME_BASE_URL}/api/products`
|
||||
en faalt hard als die URL onbereikbaar is. De Next.js-app draait achter een
|
||||
reverse proxy, dus de **proxy** moet op het Tailscale-adres luisteren — niet
|
||||
alleen op `localhost`. Canoniek: `http://100.118.195.120` (poort 80, plain HTTP).
|
||||
|
||||
1. **Reverse proxy op de Tailscale-interface laten luisteren (poort 80)**
|
||||
- **nginx:** in het server-block het `listen`-adres aan het Tailscale-IP
|
||||
binden:
|
||||
```
|
||||
listen 100.118.195.120:80;
|
||||
```
|
||||
`sudo nginx -t` → `sudo systemctl reload nginx`.
|
||||
- **Caddy:** site-adres `http://100.118.195.120:80` in de `Caddyfile`.
|
||||
- Next.js zelf mag op `127.0.0.1:<intern>` blijven; alleen de proxy is
|
||||
extern bereikbaar.
|
||||
|
||||
2. **Boot-order — VERPLICHT.** Net als Postgres bindt de proxy aan een adres
|
||||
dat `tailscaled` eerst moet aanmaken. **Zonder deze override faalt nginx/Caddy
|
||||
bij reboot.**
|
||||
```bash
|
||||
sudo systemctl edit nginx # of caddy
|
||||
```
|
||||
```ini
|
||||
[Unit]
|
||||
After=tailscaled.service
|
||||
Requires=tailscaled.service
|
||||
```
|
||||
|
||||
3. **Firewall voor poort 80** — alleen bij actieve `ufw`:
|
||||
```bash
|
||||
sudo ufw allow in on tailscale0 to any port 80 proto tcp
|
||||
```
|
||||
|
||||
4. **Lokaal op de server verifiëren**
|
||||
```bash
|
||||
curl -fsS -H "Authorization: Bearer $SCRUM4ME_TOKEN" \
|
||||
http://100.118.195.120/api/products >/dev/null && echo OK
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Deel C — Mac: connectiviteit verifiëren (DB én app)
|
||||
|
||||
1. **Tailscale-bereik** (al bevestigd: `scrum4me-srv` zichtbaar op
|
||||
`100.118.195.120`):
|
||||
```bash
|
||||
tailscale status
|
||||
tailscale ping scrum4me-srv
|
||||
```
|
||||
|
||||
2. **MagicDNS check** — kan de Mac de server op hostnaam bereiken?
|
||||
```bash
|
||||
ping -c1 scrum4me-srv
|
||||
```
|
||||
Zo ja: native macOS-clients mogen `scrum4me-srv` als host gebruiken.
|
||||
|
||||
3. **Postgres — TCP- en psql-test**
|
||||
```bash
|
||||
nc -vz 100.118.195.120 5432
|
||||
psql "postgresql://USER:PASS@100.118.195.120:5432/DBNAME?sslmode=disable" -c '\dt'
|
||||
```
|
||||
|
||||
4. **App — vanaf de Mac én vanuit een Docker-container** (Docker Desktop heeft
|
||||
geen Tailscale-MagicDNS; daarom de canonieke raw-IP-URL):
|
||||
```bash
|
||||
# vanaf de Mac
|
||||
curl -fsS -H "Authorization: Bearer $SCRUM4ME_TOKEN" \
|
||||
http://100.118.195.120/api/products >/dev/null && echo "mac OK"
|
||||
|
||||
# vanuit een container (simuleert de runner)
|
||||
docker run --rm --env SCRUM4ME_TOKEN alpine sh -lc \
|
||||
'wget -qO- --header "Authorization: Bearer $SCRUM4ME_TOKEN" \
|
||||
http://100.118.195.120/api/products >/dev/null && echo "docker OK"'
|
||||
```
|
||||
Slaagt de container-test niet (geen route naar de tailnet vanuit Docker
|
||||
Desktop), dan moet Tailscale in/naast de runner-container draaien — apart
|
||||
uit te zoeken; eerst de directe route testen.
|
||||
|
||||
---
|
||||
|
||||
## Deel D — De drie consumenten koppelen
|
||||
|
||||
Welke host elke consument gebruikt verschilt — MagicDNS werkt wél native op
|
||||
macOS, maar niet binnen een Docker-container:
|
||||
|
||||
| Consumer | Host | Reden |
|
||||
|---|---|---|
|
||||
| DB-client (psql/TablePlus) native | `scrum4me-srv` | MagicDNS werkt op macOS |
|
||||
| scrum4me-docker runner (Docker) | `100.118.195.120` | Docker Desktop heeft geen MagicDNS |
|
||||
| Hoofd-app lokale dev | `scrum4me-srv` | MagicDNS werkt op macOS |
|
||||
|
||||
### 1. DB-client (psql / TablePlus / DBeaver) — native op macOS
|
||||
Connection-string:
|
||||
```
|
||||
postgresql://USER:PASS@scrum4me-srv:5432/DBNAME?sslmode=disable
|
||||
```
|
||||
GUI-clients: host `scrum4me-srv` (of `100.118.195.120`), poort `5432`,
|
||||
SSL uit.
|
||||
|
||||
### 2. scrum4me-docker runner — bewerk `/Users/janpetervisser/Development/scrum4me-docker/.env`
|
||||
```
|
||||
DATABASE_URL=postgresql://USER:PASS@100.118.195.120:5432/DBNAME?sslmode=disable
|
||||
DIRECT_URL=postgresql://USER:PASS@100.118.195.120:5432/DBNAME?sslmode=disable
|
||||
SCRUM4ME_BASE_URL=http://100.118.195.120
|
||||
```
|
||||
Belangrijk:
|
||||
- **Gebruik het rauwe Tailscale-IP, niet `scrum4me-srv`.** Docker Desktop-
|
||||
containers krijgen geen Tailscale-MagicDNS; de hostnaam resolvet niet
|
||||
binnen de container.
|
||||
- Laat de Neon-specifieke params (`channel_binding=require`,
|
||||
`sslmode=verify-full`) weg — die gelden niet voor zelf-gehoste Postgres.
|
||||
- Zorg dat `SCRUM4ME_TOKEN` een token van **omgeving 2** is — de tokencheck
|
||||
loopt tegen de Ubuntu-app, niet meer tegen Vercel.
|
||||
|
||||
> `SCRUM4ME_TOKEN` haal je op via de Ubuntu-app: Settings → API Tokens → nieuw
|
||||
> token aanmaken. Een bestaand Vercel-token werkt **niet** tegen de
|
||||
> Ubuntu-omgeving.
|
||||
|
||||
### 3. Hoofd-Scrum4Me-app (lokale dev) — bewerk `/Users/janpetervisser/Development/Scrum4Me`
|
||||
Dit is een **andere repo** dan `scrum4me-docker`. In die repo de `.env.local`
|
||||
(of `.env`) aanpassen. De app draait native op macOS, dus de MagicDNS-hostnaam
|
||||
`scrum4me-srv` mag hier wél:
|
||||
```
|
||||
DATABASE_URL=postgresql://USER:PASS@scrum4me-srv:5432/DBNAME?sslmode=disable
|
||||
DIRECT_URL=postgresql://USER:PASS@scrum4me-srv:5432/DBNAME?sslmode=disable
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## SSL-keuze
|
||||
|
||||
Aanbeveling: **`sslmode=disable`** voor Postgres en **plain HTTP** voor de app-
|
||||
URL. Tailscale (WireGuard) versleutelt het transport al end-to-end binnen de
|
||||
tailnet; een tweede TLS-laag op een zelf-gehoste Postgres of op de proxy levert
|
||||
hier vooral configuratie-gedoe op.
|
||||
|
||||
Optionele hardening (later, samenhangend uit te voeren):
|
||||
- TLS-certs op Postgres + `sslmode=require`.
|
||||
- HTTPS op de reverse proxy. Doe dit dan met een **DNS-naam die ook vanuit
|
||||
Docker oplost** (raw-IP + cert geeft validatiefouten), en pas `SCRUM4ME_BASE_URL`
|
||||
én alle verificatie-`curl`s consistent aan naar die `https://`-hostnaam.
|
||||
|
||||
## Twee omgevingen naast elkaar houden
|
||||
|
||||
Omdat omgeving 1 (Neon) blijft bestaan: bewaar twee env-varianten, bv.
|
||||
`.env.neon` en `.env.ubuntu`, en symlink de actieve naar `.env`:
|
||||
```bash
|
||||
ln -sf .env.ubuntu .env # activeer Ubuntu-omgeving
|
||||
ln -sf .env.neon .env # activeer Neon-omgeving
|
||||
```
|
||||
Lichtgewicht en voorkomt dat je per ongeluk de verkeerde DB raakt.
|
||||
|
||||
## Te wijzigen bestanden
|
||||
|
||||
**Op `scrum4me-srv`:**
|
||||
- `postgresql.conf` — `listen_addresses`.
|
||||
- `pg_hba.conf` — tailnet-regel voor `DBNAME` + dedicated rol.
|
||||
- Postgres-rol — dedicated `scrum4me_app`-rol + grants + `ALTER ROLE ... PASSWORD`.
|
||||
- nginx/Caddy-config — `listen` op `100.118.195.120:80`.
|
||||
- systemd-overrides — `After=/Requires=tailscaled.service` voor `postgresql`
|
||||
en de proxy.
|
||||
- evt. `ufw`-regels voor poort 5432 en 80 op `tailscale0`.
|
||||
|
||||
**Op de Mac:**
|
||||
- `/Users/janpetervisser/Development/scrum4me-docker/.env` — `DATABASE_URL`,
|
||||
`DIRECT_URL`, `SCRUM4ME_BASE_URL`, `SCRUM4ME_TOKEN`.
|
||||
- `/Users/janpetervisser/Development/Scrum4Me/.env.local` — `DATABASE_URL`,
|
||||
`DIRECT_URL` (andere repo).
|
||||
- Géén codewijzigingen in `scrum4me-docker`.
|
||||
|
||||
## Verificatie (end-to-end)
|
||||
|
||||
1. **Netwerk:** `nc -vz 100.118.195.120 5432` vanaf de Mac slaagt.
|
||||
2. **DB-client:** `psql ".../DBNAME?sslmode=disable" -c '\dt'` toont de
|
||||
Scrum4Me-tabellen; een test-`INSERT`/`SELECT` bevestigt dat de
|
||||
`scrum4me_app`-grants kloppen.
|
||||
3. **App-bereik:** de `curl`/`docker run`-tests uit Deel C-4 geven beide `OK`.
|
||||
4. **Reboot-test:** herstart `scrum4me-srv`; controleer daarna met
|
||||
`sudo ss -tlnp` dat Postgres én de proxy weer op `100.118.195.120` luisteren,
|
||||
en herhaal de Mac/Docker-connectiviteitstests.
|
||||
5. **Runner:** na `.env`-update `docker compose up -d --force-recreate`,
|
||||
dan `docker compose logs -f` — `check-tokens.sh` moet
|
||||
"OK: 100.118.195.120:5432 reachable" én "OK: SCRUM4ME_TOKEN works" loggen,
|
||||
en de daemon-loop moet een job kunnen claimen uit de Ubuntu-DB.
|
||||
6. **Hoofd-app:** lokale dev-server in `/Users/janpetervisser/Development/Scrum4Me`
|
||||
start en leest data uit de Ubuntu-DB.
|
||||
|
||||
## Veelvoorkomende fouten
|
||||
|
||||
| Fout | Oorzaak | Fix |
|
||||
|---|---|---|
|
||||
| `could not translate host name "scrum4me-srv"` | MagicDNS niet actief (Docker) | Gebruik raw IP `100.118.195.120` |
|
||||
| `cannot assign requested address` bij Postgres-start | `tailscale0` bestaat nog niet | A6 systemd-override toevoegen |
|
||||
| `FATAL: password authentication failed` | SCRAM-verifier niet bijgewerkt | `ALTER ROLE scrum4me_app WITH PASSWORD '...'` herhalen |
|
||||
|
||||
---
|
||||
|
||||
# Addendum — uitvoering Ubuntu-kant 2026-05-14
|
||||
|
||||
> Deel A + B zijn uitgevoerd. De server bleek een **andere topologie** te hebben
|
||||
> dan dit plan aannam: Postgres én de reverse proxy draaien als **Docker
|
||||
> containers**, niet host-geïnstalleerd. Dit addendum beschrijft wat er feitelijk
|
||||
> is gebeurd. Deel C + D (Mac-kant) staan nog open.
|
||||
|
||||
## Vastgestelde topologie (wijkt af van de aannames)
|
||||
|
||||
| Plan nam aan | Werkelijkheid op `scrum4me-srv` |
|
||||
|---|---|
|
||||
| Host-Postgres (`/etc/postgresql/...`, `systemctl postgresql`) | Docker container `scrum4me-postgres` (postgres:17), data-volume `/srv/scrum4me/postgres` |
|
||||
| Host nginx/Caddy | Docker container `scrum4me-caddy` (caddy:2), al luisterend op `0.0.0.0:80` + `:443` |
|
||||
| Migratie/seed mogelijk nodig | Bevestigd niet nodig — db `scrum4me` was gevuld |
|
||||
|
||||
Concrete waarden die het plan openliet:
|
||||
- **Host** = dit ís `scrum4me-srv` (`100.118.195.120`) — Deel A/B dus direct uitgevoerd, niet via SSH.
|
||||
- **DBNAME** = `scrum4me`
|
||||
- **Rol** = `scrum4me_app` aangemaakt (non-superuser, DML-only), wachtwoord lokaal gegenereerd via `openssl rand -hex 24`.
|
||||
|
||||
## Deel A — zoals feitelijk uitgevoerd (Docker-variant)
|
||||
|
||||
| Plan-stap | Aanpassing |
|
||||
|---|---|
|
||||
| **A2** `listen_addresses` in `postgresql.conf` | **N.v.t.** — de container luistert intern al op `0.0.0.0`. Host-exposure = Docker port-mapping. In `/srv/scrum4me/compose/docker-compose.yml` toegevoegd: `- "100.118.195.120:5432:5432"` náást de bestaande `127.0.0.1:5432:5432`. Specifiek IP i.p.v. `0.0.0.0` — Docker's iptables-DNAT scoped dan op dat IP, publiek blijft dicht. |
|
||||
| **A3** rol + grants | Identiek SQL, maar uitgevoerd via `docker exec -i scrum4me-postgres psql -U scrum4me -d scrum4me`. Idempotent script (`CREATE ROLE` of `ALTER ROLE ... PASSWORD`). De `ALTER ROLE ... PASSWORD` zet meteen een SCRAM-verifier. **Let op:** `CREATE ROLE` op de gedeelde productie-DB wordt door de auto-mode classifier geblokkeerd — moet via een script dat de gebruiker zelf draait. |
|
||||
| **A4** `pg_hba.conf` | Bestand zit in het data-volume: host-pad `/srv/scrum4me/postgres/pg_hba.conf` (root-owned, sudo nodig). Regel toegevoegd onderaan (append is veilig — first-match, geen conflict). **Bevinding:** de postgres-image heeft al een catch-all `host all all all scram-sha-256` — onze scoped regel is dáárdoor strikt genomen redundant. Echte bescherming = IP-scoped port-binding + ufw. Catch-all strakker maken = aparte taak (hij draagt de docker-netwerk-clients). |
|
||||
| **A5** ufw | Identiek: `ufw allow in on tailscale0 to any port 5432 proto tcp`. |
|
||||
| **A6** boot-order | **Niet** `postgresql.service` (bestaat niet) maar `docker.service`. Drop-in `/etc/systemd/system/docker.service.d/tailscale-order.conf`. Bewust `After=tailscaled.service` + **`Wants=`** i.p.v. het door het plan voorgestelde `Requires=` — `Requires` op `docker.service` is fragiel (faalt tailscaled ooit, dan start de hele docker-stack niet). `After=` lost de race op; `Wants=` trekt tailscaled mee zonder hard-fail. |
|
||||
| **A7** restart + verify | `docker compose up -d postgres` (recreate — `restart` pakt port-wijzigingen niet). `ss -tln` toont nu `127.0.0.1:5432` én `100.118.195.120:5432`. Verificatie met een wegwerp-container: `docker run --rm --network host postgres:17 psql "postgresql://scrum4me_app:...@100.118.195.120:5432/scrum4me?sslmode=disable" -c '\dt'` — `--network host` simuleert exact hoe de Mac het ziet. |
|
||||
|
||||
## Deel B — zoals feitelijk uitgevoerd (Docker-variant)
|
||||
|
||||
| Plan-stap | Aanpassing |
|
||||
|---|---|
|
||||
| **B1** proxy op tailscale-interface | **Grotendeels al gedaan** — de Caddy-container publiceert al `0.0.0.0:80`, dus luistert al op `tailscale0`. Alleen een site-block toegevoegd aan `/srv/scrum4me/caddy/Caddyfile`: `100.118.195.120:80 { reverse_proxy 172.18.0.1:3000 }`. |
|
||||
| **B2** boot-order proxy | **Niet nodig.** Caddy bindt aan `0.0.0.0:80`, niet aan een IP-specifiek adres — er is geen `tailscale0`-race. (Alleen Postgres had de IP-specifieke binding, vandaar dat A6 wél nodig was.) |
|
||||
| **B3** ufw poort 80 | **Niet nodig.** Poort 80 stond al op `ALLOW IN Anywhere`. |
|
||||
| **B4** verifiëren | `curl -sI http://100.118.195.120/` → `200 OK`, geen redirect. `/api/products` → 401 (bereikbaar, auth vereist). |
|
||||
|
||||
## Bugs / valkuilen tegengekomen tijdens uitvoering
|
||||
|
||||
1. **Caddy single-file bind-mount wordt stale na een atomic-rename edit.**
|
||||
`/srv/scrum4me/caddy/Caddyfile` is als enkel bestand ge-bind-mount. Editors
|
||||
(en de Edit-tooling) schrijven vaak via write-temp + rename = nieuwe inode.
|
||||
De container blijft naar de oude inode wijzen → `caddy reload` leest de
|
||||
**oude** content, schijnbaar zonder fout. Symptoom hier: het nieuwe
|
||||
site-block leek "stil gedropt" door Caddy's adapter, maar de container zág
|
||||
het block simpelweg niet.
|
||||
**Fix / regel:** na een Caddyfile-edit `docker compose up -d --force-recreate
|
||||
caddy` (of `restart`) — **niet** `caddy reload`. De recreate her-bindt de
|
||||
mount op de nieuwe inode. (Eerder in het project werkte een Caddyfile-edit
|
||||
wél, juist omdat daar toevallig een `restart` op volgde.)
|
||||
|
||||
2. **`http://<IP>` vs `<IP>:80` syntax — bleek een rode haring.**
|
||||
Aanvankelijk leek Caddy's Caddyfile-adapter `http://100.118.195.120` te
|
||||
droppen. Geïsoleerd getest werkte beide syntaxen prima; het echte probleem
|
||||
was bug #1 (stale mount). De definitieve regel gebruikt `100.118.195.120:80`
|
||||
— ondubbelzinnig plain-HTTP-op-poort-80.
|
||||
|
||||
## Verificatie-status
|
||||
|
||||
| Plan verificatie-stap | Status |
|
||||
|---|---|
|
||||
| 1. `nc`/TCP naar 5432 | ✓ `psql` als `scrum4me_app` via `100.118.195.120:5432` werkt, ziet tabellen |
|
||||
| 2. DB-client grants | ✓ `SELECT` op `idea_products` werkt onder `scrum4me_app` |
|
||||
| 3. App-bereik | ✓ `http://100.118.195.120/` → 200, `/api/products` → 401 |
|
||||
| 4. **Reboot-test** | ✗ **Nog niet gedaan** — productie-server niet herstart. Handmatig uitvoeren op rustig moment; check daarna `ss -tlnp \| grep 5432`. |
|
||||
| 5. Runner | — Mac-kant (Deel C/D), nog open |
|
||||
| 6. Hoofd-app lokale dev | — Mac-kant, nog open |
|
||||
|
||||
## Gewijzigde bestanden op `scrum4me-srv`
|
||||
|
||||
- `/srv/scrum4me/compose/docker-compose.yml` — postgres `ports`: extra `100.118.195.120:5432:5432`
|
||||
- `/srv/scrum4me/caddy/Caddyfile` — site-block `100.118.195.120:80`
|
||||
- `/srv/scrum4me/postgres/pg_hba.conf` — tailnet-regel (+ `.bak-<timestamp>`)
|
||||
- `/etc/systemd/system/docker.service.d/tailscale-order.conf` — boot-order drop-in (nieuw)
|
||||
- ufw — regel `5432/tcp on tailscale0`
|
||||
- Postgres-rol `scrum4me_app` — aangemaakt met grants op db `scrum4me`
|
||||
---

**Nieuw bestand:** `docs/specs/functional.md` (+147 regels)
|
|||
# Functionele specificatie — Ops Dashboard
|
||||
|
||||
## Doel
|
||||
|
||||
Eén web-UI waarmee de eigenaar van een single-host server-stack (Docker + systemd + Git-checkouts + Caddy + Postgres) dezelfde operaties kan uitvoeren die anders in een SSH-terminal gebeuren — met audit-log, herhaalbare flows en minder typefouten.
|
||||
|
||||
**Schaal:** één host, één admin-gebruiker. Multi-host/team is buiten scope.
|
||||
|
||||
## Gebruikers en rollen
|
||||
|
||||
| Rol | Beschrijving | Hoeveelheid |
|
||||
|---|---|---|
|
||||
| **admin** | Volle toegang tot alle modules en flows. Single account, geseed via env. | 1 |
|
||||
|
||||
Geen RBAC, geen tenant-isolatie, geen "view-only"-modus. Wie inlogt kan alles.
|
||||
|
||||
## Functionele scope per module
|
||||
|
||||
### Dashboard (`/`)
|
||||
|
||||
5 live status-widgets met auto-refresh ~5s:
|
||||
|
||||
| Widget | Data-bron | Indicator |
|
||||
|---|---|---|
|
||||
| Docker | `docker ps --format json` | Count van running containers, lijst (naam + status) |
|
||||
| Git | `git status --short --branch` per pad in `REPO_PATHS` | Branch + dirty-vlag |
|
||||
| systemd | `systemctl is-active <unit>` per item in `SYSTEMD_UNITS` | Active / Inactive / Failed |
|
||||
| Caddy | `caddy admin-cmd certificates` (of equiv. shell-output parse) | Aantal certs + dichtstbijzijnde expiry |
|
||||
| Audit | DB-query op `FlowRun` desc | Laatste run + status |
|
||||
|
||||
**Acceptatie:**
|
||||
- Widget laadt < 1 s na page-load
|
||||
- Auto-refresh werkt in achtergrond zonder volledig herrenderen
|
||||
- Bij fout (agent down, command faalt): widget toont rood errorblok, niet de hele page
|
||||
|
||||
### Auth (`/login`)
|
||||
|
||||
- Email + wachtwoord (single user)
|
||||
- 5 failed attempts in 1 minuut → 429 rate-limit per IP
|
||||
- Succesvolle login → session-cookie 24u, HttpOnly, SameSite=strict, Secure (production)
|
||||
- `/api/auth/logout` invalideert sessie en wist cookie
|
||||
|
||||
### Docker (`/docker`)
|
||||
|
||||
- Lijst running containers (CONTAINER ID, IMAGE, COMMAND, CREATED, STATUS, PORTS, NAMES)
|
||||
- Auto-refresh elke 5s
|
||||
- `/docker/[name]` → detail-page met logs (laatste 200 regels), image-info, environment
|
||||
- **Geen** start/stop/restart vanuit UI — alleen via flows
|
||||
|
||||
### Git (`/git`)
|
||||
|
||||
- Per pad in `REPO_PATHS`: huidige branch, ahead/behind count, modified-files-count, laatste 3 commits
|
||||
- `/git/[repo]` → diff-viewer voor uncommitted changes + commit-historie laatste 20
|
||||
|
||||
### systemd (`/systemd`)
|
||||
|
||||
- Per unit in `SYSTEMD_UNITS`: active/inactive/failed, last-changed-timestamp
|
||||
- `/systemd/[unit]` → laatste 100 journal-regels van die unit, met level-filter
|
||||
- **Restart-actie**: alleen voor units die expliciet in `sudoers.d/ops-agent` met NOPASSWD staan
|
||||
|
||||
### Caddy (`/caddy`)
|
||||
|
||||
- Toon huidige `/srv/scrum4me/caddy/Caddyfile` met syntax-highlighting
|
||||
- Toon alle uitgegeven certs (subject, issuer, expiry, dichtstbijzijnde eerst)
|
||||
- Geel-warning bij expiry < 30 dagen, rood bij < 7 dagen
|
||||
- `/caddy/edit` → editor met save-knop; save valideert via `caddy validate` voor commit en restart van caddy-container
|
||||
|
||||
### Flows (`/flows`)
|
||||
|
||||
Twee voor-gedefinieerde flows in YAML in `ops-agent/flows.example/`:
|
||||
|
||||
| Flow | Stappen |
|
||||
|---|---|
|
||||
| `update_scrum4me_web` | git pull → npm run build → docker compose up -d --build → smoke-test op homepage |
|
||||
| `update_caddy_config` | write nieuw Caddyfile → caddy validate → docker compose restart caddy → check cert renewal |
|
||||
|
||||
Per flow:
|
||||
- Dry-run default (toont alleen wat het zou doen)
|
||||
- "Run"-knop voert echt uit; toont live SSE-stream van stdout/stderr per stap
|
||||
- Bij stap-fail: stop, markeer FlowRun als `failed`, latere stappen niet uitgevoerd
|
||||
- Bij success: FlowRun = `success`, totaalduur opgeslagen
|
||||
|
||||
### Audit (`/audit`)
|
||||
|
||||
- Lijst van alle `FlowRun` records, default 50 laatste, sort desc op `started_at`
|
||||
- Filter op status, datumrange, flow-name
|
||||
- `/audit/[flow_run_id]` → volledige output per stap, scrollable
|
||||
|
||||
### Settings/Backups (`/settings/backups`)
|
||||
|
||||
- Lijst van `.sql.gz` bestanden in `/srv/scrum4me/backups`, met size + mtime
|
||||
- "Backup now"-knop → maakt nieuwe dump met `pg_dumpall` voor alle databases
|
||||
- Restore: **handmatig vanuit terminal** — UI toont alleen de stappen als runbook
|
||||
|
||||
## State-machine flows
|
||||
|
||||
```
|
||||
┌────────┐
|
||||
│ pending │
|
||||
└────┬────┘
|
||||
│ (start request)
|
||||
┌────▼────┐
|
||||
│ running │
|
||||
└────┬────┘
|
||||
┌────┼────┬────────┐
|
||||
▼ ▼ ▼ ▼
|
||||
success failed cancelled timeout (>30min)
|
||||
```
|
||||
|
||||
`pending` → `running`: bij ontvangst start-request
|
||||
`running` → `success`: alle stappen exit-code 0
|
||||
`running` → `failed`: een stap exit-code ≠ 0
|
||||
`running` → `cancelled`: user klikt cancel
|
||||
`running` → `timeout`: na 30 min nog steeds running (cleanup-job)
|
||||
|
||||
## Hard limits
|
||||
|
||||
- Max 1 actieve flow tegelijk (lock-file in `/var/run/agent/`); 2e start-request → 409 Conflict
|
||||
- Stdout/stderr per stap geknipt op 64 KB om audit-log niet te laten exploderen
|
||||
- Session-TTL hard 24 uur, geen "remember me"
|
||||
- Auto-refresh max 1 keer per 5 seconden om agent niet te overbelasten
|
||||
|
||||
## Niet-functionele eisen
|
||||
|
||||
| Eis | Doel |
|
||||
|---|---|
|
||||
| First load < 1s | Single-user, lokale Postgres, geen onnodige round-trips |
|
||||
| Module-page TTI < 2s | Server-side render met direct agent-call, geen client-fetch waterval |
|
||||
| Audit-trail volledig | Elke flow-start logt user + tijdstempel + args; elke command-execution logt exit + duration |
|
||||
| Geen geheime data in URLs | Tokens in headers, secrets nooit in query-params |
|
||||
| CSP strict | `script-src 'self' 'unsafe-inline'`; geen externe CDNs |
|
||||
| HTTPS-only in productie | Caddy auto-ACME; cookies Secure-flag in prod-mode |
|
||||
|
||||
## Buiten scope
|
||||
|
||||
- Meerdere admins / RBAC
|
||||
- Meerdere hosts / cluster-management
|
||||
- Custom container starten (alleen restart bestaande)
|
||||
- Real-time alerts (geen pager, geen email)
|
||||
- Externe monitoring-integratie (Grafana/Prometheus/Sentry)
|
||||
- Wachtwoord-reset-flow / SSO
|
||||
|
||||
## Verwante documenten
|
||||
|
||||
- [Technische specificatie](./technical.md) — hoe het werkt
|
||||
- [Handleiding](../handleiding.md) — hoe je het gebruikt
|
||||
- [Post-install runbook](../runbooks/post-install.md) — eerste deploy
|
||||
---

**Nieuw bestand:** `docs/specs/technical.md` (+270 regels)
|
|||
# Technische specificatie — Ops Dashboard
|
||||
|
||||
## Architectuur in één plaatje
|
||||
|
||||
```
|
||||
┌────────────────┐ HTTPS ┌──────┐ HTTP ┌─────────────────┐
|
||||
│ Browser (jou) ├─────────►│Caddy ├────────►│ ops-dashboard │
|
||||
│ │ │ :443 │ │ Next.js 16 :3000│
|
||||
└────────────────┘ └──────┘ └────┬────────┬───┘
|
||||
│ │
|
||||
HMAC HTTP │ │ TCP/SQL
|
||||
:3099 │ │
|
||||
┌───────────────▼┐ │
|
||||
│ ops-agent │ │
|
||||
│ Fastify on host│ │
|
||||
│ spawn/exec │ │
|
||||
└───┬────────────┘ │
|
||||
│ │
|
||||
┌───────┴───────┐ ┌───────▼────────┐
|
||||
│ Whitelisted │ │ Postgres 17 │
|
||||
│ host commands │ │ db=ops_dashb.. │
|
||||
│ docker/git/etc │ └────────────────┘
|
||||
└───────────────┘
|
||||
```
|
||||
|
||||
Drie processen, één host:
|
||||
|
||||
1. **ops-dashboard** — Next.js app in Docker, op compose-bridge, exposed via Caddy
|
||||
2. **ops-agent** — Node/Fastify service direct op host (geen container), heeft sudoers + docker.sock access
|
||||
3. **postgres** — Docker container, dezelfde als die Scrum4Me al gebruikt; ops-dashboard heeft eigen DB `ops_dashboard`
|
||||
|
||||
## Stack
|
||||
|
||||
| Laag | Technologie | Versie | Reden |
|
||||
|---|---|---|---|
|
||||
| App framework | Next.js | 16.2 (App Router) | RSC server-side fetching matched onze "render with agent data" patroon |
|
||||
| UI library | React | 19 | Bundled bij Next 16 |
|
||||
| Styling | Tailwind CSS | 4 | Utility-first; geen custom design system |
|
||||
| UI primitives | `@base-ui/react` | 1.4 | Headless components, geen Radix-lock-in |
|
||||
| Code highlighting | shiki | 1.29 | Server-side highlighting in Caddyfile view |
|
||||
| Database ORM | Prisma | 7.8 (via `@prisma/adapter-pg`) | Same as Scrum4Me; één skill om beide te onderhouden |
|
||||
| Auth (password) | bcryptjs | 3 | Geen native bindings nodig |
|
||||
| Session | Custom in `lib/session.ts` | — | Eenvoudig: token in DB, hash in cookie |
|
||||
| Agent | Fastify | 5 | Lichtgewicht, native SSE-streaming |
|
||||
| Agent whitelist | js-yaml | 4 | Read-only configfile |
|
||||
|
||||
## Deploy-topologie
|
||||
|
||||
| Component | Locatie | Beheer |
|
||||
|---|---|---|
|
||||
| ops-dashboard | Docker container `scrum4me-ops-dashboard`, image `ops-dashboard:latest` | `docker compose` in `/srv/scrum4me/compose/docker-compose.yml` |
|
||||
| ops-agent | systemd unit `ops-agent.service`, host-binary `/opt/ops-agent/dist/index.js` | systemd, geïnstalleerd via `deploy/ops-agent/setup.sh` |
|
||||
| Caddyfile-route | Block in `/srv/scrum4me/caddy/Caddyfile` | Handmatig, na add restart Caddy-container |
|
||||
| Database | Postgres-container `scrum4me-postgres`, db `ops_dashboard` | Hergebruik bestaande container |
|
||||
| Backups | `/srv/scrum4me/backups/*.sql.gz` | Cron of handmatig via UI |
|
||||
|
||||
Caddy routeert `ops.jp-visser.nl` → service-naam `ops-dashboard:3000` op compose-bridge.
|
||||
|
||||
## Data-model
|
||||
|
||||
```
|
||||
User
|
||||
├── id cuid (string PK)
|
||||
├── email unique
|
||||
├── pwd_hash bcrypt $2b$12$...
|
||||
└── created_at
|
||||
|
||||
Session
|
||||
├── id cuid (PK)
|
||||
├── user_id → User
|
||||
├── token_hash sha256 hex (cookie waarde wordt gehashed opgeslagen)
|
||||
└── expires_at 24h na create
|
||||
|
||||
FlowRun
|
||||
├── id cuid (PK)
|
||||
├── user_id → User
|
||||
├── flow_name string (bv. "update_scrum4me_web")
|
||||
├── status enum: pending|running|success|failed|cancelled
|
||||
├── started_at
|
||||
├── finished_at nullable
|
||||
└── (1:N) FlowStep
|
||||
|
||||
FlowStep
|
||||
├── id cuid (PK)
|
||||
├── flow_run_id → FlowRun (cascade delete)
|
||||
├── step_index int
|
||||
├── name string (zoals in YAML flow-definitie)
|
||||
├── exit_code int nullable
|
||||
├── stdout text (max 64KB, geknipt)
|
||||
├── stderr text (max 64KB, geknipt)
|
||||
├── started_at
|
||||
└── finished_at nullable
|
||||
```
|
||||
|
||||
Migrations in `prisma/migrations/`. Seed in `prisma/seed.ts` (creëert eerste admin uit `SEED_USER_*`).
|
||||
|
||||
## Auth-flow
|
||||
|
||||
```
|
||||
1. Browser GET /login
|
||||
← Set-Cookie: csrf_token=<uuid>; SameSite=strict; httpOnly=false
|
||||
← HTML form
|
||||
|
||||
2. Browser POST /api/auth/login
|
||||
Headers:
|
||||
Cookie: csrf_token=<uuid>; ops_session=...
|
||||
x-csrf-token: <uuid> ← double-submit CSRF check
|
||||
Body: { email, password }
|
||||
|
||||
3. Server:
|
||||
a. proxy.ts CSRF check (cookie==header)
|
||||
b. /api/auth/login route:
|
||||
- rate-limit per IP (5/min)
|
||||
- prisma.user.findUnique({ email })
|
||||
- bcrypt.compare(password, user.pwd_hash)
|
||||
c. Bij succes:
|
||||
- generateSessionToken (32 bytes hex)
|
||||
- prisma.session.create({ token_hash: sha256(token), expires_at: now+24h })
|
||||
- Set-Cookie ops_session=<token>; HttpOnly; SameSite=strict; Secure (in prod)
|
||||
|
||||
4. Browser GET /<any-protected-path>
|
||||
Server: proxy.ts → als geen ops_session cookie → redirect /login
|
||||
Anders: getCurrentUser() leest cookie, hashed, prisma.session.findUnique({ token_hash })
|
||||
```
|
||||
|
||||
CSRF: double-submit cookie pattern. CSP, X-Frame-Options, X-Content-Type-Options, Referrer-Policy via proxy.ts response-headers.
|
||||
|
||||
## Agent-protocol
|
||||
|
||||
Dashboard → agent communicatie via `lib/agent-client.ts`:
|
||||
|
||||
```
|
||||
POST http://172.18.0.1:3099/agent/v1/exec
|
||||
Headers:
|
||||
Authorization: Bearer <OPS_AGENT_SECRET>
|
||||
Content-Type: application/json
|
||||
Body:
|
||||
{ command_key: "docker_ps", args?: string[], stdin?: string }
|
||||
```
|
||||
|
||||
Response: SSE stream
|
||||
|
||||
```
|
||||
event: stdout
|
||||
data: {"data": "<chunk>"}
|
||||
|
||||
event: stderr
|
||||
data: {"data": "<chunk>"}
|
||||
|
||||
event: exit
|
||||
data: {"code": 0}
|
||||
```
|
||||
|
||||
Agent server-side flow per call:
|
||||
1. `req.body.command_key` → lookup in `/etc/ops-agent/commands.yml`
|
||||
2. Bij hit: spawn `def.cmd[0]` met `def.cmd.slice(1) ++ args` (geen shell, geen interpolatie)
|
||||
3. Stream stdout/stderr chunks naar SSE
|
||||
4. Bij `child.close`: write `event: exit`, end response
|
||||
5. Bij `child.error`: write `event: error`, end response
|
||||
6. Bij `reply.raw.close` (client-disconnect): `child.kill()`
|
||||
7. Audit-log naar journalctl: `{audit:true, command_key, args, exit_code, duration_ms}`
|
||||
|
||||
`commands.yml` voorbeeld:
|
||||
|
||||
```yaml
|
||||
docker_ps:
|
||||
cmd: ["docker", "ps", "--format", "json"]
|
||||
description: "List running containers"
|
||||
|
||||
git_status:
|
||||
cmd: ["git", "status", "--short", "--branch"]
|
||||
cwd_pattern: true # args[0] = cwd, rest = command args
|
||||
description: "Git status in a repo"
|
||||
|
||||
systemctl_restart_caddy:
|
||||
cmd: ["sudo", "/usr/bin/systemctl", "restart", "caddy"]
|
||||
description: "Restart caddy service"
|
||||
```
|
||||
|
||||
Geen `command_key` in whitelist → 403 Forbidden.
|
||||
|
||||
## Flows engine
|
||||
|
||||
YAML-definitie in `ops-agent/flows.example/*.yml`:
|
||||
|
||||
```yaml
|
||||
name: update_scrum4me_web
|
||||
description: Pull main, build, restart container, verify
|
||||
steps:
|
||||
- name: Pull latest main
|
||||
command_key: git_pull
|
||||
args: ["/srv/scrum4me/repos/Scrum4Me", "main"]
|
||||
precondition: git_status_clean
|
||||
- name: Build container
|
||||
command_key: docker_compose_build
|
||||
args: ["scrum4me-web"]
|
||||
- name: Restart
|
||||
command_key: docker_compose_up
|
||||
args: ["-d", "scrum4me-web"]
|
||||
- name: Smoke test
|
||||
command_key: curl_status
|
||||
args: ["https://scrum4me.jp-visser.nl"]
|
||||
expect_exit_code: 0
|
||||
```
|
||||
|
||||
Runner (`ops-agent/src/lib/flow-runner.ts`):
|
||||
- Sequential, fail-fast
|
||||
- Per stap: check preconditions, spawn, capture stdout/stderr, store in FlowStep
|
||||
- Bij dry-run: vervang `spawn` door log van `def.cmd ++ args`
|
||||
- Bij echte run: stream via SSE naar dashboard `/api/flows/run` route
|
||||
|
||||
## Realtime in de UI
|
||||
|
||||
Niet via WebSocket of Server-Sent Events op de dashboard-side. Auto-refresh wordt server-rendered (`export const dynamic = 'force-dynamic'`) met client-side `useEffect(setInterval, 5000)` om `router.refresh()` te triggeren.
|
||||
|
||||
Flow-execution: client opent `EventSource` op `/api/flows/run/[id]` die de SSE van de agent doorstuurt.
|
||||
|
||||
## Configuratie
|
||||
|
||||
Verplicht in `.env`:
|
||||
|
||||
```bash
|
||||
DATABASE_URL=postgresql://USER:PASS@postgres:5432/ops_dashboard
|
||||
OPS_AGENT_URL=http://172.18.0.1:3099
|
||||
OPS_AGENT_SECRET=<hex-32-bytes>
|
||||
SEED_USER_EMAIL=admin@example.com
|
||||
SEED_USER_PASSWORD=<sterk-wachtwoord>
|
||||
```
|
||||
|
||||
Optioneel:
|
||||
|
||||
```bash
|
||||
SYSTEMD_UNITS=scrum4me-web,ops-agent # comma-separated
|
||||
REPO_PATHS=/srv/scrum4me/repos/Scrum4Me,… # comma-separated absolute paths
|
||||
```
|
||||
|
||||
Bij start: app valideert dat verplichte env vars gezet zijn; faalt fast met duidelijke error.
|
||||
|
||||
## Security-eigenschappen
|
||||
|
||||
| Eigenschap | Implementatie |
|
||||
|---|---|
|
||||
| Wachtwoord-hashing | bcrypt 12 rounds |
|
||||
| Session-cookie | HttpOnly, SameSite=strict, Secure in prod, 24u TTL |
|
||||
| CSRF | Double-submit cookie pattern, validated in `proxy.ts` voor POSTs |
|
||||
| CSP | Strict in response headers — geen inline scripts behalve Next.js internals met nonce |
|
||||
| Agent-auth | HMAC via Bearer-token (`OPS_AGENT_SECRET`) — symmetrisch |
|
||||
| Command-injection | `spawn(bin, args, {shell: false})` — geen shell-interpolatie ooit |
|
||||
| Whitelist | `commands.yml` is single source of truth voor wat draaibaar is |
|
||||
| Sudo | `sudoers.d/ops-agent` met absolute paden + service-namen, geen wildcards |
|
||||
| Audit | Elke `/agent/v1/exec` call logt naar journalctl met `{audit:true, …}` marker |
|
||||
| Rate-limit | Login 5/min/IP; agent per-secret zonder rate-limit (single-user trust) |
|
||||
| Bind | Agent bindt op `0.0.0.0:3099`; UFW staat alleen `172.18.0.0/16` toe |
|
||||
|
||||
## Niet-functionele eigenschappen
|
||||
|
||||
| Eigenschap | Specificatie |
|
||||
|---|---|
|
||||
| Geen multi-tenancy | Eén user-row in DB, app verifieert alleen "is er een geldig session-record"; geen `WHERE user_id = ?` filter (single-tenant) |
|
||||
| Geen retry/queue | Failed flows blijven failed; user moet handmatig opnieuw klikken |
|
||||
| Geen migrations-automation | `prisma migrate deploy` is **niet** in de boot-flow; doe je expliciet bij elke deploy |
|
||||
| Geen graceful shutdown | Container SIGTERM → in-flight requests verloren; geen drain |
|
||||
| Logging | Stdout/stderr van containers via `docker logs`; agent via `journalctl -u ops-agent`; geen aggregator |
|
||||
|
||||
## Open punten
|
||||
|
||||
- **Echte caddyfile-grammar** (IDEA-061) — nu nginx-fallback
|
||||
- **Multi-user / RBAC** — buiten scope, mogelijk later
|
||||
- **Rate-limit op agent** — voor multi-user toekomst nodig
|
||||
- **Real-time alerts** — momenteel pull-based, push naar Slack/Tailscale-only nog niet
|
||||
41
lib/agent-fetch.ts
Normal file
41
lib/agent-fetch.ts
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
import { apiFetch } from '@/lib/csrf'
|
||||
|
||||
export async function fetchAgentOutput(commandKey: string, args: string[] = []): Promise<string> {
|
||||
const res = await apiFetch('/api/agent/exec', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ command_key: commandKey, args }),
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
const text = await res.text()
|
||||
throw new Error(`agent ${res.status}: ${text}`)
|
||||
}
|
||||
|
||||
const reader = res.body?.getReader()
|
||||
if (!reader) throw new Error('no response body')
|
||||
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ''
|
||||
let output = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
const lines = buffer.split('\n')
|
||||
buffer = lines.pop() ?? ''
|
||||
for (const line of lines) {
|
||||
if (line.startsWith('data:')) {
|
||||
try {
|
||||
const parsed = JSON.parse(line.slice(5).trim()) as { data?: string }
|
||||
if (parsed.data !== undefined) output += parsed.data
|
||||
} catch {
|
||||
// ignore malformed SSE
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return output
|
||||
}
|
||||
31
lib/codemirror/caddyfile-mode.ts
Normal file
31
lib/codemirror/caddyfile-mode.ts
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
import { StreamLanguage, type StreamParser } from '@codemirror/language'
|
||||
|
||||
const CADDY_DIRECTIVES = new Set([
|
||||
'reverse_proxy', 'encode', 'file_server', 'handle', 'handle_errors',
|
||||
'root', 'header', 'redir', 'rewrite', 'respond', 'route', 'tls',
|
||||
'log', 'basicauth', 'request_body', 'try_files', 'php_fastcgi',
|
||||
'templates', 'import', 'bind', 'metrics', 'admin', 'auto_https',
|
||||
])
|
||||
const CADDY_GLOBAL = new Set(['email', 'storage', 'order', 'servers', 'log'])
|
||||
|
||||
const parser: StreamParser<unknown> = {
|
||||
token(stream) {
|
||||
if (stream.eatSpace()) return null
|
||||
if (stream.match(/^#.*/)) return 'comment'
|
||||
if (stream.match(/^"(?:[^"\\]|\\.)*"/)) return 'string'
|
||||
if (stream.match(/^@[A-Za-z_][\w-]*/)) return 'variableName'
|
||||
if (stream.match(/^[{}]/)) return 'brace'
|
||||
const word = stream.match(/^[A-Za-z_][\w.-]*/) as RegExpMatchArray | null
|
||||
if (word) {
|
||||
const w = word[0]
|
||||
if (CADDY_DIRECTIVES.has(w)) return 'keyword'
|
||||
if (CADDY_GLOBAL.has(w)) return 'typeName'
|
||||
return 'variableName'
|
||||
}
|
||||
stream.next()
|
||||
return null
|
||||
},
|
||||
languageData: { commentTokens: { line: '#' } },
|
||||
}
|
||||
|
||||
export const caddyfileLanguage = StreamLanguage.define(parser)
|
||||
83
lib/grammars/caddyfile.json
Normal file
83
lib/grammars/caddyfile.json
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
{
|
||||
"name": "caddyfile",
|
||||
"scopeName": "source.Caddyfile",
|
||||
"fileTypes": ["Caddyfile"],
|
||||
"patterns": [
|
||||
{ "include": "#comment" },
|
||||
{ "include": "#site-address" },
|
||||
{ "include": "#named-matcher-def" },
|
||||
{ "include": "#named-matcher-ref" },
|
||||
{ "include": "#directive" },
|
||||
{ "include": "#placeholder" },
|
||||
{ "include": "#string-double" },
|
||||
{ "include": "#string-backtick" },
|
||||
{ "include": "#number" },
|
||||
{ "include": "#braces" }
|
||||
],
|
||||
"repository": {
|
||||
"comment": {
|
||||
"name": "comment.line.number-sign.caddyfile",
|
||||
"match": "#.*$"
|
||||
},
|
||||
"site-address": {
|
||||
"name": "entity.name.section.caddyfile",
|
||||
"match": "^(?:https?://)?[a-zA-Z0-9][a-zA-Z0-9.*-]*(?::[0-9]+)?(?=\\s*(?:\\{|,|$))"
|
||||
},
|
||||
"named-matcher-def": {
|
||||
"name": "entity.other.attribute-name.caddyfile",
|
||||
"match": "@[a-zA-Z_][a-zA-Z0-9_-]*(?=\\s)"
|
||||
},
|
||||
"named-matcher-ref": {
|
||||
"name": "entity.other.attribute-name.caddyfile",
|
||||
"match": "(?<=\\s)@[a-zA-Z_][a-zA-Z0-9_-]*"
|
||||
},
|
||||
"directive": {
|
||||
"patterns": [
|
||||
{
|
||||
"name": "keyword.control.caddyfile",
|
||||
"match": "\\b(reverse_proxy|encode|file_server|handle_errors|handle_path|handle|root|header|request_header|response_header|redir|respond|rewrite|uri|try_files|php_fastcgi|push|templates|basicauth|forward_auth|map|vars|log|tls|bind|import|snippet|abort|error|static_response|acme_server|invoke)\\b"
|
||||
},
|
||||
{
|
||||
"name": "support.function.caddyfile",
|
||||
"match": "\\b(on_demand|off|auto|internal|force|strip_prefix|replace|path_regexp|method|host|header_regexp|remote_ip|client_ip|not|query|cookie|expression|path|protocol|vars_regexp|file|jwt|geo_ip)\\b"
|
||||
},
|
||||
{
|
||||
"name": "keyword.other.option.caddyfile",
|
||||
"match": "\\b(auto_https|admin|debug|grace_period|shutdown_delay|servers|storage|order|email|acme_ca|acme_ca_root|acme_eab|ocsp_stapling|key_type|cert_issuer|local_certs|skip_install_trust|renew_interval|check_interval|persistent_key|insecure_secrets_log|prefer_wildcard|resolvers|max_size|retention|format|output|level|sampling|include|exclude|dial|upstream|transport|lb_policy|health_uri|health_interval|health_timeout|health_status|health_body|flush_interval|buffer_requests|buffer_responses|max_buffer_size|trusted_proxies|to|from|prefix|replacements|gzip|zstd|br)\\b"
|
||||
}
|
||||
]
|
||||
},
|
||||
"placeholder": {
|
||||
"name": "variable.other.caddyfile",
|
||||
"match": "\\{(?:http\\.(?:request|response|vars|regexp|handlers)|tls|env|vars|system|time|rand|counter|uuid|path|query|header|cookie|form|file|dir|args|blocks|labels|err|http)[^}]*\\}"
|
||||
},
|
||||
"string-double": {
|
||||
"name": "string.quoted.double.caddyfile",
|
||||
"begin": "\"",
|
||||
"end": "\"",
|
||||
"patterns": [
|
||||
{
|
||||
"name": "constant.character.escape.caddyfile",
|
||||
"match": "\\\\."
|
||||
},
|
||||
{
|
||||
"name": "variable.other.caddyfile",
|
||||
"match": "\\{[^}]+\\}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"string-backtick": {
|
||||
"name": "string.quoted.other.caddyfile",
|
||||
"begin": "`",
|
||||
"end": "`"
|
||||
},
|
||||
"number": {
|
||||
"name": "constant.numeric.caddyfile",
|
||||
"match": "\\b[0-9]+(?:\\.[0-9]+)?(?:s|ms|m|h|d|kb|mb|gb)?\\b"
|
||||
},
|
||||
"braces": {
|
||||
"name": "punctuation.section.block.caddyfile",
|
||||
"match": "[{}]"
|
||||
}
|
||||
}
|
||||
}
|
||||
444
lib/parse-worker-log.ts
Normal file
444
lib/parse-worker-log.ts
Normal file
|
|
@ -0,0 +1,444 @@
|
|||
// lib/parse-worker-log.ts
|
||||
//
|
||||
// Parser for Scrum4Me worker run-logs (/srv/scrum4me/worker-logs/idea/runs/*.log).
|
||||
// Each file is produced by `tsx run-one-job.ts > run_log 2>&1` and is a mix of
|
||||
// plain-text `[run-one-job]` annotation lines and Claude Code `stream-json`
|
||||
// event lines (the worker spawns `claude --output-format stream-json --verbose`).
|
||||
//
|
||||
// Two entry points:
|
||||
// summarizeRunLog(raw, fileName) — one cheap line scan, for the table.
|
||||
// parseRunLog(raw, fileName) — full event timeline, for the detail panel.
|
||||
//
|
||||
// Pure module, no dependencies — mirrors lib/parse-docker.ts / lib/parse-systemd.ts.
|
||||
|
||||
/**
 * Lifecycle state derived from a run-log's contents.
 * 'idle'          — the worker woke up but claimed no job this iteration.
 * 'running'       — no terminal marker seen yet (log still being written).
 * 'token-expired' — the worker printed `TOKEN_EXPIRED detected`.
 * 'unknown'       — reserved; not currently produced by summarizeRunLog.
 */
export type RunStatus = 'idle' | 'running' | 'success' | 'error' | 'token-expired' | 'unknown'

/** Cheap per-file summary shown in the runs table (built by summarizeRunLog). */
export interface RunLogSummary {
  fileName: string              // original file name, e.g. 20240101T000000Z.log
  runId: string                 // fileName minus its .log / .log.gz suffix
  startedAt: string | null      // ISO timestamp derived from the file name, if parseable
  status: RunStatus
  jobId: string | null          // from the `claimed job_id=` meta line
  model: string | null          // from the `config model=` meta line
  permissionMode: string | null // from the `config permission_mode=` meta line
  durationMs: number | null     // `claude done duration_ms=`, else result-event duration
  numTurns: number | null       // from the stream-json result event
  totalCostUsd: number | null   // from the stream-json result event
  exitCode: number | null       // claude exit code, falling back to the run's exit code
  eventCount: number            // number of JSON event lines seen
  inProgress: boolean           // true when no terminal marker was found
  errorSummary: string | null   // short failure reason for error/token-expired runs
}
|
||||
|
||||
/** Classification of a `[run-one-job]` annotation line (assigned by classifyMeta). */
export type MetaTag =
  | 'claim'
  | 'auth'
  | 'quota'
  | 'no-job'
  | 'claimed'
  | 'worktree'
  | 'config'
  | 'payload'
  | 'spawn'
  | 'claude-done'
  | 'cleanup'
  | 'exit'
  | 'error'
  | 'token-expired'
  | 'timeout'
  | 'other'
|
||||
|
||||
/**
 * One entry in the detail-panel timeline, discriminated on `kind`:
 * - 'meta'           — a `[run-one-job]` annotation line
 * - 'system-init'    — the stream-json `system` init event
 * - 'assistant-text' — an assistant text block (capped at TEXT_CAP)
 * - 'thinking'       — an assistant thinking block (capped at TEXT_CAP)
 * - 'tool-call'      — a tool_use block (input pretty-printed, capped)
 * - 'tool-result'    — a tool_result block (body capped; may be dropped later)
 * - 'rate-limit'     — a rate_limit_event
 * - 'result'         — the final stream-json result event
 * - 'raw'            — anything unrecognized or unparseable, kept verbatim
 */
export type LogEvent =
  | { kind: 'meta'; ts: string | null; tag: MetaTag; text: string }
  | {
      kind: 'system-init'
      ts: string | null
      model: string
      permissionMode: string
      tools: string[]
      mcpServers: string[]
      sessionId: string
      cwd: string
      version: string
    }
  | { kind: 'assistant-text'; ts: string | null; text: string; truncated: boolean }
  | { kind: 'thinking'; ts: string | null; text: string; truncated: boolean }
  | { kind: 'tool-call'; ts: string | null; id: string; name: string; input: string; truncated: boolean }
  | {
      kind: 'tool-result'
      ts: string | null
      toolUseId: string
      isError: boolean
      body: string
      truncated: boolean
      fullLength: number // body length before capping
    }
  | { kind: 'rate-limit'; ts: string | null; status: string }
  | {
      kind: 'result'
      ts: string | null
      subtype: string
      isError: boolean
      durationMs: number | null
      numTurns: number | null
      totalCostUsd: number | null
      resultText: string
      resultTruncated: boolean
    }
  | { kind: 'raw'; ts: string | null; text: string }

/** Result of parseRunLog: summary plus the full (size-bounded) event list. */
export interface ParsedRunLog {
  summary: RunLogSummary
  events: LogEvent[]
  inProgress: boolean        // mirror of summary.inProgress, for convenience
  responseTruncated: boolean // true when enforceResponseCap dropped bodies
}
|
||||
|
||||
// Per-item caps keep the detail payload bounded even for ~350 KB raw logs.
const TOOL_RESULT_CAP = 8 * 1024  // per tool-result body (and per raw noise line)
const TEXT_CAP = 16 * 1024        // per assistant-text / thinking / result text
const TOOL_INPUT_CAP = 4 * 1024   // per tool-call input JSON
const RESPONSE_CAP = 1_500_000    // whole-payload ceiling (see enforceResponseCap)

// Matches worker annotation lines: `<timestamp> [run-one-job] <message>`.
const META_RE = /^(\S+)\s+\[run-one-job\]\s+(.*)$/
|
||||
|
||||
function cap(s: string, max: number): { text: string; truncated: boolean } {
|
||||
if (s.length <= max) return { text: s, truncated: false }
|
||||
return { text: s.slice(0, max), truncated: true }
|
||||
}
|
||||
|
||||
/** Strip the `.log` / `.log.gz` suffix — the run id is the timestamp filename. */
|
||||
export function runIdFromFileName(fileName: string): string {
|
||||
return fileName.replace(/\.log(\.gz)?$/, '')
|
||||
}
|
||||
|
||||
/** run-agent.sh names each file `$(date -u +%Y%m%dT%H%M%SZ).log`, so the name is the start time. */
|
||||
function startedAtFromRunId(runId: string): string | null {
|
||||
const m = runId.match(/^(\d{4})(\d{2})(\d{2})T(\d{2})(\d{2})(\d{2})Z$/)
|
||||
if (!m) return null
|
||||
return `${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`
|
||||
}
|
||||
|
||||
function classifyMeta(msg: string): MetaTag {
|
||||
if (msg.startsWith('claim attempt')) return 'claim'
|
||||
if (msg.startsWith('auth ok')) return 'auth'
|
||||
if (msg.startsWith('quota probe')) return 'quota'
|
||||
if (msg.startsWith('no job claimed')) return 'no-job'
|
||||
if (msg.startsWith('claimed job_id=')) return 'claimed'
|
||||
if (msg.startsWith('worktree path=')) return 'worktree'
|
||||
if (msg.startsWith('config ')) return 'config'
|
||||
if (msg.startsWith('payload written')) return 'payload'
|
||||
if (msg.startsWith('spawn claude')) return 'spawn'
|
||||
if (msg.startsWith('claude done')) return 'claude-done'
|
||||
if (msg.startsWith('cleanup')) return 'cleanup'
|
||||
if (msg.startsWith('exit code=')) return 'exit'
|
||||
if (msg.startsWith('ERROR')) return 'error'
|
||||
if (msg.startsWith('TOKEN_EXPIRED detected')) return 'token-expired'
|
||||
if (msg.startsWith('claim timeout')) return 'timeout'
|
||||
return 'other'
|
||||
}
|
||||
|
||||
/**
 * Cheap single-pass summary for the table — at most one JSON.parse (the result line).
 *
 * Scans every line once: `[run-one-job]` meta lines are parsed with string
 * prefixes/regexes only; JSON event lines are merely counted, except the first
 * `{"type":"result"` line which is parsed for turn/cost/duration totals.
 *
 * @param raw      full log file contents
 * @param fileName log file name (run id + start time are derived from it)
 */
export function summarizeRunLog(raw: string, fileName: string): RunLogSummary {
  const runId = runIdFromFileName(fileName)
  const lines = raw.split('\n')

  // Accumulators filled during the single pass below.
  let jobId: string | null = null
  let model: string | null = null
  let permissionMode: string | null = null
  let claudeExit: number | null = null      // from `claude done exit_code=`
  let runExit: number | null = null         // from the final `exit code=` line
  let durationMs: number | null = null
  let numTurns: number | null = null
  let totalCostUsd: number | null = null
  let eventCount = 0
  let hasResult = false                     // saw a stream-json result event
  let resultIsError = false
  let resultSubtype: string | null = null
  let tokenExpired = false
  let hasErrorLine = false
  let firstErrorMsg: string | null = null   // first ERROR meta line, capped to 300 chars

  for (const line of lines) {
    if (!line) continue
    const m = line.match(META_RE)
    if (m) {
      // Worker annotation line — extract fields by prefix.
      const msg = m[2]
      if (msg.startsWith('claimed job_id=')) {
        jobId = msg.slice('claimed job_id='.length).trim() || jobId
      } else if (msg.startsWith('config ')) {
        model = /\bmodel=(\S+)/.exec(msg)?.[1] ?? model
        permissionMode = /\bpermission_mode=(\S+)/.exec(msg)?.[1] ?? permissionMode
      } else if (msg.startsWith('claude done')) {
        const e = /\bexit_code=(-?\d+)/.exec(msg)
        if (e) claudeExit = Number(e[1])
        const d = /\bduration_ms=(\d+)/.exec(msg)
        if (d) durationMs = Number(d[1])
      } else if (msg.startsWith('exit code=')) {
        const e = /exit code=(-?\d+)/.exec(msg)
        if (e) runExit = Number(e[1])
      } else if (msg.startsWith('TOKEN_EXPIRED detected')) {
        tokenExpired = true
      } else if (msg.startsWith('ERROR')) {
        hasErrorLine = true
        if (!firstErrorMsg) firstErrorMsg = msg.replace(/^ERROR\s*/, '').slice(0, 300)
      }
      continue
    }
    const trimmed = line.trimStart()
    if (trimmed.startsWith('{')) {
      // JSON event line — only the first result line is actually parsed.
      eventCount++
      if (!hasResult && trimmed.startsWith('{"type":"result"')) {
        try {
          const obj = JSON.parse(trimmed)
          hasResult = true
          resultIsError = !!obj.is_error
          resultSubtype = typeof obj.subtype === 'string' ? obj.subtype : null
          if (typeof obj.num_turns === 'number') numTurns = obj.num_turns
          if (typeof obj.total_cost_usd === 'number') totalCostUsd = obj.total_cost_usd
          if (durationMs == null && typeof obj.duration_ms === 'number') durationMs = obj.duration_ms
        } catch {
          // malformed result line — ignore
        }
      }
    }
  }

  // A run is terminal when any end-of-run marker appeared; otherwise the log
  // is (or was) still being written.
  const exitCode = claudeExit ?? runExit
  const terminal = runExit != null || hasResult || hasErrorLine || tokenExpired
  const inProgress = !terminal

  // Status derivation: token expiry wins; otherwise a claimed job is
  // running / error / success; with no claimed job the worker was idle.
  let status: RunStatus
  if (tokenExpired) {
    status = 'token-expired'
  } else if (jobId) {
    if (inProgress) {
      status = 'running'
    } else if (
      resultIsError ||
      hasErrorLine ||
      (claudeExit != null && claudeExit !== 0) ||
      (runExit != null && runExit !== 0)
    ) {
      status = 'error'
    } else {
      status = 'success'
    }
  } else {
    // No job was claimed this iteration — the worker was idle / waiting.
    status = 'idle'
  }

  // Most specific available failure description, in preference order.
  let errorSummary: string | null = null
  if (status === 'error' || status === 'token-expired') {
    errorSummary =
      firstErrorMsg ??
      (tokenExpired ? 'TOKEN_EXPIRED detected in output' : null) ??
      (resultIsError ? `result: ${resultSubtype ?? 'error'}` : null) ??
      (exitCode != null ? `exit code ${exitCode}` : null)
  }

  return {
    fileName,
    runId,
    startedAt: startedAtFromRunId(runId),
    status,
    jobId,
    model,
    permissionMode,
    durationMs,
    numTurns,
    totalCostUsd,
    exitCode,
    eventCount,
    inProgress,
    errorSummary,
  }
}
|
||||
|
||||
function normalizeContent(content: unknown): string {
|
||||
if (typeof content === 'string') return content
|
||||
if (Array.isArray(content)) {
|
||||
return content
|
||||
.map((b) => {
|
||||
if (typeof b === 'string') return b
|
||||
if (b && typeof b === 'object' && typeof (b as { text?: unknown }).text === 'string') {
|
||||
return (b as { text: string }).text
|
||||
}
|
||||
return JSON.stringify(b)
|
||||
})
|
||||
.join('\n')
|
||||
}
|
||||
if (content == null) return ''
|
||||
return JSON.stringify(content)
|
||||
}
|
||||
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any -- stream-json events are genuinely dynamic */
// Translates one parsed stream-json object into zero or more LogEvents.
// Every branch defensively type-checks fields before use; unknown event
// types are preserved as compact 'raw' notes so nothing is silently dropped.
function pushJsonEvent(events: LogEvent[], obj: any): void {
  const type = obj?.type
  const ts: string | null = typeof obj?.timestamp === 'string' ? obj.timestamp : null

  // `system` init event: capture model/tooling context for the header card.
  if (type === 'system') {
    const mcp = Array.isArray(obj.mcp_servers)
      ? obj.mcp_servers.map((s: any) => (typeof s?.name === 'string' ? s.name : String(s)))
      : []
    events.push({
      kind: 'system-init',
      ts,
      model: typeof obj.model === 'string' ? obj.model : '—',
      permissionMode: typeof obj.permissionMode === 'string' ? obj.permissionMode : '—',
      tools: Array.isArray(obj.tools) ? obj.tools.filter((t: unknown) => typeof t === 'string') : [],
      mcpServers: mcp,
      sessionId: typeof obj.session_id === 'string' ? obj.session_id : '',
      cwd: typeof obj.cwd === 'string' ? obj.cwd : '',
      version: typeof obj.claude_code_version === 'string' ? obj.claude_code_version : '',
    })
    return
  }

  if (type === 'rate_limit_event') {
    events.push({
      kind: 'rate-limit',
      ts,
      status: typeof obj.rate_limit_info?.status === 'string' ? obj.rate_limit_info.status : 'unknown',
    })
    return
  }

  // Assistant message: fan out each content block (text / thinking / tool_use).
  if (type === 'assistant') {
    const content = obj?.message?.content
    if (Array.isArray(content)) {
      for (const block of content) {
        if (block?.type === 'text' && typeof block.text === 'string') {
          const c = cap(block.text, TEXT_CAP)
          events.push({ kind: 'assistant-text', ts, text: c.text, truncated: c.truncated })
        } else if (block?.type === 'thinking' && typeof block.thinking === 'string') {
          const c = cap(block.thinking, TEXT_CAP)
          events.push({ kind: 'thinking', ts, text: c.text, truncated: c.truncated })
        } else if (block?.type === 'tool_use') {
          // Tool input is arbitrary JSON; pretty-print it, falling back to
          // String() for values JSON.stringify cannot handle (e.g. cycles).
          let inputStr: string
          try {
            inputStr = JSON.stringify(block.input, null, 2)
          } catch {
            inputStr = String(block.input)
          }
          const c = cap(inputStr, TOOL_INPUT_CAP)
          events.push({
            kind: 'tool-call',
            ts,
            id: typeof block.id === 'string' ? block.id : '',
            name: typeof block.name === 'string' ? block.name : 'tool',
            input: c.text,
            truncated: c.truncated,
          })
        }
      }
    }
    return
  }

  // User message: in stream-json these carry tool_result blocks.
  if (type === 'user') {
    const content = obj?.message?.content
    if (Array.isArray(content)) {
      for (const block of content) {
        if (block?.type === 'tool_result') {
          const body = normalizeContent(block.content)
          const c = cap(body, TOOL_RESULT_CAP)
          events.push({
            kind: 'tool-result',
            ts,
            toolUseId: typeof block.tool_use_id === 'string' ? block.tool_use_id : '',
            isError: !!block.is_error,
            body: c.text,
            truncated: c.truncated,
            fullLength: body.length, // pre-cap size, shown in the UI
          })
        }
      }
    }
    return
  }

  // Final result event with cost / duration / turn totals.
  if (type === 'result') {
    const c = cap(typeof obj.result === 'string' ? obj.result : '', TEXT_CAP)
    events.push({
      kind: 'result',
      ts,
      subtype: typeof obj.subtype === 'string' ? obj.subtype : 'unknown',
      isError: !!obj.is_error,
      durationMs: typeof obj.duration_ms === 'number' ? obj.duration_ms : null,
      numTurns: typeof obj.num_turns === 'number' ? obj.num_turns : null,
      totalCostUsd: typeof obj.total_cost_usd === 'number' ? obj.total_cost_usd : null,
      resultText: c.text,
      resultTruncated: c.truncated,
    })
    return
  }

  // Unknown event type — keep a compact raw note so nothing is silently dropped.
  events.push({ kind: 'raw', ts, text: cap(`${type ?? 'event'}: ${JSON.stringify(obj)}`, 2048).text })
}
/* eslint-enable @typescript-eslint/no-explicit-any */
|
||||
|
||||
function estimateSize(e: LogEvent): number {
|
||||
switch (e.kind) {
|
||||
case 'assistant-text':
|
||||
case 'thinking':
|
||||
case 'raw':
|
||||
return e.text.length
|
||||
case 'tool-call':
|
||||
return e.input.length
|
||||
case 'tool-result':
|
||||
return e.body.length
|
||||
case 'result':
|
||||
return e.resultText.length
|
||||
default:
|
||||
return 64
|
||||
}
|
||||
}
|
||||
|
||||
/** Bound the whole payload — drop tool-result bodies oldest-first if still too large. */
|
||||
function enforceResponseCap(events: LogEvent[]): boolean {
|
||||
let total = 0
|
||||
for (const e of events) total += estimateSize(e)
|
||||
if (total <= RESPONSE_CAP) return false
|
||||
for (const e of events) {
|
||||
if (total <= RESPONSE_CAP) break
|
||||
if (e.kind === 'tool-result' && e.body) {
|
||||
total -= e.body.length
|
||||
e.body = ''
|
||||
e.truncated = true
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/** Full event timeline for the detail panel. */
|
||||
export function parseRunLog(raw: string, fileName: string): ParsedRunLog {
|
||||
const summary = summarizeRunLog(raw, fileName)
|
||||
const events: LogEvent[] = []
|
||||
|
||||
for (const line of raw.split('\n')) {
|
||||
if (!line.trim()) continue
|
||||
const m = line.match(META_RE)
|
||||
if (m) {
|
||||
events.push({ kind: 'meta', ts: m[1], tag: classifyMeta(m[2]), text: m[2] })
|
||||
continue
|
||||
}
|
||||
const trimmed = line.trimStart()
|
||||
if (trimmed.startsWith('{')) {
|
||||
try {
|
||||
pushJsonEvent(events, JSON.parse(trimmed))
|
||||
} catch {
|
||||
// partial / malformed JSON line (e.g. a log read mid-write) — keep it raw
|
||||
events.push({ kind: 'raw', ts: null, text: cap(line, TOOL_RESULT_CAP).text })
|
||||
}
|
||||
continue
|
||||
}
|
||||
// Non-JSON, non-meta noise (e.g. a bare `Warning: ...` from claude).
|
||||
events.push({ kind: 'raw', ts: null, text: cap(line, TOOL_RESULT_CAP).text })
|
||||
}
|
||||
|
||||
const responseTruncated = enforceResponseCap(events)
|
||||
return { summary, events, inProgress: summary.inProgress, responseTruncated }
|
||||
}
|
||||
22
lib/utils.ts
22
lib/utils.ts
|
|
@ -4,3 +4,25 @@ import { twMerge } from "tailwind-merge"
|
|||
export function cn(...inputs: ClassValue[]) {
|
||||
return twMerge(clsx(inputs))
|
||||
}
|
||||
|
||||
export function relativeTime(date: Date): string {
|
||||
const diff = Date.now() - date.getTime()
|
||||
const minutes = Math.floor(diff / 60_000)
|
||||
if (minutes < 1) return 'net nu'
|
||||
if (minutes < 60) return `${minutes}m geleden`
|
||||
const hours = Math.floor(minutes / 60)
|
||||
if (hours < 24) return `${hours}u geleden`
|
||||
return `${Math.floor(hours / 24)}d geleden`
|
||||
}
|
||||
|
||||
/** Human-readable duration from a millisecond count. */
|
||||
export function formatDuration(ms: number): string {
|
||||
if (ms < 1000) return `${ms}ms`
|
||||
const totalSec = Math.round(ms / 1000)
|
||||
if (totalSec < 60) return `${totalSec}s`
|
||||
const minutes = Math.floor(totalSec / 60)
|
||||
const seconds = totalSec % 60
|
||||
if (minutes < 60) return `${minutes}m ${seconds}s`
|
||||
const hours = Math.floor(minutes / 60)
|
||||
return `${hours}u ${minutes % 60}m`
|
||||
}
|
||||
|
|
|
|||
116
lib/worker-logs.ts
Normal file
116
lib/worker-logs.ts
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
// lib/worker-logs.ts
|
||||
//
|
||||
// Server-only filesystem access to the worker run-logs. The directory is
|
||||
// mounted read-only into the ops-dashboard container (see docker-compose.yml:
|
||||
// `/srv/scrum4me/worker-logs:/var/worker-logs:ro`). Path configurable via the
|
||||
// WORKER_LOGS_DIR env var.
|
||||
//
|
||||
// Only imported by server components and route handlers — never by a
|
||||
// 'use client' file.
|
||||
|
||||
import 'server-only'
|
||||
import { readdir, readFile } from 'node:fs/promises'
|
||||
import { gunzipSync } from 'node:zlib'
|
||||
import { join, resolve } from 'node:path'
|
||||
import { summarizeRunLog, type RunLogSummary } from './parse-worker-log'
|
||||
|
||||
const WORKER_LOGS_DIR = process.env.WORKER_LOGS_DIR ?? '/var/worker-logs/idea'
|
||||
const RUNS_DIR = join(WORKER_LOGS_DIR, 'runs')
|
||||
|
||||
/** Selectable row counts for the table. */
// NOTE: must stay sorted ascending — clampLimit walks this list in order and
// keeps the last option the requested limit reaches.
export const LIMIT_OPTIONS = [10, 25, 50, 100] as const
// Fallback when the requested limit is non-finite or below every option.
const DEFAULT_LIMIT = 10

// Filenames are `$(date -u +%Y%m%dT%H%M%SZ).log` — no slashes, no dots beyond
// the literal suffix, so this regex alone rules out path traversal.
// The optional `.gz` suffix admits compressed logs (readLogFile gunzips them).
const NAME_RE = /^\d{8}T\d{6}Z\.log(\.gz)?$/
|
||||
|
||||
export type WorkerLogErrorCode = 'invalid' | 'not-found' | 'unavailable'
|
||||
|
||||
export class WorkerLogError extends Error {
|
||||
readonly code: WorkerLogErrorCode
|
||||
constructor(message: string, code: WorkerLogErrorCode) {
|
||||
super(message)
|
||||
this.name = 'WorkerLogError'
|
||||
this.code = code
|
||||
}
|
||||
}
|
||||
|
||||
/** Clamp an arbitrary requested limit down to the largest allowed option. */
|
||||
export function clampLimit(n: number): number {
|
||||
if (!Number.isFinite(n)) return DEFAULT_LIMIT
|
||||
let chosen: number = DEFAULT_LIMIT
|
||||
for (const opt of LIMIT_OPTIONS) {
|
||||
if (n >= opt) chosen = opt
|
||||
}
|
||||
return chosen
|
||||
}
|
||||
|
||||
/** True iff `name` matches the strict run-log filename pattern (optionally `.gz`). */
export function isValidLogName(name: string): boolean {
  return NAME_RE.test(name)
}
|
||||
|
||||
function resolveLogPath(name: string): string {
|
||||
if (!isValidLogName(name)) {
|
||||
throw new WorkerLogError(`invalid log name: ${name}`, 'invalid')
|
||||
}
|
||||
const base = resolve(RUNS_DIR)
|
||||
const full = resolve(base, name)
|
||||
// Defense-in-depth: the regex already forbids traversal, but confirm anyway.
|
||||
if (full !== join(base, name)) {
|
||||
throw new WorkerLogError(`path escapes worker logs dir: ${name}`, 'invalid')
|
||||
}
|
||||
return full
|
||||
}
|
||||
|
||||
async function readLogFile(name: string): Promise<string> {
|
||||
const full = resolveLogPath(name)
|
||||
if (name.endsWith('.gz')) {
|
||||
const buf = await readFile(full)
|
||||
return gunzipSync(buf).toString('utf8')
|
||||
}
|
||||
return readFile(full, 'utf8')
|
||||
}
|
||||
|
||||
/** Newest-first summaries for the table. Sorts by filename, slices, then reads. */
|
||||
export async function listRunLogs(limit: number): Promise<RunLogSummary[]> {
|
||||
const n = clampLimit(limit)
|
||||
|
||||
let entries: string[]
|
||||
try {
|
||||
entries = await readdir(RUNS_DIR)
|
||||
} catch (err) {
|
||||
throw new WorkerLogError(
|
||||
`cannot read worker logs dir ${RUNS_DIR}: ${(err as Error).message}`,
|
||||
'unavailable',
|
||||
)
|
||||
}
|
||||
|
||||
// Filename is `YYYYMMDDTHHMMSSZ` — lexicographic order == chronological order.
|
||||
// Sort + slice BEFORE touching file content (the dir holds ~12k files).
|
||||
const names = entries.filter(isValidLogName).sort().reverse().slice(0, n)
|
||||
|
||||
return Promise.all(
|
||||
names.map(async (name) => {
|
||||
try {
|
||||
return summarizeRunLog(await readLogFile(name), name)
|
||||
} catch {
|
||||
// A single unreadable / mid-rotation file must not break the table.
|
||||
return { ...summarizeRunLog('', name), status: 'unknown' as const, inProgress: false }
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
/** Raw contents of one run-log (gunzipped if needed). */
|
||||
export async function readRunLog(name: string): Promise<string> {
|
||||
try {
|
||||
return await readLogFile(name)
|
||||
} catch (err) {
|
||||
if (err instanceof WorkerLogError) throw err
|
||||
if ((err as NodeJS.ErrnoException).code === 'ENOENT') {
|
||||
throw new WorkerLogError(`log not found: ${name}`, 'not-found')
|
||||
}
|
||||
throw new WorkerLogError(`cannot read log ${name}: ${(err as Error).message}`, 'unavailable')
|
||||
}
|
||||
}
|
||||
|
|
@ -107,6 +107,20 @@ commands:
|
|||
- ops-dashboard
|
||||
description: "Build a docker compose service image"
|
||||
|
||||
docker_compose_build_worker_fresh:
|
||||
# De worker-idea Dockerfile clonet scrum4me-mcp van GitHub in een aparte
|
||||
# laag. Een gewone docker compose build hergebruikt die laag zolang
|
||||
# MCP_GIT_REF gelijk blijft (= altijd 'main'), dus nieuwe MCP-commits worden
|
||||
# NIET opgepikt. MCP_CACHE_BUST met een verse timestamp invalideert de
|
||||
# clone-laag. sh -c is nodig om $(date) te evalueren (geen shell-injectie:
|
||||
# vaste string, geen externe input).
|
||||
cmd:
|
||||
- sh
|
||||
- -c
|
||||
- "docker compose build --build-arg MCP_CACHE_BUST=$(date +%s) worker-idea"
|
||||
cwd: "/srv/scrum4me/compose"
|
||||
description: "Rebuild worker-idea image, busting the scrum4me-mcp clone cache so the latest MCP code is pulled"
|
||||
|
||||
docker_compose_up:
|
||||
cmd: ["docker", "compose", "up", "-d"]
|
||||
cwd: "/srv/scrum4me/compose"
|
||||
|
|
|
|||
86
ops-agent/flows.example/redeploy_all.yml
Normal file
86
ops-agent/flows.example/redeploy_all.yml
Normal file
|
|
@ -0,0 +1,86 @@
|
|||
# Volledige redeploy van de Scrum4Me-stack — alle drie de repos in één flow.
|
||||
# Copy to /etc/ops-agent/flows/redeploy_all.yml on the host.
|
||||
#
|
||||
# Dit is de gecombineerde werkwijze: eerst de hoofd-app (scrum4me-web),
|
||||
# dan de worker (scrum4me-docker image met verse scrum4me-mcp clone).
|
||||
# Equivalent aan update_scrum4me_web.yml gevolgd door update_mcp_worker.yml,
|
||||
# maar als één atomaire flow met audit-trail.
|
||||
#
|
||||
# Volgorde-redenering:
|
||||
# - Web eerst: de DB-migratie (stap 6) is additief en niet-breaking, dus
|
||||
# veilig terwijl de oude worker nog draait.
|
||||
# - Worker daarna: de nieuwe MCP-code kan afhankelijk zijn van de nieuwe
|
||||
# DB-kolommen/enums uit de web-migratie.
|
||||
#
|
||||
# Steps:
|
||||
# 1-9. scrum4me-web: status, fetch, log-ahead, pull, npm ci, migrate,
|
||||
# build, restart service, smoke-test
|
||||
# 10-16. worker: status + fetch + pull scrum4me-docker, pull scrum4me-mcp,
|
||||
# cache-busted image rebuild, container recreate, health-wait
|
||||
#
|
||||
# Let op: de worker-rebuild MOET docker_compose_build_worker_fresh gebruiken,
|
||||
# niet docker_compose_build — anders blijft de scrum4me-mcp clone-laag
|
||||
# gecached en wordt nieuwe MCP-code gemist.
|
||||
|
||||
name: Redeploy All
|
||||
description: Volledige stack-redeploy — scrum4me-web (pull/migrate/build/restart) gevolgd door de MCP-worker (cache-busted image rebuild)
|
||||
steps:
|
||||
# --- scrum4me-web -------------------------------------------------------
|
||||
- command_key: git_status
|
||||
args: ["/srv/scrum4me/repos/Scrum4Me"]
|
||||
on_failure: continue
|
||||
|
||||
- command_key: git_fetch
|
||||
args: ["/srv/scrum4me/repos/Scrum4Me"]
|
||||
on_failure: abort
|
||||
|
||||
- command_key: git_log_ahead
|
||||
args: ["/srv/scrum4me/repos/Scrum4Me"]
|
||||
on_failure: continue
|
||||
|
||||
- command_key: git_pull
|
||||
args: ["/srv/scrum4me/repos/Scrum4Me"]
|
||||
on_failure: abort
|
||||
|
||||
- command_key: npm_ci
|
||||
on_failure: abort
|
||||
|
||||
- command_key: prisma_migrate_deploy
|
||||
on_failure: abort
|
||||
|
||||
- command_key: npm_run_build
|
||||
on_failure: abort
|
||||
|
||||
- command_key: systemctl_restart
|
||||
args: ["scrum4me-web"]
|
||||
on_failure: abort
|
||||
|
||||
- command_key: curl_smoke_scrum4me_thuis
|
||||
on_failure: continue
|
||||
|
||||
# --- MCP-worker ---------------------------------------------------------
|
||||
- command_key: git_status
|
||||
args: ["/srv/scrum4me/repos/scrum4me-docker"]
|
||||
on_failure: continue
|
||||
|
||||
- command_key: git_fetch
|
||||
args: ["/srv/scrum4me/repos/scrum4me-docker"]
|
||||
on_failure: abort
|
||||
|
||||
- command_key: git_pull
|
||||
args: ["/srv/scrum4me/repos/scrum4me-docker"]
|
||||
on_failure: abort
|
||||
|
||||
- command_key: git_pull
|
||||
args: ["/srv/scrum4me/repos/scrum4me-mcp"]
|
||||
on_failure: continue
|
||||
|
||||
- command_key: docker_compose_build_worker_fresh
|
||||
on_failure: abort
|
||||
|
||||
- command_key: docker_compose_up_recreate
|
||||
args: ["worker-idea"]
|
||||
on_failure: abort
|
||||
|
||||
- command_key: wait_for_health_worker
|
||||
on_failure: continue
|
||||
|
|
@ -2,15 +2,21 @@
|
|||
# Copy to /etc/ops-agent/flows/update_mcp_worker.yml on the host.
|
||||
#
|
||||
# Steps:
|
||||
# 1. Show current git status (informational)
|
||||
# 2. Fetch remote refs
|
||||
# 3. Fast-forward pull (aborts if working tree is dirty)
|
||||
# 4. Rebuild the Docker image
|
||||
# 5. Recreate the container in detached mode (force-recreate picks up new image)
|
||||
# 6. Wait for worker pre-flight to pass (checks /var/log/agent/current)
|
||||
# 1. Show current git status of scrum4me-docker (informational)
|
||||
# 2. Fetch remote refs for scrum4me-docker
|
||||
# 3. Fast-forward pull scrum4me-docker (aborts if working tree is dirty)
|
||||
# 4. Fast-forward pull scrum4me-mcp — sync van de lokale repo. De image
|
||||
# cloned MCP zelf van GitHub, dus dit is alleen lokale referentie;
|
||||
# on_failure: continue zodat een dirty mcp-tree de deploy niet blokkeert.
|
||||
# 5. Rebuild the worker image MET cache-bust. Een gewone build hergebruikt
|
||||
# de scrum4me-mcp clone-laag (MCP_GIT_REF blijft 'main'), dus nieuwe
|
||||
# MCP-commits worden gemist. docker_compose_build_worker_fresh forceert
|
||||
# een verse clone via MCP_CACHE_BUST.
|
||||
# 6. Recreate the container (force-recreate picks up the new image)
|
||||
# 7. Wait for worker pre-flight to pass (checks /var/log/agent/current)
|
||||
|
||||
name: Update MCP Worker
|
||||
description: Pull latest code, rebuild Docker image, and restart the MCP worker service
|
||||
description: Pull latest code, rebuild the worker image with a fresh scrum4me-mcp clone, and recreate the worker container
|
||||
steps:
|
||||
- command_key: git_status
|
||||
args: ["/srv/scrum4me/repos/scrum4me-docker"]
|
||||
|
|
@ -24,8 +30,11 @@ steps:
|
|||
args: ["/srv/scrum4me/repos/scrum4me-docker"]
|
||||
on_failure: abort
|
||||
|
||||
- command_key: docker_compose_build
|
||||
args: ["worker-idea"]
|
||||
- command_key: git_pull
|
||||
args: ["/srv/scrum4me/repos/scrum4me-mcp"]
|
||||
on_failure: continue
|
||||
|
||||
- command_key: docker_compose_build_worker_fresh
|
||||
on_failure: abort
|
||||
|
||||
- command_key: docker_compose_up_recreate
|
||||
|
|
|
|||
|
|
@ -108,20 +108,33 @@ export async function execRoutes(app: FastifyInstance): Promise<void> {
|
|||
sendEvent('stderr', chunk.toString());
|
||||
});
|
||||
|
||||
child.on('close', (code) => {
|
||||
auditLog(command_key, args, code, Date.now() - startedAt);
|
||||
reply.raw.write(`event: exit\ndata: ${JSON.stringify({ code })}\n\n`);
|
||||
reply.raw.end();
|
||||
});
|
||||
|
||||
child.on('error', (err) => {
|
||||
auditLog(command_key, args, null, Date.now() - startedAt);
|
||||
reply.raw.write(`event: error\ndata: ${JSON.stringify({ message: err.message })}\n\n`);
|
||||
reply.raw.end();
|
||||
});
|
||||
|
||||
req.raw.on('close', () => {
|
||||
child.kill();
|
||||
});
|
||||
// Houd de route-handler open totdat het kind klaar is. Zonder dit return-t
|
||||
// de async functie meteen, finaliseert Fastify de reply, en triggert dat
|
||||
// `req.raw.on('close')` → `child.kill()` voordat het kind iets kon doen.
|
||||
await new Promise<void>((resolve) => {
|
||||
let settled = false
|
||||
const finish = () => {
|
||||
if (settled) return
|
||||
settled = true
|
||||
resolve()
|
||||
}
|
||||
child.on('close', (code) => {
|
||||
auditLog(command_key, args, code, Date.now() - startedAt)
|
||||
reply.raw.write(`event: exit\ndata: ${JSON.stringify({ code })}\n\n`)
|
||||
reply.raw.end()
|
||||
finish()
|
||||
})
|
||||
child.on('error', (err) => {
|
||||
auditLog(command_key, args, null, Date.now() - startedAt)
|
||||
reply.raw.write(`event: error\ndata: ${JSON.stringify({ message: err.message })}\n\n`)
|
||||
reply.raw.end()
|
||||
finish()
|
||||
})
|
||||
// Detect client disconnect via response stream (niet request stream —
|
||||
// die fired al direct na request body parse).
|
||||
reply.raw.on('close', () => {
|
||||
if (!settled) child.kill()
|
||||
})
|
||||
})
|
||||
});
|
||||
}
|
||||
|
|
|
|||
224
package-lock.json
generated
224
package-lock.json
generated
|
|
@ -9,13 +9,19 @@
|
|||
"version": "0.1.0",
|
||||
"dependencies": {
|
||||
"@base-ui/react": "^1.4.1",
|
||||
"@codemirror/language": "^6.12.3",
|
||||
"@codemirror/legacy-modes": "^6.5.2",
|
||||
"@codemirror/state": "^6.6.0",
|
||||
"@codemirror/view": "^6.42.1",
|
||||
"@prisma/adapter-pg": "^7.8.0",
|
||||
"@prisma/client": "^7.8.0",
|
||||
"@types/bcryptjs": "^2.4.6",
|
||||
"@types/pg": "^8.20.0",
|
||||
"@uiw/react-codemirror": "^4.25.9",
|
||||
"bcryptjs": "^3.0.3",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"codemirror": "^6.0.2",
|
||||
"lucide-react": "^1.14.0",
|
||||
"next": "16.2.6",
|
||||
"pg": "^8.20.0",
|
||||
|
|
@ -551,6 +557,108 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/autocomplete": {
|
||||
"version": "6.20.2",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.20.2.tgz",
|
||||
"integrity": "sha512-G5FPkgIiLjOgZMjqVjvuKQ1rGPtHogLldJr33eFJdVLtmwY+giGrlv/ewljLz6b9BSQLkjxuwBc6g6omDM+YxQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/language": "^6.0.0",
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@codemirror/view": "^6.17.0",
|
||||
"@lezer/common": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/commands": {
|
||||
"version": "6.10.3",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.10.3.tgz",
|
||||
"integrity": "sha512-JFRiqhKu+bvSkDLI+rUhJwSxQxYb759W5GBezE8Uc8mHLqC9aV/9aTC7yJSqCtB3F00pylrLCwnyS91Ap5ej4Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/language": "^6.0.0",
|
||||
"@codemirror/state": "^6.6.0",
|
||||
"@codemirror/view": "^6.27.0",
|
||||
"@lezer/common": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/language": {
|
||||
"version": "6.12.3",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.12.3.tgz",
|
||||
"integrity": "sha512-QwCZW6Tt1siP37Jet9Tb02Zs81TQt6qQrZR2H+eGMcFsL1zMrk2/b9CLC7/9ieP1fjIUMgviLWMmgiHoJrj+ZA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@codemirror/view": "^6.23.0",
|
||||
"@lezer/common": "^1.5.0",
|
||||
"@lezer/highlight": "^1.0.0",
|
||||
"@lezer/lr": "^1.0.0",
|
||||
"style-mod": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/legacy-modes": {
|
||||
"version": "6.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/legacy-modes/-/legacy-modes-6.5.2.tgz",
|
||||
"integrity": "sha512-/jJbwSTazlQEDOQw2FJ8LEEKVS72pU0lx6oM54kGpL8t/NJ2Jda3CZ4pcltiKTdqYSRk3ug1B3pil1gsjA6+8Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/language": "^6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/lint": {
|
||||
"version": "6.9.6",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.9.6.tgz",
|
||||
"integrity": "sha512-6Kp7r6XfCi/D/5sdXieMfg9pJU1bUEx96WITuLU6ESaKizCz0QHFMjY/TaFSbigDdEAIgi93itLBIUETP4oK+A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@codemirror/view": "^6.42.0",
|
||||
"crelt": "^1.0.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/search": {
|
||||
"version": "6.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.7.0.tgz",
|
||||
"integrity": "sha512-ZvGm99wc/s2cITtMT15LFdn8aH/aS+V+DqyGq/N5ZlV5vWtH+nILvC2nw0zX7ByNoHHDZ2IxxdW38O0tc5nVHg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@codemirror/view": "^6.37.0",
|
||||
"crelt": "^1.0.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/state": {
|
||||
"version": "6.6.0",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.6.0.tgz",
|
||||
"integrity": "sha512-4nbvra5R5EtiCzr9BTHiTLc+MLXK2QGiAVYMyi8PkQd3SR+6ixar/Q/01Fa21TBIDOZXgeWV4WppsQolSreAPQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@marijn/find-cluster-break": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/theme-one-dark": {
|
||||
"version": "6.1.3",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/theme-one-dark/-/theme-one-dark-6.1.3.tgz",
|
||||
"integrity": "sha512-NzBdIvEJmx6fjeremiGp3t/okrLPYT0d9orIc7AFun8oZcRk58aejkqhv6spnz4MLAevrKNPMQYXEWMg4s+sKA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/language": "^6.0.0",
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@codemirror/view": "^6.0.0",
|
||||
"@lezer/highlight": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@codemirror/view": {
|
||||
"version": "6.42.1",
|
||||
"resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.42.1.tgz",
|
||||
"integrity": "sha512-ToN3oFc0nsxNUYVF5P0ztLgbC4UPPjPtA9aKYhkOKQaZASpOUo6ISXyQLP66ctVwlDc+j6Jv0uK5IFALkiXztg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/state": "^6.6.0",
|
||||
"crelt": "^1.0.6",
|
||||
"style-mod": "^4.1.0",
|
||||
"w3c-keyname": "^2.2.4"
|
||||
}
|
||||
},
|
||||
"node_modules/@cspotcode/source-map-support": {
|
||||
"version": "0.8.1",
|
||||
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
||||
|
|
@ -1406,6 +1514,36 @@
|
|||
"integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@lezer/common": {
|
||||
"version": "1.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.5.2.tgz",
|
||||
"integrity": "sha512-sxQE460fPZyU3sdc8lafxiPwJHBzZRy/udNFynGQky1SePYBdhkBl1kOagA9uT3pxR8K09bOrmTUqA9wb/PjSQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@lezer/highlight": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.3.tgz",
|
||||
"integrity": "sha512-qXdH7UqTvGfdVBINrgKhDsVTJTxactNNxLk7+UMwZhU13lMHaOBlJe9Vqp907ya56Y3+ed2tlqzys7jDkTmW0g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@lezer/common": "^1.3.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@lezer/lr": {
|
||||
"version": "1.4.10",
|
||||
"resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.10.tgz",
|
||||
"integrity": "sha512-rnCpTIBafOx4mRp43xOxDJbFipJm/c0cia/V5TiGlhmMa+wsSdoGmUN3w5Bqrks/09Q/D4tNAmWaT8p6NRi77A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@lezer/common": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@marijn/find-cluster-break": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz",
|
||||
"integrity": "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@modelcontextprotocol/sdk": {
|
||||
"version": "1.29.0",
|
||||
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.29.0.tgz",
|
||||
|
|
@ -2562,6 +2700,59 @@
|
|||
"integrity": "sha512-lrpDziQipxCEeK5kWxvljWYhUvOiB2A9izZd9B2AFarYAkqZshb4lPbRs7zKEic6eGtH8V/2qJW+dPp9OtF6bw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@uiw/codemirror-extensions-basic-setup": {
|
||||
"version": "4.25.9",
|
||||
"resolved": "https://registry.npmjs.org/@uiw/codemirror-extensions-basic-setup/-/codemirror-extensions-basic-setup-4.25.9.tgz",
|
||||
"integrity": "sha512-QFAqr+pu6lDmNpAlecODcF49TlsrZ0bj15zPzfhiqSDl+Um3EsDLFLppixC7kFLn+rdDM2LTvVjn5CPvefpRgw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/autocomplete": "^6.0.0",
|
||||
"@codemirror/commands": "^6.0.0",
|
||||
"@codemirror/language": "^6.0.0",
|
||||
"@codemirror/lint": "^6.0.0",
|
||||
"@codemirror/search": "^6.0.0",
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@codemirror/view": "^6.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://jaywcjlove.github.io/#/sponsor"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@codemirror/autocomplete": ">=6.0.0",
|
||||
"@codemirror/commands": ">=6.0.0",
|
||||
"@codemirror/language": ">=6.0.0",
|
||||
"@codemirror/lint": ">=6.0.0",
|
||||
"@codemirror/search": ">=6.0.0",
|
||||
"@codemirror/state": ">=6.0.0",
|
||||
"@codemirror/view": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@uiw/react-codemirror": {
|
||||
"version": "4.25.9",
|
||||
"resolved": "https://registry.npmjs.org/@uiw/react-codemirror/-/react-codemirror-4.25.9.tgz",
|
||||
"integrity": "sha512-HftqCBUYShAOH0pGi1CHP8vfm5L8fQ3+0j0VI6lQD6QpK+UBu3J7nxfEN5O/BXMilMNf9ZyFJRvRcuMMOLHMng==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.18.6",
|
||||
"@codemirror/commands": "^6.1.0",
|
||||
"@codemirror/state": "^6.1.1",
|
||||
"@codemirror/theme-one-dark": "^6.0.0",
|
||||
"@uiw/codemirror-extensions-basic-setup": "4.25.9",
|
||||
"codemirror": "^6.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://jaywcjlove.github.io/#/sponsor"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@babel/runtime": ">=7.11.0",
|
||||
"@codemirror/state": ">=6.0.0",
|
||||
"@codemirror/theme-one-dark": ">=6.0.0",
|
||||
"@codemirror/view": ">=6.0.0",
|
||||
"codemirror": ">=6.0.0",
|
||||
"react": ">=17.0.0",
|
||||
"react-dom": ">=17.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@ungap/structured-clone": {
|
||||
"version": "1.3.1",
|
||||
"resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.1.tgz",
|
||||
|
|
@ -3130,6 +3321,21 @@
|
|||
"integrity": "sha512-Oofo0pq3IKnsFtuHqSF7TqBfr71aeyZDVJ0HpmqB7FBM2qEigL0iPONSCZSO9pE9dZTAxANe5XHG9Uy0YMv8cg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/codemirror": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.2.tgz",
|
||||
"integrity": "sha512-VhydHotNW5w1UGK0Qj96BwSk/Zqbp9WbnyK2W/eVMv4QyF41INRGpjUhFJY7/uDNuudSc33a/PKr4iDqRduvHw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@codemirror/autocomplete": "^6.0.0",
|
||||
"@codemirror/commands": "^6.0.0",
|
||||
"@codemirror/language": "^6.0.0",
|
||||
"@codemirror/lint": "^6.0.0",
|
||||
"@codemirror/search": "^6.0.0",
|
||||
"@codemirror/state": "^6.0.0",
|
||||
"@codemirror/view": "^6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
|
|
@ -3269,6 +3475,12 @@
|
|||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/crelt": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz",
|
||||
"integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/cross-spawn": {
|
||||
"version": "7.0.6",
|
||||
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
|
||||
|
|
@ -6953,6 +7165,12 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/style-mod": {
|
||||
"version": "4.1.3",
|
||||
"resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.3.tgz",
|
||||
"integrity": "sha512-i/n8VsZydrugj3Iuzll8+x/00GH2vnYsk1eomD8QiRrSAeW6ItbCQDtfXCeJHd0iwiNagqjQkvpvREEPtW3IoQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/styled-jsx": {
|
||||
"version": "5.1.6",
|
||||
"resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.6.tgz",
|
||||
|
|
@ -7447,6 +7665,12 @@
|
|||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/w3c-keyname": {
|
||||
"version": "2.2.8",
|
||||
"resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz",
|
||||
"integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/web-streams-polyfill": {
|
||||
"version": "3.3.3",
|
||||
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz",
|
||||
|
|
|
|||
|
|
@ -13,13 +13,19 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@base-ui/react": "^1.4.1",
|
||||
"@codemirror/language": "^6.12.3",
|
||||
"@codemirror/legacy-modes": "^6.5.2",
|
||||
"@codemirror/state": "^6.6.0",
|
||||
"@codemirror/view": "^6.42.1",
|
||||
"@prisma/adapter-pg": "^7.8.0",
|
||||
"@prisma/client": "^7.8.0",
|
||||
"@types/bcryptjs": "^2.4.6",
|
||||
"@types/pg": "^8.20.0",
|
||||
"@uiw/react-codemirror": "^4.25.9",
|
||||
"bcryptjs": "^3.0.3",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"codemirror": "^6.0.2",
|
||||
"lucide-react": "^1.14.0",
|
||||
"next": "16.2.6",
|
||||
"pg": "^8.20.0",
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue