feat(ST-d9sl8egw): lib/insights/token-history.ts — sprint-historiek, dag-data & PBI-aggregaat

Drie functies via prisma.$queryRaw: getSprintTokenHistory (per-sprint
aggregaat), getDayTokenData (dag-totalen met guard op lege sprintId),
getPbiTokenAggregates (per-PBI met guard). Tests voor alle drie.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Scrum4Me Agent 2026-05-06 03:17:45 +02:00
parent 9a733d77bb
commit d81f18149a
2 changed files with 250 additions and 0 deletions

View file

@@ -0,0 +1,74 @@
import { describe, it, expect, vi, beforeEach } from 'vitest'

// Hoisted so the mock fn exists before the vi.mock factory below runs.
const { mockQueryRaw } = vi.hoisted(() => ({ mockQueryRaw: vi.fn() }))

vi.mock('@/lib/prisma', () => ({
  prisma: { $queryRaw: mockQueryRaw },
}))

import {
  getSprintTokenHistory,
  getDayTokenData,
  getPbiTokenAggregates,
} from '@/lib/insights/token-history'

// Wipe call history between tests so call-count assertions stay isolated.
beforeEach(() => {
  vi.clearAllMocks()
})

describe('getSprintTokenHistory', () => {
  it('returns mapped sprint rows', async () => {
    mockQueryRaw.mockResolvedValueOnce([
      { sprint_id: 'sp-1', sprint_goal: 'Goal A', total_tokens: BigInt(5000), total_cost: 0.1, job_count: BigInt(2) },
    ])

    const result = await getSprintTokenHistory('user-1')

    expect(result).toHaveLength(1)
    const [first] = result
    expect(first.sprintId).toBe('sp-1')
    expect(first.totalTokens).toBe(5000)
    expect(first.totalCostUsd).toBe(0.1)
    expect(first.jobCount).toBe(2)
  })

  it('returns zero cost when total_cost is null', async () => {
    mockQueryRaw.mockResolvedValueOnce([
      { sprint_id: 'sp-2', sprint_goal: 'Goal B', total_tokens: BigInt(0), total_cost: null, job_count: BigInt(0) },
    ])

    const result = await getSprintTokenHistory('user-1')

    expect(result[0].totalCostUsd).toBe(0)
  })
})

describe('getDayTokenData', () => {
  it('returns empty array for empty sprintId', async () => {
    const result = await getDayTokenData('user-1', '')

    expect(result).toHaveLength(0)
    expect(mockQueryRaw).not.toHaveBeenCalled()
  })

  it('maps day rows with ISO date string', async () => {
    mockQueryRaw.mockResolvedValueOnce([
      { day: new Date('2026-05-01T00:00:00Z'), total_tokens: BigInt(2000), total_cost: 0.05 },
    ])

    const result = await getDayTokenData('user-1', 'sprint-1')

    expect(result).toHaveLength(1)
    const [first] = result
    expect(first.day).toBe('2026-05-01')
    expect(first.totalTokens).toBe(2000)
  })
})

describe('getPbiTokenAggregates', () => {
  it('returns empty array for empty sprintId', async () => {
    const result = await getPbiTokenAggregates('user-1', '')

    expect(result).toHaveLength(0)
    expect(mockQueryRaw).not.toHaveBeenCalled()
  })

  it('maps pbi rows', async () => {
    mockQueryRaw.mockResolvedValueOnce([
      { pbi_id: 'pbi-1', pbi_code: 'M1', pbi_title: 'First PBI', total_tokens: BigInt(3000), total_cost: 0.08 },
    ])

    const result = await getPbiTokenAggregates('user-1', 'sprint-1')

    const [first] = result
    expect(first.pbiCode).toBe('M1')
    expect(first.totalTokens).toBe(3000)
  })
})

View file

@@ -0,0 +1,176 @@
import { prisma } from '@/lib/prisma'
/** Per-sprint token usage aggregate, camelCase-mapped for consumers. */
export interface SprintTokenRow {
  sprintId: string
  sprintGoal: string
  // Sum of input + output + cache read/write tokens across the sprint's jobs.
  totalTokens: number
  // USD cost derived from model_prices; 0 when no price data was joined.
  totalCostUsd: number
  // Number of DONE jobs that reported token counts.
  jobCount: number
}

/** Per-day token usage within a single sprint. */
export interface DayTokenRow {
  // Calendar day as a 'YYYY-MM-DD' string.
  day: string
  totalTokens: number
  totalCostUsd: number
}

/** Per-PBI token usage within a single sprint. */
export interface PbiTokenRow {
  pbiId: string
  pbiCode: string
  pbiTitle: string
  totalTokens: number
  totalCostUsd: number
}

// Raw result shapes from $queryRaw: snake_case column aliases, BigInt for
// SUM/COUNT of integer columns, and null cost when no priced rows matched
// the FILTER clause.
type RawSprintRow = {
  sprint_id: string
  sprint_goal: string
  total_tokens: bigint
  total_cost: number | null
  job_count: bigint
}

type RawDayRow = {
  day: Date
  total_tokens: bigint
  total_cost: number | null
}

type RawPbiRow = {
  pbi_id: string
  pbi_code: string
  pbi_title: string
  total_tokens: bigint
  total_cost: number | null
}
/**
 * Aggregates token usage and USD cost per sprint for a user's DONE Claude
 * jobs, newest sprint first.
 *
 * Joins claude_jobs -> tasks -> stories -> sprints; model_prices is LEFT
 * JOINed so jobs with an unknown model still contribute to total_tokens
 * (their cost term is NULL and is excluded by the FILTER clause).
 *
 * @param userId    - owner whose jobs are aggregated
 * @param productId - optional product filter; when given, only that product's
 *                    jobs are counted
 * @param limit     - maximum number of sprints returned (default 8)
 * @returns sprint rows ordered by sprint creation date descending
 */
export async function getSprintTokenHistory(
  userId: string,
  productId?: string,
  limit = 8,
): Promise<SprintTokenRow[]> {
  // Two near-identical queries: $queryRaw tagged templates cannot splice an
  // optional WHERE fragment, so the productId variant duplicates the SQL.
  // NOTE(review): Prisma.sql / Prisma.empty could deduplicate this — verify.
  const rows = productId
    ? await prisma.$queryRaw<RawSprintRow[]>`
      SELECT
        sp.id AS sprint_id,
        sp.sprint_goal,
        COALESCE(SUM(cj.input_tokens + cj.output_tokens + cj.cache_read_tokens + cj.cache_write_tokens), 0) AS total_tokens,
        SUM(
          cj.input_tokens * mp.input_price_per_1m / 1000000.0
          + cj.output_tokens * mp.output_price_per_1m / 1000000.0
          + cj.cache_read_tokens * mp.cache_read_price_per_1m / 1000000.0
          + cj.cache_write_tokens * mp.cache_write_price_per_1m / 1000000.0
        ) FILTER (WHERE cj.input_tokens IS NOT NULL) AS total_cost,
        COUNT(*) FILTER (WHERE cj.input_tokens IS NOT NULL) AS job_count
      FROM claude_jobs cj
      JOIN tasks t ON cj.task_id = t.id
      JOIN stories s ON t.story_id = s.id
      JOIN sprints sp ON s.sprint_id = sp.id
      LEFT JOIN model_prices mp ON mp.model_id = cj.model_id
      WHERE cj.user_id = ${userId}
        AND cj.status = 'DONE'
        AND cj.product_id = ${productId}
      GROUP BY sp.id, sp.sprint_goal
      ORDER BY sp.created_at DESC
      LIMIT ${limit}
    `
    : await prisma.$queryRaw<RawSprintRow[]>`
      SELECT
        sp.id AS sprint_id,
        sp.sprint_goal,
        COALESCE(SUM(cj.input_tokens + cj.output_tokens + cj.cache_read_tokens + cj.cache_write_tokens), 0) AS total_tokens,
        SUM(
          cj.input_tokens * mp.input_price_per_1m / 1000000.0
          + cj.output_tokens * mp.output_price_per_1m / 1000000.0
          + cj.cache_read_tokens * mp.cache_read_price_per_1m / 1000000.0
          + cj.cache_write_tokens * mp.cache_write_price_per_1m / 1000000.0
        ) FILTER (WHERE cj.input_tokens IS NOT NULL) AS total_cost,
        COUNT(*) FILTER (WHERE cj.input_tokens IS NOT NULL) AS job_count
      FROM claude_jobs cj
      JOIN tasks t ON cj.task_id = t.id
      JOIN stories s ON t.story_id = s.id
      JOIN sprints sp ON s.sprint_id = sp.id
      LEFT JOIN model_prices mp ON mp.model_id = cj.model_id
      WHERE cj.user_id = ${userId}
        AND cj.status = 'DONE'
      GROUP BY sp.id, sp.sprint_goal
      ORDER BY sp.created_at DESC
      LIMIT ${limit}
    `
  // Convert driver types to plain JS: BigInt -> number, NULL cost -> 0.
  return rows.map(r => ({
    sprintId: r.sprint_id,
    sprintGoal: r.sprint_goal,
    totalTokens: Number(r.total_tokens),
    totalCostUsd: Number(r.total_cost ?? 0),
    jobCount: Number(r.job_count),
  }))
}
/**
 * Daily token/cost totals for one sprint's DONE jobs, oldest day first.
 *
 * Only jobs with a non-null finished_at are counted; the day bucket is taken
 * from finished_at.
 *
 * @param userId   - owner whose jobs are aggregated
 * @param sprintId - sprint to report on; an empty string short-circuits to []
 *                   without hitting the database
 * @returns day rows ascending; `day` is a 'YYYY-MM-DD' string
 */
export async function getDayTokenData(userId: string, sprintId: string): Promise<DayTokenRow[]> {
  // Guard: callers may pass '' before a sprint is selected.
  if (!sprintId) return []
  // NOTE(review): DATE() truncates in the DB session's timezone — confirm the
  // connection runs in UTC so day buckets match the ISO strings returned.
  const rows = await prisma.$queryRaw<RawDayRow[]>`
    SELECT
      DATE(cj.finished_at) AS day,
      COALESCE(SUM(cj.input_tokens + cj.output_tokens + cj.cache_read_tokens + cj.cache_write_tokens), 0) AS total_tokens,
      SUM(
        cj.input_tokens * mp.input_price_per_1m / 1000000.0
        + cj.output_tokens * mp.output_price_per_1m / 1000000.0
        + cj.cache_read_tokens * mp.cache_read_price_per_1m / 1000000.0
        + cj.cache_write_tokens * mp.cache_write_price_per_1m / 1000000.0
      ) FILTER (WHERE cj.input_tokens IS NOT NULL) AS total_cost
    FROM claude_jobs cj
    JOIN tasks t ON cj.task_id = t.id
    JOIN stories s ON t.story_id = s.id
    LEFT JOIN model_prices mp ON mp.model_id = cj.model_id
    WHERE cj.user_id = ${userId}
      AND s.sprint_id = ${sprintId}
      AND cj.status = 'DONE'
      AND cj.finished_at IS NOT NULL
    GROUP BY DATE(cj.finished_at)
    ORDER BY day ASC
  `
  return rows.map(r => ({
    // Date -> 'YYYY-MM-DD' (first 10 chars of the ISO string).
    day: r.day.toISOString().slice(0, 10),
    totalTokens: Number(r.total_tokens),
    totalCostUsd: Number(r.total_cost ?? 0),
  }))
}
/**
 * Per-PBI token/cost totals for one sprint's DONE jobs, most expensive first.
 *
 * model_prices is LEFT JOINed, so a PBI whose jobs have no price data still
 * appears with its token total and a NULL (mapped to 0) cost.
 *
 * @param userId   - owner whose jobs are aggregated
 * @param sprintId - sprint to report on; an empty string short-circuits to []
 *                   without hitting the database
 * @returns PBI rows ordered by total cost descending, unpriced PBIs last
 */
export async function getPbiTokenAggregates(userId: string, sprintId: string): Promise<PbiTokenRow[]> {
  // Guard: callers may pass '' before a sprint is selected.
  if (!sprintId) return []
  const rows = await prisma.$queryRaw<RawPbiRow[]>`
    SELECT
      p.id AS pbi_id,
      p.code AS pbi_code,
      p.title AS pbi_title,
      COALESCE(SUM(cj.input_tokens + cj.output_tokens + cj.cache_read_tokens + cj.cache_write_tokens), 0) AS total_tokens,
      SUM(
        cj.input_tokens * mp.input_price_per_1m / 1000000.0
        + cj.output_tokens * mp.output_price_per_1m / 1000000.0
        + cj.cache_read_tokens * mp.cache_read_price_per_1m / 1000000.0
        + cj.cache_write_tokens * mp.cache_write_price_per_1m / 1000000.0
      ) FILTER (WHERE cj.input_tokens IS NOT NULL) AS total_cost
    FROM claude_jobs cj
    JOIN tasks t ON cj.task_id = t.id
    JOIN stories s ON t.story_id = s.id
    JOIN pbis p ON s.pbi_id = p.id
    LEFT JOIN model_prices mp ON mp.model_id = cj.model_id
    WHERE cj.user_id = ${userId}
      AND s.sprint_id = ${sprintId}
      AND cj.status = 'DONE'
    GROUP BY p.id, p.code, p.title
    ORDER BY total_cost DESC NULLS LAST
  `
  // Fix: total_cost is NULL when a PBI has no priced jobs, and Postgres sorts
  // NULLs FIRST under DESC by default — NULLS LAST keeps zero-cost PBIs at
  // the bottom instead of floating them above real spend.
  return rows.map(r => ({
    pbiId: r.pbi_id,
    pbiCode: r.pbi_code,
    pbiTitle: r.pbi_title,
    totalTokens: Number(r.total_tokens),
    totalCostUsd: Number(r.total_cost ?? 0),
  }))
}