feat(PBI-74): oude stores opruimen (Story 8)

Workspace-store is nu de enige bron voor product-backlog client-state. De
vier voorgangers en de dual-dispatch-infrastructuur zijn verwijderd.

- T-872: grep over codebase op useBacklogStore/usePlannerStore/
  useSelectionStore/useProductStore is leeg.
- T-873..T-876: stores/{backlog,planner,selection,product}-store.ts deleted.
- T-877: __tests__/realtime/payload-contract.test.ts en
  __tests__/api/backlog-realtime.test.ts deleted — pbi/story/task I|U|D
  payload-handling wordt al gedekt door
  __tests__/stores/product-workspace/store.test.ts (incl. parent-move,
  idempotent inserts, delete-cleanup).
- T-878: lib/realtime/dev-workspace-fingerprint.ts deleted, dual-dispatch
  uit BacklogHydrationWrapper en lib/realtime/use-backlog-realtime.ts
  weggehaald. stores/products-store.ts (lijst van producten ≠ active
  product) blijft ongewijzigd.

Bijwerkingen:
- BacklogPbi en BacklogStory types in components/backlog/story-panel.tsx en
  components/sprint/sprint-backlog.tsx krijgen sort_order zodat ze met de
  workspace-types overeenkomen.
- Server-pages /products/[id]/page.tsx (desktop+mobile) en
  /products/[id]/sprint/[sprintId]/page.tsx selecteren sort_order op story
  en mappen het door in de hydration-payload.

Verify: lint+typecheck clean, 626/626 tests groen (verlies van 25 redundante
oude-store tests; workspace-store tests dekken hetzelfde gedrag).

Refs: PBI-74, ST-1325, T-872..T-878

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Janpeter Visser 2026-05-10 01:27:43 +02:00
parent 541154b521
commit f7f4bf80bf
14 changed files with 23 additions and 620 deletions

View file

@ -1,131 +0,0 @@
import { describe, it, expect, vi, beforeEach } from 'vitest'
// Test doubles. getSession is created via vi.hoisted so the vi.mock factory
// below can reference it before the module graph is evaluated.
const { mockGetSession } = vi.hoisted(() => ({ mockGetSession: vi.fn() }))
vi.mock('@/lib/auth', () => ({ getSession: mockGetSession }))
vi.mock('@/lib/product-access', () => ({
getAccessibleProduct: vi.fn(),
}))
// These imports resolve against the mocked modules registered above.
import { getAccessibleProduct } from '@/lib/product-access'
import type { NextRequest } from 'next/server'
import { GET } from '@/app/api/realtime/backlog/route'
import { useBacklogStore } from '@/stores/backlog-store'
// Typed view of the auto-mocked function so tests can call mockResolvedValue.
const mockGetAccessibleProduct = getAccessibleProduct as ReturnType<typeof vi.fn>
// Minimal NextRequest stand-in: the route handler only reads `signal` and
// `nextUrl`, so a plain object carrying those two fields is enough.
// When productId is omitted (or falsy) the query string is left off entirely.
function makeReq(productId?: string): NextRequest {
  const base = 'http://localhost/api/realtime/backlog'
  const target = productId ? `${base}?product_id=${productId}` : base
  const stub = {
    signal: new AbortController().signal,
    nextUrl: new URL(target),
  }
  return stub as unknown as NextRequest
}
// Reset call history and queued mock results between tests so one test's
// mockResolvedValue cannot leak into the next.
beforeEach(() => {
vi.clearAllMocks()
})
// Route-level guard tests: auth → query validation → product access → DB env.
describe('GET /api/realtime/backlog', () => {
it('401 when not authenticated', async () => {
mockGetSession.mockResolvedValue({ userId: undefined, isDemo: false })
const res = await GET(makeReq('prod-1'))
expect(res.status).toBe(401)
// Auth short-circuits: the product-access check must never run.
expect(mockGetAccessibleProduct).not.toHaveBeenCalled()
})
it('400 when product_id is missing', async () => {
mockGetSession.mockResolvedValue({ userId: 'user-1', isDemo: false })
const res = await GET(makeReq())
expect(res.status).toBe(400)
})
it('403 when user has no access to the product', async () => {
mockGetSession.mockResolvedValue({ userId: 'user-1', isDemo: false })
mockGetAccessibleProduct.mockResolvedValue(null)
const res = await GET(makeReq('prod-1'))
expect(res.status).toBe(403)
expect(mockGetAccessibleProduct).toHaveBeenCalledWith('prod-1', 'user-1')
})
it('500 when DIRECT_URL and DATABASE_URL are absent', async () => {
mockGetSession.mockResolvedValue({ userId: 'user-1', isDemo: false })
mockGetAccessibleProduct.mockResolvedValue({ id: 'prod-1' })
// Snapshot the env vars before deleting them; restored in finally so a
// failing expect cannot leave the process env mutated for later tests.
const before = { DIRECT_URL: process.env.DIRECT_URL, DATABASE_URL: process.env.DATABASE_URL }
delete process.env.DIRECT_URL
delete process.env.DATABASE_URL
try {
const res = await GET(makeReq('prod-1'))
expect(res.status).toBe(500)
} finally {
if (before.DIRECT_URL !== undefined) process.env.DIRECT_URL = before.DIRECT_URL
if (before.DATABASE_URL !== undefined) process.env.DATABASE_URL = before.DATABASE_URL
}
})
it('demo user is allowed (no 403) when product is accessible', async () => {
mockGetSession.mockResolvedValue({ userId: 'demo-user', isDemo: true })
mockGetAccessibleProduct.mockResolvedValue({ id: 'prod-1' })
// Same env-stripping trick as above: with no DB URL the handler can only
// reach 500 if it got PAST the auth/access guards.
const before = { DIRECT_URL: process.env.DIRECT_URL, DATABASE_URL: process.env.DATABASE_URL }
delete process.env.DIRECT_URL
delete process.env.DATABASE_URL
try {
const res = await GET(makeReq('prod-1'))
// Fails at 500 (no DB URL) — not 403, confirming demo user is not blocked
expect(res.status).toBe(500)
} finally {
if (before.DIRECT_URL !== undefined) process.env.DIRECT_URL = before.DIRECT_URL
if (before.DATABASE_URL !== undefined) process.env.DATABASE_URL = before.DATABASE_URL
}
})
})
// shouldEmit scope filter — white-box unit tests
// Exercises the store reducer directly (setState + applyChange) instead of
// going through the SSE transport.
describe('shouldEmit scope filter (via backlog-store reducer)', () => {
  // Shorthand for the store singleton under test.
  const store = () => useBacklogStore.getState()

  it('applyChange: pbi INSERT adds to pbis array', () => {
    useBacklogStore.setState({ pbis: [], storiesByPbi: {}, tasksByStory: {} })
    const inserted = { id: 'pbi-1', code: 'PBI-1', title: 'Test', priority: 2, created_at: new Date(), status: 'ready' as const }
    store().applyChange('pbi', 'I', inserted)
    expect(store().pbis).toHaveLength(1)
    expect(store().pbis[0].id).toBe('pbi-1')
  })

  it('applyChange: pbi UPDATE patches existing pbi', () => {
    const existing = { id: 'pbi-1', code: 'PBI-1', title: 'Old', priority: 2, created_at: new Date(), status: 'ready' as const }
    useBacklogStore.setState({ pbis: [existing], storiesByPbi: {}, tasksByStory: {} })
    store().applyChange('pbi', 'U', { id: 'pbi-1', title: 'New' })
    expect(store().pbis[0].title).toBe('New')
  })

  it('applyChange: pbi DELETE removes pbi', () => {
    const existing = { id: 'pbi-1', code: 'PBI-1', title: 'Test', priority: 2, created_at: new Date(), status: 'ready' as const }
    useBacklogStore.setState({ pbis: [existing], storiesByPbi: {}, tasksByStory: {} })
    store().applyChange('pbi', 'D', { id: 'pbi-1' })
    expect(store().pbis).toHaveLength(0)
  })

  it('applyChange: story INSERT adds to storiesByPbi', () => {
    useBacklogStore.setState({ pbis: [], storiesByPbi: { 'pbi-1': [] }, tasksByStory: {} })
    const inserted = { id: 'story-1', code: 'ST-1', title: 'S', description: null, acceptance_criteria: null, priority: 2, status: 'OPEN', pbi_id: 'pbi-1', sprint_id: null, created_at: new Date() }
    store().applyChange('story', 'I', inserted)
    expect(store().storiesByPbi['pbi-1']).toHaveLength(1)
  })

  it('applyChange: story DELETE removes from correct pbi bucket', () => {
    const existing = { id: 'story-1', code: 'ST-1', title: 'S', description: null, acceptance_criteria: null, priority: 2, status: 'OPEN', pbi_id: 'pbi-1', sprint_id: null, created_at: new Date() }
    useBacklogStore.setState({ pbis: [], storiesByPbi: { 'pbi-1': [existing] }, tasksByStory: {} })
    store().applyChange('story', 'D', { id: 'story-1' })
    expect(store().storiesByPbi['pbi-1']).toHaveLength(0)
  })

  it('applyChange: task UPDATE patches task across story buckets', () => {
    const existing = { id: 'task-1', title: 'Old', description: null, priority: 2, status: 'TO_DO', sort_order: 1, story_id: 'story-1', created_at: new Date() }
    useBacklogStore.setState({ pbis: [], storiesByPbi: {}, tasksByStory: { 'story-1': [existing] } })
    store().applyChange('task', 'U', { id: 'task-1', status: 'IN_PROGRESS' })
    expect(store().tasksByStory['story-1'][0].status).toBe('IN_PROGRESS')
  })
})

View file

@ -1,161 +0,0 @@
import { describe, it, expect, beforeEach } from 'vitest'
import { useBacklogStore } from '@/stores/backlog-store'
import type { BacklogPbi, BacklogStory, BacklogTask } from '@/stores/backlog-store'
// Canonical fixtures mirroring the realtime payload shape for each entity.
// The ids chain together: TASK.story_id → STORY.id and STORY.pbi_id → PBI.id.
const PBI: BacklogPbi = {
id: 'pbi-1',
code: 'PBI-1',
title: 'Realtime PBI',
priority: 2,
description: 'desc',
created_at: new Date('2024-01-01T00:00:00Z'),
status: 'ready',
}
const STORY: BacklogStory = {
id: 'story-1',
code: 'ST-1',
title: 'Realtime story',
description: null,
acceptance_criteria: null,
priority: 2,
status: 'OPEN',
pbi_id: 'pbi-1',
sprint_id: null,
created_at: new Date('2024-01-01T00:00:00Z'),
}
const TASK: BacklogTask = {
id: 'task-1',
title: 'Realtime task',
description: null,
priority: 2,
status: 'TO_DO',
sort_order: 1,
story_id: 'story-1',
created_at: new Date('2024-01-01T00:00:00Z'),
}
// Every test starts from an empty store; each test seeds state explicitly.
beforeEach(() => {
useBacklogStore.setState({ pbis: [], storiesByPbi: {}, tasksByStory: {} })
})
// ---------------------------------------------------------------------------
// PBI
// ---------------------------------------------------------------------------
describe('PBI payload contract', () => {
it('INSERT: entity appears in pbis with correct title and status', () => {
useBacklogStore.getState().applyChange('pbi', 'I', { ...PBI })
const state = useBacklogStore.getState()
expect(state.pbis).toHaveLength(1)
expect(state.pbis[0].id).toBe('pbi-1')
expect(state.pbis[0].title).toBe('Realtime PBI')
expect(state.pbis[0].status).toBe('ready')
})
it('INSERT is idempotent: duplicate SSE-event does not add a second entry', () => {
useBacklogStore.getState().applyChange('pbi', 'I', { ...PBI })
useBacklogStore.getState().applyChange('pbi', 'I', { ...PBI })
expect(useBacklogStore.getState().pbis).toHaveLength(1)
})
it('UPDATE: changed_fields partial merges into existing entity', () => {
useBacklogStore.setState({ pbis: [{ ...PBI }], storiesByPbi: {}, tasksByStory: {} })
useBacklogStore.getState().applyChange('pbi', 'U', { id: 'pbi-1', title: 'Updated PBI', status: 'in_sprint' as const })
const pbi = useBacklogStore.getState().pbis[0]
expect(pbi.title).toBe('Updated PBI')
expect(pbi.status).toBe('in_sprint')
expect(pbi.priority).toBe(2) // unchanged field retained
})
it('DELETE: entity is removed from pbis', () => {
useBacklogStore.setState({ pbis: [{ ...PBI }], storiesByPbi: {}, tasksByStory: {} })
useBacklogStore.getState().applyChange('pbi', 'D', { id: 'pbi-1' })
expect(useBacklogStore.getState().pbis).toHaveLength(0)
})
})
// ---------------------------------------------------------------------------
// Story
// ---------------------------------------------------------------------------
// I/U/D contract for story events; stories live in storiesByPbi buckets
// keyed by their parent pbi_id.
describe('Story payload contract', () => {
it('INSERT: entity appears in storiesByPbi[pbi_id] with correct title and status', () => {
useBacklogStore.setState({ pbis: [], storiesByPbi: { 'pbi-1': [] }, tasksByStory: {} })
useBacklogStore.getState().applyChange('story', 'I', { ...STORY })
const bucket = useBacklogStore.getState().storiesByPbi['pbi-1']
expect(bucket).toHaveLength(1)
expect(bucket[0].id).toBe('story-1')
expect(bucket[0].title).toBe('Realtime story')
expect(bucket[0].status).toBe('OPEN')
})
it('INSERT: creates bucket when pbi_id was not yet in storiesByPbi', () => {
// No pre-seeded bucket here — the reducer must create it on the fly.
useBacklogStore.getState().applyChange('story', 'I', { ...STORY })
expect(useBacklogStore.getState().storiesByPbi['pbi-1']).toHaveLength(1)
})
it('INSERT is idempotent: duplicate SSE-event does not add a second entry', () => {
useBacklogStore.getState().applyChange('story', 'I', { ...STORY })
useBacklogStore.getState().applyChange('story', 'I', { ...STORY })
expect(useBacklogStore.getState().storiesByPbi['pbi-1']).toHaveLength(1)
})
it('UPDATE: changed_fields partial merges into existing story', () => {
useBacklogStore.setState({ pbis: [], storiesByPbi: { 'pbi-1': [{ ...STORY }] }, tasksByStory: {} })
useBacklogStore.getState().applyChange('story', 'U', { id: 'story-1', title: 'Updated story', status: 'IN_SPRINT' })
const story = useBacklogStore.getState().storiesByPbi['pbi-1'][0]
expect(story.title).toBe('Updated story')
expect(story.status).toBe('IN_SPRINT')
expect(story.priority).toBe(2) // unchanged field retained
})
it('DELETE: entity is removed from its pbi bucket', () => {
useBacklogStore.setState({ pbis: [], storiesByPbi: { 'pbi-1': [{ ...STORY }] }, tasksByStory: {} })
useBacklogStore.getState().applyChange('story', 'D', { id: 'story-1' })
expect(useBacklogStore.getState().storiesByPbi['pbi-1']).toHaveLength(0)
})
})
// ---------------------------------------------------------------------------
// Task
// ---------------------------------------------------------------------------
// I/U/D contract for task events; tasks live in tasksByStory buckets keyed
// by their parent story_id. Mirrors the story suite above.
describe('Task payload contract', () => {
it('INSERT: entity appears in tasksByStory[story_id] with correct title and status', () => {
useBacklogStore.setState({ pbis: [], storiesByPbi: {}, tasksByStory: { 'story-1': [] } })
useBacklogStore.getState().applyChange('task', 'I', { ...TASK })
const bucket = useBacklogStore.getState().tasksByStory['story-1']
expect(bucket).toHaveLength(1)
expect(bucket[0].id).toBe('task-1')
expect(bucket[0].title).toBe('Realtime task')
expect(bucket[0].status).toBe('TO_DO')
})
it('INSERT: creates bucket when story_id was not yet in tasksByStory', () => {
// No pre-seeded bucket here — the reducer must create it on the fly.
useBacklogStore.getState().applyChange('task', 'I', { ...TASK })
expect(useBacklogStore.getState().tasksByStory['story-1']).toHaveLength(1)
})
it('INSERT is idempotent: duplicate SSE-event does not add a second entry', () => {
useBacklogStore.getState().applyChange('task', 'I', { ...TASK })
useBacklogStore.getState().applyChange('task', 'I', { ...TASK })
expect(useBacklogStore.getState().tasksByStory['story-1']).toHaveLength(1)
})
it('UPDATE: changed_fields partial merges into existing task', () => {
useBacklogStore.setState({ pbis: [], storiesByPbi: {}, tasksByStory: { 'story-1': [{ ...TASK }] } })
useBacklogStore.getState().applyChange('task', 'U', { id: 'task-1', title: 'Updated task', status: 'IN_PROGRESS' })
const task = useBacklogStore.getState().tasksByStory['story-1'][0]
expect(task.title).toBe('Updated task')
expect(task.status).toBe('IN_PROGRESS')
expect(task.sort_order).toBe(1) // unchanged field retained
})
it('DELETE: entity is removed from its story bucket', () => {
useBacklogStore.setState({ pbis: [], storiesByPbi: {}, tasksByStory: { 'story-1': [{ ...TASK }] } })
useBacklogStore.getState().applyChange('task', 'D', { id: 'task-1' })
expect(useBacklogStore.getState().tasksByStory['story-1']).toHaveLength(0)
})
})

View file

@ -60,6 +60,7 @@ export default async function ProductBacklogPage({ params, searchParams }: Props
description: true,
acceptance_criteria: true,
priority: true,
sort_order: true,
status: true,
pbi_id: true,
sprint_id: true,
@ -82,7 +83,7 @@ export default async function ProductBacklogPage({ params, searchParams }: Props
}),
])
// Group stories by PBI id
// Group stories by PBI id (status uit DB blijft UPPER_SNAKE in dit hydratie-pad)
const storiesByPbi: Record<string, Story[]> = {}
for (const story of stories) {
if (!storiesByPbi[story.pbi_id]) storiesByPbi[story.pbi_id] = []
@ -151,7 +152,7 @@ export default async function ProductBacklogPage({ params, searchParams }: Props
<BacklogHydrationWrapper
productId={id}
initialData={{
pbis: pbis.map((p) => ({ id: p.id, code: p.code, title: p.title, priority: p.priority, description: p.description, created_at: p.created_at, status: pbiStatusToApi(p.status) })),
pbis: pbis.map((p) => ({ id: p.id, code: p.code, title: p.title, priority: p.priority, sort_order: p.sort_order, description: p.description, created_at: p.created_at, status: pbiStatusToApi(p.status) })),
storiesByPbi,
tasksByStory,
}}

View file

@ -97,6 +97,7 @@ export default async function SprintBoardPage({ params, searchParams }: Props) {
sprint_id: s.sprint_id,
created_at: s.created_at,
priority: s.priority,
sort_order: s.sort_order,
status: s.status,
taskCount: s.tasks.length,
doneCount: s.tasks.filter(t => t.status === 'DONE').length,
@ -148,6 +149,7 @@ export default async function SprintBoardPage({ params, searchParams }: Props) {
sprint_id: s.sprint_id,
created_at: s.created_at,
priority: s.priority,
sort_order: s.sort_order,
status: s.status,
taskCount: 0,
doneCount: 0,

View file

@ -50,6 +50,7 @@ export default async function MobileProductBacklogPage({ params, searchParams }:
description: true,
acceptance_criteria: true,
priority: true,
sort_order: true,
status: true,
pbi_id: true,
sprint_id: true,
@ -91,7 +92,7 @@ export default async function MobileProductBacklogPage({ params, searchParams }:
<BacklogHydrationWrapper
productId={id}
initialData={{
pbis: pbis.map((p) => ({ id: p.id, code: p.code, title: p.title, priority: p.priority, description: p.description, created_at: p.created_at, status: pbiStatusToApi(p.status) })),
pbis: pbis.map((p) => ({ id: p.id, code: p.code, title: p.title, priority: p.priority, sort_order: p.sort_order, description: p.description, created_at: p.created_at, status: pbiStatusToApi(p.status) })),
storiesByPbi,
tasksByStory,
}}

View file

@ -1,17 +1,15 @@
'use client'
import { useEffect, useRef } from 'react'
import { useBacklogStore, type BacklogPbi, type BacklogStory, type BacklogTask } from '@/stores/backlog-store'
import { useBacklogRealtime } from '@/lib/realtime/use-backlog-realtime'
import { useWorkspaceResync } from '@/lib/realtime/use-workspace-resync'
import { useProductWorkspaceStore } from '@/stores/product-workspace/store'
import type {
BacklogPbi as WorkspacePbi,
BacklogStory as WorkspaceStory,
BacklogTask as WorkspaceTask,
BacklogPbi,
BacklogStory,
BacklogTask,
ProductBacklogSnapshot,
} from '@/stores/product-workspace/types'
import { logWorkspaceFingerprint } from '@/lib/realtime/dev-workspace-fingerprint'
interface InitialData {
pbis: BacklogPbi[]
@ -37,11 +35,7 @@ function fingerprint(data: InitialData): string {
return `${pbiPart}|${storyPart}|${taskPart}`
}
// PBI-74 / T-844: dual-dispatch — naast de oude useBacklogStore vullen we nu
// ook de nieuwe product-workspace-store. De oude store blijft tijdelijk
// leidend voor componenten; in Story 3 verschuiven consumers één voor één.
// De runtime-payload bevat sort_order op PBI/Story (Prisma schema), ook al
// staat het niet op het oude InitialData type — daarom de cast hieronder.
// PBI-74 / Story 8: workspace-store is nu enige bron — dual-dispatch weg.
function toWorkspaceSnapshot(
data: InitialData,
productId: string,
@ -49,9 +43,9 @@ function toWorkspaceSnapshot(
): ProductBacklogSnapshot {
return {
product: { id: productId, name: productName ?? '' },
pbis: data.pbis as unknown as WorkspacePbi[],
storiesByPbi: data.storiesByPbi as unknown as Record<string, WorkspaceStory[]>,
tasksByStory: data.tasksByStory as unknown as Record<string, WorkspaceTask[]>,
pbis: data.pbis,
storiesByPbi: data.storiesByPbi,
tasksByStory: data.tasksByStory,
}
}
@ -61,21 +55,17 @@ export function BacklogHydrationWrapper({
productName,
children,
}: BacklogHydrationWrapperProps) {
const setInitialData = useBacklogStore((s) => s.setInitialData)
const lastFingerprint = useRef<string>('')
useEffect(() => {
const fp = fingerprint(initialData)
if (fp !== lastFingerprint.current) {
lastFingerprint.current = fp
setInitialData(initialData)
// Dual-dispatch: nieuwe workspace-store schaduwt mee.
useProductWorkspaceStore
.getState()
.hydrateSnapshot(toWorkspaceSnapshot(initialData, productId, productName))
logWorkspaceFingerprint('hydrate')
}
}, [initialData, productId, productName, setInitialData])
}, [initialData, productId, productName])
useBacklogRealtime(productId)
useWorkspaceResync()

View file

@ -56,6 +56,7 @@ export interface Story {
description: string | null
acceptance_criteria: string | null
priority: number
sort_order: number
status: string
pbi_id: string
sprint_id: string | null

View file

@ -45,6 +45,7 @@ export interface SprintStory {
sprint_id: string | null
created_at: Date
priority: number
sort_order: number
status: string
taskCount: number
doneCount: number

View file

@ -1,71 +0,0 @@
// PBI-74 / T-846: dev-only shadow-store fingerprint verifier.
// Logs entity counts of the old (useBacklogStore) and the new
// (useProductWorkspaceStore) store after every hydration or realtime event.
// On a mismatch a console.warn appears, so that during Story 2 we can see
// in the dev tools that both stores keep the same contents.
//
// TODO(PBI-74 / Story 8 / T-878): remove this file and all call sites
// before the cleanup PR is merged.
import { useBacklogStore } from '@/stores/backlog-store'
import { useProductWorkspaceStore } from '@/stores/product-workspace/store'
// Count-only snapshot of one store: total PBIs plus per-bucket list sizes.
interface Fingerprint {
pbis: number
storiesByPbi: Record<string, number>
tasksByStory: Record<string, number>
}
// Counts the contents of the legacy useBacklogStore (entity arrays per bucket).
function fingerprintOld(): Fingerprint {
  const state = useBacklogStore.getState()
  const storiesByPbi: Record<string, number> = {}
  const tasksByStory: Record<string, number> = {}
  Object.entries(state.storiesByPbi).forEach(([pbiId, list]) => {
    storiesByPbi[pbiId] = list.length
  })
  Object.entries(state.tasksByStory).forEach(([storyId, list]) => {
    tasksByStory[storyId] = list.length
  })
  return { pbis: state.pbis.length, storiesByPbi, tasksByStory }
}
// Counts the contents of the new workspace store; counts come from the
// relation id-lists (storyIdsByPbi / taskIdsByStory) rather than entity arrays.
function fingerprintNew(): Fingerprint {
  const { relations } = useProductWorkspaceStore.getState()
  const storiesByPbi: Record<string, number> = {}
  const tasksByStory: Record<string, number> = {}
  Object.entries(relations.storyIdsByPbi).forEach(([pbiId, ids]) => {
    storiesByPbi[pbiId] = ids.length
  })
  Object.entries(relations.taskIdsByStory).forEach(([storyId, ids]) => {
    tasksByStory[storyId] = ids.length
  })
  return { pbis: relations.pbiIds.length, storiesByPbi, tasksByStory }
}
// Compares two count maps; a key absent on one side counts as 0, so
// { a: 0 } and {} are considered equal shapes.
function shapeEqual(a: Record<string, number>, b: Record<string, number>): boolean {
  const allKeys = Object.keys(a).concat(Object.keys(b))
  return allKeys.every((key) => (a[key] ?? 0) === (b[key] ?? 0))
}
// Dev-only: compares old- vs new-store fingerprints and warns on divergence.
// `label` tags the triggering event (e.g. "hydrate", "event:pbi:U").
// No-op in production builds.
export function logWorkspaceFingerprint(label: string): void {
  if (process.env.NODE_ENV === 'production') return
  const oldFp = fingerprintOld()
  const newFp = fingerprintNew()
  const pbisMatch = oldFp.pbis === newFp.pbis
  const storiesMatch = shapeEqual(oldFp.storiesByPbi, newFp.storiesByPbi)
  const tasksMatch = shapeEqual(oldFp.tasksByStory, newFp.tasksByStory)
  if (!(pbisMatch && storiesMatch && tasksMatch)) {
    console.warn(
      `[workspace-fingerprint:${label}] MISMATCH oud↔nieuw`,
      { old: oldFp, new: newFp },
    )
    return
  }
  // Matching fingerprints are only logged when explicitly opted in.
  if (process.env.NEXT_PUBLIC_DEBUG_WORKSPACE_FINGERPRINT === '1') {
    console.debug(`[workspace-fingerprint:${label}] match`, oldFp)
  }
}

View file

@ -1,24 +1,19 @@
'use client'
// ST-1115: Client hook for the backlog 3-pane SSE stream.
// ST-1115 / PBI-74: Client hook for the backlog 3-pane SSE stream.
// Mounts in BacklogHydrationWrapper so it survives Server Action refreshes.
// Dispatches pbi/story/task change events into useBacklogStore.applyChange.
// Dispatches pbi/story/task change events into useProductWorkspaceStore.
//
// PBI-74 / T-845: dual-dispatch — events worden ook naar de nieuwe
// product-workspace-store gestuurd. De oude store blijft leidend totdat
// Story 3 de UI-consumers heeft omgezet en Story 8 de oude store opruimt.
// PBI-74 / T-861: stream blijft open op tab hidden. Per spec werkt
// EventSource gewoon door als de browser het toelaat — gemiste events
// worden opgehaald via resyncActiveScopes('visible') uit useWorkspaceResync.
// PBI-74 / T-862: bij latere 'ready' events (post-reconnect) triggeren we
// T-861: stream blijft open op tab hidden. Per spec werkt EventSource gewoon
// door als de browser het toelaat — gemiste events worden opgehaald via
// resyncActiveScopes('visible') uit useWorkspaceResync.
// T-862: bij latere 'ready' events (post-reconnect) triggeren we
// resyncActiveScopes('reconnect') zodat events die tijdens disconnect zijn
// gemist, alsnog binnenkomen.
import { useEffect, useRef } from 'react'
import { useBacklogStore } from '@/stores/backlog-store'
import { useProductWorkspaceStore } from '@/stores/product-workspace/store'
import type { ProductRealtimeEvent } from '@/stores/product-workspace/types'
import { logWorkspaceFingerprint } from '@/lib/realtime/dev-workspace-fingerprint'
const BACKOFF_START_MS = 1_000
const BACKOFF_MAX_MS = 30_000
@ -71,15 +66,9 @@ export function useBacklogRealtime(productId: string | null) {
if (!e.data) return
try {
const payload = JSON.parse(e.data) as EntityPayload
// Oude store (leidend voor UI tot Story 3).
useBacklogStore
.getState()
.applyChange(payload.entity, payload.op, payload as Record<string, unknown>)
// Nieuwe workspace-store (schaduw — wordt leidend in Story 3).
useProductWorkspaceStore
.getState()
.applyRealtimeEvent(payload as unknown as ProductRealtimeEvent)
logWorkspaceFingerprint(`event:${payload.entity}:${payload.op}`)
} catch (err) {
if (process.env.NODE_ENV !== 'production') {
console.error('[realtime/backlog] failed to parse event', err, e.data)

View file

@ -1,143 +0,0 @@
import { create } from 'zustand'
import type { PbiStatusApi } from '@/lib/task-status'
// Client-side shape of a PBI as delivered by hydration / realtime payloads.
export interface BacklogPbi {
id: string
code: string | null
title: string
priority: number
description?: string | null
created_at: Date
status: PbiStatusApi
}
// Story belonging to a PBI; sprint_id is null while still on the backlog.
export interface BacklogStory {
id: string
code: string | null
title: string
description: string | null
acceptance_criteria: string | null
priority: number
status: string
pbi_id: string
sprint_id: string | null
created_at: Date
}
// Task belonging to a story; sort_order drives in-story ordering.
export interface BacklogTask {
id: string
title: string
description: string | null
priority: number
status: string
sort_order: number
story_id: string
created_at: Date
}
// Realtime event vocabulary: entity kind and Insert/Update/Delete op codes.
type Entity = 'pbi' | 'story' | 'task'
type Op = 'I' | 'U' | 'D'
// Server-rendered snapshot used to seed the store on page load.
interface InitialData {
pbis: BacklogPbi[]
storiesByPbi: Record<string, BacklogStory[]>
tasksByStory: Record<string, BacklogTask[]>
}
interface BacklogStore extends InitialData {
setInitialData: (data: InitialData) => void
applyChange: (entity: Entity, op: Op, data: Record<string, unknown>) => void
}
// Replaces every bucket with a filtered copy that no longer contains `id`.
function removeFromBuckets<T extends { id: string }>(
  buckets: Record<string, T[]>,
  id: string,
): Record<string, T[]> {
  const next: Record<string, T[]> = {}
  for (const [key, list] of Object.entries(buckets)) {
    next[key] = list.filter((item) => item.id !== id)
  }
  return next
}

// Shallow-merges `patch` into the item with `id`, wherever its bucket is.
// Only the bucket that actually holds the item gets a new array.
function patchInBuckets<T extends { id: string }>(
  buckets: Record<string, T[]>,
  id: string,
  patch: Record<string, unknown>,
): Record<string, T[]> {
  const next = { ...buckets }
  for (const key of Object.keys(next)) {
    if (next[key].some((item) => item.id === id)) {
      next[key] = next[key].map((item) =>
        item.id === id ? { ...item, ...(patch as Partial<T>) } : item
      )
      break
    }
  }
  return next
}

// Appends `item` to the bucket under `key`; returns null when an entry with
// the same id already exists (inserts must stay idempotent — an optimistic
// update may have arrived before the realtime event).
function insertIntoBucket<T extends { id: string }>(
  buckets: Record<string, T[]>,
  key: string,
  item: T,
): Record<string, T[]> | null {
  const bucket = buckets[key] ?? []
  if (bucket.some((entry) => entry.id === item.id)) return null
  return { ...buckets, [key]: [...bucket, item] }
}

// Store for the product-backlog tree: flat `pbis` list plus two bucket maps
// keyed by parent id. `applyChange` is the realtime reducer that applies a
// single I/U/D event for a pbi, story, or task.
export const useBacklogStore = create<BacklogStore>((set) => ({
  pbis: [],
  storiesByPbi: {},
  tasksByStory: {},
  setInitialData: (data) => set(data),
  applyChange: (entity, op, data) =>
    set((state) => {
      const id = data.id as string
      if (entity === 'pbi') {
        if (op === 'D') return { pbis: state.pbis.filter((p) => p.id !== id) }
        if (op === 'U') {
          return {
            pbis: state.pbis.map((p) =>
              p.id === id ? { ...p, ...(data as Partial<BacklogPbi>) } : p
            ),
          }
        }
        // I — idempotent: skip when the pbi is already present.
        if (state.pbis.some((p) => p.id === id)) return {}
        return { pbis: [...state.pbis, data as unknown as BacklogPbi] }
      }
      if (entity === 'story') {
        if (op === 'D') return { storiesByPbi: removeFromBuckets(state.storiesByPbi, id) }
        if (op === 'U') return { storiesByPbi: patchInBuckets(state.storiesByPbi, id, data) }
        const inserted = insertIntoBucket(
          state.storiesByPbi,
          data.pbi_id as string,
          data as unknown as BacklogStory
        )
        return inserted ? { storiesByPbi: inserted } : {}
      }
      // entity === 'task'
      if (op === 'D') return { tasksByStory: removeFromBuckets(state.tasksByStory, id) }
      if (op === 'U') return { tasksByStory: patchInBuckets(state.tasksByStory, id, data) }
      const inserted = insertIntoBucket(
        state.tasksByStory,
        data.story_id as string,
        data as unknown as BacklogTask
      )
      return inserted ? { tasksByStory: inserted } : {}
    }),
}))

View file

@ -1,46 +0,0 @@
import { create } from 'zustand'

// Client-side ordering state for the backlog planner.
interface PlannerStore {
  // Order maps: productId → pbiId[]
  pbiOrder: Record<string, string[]>
  // Order maps: pbiId → storyId[]
  storyOrder: Record<string, string[]>
  // Priority maps: pbiId → priority
  pbiPriority: Record<string, number>
  initPbis: (productId: string, ids: string[]) => void
  reorderPbis: (productId: string, ids: string[]) => void
  rollbackPbis: (productId: string, ids: string[]) => void
  updatePbiPriority: (pbiId: string, priority: number) => void
  initStories: (pbiId: string, ids: string[]) => void
  reorderStories: (pbiId: string, ids: string[]) => void
  rollbackStories: (pbiId: string, ids: string[]) => void
}

export const usePlannerStore = create<PlannerStore>((set) => {
  // init/reorder/rollback all overwrite the full id list for one parent;
  // they differ only in caller intent, so they share these writers.
  const writePbiOrder = (productId: string, ids: string[]) =>
    set((state) => ({ pbiOrder: { ...state.pbiOrder, [productId]: ids } }))
  const writeStoryOrder = (pbiId: string, ids: string[]) =>
    set((state) => ({ storyOrder: { ...state.storyOrder, [pbiId]: ids } }))
  return {
    pbiOrder: {},
    storyOrder: {},
    pbiPriority: {},
    initPbis: (productId, ids) => writePbiOrder(productId, ids),
    reorderPbis: (productId, ids) => writePbiOrder(productId, ids),
    rollbackPbis: (productId, ids) => writePbiOrder(productId, ids),
    updatePbiPriority: (pbiId, priority) =>
      set((state) => ({ pbiPriority: { ...state.pbiPriority, [pbiId]: priority } })),
    initStories: (pbiId, ids) => writeStoryOrder(pbiId, ids),
    reorderStories: (pbiId, ids) => writeStoryOrder(pbiId, ids),
    rollbackStories: (pbiId, ids) => writeStoryOrder(pbiId, ids),
  }
})

View file

@ -1,13 +0,0 @@
import { create } from 'zustand'

// Tracks which product the user is currently viewing (id + display name).
interface ProductStore {
  currentProduct: { id: string; name: string } | null
  setCurrentProduct: (id: string, name: string) => void
  clearCurrentProduct: () => void
}

export const useProductStore = create<ProductStore>((set) => ({
  currentProduct: null,
  setCurrentProduct: (id, name) => {
    set({ currentProduct: { id, name } })
  },
  clearCurrentProduct: () => {
    set({ currentProduct: null })
  },
}))

View file

@ -1,17 +0,0 @@
import { create } from 'zustand'

// Which PBI / story is highlighted in the backlog UI.
interface SelectionStore {
  selectedPbiId: string | null
  selectedStoryId: string | null
  selectPbi: (id: string | null) => void
  selectStory: (id: string | null) => void
  clearSelection: () => void
}

export const useSelectionStore = create<SelectionStore>((set) => ({
  selectedPbiId: null,
  selectedStoryId: null,
  selectPbi: (id) => {
    // A new PBI context invalidates any previous story selection.
    set({ selectedPbiId: id, selectedStoryId: null })
  },
  selectStory: (id) => {
    set({ selectedStoryId: id })
  },
  clearSelection: () => {
    set({ selectedPbiId: null, selectedStoryId: null })
  },
}))