PBI-9 + PBI-47: worktree foundation, product-worktrees, P0 fixes, PAUSED flow
Adds two interlocking PBIs:
PBI-9 — Worktree foundation + persistent product-worktrees for idea-jobs
- src/git/worktree-paths.ts: centralised root + skip-set + lock-path helpers
- src/git/file-lock.ts: proper-lockfile wrapper, deadlock-safe ordered acquire
- src/git/product-worktree.ts: detached-HEAD worktree per product, .scratch/
excluded via git rev-parse --git-path (handles linked .git file)
- src/git/job-locks.ts: setupProductWorktrees + releaseLocksOnTerminal
- wait-for-job.ts: idea-branch wires product-worktrees for IDEA_GRILL/MAKE_PLAN
- update-job-status.ts + pbi-cascade.ts + stale-reset: release on all four
server-side terminal transitions (DONE/FAILED/CANCELLED/stale)
- cleanup-my-worktrees: skip _products/ + *.lock
- README: worktrees section with single-host invariant + advisory-lock path
PBI-47 — Sprint-flow P0 corrections + PAUSED flow with rich pause_context
- prisma schema: ClaudeJob.{base_sha,head_sha} + SprintRun.pause_context
- tryClaimJob captures base_sha; prepareDoneUpdate captures head_sha
- verify-task-against-plan diffs vs base_sha (no more origin/main fallback);
rejects with MISSING_BASE_SHA when null — fixes per-task verify-scope P0
- pr.ts: createPullRequest enableAutoMerge default false; new
enableAutoMergeOnPr with --match-head-commit guard + 5-category typed
EnableAutoMergeResult — fixes STORY auto-merge timing P0
- src/flow/{effects,worktree-lease,pr-flow,sprint-run}.ts: pure transition
modules + idempotent declarative effects executor
- update-job-status: STORY auto-merge fires only on the last task of the
story (story.status === DONE), with head_sha as merge guard; MERGE_CONFLICT
routes to sprint-run flow which produces CREATE_CLAUDE_QUESTION +
SET_SPRINT_RUN_STATUS effects with rich pause_context
Tests: 31 test files, 242 passing. Pure-transition tests cover STORY 3-tasks
auto-merge timing, SPRINT draft→ready, MERGE_CONFLICT pause/resume, file-lock
deadlock prevention, worktree-lease lifecycle, delete-only verify (ALIGNED),
per-job verify scope (base_sha isolation), 5-category auto-merge errors.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
454d96ee04
commit
f7f5a487ec
29 changed files with 1731 additions and 46 deletions
38
src/git/file-lock.ts
Normal file
38
src/git/file-lock.ts
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
import lockfile from 'proper-lockfile'
|
||||
|
||||
export async function acquireFileLock(lockPath: string): Promise<() => Promise<void>> {
|
||||
const release = await lockfile.lock(lockPath, {
|
||||
realpath: false,
|
||||
stale: 30_000,
|
||||
update: 5_000,
|
||||
retries: { retries: 60, factor: 1, minTimeout: 1_000, maxTimeout: 1_000 },
|
||||
})
|
||||
let released = false
|
||||
return async () => {
|
||||
if (released) return
|
||||
released = true
|
||||
await release()
|
||||
}
|
||||
}
|
||||
|
||||
export async function acquireFileLocksOrdered(
|
||||
lockPaths: string[],
|
||||
): Promise<() => Promise<void>> {
|
||||
const sorted = [...lockPaths].sort()
|
||||
const releases: Array<() => Promise<void>> = []
|
||||
try {
|
||||
for (const p of sorted) {
|
||||
releases.push(await acquireFileLock(p))
|
||||
}
|
||||
} catch (err) {
|
||||
for (const r of releases.reverse()) {
|
||||
await r().catch(() => {})
|
||||
}
|
||||
throw err
|
||||
}
|
||||
return async () => {
|
||||
for (const r of releases.reverse()) {
|
||||
await r().catch(() => {})
|
||||
}
|
||||
}
|
||||
}
|
||||
69
src/git/job-locks.ts
Normal file
69
src/git/job-locks.ts
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
import * as fs from 'node:fs/promises'
|
||||
import * as path from 'node:path'
|
||||
import { acquireFileLocksOrdered } from './file-lock.js'
|
||||
import {
|
||||
getProductWorktreeLockPath,
|
||||
getWorktreeRoot,
|
||||
} from './worktree-paths.js'
|
||||
import {
|
||||
getOrCreateProductWorktree,
|
||||
syncProductWorktree,
|
||||
} from './product-worktree.js'
|
||||
|
||||
// Maps a job id to the lock-release callbacks registered for that job.
type JobReleases = Map<string, Array<() => Promise<void>>>
// Module-level singleton registry — assumes a single server process
// (the single-host invariant described in the commit/README).
const jobReleases: JobReleases = new Map()
|
||||
|
||||
export async function setupProductWorktrees(
|
||||
jobId: string,
|
||||
productIds: string[],
|
||||
resolveRepoRoot: (productId: string) => Promise<string | null>,
|
||||
): Promise<Array<{ productId: string; worktreePath: string }>> {
|
||||
if (productIds.length === 0) return []
|
||||
|
||||
// Ensure parent dir exists so lockfile creation succeeds
|
||||
await fs.mkdir(path.join(getWorktreeRoot(), '_products'), { recursive: true })
|
||||
|
||||
// Lock-first, alphabetically sorted (deadlock prevention for multi-product idea-jobs)
|
||||
const sorted = [...productIds].sort()
|
||||
const lockPaths = sorted.map(getProductWorktreeLockPath)
|
||||
const releaseAll = await acquireFileLocksOrdered(lockPaths)
|
||||
registerJobLockReleases(jobId, [releaseAll])
|
||||
|
||||
// After lock-acquire, create/reuse worktrees and sync
|
||||
const out: Array<{ productId: string; worktreePath: string }> = []
|
||||
for (const productId of sorted) {
|
||||
const repoRoot = await resolveRepoRoot(productId)
|
||||
if (!repoRoot) continue
|
||||
const { worktreePath } = await getOrCreateProductWorktree({ repoRoot, productId })
|
||||
await syncProductWorktree({ worktreePath })
|
||||
out.push({ productId, worktreePath })
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
export function registerJobLockReleases(
|
||||
jobId: string,
|
||||
releases: Array<() => Promise<void>>,
|
||||
): void {
|
||||
const existing = jobReleases.get(jobId) ?? []
|
||||
jobReleases.set(jobId, [...existing, ...releases])
|
||||
}
|
||||
|
||||
export async function releaseLocksOnTerminal(jobId: string): Promise<void> {
|
||||
const releases = jobReleases.get(jobId)
|
||||
if (!releases) return // idempotent — already released or never locked
|
||||
jobReleases.delete(jobId)
|
||||
for (const release of releases) {
|
||||
try {
|
||||
await release()
|
||||
} catch (err) {
|
||||
console.warn(`[job-locks] release failed for job ${jobId}:`, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For tests
|
||||
export function _resetJobReleasesForTest(): void {
|
||||
jobReleases.clear()
|
||||
}
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
import { execFile } from 'node:child_process'
|
||||
import { promisify } from 'node:util'
|
||||
import * as path from 'node:path'
|
||||
import * as os from 'node:os'
|
||||
import { getWorktreeRoot } from './worktree-paths.js'
|
||||
|
||||
const exec = promisify(execFile)
|
||||
|
||||
|
|
@ -12,10 +12,17 @@ export async function createPullRequest(opts: {
|
|||
body: string
|
||||
/** Open as draft PR (mens moet 'm later ready-for-review zetten). Default false. */
|
||||
draft?: boolean
|
||||
/** Schakel auto-merge (squash) in. Default true. Voor sprint-mode: false. */
|
||||
/**
|
||||
* PBI-47 (P0): default changed to false. Auto-merge is now enabled
|
||||
* separately via `enableAutoMergeOnPr` only on the **last task** of a
|
||||
* STORY-mode story, with a head-SHA guard to prevent racing earlier
|
||||
* task merges. Callers may still pass `true` for one-off PRs that
|
||||
* are immediately ready to merge; in that case we use the new typed
|
||||
* helper rather than the previous fire-and-forget gh call.
|
||||
*/
|
||||
enableAutoMerge?: boolean
|
||||
}): Promise<{ url: string } | { error: string }> {
|
||||
const { worktreePath, branchName, title, body, draft = false, enableAutoMerge = true } = opts
|
||||
const { worktreePath, branchName, title, body, draft = false, enableAutoMerge = false } = opts
|
||||
|
||||
let url: string
|
||||
try {
|
||||
|
|
@ -40,21 +47,14 @@ export async function createPullRequest(opts: {
|
|||
return { error: `gh pr create failed: ${msg.slice(0, 300)}` }
|
||||
}
|
||||
|
||||
// Best-effort: enable auto-merge (squash) on the freshly created PR. If the
|
||||
// repo doesn't have "Allow auto-merge" turned on, or the token lacks scope,
|
||||
// gh exits non-zero and we just log. The PR is still valid; auto-merge can
|
||||
// be turned on manually. We do NOT fail the whole createPullRequest call —
|
||||
// the URL was successfully obtained which is the contract this returns.
|
||||
// Bij draft + sprint-flow slaan we dit over: de PR moet eerst handmatig of
|
||||
// via markPullRequestReady ready-for-review worden gezet.
|
||||
// Legacy opt-in: enableAutoMerge=true and not draft → fire the new typed
|
||||
// helper without head-SHA guard (caller didn't supply one). Result is
|
||||
// logged but not propagated — same shape as before.
|
||||
if (enableAutoMerge && !draft) {
|
||||
try {
|
||||
await exec('gh', ['pr', 'merge', '--auto', '--squash', url], { cwd: worktreePath })
|
||||
} catch (err) {
|
||||
const stderr =
|
||||
(err as { stderr?: string }).stderr ?? (err as Error).message ?? ''
|
||||
const result = await enableAutoMergeOnPr({ prUrl: url, cwd: worktreePath })
|
||||
if (!result.ok) {
|
||||
console.warn(
|
||||
`[createPullRequest] auto-merge enable failed for ${url}: ${stderr.slice(0, 200)}`,
|
||||
`[createPullRequest] auto-merge enable failed for ${url}: ${result.reason} ${result.stderr.slice(0, 200)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
@ -62,6 +62,51 @@ export async function createPullRequest(opts: {
|
|||
return { url }
|
||||
}
|
||||
|
||||
export type AutoMergeFailReason =
|
||||
| 'CHECKS_FAILED'
|
||||
| 'MERGE_CONFLICT'
|
||||
| 'GH_AUTH_ERROR'
|
||||
| 'AUTO_MERGE_NOT_ALLOWED'
|
||||
| 'UNKNOWN'
|
||||
|
||||
export type EnableAutoMergeResult =
|
||||
| { ok: true }
|
||||
| { ok: false; reason: AutoMergeFailReason; stderr: string }
|
||||
|
||||
function classifyAutoMergeError(stderr: string): AutoMergeFailReason {
|
||||
if (/conflict|not in mergeable state|dirty/i.test(stderr)) return 'MERGE_CONFLICT'
|
||||
if (/checks? failed|status check|required check/i.test(stderr)) return 'CHECKS_FAILED'
|
||||
if (/authentication|HTTP 401|HTTP 403|permission|gh auth/i.test(stderr)) return 'GH_AUTH_ERROR'
|
||||
if (/auto-?merge.*not.*allowed|auto-?merge.*disabled/i.test(stderr)) return 'AUTO_MERGE_NOT_ALLOWED'
|
||||
return 'UNKNOWN'
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable auto-merge (squash) on a PR with an optional head-SHA guard.
|
||||
*
|
||||
* PBI-47 (P0): when `expectedHeadSha` is provided we pass `--match-head-commit`
|
||||
* so GitHub only activates auto-merge if the remote head still matches the
|
||||
* SHA the caller observed. This prevents racing late pushes from another
|
||||
* worker triggering a merge of a different commit set.
|
||||
*/
|
||||
export async function enableAutoMergeOnPr(opts: {
|
||||
prUrl: string
|
||||
expectedHeadSha?: string
|
||||
cwd?: string
|
||||
}): Promise<EnableAutoMergeResult> {
|
||||
try {
|
||||
const args = ['pr', 'merge', '--auto', '--squash']
|
||||
if (opts.expectedHeadSha) args.push('--match-head-commit', opts.expectedHeadSha)
|
||||
args.push(opts.prUrl)
|
||||
await exec('gh', args, opts.cwd ? { cwd: opts.cwd } : {})
|
||||
return { ok: true }
|
||||
} catch (err) {
|
||||
const stderr =
|
||||
(err as { stderr?: string }).stderr ?? (err as Error).message ?? ''
|
||||
return { ok: false, reason: classifyAutoMergeError(stderr), stderr: stderr.slice(0, 500) }
|
||||
}
|
||||
}
|
||||
|
||||
// Zet een draft-PR over naar "ready for review". Gebruikt bij sprint-mode
|
||||
// wanneer alle stories in de SprintRun DONE zijn — mens reviewt en mergt zelf.
|
||||
export async function markPullRequestReady(opts: {
|
||||
|
|
@ -163,8 +208,7 @@ export async function createRevertPullRequest(opts: {
|
|||
pbiCode,
|
||||
} = opts
|
||||
|
||||
const worktreeDir =
|
||||
process.env.SCRUM4ME_AGENT_WORKTREE_DIR ?? path.join(os.homedir(), '.scrum4me-agent-worktrees')
|
||||
const worktreeDir = getWorktreeRoot()
|
||||
const wtPath = path.join(worktreeDir, `revert-${jobId}`)
|
||||
const revertBranch = `revert/${originalBranch}-${jobId.slice(-8)}`
|
||||
|
||||
|
|
|
|||
66
src/git/product-worktree.ts
Normal file
66
src/git/product-worktree.ts
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
import { execFile } from 'node:child_process'
|
||||
import { promisify } from 'node:util'
|
||||
import * as fs from 'node:fs/promises'
|
||||
import * as path from 'node:path'
|
||||
import { getProductWorktreePath } from './worktree-paths.js'
|
||||
|
||||
const exec = promisify(execFile)
|
||||
|
||||
export async function getOrCreateProductWorktree(opts: {
|
||||
repoRoot: string
|
||||
productId: string
|
||||
}): Promise<{ worktreePath: string; created: boolean }> {
|
||||
const worktreePath = getProductWorktreePath(opts.productId)
|
||||
await fs.mkdir(path.dirname(worktreePath), { recursive: true })
|
||||
|
||||
try {
|
||||
await fs.access(worktreePath)
|
||||
return { worktreePath, created: false }
|
||||
} catch {
|
||||
// Path bestaat niet — aanmaken
|
||||
}
|
||||
|
||||
await exec('git', ['fetch', 'origin', '--prune'], { cwd: opts.repoRoot })
|
||||
await exec('git', ['worktree', 'add', '--detach', worktreePath, 'origin/main'], {
|
||||
cwd: opts.repoRoot,
|
||||
})
|
||||
|
||||
// Resolve REAL exclude-pad (linked worktree heeft .git als file, niet directory)
|
||||
const { stdout } = await exec('git', ['rev-parse', '--git-path', 'info/exclude'], {
|
||||
cwd: worktreePath,
|
||||
})
|
||||
const excludePath = path.resolve(worktreePath, stdout.trim())
|
||||
const existing = await fs.readFile(excludePath, 'utf8').catch(() => '')
|
||||
if (!existing.split('\n').includes('.scratch/')) {
|
||||
const sep = existing === '' || existing.endsWith('\n') ? '' : '\n'
|
||||
await fs.appendFile(excludePath, `${sep}.scratch/\n`)
|
||||
}
|
||||
|
||||
return { worktreePath, created: true }
|
||||
}
|
||||
|
||||
export async function syncProductWorktree(opts: { worktreePath: string }): Promise<void> {
|
||||
const { worktreePath } = opts
|
||||
await exec('git', ['fetch', 'origin', '--prune'], { cwd: worktreePath })
|
||||
await exec('git', ['reset', '--hard', 'origin/main'], { cwd: worktreePath })
|
||||
await exec('git', ['clean', '-fd', '-e', '.scratch/'], { cwd: worktreePath })
|
||||
// Wis .scratch/ inhoud, behoud de map
|
||||
const scratch = path.join(worktreePath, '.scratch')
|
||||
await fs.rm(scratch, { recursive: true, force: true })
|
||||
await fs.mkdir(scratch, { recursive: true })
|
||||
}
|
||||
|
||||
export async function removeProductWorktree(opts: {
|
||||
repoRoot: string
|
||||
productId: string
|
||||
}): Promise<{ removed: boolean }> {
|
||||
const worktreePath = getProductWorktreePath(opts.productId)
|
||||
try {
|
||||
await exec('git', ['worktree', 'remove', '--force', worktreePath], {
|
||||
cwd: opts.repoRoot,
|
||||
})
|
||||
return { removed: true }
|
||||
} catch {
|
||||
return { removed: false }
|
||||
}
|
||||
}
|
||||
19
src/git/worktree-paths.ts
Normal file
19
src/git/worktree-paths.ts
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
import * as os from 'node:os'
|
||||
import * as path from 'node:path'
|
||||
|
||||
// Directory names under the worktree root that belong to the system rather
// than to a single job; cleanup tooling must skip these.
export const SYSTEM_WORKTREE_DIRS = new Set(['_products'])
|
||||
|
||||
export function getWorktreeRoot(): string {
|
||||
return (
|
||||
process.env.SCRUM4ME_AGENT_WORKTREE_DIR
|
||||
?? path.join(os.homedir(), '.scrum4me-agent-worktrees')
|
||||
)
|
||||
}
|
||||
|
||||
export function getProductWorktreePath(productId: string): string {
|
||||
return path.join(getWorktreeRoot(), '_products', productId)
|
||||
}
|
||||
|
||||
export function getProductWorktreeLockPath(productId: string): string {
|
||||
return path.join(getWorktreeRoot(), '_products', `${productId}.lock`)
|
||||
}
|
||||
|
|
@ -1,8 +1,8 @@
|
|||
import { execFile } from 'node:child_process'
|
||||
import { promisify } from 'node:util'
|
||||
import * as path from 'node:path'
|
||||
import * as os from 'node:os'
|
||||
import * as fs from 'node:fs/promises'
|
||||
import { getWorktreeRoot } from './worktree-paths.js'
|
||||
|
||||
const exec = promisify(execFile)
|
||||
|
||||
|
|
@ -50,9 +50,7 @@ export async function createWorktreeForJob(opts: {
|
|||
const { repoRoot, jobId, baseRef = 'origin/main', reuseBranch = false } = opts
|
||||
let { branchName } = opts
|
||||
|
||||
const parent =
|
||||
process.env.SCRUM4ME_AGENT_WORKTREE_DIR ??
|
||||
path.join(os.homedir(), '.scrum4me-agent-worktrees')
|
||||
const parent = getWorktreeRoot()
|
||||
|
||||
await fs.mkdir(parent, { recursive: true })
|
||||
|
||||
|
|
@ -121,9 +119,7 @@ export async function removeWorktreeForJob(opts: {
|
|||
}): Promise<{ removed: boolean }> {
|
||||
const { repoRoot, jobId, keepBranch = false } = opts
|
||||
|
||||
const parent =
|
||||
process.env.SCRUM4ME_AGENT_WORKTREE_DIR ??
|
||||
path.join(os.homedir(), '.scrum4me-agent-worktrees')
|
||||
const parent = getWorktreeRoot()
|
||||
|
||||
const worktreePath = path.join(parent, jobId)
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue