diff --git a/packages/control-api/src/webhook-gateway.test.ts b/packages/control-api/src/webhook-gateway.test.ts index b0da043..5dd2505 100644 --- a/packages/control-api/src/webhook-gateway.test.ts +++ b/packages/control-api/src/webhook-gateway.test.ts @@ -122,6 +122,122 @@ describe('webhook gateway HTTP endpoint', () => { } }); + it('deduplicates GitHub webhook retries by delivery id', async () => { + registerWebhookGatewaySource(workspacePath, { + key: 'github-main', + provider: 'github', + secret: 'github-secret', + actor: 'github-bot', + }); + + const handle = await startWorkgraphServer({ + workspacePath, + host: '127.0.0.1', + port: 0, + }); + try { + const payload = JSON.stringify({ + action: 'synchronize', + pull_request: { + number: 42, + }, + }); + const signature = signGithub(payload, 'github-secret'); + const first = await fetch(`${handle.baseUrl}/webhook-gateway/github-main`, { + method: 'POST', + headers: { + 'content-type': 'application/json', + 'x-github-event': 'pull_request', + 'x-github-delivery': 'delivery-dup-1', + 'x-hub-signature-256': signature, + }, + body: payload, + }); + const firstBody = await first.json() as Record; + expect(first.status).toBe(202); + expect(firstBody.accepted).toBe(true); + + const second = await fetch(`${handle.baseUrl}/webhook-gateway/github-main`, { + method: 'POST', + headers: { + 'content-type': 'application/json', + 'x-github-event': 'pull_request', + 'x-github-delivery': 'delivery-dup-1', + 'x-hub-signature-256': signature, + }, + body: payload, + }); + const secondBody = await second.json() as Record; + expect(second.status).toBe(200); + expect(secondBody.accepted).toBe(false); + expect(secondBody.reason).toBe('duplicate'); + expect(secondBody.duplicateBy).toBe('deliveryId'); + + const recent = ledger.recent(workspacePath, 20); + const gatewayEntries = recent.filter((entry) => entry.target.includes('.workgraph/webhook-gateway/github-main/delivery-dup-1')); + expect(gatewayEntries).toHaveLength(1); + } finally { 
+ await handle.close(); + } + }); + + it('deduplicates GitHub webhook retries by payload digest', async () => { + registerWebhookGatewaySource(workspacePath, { + key: 'github-main', + provider: 'github', + secret: 'github-secret', + actor: 'github-bot', + }); + + const handle = await startWorkgraphServer({ + workspacePath, + host: '127.0.0.1', + port: 0, + }); + try { + const payload = JSON.stringify({ + action: 'opened', + issue: { + number: 77, + }, + }); + const signature = signGithub(payload, 'github-secret'); + const first = await fetch(`${handle.baseUrl}/webhook-gateway/github-main`, { + method: 'POST', + headers: { + 'content-type': 'application/json', + 'x-github-event': 'issues', + 'x-github-delivery': 'delivery-digest-1', + 'x-hub-signature-256': signature, + }, + body: payload, + }); + expect(first.status).toBe(202); + + const second = await fetch(`${handle.baseUrl}/webhook-gateway/github-main`, { + method: 'POST', + headers: { + 'content-type': 'application/json', + 'x-github-event': 'issues', + 'x-github-delivery': 'delivery-digest-2', + 'x-hub-signature-256': signature, + }, + body: payload, + }); + const secondBody = await second.json() as Record; + expect(second.status).toBe(200); + expect(secondBody.accepted).toBe(false); + expect(secondBody.reason).toBe('duplicate'); + expect(secondBody.duplicateBy).toBe('payloadDigest'); + + const recent = ledger.recent(workspacePath, 20); + const gatewayEntries = recent.filter((entry) => entry.target.includes('.workgraph/webhook-gateway/github-main/')); + expect(gatewayEntries).toHaveLength(1); + } finally { + await handle.close(); + } + }); + it('rejects invalid GitHub signatures', async () => { registerWebhookGatewaySource(workspacePath, { key: 'github-main', diff --git a/packages/control-api/src/webhook-gateway.ts b/packages/control-api/src/webhook-gateway.ts index 82b7990..55dc591 100644 --- a/packages/control-api/src/webhook-gateway.ts +++ b/packages/control-api/src/webhook-gateway.ts @@ -12,6 +12,10 @@ 
const DEFAULT_LOG_LIMIT = 50; const MAX_LOG_LIMIT = 1_000; const MAX_WEBHOOK_BODY_BYTES = 2 * 1024 * 1024; const SLACK_SIGNATURE_MAX_AGE_SECONDS = 60 * 5; +const WEBHOOK_DEDUP_TTL_MS = 5 * 60_000; +const WEBHOOK_DEDUP_MAX_ENTRIES = 1_000; + +const recentWebhookDedup = new Map(); export type WebhookGatewayProvider = 'github' | 'linear' | 'slack' | 'generic'; type LogStatus = 'accepted' | 'rejected'; @@ -339,6 +343,41 @@ export function registerWebhookGatewayEndpoint(app: any, workspacePath: string): return; } + const duplicateBy = detectRecentWebhookDuplicate( + workspacePath, + source.key, + adapted.deliveryId, + payloadDigest, + ); + if (duplicateBy) { + const duplicateLog: WebhookGatewayLogEntry = { + id: randomUUID(), + ts: new Date().toISOString(), + sourceKey: source.key, + provider: source.provider, + eventType: adapted.eventType, + actor, + status: 'accepted', + statusCode: 200, + signatureVerified: verification.verified, + message: `Duplicate webhook ignored (${duplicateBy}).`, + deliveryId: adapted.deliveryId, + payloadDigest, + }; + appendWebhookGatewayLog(workspacePath, duplicateLog); + writeWebhookGatewayHttpResponse(res, 200, { + ok: true, + accepted: false, + reason: 'duplicate', + duplicateBy, + source: source.key, + provider: source.provider, + eventType: adapted.eventType, + deliveryId: adapted.deliveryId, + }); + return; + } + appendWebhookGatewayLedgerEvent(workspacePath, source, { eventType: adapted.eventType, deliveryId: adapted.deliveryId, @@ -378,6 +417,57 @@ export function registerWebhookGatewayEndpoint(app: any, workspacePath: string): }); } +function detectRecentWebhookDuplicate( + workspacePath: string, + sourceKey: string, + deliveryId: string, + payloadDigest: string, +): 'deliveryId' | 'payloadDigest' | null { + const nowMs = Date.now(); + evictExpiredWebhookDedupEntries(nowMs); + const deliveryKey = `${workspacePath}|${sourceKey}|delivery|${deliveryId}`; + if (isWebhookDedupHit(deliveryKey, nowMs)) { + return 'deliveryId'; + } + 
const payloadKey = `${workspacePath}|${sourceKey}|digest|${payloadDigest}`; + if (isWebhookDedupHit(payloadKey, nowMs)) { + return 'payloadDigest'; + } + rememberWebhookDedupKey(deliveryKey, nowMs); + rememberWebhookDedupKey(payloadKey, nowMs); + return null; +} + +function isWebhookDedupHit(key: string, nowMs: number): boolean { + const expiresAt = recentWebhookDedup.get(key); + if (!expiresAt) return false; + if (expiresAt <= nowMs) { + recentWebhookDedup.delete(key); + return false; + } + // Re-insert to refresh LRU order while keeping original expiration. + recentWebhookDedup.delete(key); + recentWebhookDedup.set(key, expiresAt); + return true; +} + +function rememberWebhookDedupKey(key: string, nowMs: number): void { + recentWebhookDedup.set(key, nowMs + WEBHOOK_DEDUP_TTL_MS); + while (recentWebhookDedup.size > WEBHOOK_DEDUP_MAX_ENTRIES) { + const oldest = recentWebhookDedup.keys().next().value as string | undefined; + if (!oldest) break; + recentWebhookDedup.delete(oldest); + } +} + +function evictExpiredWebhookDedupEntries(nowMs: number): void { + for (const [key, expiresAt] of recentWebhookDedup.entries()) { + if (expiresAt <= nowMs) { + recentWebhookDedup.delete(key); + } + } +} + function readWebhookGatewayStore(workspacePath: string): WebhookGatewayStoreFile { const filePath = webhookGatewayStorePath(workspacePath); if (!fs.existsSync(filePath)) { diff --git a/packages/kernel/src/index.ts b/packages/kernel/src/index.ts index f695231..cec6c6c 100644 --- a/packages/kernel/src/index.ts +++ b/packages/kernel/src/index.ts @@ -4,6 +4,7 @@ export * as ledger from './ledger.js'; export * as auth from './auth.js'; export * as store from './store.js'; export * as thread from './thread.js'; +export * as threadContext from './thread-context.js'; export * as mission from './mission.js'; export * as missionOrchestrator from './mission-orchestrator.js'; export * as capability from './capability.js'; diff --git a/packages/kernel/src/thread-context.test.ts 
b/packages/kernel/src/thread-context.test.ts new file mode 100644 index 0000000..94a4a19 --- /dev/null +++ b/packages/kernel/src/thread-context.test.ts @@ -0,0 +1,126 @@ +import fs from 'node:fs'; +import os from 'node:os'; +import path from 'node:path'; +import matter from 'gray-matter'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; +import { loadRegistry, saveRegistry } from './registry.js'; +import * as thread from './thread.js'; +import * as threadContext from './thread-context.js'; + +let workspacePath: string; +let threadPath: string; + +beforeEach(() => { + workspacePath = fs.mkdtempSync(path.join(os.tmpdir(), 'wg-thread-context-')); + const registry = loadRegistry(workspacePath); + saveRegistry(workspacePath, registry); + threadPath = thread.createThread( + workspacePath, + 'Thread Context Fixture', + 'Validate thread context operations', + 'agent-seed', + ).path; +}); + +afterEach(() => { + fs.rmSync(workspacePath, { recursive: true, force: true }); +}); + +describe('thread context store', () => { + it('adds entries and lists metadata/content', () => { + const first = threadContext.addThreadContextEntry(workspacePath, threadPath, { + title: 'Architecture note', + content: 'We should keep context local to a thread for retrieval.', + source: 'adr-001', + addedBy: 'agent-a', + relevanceScore: 0.9, + }); + const second = threadContext.addThreadContextEntry(workspacePath, threadPath, { + title: 'Research snippet', + content: 'BM25 ranking is robust for keyword retrieval tasks.', + addedBy: 'agent-b', + relevanceScore: 0.4, + }); + + expect(first.path).toContain('.workgraph/thread-context/thread-context-fixture/context/'); + expect(second.path).toContain('.workgraph/thread-context/thread-context-fixture/context/'); + + const listed = threadContext.listThreadContextEntries(workspacePath, threadPath); + expect(listed).toHaveLength(2); + expect(listed[0].title).toBe('Architecture note'); + expect(listed[0].source).toBe('adr-001'); + 
expect(listed[1].title).toBe('Research snippet'); + }); + + it('searches entries with BM25 ranking', () => { + threadContext.addThreadContextEntry(workspacePath, threadPath, { + title: 'Webhook dedup design', + content: 'Use payload digest and delivery id dedup in the webhook gateway.', + addedBy: 'agent-a', + relevanceScore: 0.8, + }); + threadContext.addThreadContextEntry(workspacePath, threadPath, { + title: 'Standalone threads', + content: 'Add a top-level thread creation tool with idempotency support.', + addedBy: 'agent-a', + relevanceScore: 0.6, + }); + threadContext.addThreadContextEntry(workspacePath, threadPath, { + title: 'Unrelated note', + content: 'Color palette and spacing tweaks for dashboard.', + addedBy: 'agent-a', + relevanceScore: 0.9, + }); + + const results = threadContext.searchThreadContextEntries( + workspacePath, + threadPath, + 'webhook delivery dedup', + { limit: 2 }, + ); + + expect(results).toHaveLength(1); + expect(results[0].title).toBe('Webhook dedup design'); + expect(results[0].bm25_score).toBeGreaterThan(0); + expect(results[0].snippet.toLowerCase()).toContain('dedup'); + }); + + it('prunes stale and low relevance entries', () => { + const stale = threadContext.addThreadContextEntry(workspacePath, threadPath, { + title: 'Old context', + content: 'This note is no longer fresh.', + addedBy: 'agent-a', + relevanceScore: 0.9, + }); + const low = threadContext.addThreadContextEntry(workspacePath, threadPath, { + title: 'Low signal', + content: 'This entry should be removed by relevance.', + addedBy: 'agent-a', + relevanceScore: 0.1, + }); + const keep = threadContext.addThreadContextEntry(workspacePath, threadPath, { + title: 'Keep context', + content: 'High relevance and recent entry.', + addedBy: 'agent-a', + relevanceScore: 0.95, + }); + + const staleAbsPath = path.join(workspacePath, stale.path); + const staleParsed = matter(fs.readFileSync(staleAbsPath, 'utf-8')); + staleParsed.data.added_at = '2000-01-01T00:00:00.000Z'; + 
fs.writeFileSync(staleAbsPath, matter.stringify(staleParsed.content, staleParsed.data), 'utf-8'); + + const pruned = threadContext.pruneThreadContextEntries(workspacePath, threadPath, { + maxAgeMinutes: 60, + minRelevance: 0.5, + now: new Date('2026-03-11T00:00:00.000Z'), + }); + + expect(pruned.removedCount).toBe(2); + expect(pruned.keptCount).toBe(1); + expect(pruned.removed.map((entry) => entry.path).sort()).toEqual([low.path, stale.path].sort()); + const remaining = threadContext.listThreadContextEntries(workspacePath, threadPath); + expect(remaining).toHaveLength(1); + expect(remaining[0].path).toBe(keep.path); + }); +}); diff --git a/packages/kernel/src/thread-context.ts b/packages/kernel/src/thread-context.ts new file mode 100644 index 0000000..46ed492 --- /dev/null +++ b/packages/kernel/src/thread-context.ts @@ -0,0 +1,486 @@ +import { createHash } from 'node:crypto'; +import fs from 'node:fs'; +import path from 'node:path'; +import matter from 'gray-matter'; +import * as store from './store.js'; + +const THREAD_CONTEXT_ROOT = '.workgraph/thread-context'; +const THREAD_CONTEXT_SUBDIRECTORY = 'context'; +const DEFAULT_RELEVANCE_SCORE = 0.5; +const BM25_K1 = 1.2; +const BM25_B = 0.75; + +export interface ThreadContextEntry { + path: string; + threadPath: string; + threadTid: string; + title: string; + source?: string; + added_by: string; + added_at: string; + relevance_score: number; + content: string; +} + +export interface AddThreadContextEntryInput { + title: string; + content: string; + source?: string; + addedBy: string; + relevanceScore?: number; +} + +export interface ThreadContextSearchResult { + path: string; + title: string; + source?: string; + added_by: string; + added_at: string; + relevance_score: number; + bm25_score: number; + snippet: string; +} + +export interface ThreadContextSummary { + threadPath: string; + threadTid: string; + totalEntries: number; + topEntries: Array<{ + path: string; + title: string; + source?: string; + added_at: 
string; + relevance_score: number; + }>; +} + +export interface PruneThreadContextInput { + maxAgeMinutes?: number; + minRelevance?: number; + now?: Date; +} + +export interface PruneThreadContextResult { + threadPath: string; + threadTid: string; + removedCount: number; + keptCount: number; + removed: Array<{ + path: string; + title: string; + added_at: string; + relevance_score: number; + reasons: Array<'max_age' | 'min_relevance'>; + }>; +} + +interface ThreadContextEntryRecord extends ThreadContextEntry { + absolutePath: string; +} + +export function addThreadContextEntry( + workspacePath: string, + rawThreadPath: string, + input: AddThreadContextEntryInput, +): ThreadContextEntry { + const threadPath = normalizeThreadPath(rawThreadPath); + const title = normalizeRequiredString(input.title, 'Context entry title is required.'); + const content = normalizeRequiredString(input.content, 'Context entry content is required.'); + const addedBy = normalizeRequiredString(input.addedBy, 'Context entry actor is required.'); + const source = normalizeOptionalString(input.source); + const relevanceScore = normalizeRelevanceScore(input.relevanceScore); + const addedAt = new Date().toISOString(); + const target = resolveThreadContextLocation(workspacePath, threadPath); + if (!fs.existsSync(target.contextDir)) { + fs.mkdirSync(target.contextDir, { recursive: true }); + } + const fileName = buildContextEntryFileName(title, content, addedAt, addedBy); + const absolutePath = path.join(target.contextDir, fileName); + const relativePath = path.relative(workspacePath, absolutePath).replace(/\\/g, '/'); + const frontmatter = { + title, + ...(source ? { source } : {}), + added_by: addedBy, + added_at: addedAt, + relevance_score: relevanceScore, + }; + const rendered = matter.stringify(`${content}\n`, frontmatter); + fs.writeFileSync(absolutePath, rendered, 'utf-8'); + return { + path: relativePath, + threadPath, + threadTid: target.threadTid, + title, + ...(source ? 
{ source } : {}), + added_by: addedBy, + added_at: addedAt, + relevance_score: relevanceScore, + content, + }; +} + +export function listThreadContextEntries(workspacePath: string, rawThreadPath: string): ThreadContextEntry[] { + const threadPath = normalizeThreadPath(rawThreadPath); + return readThreadContextEntries(workspacePath, threadPath).map(stripRecordPathForPublicResult); +} + +export function summarizeThreadContext( + workspacePath: string, + rawThreadPath: string, + options: { topN?: number } = {}, +): ThreadContextSummary { + const threadPath = normalizeThreadPath(rawThreadPath); + const location = resolveThreadContextLocation(workspacePath, threadPath); + const entries = readThreadContextEntries(workspacePath, threadPath); + const topN = normalizePositiveInt(options.topN, 3); + const topEntries = entries + .slice() + .sort((left, right) => + right.relevance_score - left.relevance_score + || right.added_at.localeCompare(left.added_at) + || left.path.localeCompare(right.path)) + .slice(0, topN) + .map((entry) => ({ + path: entry.path, + title: entry.title, + ...(entry.source ? 
{ source: entry.source } : {}), + added_at: entry.added_at, + relevance_score: entry.relevance_score, + })); + return { + threadPath, + threadTid: location.threadTid, + totalEntries: entries.length, + topEntries, + }; +} + +export function searchThreadContextEntries( + workspacePath: string, + rawThreadPath: string, + query: string, + options: { limit?: number } = {}, +): ThreadContextSearchResult[] { + const threadPath = normalizeThreadPath(rawThreadPath); + const normalizedQuery = normalizeRequiredString(query, 'Search query is required.'); + const entries = readThreadContextEntries(workspacePath, threadPath); + if (entries.length === 0) return []; + const queryTokens = tokenize(normalizedQuery); + if (queryTokens.length === 0) return []; + const queryTerms = uniqueStrings(queryTokens); + const indexed = entries.map((entry) => buildSearchRecord(entry)); + const averageDocLength = indexed.reduce((sum, item) => sum + item.docLength, 0) / indexed.length; + const docFrequency = new Map(); + for (const term of queryTerms) { + let count = 0; + for (const item of indexed) { + if (item.termFrequency.has(term)) count += 1; + } + docFrequency.set(term, count); + } + const limit = normalizePositiveInt(options.limit, 10); + const scored = indexed + .map((item) => ({ + item, + score: computeBm25Score(item, queryTerms, docFrequency, indexed.length, averageDocLength), + })) + .filter((entry) => entry.score > 0) + .sort((left, right) => + right.score - left.score + || right.item.entry.relevance_score - left.item.entry.relevance_score + || right.item.entry.added_at.localeCompare(left.item.entry.added_at) + || left.item.entry.path.localeCompare(right.item.entry.path)) + .slice(0, limit); + return scored.map(({ item, score }) => ({ + path: item.entry.path, + title: item.entry.title, + ...(item.entry.source ? 
{ source: item.entry.source } : {}), + added_by: item.entry.added_by, + added_at: item.entry.added_at, + relevance_score: item.entry.relevance_score, + bm25_score: roundScore(score), + snippet: buildSnippet(item.entry.content, queryTerms), + })); +} + +export function pruneThreadContextEntries( + workspacePath: string, + rawThreadPath: string, + options: PruneThreadContextInput = {}, +): PruneThreadContextResult { + const threadPath = normalizeThreadPath(rawThreadPath); + const location = resolveThreadContextLocation(workspacePath, threadPath); + const entries = readThreadContextEntries(workspacePath, threadPath); + const nowMs = (options.now ?? new Date()).getTime(); + const maxAgeMinutes = options.maxAgeMinutes !== undefined + ? normalizePositiveInt(options.maxAgeMinutes, 0) + : undefined; + const minRelevance = options.minRelevance !== undefined + ? normalizeRelevanceScore(options.minRelevance) + : undefined; + const cutoffMs = maxAgeMinutes !== undefined + ? nowMs - maxAgeMinutes * 60_000 + : undefined; + const removed: PruneThreadContextResult['removed'] = []; + let keptCount = 0; + for (const entry of entries) { + const reasons: Array<'max_age' | 'min_relevance'> = []; + if (cutoffMs !== undefined) { + const addedAtMs = Date.parse(entry.added_at); + if (Number.isFinite(addedAtMs) && addedAtMs < cutoffMs) { + reasons.push('max_age'); + } + } + if (minRelevance !== undefined && entry.relevance_score < minRelevance) { + reasons.push('min_relevance'); + } + if (reasons.length === 0) { + keptCount += 1; + continue; + } + fs.rmSync(entry.absolutePath, { force: true }); + removed.push({ + path: entry.path, + title: entry.title, + added_at: entry.added_at, + relevance_score: entry.relevance_score, + reasons, + }); + } + return { + threadPath, + threadTid: location.threadTid, + removedCount: removed.length, + keptCount, + removed, + }; +} + +function readThreadContextEntries(workspacePath: string, threadPath: string): ThreadContextEntryRecord[] { + const location = 
resolveThreadContextLocation(workspacePath, threadPath); + if (!fs.existsSync(location.contextDir)) return []; + const files = fs.readdirSync(location.contextDir, { withFileTypes: true }) + .filter((entry) => entry.isFile() && entry.name.endsWith('.md')) + .map((entry) => path.join(location.contextDir, entry.name)) + .sort((left, right) => left.localeCompare(right)); + const output: ThreadContextEntryRecord[] = []; + for (const absolutePath of files) { + let parsed: matter.GrayMatterFile; + try { + parsed = matter(fs.readFileSync(absolutePath, 'utf-8')); + } catch { + continue; + } + const title = normalizeOptionalString(parsed.data.title) ?? path.basename(absolutePath, '.md'); + const source = normalizeOptionalString(parsed.data.source); + const addedBy = normalizeOptionalString(parsed.data.added_by) ?? 'unknown'; + const addedAt = normalizeIsoString(parsed.data.added_at) ?? new Date(0).toISOString(); + const relevanceScore = normalizeRelevanceScore(parsed.data.relevance_score); + const relativePath = path.relative(workspacePath, absolutePath).replace(/\\/g, '/'); + output.push({ + path: relativePath, + threadPath, + threadTid: location.threadTid, + title, + ...(source ? 
{ source } : {}), + added_by: addedBy, + added_at: addedAt, + relevance_score: relevanceScore, + content: parsed.content.trim(), + absolutePath, + }); + } + return output.sort((left, right) => + right.relevance_score - left.relevance_score + || right.added_at.localeCompare(left.added_at) + || left.path.localeCompare(right.path)); +} + +function resolveThreadContextLocation( + workspacePath: string, + threadPath: string, +): { threadPath: string; threadTid: string; contextDir: string } { + const threadInstance = store.read(workspacePath, threadPath); + if (!threadInstance || threadInstance.type !== 'thread') { + throw new Error(`Thread not found: ${threadPath}`); + } + const frontmatterTid = normalizeOptionalString(threadInstance.fields.tid); + const fallbackTid = sanitizeThreadTid(path.basename(threadPath, '.md')); + const threadTid = sanitizeThreadTid(frontmatterTid ?? fallbackTid); + const contextDir = path.join(workspacePath, THREAD_CONTEXT_ROOT, threadTid, THREAD_CONTEXT_SUBDIRECTORY); + return { + threadPath, + threadTid, + contextDir, + }; +} + +function buildContextEntryFileName(title: string, content: string, addedAtIso: string, actor: string): string { + const stamp = addedAtIso + .replace(/:/g, '') + .replace(/\./g, '') + .replace('T', '-') + .replace('Z', ''); + const slug = slugify(title) || 'context'; + const digest = createHash('sha1') + .update(`${title}\n${content}\n${addedAtIso}\n${actor}`) + .digest('hex') + .slice(0, 8); + return `${stamp}-${slug}-${digest}.md`; +} + +function slugify(value: string): string { + return value + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-+|-+$/g, '') + .slice(0, 64); +} + +function sanitizeThreadTid(value: string): string { + const sanitized = value + .trim() + .toLowerCase() + .replace(/[^a-z0-9._-]+/g, '-') + .replace(/^-+|-+$/g, '') + .slice(0, 96); + return sanitized || 'thread'; +} + +function normalizeThreadPath(value: string): string { + const trimmed = String(value ?? 
'').trim().replace(/^\.\//, '').replace(/\\/g, '/'); + if (!trimmed) { + throw new Error('Thread path is required.'); + } + const withPrefix = trimmed.includes('/') ? trimmed : `threads/${trimmed}`; + return withPrefix.endsWith('.md') ? withPrefix : `${withPrefix}.md`; +} + +function normalizeRequiredString(value: unknown, message: string): string { + const normalized = normalizeOptionalString(value); + if (!normalized) throw new Error(message); + return normalized; +} + +function normalizeOptionalString(value: unknown): string | undefined { + if (typeof value !== 'string') return undefined; + const trimmed = value.trim(); + return trimmed.length > 0 ? trimmed : undefined; +} + +function normalizeIsoString(value: unknown): string | undefined { + if (typeof value !== 'string') return undefined; + const parsed = Date.parse(value); + if (!Number.isFinite(parsed)) return undefined; + return new Date(parsed).toISOString(); +} + +function normalizeRelevanceScore(value: unknown): number { + if (typeof value !== 'number' || !Number.isFinite(value)) return DEFAULT_RELEVANCE_SCORE; + if (value < 0) return 0; + if (value > 1) return 1; + return roundScore(value); +} + +function normalizePositiveInt(value: unknown, fallback: number): number { + const parsed = Number.parseInt(String(value ?? ''), 10); + if (!Number.isFinite(parsed) || parsed < 0) return fallback; + return parsed; +} + +function tokenize(value: string): string[] { + return value + .toLowerCase() + .split(/[^a-z0-9]+/g) + .map((token) => token.trim()) + .filter((token) => token.length > 1); +} + +function uniqueStrings(values: string[]): string[] { + return [...new Set(values)]; +} + +function buildSearchRecord(entry: ThreadContextEntryRecord): { + entry: ThreadContextEntryRecord; + termFrequency: Map; + docLength: number; +} { + const searchable = [entry.title, entry.source ?? 
'', entry.content].join('\n'); + const tokens = tokenize(searchable); + const termFrequency = new Map(); + for (const token of tokens) { + termFrequency.set(token, (termFrequency.get(token) ?? 0) + 1); + } + return { + entry, + termFrequency, + docLength: Math.max(1, tokens.length), + }; +} + +function computeBm25Score( + record: { + termFrequency: Map; + docLength: number; + entry: ThreadContextEntryRecord; + }, + queryTerms: string[], + docFrequency: Map, + docCount: number, + averageDocLength: number, +): number { + const avgDocLength = averageDocLength > 0 ? averageDocLength : 1; + let bm25Score = 0; + for (const term of queryTerms) { + const tf = record.termFrequency.get(term) ?? 0; + if (tf <= 0) continue; + const df = docFrequency.get(term) ?? 0; + if (df <= 0) continue; + const idf = Math.log(1 + ((docCount - df + 0.5) / (df + 0.5))); + const denominator = tf + BM25_K1 * (1 - BM25_B + BM25_B * (record.docLength / avgDocLength)); + bm25Score += idf * ((tf * (BM25_K1 + 1)) / denominator); + } + if (bm25Score <= 0) return 0; + // Relevance frontmatter nudges tie-breaks without replacing BM25 ranking. 
+ return bm25Score + record.entry.relevance_score * 0.05; +} + +function buildSnippet(content: string, queryTerms: string[]): string { + const normalized = content.trim(); + if (!normalized) return ''; + const lower = normalized.toLowerCase(); + let matchIndex = -1; + let matchedTerm = ''; + for (const term of queryTerms) { + const idx = lower.indexOf(term.toLowerCase()); + if (idx >= 0 && (matchIndex < 0 || idx < matchIndex)) { + matchIndex = idx; + matchedTerm = term; + } + } + if (matchIndex < 0) { + return normalized.slice(0, 240); + } + const window = Math.max(60, matchedTerm.length + 40); + const start = Math.max(0, matchIndex - window); + const end = Math.min(normalized.length, matchIndex + matchedTerm.length + window); + return normalized.slice(start, end); +} + +function roundScore(value: number): number { + return Math.round(value * 1_000_000) / 1_000_000; +} + +function stripRecordPathForPublicResult(entry: ThreadContextEntryRecord): ThreadContextEntry { + return { + path: entry.path, + threadPath: entry.threadPath, + threadTid: entry.threadTid, + title: entry.title, + ...(entry.source ? 
// Context payload is returned alongside the thread on claim, starting empty.
const claimedPayload = getStructured<{
  thread: { path: string };
  context: { threadPath: string; totalEntries: number };
}>(claimed);
expect(claimedPayload.thread.path).toBe(coordinationThread.path);
expect(claimedPayload.context.threadPath).toBe(coordinationThread.path);
expect(claimedPayload.context.totalEntries).toBe(0);

// ... (tool inventory additions)
'wg_create_thread',
'wg_spawn_thread',
'wg_thread_context_add',
'wg_thread_context_search',
'wg_thread_context_list',
'wg_thread_context_prune',

// ... (wg_create_thread + thread-context round trip)
// Identical arguments for both calls, so the retry exercises idempotency.
const standaloneArgs = {
  title: 'Standalone MCP task',
  goal: 'Create a top-level thread without parent',
  idempotencyKey: 'create-idem-key',
  priority: 'high',
  tags: ['standalone'],
};

const createdStandalone = await client.callTool({
  name: 'wg_create_thread',
  arguments: standaloneArgs,
});
expect(isToolError(createdStandalone)).toBe(false);
const createdStandalonePayload = getStructured<{
  data: { operation: string; thread: { path: string; parent: string | null } };
}>(createdStandalone);
expect(createdStandalonePayload.data.operation).toBe('created');
expect(createdStandalonePayload.data.thread.parent).toBeNull();

// Retrying with the same idempotency key replays the original thread.
const createReplay = await client.callTool({
  name: 'wg_create_thread',
  arguments: standaloneArgs,
});
expect(isToolError(createReplay)).toBe(false);
const createReplayPayload = getStructured<{
  data: { operation: string; thread: { path: string } };
}>(createReplay);
expect(createReplayPayload.data.operation).toBe('replayed');
expect(createReplayPayload.data.thread.path).toBe(createdStandalonePayload.data.thread.path);

const standaloneThreadPath = createdStandalonePayload.data.thread.path;

const contextAdded = await client.callTool({
  name: 'wg_thread_context_add',
  arguments: {
    threadPath: standaloneThreadPath,
    title: 'Decision record',
    content: 'Use delivery-id plus digest dedup in gateway.',
    source: 'adr/2026-03-11',
    relevance: 0.8,
  },
});
expect(isToolError(contextAdded)).toBe(false);

const contextList = await client.callTool({
  name: 'wg_thread_context_list',
  arguments: { threadPath: standaloneThreadPath },
});
expect(isToolError(contextList)).toBe(false);
const contextListPayload = getStructured<{
  data: { count: number; entries: Array<{ title: string }> };
}>(contextList);
expect(contextListPayload.data.count).toBe(1);
expect(contextListPayload.data.entries[0]?.title).toBe('Decision record');

const contextSearch = await client.callTool({
  name: 'wg_thread_context_search',
  arguments: {
    threadPath: standaloneThreadPath,
    query: 'delivery dedup',
    limit: 5,
  },
});
expect(isToolError(contextSearch)).toBe(false);
const contextSearchPayload = getStructured<{
  data: { count: number; results: Array<{ title: string; bm25_score: number }> };
}>(contextSearch);
expect(contextSearchPayload.data.count).toBe(1);
expect(contextSearchPayload.data.results[0]?.title).toBe('Decision record');
expect(contextSearchPayload.data.results[0]?.bm25_score ?? 0).toBeGreaterThan(0);

// Pruning at minRelevance 0.9 removes the single 0.8-relevance entry.
const contextPrune = await client.callTool({
  name: 'wg_thread_context_prune',
  arguments: {
    threadPath: standaloneThreadPath,
    minRelevance: 0.9,
  },
});
expect(isToolError(contextPrune)).toBe(false);
const contextPrunePayload = getStructured<{
  data: { removed_count: number; kept_count: number };
}>(contextPrune);
expect(contextPrunePayload.data.removed_count).toBe(1);
expect(contextPrunePayload.data.kept_count).toBe(0);
threadContextModule; const thread = threadModule; const MESSAGE_TYPES = ['message', 'note', 'decision', 'system', 'ask', 'reply'] as const; @@ -239,6 +241,86 @@ export function registerCollaborationTools(server: McpServer, options: Workgraph }, ); + server.registerTool( + 'wg_create_thread', + { + title: 'WorkGraph Create Thread', + description: 'Create a standalone top-level thread with optional idempotency key.', + inputSchema: { + actor: z.string().optional().describe('Actor identity for write attribution.'), + title: z.string().min(1).describe('New standalone thread title.'), + goal: z.string().min(1).describe('Thread goal/body seed.'), + priority: z.string().optional().describe('Optional priority override.'), + deps: z.array(z.string()).optional().describe('Optional dependency thread refs.'), + tags: z.array(z.string()).optional().describe('Optional thread tags.'), + contextRefs: z.array(z.string()).optional().describe('Optional context refs to seed on the new thread.'), + space: z.string().optional().describe('Optional space ref for the new thread.'), + idempotencyKey: z.string().optional().describe('Stable idempotency key for retry-safe thread creation.'), + }, + annotations: { + destructiveHint: true, + idempotentHint: false, + }, + }, + async (args) => { + try { + const actor = resolveActor(options.workspacePath, args.actor, options.defaultActor); + assertWriteAllowed(options, actor, ['thread:create', 'mcp:write'], { + action: 'mcp.collaboration.create-thread', + target: 'threads', + }); + const idempotencyKey = normalizeOptionalString(args.idempotencyKey); + if (idempotencyKey) { + const existing = findCreatedThreadByKey(options.workspacePath, idempotencyKey); + if (existing) { + assertCreateReplayCompatible(existing, args); + return collaborationOkResult('wg_create_thread', actor, { + operation: 'replayed', + idempotency: { + key: idempotencyKey, + replayed: true, + }, + thread: serializeThread(existing), + }); + } + } + const created = 
thread.createThread(options.workspacePath, args.title, args.goal, actor, { + priority: args.priority, + deps: args.deps, + space: normalizeOptionalString(args.space), + context_refs: args.contextRefs, + tags: args.tags, + }); + const withMetadata = store.update( + options.workspacePath, + created.path, + { + mcp_created_by: actor, + mcp_created_at: new Date().toISOString(), + ...(idempotencyKey ? { mcp_create_idempotency_key: idempotencyKey } : {}), + }, + undefined, + actor, + { + skipAuthorization: true, + action: 'mcp.collaboration.create.store', + requiredCapabilities: ['thread:create', 'thread:manage'], + }, + ); + return collaborationOkResult('wg_create_thread', actor, { + operation: 'created', + idempotency: { + key: idempotencyKey, + replayed: false, + }, + thread: serializeThread(withMetadata), + }); + } catch (error) { + return collaborationErrorResult('wg_create_thread', error); + } + }, + ); + server.registerTool( 'wg_spawn_thread', { @@ -340,6 +422,173 @@ export function registerCollaborationTools(server: McpServer, options: Workgraph }, ); + server.registerTool( + 'wg_thread_context_add', + { + title: 'WorkGraph Thread Context Add', + description: 'Add a searchable context entry scoped to a specific thread.', + inputSchema: { + threadPath: z.string().min(1).describe('Target thread path (threads/.md).'), + actor: z.string().optional().describe('Actor identity for write attribution.'), + title: z.string().min(1).describe('Context entry title.'), + content: z.string().min(1).describe('Context markdown/body content.'), + source: z.string().optional().describe('Optional source reference (URL, doc ID, note).'), + relevance: z.number().min(0).max(1).optional().describe('Optional relevance score (0..1).'), + }, + annotations: { + destructiveHint: true, + idempotentHint: false, + }, + }, + async (args) => { + try { + const actor = resolveActor(options.workspacePath, args.actor, options.defaultActor); + assertWriteAllowed(options, actor, ['thread:update', 
'mcp:write'], { + action: 'mcp.collaboration.thread-context.add', + target: normalizeThreadPath(args.threadPath), + }); + const threadPath = assertThreadExists(options.workspacePath, args.threadPath); + const created = threadContext.addThreadContextEntry(options.workspacePath, threadPath, { + title: args.title, + content: args.content, + source: normalizeOptionalString(args.source), + addedBy: actor, + relevanceScore: args.relevance, + }); + const summary = threadContext.summarizeThreadContext(options.workspacePath, threadPath, { topN: 3 }); + return collaborationOkResult('wg_thread_context_add', actor, { + operation: 'created', + thread_path: threadPath, + context_entry: serializeThreadContextEntry(created), + summary: serializeThreadContextSummary(summary), + }); + } catch (error) { + return collaborationErrorResult('wg_thread_context_add', error); + } + }, + ); + + server.registerTool( + 'wg_thread_context_search', + { + title: 'WorkGraph Thread Context Search', + description: 'Search a thread-specific context store using BM25 keyword ranking.', + inputSchema: { + threadPath: z.string().min(1).describe('Target thread path (threads/.md).'), + actor: z.string().optional().describe('Optional actor identity used for attribution/context.'), + query: z.string().min(1).describe('Keyword query string for BM25 search.'), + limit: z.number().int().min(1).max(100).optional().describe('Max search results to return.'), + }, + annotations: { + destructiveHint: false, + idempotentHint: true, + }, + }, + async (args) => { + try { + const actor = resolveActor(options.workspacePath, args.actor, options.defaultActor); + const threadPath = assertThreadExists(options.workspacePath, args.threadPath); + const results = threadContext.searchThreadContextEntries( + options.workspacePath, + threadPath, + args.query, + { + limit: args.limit, + }, + ); + const summary = threadContext.summarizeThreadContext(options.workspacePath, threadPath, { topN: 3 }); + return 
collaborationOkResult('wg_thread_context_search', actor, { + operation: 'searched', + thread_path: threadPath, + query: args.query, + count: results.length, + results: results.map((entry) => serializeThreadContextSearchResult(entry)), + summary: serializeThreadContextSummary(summary), + }); + } catch (error) { + return collaborationErrorResult('wg_thread_context_search', error); + } + }, + ); + + server.registerTool( + 'wg_thread_context_list', + { + title: 'WorkGraph Thread Context List', + description: 'List all context entries available for a thread.', + inputSchema: { + threadPath: z.string().min(1).describe('Target thread path (threads/.md).'), + actor: z.string().optional().describe('Optional actor identity used for attribution/context.'), + }, + annotations: { + destructiveHint: false, + idempotentHint: true, + }, + }, + async (args) => { + try { + const actor = resolveActor(options.workspacePath, args.actor, options.defaultActor); + const threadPath = assertThreadExists(options.workspacePath, args.threadPath); + const entries = threadContext.listThreadContextEntries(options.workspacePath, threadPath); + const summary = threadContext.summarizeThreadContext(options.workspacePath, threadPath, { topN: 3 }); + return collaborationOkResult('wg_thread_context_list', actor, { + operation: 'listed', + thread_path: threadPath, + count: entries.length, + entries: entries.map((entry) => serializeThreadContextEntry(entry)), + summary: serializeThreadContextSummary(summary), + }); + } catch (error) { + return collaborationErrorResult('wg_thread_context_list', error); + } + }, + ); + + server.registerTool( + 'wg_thread_context_prune', + { + title: 'WorkGraph Thread Context Prune', + description: 'Prune stale and/or low-relevance thread context entries.', + inputSchema: { + threadPath: z.string().min(1).describe('Target thread path (threads/.md).'), + actor: z.string().optional().describe('Actor identity for write attribution.'), + maxAge: z.union([z.number().int().min(1), 
z.string().min(1)]).optional() + .describe('Optional age threshold in minutes or shorthand like 6h / 7d.'), + minRelevance: z.number().min(0).max(1).optional().describe('Optional minimum relevance to keep (0..1).'), + }, + annotations: { + destructiveHint: true, + idempotentHint: false, + }, + }, + async (args) => { + try { + const actor = resolveActor(options.workspacePath, args.actor, options.defaultActor); + assertWriteAllowed(options, actor, ['thread:update', 'mcp:write'], { + action: 'mcp.collaboration.thread-context.prune', + target: normalizeThreadPath(args.threadPath), + }); + const threadPath = assertThreadExists(options.workspacePath, args.threadPath); + const maxAgeMinutes = parseMaxAgeMinutes(args.maxAge); + const result = threadContext.pruneThreadContextEntries(options.workspacePath, threadPath, { + ...(maxAgeMinutes !== undefined ? { maxAgeMinutes } : {}), + ...(typeof args.minRelevance === 'number' ? { minRelevance: args.minRelevance } : {}), + }); + const summary = threadContext.summarizeThreadContext(options.workspacePath, threadPath, { topN: 3 }); + return collaborationOkResult('wg_thread_context_prune', actor, { + operation: 'pruned', + thread_path: threadPath, + removed_count: result.removedCount, + kept_count: result.keptCount, + removed: result.removed, + summary: serializeThreadContextSummary(summary), + }); + } catch (error) { + return collaborationErrorResult('wg_thread_context_prune', error); + } + }, + ); + server.registerTool( 'wg_heartbeat', { @@ -587,6 +836,49 @@ function findSpawnedThreadByKey( ) ?? null; } +function findCreatedThreadByKey(workspacePath: string, idempotencyKey: string) { + return store.list(workspacePath, 'thread').find((entry) => + !normalizeOptionalString(entry.fields.parent) && + normalizeOptionalString(entry.fields.mcp_create_idempotency_key) === idempotencyKey + ) ?? 
null; +} + +function assertCreateReplayCompatible( + existing: { fields: Record }, + input: { + title: string; + goal: string; + priority?: string; + space?: string; + }, +): void { + const existingTitle = normalizeOptionalString(existing.fields.title); + const existingGoal = normalizeOptionalString(existing.fields.goal); + const existingPriority = normalizeOptionalString(existing.fields.priority) ?? 'medium'; + const existingSpace = normalizeOptionalString(existing.fields.space); + const requestedPriority = normalizeOptionalString(input.priority) ?? 'medium'; + const requestedSpace = normalizeOptionalString(input.space); + if ( + existingTitle !== input.title || + existingGoal !== input.goal || + existingPriority !== requestedPriority || + existingSpace !== requestedSpace + ) { + throw new McpToolError( + 'IDEMPOTENCY_CONFLICT', + 'Create-thread idempotency key was reused with different thread payload.', + { + details: { + previous_title: existingTitle ?? null, + previous_goal: existingGoal ?? null, + previous_priority: existingPriority, + previous_space: existingSpace ?? null, + }, + }, + ); + } +} + function assertPostReplayCompatible( existing: conversationModule.ConversationEventRecord, input: { @@ -697,6 +989,79 @@ function serializeThread(entry: { path: string; fields: Record }; } +function serializeThreadContextEntry(entry: threadContextModule.ThreadContextEntry) { + return { + path: entry.path, + title: entry.title, + source: entry.source ?? null, + added_by: entry.added_by, + added_at: entry.added_at, + relevance_score: entry.relevance_score, + content: entry.content, + }; +} + +function serializeThreadContextSearchResult(entry: threadContextModule.ThreadContextSearchResult) { + return { + path: entry.path, + title: entry.title, + source: entry.source ?? 
null, + added_by: entry.added_by, + added_at: entry.added_at, + relevance_score: entry.relevance_score, + bm25_score: entry.bm25_score, + snippet: entry.snippet, + }; +} + +function serializeThreadContextSummary(summary: threadContextModule.ThreadContextSummary) { + return { + thread_path: summary.threadPath, + thread_tid: summary.threadTid, + total_entries: summary.totalEntries, + top_entries: summary.topEntries.map((entry) => ({ + path: entry.path, + title: entry.title, + source: entry.source ?? null, + added_at: entry.added_at, + relevance_score: entry.relevance_score, + })), + }; +} + +function parseMaxAgeMinutes(value: unknown): number | undefined { + if (value === undefined || value === null) return undefined; + if (typeof value === 'number') { + if (!Number.isFinite(value) || value <= 0) { + throw new McpToolError('BAD_INPUT', 'maxAge must be a positive number of minutes.'); + } + return Math.floor(value); + } + const raw = normalizeOptionalString(value); + if (!raw) { + throw new McpToolError('BAD_INPUT', 'maxAge must be a positive number of minutes or shorthand (e.g. 6h, 7d).'); + } + const matched = raw.match(/^(\d+)\s*(m|min|mins|minute|minutes|h|hr|hrs|hour|hours|d|day|days)?$/i); + if (!matched) { + throw new McpToolError('BAD_INPUT', `Invalid maxAge "${raw}". Use minutes (number) or shorthand like 6h / 7d.`); + } + const amount = Number.parseInt(matched[1], 10); + if (!Number.isFinite(amount) || amount <= 0) { + throw new McpToolError('BAD_INPUT', 'maxAge must be greater than zero.'); + } + const unit = (matched[2] ?? 
'm').toLowerCase(); + if (unit === 'm' || unit === 'min' || unit === 'mins' || unit === 'minute' || unit === 'minutes') { + return amount; + } + if (unit === 'h' || unit === 'hr' || unit === 'hrs' || unit === 'hour' || unit === 'hours') { + return amount * 60; + } + if (unit === 'd' || unit === 'day' || unit === 'days') { + return amount * 60 * 24; + } + throw new McpToolError('BAD_INPUT', `Unsupported maxAge unit "${unit}".`); +} + function normalizeThreadPath(value: string): string { const trimmed = String(value ?? '').trim().replace(/^\.\//, '').replace(/\\/g, '/'); if (!trimmed) { diff --git a/packages/mcp-server/src/mcp/tools/write-tools.ts b/packages/mcp-server/src/mcp/tools/write-tools.ts index e5ee5c7..78689a1 100644 --- a/packages/mcp-server/src/mcp/tools/write-tools.ts +++ b/packages/mcp-server/src/mcp/tools/write-tools.ts @@ -6,6 +6,7 @@ import { mission as missionModule, missionOrchestrator as missionOrchestratorModule, orientation as orientationModule, + threadContext as threadContextModule, thread as threadModule, triggerEngine as triggerEngineModule, } from '@versatly/workgraph-kernel'; @@ -18,6 +19,7 @@ const dispatch = dispatchModule; const mission = missionModule; const missionOrchestrator = missionOrchestratorModule; const orientation = orientationModule; +const threadContext = threadContextModule; const thread = threadModule; const triggerEngine = triggerEngineModule; @@ -267,7 +269,14 @@ export function registerWriteTools(server: McpServer, options: WorkgraphMcpServe }); if (!gate.allowed) return errorResult(gate.reason); const updated = thread.claim(options.workspacePath, args.threadPath, actor); - return okResult({ thread: updated }, `Claimed ${updated.path} as ${actor}.`); + const contextSummary = threadContext.summarizeThreadContext(options.workspacePath, updated.path, { topN: 3 }); + return okResult( + { + thread: updated, + context: contextSummary, + }, + `Claimed ${updated.path} as ${actor}.`, + ); } catch (error) { return 
errorResult(error); }