Skip to content

Commit 5c0f754

Browse files
committed
perf(dashboard): incremental cache event fetching with watermark
After initial 200-event load, subsequent polls only fetch events newer than the last seen timestamp. Session stats computed client-side from cached events instead of a separate DB query. Eliminates redundant full-table scans and log parsing on every 15s poll cycle.
1 parent 06307a1 commit 5c0f754

File tree

4 files changed

+100
-28
lines changed

4 files changed

+100
-28
lines changed

packages/dashboard/src-tauri/src/commands.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -183,8 +183,8 @@ pub fn get_session_cache_stats(max_lines: Option<usize>, limit: Option<usize>) -
183183
}
184184

185185
#[tauri::command]
186-
pub fn get_cache_events_from_db(limit: Option<usize>) -> Vec<db::DbCacheEvent> {
187-
db::get_cache_events_from_db(limit.unwrap_or(200))
186+
pub fn get_cache_events_from_db(limit: Option<usize>, since_timestamp: Option<i64>) -> Vec<db::DbCacheEvent> {
187+
db::get_cache_events_from_db(limit.unwrap_or(200), since_timestamp)
188188
}
189189

190190
#[tauri::command]

packages/dashboard/src-tauri/src/db.rs

Lines changed: 40 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -492,27 +492,49 @@ fn match_log_cause(
492492
}
493493
}
494494

495-
fn load_raw_db_cache_events(limit: usize) -> Result<Vec<RawDbCacheEvent>, rusqlite::Error> {
495+
fn load_raw_db_cache_events(limit: usize, since_timestamp: Option<i64>) -> Result<Vec<RawDbCacheEvent>, rusqlite::Error> {
496496
let Some(opencode_db_path) = resolve_opencode_db_path() else {
497497
return Ok(Vec::new());
498498
};
499499

500500
let conn = open_readonly(&opencode_db_path)?;
501-
let mut stmt = conn.prepare(
502-
"SELECT CAST(m.id AS TEXT), m.session_id, m.time_created,
503-
COALESCE(CAST(json_extract(m.data, '$.tokens.input') AS INTEGER), 0) AS input_tokens,
504-
COALESCE(CAST(json_extract(m.data, '$.tokens.cache.read') AS INTEGER), 0) AS cache_read,
505-
COALESCE(CAST(json_extract(m.data, '$.tokens.cache.write') AS INTEGER), 0) AS cache_write,
506-
COALESCE(CAST(json_extract(m.data, '$.tokens.total') AS INTEGER), 0) AS total_tokens,
507-
CAST(json_extract(m.data, '$.agent') AS TEXT) AS agent
508-
FROM message m
509-
WHERE json_extract(m.data, '$.role') = 'assistant'
510-
AND COALESCE(CAST(json_extract(m.data, '$.tokens.total') AS INTEGER), 0) > 0
511-
ORDER BY m.time_created DESC
512-
LIMIT ?1",
513-
)?;
514501

515-
let rows = stmt.query_map([limit as i64], |row| {
502+
let (sql, params): (String, Vec<Box<dyn rusqlite::types::ToSql>>) = if let Some(since) = since_timestamp {
503+
(
504+
"SELECT CAST(m.id AS TEXT), m.session_id, m.time_created,
505+
COALESCE(CAST(json_extract(m.data, '$.tokens.input') AS INTEGER), 0) AS input_tokens,
506+
COALESCE(CAST(json_extract(m.data, '$.tokens.cache.read') AS INTEGER), 0) AS cache_read,
507+
COALESCE(CAST(json_extract(m.data, '$.tokens.cache.write') AS INTEGER), 0) AS cache_write,
508+
COALESCE(CAST(json_extract(m.data, '$.tokens.total') AS INTEGER), 0) AS total_tokens,
509+
CAST(json_extract(m.data, '$.agent') AS TEXT) AS agent
510+
FROM message m
511+
WHERE json_extract(m.data, '$.role') = 'assistant'
512+
AND COALESCE(CAST(json_extract(m.data, '$.tokens.total') AS INTEGER), 0) > 0
513+
AND m.time_created > ?1
514+
ORDER BY m.time_created DESC
515+
LIMIT ?2".to_string(),
516+
vec![Box::new(since) as Box<dyn rusqlite::types::ToSql>, Box::new(limit as i64)],
517+
)
518+
} else {
519+
(
520+
"SELECT CAST(m.id AS TEXT), m.session_id, m.time_created,
521+
COALESCE(CAST(json_extract(m.data, '$.tokens.input') AS INTEGER), 0) AS input_tokens,
522+
COALESCE(CAST(json_extract(m.data, '$.tokens.cache.read') AS INTEGER), 0) AS cache_read,
523+
COALESCE(CAST(json_extract(m.data, '$.tokens.cache.write') AS INTEGER), 0) AS cache_write,
524+
COALESCE(CAST(json_extract(m.data, '$.tokens.total') AS INTEGER), 0) AS total_tokens,
525+
CAST(json_extract(m.data, '$.agent') AS TEXT) AS agent
526+
FROM message m
527+
WHERE json_extract(m.data, '$.role') = 'assistant'
528+
AND COALESCE(CAST(json_extract(m.data, '$.tokens.total') AS INTEGER), 0) > 0
529+
ORDER BY m.time_created DESC
530+
LIMIT ?1".to_string(),
531+
vec![Box::new(limit as i64) as Box<dyn rusqlite::types::ToSql>],
532+
)
533+
};
534+
535+
let mut stmt = conn.prepare(&sql)?;
536+
let params_refs: Vec<&dyn rusqlite::types::ToSql> = params.iter().map(|p| p.as_ref()).collect();
537+
let rows = stmt.query_map(params_refs.as_slice(), |row| {
516538
Ok(RawDbCacheEvent {
517539
message_id: row.get(0)?,
518540
session_id: row.get(1)?,
@@ -584,15 +606,15 @@ fn build_db_cache_events(rows: Vec<RawDbCacheEvent>, enrich_causes: bool) -> Vec
584606
chronological
585607
}
586608

587-
pub fn get_cache_events_from_db(limit: usize) -> Vec<DbCacheEvent> {
588-
load_raw_db_cache_events(limit)
609+
pub fn get_cache_events_from_db(limit: usize, since_timestamp: Option<i64>) -> Vec<DbCacheEvent> {
610+
load_raw_db_cache_events(limit, since_timestamp)
589611
.map(|rows| build_db_cache_events(rows, true))
590612
.unwrap_or_default()
591613
}
592614

593615
pub fn get_session_cache_stats_from_db(limit: usize) -> Vec<SessionCacheStats> {
594616
// Reuse raw rows instead of re-querying + re-parsing logs
595-
let events = load_raw_db_cache_events(200)
617+
let events = load_raw_db_cache_events(200, None)
596618
.map(|rows| build_db_cache_events(rows, false)) // skip log enrichment for stats
597619
.unwrap_or_default();
598620
let mut map: HashMap<String, (usize, i64, i64, i64, i64, usize)> = HashMap::new();

packages/dashboard/src/components/CacheDiagnostics/CacheDiagnostics.tsx

Lines changed: 52 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { createSignal, For, Show, onCleanup, onMount } from "solid-js";
22
import type { DbCacheEvent, SessionCacheStats } from "../../lib/types";
3-
import { formatDateTime, getCacheEventsFromDb, getSessionCacheStatsFromDb, getSessions, truncate } from "../../lib/api";
3+
import { formatDateTime, getCacheEventsFromDb, getSessions, truncate } from "../../lib/api";
44

55
export default function CacheDiagnostics() {
66
const [events, setEvents] = createSignal<DbCacheEvent[]>([]);
@@ -12,15 +12,61 @@ export default function CacheDiagnostics() {
1212
const [hideSubagents, setHideSubagents] = createSignal(true);
1313
const [subagentIds, setSubagentIds] = createSignal<Set<string>>(new Set());
1414

15+
// Track watermark for incremental fetching — only fetch new events after initial load
16+
let watermark: number | null = null;
17+
1518
const fetchData = async () => {
1619
try {
17-
const [eventsData, statsData, sessions] = await Promise.all([
18-
getCacheEventsFromDb(200),
19-
getSessionCacheStatsFromDb(20),
20+
const [newEvents, sessions] = await Promise.all([
21+
getCacheEventsFromDb(200, watermark),
2022
getSessions(),
2123
]);
22-
setEvents(eventsData);
23-
setSessionStats(statsData);
24+
25+
if (watermark === null) {
26+
// Initial load — use full result
27+
setEvents(newEvents);
28+
} else if (newEvents.length > 0) {
29+
// Incremental — append new events (the DB query returns newest-first, but
30+
// build_db_cache_events reverses them to chronological order), keep the last 200
31+
setEvents(prev => [...prev, ...newEvents].slice(-200));
32+
}
33+
34+
// Update watermark to latest timestamp
35+
const allEvents = events();
36+
if (allEvents.length > 0) {
37+
watermark = Math.max(...allEvents.map(e => e.timestamp));
38+
}
39+
40+
// Compute session stats client-side from cached events (no extra DB query)
41+
const statsMap = new Map<string, { count: number; read: number; write: number; input: number; lastTs: number; busts: number }>();
42+
for (const e of allEvents) {
43+
if (!e.session_id) continue;
44+
const s = statsMap.get(e.session_id) ?? { count: 0, read: 0, write: 0, input: 0, lastTs: 0, busts: 0 };
45+
s.count++;
46+
s.read += e.cache_read;
47+
s.write += e.cache_write;
48+
s.input += e.input_tokens;
49+
if (e.timestamp > s.lastTs) s.lastTs = e.timestamp;
50+
if (e.severity === "bust" || e.severity === "full_bust") s.busts++;
51+
statsMap.set(e.session_id, s);
52+
}
53+
const stats = [...statsMap.entries()]
54+
.map(([sid, s]) => {
55+
const total = s.read + s.write + s.input;
56+
return {
57+
session_id: sid,
58+
event_count: s.count,
59+
total_cache_read: s.read,
60+
total_cache_write: s.write,
61+
total_input: s.input,
62+
hit_ratio: total > 0 ? s.read / total : 0,
63+
last_timestamp: new Date(s.lastTs).toISOString(),
64+
bust_count: s.busts,
65+
};
66+
})
67+
.sort((a, b) => b.last_timestamp.localeCompare(a.last_timestamp));
68+
setSessionStats(stats);
69+
2470
// Build session ID → title lookup and subagent set
2571
const names: Record<string, string> = {};
2672
const subs = new Set<string>();

packages/dashboard/src/lib/api.ts

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -143,9 +143,13 @@ export async function getCacheEvents(
143143
}
144144

145145
export async function getCacheEventsFromDb(
146-
limit?: number
146+
limit?: number,
147+
sinceTimestamp?: number | null
147148
): Promise<DbCacheEvent[]> {
148-
return invoke("get_cache_events_from_db", { limit: limit ?? 200 });
149+
return invoke("get_cache_events_from_db", {
150+
limit: limit ?? 200,
151+
sinceTimestamp: sinceTimestamp ?? null,
152+
});
149153
}
150154

151155
// ── Config API ──────────────────────────────────────────────

0 commit comments

Comments
 (0)