Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 23 additions & 64 deletions packages/app/src/app/api/gpu-metrics/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,67 +2,37 @@
* DO NOT ADD CACHING (blob, CDN, or unstable_cache) to this route.
* It fetches live GitHub Actions artifacts which change while a run is in progress.
*/
import AdmZip from 'adm-zip';
import { NextRequest, NextResponse } from 'next/server';

import { GITHUB_API_BASE, GITHUB_OWNER, GITHUB_REPO } from '@semianalysisai/inferencex-constants';

import { parseCsvData } from '@/components/gpu-power/types';

/** Minimal shape of a GitHub Actions artifact object as returned by the REST API. */
interface GithubArtifact {
  // Numeric artifact identifier assigned by GitHub.
  id: number;
  // Artifact name; used below to select artifacts whose name starts with "gpu_metrics".
  name: string;
  // URL from which the artifact's ZIP archive can be downloaded (requires auth token).
  archive_download_url: string;
}

/**
 * Fetch every page of a paginated GitHub API collection (same pattern as the
 * unofficial-run route).
 *
 * Accepts either a bare array response or an envelope containing `artifacts`
 * or `workflow_runs`. Stops on the first non-OK response or empty page; a
 * non-OK mid-way response yields a best-effort partial result rather than an
 * error.
 */
async function githubFetchAll(url: string, token: string): Promise<unknown[]> {
  const collected: unknown[] = [];
  const headers = { Authorization: `Bearer ${token}`, Accept: 'application/vnd.github.v3+json' };
  for (let page = 1; ; page++) {
    // Preserve any query string already present on the caller's URL.
    const joiner = url.includes('?') ? '&' : '?';
    const response = await fetch(`${url}${joiner}per_page=100&page=${page}`, { headers });
    if (!response.ok) break;
    const payload = await response.json();
    const batch = Array.isArray(payload)
      ? payload
      : (payload.artifacts ?? payload.workflow_runs ?? []);
    if (batch.length === 0) break;
    collected.push(...batch);
    // A short page means we have reached the final page.
    if (batch.length < 100) break;
  }
  return collected;
}
import {
downloadGithubArtifact,
extractZipEntries,
fetchGithubRunArtifacts,
fetchGithubWorkflowRun,
getGithubToken,
normalizeGithubRunInfo,
type GithubWorkflowRun,
} from '@/lib/github-artifacts';

const MAX_ARTIFACT_BYTES = 50 * 1024 * 1024;

async function fetchGpuMetrics(runId: string) {
const githubToken = process.env.GITHUB_TOKEN;
const githubToken = getGithubToken();
if (!githubToken) throw new Error('GitHub token not configured');

const runResp = await fetch(
`${GITHUB_API_BASE}/repos/${GITHUB_OWNER}/${GITHUB_REPO}/actions/runs/${runId}`,
{
headers: { Authorization: `Bearer ${githubToken}`, Accept: 'application/vnd.github.v3+json' },
},
);
const runResp = await fetchGithubWorkflowRun(runId, githubToken);
if (!runResp.ok) throw new Error(`Failed to fetch workflow run: ${runResp.status}`);
const run = await runResp.json();
const run = (await runResp.json()) as GithubWorkflowRun;

const artifacts = (await githubFetchAll(
`${GITHUB_API_BASE}/repos/${GITHUB_OWNER}/${GITHUB_REPO}/actions/runs/${runId}/artifacts`,
githubToken,
)) as GithubArtifact[];
const artifacts = await fetchGithubRunArtifacts(runId, githubToken);

const gpuArtifacts = artifacts.filter((a) => a.name.startsWith('gpu_metrics'));
if (gpuArtifacts.length === 0) throw new Error('No gpu_metrics artifacts found for this run');

const parsedArtifacts: { name: string; data: ReturnType<typeof parseCsvData> }[] = [];
for (const artifact of gpuArtifacts) {
const dlResp = await fetch(artifact.archive_download_url, {
headers: { Authorization: `Bearer ${githubToken}`, Accept: 'application/vnd.github.v3+json' },
});
const dlResp = await downloadGithubArtifact(artifact.archive_download_url, githubToken);
if (!dlResp.ok) {
console.warn(`Failed to download artifact ${artifact.name}: ${dlResp.statusText}`);
continue;
Expand All @@ -74,32 +44,21 @@ async function fetchGpuMetrics(runId: string) {
continue;
}

const zip = new AdmZip(Buffer.from(await dlResp.arrayBuffer()));
const rows = [];
for (const entry of zip.getEntries()) {
if (!entry.entryName.endsWith('.csv')) continue;
try {
rows.push(...parseCsvData(zip.readAsText(entry)));
} catch (e) {
console.warn(`Failed to parse CSV ${entry.entryName} from ${artifact.name}:`, e);
}
}
const rows = extractZipEntries(
Buffer.from(await dlResp.arrayBuffer()),
'.csv',
(_entryName, contents) => parseCsvData(contents),
(entryName, error) => {
console.warn(`Failed to parse CSV ${entryName} from ${artifact.name}:`, error);
},
);
if (rows.length > 0) parsedArtifacts.push({ name: artifact.name, data: rows });
}

if (parsedArtifacts.length === 0) throw new Error('No GPU metrics data found in artifacts');

return {
runInfo: {
id: run.id,
name: run.name,
branch: run.head_branch,
sha: run.head_sha,
createdAt: run.created_at,
url: run.html_url,
conclusion: run.conclusion,
status: run.status,
},
runInfo: normalizeGithubRunInfo(run),
artifacts: parsedArtifacts,
};
}
Expand Down
91 changes: 22 additions & 69 deletions packages/app/src/app/api/unofficial-run/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,38 +2,20 @@
* DO NOT ADD CACHING (blob, CDN, or unstable_cache) to this route.
* It fetches live GitHub Actions artifacts which change while a run is in progress.
*/
import AdmZip from 'adm-zip';
import { NextRequest, NextResponse } from 'next/server';

import { GITHUB_API_BASE, GITHUB_OWNER, GITHUB_REPO } from '@semianalysisai/inferencex-constants';
import { mapBenchmarkRow } from '@semianalysisai/inferencex-db/etl/benchmark-mapper';
import { createSkipTracker } from '@semianalysisai/inferencex-db/etl/skip-tracker';

/** Subset of the GitHub Actions artifact REST payload this route consumes. */
interface GithubArtifact {
  // GitHub-assigned artifact id (largest id = most recently created).
  id: number;
  // Artifact name; this route looks for the exact name "results_bmk".
  name: string;
  // Authenticated download URL for the artifact's ZIP archive.
  archive_download_url: string;
}

/**
 * Walk a paginated GitHub API endpoint and accumulate every item.
 *
 * Handles both raw-array responses and envelopes keyed by `artifacts` or
 * `workflow_runs`. Iteration ends at the first failed request, the first
 * empty page, or a page shorter than the 100-item page size; failures are
 * swallowed, returning whatever was collected so far.
 */
async function githubFetchAll(url: string, token: string): Promise<unknown[]> {
  const results: unknown[] = [];
  let pageNumber = 1;
  let done = false;
  while (!done) {
    // Append pagination params without clobbering an existing query string.
    const separator = url.includes('?') ? '&' : '?';
    const res = await fetch(`${url}${separator}per_page=100&page=${pageNumber}`, {
      headers: { Authorization: `Bearer ${token}`, Accept: 'application/vnd.github.v3+json' },
    });
    if (!res.ok) break;
    const body = await res.json();
    const pageItems = Array.isArray(body) ? body : (body.artifacts ?? body.workflow_runs ?? []);
    if (pageItems.length === 0) break;
    results.push(...pageItems);
    // Fewer than 100 items means this was the last page.
    done = pageItems.length < 100;
    pageNumber += 1;
  }
  return results;
}
import {
downloadGithubArtifact,
extractZipEntries,
fetchGithubRunArtifacts,
fetchGithubWorkflowRun,
getGithubToken,
getRunDate,
normalizeGithubRunInfo,
type GithubWorkflowRun,
} from '@/lib/github-artifacts';

/** Normalize raw artifact rows into the BenchmarkRow shape the frontend expects. */
export function normalizeArtifactRows(rawRows: Record<string, unknown>[], date: string) {
Expand Down Expand Up @@ -72,19 +54,12 @@ export function normalizeArtifactRows(rawRows: Record<string, unknown>[], date:
return results;
}

/** Extract all valid JSON files from a ZIP buffer; malformed JSON entries are skipped. */
// NOTE(review): this block is diff residue — it contains BOTH the pre-refactor
// AdmZip implementation and the post-refactor extractZipEntries implementation
// concatenated, because removed and added diff lines were pasted without
// +/- markers. The second `return` below is unreachable. Confirm which version
// is intended (the PR replaces the first with the second) and delete the other.
function extractJsonFromZip(buffer: Buffer): Record<string, unknown>[] {
  const zip = new AdmZip(buffer);
  const results: Record<string, unknown>[] = [];
  for (const entry of zip.getEntries()) {
    // Only .json entries inside the archive are considered.
    if (!entry.entryName.endsWith('.json')) continue;
    try {
      const data = JSON.parse(zip.readAsText(entry));
      // Array payloads are flattened one level; single objects appended as-is.
      if (Array.isArray(data)) results.push(...data);
      else results.push(data);
    } catch {} // malformed JSON is deliberately skipped, not fatal
  }
  return results;
  // NOTE(review): unreachable — superseded replacement implementation kept by
  // the paste; relies on the extractZipEntries helper imported above.
  return extractZipEntries(buffer, '.json', (_entryName, contents) => {
    const data = JSON.parse(contents) as Record<string, unknown> | Record<string, unknown>[];
    return Array.isArray(data) ? data : [data];
  });
}

export async function GET(request: NextRequest) {
Expand All @@ -93,35 +68,24 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'runId must be a numeric value' }, { status: 400 });
}

const githubToken = process.env.GITHUB_TOKEN;
const githubToken = getGithubToken();
if (!githubToken) {
return NextResponse.json({ error: 'GitHub token not configured' }, { status: 500 });
}

try {
// Fetch workflow run metadata
const runResp = await fetch(
`${GITHUB_API_BASE}/repos/${GITHUB_OWNER}/${GITHUB_REPO}/actions/runs/${runId}`,
{
headers: {
Authorization: `Bearer ${githubToken}`,
Accept: 'application/vnd.github.v3+json',
},
},
);
const runResp = await fetchGithubWorkflowRun(runId, githubToken);
if (!runResp.ok) {
return NextResponse.json(
{ error: `GitHub API: ${runResp.statusText}` },
{ status: runResp.status },
);
}
const run = await runResp.json();
const run = (await runResp.json()) as GithubWorkflowRun;

// Fetch artifacts, find latest results_bmk
const artifacts = (await githubFetchAll(
`${GITHUB_API_BASE}/repos/${GITHUB_OWNER}/${GITHUB_REPO}/actions/runs/${runId}/artifacts`,
githubToken,
)) as GithubArtifact[];
const artifacts = await fetchGithubRunArtifacts(runId, githubToken);

const bmkArtifact = artifacts
.filter((a) => a.name === 'results_bmk')
Expand All @@ -132,9 +96,7 @@ export async function GET(request: NextRequest) {
}

// Download and extract benchmark data
const dlResp = await fetch(bmkArtifact.archive_download_url, {
headers: { Authorization: `Bearer ${githubToken}`, Accept: 'application/vnd.github.v3+json' },
});
const dlResp = await downloadGithubArtifact(bmkArtifact.archive_download_url, githubToken);
if (!dlResp.ok) {
return NextResponse.json(
{ error: `Artifact download failed: ${dlResp.statusText}` },
Expand All @@ -143,21 +105,12 @@ export async function GET(request: NextRequest) {
}

const rawRows = extractJsonFromZip(Buffer.from(await dlResp.arrayBuffer()));
const date = run.created_at
? run.created_at.split('T')[0]
: new Date().toISOString().split('T')[0];
const date = getRunDate(run);
const benchmarks = normalizeArtifactRows(rawRows, date);

return NextResponse.json({
runInfo: {
id: run.id,
name: run.name,
branch: run.head_branch,
sha: run.head_sha,
createdAt: run.created_at,
url: run.html_url,
conclusion: run.conclusion,
status: run.status,
...normalizeGithubRunInfo(run),
isNonMainBranch: run.head_branch !== 'main',
},
benchmarks,
Expand Down
Loading
Loading