# GitHub Actions workflow: "Aggregate Docs" (captured from run #20 of the
# public `docs` repository). Web-UI navigation chrome removed so the file
# parses as YAML.
name: Aggregate Docs

# Rebuild the aggregated docs site whenever source content may have changed:
#   - push to main        (edits to this repo's own config/content)
#   - repository_dispatch (source repos fire "docs-updated" after publishing)
#   - nightly cron        (safety net in case a dispatch was missed)
#   - workflow_dispatch   (manual re-run)
on:
  push:
    branches: [main]
  repository_dispatch:
    types: [docs-updated]
  schedule:
    - cron: '0 0 * * *'
  workflow_dispatch:

# Run at most one aggregation at a time; a newer trigger supersedes a
# running one (safe: the job rebuilds everything from scratch each run).
concurrency:
  group: aggregate-docs
  cancel-in-progress: true

# The job commits and pushes the aggregated result back to this repo.
permissions:
  contents: write

jobs:
  aggregate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/github-script@v8
        name: Aggregate docs from source repos
        env:
          # Token with read access to the source repos; it is embedded in the
          # clone URL below. Actions masks registered secrets in logs, so the
          # echoed git command does not leak it.
          PUSH_TOKEN: ${{ secrets.DOCS_TOKEN }}
        with:
          script: |
            const fs = require('fs');
            const path = require('path');

            // Fail fast with a clear message if the secret is missing or
            // empty; otherwise the clone below dies with an opaque git
            // authentication error.
            if (!process.env.PUSH_TOKEN) {
              core.setFailed('DOCS_TOKEN secret is not set or empty');
              return;
            }

            // repos.json: [{ owner, repo, docsPath?, ref? }, ...] — the list
            //             of source repos to pull docs from.
            // docs.json:  this site's config; navigation.products is rebuilt
            //             from the source repos below, all other fields kept.
            const repos = JSON.parse(fs.readFileSync('repos.json', 'utf8'));
            const docsConfig = JSON.parse(fs.readFileSync('docs.json', 'utf8'));

            // Start from an empty product list each run so products from
            // repos that were removed from repos.json disappear.
            docsConfig.navigation.products = [];

            // Recursively copy src's contents into dest, excluding the
            // per-repo config file and the assets directory.
            // NOTE(review): the exclusion applies at EVERY depth, so a
            // nested "assets" dir or "docs.json" file is also skipped —
            // confirm that is intended; the top-level sourceEntries filter
            // below suggests top-level-only exclusion was the goal.
            const copyContents = (src, dest) => {
              for (const entry of fs.readdirSync(src, { withFileTypes: true })) {
                if (entry.name === 'docs.json' || entry.name === 'assets') continue;
                const srcPath = path.join(src, entry.name);
                const destPath = path.join(dest, entry.name);
                if (entry.isDirectory()) {
                  fs.mkdirSync(destPath, { recursive: true });
                  copyContents(srcPath, destPath);
                } else {
                  fs.cpSync(srcPath, destPath);
                }
              }
            };

            // Process each source repo: clone, merge its navigation, and
            // sync its content directories into the working tree.
            for (const { owner, repo, docsPath = 'docs', ref = 'main' } of repos) {
              core.startGroup(`Processing ${owner}/${repo}`);
              const cloneDir = path.join(process.env.RUNNER_TEMP, 'repos', repo);
              // Clean out any leftover clone from a previous iteration
              // (repos with the same name from different owners collide here).
              await io.rmRF(cloneDir);
              // Shallow clone of just the requested branch — we only need
              // the tip of the docs tree, not history.
              await exec.exec('git', [
                'clone', '--depth=1', '--branch', ref,
                `https://x-access-token:${process.env.PUSH_TOKEN}@github.com/${owner}/${repo}.git`,
                cloneDir
              ]);
              const sourceDir = path.join(cloneDir, docsPath);
              const sourceConfig = path.join(sourceDir, 'docs.json');
              // A repo without docs.json has nothing to contribute; warn
              // (visible in the run summary) rather than failing the run.
              if (!fs.existsSync(sourceConfig)) {
                core.warning(`No docs.json found in ${owner}/${repo}/${docsPath}, skipping`);
                core.endGroup();
                continue;
              }
              // Merge the source repo's navigation.products into ours.
              // Order follows repos.json; missing navigation is tolerated.
              const subConfig = JSON.parse(fs.readFileSync(sourceConfig, 'utf8'));
              const subProducts = subConfig.navigation?.products ?? [];
              docsConfig.navigation.products.push(...subProducts);
              // Determine which content directories this repo owns
              // (e.g. json-schema/ containing json-schema/introduction.mdx):
              // the top-level entries of its docs folder, excluding docs.json
              // and assets. Only those are synced into the working tree.
              const sourceEntries = fs.readdirSync(sourceDir, { withFileTypes: true })
                .filter(e => e.name !== 'docs.json' && e.name !== 'assets');
              // Delete-then-copy so files removed upstream are removed here
              // too (a plain copy would leave stale files behind).
              for (const entry of sourceEntries) {
                const target = path.join('.', entry.name);
                if (fs.existsSync(target)) {
                  await io.rmRF(target);
                }
              }
              copyContents(sourceDir, '.');
              core.endGroup();
            }

            // Write docs.json back with merged navigation, preserving all
            // other fields. 4-space indent + trailing newline matches the
            // file's existing on-disk formatting, keeping diffs minimal.
            fs.writeFileSync(
              'docs.json',
              JSON.stringify(docsConfig, null, 4) + '\n'
            );
            core.info(`Aggregated ${docsConfig.navigation.products.length} product(s)`);

            // Commit and push only if something actually changed.
            // `git diff --cached --quiet` exits non-zero when the index has
            // changes, which exec.exec surfaces as a thrown error — so the
            // catch branch means "there ARE changes".
            await exec.exec('git', ['add', '.']);
            let hasChanges = false;
            try {
              await exec.exec('git', ['diff', '--cached', '--quiet']);
            } catch {
              hasChanges = true;
            }
            if (!hasChanges) {
              core.info('No changes detected, skipping commit');
              return;
            }
            await exec.exec('git', ['config', 'user.name', 'github-actions[bot]']);
            await exec.exec('git', [
              'config', 'user.email',
              'github-actions[bot]@users.noreply.github.com'
            ]);
            await exec.exec('git', ['commit', '-m', 'docs: aggregate from source repos']);
            // Push uses the credentials persisted by actions/checkout
            // (GITHUB_TOKEN), authorized by `permissions: contents: write`.
            await exec.exec('git', ['push']);
            core.info('Docs aggregated and pushed successfully');