Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .changeset/generalize-fork-management.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
---
---

Generalize ccc-dev/ into multi-repo fork management tool
102 changes: 66 additions & 36 deletions .pnpmfile.cjs
Original file line number Diff line number Diff line change
@@ -1,65 +1,95 @@
// .pnpmfile.cjs — Two jobs:
//
// 1. Auto-replay: clone + patch CCC on first `pnpm install` (if pins exist).
// 1. Auto-replay: clone + patch managed forks on first `pnpm install` (if pins exist).
// replay.sh handles git clone, merge replay, lockfile removal, and source
// patching (jq exports rewrite). It does NOT run pnpm install
// internally — the root workspace install handles CCC deps alongside
// internally — the root workspace install handles fork deps alongside
// everything else.
//
// 2. readPackage hook: rewrite CCC deps from catalog ranges to workspace:*.
// CCC packages live in pnpm-workspace.yaml, so you'd expect pnpm to link
// 2. readPackage hook: rewrite fork deps from catalog ranges to workspace:*.
// Fork packages live in pnpm-workspace.yaml, so you'd expect pnpm to link
// them automatically. It doesn't — catalog: specifiers resolve to a semver
// range (e.g. ^1.12.2) BEFORE workspace linking is considered, so pnpm
// fetches from the registry even with link-workspace-packages = true.
// This hook intercepts every package.json at resolution time and forces
// workspace:* for any dep whose name matches a local CCC package.
// When CCC is not cloned, hasCcc is false and the hook is a no-op, so
// the catalog range falls through to the registry normally.
// workspace:* for any dep whose name matches a local fork package.
// When no forks are cloned, the hook is a no-op, so catalog ranges fall
// through to the registry normally.

const { execSync } = require("child_process");
const { execFileSync } = require("child_process");
const { existsSync, readdirSync, readFileSync } = require("fs");
const { join } = require("path");

const cccCache = join(__dirname, "ccc-dev", "ccc");
const cccRefs = join(__dirname, "ccc-dev", "pins", "REFS");
// Discover all *-fork/ directories with config.json
// A sibling "<name>-fork/" directory is treated as a managed fork only when
// it contains a config.json whose "cloneDir" field is set; anything else is
// ignored so unrelated directories can safely share the suffix.
const forkDirs = [];
for (const entry of readdirSync(__dirname, { withFileTypes: true })) {
  if (!entry.isDirectory() || !entry.name.endsWith("-fork")) continue;
  const configPath = join(__dirname, entry.name, "config.json");
  if (existsSync(configPath)) {
    // NOTE(review): JSON.parse is unguarded — a malformed config.json aborts
    // the entire pnpm install. Presumably intentional fail-fast; confirm.
    const config = JSON.parse(readFileSync(configPath, "utf8"));
    if (!config.cloneDir) continue;
    forkDirs.push({
      name: entry.name, // directory name, e.g. "ccc-fork"
      dir: join(__dirname, entry.name), // absolute path to the fork dir
      config, // parsed config.json contents
    });
  }
}

// 1. Auto-replay CCC pins on first pnpm install
// Skip when ccc:record is running — it rebuilds pins from scratch.
// 1. Auto-replay fork pins on first pnpm install
// Skip when fork:record is running — it rebuilds pins from scratch.
// Detect via argv since pnpmfile loads before npm_lifecycle_event is set.
const isCccRecord = process.argv.some((a) => a === "ccc:record");
if (!isCccRecord && !existsSync(cccCache) && existsSync(cccRefs)) {
try {
execSync("bash ccc-dev/replay.sh", {
cwd: __dirname,
stdio: ["ignore", "pipe", "pipe"],
});
} catch (err) {
process.stderr.write("Replaying CCC pins…\n");
process.stderr.write(err.stdout?.toString() ?? "");
process.stderr.write(err.stderr?.toString() ?? "");
throw err;
// Skip replay while `fork:record` is running: it rebuilds pins from scratch.
// Detection uses argv because the pnpmfile loads before npm_lifecycle_event
// is populated.
const isRecord = process.argv.some((a) => a === "fork:record");
if (!isRecord) {
  for (const fork of forkDirs) {
    const cloneDir = join(fork.dir, fork.config.cloneDir);
    const hasPins = existsSync(join(fork.dir, "pins", "manifest"));
    // Replay only when pins exist but the clone does not (fresh checkout,
    // or the clone was removed).
    if (!existsSync(cloneDir) && hasPins) {
      try {
        execFileSync("bash", ["fork-scripts/replay.sh", fork.name], {
          cwd: __dirname,
          // Capture stdout/stderr; they are surfaced only on failure below.
          stdio: ["ignore", "pipe", "pipe"],
        });
      } catch (err) {
        // Re-emit the piped script output for context, then rethrow so the
        // install aborts instead of continuing with a half-replayed fork.
        process.stderr.write(`Replaying ${fork.name} pins…\n`);
        process.stderr.write(err.stdout?.toString() ?? "");
        process.stderr.write(err.stderr?.toString() ?? "");
        throw err;
      }
    }
  }
}

// 2. Discover local CCC packages and build the override map
const cccPkgs = join(cccCache, "packages");
// 2. Discover local fork packages and build the override map
const localOverrides = {};
if (existsSync(cccPkgs)) {
for (const dir of readdirSync(cccPkgs, { withFileTypes: true })) {
if (!dir.isDirectory()) continue;
const pkgJsonPath = join(cccPkgs, dir.name, "package.json");
if (!existsSync(pkgJsonPath)) continue;
const { name } = JSON.parse(readFileSync(pkgJsonPath, "utf8"));
if (name) {
localOverrides[name] = "workspace:*";
// For every cloned fork, walk its workspace include globs and map each
// discovered package name to "workspace:*" so the readPackage hook can force
// workspace linking over the catalog/registry range.
for (const fork of forkDirs) {
  const cloneDir = join(fork.dir, fork.config.cloneDir);
  if (!existsSync(cloneDir)) continue; // fork not cloned → no overrides
  const includes = fork.config.workspace?.include ?? [];
  const excludes = new Set(fork.config.workspace?.exclude ?? []);
  for (const pattern of includes) {
    // Simple glob: only supports trailing /* (e.g. "packages/*")
    const base = pattern.replace(/\/\*$/, "");
    const pkgsRoot = join(cloneDir, base);
    if (!existsSync(pkgsRoot)) continue;
    for (const dir of readdirSync(pkgsRoot, { withFileTypes: true })) {
      if (!dir.isDirectory()) continue;
      // Exclusions are matched on the clone-relative path, e.g. "packages/foo".
      const relPath = `${base}/${dir.name}`;
      if (excludes.has(relPath)) continue;
      const pkgJsonPath = join(pkgsRoot, dir.name, "package.json");
      if (!existsSync(pkgJsonPath)) continue;
      // Key by the published package name from the clone's package.json.
      const { name } = JSON.parse(readFileSync(pkgJsonPath, "utf8"));
      if (name) {
        localOverrides[name] = "workspace:*";
      }
    }
  }
}

const hasCcc = Object.keys(localOverrides).length > 0;
const hasOverrides = Object.keys(localOverrides).length > 0;

function readPackage(pkg) {
if (!hasCcc) return pkg;
if (!hasOverrides) return pkg;

for (const field of [
"dependencies",
Expand Down
47 changes: 30 additions & 17 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,32 +18,45 @@

## PR Workflow

1. **Routine Pre-PR Validation**: `pnpm check:full`, it wipes derived state and regenerates from scratch. If `ccc-dev/ccc/` has pending work, the wipe is skipped to prevent data loss — re-record or push CCC changes first for a clean validation
1. **Routine Pre-PR Validation**: `pnpm check:full` wipes derived state and regenerates from scratch. If any fork clone has pending work, the wipe is skipped to prevent data loss — re-record or push fork changes first for a clean validation
2. **Open a PR**: Run `pnpm changeset` to generate a changeset entry, then push the branch and present a clickable markdown link `[title](url)` where the URL is a GitHub compare URL (`quick_pull=1`). Base branch is `master`. Prefill "title" (concise, under 70 chars) and "body" (markdown with ## Why and ## Changes sections)
3. **Fetch PR review comments**: Use the GitHub REST API via curl. Fetch all three comment types (issue comments, reviews, and inline comments). Categorize feedback by actionability (action required / informational), not by source (human / bot). Reviewers reply asynchronously — poll every minute until comments arrive

## CCC Local Development (ccc-dev/)
## Fork Management (fork-scripts/ + *-fork/)

The `ccc-dev/` system uses a record/replay mechanism for deterministic builds of a local CCC fork:
The `fork-scripts/` system uses a record/replay mechanism for deterministic builds of external repo forks. Each fork lives in a `<name>-fork/` directory with a `config.json` specifying upstream URL, fork URL, merge refs, and workspace config. Scripts in `fork-scripts/` are generic and accept the fork directory as their first argument.

- `ccc-dev/pins/` is **committed** to git (base SHAs, merge refs, conflict resolutions, local patches), regenerated by `pnpm ccc:record`
- `ccc-dev/ccc/` is **not in git** — it is rebuilt from pins on `pnpm install`
- The developer may have **pending work** in `ccc-dev/ccc/`. Run `pnpm ccc:status` (exit 0 = safe to wipe, exit 1 = has custom work) before any operation that would destroy it. `pnpm ccc:record`, `pnpm ccc:clean`, and `pnpm ccc:reset` already guard against this automatically
- `.pnpmfile.cjs` silently rewrites all `@ckb-ccc/*` dependencies to `workspace:*` when `ccc-dev/ccc/` exists. Local CCC packages override published ones without any visible change in package.json files
- `pnpm install` has a side effect: if `ccc-dev/pins/REFS` exists but `ccc-dev/ccc/` does not, it automatically runs `ccc-dev/replay.sh` to rebuild CCC from pins. This is intentional
- `ccc-dev/patch.sh` rewrites CCC package exports to point at `.ts` source instead of `.d.ts`, then creates a deterministic git commit (fixed author/date) so record and replay produce the same `pins/HEAD` hash. This is why imports from `@ckb-ccc/*` resolve to TypeScript source files inside `node_modules` — it is not a bug
- `ccc-dev/tsgo-filter.sh` is a bash wrapper around `tsgo` that filters out diagnostics originating from `ccc-dev/ccc/`. CCC source does not satisfy this repo's strict tsconfig (`verbatimModuleSyntax`, `noUncheckedIndexedAccess`, `noImplicitOverride`), so the wrapper suppresses those errors while still reporting errors in stack source
### Per-fork directory structure

### Opening a CCC upstream PR
Each `<name>-fork/` contains:
- `config.json` — upstream URL, fork URL, refs to merge, cloneDir, workspace include/exclude
- `pins/` — **committed** to git (manifest + counted resolutions + local patches), regenerated by `pnpm fork:record <name>-fork`
- `pins/HEAD` — expected final SHA after full replay
- `pins/manifest` — base SHA + merge refs (TSV, one per line)
- `pins/res-N.resolution` — conflict resolution for merge step N (counted format: `--- path` file headers, `CONFLICT ours=N base=M theirs=K resolution=R` conflict headers followed by R resolution lines; parser is purely positional — reads counts and skips lines, never inspects content)
- `pins/local-*.patch` — local development patches (applied after merges + patch.sh)
- `<cloneDir>/` — **not in git** — rebuilt from pins on `pnpm install`

In `ccc-dev/ccc/`, branch off `origin/master` (or relevant branch), push to fork (`phroi/ccc`), open PR against `ckb-devrel/ccc`. Before pushing, run the CCC CI steps (`ccc-dev/ccc/.github/workflows/check.yaml`) with `CI=true`.
### Key behaviors

Once the PR is open, replace the local patch with a merge ref:
- The developer may have **pending work** in a fork clone. Run `pnpm fork:status <name>-fork` (exit 0 = safe to wipe, exit 1 = has custom work) before any operation that would destroy it. `fork:record`, `fork:clean`, and `fork:reset` already guard against this automatically
- `.pnpmfile.cjs` scans all `*-fork/` directories for a `config.json` and silently rewrites matching dependencies to `workspace:*` when clones exist. Local fork packages override published ones without any visible change in package.json files
- `pnpm install` has a side effect: if `<name>-fork/pins/manifest` exists but the clone does not, it automatically runs `fork-scripts/replay.sh` to rebuild from pins. This is intentional
- `fork-scripts/patch.sh` rewrites fork package exports to point at `.ts` source instead of `.d.ts`, then creates a deterministic git commit (fixed author/date) so record and replay produce the same HEAD hash. This is why imports from fork packages resolve to TypeScript source files — it is not a bug
- `fork-scripts/tsgo-filter.sh` is a bash wrapper around `tsgo` that filters out diagnostics originating from all `*-fork/` clone paths. Fork source may not satisfy this repo's strict tsconfig, so the wrapper suppresses those errors while still reporting errors in stack source
- `pnpm fork:save <name>-fork [description]` captures local work as a patch in `pins/`. Patches survive re-records and replays
- `pnpm fork:record` regenerates the fork workspace entries in `pnpm-workspace.yaml` (between `@generated` markers) from all `*-fork/config.json` files — manual edits to that section are overwritten on re-record

1. Delete the patch from `ccc-dev/pins/local/`
2. Add the PR number to `ccc:record` in `package.json` — order PRs by target branch from upstream to downstream, so each group merges cleanly onto its base before the next layer begins
3. Run `pnpm ccc:record`
4. Run `pnpm check:full` to verify the merge ref reproduces what the local patch achieved
### CCC upstream contributions

Work locally via `ccc-fork/` first. Only push to the fork (`phroi/ccc`) when changes are validated against the stack. Do not open PRs against `ckb-devrel/ccc` prematurely — keep changes on the fork until they are production-ready and the maintainer decides to upstream.

1. Develop and test in `ccc-fork/ccc/` on the `wip` branch
2. When ready, use `pnpm fork:push ccc-fork` to cherry-pick commits onto a PR branch
3. Push the PR branch to `phroi/ccc` for review
4. Add the PR number to `refs` in `ccc-fork/config.json` — order PRs by target branch from upstream to downstream, so each group merges cleanly onto its base before the next layer begins
5. Run `pnpm fork:record ccc-fork` and `pnpm check:full` to verify
6. Only open an upstream PR against `ckb-devrel/ccc` when the maintainer explicitly decides to upstream

## Reference Repos

Expand Down
28 changes: 15 additions & 13 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -54,17 +54,17 @@ graph TD;
click F "https://github.com/ickb/stack/tree/master/packages/sdk" "Go to @ickb/sdk"
```

## Develop CCC
## Develop with Forks

When `ccc-dev/pins/REFS` is committed, `pnpm install` automatically sets up the CCC local development environment on first run (by replaying pinned merges via `ccc-dev/replay.sh`). No manual setup step is needed — just clone and install:
When `<name>-fork/pins/manifest` is committed, `pnpm install` automatically sets up the local fork development environment on first run (by replaying pinned merges via `fork-scripts/replay.sh`). No manual setup step is needed — just clone and install:

```bash
git clone git@github.com:ickb/stack.git && cd stack && pnpm install
```

To redo the setup from scratch: `pnpm ccc:clean && pnpm install`.
To redo the setup from scratch: `pnpm fork:clean-all && pnpm install`.

See [ccc-dev/README.md](ccc-dev/README.md) for recording new pins, developing CCC PRs, and the full workflow.
See [ccc-fork/README.md](ccc-fork/README.md) for recording new pins, developing CCC PRs, and the full workflow.

## Reference

Expand All @@ -81,15 +81,17 @@ This clones two repos into the project root (both are git-ignored and made read-

## Developer Scripts

| Command | Description |
| ------------------- | ------------------------------------------------------------------------------------- |
| `pnpm coworker` | Launch an interactive AI Coworker session (full autonomy, opus model). |
| `pnpm coworker:ask` | One-shot AI query for scripting (sonnet model, stateless). Used by `pnpm ccc:record`. |
| `pnpm ccc:status` | Check if CCC clone matches pinned state. Exit 0 = safe to wipe. |
| `pnpm ccc:record` | Record CCC pins (clone, merge refs, build). Guarded against pending work. |
| `pnpm ccc:clean` | Remove CCC clone, keep pins (guarded). Re-replay on next `pnpm install`. |
| `pnpm ccc:reset` | Remove CCC clone and pins (guarded). Restores published CCC packages. |
| `pnpm check:full` | Wipe derived state and validate from scratch. Skips wipe if CCC has pending work. |
| Command | Description |
| -------------------------------- | --------------------------------------------------------------------------------- |
| `pnpm coworker` | Launch an interactive AI Coworker session (full autonomy, opus model). |
| `pnpm coworker:ask` | One-shot AI query for scripting (sonnet model, stateless). Used by `pnpm fork:record`. |
| `pnpm fork:status <name>-fork` | Check if fork clone matches pinned state. Exit 0 = safe to wipe. |
| `pnpm fork:record <name>-fork` | Record fork pins (clone, merge refs, build). Guarded against pending work. |
| `pnpm fork:save <name>-fork` | Capture local fork work as a patch in pins/ (survives re-records and replays). |
| `pnpm fork:push <name>-fork` | Cherry-pick commits from wip branch onto a PR branch for pushing to the fork. |
| `pnpm fork:clean <name>-fork` | Remove fork clone, keep pins (guarded). Re-replay on next `pnpm install`. |
| `pnpm fork:reset <name>-fork` | Remove fork clone and pins (guarded). Restores published packages. |
| `pnpm check:full` | Wipe derived state and validate from scratch. Skips wipe if forks have pending work. |

## Epoch Semantic Versioning

Expand Down
2 changes: 1 addition & 1 deletion apps/faucet/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
"scripts": {
"test": "vitest",
"test:ci": "vitest run",
"build": "[ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo",
"build": "bash ../../fork-scripts/tsgo-filter.sh",
"lint": "eslint ./src",
"clean": "rm -fr dist",
"clean:deep": "rm -fr dist node_modules",
Expand Down
2 changes: 1 addition & 1 deletion apps/interface/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
"type": "module",
"scripts": {
"dev": "vite",
"build": "([ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo) && vite build",
"build": "bash ../../fork-scripts/tsgo-filter.sh && vite build",
"preview": "vite preview",
"lint": "eslint ./src",
"clean": "rm -fr dist",
Expand Down
29 changes: 23 additions & 6 deletions apps/interface/vite.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,26 @@ import { defineConfig } from "vite";
import tailwindcss from "@tailwindcss/vite";
import react from "@vitejs/plugin-react";
import basicSsl from '@vitejs/plugin-basic-ssl'
import { existsSync } from "fs";
import { existsSync, readdirSync, readFileSync } from "fs";
import { join } from "path";

const hasCccSource = existsSync("../../ccc-dev/ccc");
// Detect if any managed fork clones are present
// Mirrors the discovery logic in the root .pnpmfile.cjs: a fork is a
// "*-fork/" directory at the repo root holding a config.json with a
// "cloneDir" field; the clone is "present" when that cloneDir exists.
const root = join(__dirname, "../..");
const hasForkSource = (() => {
  try {
    for (const entry of readdirSync(root, { withFileTypes: true })) {
      if (!entry.isDirectory() || !entry.name.endsWith("-fork")) continue;
      const configPath = join(root, entry.name, "config.json");
      if (!existsSync(configPath)) continue;
      const { cloneDir } = JSON.parse(readFileSync(configPath, "utf8"));
      if (!cloneDir) continue;
      if (existsSync(join(root, entry.name, cloneDir))) return true;
    }
  } catch (err) {
    // Best-effort: an unreadable root or bad config must not break the
    // build — log and fall back to treating forks as absent.
    console.error("Failed to detect fork sources:", err);
  }
  return false;
})();

// https://vitejs.dev/config/
export default defineConfig({
Expand All @@ -14,8 +31,8 @@ export default defineConfig({
plugins: [
tailwindcss(),
react({
// CCC source uses decorators — skip babel, let esbuild handle them
...(hasCccSource && { exclude: [/\/ccc-dev\/ccc\//] }),
// Fork source uses decorators — skip babel, let esbuild handle them
...(hasForkSource && { exclude: [/\w+-fork\/\w+\//] }),
babel: {
plugins: [["babel-plugin-react-compiler"]],
},
Expand All @@ -24,10 +41,10 @@ export default defineConfig({
],
build: {
rollupOptions: {
// CCC source uses `export { SomeType }` instead of `export type { SomeType }`.
// Fork source uses `export { SomeType }` instead of `export type { SomeType }`.
// esbuild strips the type declarations but can't strip value-looking re-exports,
// so rollup sees missing exports. Shimming is safe — they're never used at runtime.
...(hasCccSource && { shimMissingExports: true }),
...(hasForkSource && { shimMissingExports: true }),
},
},
});
2 changes: 1 addition & 1 deletion apps/sampler/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
"scripts": {
"test": "vitest",
"test:ci": "vitest run",
"build": "[ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo",
"build": "bash ../../fork-scripts/tsgo-filter.sh",
"lint": "eslint ./src",
"clean": "rm -fr dist",
"clean:deep": "rm -fr dist node_modules",
Expand Down
Loading