diff --git a/.changeset/generalize-fork-management.md b/.changeset/generalize-fork-management.md new file mode 100644 index 0000000..0c21aab --- /dev/null +++ b/.changeset/generalize-fork-management.md @@ -0,0 +1,4 @@ +--- +--- + +Generalize ccc-dev/ into multi-repo fork management tool diff --git a/.pnpmfile.cjs b/.pnpmfile.cjs index ce3cee7..c2ae705 100644 --- a/.pnpmfile.cjs +++ b/.pnpmfile.cjs @@ -1,65 +1,95 @@ // .pnpmfile.cjs — Two jobs: // -// 1. Auto-replay: clone + patch CCC on first `pnpm install` (if pins exist). +// 1. Auto-replay: clone + patch managed forks on first `pnpm install` (if pins exist). // replay.sh handles git clone, merge replay, lockfile removal, and source // patching (jq exports rewrite). It does NOT run pnpm install -// internally — the root workspace install handles CCC deps alongside +// internally — the root workspace install handles fork deps alongside // everything else. // -// 2. readPackage hook: rewrite CCC deps from catalog ranges to workspace:*. -// CCC packages live in pnpm-workspace.yaml, so you'd expect pnpm to link +// 2. readPackage hook: rewrite fork deps from catalog ranges to workspace:*. +// Fork packages live in pnpm-workspace.yaml, so you'd expect pnpm to link // them automatically. It doesn't — catalog: specifiers resolve to a semver // range (e.g. ^1.12.2) BEFORE workspace linking is considered, so pnpm // fetches from the registry even with link-workspace-packages = true. // This hook intercepts every package.json at resolution time and forces -// workspace:* for any dep whose name matches a local CCC package. -// When CCC is not cloned, hasCcc is false and the hook is a no-op, so -// the catalog range falls through to the registry normally. +// workspace:* for any dep whose name matches a local fork package. +// When no forks are cloned, the hook is a no-op, so catalog ranges fall +// through to the registry normally. 
-const { execSync } = require("child_process"); +const { execFileSync } = require("child_process"); const { existsSync, readdirSync, readFileSync } = require("fs"); const { join } = require("path"); -const cccCache = join(__dirname, "ccc-dev", "ccc"); -const cccRefs = join(__dirname, "ccc-dev", "pins", "REFS"); +// Discover all *-fork/ directories with config.json +const forkDirs = []; +for (const entry of readdirSync(__dirname, { withFileTypes: true })) { + if (!entry.isDirectory() || !entry.name.endsWith("-fork")) continue; + const configPath = join(__dirname, entry.name, "config.json"); + if (existsSync(configPath)) { + const config = JSON.parse(readFileSync(configPath, "utf8")); + if (!config.cloneDir) continue; + forkDirs.push({ + name: entry.name, + dir: join(__dirname, entry.name), + config, + }); + } +} -// 1. Auto-replay CCC pins on first pnpm install -// Skip when ccc:record is running — it rebuilds pins from scratch. +// 1. Auto-replay fork pins on first pnpm install +// Skip when fork:record is running — it rebuilds pins from scratch. // Detect via argv since pnpmfile loads before npm_lifecycle_event is set. -const isCccRecord = process.argv.some((a) => a === "ccc:record"); -if (!isCccRecord && !existsSync(cccCache) && existsSync(cccRefs)) { - try { - execSync("bash ccc-dev/replay.sh", { - cwd: __dirname, - stdio: ["ignore", "pipe", "pipe"], - }); - } catch (err) { - process.stderr.write("Replaying CCC pins…\n"); - process.stderr.write(err.stdout?.toString() ?? ""); - process.stderr.write(err.stderr?.toString() ?? 
""); - throw err; +const isRecord = process.argv.some((a) => a === "fork:record"); +if (!isRecord) { + for (const fork of forkDirs) { + const cloneDir = join(fork.dir, fork.config.cloneDir); + const hasPins = existsSync(join(fork.dir, "pins", "manifest")); + if (!existsSync(cloneDir) && hasPins) { + try { + execFileSync("bash", ["fork-scripts/replay.sh", fork.name], { + cwd: __dirname, + stdio: ["ignore", "pipe", "pipe"], + }); + } catch (err) { + process.stderr.write(`Replaying ${fork.name} pins…\n`); + process.stderr.write(err.stdout?.toString() ?? ""); + process.stderr.write(err.stderr?.toString() ?? ""); + throw err; + } + } } } -// 2. Discover local CCC packages and build the override map -const cccPkgs = join(cccCache, "packages"); +// 2. Discover local fork packages and build the override map const localOverrides = {}; -if (existsSync(cccPkgs)) { - for (const dir of readdirSync(cccPkgs, { withFileTypes: true })) { - if (!dir.isDirectory()) continue; - const pkgJsonPath = join(cccPkgs, dir.name, "package.json"); - if (!existsSync(pkgJsonPath)) continue; - const { name } = JSON.parse(readFileSync(pkgJsonPath, "utf8")); - if (name) { - localOverrides[name] = "workspace:*"; +for (const fork of forkDirs) { + const cloneDir = join(fork.dir, fork.config.cloneDir); + if (!existsSync(cloneDir)) continue; + const includes = fork.config.workspace?.include ?? []; + const excludes = new Set(fork.config.workspace?.exclude ?? []); + for (const pattern of includes) { + // Simple glob: only supports trailing /* (e.g. 
"packages/*") + const base = pattern.replace(/\/\*$/, ""); + const pkgsRoot = join(cloneDir, base); + if (!existsSync(pkgsRoot)) continue; + for (const dir of readdirSync(pkgsRoot, { withFileTypes: true })) { + if (!dir.isDirectory()) continue; + const relPath = `${base}/${dir.name}`; + if (excludes.has(relPath)) continue; + const pkgJsonPath = join(pkgsRoot, dir.name, "package.json"); + if (!existsSync(pkgJsonPath)) continue; + const { name } = JSON.parse(readFileSync(pkgJsonPath, "utf8")); + if (name) { + localOverrides[name] = "workspace:*"; + } } } } -const hasCcc = Object.keys(localOverrides).length > 0; +const hasOverrides = Object.keys(localOverrides).length > 0; function readPackage(pkg) { - if (!hasCcc) return pkg; + if (!hasOverrides) return pkg; for (const field of [ "dependencies", diff --git a/AGENTS.md b/AGENTS.md index eea35f3..516dbe7 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -18,32 +18,45 @@ ## PR Workflow -1. **Routine Pre-PR Validation**: `pnpm check:full`, it wipes derived state and regenerates from scratch. If `ccc-dev/ccc/` has pending work, the wipe is skipped to prevent data loss — re-record or push CCC changes first for a clean validation +1. **Routine Pre-PR Validation**: `pnpm check:full`, it wipes derived state and regenerates from scratch. If any fork clone has pending work, the wipe is skipped to prevent data loss — re-record or push fork changes first for a clean validation 2. **Open a PR**: Run `pnpm changeset` to generate a changeset entry, then push the branch and present a clickable markdown link `[title](url)` where the URL is a GitHub compare URL (`quick_pull=1`). Base branch is `master`. Prefill "title" (concise, under 70 chars) and "body" (markdown with ## Why and ## Changes sections) 3. **Fetch PR review comments**: Use the GitHub REST API via curl. Fetch all three comment types (issue comments, reviews, and inline comments). Categorize feedback by actionability (action required / informational), not by source (human / bot). 
Reviewers reply asynchronously — poll every minute until comments arrive -## CCC Local Development (ccc-dev/) +## Fork Management (fork-scripts/ + *-fork/) -The `ccc-dev/` system uses a record/replay mechanism for deterministic builds of a local CCC fork: +The `fork-scripts/` system uses a record/replay mechanism for deterministic builds of external repo forks. Each fork lives in a `-fork/` directory with a `config.json` specifying upstream URL, fork URL, merge refs, and workspace config. Scripts in `fork-scripts/` are generic and accept the fork directory as their first argument. -- `ccc-dev/pins/` is **committed** to git (base SHAs, merge refs, conflict resolutions, local patches), regenerated by `pnpm ccc:record` -- `ccc-dev/ccc/` is **not in git** — it is rebuilt from pins on `pnpm install` -- The developer may have **pending work** in `ccc-dev/ccc/`. Run `pnpm ccc:status` (exit 0 = safe to wipe, exit 1 = has custom work) before any operation that would destroy it. `pnpm ccc:record`, `pnpm ccc:clean`, and `pnpm ccc:reset` already guard against this automatically -- `.pnpmfile.cjs` silently rewrites all `@ckb-ccc/*` dependencies to `workspace:*` when `ccc-dev/ccc/` exists. Local CCC packages override published ones without any visible change in package.json files -- `pnpm install` has a side effect: if `ccc-dev/pins/REFS` exists but `ccc-dev/ccc/` does not, it automatically runs `ccc-dev/replay.sh` to rebuild CCC from pins. This is intentional -- `ccc-dev/patch.sh` rewrites CCC package exports to point at `.ts` source instead of `.d.ts`, then creates a deterministic git commit (fixed author/date) so record and replay produce the same `pins/HEAD` hash. This is why imports from `@ckb-ccc/*` resolve to TypeScript source files inside `node_modules` — it is not a bug -- `ccc-dev/tsgo-filter.sh` is a bash wrapper around `tsgo` that filters out diagnostics originating from `ccc-dev/ccc/`. 
CCC source does not satisfy this repo's strict tsconfig (`verbatimModuleSyntax`, `noUncheckedIndexedAccess`, `noImplicitOverride`), so the wrapper suppresses those errors while still reporting errors in stack source +### Per-fork directory structure -### Opening a CCC upstream PR +Each `-fork/` contains: +- `config.json` — upstream URL, fork URL, refs to merge, cloneDir, workspace include/exclude +- `pins/` — **committed** to git (manifest + counted resolutions + local patches), regenerated by `pnpm fork:record -fork` + - `pins/HEAD` — expected final SHA after full replay + - `pins/manifest` — base SHA + merge refs (TSV, one per line) + - `pins/res-N.resolution` — conflict resolution for merge step N (counted format: `--- path` file headers, `CONFLICT ours=N base=M theirs=K resolution=R` conflict headers followed by R resolution lines; parser is purely positional — reads counts and skips lines, never inspects content) + - `pins/local-*.patch` — local development patches (applied after merges + patch.sh) +- `/` — **not in git** — rebuilt from pins on `pnpm install` -In `ccc-dev/ccc/`, branch off `origin/master` (or relevant branch), push to fork (`phroi/ccc`), open PR against `ckb-devrel/ccc`. Before pushing, run the CCC CI steps (`ccc-dev/ccc/.github/workflows/check.yaml`) with `CI=true`. +### Key behaviors -Once the PR is open, replace the local patch with a merge ref: +- The developer may have **pending work** in a fork clone. Run `pnpm fork:status -fork` (exit 0 = safe to wipe, exit 1 = has custom work) before any operation that would destroy it. `fork:record`, `fork:clean`, and `fork:reset` already guard against this automatically +- `.pnpmfile.cjs` scans all `*-fork/config.json` directories and silently rewrites matching dependencies to `workspace:*` when clones exist. 
Local fork packages override published ones without any visible change in package.json files +- `pnpm install` has a side effect: if `-fork/pins/manifest` exists but the clone does not, it automatically runs `fork-scripts/replay.sh` to rebuild from pins. This is intentional +- `fork-scripts/patch.sh` rewrites fork package exports to point at `.ts` source instead of `.d.ts`, then creates a deterministic git commit (fixed author/date) so record and replay produce the same HEAD hash. This is why imports from fork packages resolve to TypeScript source files — it is not a bug +- `fork-scripts/tsgo-filter.sh` is a bash wrapper around `tsgo` that filters out diagnostics originating from all `*-fork/` clone paths. Fork source may not satisfy this repo's strict tsconfig, so the wrapper suppresses those errors while still reporting errors in stack source +- `pnpm fork:save -fork [description]` captures local work as a patch in `pins/`. Patches survive re-records and replays +- `pnpm fork:record` regenerates the fork workspace entries in `pnpm-workspace.yaml` (between `@generated` markers) from all `*-fork/config.json` files — manual edits to that section are overwritten on re-record -1. Delete the patch from `ccc-dev/pins/local/` -2. Add the PR number to `ccc:record` in `package.json` — order PRs by target branch from upstream to downstream, so each group merges cleanly onto its base before the next layer begins -3. Run `pnpm ccc:record` -4. Run `pnpm check:full` to verify the merge ref reproduces what the local patch achieved +### CCC upstream contributions + +Work locally via `ccc-fork/` first. Only push to the fork (`phroi/ccc`) when changes are validated against the stack. Do not open PRs against `ckb-devrel/ccc` prematurely — keep changes on the fork until they are production-ready and the maintainer decides to upstream. + +1. Develop and test in `ccc-fork/ccc/` on the `wip` branch +2. When ready, use `pnpm fork:push ccc-fork` to cherry-pick commits onto a PR branch +3. 
Push the PR branch to `phroi/ccc` for review +4. Add the PR number to `refs` in `ccc-fork/config.json` — order PRs by target branch from upstream to downstream, so each group merges cleanly onto its base before the next layer begins +5. Run `pnpm fork:record ccc-fork` and `pnpm check:full` to verify +6. Only open an upstream PR against `ckb-devrel/ccc` when the maintainer explicitly decides to upstream ## Reference Repos diff --git a/README.md b/README.md index 62be57a..f8ad029 100644 --- a/README.md +++ b/README.md @@ -54,17 +54,17 @@ graph TD; click F "https://github.com/ickb/stack/tree/master/packages/sdk" "Go to @ickb/sdk" ``` -## Develop CCC +## Develop with Forks -When `ccc-dev/pins/REFS` is committed, `pnpm install` automatically sets up the CCC local development environment on first run (by replaying pinned merges via `ccc-dev/replay.sh`). No manual setup step is needed — just clone and install: +When `-fork/pins/manifest` is committed, `pnpm install` automatically sets up the local fork development environment on first run (by replaying pinned merges via `fork-scripts/replay.sh`). No manual setup step is needed — just clone and install: ```bash git clone git@github.com:ickb/stack.git && cd stack && pnpm install ``` -To redo the setup from scratch: `pnpm ccc:clean && pnpm install`. +To redo the setup from scratch: `pnpm fork:clean-all && pnpm install`. -See [ccc-dev/README.md](ccc-dev/README.md) for recording new pins, developing CCC PRs, and the full workflow. +See [ccc-fork/README.md](ccc-fork/README.md) for recording new pins, developing CCC PRs, and the full workflow. ## Reference @@ -81,15 +81,17 @@ This clones two repos into the project root (both are git-ignored and made read- ## Developer Scripts -| Command | Description | -| ------------------- | ------------------------------------------------------------------------------------- | -| `pnpm coworker` | Launch an interactive AI Coworker session (full autonomy, opus model). 
| -| `pnpm coworker:ask` | One-shot AI query for scripting (sonnet model, stateless). Used by `pnpm ccc:record`. | -| `pnpm ccc:status` | Check if CCC clone matches pinned state. Exit 0 = safe to wipe. | -| `pnpm ccc:record` | Record CCC pins (clone, merge refs, build). Guarded against pending work. | -| `pnpm ccc:clean` | Remove CCC clone, keep pins (guarded). Re-replay on next `pnpm install`. | -| `pnpm ccc:reset` | Remove CCC clone and pins (guarded). Restores published CCC packages. | -| `pnpm check:full` | Wipe derived state and validate from scratch. Skips wipe if CCC has pending work. | +| Command | Description | +| -------------------------------- | --------------------------------------------------------------------------------- | +| `pnpm coworker` | Launch an interactive AI Coworker session (full autonomy, opus model). | +| `pnpm coworker:ask` | One-shot AI query for scripting (sonnet model, stateless). Used by fork:record. | +| `pnpm fork:status -fork` | Check if fork clone matches pinned state. Exit 0 = safe to wipe. | +| `pnpm fork:record -fork` | Record fork pins (clone, merge refs, build). Guarded against pending work. | +| `pnpm fork:save -fork` | Capture local fork work as a patch in pins/ (survives re-records and replays). | +| `pnpm fork:push -fork` | Cherry-pick commits from wip branch onto a PR branch for pushing to the fork. | +| `pnpm fork:clean -fork` | Remove fork clone, keep pins (guarded). Re-replay on next `pnpm install`. | +| `pnpm fork:reset -fork` | Remove fork clone and pins (guarded). Restores published packages. | +| `pnpm check:full` | Wipe derived state and validate from scratch. 
Skips wipe if forks have pending work.| ## Epoch Semantic Versioning diff --git a/apps/faucet/package.json b/apps/faucet/package.json index b70cecc..fc82e4e 100644 --- a/apps/faucet/package.json +++ b/apps/faucet/package.json @@ -31,7 +31,7 @@ "scripts": { "test": "vitest", "test:ci": "vitest run", - "build": "[ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo", + "build": "bash ../../fork-scripts/tsgo-filter.sh", "lint": "eslint ./src", "clean": "rm -fr dist", "clean:deep": "rm -fr dist node_modules", diff --git a/apps/interface/package.json b/apps/interface/package.json index 913078f..65d5435 100644 --- a/apps/interface/package.json +++ b/apps/interface/package.json @@ -13,7 +13,7 @@ "type": "module", "scripts": { "dev": "vite", - "build": "([ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo) && vite build", + "build": "bash ../../fork-scripts/tsgo-filter.sh && vite build", "preview": "vite preview", "lint": "eslint ./src", "clean": "rm -fr dist", diff --git a/apps/interface/vite.config.ts b/apps/interface/vite.config.ts index 28994b2..16a43df 100644 --- a/apps/interface/vite.config.ts +++ b/apps/interface/vite.config.ts @@ -2,9 +2,26 @@ import { defineConfig } from "vite"; import tailwindcss from "@tailwindcss/vite"; import react from "@vitejs/plugin-react"; import basicSsl from '@vitejs/plugin-basic-ssl' -import { existsSync } from "fs"; +import { existsSync, readdirSync, readFileSync } from "fs"; +import { join } from "path"; -const hasCccSource = existsSync("../../ccc-dev/ccc"); +// Detect if any managed fork clones are present +const root = join(__dirname, "../.."); +const hasForkSource = (() => { + try { + for (const entry of readdirSync(root, { withFileTypes: true })) { + if (!entry.isDirectory() || !entry.name.endsWith("-fork")) continue; + const configPath = join(root, entry.name, "config.json"); + if (!existsSync(configPath)) continue; + const { cloneDir } = JSON.parse(readFileSync(configPath, "utf8")); + 
if (!cloneDir) continue; + if (existsSync(join(root, entry.name, cloneDir))) return true; + } + } catch (err) { + console.error("Failed to detect fork sources:", err); + } + return false; +})(); // https://vitejs.dev/config/ export default defineConfig({ @@ -14,8 +31,8 @@ export default defineConfig({ plugins: [ tailwindcss(), react({ - // CCC source uses decorators — skip babel, let esbuild handle them - ...(hasCccSource && { exclude: [/\/ccc-dev\/ccc\//] }), + // Fork source uses decorators — skip babel, let esbuild handle them + ...(hasForkSource && { exclude: [/\w+-fork\/\w+\//] }), babel: { plugins: [["babel-plugin-react-compiler"]], }, @@ -24,10 +41,10 @@ export default defineConfig({ ], build: { rollupOptions: { - // CCC source uses `export { SomeType }` instead of `export type { SomeType }`. + // Fork source uses `export { SomeType }` instead of `export type { SomeType }`. // esbuild strips the type declarations but can't strip value-looking re-exports, // so rollup sees missing exports. Shimming is safe — they're never used at runtime. - ...(hasCccSource && { shimMissingExports: true }), + ...(hasForkSource && { shimMissingExports: true }), }, }, }); diff --git a/apps/sampler/package.json b/apps/sampler/package.json index 86f3db3..8d319e1 100644 --- a/apps/sampler/package.json +++ b/apps/sampler/package.json @@ -31,7 +31,7 @@ "scripts": { "test": "vitest", "test:ci": "vitest run", - "build": "[ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo", + "build": "bash ../../fork-scripts/tsgo-filter.sh", "lint": "eslint ./src", "clean": "rm -fr dist", "clean:deep": "rm -fr dist node_modules", diff --git a/ccc-dev/README.md b/ccc-dev/README.md deleted file mode 100644 index a920368..0000000 --- a/ccc-dev/README.md +++ /dev/null @@ -1,128 +0,0 @@ -# CCC Local Development - -## Why - -CCC has unreleased branches (`releases/next`, `releases/udt`) that this project depends on. 
This system deterministically merges them locally so the monorepo can build against unpublished CCC changes until they're published upstream. - -## How it works - -1. **Auto-replay** — `.pnpmfile.cjs` runs at `pnpm install` time. If `ccc-dev/pins/REFS` exists but `ccc-dev/ccc/` doesn't, it auto-triggers `replay.sh` to clone and set up CCC. - -2. **Workspace override** — When `ccc-dev/ccc/` is present, `.pnpmfile.cjs` auto-discovers all CCC packages and rewrites `@ckb-ccc/*` dependencies to `workspace:*` — no manual `pnpm.overrides` needed. This is necessary because `catalog:` specifiers resolve to a semver range _before_ pnpm considers workspace linking — even with `link-workspace-packages = true`, pnpm fetches from the registry without this hook. When CCC is not cloned, the hook is a no-op and deps resolve from the registry normally. - -3. **Source-level types** — `patch.sh` (called by both `record.sh` and `replay.sh`) patches CCC's `package.json` exports to point TypeScript at `.ts` source instead of built `.d.ts`, then creates a deterministic git commit (fixed author/date) so record and replay produce the same `pins/HEAD` hash. This gives real-time type feedback when editing across the CCC/stack boundary — changes in CCC source are immediately visible to stack packages without rebuilding. - -4. **Diagnostic filtering** — `ccc-dev/tsgo-filter.sh` is a bash wrapper around `tsgo` used by stack package builds. Because CCC `.ts` source is type-checked under the stack's stricter tsconfig (`verbatimModuleSyntax`, `noImplicitOverride`, `noUncheckedIndexedAccess`), plain `tsgo` would report hundreds of CCC diagnostics that aren't real integration errors. The wrapper emits output normally and only fails on diagnostics from stack source files. When CCC is not cloned, packages fall back to plain `tsgo`. - -## `pins/` format - -``` -ccc-dev/pins/ - REFS # Line 1: base SHA. 
Lines 2+: "SHA refname" (one per merge) - HEAD # Expected final SHA after all merges + patches (integrity check) - resolutions/ # Saved conflict resolution files, organized by merge index - 1/path/to/file # Resolved file for merge step 1 - 2/path/to/file # Resolved file for merge step 2 - local/ # Git patch files applied after merges + source patching - 001-name.patch # Applied in sorted filename order with deterministic commits -``` - -## Recording - -Recording captures the current upstream state and any conflict resolutions: - -```bash -pnpm ccc:record -``` - -This runs `ccc-dev/record.sh` which clones CCC, merges the configured refs, uses AI Coworker to resolve any conflicts, patches for source-level type resolution, and writes `pins/`. Commit the resulting `ccc-dev/pins/` directory so other contributors get the same build. - -The `ccc:record` script in `package.json` is preconfigured with the current refs: - -```json -{ - "scripts": { - "ccc:record": "bash ccc-dev/record.sh releases/next releases/udt" - } -} -``` - -### Ref auto-detection - -`record.sh` accepts any number of refs and auto-detects their type: - -```bash -# Usage: ccc-dev/record.sh -# - ^[0-9a-f]{7,40}$ → commit SHA -# - ^[0-9]+$ → GitHub PR number -# - everything else → branch name - -# Examples: -bash ccc-dev/record.sh releases/next releases/udt -bash ccc-dev/record.sh 268 releases/next -bash ccc-dev/record.sh abc1234 -``` - -Refs are merged sequentially onto a `wip` branch, then CCC is built. On merge conflicts, the script auto-resolves them using AI Coworker. - -## Local patches - -Local patches (`pins/local/*.patch`) are project-specific changes applied to CCC on top of the merged upstream refs. They are committed to git alongside other pins and applied deterministically during both recording and replay. - -Use local patches for CCC changes that this project needs but that haven't been merged upstream yet (e.g., a DAO safety check contributed via a CCC PR). - -To create a local patch: - -1. 
Make changes in `ccc-dev/ccc/` on the `wip` branch -2. Generate a patch: `git -C ccc-dev/ccc diff > ccc-dev/pins/local/001-description.patch` -3. Re-record to verify: `pnpm ccc:record` - -Patches are applied in sorted filename order. Use numeric prefixes (`001-`, `002-`) to control ordering. Both `record.sh` and `replay.sh` apply local patches with deterministic git identity and timestamps so the resulting `pins/HEAD` hash is reproducible. - -## Developing CCC PRs - -### Setup - -Record upstream refs alongside a PR: - -```bash -pnpm ccc:record 666 -``` - -This merges `releases/next`, `releases/udt`, and PR #666 onto the `wip` branch. -You stay on `wip` — all upstream + PR changes are available. VS Code sees the full merged state with diagnostics using CCC's own tsconfig rules. - -### Development loop - -1. **Edit code** on `wip` in `ccc-dev/ccc/`. Commit normally. -2. **Rebuild**: `pnpm build` (builds stack packages with CCC type integration). -3. **Run tests**: `pnpm test` - -### Pushing your changes - -Extract your commits (those after the recording) onto the PR branch: - -```bash -pnpm ccc:push -cd ccc-dev/ccc -git remote add fork https://github.com/YOUR_USER/ccc.git -git push fork pr-666:your-branch-name -git checkout wip # return to development -``` - -## Switching modes - -**Check for pending work:** `pnpm ccc:status` — exit 0 if `ccc-dev/ccc/` matches pinned state (safe to wipe), exit 1 otherwise. - -**Local CCC (default when `pins/` is committed):** `pnpm install` auto-replays pins and overrides deps. - -**Published CCC:** `pnpm ccc:reset && pnpm install` — removes clone and pins, restores published packages. - -**Re-record:** `pnpm ccc:record` wipes and re-records everything from scratch. Aborts if `ccc-dev/ccc/` has pending work. - -**Force re-replay:** `pnpm ccc:clean && pnpm install` — removes clone but keeps pins, replays on next install. 
- -## Requirements - -- **Recording** (`pnpm ccc:record`): Requires the AI Coworker CLI (installed as a devDependency; invoked via `pnpm coworker:ask`) for automated conflict resolution (only when merging refs). -- **Replay** (`pnpm install`): No extra tools needed — works for any contributor with just pnpm. diff --git a/ccc-dev/pins/HEAD b/ccc-dev/pins/HEAD deleted file mode 100644 index 2112815..0000000 --- a/ccc-dev/pins/HEAD +++ /dev/null @@ -1 +0,0 @@ -0c76ce0de0a3306dd7c466a933d43f44b32f7450 diff --git a/ccc-dev/pins/REFS b/ccc-dev/pins/REFS deleted file mode 100644 index eb53521..0000000 --- a/ccc-dev/pins/REFS +++ /dev/null @@ -1,4 +0,0 @@ -50d657beea36de3ebbd80ee88209842644daef34 -8e63e3a21f1824445b2c339ffe4927a2a8af1bcf 359 -0e18748fb139d71338c109d71aae5b149cb58af3 releases/next -0ad2a5f6305d4964b00394bc8a6ed50136fdffa8 releases/udt diff --git a/ccc-dev/pins/resolutions/3/packages/core/src/ckb/transactionErrors.ts b/ccc-dev/pins/resolutions/3/packages/core/src/ckb/transactionErrors.ts deleted file mode 100644 index 692af12..0000000 --- a/ccc-dev/pins/resolutions/3/packages/core/src/ckb/transactionErrors.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { fixedPointToString } from "../fixedPoint/index.js"; -import { Num, numFrom, NumLike } from "../num/index.js"; -import { Script, ScriptLike } from "./script.js"; - -export class ErrorTransactionInsufficientCapacity extends Error { - public readonly amount: Num; - public readonly isForChange: boolean; - - constructor( - amountLike: NumLike, - reason?: { - isForChange?: boolean; - }, - ) { - const amount = numFrom(amountLike); - const isForChange = reason?.isForChange ?? false; - super( - `Insufficient CKB, need ${fixedPointToString(amount)} extra CKB${isForChange ? 
" for the change cell" : ""}`, - ); - this.amount = amount; - this.isForChange = isForChange; - } -} - -export class ErrorNervosDaoOutputLimit extends Error { - public readonly count: number; - public readonly limit: number; - - constructor(count: number) { - super( - `NervosDAO transaction has ${count} output cells, exceeding the limit of 64`, - ); - this.count = count; - this.limit = 64; - } -} - -/** - * @deprecated Use `ErrorUdtInsufficientCoin` from `@ckb-ccc/udt` instead. - */ -export class ErrorTransactionInsufficientCoin extends Error { - public readonly amount: Num; - public readonly type: Script; - - constructor(amountLike: NumLike, typeLike: ScriptLike) { - const amount = numFrom(amountLike); - const type = Script.from(typeLike); - super(`Insufficient coin, need ${amount} extra coin`); - this.amount = amount; - this.type = type; - } -} diff --git a/ccc-dev/pins/resolutions/3/vitest.config.mts b/ccc-dev/pins/resolutions/3/vitest.config.mts deleted file mode 100644 index f1fb8be..0000000 --- a/ccc-dev/pins/resolutions/3/vitest.config.mts +++ /dev/null @@ -1,17 +0,0 @@ -import { defineConfig, coverageConfigDefaults } from "vitest/config"; - -const packages = ["packages/core", "packages/did-ckb", "packages/type-id", "packages/udt"]; - -export default defineConfig({ - test: { - projects: packages, - coverage: { - include: packages, - exclude: [ - "**/dist/**", - "**/dist.commonjs/**", - ...coverageConfigDefaults.exclude, - ], - }, - }, -}); diff --git a/ccc-dev/record.sh b/ccc-dev/record.sh deleted file mode 100644 index f243cb8..0000000 --- a/ccc-dev/record.sh +++ /dev/null @@ -1,173 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Usage: ccc-dev/record.sh [ref ...] 
-# ref auto-detection: -# ^[0-9a-f]{7,40}$ → commit SHA -# ^[0-9]+$ → GitHub PR number -# everything else → branch name -# No refs → just clone, no merges - -REPO_URL="https://github.com/ckb-devrel/ccc.git" -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_DIR="$SCRIPT_DIR/ccc" -PATCH_DIR="$SCRIPT_DIR/pins" - -# Guard: abort if ccc-dev/ccc/ has pending work -if ! bash "$SCRIPT_DIR/status.sh" >/dev/null 2>&1; then - bash "$SCRIPT_DIR/status.sh" >&2 - echo "" >&2 - echo "ERROR: ccc-dev/ccc/ has pending work that would be lost." >&2 - echo "Push with 'pnpm ccc:push', commit, or remove ccc-dev/ccc/ manually." >&2 - exit 1 -fi - -# Always start fresh — wipe previous clone and pins (preserve local patches) -LOCAL_BAK="" -if [ -d "$PATCH_DIR/local" ]; then - LOCAL_BAK=$(mktemp -d) - cp -r "$PATCH_DIR/local" "$LOCAL_BAK/local" -fi -rm -rf "$REPO_DIR" "$PATCH_DIR" - -cleanup_on_error() { - rm -rf "$REPO_DIR" "$PATCH_DIR" - # Restore local patches from backup so they aren't lost on failure - if [ -n "${LOCAL_BAK:-}" ] && [ -d "$LOCAL_BAK/local" ]; then - mkdir -p "$PATCH_DIR" - cp -r "$LOCAL_BAK/local" "$PATCH_DIR/local" - rm -rf "$LOCAL_BAK" - fi - echo "FAILED — cleaned up ccc-dev/ccc/ and pins/ (local patches preserved)" >&2 -} -trap cleanup_on_error ERR - -git clone --filter=blob:none "$REPO_URL" "$REPO_DIR" - -# Record base SHA before any merges -BASE_SHA=$(git -C "$REPO_DIR" rev-parse HEAD) -git -C "$REPO_DIR" checkout -b wip - -MERGE_IDX=0 - -for REF in "$@"; do - MERGE_IDX=$((MERGE_IDX + 1)) - - # Pin identity and dates so merge commits are deterministic across runs - export GIT_AUTHOR_NAME="ci" GIT_AUTHOR_EMAIL="ci@local" - export GIT_COMMITTER_NAME="ci" GIT_COMMITTER_EMAIL="ci@local" - export GIT_AUTHOR_DATE="@$MERGE_IDX +0000" - export GIT_COMMITTER_DATE="@$MERGE_IDX +0000" - - # Case A: full (7-40 char) hex commit SHA - if [[ $REF =~ ^[0-9a-f]{7,40}$ ]]; then - git -C "$REPO_DIR" fetch --depth=1 origin "$REF" - MERGE_REF="FETCH_HEAD" - - # Case B: all digits → 
GitHub pull request number - elif [[ $REF =~ ^[0-9]+$ ]]; then - git -C "$REPO_DIR" fetch origin "pull/$REF/head:pr-$REF" - MERGE_REF="pr-$REF" - - # Case C: branch name - else - git -C "$REPO_DIR" fetch origin "refs/heads/$REF:$REF" - MERGE_REF="$REF" - fi - - # Capture the resolved SHA for this ref before merging - MERGE_SHA=$(git -C "$REPO_DIR" rev-parse "$MERGE_REF") - - # Use explicit merge message so record and replay produce identical commits - MERGE_MSG="Merge $REF into wip" - - if ! git -C "$REPO_DIR" merge --no-ff -m "$MERGE_MSG" "$MERGE_REF"; then - # Capture conflicted file list BEFORE resolution - mapfile -t CONFLICTED < <(git -C "$REPO_DIR" diff --name-only --diff-filter=U) - - # Resolve each conflicted file with AI Coworker - for FILE in "${CONFLICTED[@]}"; do - pnpm --silent coworker:ask \ - -p "You are a merge conflict resolver. Output ONLY the resolved file content. Merge both sides meaningfully. No explanations, no code fences, no extra text." \ - < "$REPO_DIR/$FILE" > "$REPO_DIR/${FILE}.resolved" - - # Validate resolution - if [ ! 
-s "$REPO_DIR/${FILE}.resolved" ]; then - echo "ERROR: AI Coworker returned empty resolution for $FILE" >&2 - exit 1 - fi - if grep -q '<<<<<<<' "$REPO_DIR/${FILE}.resolved"; then - echo "ERROR: Conflict markers remain in $FILE after resolution" >&2 - exit 1 - fi - - mv "$REPO_DIR/${FILE}.resolved" "$REPO_DIR/$FILE" - git -C "$REPO_DIR" add "$FILE" - done - - # Overwrite MERGE_MSG so merge --continue uses our deterministic message - echo "$MERGE_MSG" > "$REPO_DIR/.git/MERGE_MSG" - GIT_EDITOR=true git -C "$REPO_DIR" merge --continue - - # Save resolved versions of conflicted files - for FILE in "${CONFLICTED[@]}"; do - DEST="$PATCH_DIR/resolutions/$MERGE_IDX/$FILE" - mkdir -p "$(dirname "$DEST")" - cp "$REPO_DIR/$FILE" "$DEST" - done - fi - - # Append merge SHA + ref name to REFS - mkdir -p "$PATCH_DIR" - echo "$MERGE_SHA $REF" >> "$PATCH_DIR/REFS" -done - -bash "$SCRIPT_DIR/patch.sh" "$REPO_DIR" "$MERGE_IDX" - -# Restore preserved local patches -if [ -n "$LOCAL_BAK" ] && [ -d "$LOCAL_BAK/local" ]; then - mkdir -p "$PATCH_DIR" - cp -r "$LOCAL_BAK/local" "$PATCH_DIR/local" - rm -rf "$LOCAL_BAK" -fi - -# Apply local patches (sorted by filename for deterministic order) -LOCAL_DIR="$PATCH_DIR/local" -if [ -d "$LOCAL_DIR" ]; then - LOCAL_IDX=$((MERGE_IDX + 2)) - for PATCH_FILE in $(find "$LOCAL_DIR" -name '*.patch' | sort); do - PATCH_NAME=$(basename "$PATCH_FILE" .patch) - echo "Applying local patch: $PATCH_NAME" - - export GIT_AUTHOR_NAME="ci" GIT_AUTHOR_EMAIL="ci@local" - export GIT_COMMITTER_NAME="ci" GIT_COMMITTER_EMAIL="ci@local" - export GIT_AUTHOR_DATE="@$LOCAL_IDX +0000" - export GIT_COMMITTER_DATE="@$LOCAL_IDX +0000" - - git -C "$REPO_DIR" apply "$PATCH_FILE" - git -C "$REPO_DIR" add -A - git -C "$REPO_DIR" commit -m "$PATCH_NAME" - LOCAL_IDX=$((LOCAL_IDX + 1)) - done -fi - -# Prepend BASE SHA as first line of REFS -mkdir -p "$PATCH_DIR" -if [ -f "$PATCH_DIR/REFS" ]; then - REFS_CONTENT="$BASE_SHA"$'\n'"$(cat "$PATCH_DIR/REFS")" -else - REFS_CONTENT="$BASE_SHA" 
-fi -echo "$REFS_CONTENT" > "$PATCH_DIR/REFS" - -# Save HEAD SHA for replay integrity verification -git -C "$REPO_DIR" rev-parse HEAD > "$PATCH_DIR/HEAD" - -echo "Pins recorded in $PATCH_DIR/" -echo " BASE=$BASE_SHA" -echo " REFS=$(wc -l < "$PATCH_DIR/REFS") lines" -if [ -d "$PATCH_DIR/resolutions" ]; then - echo " Resolutions: $(find "$PATCH_DIR/resolutions" -type f | wc -l) file(s)" -else - echo " Resolutions: none (no conflicts)" -fi diff --git a/ccc-dev/replay.sh b/ccc-dev/replay.sh deleted file mode 100644 index 18331c4..0000000 --- a/ccc-dev/replay.sh +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Usage: ccc-dev/replay.sh -# Deterministic replay from pinned SHAs + conflict resolutions - -REPO_URL="https://github.com/ckb-devrel/ccc.git" -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_DIR="$SCRIPT_DIR/ccc" -PATCH_DIR="$SCRIPT_DIR/pins" - -# Skip if already cloned -if [ -d "$REPO_DIR" ]; then - echo "ccc-dev/ccc/ already exists, skipping (remove it to redo setup)" >&2 - exit 0 -fi - -# Skip if no pins to replay -if [ ! 
-f "$PATCH_DIR/REFS" ]; then - echo "No CCC pins to replay, skipping" >&2 - exit 0 -fi - -trap 'rm -rf "$REPO_DIR"; echo "FAILED — cleaned up ccc-dev/ccc/" >&2' ERR - -BASE_SHA=$(head -1 "$PATCH_DIR/REFS") -git clone --filter=blob:none "$REPO_URL" "$REPO_DIR" -git -C "$REPO_DIR" checkout "$BASE_SHA" -git -C "$REPO_DIR" checkout -b wip - -MERGE_IDX=0 -while IFS=' ' read -r SHA REF_NAME; do - MERGE_IDX=$((MERGE_IDX + 1)) - echo "Replaying merge $MERGE_IDX: $REF_NAME ($SHA)" >&2 - - # Pin identity and dates to match record.sh for deterministic commits - export GIT_AUTHOR_NAME="ci" GIT_AUTHOR_EMAIL="ci@local" - export GIT_COMMITTER_NAME="ci" GIT_COMMITTER_EMAIL="ci@local" - export GIT_AUTHOR_DATE="@$MERGE_IDX +0000" - export GIT_COMMITTER_DATE="@$MERGE_IDX +0000" - - git -C "$REPO_DIR" fetch origin "$SHA" - - # Use explicit merge message matching record.sh for deterministic commits - MERGE_MSG="Merge $REF_NAME into wip" - - if ! git -C "$REPO_DIR" merge --no-ff -m "$MERGE_MSG" FETCH_HEAD; then - # Apply saved conflict resolutions - RESOLUTION_DIR="$PATCH_DIR/resolutions/$MERGE_IDX" - if [ ! -d "$RESOLUTION_DIR" ]; then - echo "ERROR: Conflict at step $MERGE_IDX ($REF_NAME) but no resolutions found" >&2 - exit 1 - fi - - # Get list of conflicted files to verify coverage - mapfile -t CONFLICTED < <(git -C "$REPO_DIR" diff --name-only --diff-filter=U) - - while IFS= read -r FILE; do - FILE="${FILE#./}" - cp "$RESOLUTION_DIR/$FILE" "$REPO_DIR/$FILE" - git -C "$REPO_DIR" add "$FILE" - done < <(cd "$RESOLUTION_DIR" && find . -type f) - - # Verify all conflicted files have saved resolutions - for FILE in "${CONFLICTED[@]}"; do - if [ ! 
-f "$RESOLUTION_DIR/$FILE" ]; then - echo "ERROR: No saved resolution for conflicted file '$FILE' at step $MERGE_IDX ($REF_NAME)" >&2 - echo "Re-record with: ccc-dev/record.sh" >&2 - exit 1 - fi - done - - # Overwrite MERGE_MSG so merge --continue uses our deterministic message - echo "$MERGE_MSG" > "$REPO_DIR/.git/MERGE_MSG" - GIT_EDITOR=true git -C "$REPO_DIR" merge --continue - fi -done < <(tail -n +2 "$PATCH_DIR/REFS") - -bash "$SCRIPT_DIR/patch.sh" "$REPO_DIR" "$MERGE_IDX" - -# Apply local patches (sorted by filename for deterministic order) -LOCAL_DIR="$PATCH_DIR/local" -if [ -d "$LOCAL_DIR" ]; then - LOCAL_IDX=$((MERGE_IDX + 2)) - for PATCH_FILE in $(find "$LOCAL_DIR" -name '*.patch' | sort); do - PATCH_NAME=$(basename "$PATCH_FILE" .patch) - echo "Applying local patch: $PATCH_NAME" >&2 - - export GIT_AUTHOR_NAME="ci" GIT_AUTHOR_EMAIL="ci@local" - export GIT_COMMITTER_NAME="ci" GIT_COMMITTER_EMAIL="ci@local" - export GIT_AUTHOR_DATE="@$LOCAL_IDX +0000" - export GIT_COMMITTER_DATE="@$LOCAL_IDX +0000" - - git -C "$REPO_DIR" apply "$PATCH_FILE" - git -C "$REPO_DIR" add -A - git -C "$REPO_DIR" commit -m "$PATCH_NAME" - LOCAL_IDX=$((LOCAL_IDX + 1)) - done -fi - -# Verify HEAD SHA matches recording -ACTUAL=$(git -C "$REPO_DIR" rev-parse HEAD) -EXPECTED=$(cat "$PATCH_DIR/HEAD") -if [ "$ACTUAL" != "$EXPECTED" ]; then - echo "FAIL: replay HEAD ($ACTUAL) != pinned HEAD ($EXPECTED)" >&2 - echo "Pins are stale or corrupted. Re-record with 'pnpm ccc:record'." >&2 - exit 1 -fi - -# Add fork remote for pushing to phroi/ccc (SSH for auth) -git -C "$REPO_DIR" remote add fork git@github.com:phroi/ccc.git - -echo "OK — replay HEAD matches pinned HEAD ($EXPECTED)" diff --git a/ccc-dev/status.sh b/ccc-dev/status.sh deleted file mode 100644 index 86b3d8e..0000000 --- a/ccc-dev/status.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Check whether ccc-dev/ccc/ is safe to wipe. 
-# Exit 0 → safe (not cloned, or matches pins exactly) -# Exit 1 → has custom work (any changes vs pinned commit, diverged HEAD, or no pins to compare) - -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_DIR="$SCRIPT_DIR/ccc" -PINS_DIR="$SCRIPT_DIR/pins" - -if [ ! -d "$REPO_DIR" ]; then - echo "ccc-dev/ccc/ is not cloned" - exit 0 -fi - -if [ ! -f "$PINS_DIR/HEAD" ]; then - echo "ccc-dev/ccc/ exists but no pins/HEAD — custom clone" - exit 1 -fi - -PINNED=$(cat "$PINS_DIR/HEAD") -ACTUAL=$(git -C "$REPO_DIR" rev-parse HEAD) - -if [ "$ACTUAL" != "$PINNED" ]; then - echo "HEAD diverged from pins/HEAD:" - echo " pinned $PINNED" - echo " actual $ACTUAL" - git -C "$REPO_DIR" log --oneline "$PINNED..$ACTUAL" 2>/dev/null || true - exit 1 -fi - -# Compare pinned commit directly against working tree. -# git diff catches unstaged AND staged changes in one shot. -if ! git -C "$REPO_DIR" diff "$PINNED" --quiet 2>/dev/null \ - || [ -n "$(git -C "$REPO_DIR" ls-files --others --exclude-standard 2>/dev/null)" ]; then - echo "ccc-dev/ccc/ has changes relative to pins:" - git -C "$REPO_DIR" diff "$PINNED" --stat 2>/dev/null || true - git -C "$REPO_DIR" ls-files --others --exclude-standard 2>/dev/null || true - exit 1 -fi - -echo "ccc-dev/ccc/ is clean (matches pins)" diff --git a/ccc-dev/tsgo-filter.sh b/ccc-dev/tsgo-filter.sh deleted file mode 100755 index 29c0498..0000000 --- a/ccc-dev/tsgo-filter.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env bash - -# tsgo wrapper that filters diagnostics from CCC source files. -# -# Stack packages import CCC .ts source directly for real-time type feedback -# across the CCC/stack boundary. This means tsgo checks CCC files under the -# stack's stricter tsconfig (verbatimModuleSyntax, noImplicitOverride, -# noUncheckedIndexedAccess) — rules CCC doesn't follow. These aren't real -# integration errors, just tsconfig-strictness mismatches. -# -# This wrapper: -# 1. Runs tsgo with noEmitOnError=false so CCC diagnostics don't block emit -# 2. 
Emits .js + .d.ts output normally -# 3. Reports only diagnostics from stack source files -# 4. Exits non-zero only on real stack errors - -set -uo pipefail - -output=$(pnpm tsgo --noEmitOnError false 2>&1) || true - -# Filter out diagnostic blocks originating from ccc-dev/ccc/ paths. -# A diagnostic block = a non-indented line (the error) + subsequent indented lines (details). -filtered=$(printf '%s\n' "$output" | awk ' - !/^[[:space:]]/ { skip = ($0 ~ /ccc-dev\/ccc\//) ? 1 : 0 } - !skip { print } -') - -if printf '%s\n' "$filtered" | grep -q 'error TS'; then - printf '%s\n' "$filtered" - exit 1 -fi diff --git a/ccc-dev/.gitignore b/ccc-fork/.gitignore similarity index 100% rename from ccc-dev/.gitignore rename to ccc-fork/.gitignore diff --git a/ccc-fork/README.md b/ccc-fork/README.md new file mode 100644 index 0000000..b6b7332 --- /dev/null +++ b/ccc-fork/README.md @@ -0,0 +1,156 @@ +# CCC Local Development + +## Why + +CCC has unreleased branches (`releases/next`, `releases/udt`) that this project depends on. The fork management system deterministically merges them locally so the monorepo can build against unpublished CCC changes until they're published upstream. + +## How it works + +1. **Auto-replay** — `.pnpmfile.cjs` runs at `pnpm install` time. If `ccc-fork/pins/manifest` exists but `ccc-fork/ccc/` doesn't, it auto-triggers `fork-scripts/replay.sh` to clone and set up CCC. + +2. **Workspace override** — When `ccc-fork/ccc/` is present, `.pnpmfile.cjs` auto-discovers all CCC packages (via `config.json` workspace settings) and rewrites `@ckb-ccc/*` dependencies to `workspace:*` — no manual `pnpm.overrides` needed. This is necessary because `catalog:` specifiers resolve to a semver range _before_ pnpm considers workspace linking — even with `link-workspace-packages = true`, pnpm fetches from the registry without this hook. When CCC is not cloned, the hook is a no-op and deps resolve from the registry normally. + +3. 
**Source-level types** — `fork-scripts/patch.sh` (called by both `record.sh` and `replay.sh`) patches CCC's `package.json` exports to point TypeScript at `.ts` source instead of built `.d.ts`, then creates a deterministic git commit (fixed author/date). This gives real-time type feedback when editing across the CCC/stack boundary — changes in CCC source are immediately visible to stack packages without rebuilding. + +4. **Diagnostic filtering** — `fork-scripts/tsgo-filter.sh` is a bash wrapper around `tsgo` used by stack package builds. Because CCC `.ts` source is type-checked under the stack's stricter tsconfig (`verbatimModuleSyntax`, `noImplicitOverride`, `noUncheckedIndexedAccess`), plain `tsgo` would report hundreds of CCC diagnostics that aren't real integration errors. The wrapper emits output normally and only fails on diagnostics from stack source files. When no forks are cloned, packages fall back to plain `tsgo`. + +## Configuration + +CCC-specific settings live in `ccc-fork/config.json`: + +```json +{ + "upstream": "https://github.com/ckb-devrel/ccc.git", + "fork": "git@github.com:phroi/ccc.git", + "refs": ["359", "328", "releases/next", "releases/udt"], + "cloneDir": "ccc", + "workspace": { + "include": ["packages/*"], + "exclude": ["packages/demo", "packages/docs", ...] 
+ } +} +``` + +- **upstream**: Git URL to clone from +- **fork**: SSH URL of developer fork, added as `fork` remote after replay +- **refs**: Merge refs — PR numbers, branch names, or commit SHAs (auto-detected) +- **cloneDir**: Name of the cloned directory inside `ccc-fork/` +- **workspace**: Glob patterns for pnpm workspace inclusion/exclusion + +## `pins/` format + +``` +ccc-fork/pins/ + HEAD # expected SHA after full replay (merges + patch.sh + local patches) + manifest # base SHA + merge refs, TSV, one per line + res-2.resolution # conflict resolution for merge step 2 (if any) + res-4.resolution # conflict resolution for merge step 4 (gaps = no conflicts) + local-001.patch # local development patch (applied after merges + patch.sh) + local-002.patch # local development patch +``` + +- **`HEAD`**: one line, the expected final SHA after everything (merges, `patch.sh`, local patches). Verification happens at the end of replay. +- **`manifest`**: TSV, one line per ref. Line 1 is the base commit (`SHA\tbranchname`); subsequent lines are merge refs applied sequentially onto `wip`. +- **`res-N.resolution`**: counted conflict resolution for merge step N. Only present for merge steps that had conflicts. Uses positional parsing (line counts, not content inspection) for deterministic replay. +- **`local-*.patch`**: standard unified diffs of local work, applied in lexicographic order after merges + `patch.sh`, each as a deterministic commit. + +All files are human-readable and editable. + +## Recording + +Recording captures the current upstream state and any conflict resolutions: + +```bash +pnpm fork:record ccc-fork +``` + +This runs `fork-scripts/record.sh` which reads refs from `config.json`, clones CCC, merges the configured refs, uses AI Coworker to resolve any conflicts, patches for source-level type resolution, and writes `pins/`. Commit the resulting `ccc-fork/pins/` directory so other contributors get the same build. 
+ +You can override refs on the command line: + +```bash +pnpm fork:record ccc-fork 359 328 releases/next releases/udt +``` + +### Ref auto-detection + +`record.sh` accepts any number of refs and auto-detects their type: +- `^[0-9a-f]{7,40}$` → commit SHA +- `^[0-9]+$` → GitHub PR number +- everything else → branch name + +### Conflict resolution format + +When merges produce conflicts, `record.sh` resolves them and stores the resolution as a counted resolution file in `pins/res-N.resolution` (where N is the 1-indexed merge step). These use a positional format with `CONFLICT ours=N base=M theirs=K resolution=R` headers, so you can: + +- **Inspect** exactly what was resolved and how +- **Edit by hand** if the AI resolution needs adjustment +- **Diff across re-records** to see what changed + +## Developing CCC changes + +Work directly in `ccc-fork/ccc/` on the `wip` branch. `pnpm fork:status ccc-fork` tracks pending changes (exit 0 = clean, exit 1 = has work). + +### Development loop + +1. **Edit code** on `wip` in `ccc-fork/ccc/`. Commit normally. +2. **Rebuild**: `pnpm build` (builds stack packages with CCC type integration). +3. **Run tests**: `pnpm test` + +### Saving local patches + +When you have local changes that should persist across re-records: + +```bash +pnpm fork:save ccc-fork [description] +``` + +This captures all changes (committed + uncommitted) relative to the pinned HEAD as a patch file in `pins/`. The patch is applied deterministically during replay, so it survives `pnpm fork:clean ccc-fork && pnpm install` cycles. + +Example workflow: +1. Edit files in `ccc-fork/ccc/` +2. `pnpm fork:save ccc-fork my-feature` → creates `pins/local-001-my-feature.patch` +3. Edit more files +4. `pnpm fork:save ccc-fork another-fix` → creates `pins/local-002-another-fix.patch` +5. 
`pnpm fork:clean ccc-fork && pnpm install` → replays merges + patches, HEAD matches + +Local patches are preserved across `pnpm fork:record ccc-fork` — they're backed up before re-recording and restored afterwards. + +### Committing CCC changes to stack + +When ready to commit stack changes that depend on CCC modifications: + +1. Push changes to the fork (`phroi/ccc`) using `pnpm fork:push ccc-fork` +2. Add the PR number to `refs` in `ccc-fork/config.json` +3. Run `pnpm fork:record ccc-fork` — this re-records with the PR as a merge ref +4. Commit the updated `ccc-fork/pins/` to the stack repo + +Only open a PR against `ckb-devrel/ccc` when the maintainer decides to upstream — keep changes on the fork until then. + +### Pushing to a PR branch + +Extract your commits (those after the recording) onto the PR branch: + +```bash +pnpm fork:push ccc-fork +cd ccc-fork/ccc +git push fork pr-666:your-branch-name +git checkout wip # return to development +``` + +## Switching modes + +**Check for pending work:** `pnpm fork:status ccc-fork` — exit 0 if clone matches pinned state (safe to wipe), exit 1 otherwise. + +**Local CCC (default when `pins/` is committed):** `pnpm install` auto-replays pins and overrides deps. + +**Published CCC:** `pnpm fork:reset ccc-fork && pnpm install` — removes clone and pins, restores published packages. + +**Re-record:** `pnpm fork:record ccc-fork` wipes and re-records everything from scratch. Aborts if clone has pending work. Local patches are preserved. + +**Force re-replay:** `pnpm fork:clean ccc-fork && pnpm install` — removes clone but keeps pins, replays on next install. + +## Requirements + +- **Recording** (`pnpm fork:record`): Requires the AI Coworker CLI (installed as a devDependency; invoked via `pnpm coworker:ask`) for automated conflict resolution (only when merging refs). Also requires `jq` for config.json and package.json processing. +- **Replay** (`pnpm install`): Requires `jq`. 
No other extra tools — works for any contributor with just pnpm and `jq`. diff --git a/ccc-fork/config.json b/ccc-fork/config.json new file mode 100644 index 0000000..4d59d37 --- /dev/null +++ b/ccc-fork/config.json @@ -0,0 +1,17 @@ +{ + "upstream": "https://github.com/ckb-devrel/ccc.git", + "fork": "git@github.com:phroi/ccc.git", + "refs": ["359", "328", "releases/next", "releases/udt"], + "cloneDir": "ccc", + "workspace": { + "include": ["packages/*"], + "exclude": [ + "packages/demo", + "packages/docs", + "packages/examples", + "packages/faucet", + "packages/playground", + "packages/tests" + ] + } +} diff --git a/ccc-fork/pins/HEAD b/ccc-fork/pins/HEAD new file mode 100644 index 0000000..50d606a --- /dev/null +++ b/ccc-fork/pins/HEAD @@ -0,0 +1 @@ +226271097702bc04dbb1b0d76ee79c2b7bdd6c5e diff --git a/ccc-fork/pins/manifest b/ccc-fork/pins/manifest new file mode 100644 index 0000000..9715847 --- /dev/null +++ b/ccc-fork/pins/manifest @@ -0,0 +1,5 @@ +50d657beea36de3ebbd80ee88209842644daef34 master +8e63e3a21f1824445b2c339ffe4927a2a8af1bcf 359 +5761fe63fcb29ac810fab5e71063424692f65592 328 +0e18748fb139d71338c109d71aae5b149cb58af3 releases/next +0ad2a5f6305d4964b00394bc8a6ed50136fdffa8 releases/udt diff --git a/ccc-fork/pins/res-2.resolution b/ccc-fork/pins/res-2.resolution new file mode 100644 index 0000000..c264e5d --- /dev/null +++ b/ccc-fork/pins/res-2.resolution @@ -0,0 +1,15 @@ +--- packages/core/src/ckb/transaction.ts +CONFLICT ours=5 base=4 theirs=1 resolution=4 +import { + ErrorNervosDaoOutputLimit, + ErrorTransactionInsufficientCoin, +} from "./transactionErrors.js"; +CONFLICT ours=97 base=95 theirs=7 resolution=8 + const result = await from.completeFee(this, { + changeFn: change, + feeRate: expectedFeeRate, + filter, + options, + }); + await assertDaoOutputLimit(this, from.client); + return result; diff --git a/ccc-fork/pins/res-4.resolution b/ccc-fork/pins/res-4.resolution new file mode 100644 index 0000000..bcb456c --- /dev/null +++ 
b/ccc-fork/pins/res-4.resolution @@ -0,0 +1,20 @@ +--- packages/core/src/ckb/transactionErrors.ts +CONFLICT ours=13 base=0 theirs=3 resolution=13 +export class ErrorNervosDaoOutputLimit extends Error { + public readonly count: number; + public readonly limit: number; + + constructor(count: number) { + super( + `NervosDAO transaction has ${count} output cells, exceeding the limit of 64`, + ); + this.count = count; + this.limit = 64; + } +} + +--- vitest.config.mts +CONFLICT ours=1 base=1 theirs=1 resolution=1 + projects: packages, +CONFLICT ours=1 base=1 theirs=1 resolution=1 + include: packages, diff --git a/fork-scripts/clean-all.sh b/fork-scripts/clean-all.sh new file mode 100644 index 0000000..f65af35 --- /dev/null +++ b/fork-scripts/clean-all.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Clean all managed fork clones (status-check each before removing). +# Usage: fork-scripts/clean-all.sh + +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +while IFS= read -r dev_dir; do + bash "$FORK_SCRIPTS_DIR/clean.sh" "$dev_dir" || true +done < <(discover_fork_dirs) diff --git a/fork-scripts/clean.sh b/fork-scripts/clean.sh new file mode 100644 index 0000000..1b75df6 --- /dev/null +++ b/fork-scripts/clean.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Remove a fork clone after verifying it has no pending work. +# Usage: fork-scripts/clean.sh + +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +DEV_DIR="${1:?Usage: fork-scripts/clean.sh }" +DEV_DIR=$(cd "$DEV_DIR" && pwd) + +bash "$FORK_SCRIPTS_DIR/status.sh" "$DEV_DIR" +rm -rf "$(repo_dir "$DEV_DIR")" diff --git a/fork-scripts/lib.sh b/fork-scripts/lib.sh new file mode 100644 index 0000000..7f56371 --- /dev/null +++ b/fork-scripts/lib.sh @@ -0,0 +1,241 @@ +#!/usr/bin/env bash +# Shared helpers for fork management scripts + +FORK_SCRIPTS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR="$(cd "$FORK_SCRIPTS_DIR/.." 
&& pwd)" + +# Read a value from a fork directory's config.json +# Usage: config_val +config_val() { + jq -r "$2" "$1/config.json" +} + +# Get the clone directory path for a fork +# Usage: repo_dir +repo_dir() { + local clone_dir + clone_dir=$(config_val "$1" '.cloneDir') + echo "$1/$clone_dir" +} + +# Get the pins directory path for a fork +# Usage: pins_dir +pins_dir() { + echo "$1/pins" +} + +# Get the upstream URL from config +# Usage: upstream_url +upstream_url() { + config_val "$1" '.upstream' +} + +# Get the fork URL from config (may be empty) +# Usage: fork_url +fork_url() { + local url + url=$(config_val "$1" '.fork // empty') + [ -n "$url" ] && echo "$url" +} + +# Get the refs array from config as lines +# Usage: repo_refs +repo_refs() { + config_val "$1" '.refs[]' +} + +# Discover all *-fork/ directories with config.json at repo root +# Usage: discover_fork_dirs +discover_fork_dirs() { + for d in "$ROOT_DIR"/*-fork; do + [ -f "$d/config.json" ] && echo "$d" + done +} + +# Read the expected HEAD SHA from pins/HEAD +# Usage: pinned_head +pinned_head() { + local f="$1/HEAD" + [ -f "$f" ] && cat "$f" || return 1 +} + +# Return path to pins/manifest if it exists +# Usage: manifest_file +manifest_file() { + local f="$1/manifest" + [ -f "$f" ] && echo "$f" || return 1 +} + +# Check whether pins exist (manifest present) +# Usage: has_pins +has_pins() { + [ -f "$1/manifest" ] +} + +# Count merge refs in manifest (total lines minus base line) +# Usage: merge_count +merge_count() { + local mf + mf=$(manifest_file "$1") || return 1 + echo $(( $(wc -l < "$mf") - 1 )) +} + +# Export deterministic git identity for reproducible commits +# Usage: deterministic_env +deterministic_env() { + export GIT_AUTHOR_NAME="ci" GIT_AUTHOR_EMAIL="ci@local" + export GIT_COMMITTER_NAME="ci" GIT_COMMITTER_EMAIL="ci@local" + export GIT_AUTHOR_DATE="@$1 +0000" GIT_COMMITTER_DATE="@$1 +0000" +} + +# Count files matching a glob pattern (pipefail-safe alternative to ls|wc -l) +# Usage: 
count_glob pattern (e.g., count_glob "$dir"/local-*.patch) +count_glob() { + local n=0 + for f in "$@"; do + [ -f "$f" ] && n=$((n + 1)) + done + echo "$n" +} + +# Apply local patches from pins/ as deterministic commits. +# Timestamp sequence continues from patch.sh: merge_count+1 is patch.sh, +# so local patches start at merge_count+2. +# Returns 1 if any patch fails to apply (caller should add remediation advice). +# Usage: apply_local_patches +apply_local_patches() { + local repo_dir="$1" p_dir="$2" + local mc ts patch name + mc=$(merge_count "$p_dir") || mc=0 + ts=$((mc + 2)) + for patch in "$p_dir"/local-*.patch; do + [ -f "$patch" ] || return 0 + name=$(basename "$patch" .patch) + echo "Applying local patch: $name" >&2 + if ! git -C "$repo_dir" apply "$patch"; then + echo "ERROR: Local patch $name failed to apply." >&2 + return 1 + fi + deterministic_env "$ts" + git -C "$repo_dir" add -A + git -C "$repo_dir" commit -m "local: $name" + ts=$((ts + 1)) + done +} + +# Apply counted conflict resolutions to a single conflicted file. +# Reads resolution data (CONFLICT headers + content lines) from $1, +# walks the conflicted file $2 positionally by line counts (never inspects +# content), and outputs the resolved file to stdout. +# Exits non-zero if the conflict count in the resolution data doesn't match +# the number of <<<<<<< markers in the file (catches fake markers). 
+# Usage: apply_counted_resolutions +apply_counted_resolutions() { + awk ' + FNR==NR { + if (/^CONFLICT /) { + n++ + for (i=2; i<=NF; i++) { + split($i, kv, "=") + c[n, kv[1]] = kv[2]+0 + } + rn[n] = 0 + next + } + rn[n]++ + r[n, rn[n]] = $0 + next + } + { + if (substr($0,1,7) == "<<<<<<<") { + cn++ + if (cn > n) { + printf "ERROR: more conflicts in file than in resolution data (%d > %d)\n", cn, n > "/dev/stderr" + err = 1; exit 1 + } + for (i = 0; i < c[cn,"ours"]; i++) getline + getline # ||||||| + for (i = 0; i < c[cn,"base"]; i++) getline + getline # ======= + for (i = 0; i < c[cn,"theirs"]; i++) getline + getline # >>>>>>> + for (i = 1; i <= c[cn,"resolution"]; i++) print r[cn,i] + next + } + print + } + END { + if (!err && cn != n) { + printf "ERROR: expected %d conflicts, found %d\n", n, cn > "/dev/stderr" + exit 1 + } + } + ' "$1" "$2" +} + +# Regenerate fork workspace entries in pnpm-workspace.yaml. +# Reads all *-fork/config.json files and replaces the section between +# @generated markers with computed include/exclude globs. 
+# Usage: sync_workspace_yaml +sync_workspace_yaml() { + local yaml="$ROOT_DIR/pnpm-workspace.yaml" + local entries="" + + for dev_dir in "$ROOT_DIR"/*-fork; do + [ -f "$dev_dir/config.json" ] || continue + local name clone_dir + name=$(basename "$dev_dir") + clone_dir=$(config_val "$dev_dir" '.cloneDir') + + mapfile -t includes < <(config_val "$dev_dir" '.workspace.include // [] | .[]') + for inc in "${includes[@]}"; do + entries+=" - ${name}/${clone_dir}/${inc}"$'\n' + done + + mapfile -t excludes < <(config_val "$dev_dir" '.workspace.exclude // [] | .[]') + for excl in "${excludes[@]}"; do + entries+=" - \"!${name}/${clone_dir}/${excl}\""$'\n' + done + done + + awk -v entries="$entries" ' + /^ # @generated begin fork-workspaces/ { print; printf "%s", entries; skip=1; next } + /^ # @generated end fork-workspaces/ { skip=0; print; next } + !skip { print } + ' "$yaml" > "$yaml.tmp" && mv "$yaml.tmp" "$yaml" +} + +# Apply a multi-file resolution file to a repo directory. +# Splits by "--- path" headers into per-file chunks, then calls +# apply_counted_resolutions for each file, replacing it in-place. 
+# Usage: apply_resolution_file +apply_resolution_file() { + local repo_dir="$1" res_file="$2" + local tmp_dir + tmp_dir=$(mktemp -d) + trap 'rm -rf "$tmp_dir"' RETURN + + # Split by --- headers; write path list and per-file chunks + awk -v dir="$tmp_dir" ' + /^--- / { + if (f) close(f) + n++ + path = substr($0, 5) + print path > (dir "/paths") + f = dir "/chunk-" n + next + } + f { print > f } + END { if (f) close(f) } + ' "$res_file" + + [ -f "$tmp_dir/paths" ] || return 0 + + local i=0 path + while IFS= read -r path; do + i=$((i + 1)) + apply_counted_resolutions "$tmp_dir/chunk-$i" "$repo_dir/$path" \ + > "$repo_dir/${path}.resolved.tmp" + mv "$repo_dir/${path}.resolved.tmp" "$repo_dir/$path" + done < "$tmp_dir/paths" +} diff --git a/ccc-dev/patch.sh b/fork-scripts/patch.sh old mode 100755 new mode 100644 similarity index 55% rename from ccc-dev/patch.sh rename to fork-scripts/patch.sh index d259929..24347c5 --- a/ccc-dev/patch.sh +++ b/fork-scripts/patch.sh @@ -1,22 +1,26 @@ #!/usr/bin/env bash set -euo pipefail -# Patch a CCC clone for use in the stack workspace. -# Usage: ccc-dev/patch.sh +# Patch a cloned repo for use in the stack workspace. 
+# Usage: fork-scripts/patch.sh -REPO_DIR="${1:?Usage: patch.sh }" +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +REPO_DIR="${1:?Usage: fork-scripts/patch.sh }" MERGE_COUNT="${2:?Missing merge-count argument}" -# Remove CCC's own lockfile so deps are recorded in the root pnpm-lock.yaml +# Remove the repo's own lockfile so deps are recorded in the root pnpm-lock.yaml rm -f "$REPO_DIR/pnpm-lock.yaml" -# Patch CCC packages so the stack resolves directly to .ts source: +# Patch packages so the stack resolves directly to .ts source: # - "type":"module" → NodeNext treats .ts files as ESM # - "types" export condition → TypeScript resolves .ts source before .js dist -# - "import" rewritten to .ts source → Vite/esbuild can bundle without building CCC +# - "import" rewritten to .ts source → Vite/esbuild can bundle without building for pkg_json in "$REPO_DIR"/packages/*/package.json; do + [ -f "$pkg_json" ] || continue jq '.type = "module" | - if .exports then .exports |= with_entries( + if (.exports | type) == "object" then .exports |= with_entries( if .value | type == "object" and has("import") then .value |= ( (.import | sub("/dist/";"/src/") | sub("\\.m?js$";".ts")) as $src | @@ -27,9 +31,8 @@ for pkg_json in "$REPO_DIR"/packages/*/package.json; do done # Commit patched files with deterministic identity so record and replay produce the same hash -export GIT_AUTHOR_NAME="ci" GIT_AUTHOR_EMAIL="ci@local" -export GIT_COMMITTER_NAME="ci" GIT_COMMITTER_EMAIL="ci@local" -PATCH_TS="@$((MERGE_COUNT + 1)) +0000" -export GIT_AUTHOR_DATE="$PATCH_TS" GIT_COMMITTER_DATE="$PATCH_TS" +deterministic_env "$((MERGE_COUNT + 1))" git -C "$REPO_DIR" add -A -git -C "$REPO_DIR" commit -m "patch: source-level type resolution" +if ! 
git -C "$REPO_DIR" diff --cached --quiet; then + git -C "$REPO_DIR" commit -m "patch: source-level type resolution" +fi diff --git a/ccc-dev/push.sh b/fork-scripts/push.sh old mode 100755 new mode 100644 similarity index 62% rename from ccc-dev/push.sh rename to fork-scripts/push.sh index c9bb6d8..9dd1200 --- a/ccc-dev/push.sh +++ b/fork-scripts/push.sh @@ -1,33 +1,41 @@ #!/usr/bin/env bash set -euo pipefail -# Usage: ccc-dev/push.sh [target-branch] +# Usage: fork-scripts/push.sh [target-branch] # Cherry-picks commits made after recording onto the PR branch. # target-branch: defaults to the last pr-* branch found. -SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" -REPO_DIR="$SCRIPT_DIR/ccc" +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +DEV_DIR="${1:?Usage: fork-scripts/push.sh [target-branch]}" +DEV_DIR=$(cd "$DEV_DIR" && pwd) +shift + +REPO_DIR=$(repo_dir "$DEV_DIR") +PINS_DIR=$(pins_dir "$DEV_DIR") +FORK_NAME=$(basename "$DEV_DIR") +CLONE_DIR=$(config_val "$DEV_DIR" '.cloneDir') # Verify prerequisites if [ ! -d "$REPO_DIR" ]; then - echo "ERROR: $REPO_DIR does not exist. Run ccc-dev/record.sh first." >&2 + echo "ERROR: $FORK_NAME clone does not exist. Run 'pnpm fork:record $FORK_NAME' first." >&2 exit 1 fi -if [ ! -r "$SCRIPT_DIR/pins/HEAD" ]; then - echo "ERROR: pins/HEAD not found. Run ccc-dev/record.sh first." >&2 + +WIP_HEAD=$(pinned_head "$PINS_DIR" 2>/dev/null) || { + echo "ERROR: No pins found. Run 'pnpm fork:record $FORK_NAME' first." >&2 exit 1 -fi +} # Verify we're on the wip branch CURRENT_BRANCH=$(git -C "$REPO_DIR" branch --show-current) if [ "$CURRENT_BRANCH" != "wip" ]; then echo "ERROR: Expected to be on 'wip' branch, but on '$CURRENT_BRANCH'." 
>&2 - echo "Switch back with: cd ccc-dev/ccc && git checkout wip" >&2 + echo "Switch back with: cd $FORK_NAME/$CLONE_DIR && git checkout wip" >&2 exit 1 fi -WIP_HEAD=$(cat "$SCRIPT_DIR/pins/HEAD") - # Show commits to push echo "Commits since recording:" git -C "$REPO_DIR" log --oneline "$WIP_HEAD..HEAD" @@ -55,7 +63,7 @@ git -C "$REPO_DIR" checkout "$TARGET" if ! git -C "$REPO_DIR" cherry-pick "$WIP_HEAD..wip"; then echo "" >&2 echo "ERROR: Cherry-pick failed. To recover:" >&2 - echo " cd ccc-dev/ccc" >&2 + echo " cd $FORK_NAME/$CLONE_DIR" >&2 echo " # Resolve conflicts, then: git cherry-pick --continue" >&2 echo " # Or abort with: git cherry-pick --abort && git checkout wip" >&2 exit 1 @@ -63,5 +71,5 @@ fi echo "" echo "Done. You are now on $TARGET with your commits applied." -echo "Push with: cd ccc-dev/ccc && git push $TARGET:" -echo "Return to: cd ccc-dev/ccc && git checkout wip" +echo "Push with: cd $FORK_NAME/$CLONE_DIR && git push $TARGET:" +echo "Return to: cd $FORK_NAME/$CLONE_DIR && git checkout wip" diff --git a/fork-scripts/record.sh b/fork-scripts/record.sh new file mode 100644 index 0000000..258b50f --- /dev/null +++ b/fork-scripts/record.sh @@ -0,0 +1,348 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Usage: fork-scripts/record.sh [ref ...] 
+# ref auto-detection: +# ^[0-9a-f]{7,40}$ → commit SHA +# ^[0-9]+$ → GitHub PR number +# everything else → branch name +# No refs on CLI → reads from config.json +# No refs at all → just clone, no merges + +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +DEV_DIR="${1:?Usage: fork-scripts/record.sh <fork-dir> [ref ...]}" +DEV_DIR=$(cd "$DEV_DIR" && pwd) +shift + +REPO_DIR=$(repo_dir "$DEV_DIR") +PINS_DIR=$(pins_dir "$DEV_DIR") +UPSTREAM=$(upstream_url "$DEV_DIR") + +# Collect refs: CLI args override config.json +if [ $# -gt 0 ]; then + REFS=("$@") +else + mapfile -t REFS < <(repo_refs "$DEV_DIR") +fi + +# --------------------------------------------------------------------------- +# resolve_conflict
# Tiered merge conflict resolution (diff3 markers required): +# Tier 0: Deterministic — one side matches base → take the other (0 tokens) +# Tier 1: Strategy classification — LLM picks OURS/THEIRS/BOTH/GENERATE (~5 tokens) +# Tier 2: Code generation — LLM generates merged code for hunks only +# Outputs the resolved file to stdout. +# Writes counted resolution to <file>.resolution (collected into pins/res-N.resolution after merge).
+# --------------------------------------------------------------------------- +resolve_conflict() { + local FILE="$1" F_REL="$2" + local COUNT WORK i OURS BASE THEIRS + + COUNT=$(awk 'substr($0,1,7)=="<<<<<<<"{n++} END{print n+0}' "$FILE") + [ "$COUNT" -gt 0 ] || { echo "ERROR: no conflict markers in $FILE" >&2; return 1; } + + WORK=$(mktemp -d) + trap 'rm -rf "$WORK"' RETURN + + # Extract ours / base / theirs for each conflict hunk + awk -v dir="$WORK" ' + substr($0,1,7) == "<<<<<<<" { n++; section = "ours"; next } + substr($0,1,7) == "|||||||" { section = "base"; next } + substr($0,1,7) == "=======" { section = "theirs"; next } + substr($0,1,7) == ">>>>>>>" { section = ""; next } + section { print > (dir "/c" n "_" section) } + ' "$FILE" + + # Ensure ours/theirs files exist even for empty hunks (edit/delete conflicts) + for i in $(seq 1 "$COUNT"); do + touch "$WORK/c${i}_ours" "$WORK/c${i}_theirs" + done + + # Tier 0: Deterministic resolution (no LLM needed) + local NEED_LLM=() + for i in $(seq 1 "$COUNT"); do + OURS="$WORK/c${i}_ours"; BASE="$WORK/c${i}_base"; THEIRS="$WORK/c${i}_theirs" + if [ ! 
-f "$BASE" ]; then + NEED_LLM+=("$i"); continue + fi + if diff -q "$OURS" "$BASE" >/dev/null 2>&1; then + cp "$THEIRS" "$WORK/r$i" + echo " conflict $i: deterministic (take theirs)" >&2 + elif diff -q "$THEIRS" "$BASE" >/dev/null 2>&1; then + cp "$OURS" "$WORK/r$i" + echo " conflict $i: deterministic (take ours)" >&2 + elif diff -q "$OURS" "$THEIRS" >/dev/null 2>&1; then + cp "$OURS" "$WORK/r$i" + echo " conflict $i: deterministic (sides identical)" >&2 + else + NEED_LLM+=("$i") + fi + done + + # --- helper: verify, reconstruct resolved file, write resolution sidecar --- + _finish() { + for i in $(seq 1 "$COUNT"); do + [ -f "$WORK/r$i" ] || { echo "ERROR: missing resolution for conflict $i in $FILE" >&2; return 1; } + done + + # Build per-file counted resolution data + local res_data="$WORK/res_data" + : > "$res_data" + for i in $(seq 1 "$COUNT"); do + local ours_n=0 base_n=0 theirs_n=0 res_n=0 + ours_n=$(wc -l < "$WORK/c${i}_ours") + [ -f "$WORK/c${i}_base" ] && base_n=$(wc -l < "$WORK/c${i}_base") + theirs_n=$(wc -l < "$WORK/c${i}_theirs") + res_n=$(wc -l < "$WORK/r$i") + printf 'CONFLICT ours=%d base=%d theirs=%d resolution=%d\n' \ + "$ours_n" "$base_n" "$theirs_n" "$res_n" >> "$res_data" + cat "$WORK/r$i" >> "$res_data" + done + + # Apply counted resolutions to reconstruct resolved file (verifies counts) + apply_counted_resolutions "$res_data" "$FILE" + + # Write resolution sidecar (collected into res-N.resolution by caller) + cp "$res_data" "$FILE.resolution" + } + + [ ${#NEED_LLM[@]} -eq 0 ] && { _finish; return; } + + # Tier 1: Strategy classification (~5 output tokens per conflict) + local CLASSIFY_INPUT="" STRATEGIES NUM STRATEGY REST NEED_GENERATE=() + for i in "${NEED_LLM[@]}"; do + CLASSIFY_INPUT+="=== CONFLICT $i === +--- ours --- +$(cat "$WORK/c${i}_ours") +--- base --- +$(cat "$WORK/c${i}_base" 2>/dev/null || echo "(unavailable)") +--- theirs --- +$(cat "$WORK/c${i}_theirs") + +" + done + + STRATEGIES=$(printf '%s\n' "$CLASSIFY_INPUT" | pnpm --silent 
coworker:ask \ + -p "For each conflict, respond with ONLY the conflict number and one strategy per line: +N OURS — keep ours (theirs is outdated/superseded) +N THEIRS — keep theirs (ours is outdated/superseded) +N BOTH_OT — concatenate ours then theirs +N BOTH_TO — concatenate theirs then ours +N GENERATE — needs custom merge +No explanations.") + + while IFS=' ' read -r NUM STRATEGY REST; do + [[ "${NUM:-}" =~ ^[0-9]+$ ]] || continue + case "$STRATEGY" in + OURS) cp "$WORK/c${NUM}_ours" "$WORK/r$NUM"; echo " conflict $NUM: classified → OURS" >&2 ;; + THEIRS) cp "$WORK/c${NUM}_theirs" "$WORK/r$NUM"; echo " conflict $NUM: classified → THEIRS" >&2 ;; + BOTH_OT) cat "$WORK/c${NUM}_ours" "$WORK/c${NUM}_theirs" > "$WORK/r$NUM"; echo " conflict $NUM: classified → BOTH (ours first)" >&2 ;; + BOTH_TO) cat "$WORK/c${NUM}_theirs" "$WORK/c${NUM}_ours" > "$WORK/r$NUM"; echo " conflict $NUM: classified → BOTH (theirs first)" >&2 ;; + GENERATE) NEED_GENERATE+=("$NUM"); echo " conflict $NUM: classified → GENERATE" >&2 ;; + *) NEED_GENERATE+=("$NUM"); echo " conflict $NUM: unrecognized '$STRATEGY', falling back to GENERATE" >&2 ;; + esac + done <<< "$STRATEGIES" + + [ ${#NEED_GENERATE[@]} -eq 0 ] && { _finish; return; } + + # Tier 2: Code generation (only for GENERATE conflicts — hunks only output) + local GENERATE_INPUT="" GENERATED + for i in "${NEED_GENERATE[@]}"; do + GENERATE_INPUT+="=== CONFLICT $i === +--- ours --- +$(cat "$WORK/c${i}_ours") +--- base --- +$(cat "$WORK/c${i}_base" 2>/dev/null || echo "(unavailable)") +--- theirs --- +$(cat "$WORK/c${i}_theirs") + +" + done + + GENERATED=$(printf '%s\n' "$GENERATE_INPUT" | pnpm --silent coworker:ask \ + -p "Merge each conflict meaningfully. Output '=== RESOLUTION N ===' header followed by ONLY the merged code. 
No explanations, no code fences.") + + printf '%s\n' "$GENERATED" | awk -v dir="$WORK" ' + /^=== RESOLUTION [0-9]+ ===$/ { if (f) close(f); f = dir "/r" $3; buf = ""; next } + f && /^[[:space:]]*$/ { buf = buf $0 "\n"; next } + f { if (buf != "") { printf "%s", buf > f; buf = "" }; print > f } + END { if (f) close(f) } + ' + + _finish +} + +# Guard: abort if clone has pending work +FORK_NAME=$(basename "$DEV_DIR") +if ! bash "$FORK_SCRIPTS_DIR/status.sh" "$DEV_DIR" >/dev/null 2>&1; then + bash "$FORK_SCRIPTS_DIR/status.sh" "$DEV_DIR" >&2 + echo "" >&2 + echo "ERROR: $FORK_NAME has pending work that would be lost." >&2 + echo "Push with 'pnpm fork:push $FORK_NAME', commit, or remove the clone manually." >&2 + exit 1 +fi + +# Preserve local patches before wiping +LOCAL_PATCHES_TMP="" +if [ "$(count_glob "$PINS_DIR"/local-*.patch)" -gt 0 ]; then + LOCAL_PATCHES_TMP=$(mktemp -d) + cp "$PINS_DIR"/local-*.patch "$LOCAL_PATCHES_TMP/" + echo "Preserved $(count_glob "$LOCAL_PATCHES_TMP"/local-*.patch) local patch(es)" +fi + +# Always start fresh — wipe previous clone and pins +rm -rf "$REPO_DIR" "$PINS_DIR" +mkdir -p "$PINS_DIR" + +cleanup_on_error() { + rm -rf "$REPO_DIR" "$PINS_DIR" + if [ -n "${LOCAL_PATCHES_TMP:-}" ] && [ -d "${LOCAL_PATCHES_TMP:-}" ]; then + echo "FAILED — cleaned up clone and pins/" >&2 + echo "Local patches preserved in: $LOCAL_PATCHES_TMP" >&2 + echo "Restore manually or re-record without local patches." >&2 + else + echo "FAILED — cleaned up clone and pins/" >&2 + fi +} +trap cleanup_on_error ERR + +git clone --filter=blob:none "$UPSTREAM" "$REPO_DIR" + +# Enable diff3 conflict markers so conflict resolution can see the base version. +# Force full 40-char SHAs in |||||| base markers so they're identical across runs +# (default core.abbrev varies with object count, breaking resolution replay). 
+git -C "$REPO_DIR" config merge.conflictStyle diff3 +git -C "$REPO_DIR" config core.abbrev 40 + +# Capture default branch name and base SHA before any merges +DEFAULT_BRANCH=$(git -C "$REPO_DIR" branch --show-current) +BASE_SHA=$(git -C "$REPO_DIR" rev-parse HEAD) +git -C "$REPO_DIR" checkout -b wip + +# Write manifest: base line first +printf '%s\t%s\n' "$BASE_SHA" "$DEFAULT_BRANCH" > "$PINS_DIR/manifest" + +MERGE_IDX=0 + +for REF in "${REFS[@]}"; do + MERGE_IDX=$((MERGE_IDX + 1)) + + deterministic_env "$MERGE_IDX" + + # Case A: full (7-40 char) hex commit SHA + if [[ $REF =~ ^[0-9a-f]{7,40}$ ]]; then + git -C "$REPO_DIR" fetch --depth=1 origin "$REF" + MERGE_REF="FETCH_HEAD" + + # Case B: all digits → GitHub pull request number + elif [[ $REF =~ ^[0-9]+$ ]]; then + git -C "$REPO_DIR" fetch origin "pull/$REF/head:pr-$REF" + MERGE_REF="pr-$REF" + + # Case C: branch name + else + git -C "$REPO_DIR" fetch origin "refs/heads/$REF:$REF" + MERGE_REF="$REF" + fi + + # Capture the resolved SHA for this ref before merging + MERGE_SHA=$(git -C "$REPO_DIR" rev-parse "$MERGE_REF") + + # Append merge ref line to manifest + printf '%s\t%s\n' "$MERGE_SHA" "$REF" >> "$PINS_DIR/manifest" + + # Use explicit merge message so record and replay produce identical commits + MERGE_MSG="Merge $REF into wip" + + # Merge by SHA (not named ref or FETCH_HEAD) so conflict marker lines + # (>>>>>>> ) are identical between record and replay. Both use the + # same pinned SHA, so counted resolutions apply with correct line counts. + if ! git -C "$REPO_DIR" merge --no-ff -m "$MERGE_MSG" "$MERGE_SHA"; then + # Capture conflicted file list BEFORE resolution + mapfile -t CONFLICTED < <(git -C "$REPO_DIR" diff --name-only --diff-filter=U) + + # Resolve conflicted files with AI Coworker (parallel, hunks-only) + PIDS=() + for FILE in "${CONFLICTED[@]}"; do + resolve_conflict "$REPO_DIR/$FILE" "$FILE" \ + > "$REPO_DIR/${FILE}.resolved" & + PIDS+=($!) 
+ done + + # Wait for all resolutions and check exit codes + for i in "${!PIDS[@]}"; do + if ! wait "${PIDS[$i]}"; then + echo "ERROR: AI Coworker failed for ${CONFLICTED[$i]}" >&2 + exit 1 + fi + done + + # Validate, apply resolutions, and collect per-file diffs + for FILE in "${CONFLICTED[@]}"; do + if [ ! -s "$REPO_DIR/${FILE}.resolved" ]; then + echo "ERROR: AI Coworker returned empty resolution for $FILE" >&2 + exit 1 + fi + if grep -q '<<<<<<<' "$REPO_DIR/${FILE}.resolved"; then + echo "ERROR: Conflict markers remain in $FILE after resolution" >&2 + exit 1 + fi + + mv "$REPO_DIR/${FILE}.resolved" "$REPO_DIR/$FILE" + git -C "$REPO_DIR" add "$FILE" + + # Append per-file resolution with path header (written by resolve_conflict) + printf -- '--- %s\n' "$FILE" >> "$PINS_DIR/res-${MERGE_IDX}.resolution" + cat "$REPO_DIR/${FILE}.resolution" >> "$PINS_DIR/res-${MERGE_IDX}.resolution" + rm "$REPO_DIR/${FILE}.resolution" + done + + # Overwrite MERGE_MSG so merge --continue uses our deterministic message + echo "$MERGE_MSG" > "$REPO_DIR/.git/MERGE_MSG" + GIT_EDITOR=true git -C "$REPO_DIR" merge --continue + fi +done + +bash "$FORK_SCRIPTS_DIR/patch.sh" "$REPO_DIR" "$MERGE_IDX" + +# Restore and apply local patches +if [ -n "${LOCAL_PATCHES_TMP:-}" ]; then + cp "$LOCAL_PATCHES_TMP"/local-*.patch "$PINS_DIR/" + rm -rf "$LOCAL_PATCHES_TMP" + + apply_local_patches "$REPO_DIR" "$PINS_DIR" || { + echo "Upstream changes may have invalidated it. Edit or remove the patch and re-record." 
>&2 + exit 1 + } +fi + +# Write HEAD file +HEAD_SHA=$(git -C "$REPO_DIR" rev-parse HEAD) +printf '%s\n' "$HEAD_SHA" > "$PINS_DIR/HEAD" + +# Add fork remote for pushing (SSH for auth), if configured +FORK_REMOTE=$(fork_url "$DEV_DIR" 2>/dev/null) || true +if [ -n "${FORK_REMOTE:-}" ]; then + git -C "$REPO_DIR" remote add fork "$FORK_REMOTE" +fi + +# Regenerate fork workspace entries in pnpm-workspace.yaml +sync_workspace_yaml + +LOCAL_PATCH_COUNT=$(count_glob "$PINS_DIR"/local-*.patch) +RESOLUTION_COUNT=$(count_glob "$PINS_DIR"/res-*.resolution) + +echo "Pins recorded in $PINS_DIR/" +echo " BASE=$BASE_SHA ($DEFAULT_BRANCH)" +echo " Merges: $MERGE_IDX ref(s)" +if [ "$RESOLUTION_COUNT" -gt 0 ]; then + echo " Resolutions: $RESOLUTION_COUNT merge step(s) with conflicts" +else + echo " Resolutions: none (no conflicts)" +fi +if [ "$LOCAL_PATCH_COUNT" -gt 0 ]; then + echo " Local patches: $LOCAL_PATCH_COUNT" +fi +echo " HEAD=$HEAD_SHA" diff --git a/fork-scripts/replay-all.sh b/fork-scripts/replay-all.sh new file mode 100644 index 0000000..c656c13 --- /dev/null +++ b/fork-scripts/replay-all.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Replay all managed fork directories from their pins. 
+# Usage: fork-scripts/replay-all.sh + +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +while IFS= read -r dev_dir; do + bash "$FORK_SCRIPTS_DIR/replay.sh" "$dev_dir" +done < <(discover_fork_dirs) diff --git a/fork-scripts/replay.sh b/fork-scripts/replay.sh new file mode 100644 index 0000000..af17015 --- /dev/null +++ b/fork-scripts/replay.sh @@ -0,0 +1,102 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Usage: fork-scripts/replay.sh <fork-dir> +# Deterministic replay from manifest + counted resolutions + local patches + +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +DEV_DIR="${1:?Usage: fork-scripts/replay.sh <fork-dir>}" +DEV_DIR=$(cd "$DEV_DIR" && pwd) + +REPO_DIR=$(repo_dir "$DEV_DIR") +PINS_DIR=$(pins_dir "$DEV_DIR") +UPSTREAM=$(upstream_url "$DEV_DIR") +FORK_NAME=$(basename "$DEV_DIR") + +# Skip if already cloned +if [ -d "$REPO_DIR" ]; then + echo "$FORK_NAME: clone already exists, skipping (remove it to redo setup)" >&2 + exit 0 +fi + +# Skip if no pins to replay +MANIFEST=$(manifest_file "$PINS_DIR" 2>/dev/null) || { + echo "$FORK_NAME: no pins to replay, skipping" >&2 + exit 0 +} + +trap 'rm -rf "$REPO_DIR"; echo "FAILED — cleaned up $FORK_NAME clone" >&2' ERR + +# Read base SHA from first line of manifest +BASE_SHA=$(head -1 "$MANIFEST" | cut -d$'\t' -f1) +git clone --filter=blob:none "$UPSTREAM" "$REPO_DIR" + +# Match record.sh's conflict marker style and SHA abbreviation for identical markers +git -C "$REPO_DIR" config merge.conflictStyle diff3 +git -C "$REPO_DIR" config core.abbrev 40 + +git -C "$REPO_DIR" checkout "$BASE_SHA" +git -C "$REPO_DIR" checkout -b wip + +# Replay merges from manifest (skip line 1 = base) +MERGE_IDX=0 +while IFS=$'\t' read -r SHA REF_NAME; do + MERGE_IDX=$((MERGE_IDX + 1)) + echo "Replaying merge $MERGE_IDX: $REF_NAME ($SHA)" >&2 + + deterministic_env "$MERGE_IDX" + + git -C "$REPO_DIR" fetch origin "$SHA" + + # Use explicit merge message matching record.sh for deterministic commits + 
MERGE_MSG="Merge $REF_NAME into wip" + + # Merge by SHA (matching record.sh) so conflict markers are identical + if ! git -C "$REPO_DIR" merge --no-ff -m "$MERGE_MSG" "$SHA"; then + RES_FILE="$PINS_DIR/res-${MERGE_IDX}.resolution" + if [ ! -f "$RES_FILE" ]; then + if [ -f "$PINS_DIR/res-${MERGE_IDX}.diff" ]; then + echo "ERROR: Legacy diff format detected (res-${MERGE_IDX}.diff)." >&2 + echo "Re-record with: pnpm fork:record $FORK_NAME" >&2 + exit 1 + fi + echo "ERROR: Merge $MERGE_IDX ($REF_NAME) has conflicts but no resolution file." >&2 + echo "Re-record with: pnpm fork:record $FORK_NAME" >&2 + exit 1 + fi + + # Apply counted resolutions (positional — no sed stripping or patch needed) + apply_resolution_file "$REPO_DIR" "$RES_FILE" + + # Stage resolved files and complete the merge + git -C "$REPO_DIR" add -A + echo "$MERGE_MSG" > "$REPO_DIR/.git/MERGE_MSG" + GIT_EDITOR=true git -C "$REPO_DIR" merge --continue + fi +done < <(tail -n +2 "$MANIFEST") + +bash "$FORK_SCRIPTS_DIR/patch.sh" "$REPO_DIR" "$(merge_count "$PINS_DIR")" + +apply_local_patches "$REPO_DIR" "$PINS_DIR" || { + echo "Re-record with: pnpm fork:record $FORK_NAME" >&2 + exit 1 +} + +# Verify HEAD SHA matches pins/HEAD +ACTUAL=$(git -C "$REPO_DIR" rev-parse HEAD) +EXPECTED=$(pinned_head "$PINS_DIR") +if [ "$ACTUAL" != "$EXPECTED" ]; then + echo "FAIL: replay HEAD ($ACTUAL) != pinned HEAD ($EXPECTED)" >&2 + echo "Pins are stale or corrupted. Re-record with 'pnpm fork:record $FORK_NAME'." 
>&2 + exit 1 +fi + +# Add fork remote for pushing (SSH for auth), if configured +FORK_REMOTE=$(fork_url "$DEV_DIR" 2>/dev/null) || true +if [ -n "${FORK_REMOTE:-}" ]; then + git -C "$REPO_DIR" remote add fork "$FORK_REMOTE" +fi + +echo "OK — replay HEAD matches pinned HEAD ($EXPECTED)" diff --git a/fork-scripts/reset.sh b/fork-scripts/reset.sh new file mode 100644 index 0000000..c46f7cd --- /dev/null +++ b/fork-scripts/reset.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Remove a fork clone and its pins (full reset). +# Usage: fork-scripts/reset.sh <fork-dir> + +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +DEV_DIR="${1:?Usage: fork-scripts/reset.sh <fork-dir>}" +DEV_DIR=$(cd "$DEV_DIR" && pwd) + +bash "$FORK_SCRIPTS_DIR/clean.sh" "$DEV_DIR" +rm -rf "$(pins_dir "$DEV_DIR")" diff --git a/fork-scripts/save.sh b/fork-scripts/save.sh new file mode 100644 index 0000000..b2a654e --- /dev/null +++ b/fork-scripts/save.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Usage: fork-scripts/save.sh <fork-dir> [description] +# Captures local work in the fork clone as a patch file in pins/. +# description: short label for the patch (default: "local") + +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +DEV_DIR="${1:?Usage: fork-scripts/save.sh <fork-dir> [description]}" +DEV_DIR=$(cd "$DEV_DIR" && pwd) +shift + +REPO_DIR=$(repo_dir "$DEV_DIR") +PINS_DIR=$(pins_dir "$DEV_DIR") +FORK_NAME=$(basename "$DEV_DIR") + +DESCRIPTION="${1:-local}" +# Sanitize description for use in filename (fallback if nothing alphanumeric remains) +DESCRIPTION=$(printf '%s' "$DESCRIPTION" | tr -c '[:alnum:]-_' '-' | sed 's/--*/-/g; s/^-//; s/-$//') +[ -z "$DESCRIPTION" ] && DESCRIPTION="local" + +# Check prerequisites +if [ ! -d "$REPO_DIR" ]; then + echo "ERROR: $FORK_NAME clone does not exist. Run 'pnpm fork:record $FORK_NAME' first." >&2 + exit 1 +fi + +PINNED_HEAD=$(pinned_head "$PINS_DIR" 2>/dev/null) || { + echo "ERROR: No pins found. 
Run 'pnpm fork:record $FORK_NAME' first." >&2 + exit 1 +} + +CURRENT_BRANCH=$(git -C "$REPO_DIR" branch --show-current) +if [ "$CURRENT_BRANCH" != "wip" ]; then + echo "ERROR: Expected to be on 'wip' branch, but on '$CURRENT_BRANCH'." >&2 + exit 1 +fi + +# Check for changes (committed + uncommitted + staged + untracked) relative to pinned HEAD +if git -C "$REPO_DIR" diff "$PINNED_HEAD" --quiet 2>/dev/null \ + && git -C "$REPO_DIR" diff --cached "$PINNED_HEAD" --quiet 2>/dev/null \ + && [ -z "$(git -C "$REPO_DIR" ls-files --others --exclude-standard 2>/dev/null)" ]; then + echo "No changes to save (working tree matches pinned HEAD)." + exit 0 +fi + +# Count existing local patches to find the pre-local-patches base state. +# Local patches are linear commits on top of post-patch.sh, so PINNED_HEAD~N +# gives us the base before any local patches were applied. +EXISTING=$(count_glob "$PINS_DIR"/local-*.patch) +if [ "$EXISTING" -gt 0 ]; then + PATCH_BASE=$(git -C "$REPO_DIR" rev-parse "${PINNED_HEAD}~${EXISTING}" 2>/dev/null) || { + echo "ERROR: Cannot compute base state. Pins may be corrupted." >&2 + echo "Re-record with: pnpm fork:record $FORK_NAME" >&2 + exit 1 + } +else + PATCH_BASE="$PINNED_HEAD" +fi + +NEXT_NUM=$(printf '%03d' $((EXISTING + 1))) +PATCH_NAME="local-${NEXT_NUM}-${DESCRIPTION}" + +# Stage everything so untracked files are included in the diff +git -C "$REPO_DIR" add -A +# Generate patch: incremental changes relative to pinned HEAD (not base) +git -C "$REPO_DIR" diff --cached "$PINNED_HEAD" > "$PINS_DIR/${PATCH_NAME}.patch" + +# Verify patch is non-empty +if [ ! -s "$PINS_DIR/${PATCH_NAME}.patch" ]; then + rm -f "$PINS_DIR/${PATCH_NAME}.patch" + echo "No diff to save." 
+ exit 0 +fi + +# Rebuild deterministic state from base (before any local patches) +git -C "$REPO_DIR" reset --hard "$PATCH_BASE" + +apply_local_patches "$REPO_DIR" "$PINS_DIR" || { + # Remove the newly-written patch so a retry doesn't hit the same failure + rm -f "$PINS_DIR/${PATCH_NAME}.patch" + echo "Earlier patches may have changed the base. Edit or reorder patches." >&2 + exit 1 +} + +# Update HEAD +git -C "$REPO_DIR" rev-parse HEAD > "$PINS_DIR/HEAD" + +echo "Saved ${PATCH_NAME}.patch. Commit pins/ to share." diff --git a/fork-scripts/status-all.sh b/fork-scripts/status-all.sh new file mode 100644 index 0000000..ecaffb5 --- /dev/null +++ b/fork-scripts/status-all.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Check status of all managed fork directories. +# Exits non-zero if any fork has pending work. +# Usage: fork-scripts/status-all.sh + +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +EXIT=0 +while IFS= read -r dev_dir; do + bash "$FORK_SCRIPTS_DIR/status.sh" "$dev_dir" || EXIT=1 +done < <(discover_fork_dirs) +exit $EXIT diff --git a/fork-scripts/status.sh b/fork-scripts/status.sh new file mode 100644 index 0000000..a45f573 --- /dev/null +++ b/fork-scripts/status.sh @@ -0,0 +1,52 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Check whether a fork clone is safe to wipe. +# Exit 0 → safe (not cloned, or matches pins exactly) +# Exit 1 → has custom work (any changes vs pinned commit, diverged HEAD, or no pins to compare) +# Usage: fork-scripts/status.sh <fork-dir> + +# shellcheck source=lib.sh +source "$(cd "$(dirname "$0")" && pwd)/lib.sh" + +DEV_DIR="${1:?Usage: fork-scripts/status.sh <fork-dir>}" +DEV_DIR=$(cd "$DEV_DIR" && pwd) + +REPO_DIR=$(repo_dir "$DEV_DIR") +PINS_DIR=$(pins_dir "$DEV_DIR") +FORK_NAME=$(basename "$DEV_DIR") + +if [ ! 
-d "$REPO_DIR" ]; then + echo "$FORK_NAME: clone is not present" + exit 0 +fi + +PINNED=$(pinned_head "$PINS_DIR" 2>/dev/null) || { + echo "$FORK_NAME: clone exists but no pins — custom clone" + exit 1 +} + +ACTUAL=$(git -C "$REPO_DIR" rev-parse HEAD) + +if [ "$ACTUAL" != "$PINNED" ]; then + echo "$FORK_NAME: HEAD diverged from pinned HEAD:" + echo " pinned $PINNED" + echo " actual $ACTUAL" + git -C "$REPO_DIR" log --oneline "$PINNED..$ACTUAL" 2>/dev/null || true + exit 1 +fi + +# Compare pinned commit against working tree AND index. +# git diff catches unstaged changes; --cached catches staged-only changes +# (e.g. staged edits where the working tree was reverted). +if ! git -C "$REPO_DIR" diff "$PINNED" --quiet 2>/dev/null \ + || ! git -C "$REPO_DIR" diff --cached "$PINNED" --quiet 2>/dev/null \ + || [ -n "$(git -C "$REPO_DIR" ls-files --others --exclude-standard 2>/dev/null)" ]; then + echo "$FORK_NAME: clone has changes relative to pins:" + git -C "$REPO_DIR" diff "$PINNED" --stat 2>/dev/null || true + git -C "$REPO_DIR" diff --cached "$PINNED" --stat 2>/dev/null || true + git -C "$REPO_DIR" ls-files --others --exclude-standard 2>/dev/null || true + exit 1 +fi + +echo "$FORK_NAME: clone is clean (matches pins)" diff --git a/fork-scripts/tsgo-filter.sh b/fork-scripts/tsgo-filter.sh new file mode 100644 index 0000000..aafd873 --- /dev/null +++ b/fork-scripts/tsgo-filter.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash + +# tsgo wrapper that filters diagnostics from managed fork source files. +# +# Stack packages import fork .ts source directly for real-time type feedback +# across the fork/stack boundary. This means tsgo checks fork files under the +# stack's stricter tsconfig (verbatimModuleSyntax, noImplicitOverride, +# noUncheckedIndexedAccess) — rules forks may not follow. These aren't real +# integration errors, just tsconfig-strictness mismatches. +# +# This wrapper: +# 1. Detects all *-fork/ clone directories at repo root +# 2. 
If none are cloned, runs plain tsgo (no filtering needed) +# 3. Otherwise runs tsgo with noEmitOnError=false so fork diagnostics don't block emit +# 4. Reports only diagnostics from stack source files +# 5. Exits non-zero only on real stack errors + +set -euo pipefail + +ROOT="$(cd "$(dirname "$0")/.." && pwd)" + +# Build filter pattern from all cloned fork directories +FILTER_PARTS=() +for d in "$ROOT"/*-fork; do + [ -f "$d/config.json" ] || continue + clone_dir=$(jq -r '.cloneDir' "$d/config.json") + [ -d "$d/$clone_dir" ] && FILTER_PARTS+=("$(basename "$d")/$clone_dir/") +done + +# No managed repos cloned — run plain tsgo +if [ ${#FILTER_PARTS[@]} -eq 0 ]; then + exec pnpm tsgo +fi + +# Build AWK filter pattern (pipe-separated) +FILTER_PATTERN=$(printf '%s\n' "${FILTER_PARTS[@]}" | paste -sd'|') + +output=$(pnpm tsgo --noEmitOnError false 2>&1) || true + +# Filter out diagnostic blocks originating from fork paths. +# A diagnostic block = a non-indented line (the error) + subsequent indented lines (details). +filtered=$(printf '%s\n' "$output" | awk -v pat="$FILTER_PATTERN" ' + !/^[[:space:]]/ { skip = ($0 ~ pat) ? 
1 : 0 } + !skip { print } +') + +if printf '%s\n' "$filtered" | grep -q 'error TS'; then + printf '%s\n' "$filtered" + exit 1 +fi diff --git a/package.json b/package.json index c5cd4de..7d0968d 100644 --- a/package.json +++ b/package.json @@ -1,23 +1,26 @@ { "private": true, "scripts": { - "ccc:record": "bash ccc-dev/record.sh 359 releases/next releases/udt", - "ccc:status": "bash ccc-dev/status.sh", - "ccc:push": "bash ccc-dev/push.sh", - "ccc:clean": "bash ccc-dev/status.sh && rm -rf ccc-dev/ccc", - "ccc:reset": "pnpm ccc:clean && rm -rf ccc-dev/pins", - "build": "pnpm -r --filter !./apps/** --filter !./ccc-dev/ccc/** build", - "build:all": "pnpm -r --filter !./ccc-dev/ccc/** build", + "fork:record": "bash fork-scripts/record.sh", + "fork:status": "bash fork-scripts/status.sh", + "fork:status-all": "bash fork-scripts/status-all.sh", + "fork:push": "bash fork-scripts/push.sh", + "fork:save": "bash fork-scripts/save.sh", + "fork:clean": "bash fork-scripts/clean.sh", + "fork:clean-all": "bash fork-scripts/clean-all.sh", + "fork:reset": "bash fork-scripts/reset.sh", + "build": "pnpm -r --filter !./apps/** --filter '!./*-fork/**' build", + "build:all": "pnpm -r --filter '!./*-fork/**' build", "check": "pnpm clean:deep && pnpm install && pnpm lint && pnpm build:all && pnpm test:ci", "check:fresh": "rm pnpm-lock.yaml && pnpm run check", "check:ci": "CI=true pnpm run check", - "check:full": "pnpm ccc:clean; pnpm check:fresh && pnpm check:ci", + "check:full": "pnpm fork:clean-all; pnpm check:fresh && pnpm check:ci", "test": "vitest", "test:ci": "vitest run", "test:cov": "vitest run --coverage", - "lint": "pnpm -r --filter '!./ccc-dev/ccc/**' lint", - "clean": "rm -fr dist packages/*/dist apps/*/dist ccc-dev/ccc/packages/*/dist", - "clean:deep": "pnpm clean && rm -fr node_modules packages/*/node_modules apps/*/node_modules ccc-dev/ccc/packages/*/node_modules", + "lint": "pnpm -r --filter '!./*-fork/**' lint", + "clean": "rm -fr dist packages/*/dist apps/*/dist 
*-fork/*/packages/*/dist", + "clean:deep": "pnpm clean && rm -fr node_modules packages/*/node_modules apps/*/node_modules *-fork/*/packages/*/node_modules", "sync:template": "pnpm -r --filter !./apps/interface --filter !./packages/utils --filter !. -c exec 'for f in .npmignore tsconfig.json typedoc.json vitest.config.mts; do cp ../../packages/utils/$f .; done'", "change": "pnpm changeset", "version": "pnpm changeset version", @@ -42,8 +45,8 @@ "prettier-plugin-organize-imports": "^4.3.0", "typedoc": "0.28.7", "typescript": "^5.9.3", - "typescript-eslint": "^8.56.0", + "typescript-eslint": "^8.56.1", "vitest": "^3.2.4" }, - "packageManager": "pnpm@10.30.1+sha512.3590e550d5384caa39bd5c7c739f72270234b2f6059e13018f975c313b1eb9fefcc09714048765d4d9efe961382c312e624572c0420762bdc5d5940cdf9be73a" -} + "packageManager": "pnpm@10.30.2+sha512.36cdc707e7b7940a988c9c1ecf88d084f8514b5c3f085f53a2e244c2921d3b2545bc20dd4ebe1fc245feec463bb298aecea7a63ed1f7680b877dc6379d8d0cb4" +} \ No newline at end of file diff --git a/packages/core/package.json b/packages/core/package.json index f522f84..28f6039 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -31,7 +31,7 @@ "scripts": { "test": "vitest", "test:ci": "vitest run", - "build": "[ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo", + "build": "bash ../../fork-scripts/tsgo-filter.sh", "lint": "eslint ./src", "clean": "rm -fr dist", "clean:deep": "rm -fr dist node_modules" diff --git a/packages/dao/package.json b/packages/dao/package.json index 05f6509..2d036bb 100644 --- a/packages/dao/package.json +++ b/packages/dao/package.json @@ -31,7 +31,7 @@ "scripts": { "test": "vitest", "test:ci": "vitest run", - "build": "[ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo", + "build": "bash ../../fork-scripts/tsgo-filter.sh", "lint": "eslint ./src", "clean": "rm -fr dist", "clean:deep": "rm -fr dist node_modules" diff --git a/packages/order/package.json 
b/packages/order/package.json index 216200b..bdaa345 100644 --- a/packages/order/package.json +++ b/packages/order/package.json @@ -31,7 +31,7 @@ "scripts": { "test": "vitest", "test:ci": "vitest run", - "build": "[ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo", + "build": "bash ../../fork-scripts/tsgo-filter.sh", "lint": "eslint ./src", "clean": "rm -fr dist", "clean:deep": "rm -fr dist node_modules" diff --git a/packages/sdk/package.json b/packages/sdk/package.json index 0cc2b6a..dfc15ea 100644 --- a/packages/sdk/package.json +++ b/packages/sdk/package.json @@ -31,7 +31,7 @@ "scripts": { "test": "vitest", "test:ci": "vitest run", - "build": "[ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo", + "build": "bash ../../fork-scripts/tsgo-filter.sh", "lint": "eslint ./src", "clean": "rm -fr dist", "clean:deep": "rm -fr dist node_modules" diff --git a/packages/utils/package.json b/packages/utils/package.json index 824e71a..07f2a52 100644 --- a/packages/utils/package.json +++ b/packages/utils/package.json @@ -31,7 +31,7 @@ "scripts": { "test": "vitest", "test:ci": "vitest run", - "build": "[ -d ../../ccc-dev/ccc ] && exec bash ../../ccc-dev/tsgo-filter.sh || tsgo", + "build": "bash ../../fork-scripts/tsgo-filter.sh", "lint": "eslint ./src", "clean": "rm -fr dist", "clean:deep": "rm -fr dist node_modules" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7e44458..95513d4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -10,7 +10,7 @@ catalogs: specifier: ^24.8.1 version: 24.10.13 -pnpmfileChecksum: sha256-bpHTpFkBXc22BQj6RinbwnORUfuz+2Nea3ugya/XRbU= +pnpmfileChecksum: sha256-SZA+voq6zh4ZSCAUW2BMUyVEx+37xe76r5k7kD+Z3v4= importers: @@ -18,7 +18,7 @@ importers: devDependencies: '@anthropic-ai/claude-code': specifier: latest - version: 2.1.50 + version: 2.1.51 '@changesets/changelog-github': specifier: ^0.5.2 version: 0.5.2 @@ -30,7 +30,7 @@ importers: version: 9.39.3 '@typescript/native-preview': specifier: latest - 
version: 7.0.0-dev.20260222.1 + version: 7.0.0-dev.20260223.1 '@vitest/coverage-v8': specifier: 3.2.4 version: 3.2.4(vitest@3.2.4(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) @@ -50,8 +50,8 @@ importers: specifier: ^5.9.3 version: 5.9.3 typescript-eslint: - specifier: ^8.56.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + specifier: ^8.56.1 + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^3.2.4 version: 3.2.4(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) @@ -94,7 +94,7 @@ importers: dependencies: '@ckb-ccc/core': specifier: workspace:* - version: link:../../ccc-dev/ccc/packages/core + version: link:../../ccc-fork/ccc/packages/core '@ickb/core': specifier: workspace:* version: link:../../packages/core @@ -116,7 +116,7 @@ importers: dependencies: '@ckb-ccc/ccc': specifier: workspace:* - version: link:../../ccc-dev/ccc/packages/ccc + version: link:../../ccc-fork/ccc/packages/ccc '@ckb-lumos/base': specifier: ^0.23.0 version: 0.23.0 @@ -162,7 +162,7 @@ importers: version: 9.39.3 '@tailwindcss/vite': specifier: ^4.1.14 - version: 4.2.0(vite@6.4.1(@types/node@22.19.11)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) + version: 4.2.1(vite@6.4.1(@types/node@22.19.11)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2)) '@types/node': specifier: ^22.18.11 version: 22.19.11 @@ -201,13 +201,13 @@ importers: version: 0.6.14(prettier-plugin-organize-imports@4.3.0(prettier@3.8.1)(typescript@5.9.3))(prettier@3.8.1) tailwindcss: specifier: ^4.1.14 - version: 4.2.0 + version: 4.2.1 typescript: specifier: ^5.9.3 version: 5.9.3 typescript-eslint: specifier: ^8.46.1 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) vite: specifier: ^6.4.0 version: 6.4.1(@types/node@22.19.11)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) @@ -216,7 +216,7 @@ importers: dependencies: '@ckb-ccc/core': specifier: workspace:* - version: 
link:../../ccc-dev/ccc/packages/core + version: link:../../ccc-fork/ccc/packages/core '@ickb/core': specifier: workspace:* version: link:../../packages/core @@ -256,7 +256,7 @@ importers: specifier: 'catalog:' version: 24.10.13 - ccc-dev/ccc/packages/ccc: + ccc-fork/ccc/packages/ccc: dependencies: '@ckb-ccc/eip6963': specifier: workspace:* @@ -315,9 +315,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/ckb-ccc: + ccc-fork/ccc/packages/ckb-ccc: dependencies: '@ckb-ccc/ccc': specifier: workspace:* @@ -352,9 +352,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/connector: + ccc-fork/ccc/packages/connector: dependencies: '@ckb-ccc/ccc': specifier: workspace:* @@ -389,9 +389,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/connector-react: + ccc-fork/ccc/packages/connector-react: dependencies: '@ckb-ccc/connector': specifier: workspace:* @@ -432,9 +432,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/core: + ccc-fork/ccc/packages/core: dependencies: '@joyid/ckb': specifier: ^1.1.2 @@ -499,12 +499,12 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^3.2.4 version: 3.2.4(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) - 
ccc-dev/ccc/packages/did-ckb: + ccc-fork/ccc/packages/did-ckb: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -539,18 +539,18 @@ importers: version: 4.3.0(prettier@3.8.1)(typescript@5.9.3) tsdown: specifier: 0.19.0-beta.3 - version: 0.19.0-beta.3(@typescript/native-preview@7.0.0-dev.20260222.1)(synckit@0.11.12)(typescript@5.9.3) + version: 0.19.0-beta.3(@typescript/native-preview@7.0.0-dev.20260223.1)(synckit@0.11.12)(typescript@5.9.3) typescript: specifier: ^5.9.2 version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^3.2.4 version: 3.2.4(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) - ccc-dev/ccc/packages/eip6963: + ccc-fork/ccc/packages/eip6963: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -585,9 +585,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/joy-id: + ccc-fork/ccc/packages/joy-id: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -628,9 +628,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/lumos-patches: + ccc-fork/ccc/packages/lumos-patches: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -683,9 +683,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/nip07: + ccc-fork/ccc/packages/nip07: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -720,9 +720,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 
8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/okx: + ccc-fork/ccc/packages/okx: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -763,9 +763,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/rei: + ccc-fork/ccc/packages/rei: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -800,9 +800,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/shell: + ccc-fork/ccc/packages/shell: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -852,9 +852,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/spore: + ccc-fork/ccc/packages/spore: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -898,12 +898,12 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^3.2.4 version: 3.2.4(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) - ccc-dev/ccc/packages/ssri: + ccc-fork/ccc/packages/ssri: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -941,9 +941,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/type-id: + ccc-fork/ccc/packages/type-id: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -972,18 +972,18 @@ importers: 
version: 4.3.0(prettier@3.8.1)(typescript@5.9.3) tsdown: specifier: 0.19.0-beta.3 - version: 0.19.0-beta.3(@typescript/native-preview@7.0.0-dev.20260222.1)(synckit@0.11.12)(typescript@5.9.3) + version: 0.19.0-beta.3(@typescript/native-preview@7.0.0-dev.20260223.1)(synckit@0.11.12)(typescript@5.9.3) typescript: specifier: ^5.9.2 version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^3.2.4 version: 3.2.4(@types/node@24.10.13)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) - ccc-dev/ccc/packages/udt: + ccc-fork/ccc/packages/udt: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -1024,9 +1024,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/uni-sat: + ccc-fork/ccc/packages/uni-sat: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -1061,9 +1061,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/utxo-global: + ccc-fork/ccc/packages/utxo-global: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -1098,9 +1098,9 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - ccc-dev/ccc/packages/xverse: + ccc-fork/ccc/packages/xverse: dependencies: '@ckb-ccc/core': specifier: workspace:* @@ -1138,13 +1138,13 @@ importers: version: 5.9.3 typescript-eslint: specifier: ^8.41.0 - version: 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + version: 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) packages/core: dependencies: '@ckb-ccc/core': specifier: 
workspace:* - version: link:../../ccc-dev/ccc/packages/core + version: link:../../ccc-fork/ccc/packages/core '@ickb/dao': specifier: workspace:* version: link:../dao @@ -1156,7 +1156,7 @@ importers: dependencies: '@ckb-ccc/core': specifier: workspace:* - version: link:../../ccc-dev/ccc/packages/core + version: link:../../ccc-fork/ccc/packages/core '@ickb/utils': specifier: workspace:* version: link:../utils @@ -1165,7 +1165,7 @@ importers: dependencies: '@ckb-ccc/core': specifier: workspace:* - version: link:../../ccc-dev/ccc/packages/core + version: link:../../ccc-fork/ccc/packages/core '@ickb/utils': specifier: workspace:* version: link:../utils @@ -1174,7 +1174,7 @@ importers: dependencies: '@ckb-ccc/core': specifier: workspace:* - version: link:../../ccc-dev/ccc/packages/core + version: link:../../ccc-fork/ccc/packages/core '@ickb/core': specifier: workspace:* version: link:../core @@ -1192,7 +1192,7 @@ importers: dependencies: '@ckb-ccc/core': specifier: workspace:* - version: link:../../ccc-dev/ccc/packages/core + version: link:../../ccc-fork/ccc/packages/core packages: @@ -1203,8 +1203,8 @@ packages: resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} engines: {node: '>=6.0.0'} - '@anthropic-ai/claude-code@2.1.50': - resolution: {integrity: sha512-urrhY4IRuLHFoEYb6pjRy6sbDE8TH886zwRvIAPS4Tz51MeGVomZet4EajqxX6+IOKbJNbf4IHL3fTzI0vcKXA==} + '@anthropic-ai/claude-code@2.1.51': + resolution: {integrity: sha512-9mNl3C+6xyj3QmUGzj9TDVntVogtOOKyps/d14k1SYLsyM5S/lJlMthlapDZ1E2EHipXSxDMN6IspSsdtPHVDA==} engines: {node: '>=18.0.0'} hasBin: true @@ -1698,8 +1698,8 @@ packages: resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/eslintrc@3.3.3': - resolution: {integrity: sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==} + 
'@eslint/eslintrc@3.3.4': + resolution: {integrity: sha512-4h4MVF8pmBsncB60r0wSJiIeUKTSD4m7FmTFThG8RHlsg9ajqckLm9OraguFGZE4vVdpiI1Q4+hFnisopmG6gQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@eslint/js@9.39.3': @@ -2280,69 +2280,69 @@ packages: '@shikijs/vscode-textmate@10.0.2': resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==} - '@tailwindcss/node@4.2.0': - resolution: {integrity: sha512-Yv+fn/o2OmL5fh/Ir62VXItdShnUxfpkMA4Y7jdeC8O81WPB8Kf6TT6GSHvnqgSwDzlB5iT7kDpeXxLsUS0T6Q==} + '@tailwindcss/node@4.2.1': + resolution: {integrity: sha512-jlx6sLk4EOwO6hHe1oCGm1Q4AN/s0rSrTTPBGPM0/RQ6Uylwq17FuU8IeJJKEjtc6K6O07zsvP+gDO6MMWo7pg==} - '@tailwindcss/oxide-android-arm64@4.2.0': - resolution: {integrity: sha512-F0QkHAVaW/JNBWl4CEKWdZ9PMb0khw5DCELAOnu+RtjAfx5Zgw+gqCHFvqg3AirU1IAd181fwOtJQ5I8Yx5wtw==} + '@tailwindcss/oxide-android-arm64@4.2.1': + resolution: {integrity: sha512-eZ7G1Zm5EC8OOKaesIKuw77jw++QJ2lL9N+dDpdQiAB/c/B2wDh0QPFHbkBVrXnwNugvrbJFk1gK2SsVjwWReg==} engines: {node: '>= 20'} cpu: [arm64] os: [android] - '@tailwindcss/oxide-darwin-arm64@4.2.0': - resolution: {integrity: sha512-I0QylkXsBsJMZ4nkUNSR04p6+UptjcwhcVo3Zu828ikiEqHjVmQL9RuQ6uT/cVIiKpvtVA25msu/eRV97JeNSA==} + '@tailwindcss/oxide-darwin-arm64@4.2.1': + resolution: {integrity: sha512-q/LHkOstoJ7pI1J0q6djesLzRvQSIfEto148ppAd+BVQK0JYjQIFSK3JgYZJa+Yzi0DDa52ZsQx2rqytBnf8Hw==} engines: {node: '>= 20'} cpu: [arm64] os: [darwin] - '@tailwindcss/oxide-darwin-x64@4.2.0': - resolution: {integrity: sha512-6TmQIn4p09PBrmnkvbYQ0wbZhLtbaksCDx7Y7R3FYYx0yxNA7xg5KP7dowmQ3d2JVdabIHvs3Hx4K3d5uCf8xg==} + '@tailwindcss/oxide-darwin-x64@4.2.1': + resolution: {integrity: sha512-/f/ozlaXGY6QLbpvd/kFTro2l18f7dHKpB+ieXz+Cijl4Mt9AI2rTrpq7V+t04nK+j9XBQHnSMdeQRhbGyt6fw==} engines: {node: '>= 20'} cpu: [x64] os: [darwin] - '@tailwindcss/oxide-freebsd-x64@4.2.0': - resolution: {integrity: 
sha512-qBudxDvAa2QwGlq9y7VIzhTvp2mLJ6nD/G8/tI70DCDoneaUeLWBJaPcbfzqRIWraj+o969aDQKvKW9dvkUizw==} + '@tailwindcss/oxide-freebsd-x64@4.2.1': + resolution: {integrity: sha512-5e/AkgYJT/cpbkys/OU2Ei2jdETCLlifwm7ogMC7/hksI2fC3iiq6OcXwjibcIjPung0kRtR3TxEITkqgn0TcA==} engines: {node: '>= 20'} cpu: [x64] os: [freebsd] - '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.0': - resolution: {integrity: sha512-7XKkitpy5NIjFZNUQPeUyNJNJn1CJeV7rmMR+exHfTuOsg8rxIO9eNV5TSEnqRcaOK77zQpsyUkBWmPy8FgdSg==} + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.1': + resolution: {integrity: sha512-Uny1EcVTTmerCKt/1ZuKTkb0x8ZaiuYucg2/kImO5A5Y/kBz41/+j0gxUZl+hTF3xkWpDmHX+TaWhOtba2Fyuw==} engines: {node: '>= 20'} cpu: [arm] os: [linux] - '@tailwindcss/oxide-linux-arm64-gnu@4.2.0': - resolution: {integrity: sha512-Mff5a5Q3WoQR01pGU1gr29hHM1N93xYrKkGXfPw/aRtK4bOc331Ho4Tgfsm5WDGvpevqMpdlkCojT3qlCQbCpA==} + '@tailwindcss/oxide-linux-arm64-gnu@4.2.1': + resolution: {integrity: sha512-CTrwomI+c7n6aSSQlsPL0roRiNMDQ/YzMD9EjcR+H4f0I1SQ8QqIuPnsVp7QgMkC1Qi8rtkekLkOFjo7OlEFRQ==} engines: {node: '>= 20'} cpu: [arm64] os: [linux] libc: [glibc] - '@tailwindcss/oxide-linux-arm64-musl@4.2.0': - resolution: {integrity: sha512-XKcSStleEVnbH6W/9DHzZv1YhjE4eSS6zOu2eRtYAIh7aV4o3vIBs+t/B15xlqoxt6ef/0uiqJVB6hkHjWD/0A==} + '@tailwindcss/oxide-linux-arm64-musl@4.2.1': + resolution: {integrity: sha512-WZA0CHRL/SP1TRbA5mp9htsppSEkWuQ4KsSUumYQnyl8ZdT39ntwqmz4IUHGN6p4XdSlYfJwM4rRzZLShHsGAQ==} engines: {node: '>= 20'} cpu: [arm64] os: [linux] libc: [musl] - '@tailwindcss/oxide-linux-x64-gnu@4.2.0': - resolution: {integrity: sha512-/hlXCBqn9K6fi7eAM0RsobHwJYa5V/xzWspVTzxnX+Ft9v6n+30Pz8+RxCn7sQL/vRHHLS30iQPrHQunu6/vJA==} + '@tailwindcss/oxide-linux-x64-gnu@4.2.1': + resolution: {integrity: sha512-qMFzxI2YlBOLW5PhblzuSWlWfwLHaneBE0xHzLrBgNtqN6mWfs+qYbhryGSXQjFYB1Dzf5w+LN5qbUTPhW7Y5g==} engines: {node: '>= 20'} cpu: [x64] os: [linux] libc: [glibc] - '@tailwindcss/oxide-linux-x64-musl@4.2.0': - resolution: {integrity: 
sha512-lKUaygq4G7sWkhQbfdRRBkaq4LY39IriqBQ+Gk6l5nKq6Ay2M2ZZb1tlIyRNgZKS8cbErTwuYSor0IIULC0SHw==} + '@tailwindcss/oxide-linux-x64-musl@4.2.1': + resolution: {integrity: sha512-5r1X2FKnCMUPlXTWRYpHdPYUY6a1Ar/t7P24OuiEdEOmms5lyqjDRvVY1yy9Rmioh+AunQ0rWiOTPE8F9A3v5g==} engines: {node: '>= 20'} cpu: [x64] os: [linux] libc: [musl] - '@tailwindcss/oxide-wasm32-wasi@4.2.0': - resolution: {integrity: sha512-xuDjhAsFdUuFP5W9Ze4k/o4AskUtI8bcAGU4puTYprr89QaYFmhYOPfP+d1pH+k9ets6RoE23BXZM1X1jJqoyw==} + '@tailwindcss/oxide-wasm32-wasi@4.2.1': + resolution: {integrity: sha512-MGFB5cVPvshR85MTJkEvqDUnuNoysrsRxd6vnk1Lf2tbiqNlXpHYZqkqOQalydienEWOHHFyyuTSYRsLfxFJ2Q==} engines: {node: '>=14.0.0'} cpu: [wasm32] bundledDependencies: @@ -2353,24 +2353,24 @@ packages: - '@emnapi/wasi-threads' - tslib - '@tailwindcss/oxide-win32-arm64-msvc@4.2.0': - resolution: {integrity: sha512-2UU/15y1sWDEDNJXxEIrfWKC2Yb4YgIW5Xz2fKFqGzFWfoMHWFlfa1EJlGO2Xzjkq/tvSarh9ZTjvbxqWvLLXA==} + '@tailwindcss/oxide-win32-arm64-msvc@4.2.1': + resolution: {integrity: sha512-YlUEHRHBGnCMh4Nj4GnqQyBtsshUPdiNroZj8VPkvTZSoHsilRCwXcVKnG9kyi0ZFAS/3u+qKHBdDc81SADTRA==} engines: {node: '>= 20'} cpu: [arm64] os: [win32] - '@tailwindcss/oxide-win32-x64-msvc@4.2.0': - resolution: {integrity: sha512-CrFadmFoc+z76EV6LPG1jx6XceDsaCG3lFhyLNo/bV9ByPrE+FnBPckXQVP4XRkN76h3Fjt/a+5Er/oA/nCBvQ==} + '@tailwindcss/oxide-win32-x64-msvc@4.2.1': + resolution: {integrity: sha512-rbO34G5sMWWyrN/idLeVxAZgAKWrn5LiR3/I90Q9MkA67s6T1oB0xtTe+0heoBvHSpbU9Mk7i6uwJnpo4u21XQ==} engines: {node: '>= 20'} cpu: [x64] os: [win32] - '@tailwindcss/oxide@4.2.0': - resolution: {integrity: sha512-AZqQzADaj742oqn2xjl5JbIOzZB/DGCYF/7bpvhA8KvjUj9HJkag6bBuwZvH1ps6dfgxNHyuJVlzSr2VpMgdTQ==} + '@tailwindcss/oxide@4.2.1': + resolution: {integrity: sha512-yv9jeEFWnjKCI6/T3Oq50yQEOqmpmpfzG1hcZsAOaXFQPfzWprWrlHSdGPEF3WQTi8zu8ohC9Mh9J470nT5pUw==} engines: {node: '>= 20'} - '@tailwindcss/vite@4.2.0': - resolution: {integrity: 
sha512-da9mFCaHpoOgtQiWtDGIikTrSpUFBtIZCG3jy/u2BGV+l/X1/pbxzmIUxNt6JWm19N3WtGi4KlJdSH/Si83WOA==} + '@tailwindcss/vite@4.2.1': + resolution: {integrity: sha512-TBf2sJjYeb28jD2U/OhwdW0bbOsxkWPwQ7SrqGf9sVcoYwZj7rkXljroBO9wKBut9XnmQLXanuDUeqQK0lGg/w==} peerDependencies: vite: ^5.2.0 || ^6 || ^7 @@ -2421,8 +2421,8 @@ packages: '@types/lodash.isequal@4.5.8': resolution: {integrity: sha512-uput6pg4E/tj2LGxCZo9+y27JNyB2OZuuI/T5F+ylVDYuqICLG2/ktjxx0v6GvVntAf8TvEzeQLcV0ffRirXuA==} - '@types/lodash@4.17.23': - resolution: {integrity: sha512-RDvF6wTulMPjrNdCoYRC8gNR880JNGT8uB+REUpC2Ns4pRqQJhGz90wh7rgdXDPpCczF3VGktDuFGVnz8zP7HA==} + '@types/lodash@4.17.24': + resolution: {integrity: sha512-gIW7lQLZbue7lRSWEFql49QJJWThrTFFeIMJdp3eH4tKoxm1OvEPg02rm4wCCSHS0cL3/Fizimb35b7k8atwsQ==} '@types/node@12.20.55': resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} @@ -2453,102 +2453,102 @@ packages: '@types/ws@8.18.1': resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} - '@typescript-eslint/eslint-plugin@8.56.0': - resolution: {integrity: sha512-lRyPDLzNCuae71A3t9NEINBiTn7swyOhvUj3MyUOxb8x6g6vPEFoOU+ZRmGMusNC3X3YMhqMIX7i8ShqhT74Pw==} + '@typescript-eslint/eslint-plugin@8.56.1': + resolution: {integrity: sha512-Jz9ZztpB37dNC+HU2HI28Bs9QXpzCz+y/twHOwhyrIRdbuVDxSytJNDl6z/aAKlaRIwC7y8wJdkBv7FxYGgi0A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.56.0 + '@typescript-eslint/parser': ^8.56.1 eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/parser@8.56.0': - resolution: {integrity: sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==} + '@typescript-eslint/parser@8.56.1': + resolution: {integrity: sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==} engines: {node: ^18.18.0 || 
^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/project-service@8.56.0': - resolution: {integrity: sha512-M3rnyL1vIQOMeWxTWIW096/TtVP+8W3p/XnaFflhmcFp+U4zlxUxWj4XwNs6HbDeTtN4yun0GNTTDBw/SvufKg==} + '@typescript-eslint/project-service@8.56.1': + resolution: {integrity: sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/scope-manager@8.56.0': - resolution: {integrity: sha512-7UiO/XwMHquH+ZzfVCfUNkIXlp/yQjjnlYUyYz7pfvlK3/EyyN6BK+emDmGNyQLBtLGaYrTAI6KOw8tFucWL2w==} + '@typescript-eslint/scope-manager@8.56.1': + resolution: {integrity: sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/tsconfig-utils@8.56.0': - resolution: {integrity: sha512-bSJoIIt4o3lKXD3xmDh9chZcjCz5Lk8xS7Rxn+6l5/pKrDpkCwtQNQQwZ2qRPk7TkUYhrq3WPIHXOXlbXP0itg==} + '@typescript-eslint/tsconfig-utils@8.56.1': + resolution: {integrity: sha512-qOtCYzKEeyr3aR9f28mPJqBty7+DBqsdd63eO0yyDwc6vgThj2UjWfJIcsFeSucYydqcuudMOprZ+x1SpF3ZuQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/type-utils@8.56.0': - resolution: {integrity: sha512-qX2L3HWOU2nuDs6GzglBeuFXviDODreS58tLY/BALPC7iu3Fa+J7EOTwnX9PdNBxUI7Uh0ntP0YWGnxCkXzmfA==} + '@typescript-eslint/type-utils@8.56.1': + resolution: {integrity: sha512-yB/7dxi7MgTtGhZdaHCemf7PuwrHMenHjmzgUW1aJpO+bBU43OycnM3Wn+DdvDO/8zzA9HlhaJ0AUGuvri4oGg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/types@8.56.0': - resolution: {integrity: sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==} + 
'@typescript-eslint/types@8.56.1': + resolution: {integrity: sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript-eslint/typescript-estree@8.56.0': - resolution: {integrity: sha512-ex1nTUMWrseMltXUHmR2GAQ4d+WjkZCT4f+4bVsps8QEdh0vlBsaCokKTPlnqBFqqGaxilDNJG7b8dolW2m43Q==} + '@typescript-eslint/typescript-estree@8.56.1': + resolution: {integrity: sha512-qzUL1qgalIvKWAf9C1HpvBjif+Vm6rcT5wZd4VoMb9+Km3iS3Cv9DY6dMRMDtPnwRAFyAi7YXJpTIEXLvdfPxg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/utils@8.56.0': - resolution: {integrity: sha512-RZ3Qsmi2nFGsS+n+kjLAYDPVlrzf7UhTffrDIKr+h2yzAlYP/y5ZulU0yeDEPItos2Ph46JAL5P/On3pe7kDIQ==} + '@typescript-eslint/utils@8.56.1': + resolution: {integrity: sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 typescript: '>=4.8.4 <6.0.0' - '@typescript-eslint/visitor-keys@8.56.0': - resolution: {integrity: sha512-q+SL+b+05Ud6LbEE35qe4A99P+htKTKVbyiNEe45eCbJFyh/HVK9QXwlrbz+Q4L8SOW4roxSVwXYj4DMBT7Ieg==} + '@typescript-eslint/visitor-keys@8.56.1': + resolution: {integrity: sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@typescript/native-preview-darwin-arm64@7.0.0-dev.20260222.1': - resolution: {integrity: sha512-aXfK/s3QlbzXvZoFQ07KJDNx86q61nCITSreqLytnqjhjsXUUuMACsxjy/YsReLG2bdii+mHTA2WB2IB0LKKGA==} + '@typescript/native-preview-darwin-arm64@7.0.0-dev.20260223.1': + resolution: {integrity: sha512-uDvCfIGr3PR8iKBA6OCNq6w0b2WMvmtkS8KUZVy04CH8ieFsxChYStLiyFTDX4GZs9BtWKeth/7qGDZewY20sQ==} cpu: [arm64] os: [darwin] - '@typescript/native-preview-darwin-x64@7.0.0-dev.20260222.1': - resolution: {integrity: 
sha512-+bHnCeONX47pmVXTt6kuwxiLayDVqkLtshjqpqthXMWFFGk+1K/5ASbFEb2FumSABgB9hQ/xqkjj5QHUgGmbPg==} + '@typescript/native-preview-darwin-x64@7.0.0-dev.20260223.1': + resolution: {integrity: sha512-hOKQicSgd1DhFbsqdpC5fMgg0R46sYbbtVjfXgYTAHg/WO6whfZ2SfPy9IIzsQ/CXYUZuwoJElCnc9DTcd66+w==} cpu: [x64] os: [darwin] - '@typescript/native-preview-linux-arm64@7.0.0-dev.20260222.1': - resolution: {integrity: sha512-Usm9oJzLPqK7Z7echSSaHnmTXhr3knLXycoyVZwRrmWC33aX2efZb+XrdaV/SMhdYjYHCZ6mE60qcK4nEaXdng==} + '@typescript/native-preview-linux-arm64@7.0.0-dev.20260223.1': + resolution: {integrity: sha512-oRt0l3O/itqBEwd5rhfDAyziEzbSgWar1NShduK4n2mHWTHCI1I7mFsbSPbox2pdrqOwOr0QW8xu7xEgDWWRXA==} cpu: [arm64] os: [linux] - '@typescript/native-preview-linux-arm@7.0.0-dev.20260222.1': - resolution: {integrity: sha512-bavfJlI3JNH2F/7BX0drZ4JCSjLsCc2Dy5e2s6pc2wuLIzJ6hIjFaXIeB9TDbVYJE+MlLf6rtQF9nP9iSsgk9g==} + '@typescript/native-preview-linux-arm@7.0.0-dev.20260223.1': + resolution: {integrity: sha512-FVq6XjzqtLC1MVgQiumwpuW7Ug+S+WVEbvCUJQhrs8Szbf6fIFU/6+D6fOGCKzzo9SAD6zq2RNHtejBw74JSFA==} cpu: [arm] os: [linux] - '@typescript/native-preview-linux-x64@7.0.0-dev.20260222.1': - resolution: {integrity: sha512-JaOwNBJ2nA0C/MBfMXilrVNv+hUpIzs7JtpSgpOsXa3Hq7BL2rnoO6WMuCo8IHz7v8+Lr+MPJufXVEHfrOtf5A==} + '@typescript/native-preview-linux-x64@7.0.0-dev.20260223.1': + resolution: {integrity: sha512-qpFTW7q8Vvq1v/0bzfT8+D0wLjqydIP0qKlomrEGLlMnCCAnPodo2oLc2JCtacc40TSMZZARvhctTszCn1gWBA==} cpu: [x64] os: [linux] - '@typescript/native-preview-win32-arm64@7.0.0-dev.20260222.1': - resolution: {integrity: sha512-Mngr3qdeO7Ey3DtsHe4oqIghXYcjOr9pVQtKXbijfT0slRtVPeF1TmEb/eH+Z+LsY1SOW8c/Cig1G4NDXZnghw==} + '@typescript/native-preview-win32-arm64@7.0.0-dev.20260223.1': + resolution: {integrity: sha512-HHu63F8cDhgIlqFGBnqBVQn7HSiORxyT0M6yPzG4tG4gdzx+aFUdogbYily0nzN5b6NolQTrFfh3Q85UfHCHqg==} cpu: [arm64] os: [win32] - '@typescript/native-preview-win32-x64@7.0.0-dev.20260222.1': - resolution: {integrity: 
sha512-8Gps/FPcQiyoHeDhRY3RXhJSJwQQuUIP5lepYO3+2xvCPPeeNBoOueiLoGKxno4CYbS4O2fPdVmymboX0ApjZA==} + '@typescript/native-preview-win32-x64@7.0.0-dev.20260223.1': + resolution: {integrity: sha512-vSis36O5qT+vOYfei7GtfWWzvIoaNdmxa1zDypBKkGGCCHt/c5vp0pXls85+8jBVS11Ep6p7ECcHlt+R5CBaug==} cpu: [x64] os: [win32] - '@typescript/native-preview@7.0.0-dev.20260222.1': - resolution: {integrity: sha512-Uxon0iNhNqH/HkWvKmTmr7d5TJp6yomoyFHNpLIEghy91/DNWEtKMuLjNDYPFcoNxWpuJW9vuWTWeu3mcqT94Q==} + '@typescript/native-preview@7.0.0-dev.20260223.1': + resolution: {integrity: sha512-NEifR9F/0khbTQRztM4Yuxcj9dFuK9ubWIXJwLSmKMlncSp4u1fzRnlfv1vlNKKrXB7BUXoANFHpsM5BEXJ06w==} hasBin: true '@vitejs/plugin-basic-ssl@1.2.0': @@ -2778,8 +2778,8 @@ packages: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} - caniuse-lite@1.0.30001772: - resolution: {integrity: sha512-mIwLZICj+ntVTw4BT2zfp+yu/AqV6GMKfJVJMx3MwPxs+uk/uj2GLl2dH8LQbjiLDX66amCga5nKFyDgRR43kg==} + caniuse-lite@1.0.30001774: + resolution: {integrity: sha512-DDdwPGz99nmIEv216hKSgLD+D4ikHQHjBC/seF98N9CPqRX4M5mSxT9eTV6oyisnJcuzxtZy4n17yKKQYmYQOA==} cborg@4.5.8: resolution: {integrity: sha512-6/viltD51JklRhq4L7jC3zgy6gryuG5xfZ3kzpE+PravtyeQLeQmCYLREhQH7pWENg5pY4Yu/XCd6a7dKScVlw==} @@ -4029,8 +4029,8 @@ packages: resolution: {integrity: sha512-Bh7QjT8/SuKUIfObSXNHNSK6WHo6J1tHCqJsuaFDP7gP0fkzSfTxI8y85JrppZ0h8l0maIgc2tfuZQ6/t3GtnQ==} engines: {node: ^14.18.0 || >=16.0.0} - tailwindcss@4.2.0: - resolution: {integrity: sha512-yYzTZ4++b7fNYxFfpnberEEKu43w44aqDMNM9MHMmcKuCH7lL8jJ4yJ7LGHv7rSwiqM0nkiobF9I6cLlpS2P7Q==} + tailwindcss@4.2.1: + resolution: {integrity: sha512-/tBrSQ36vCleJkAOsy9kbNTgaxvGbyOamC30PRePTQe/o1MFwEKHQk4Cn7BNGaPtjp+PuUrByJehM1hgxfq4sw==} tapable@2.3.0: resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} @@ -4139,8 +4139,8 @@ packages: peerDependencies: typescript: 
5.0.x || 5.1.x || 5.2.x || 5.3.x || 5.4.x || 5.5.x || 5.6.x || 5.7.x || 5.8.x - typescript-eslint@8.56.0: - resolution: {integrity: sha512-c7toRLrotJ9oixgdW7liukZpsnq5CZ7PuKztubGYlNppuTqhIoWfhgHo/7EU0v06gS2l/x0i2NEFK1qMIf0rIg==} + typescript-eslint@8.56.1: + resolution: {integrity: sha512-U4lM6pjmBX7J5wk4szltF7I1cGBHXZopnAXCMXb3+fZ3B/0Z3hq3wS/CCUB2NZBNAExK92mCU2tEohWuwVMsDQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 || ^10.0.0 @@ -4385,7 +4385,7 @@ snapshots: '@jridgewell/gen-mapping': 0.3.13 '@jridgewell/trace-mapping': 0.3.31 - '@anthropic-ai/claude-code@2.1.50': + '@anthropic-ai/claude-code@2.1.51': optionalDependencies: '@img/sharp-darwin-arm64': 0.34.5 '@img/sharp-darwin-x64': 0.34.5 @@ -5080,7 +5080,7 @@ snapshots: dependencies: '@types/json-schema': 7.0.15 - '@eslint/eslintrc@3.3.3': + '@eslint/eslintrc@3.3.4': dependencies: ajv: 6.14.0 debug: 4.4.3 @@ -5539,7 +5539,7 @@ snapshots: '@shikijs/vscode-textmate@10.0.2': {} - '@tailwindcss/node@4.2.0': + '@tailwindcss/node@4.2.1': dependencies: '@jridgewell/remapping': 2.3.5 enhanced-resolve: 5.19.0 @@ -5547,64 +5547,64 @@ snapshots: lightningcss: 1.31.1 magic-string: 0.30.21 source-map-js: 1.2.1 - tailwindcss: 4.2.0 + tailwindcss: 4.2.1 - '@tailwindcss/oxide-android-arm64@4.2.0': + '@tailwindcss/oxide-android-arm64@4.2.1': optional: true - '@tailwindcss/oxide-darwin-arm64@4.2.0': + '@tailwindcss/oxide-darwin-arm64@4.2.1': optional: true - '@tailwindcss/oxide-darwin-x64@4.2.0': + '@tailwindcss/oxide-darwin-x64@4.2.1': optional: true - '@tailwindcss/oxide-freebsd-x64@4.2.0': + '@tailwindcss/oxide-freebsd-x64@4.2.1': optional: true - '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.0': + '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.1': optional: true - '@tailwindcss/oxide-linux-arm64-gnu@4.2.0': + '@tailwindcss/oxide-linux-arm64-gnu@4.2.1': optional: true - '@tailwindcss/oxide-linux-arm64-musl@4.2.0': + '@tailwindcss/oxide-linux-arm64-musl@4.2.1': optional: true - 
'@tailwindcss/oxide-linux-x64-gnu@4.2.0': + '@tailwindcss/oxide-linux-x64-gnu@4.2.1': optional: true - '@tailwindcss/oxide-linux-x64-musl@4.2.0': + '@tailwindcss/oxide-linux-x64-musl@4.2.1': optional: true - '@tailwindcss/oxide-wasm32-wasi@4.2.0': + '@tailwindcss/oxide-wasm32-wasi@4.2.1': optional: true - '@tailwindcss/oxide-win32-arm64-msvc@4.2.0': + '@tailwindcss/oxide-win32-arm64-msvc@4.2.1': optional: true - '@tailwindcss/oxide-win32-x64-msvc@4.2.0': + '@tailwindcss/oxide-win32-x64-msvc@4.2.1': optional: true - '@tailwindcss/oxide@4.2.0': + '@tailwindcss/oxide@4.2.1': optionalDependencies: - '@tailwindcss/oxide-android-arm64': 4.2.0 - '@tailwindcss/oxide-darwin-arm64': 4.2.0 - '@tailwindcss/oxide-darwin-x64': 4.2.0 - '@tailwindcss/oxide-freebsd-x64': 4.2.0 - '@tailwindcss/oxide-linux-arm-gnueabihf': 4.2.0 - '@tailwindcss/oxide-linux-arm64-gnu': 4.2.0 - '@tailwindcss/oxide-linux-arm64-musl': 4.2.0 - '@tailwindcss/oxide-linux-x64-gnu': 4.2.0 - '@tailwindcss/oxide-linux-x64-musl': 4.2.0 - '@tailwindcss/oxide-wasm32-wasi': 4.2.0 - '@tailwindcss/oxide-win32-arm64-msvc': 4.2.0 - '@tailwindcss/oxide-win32-x64-msvc': 4.2.0 - - '@tailwindcss/vite@4.2.0(vite@6.4.1(@types/node@22.19.11)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))': - dependencies: - '@tailwindcss/node': 4.2.0 - '@tailwindcss/oxide': 4.2.0 - tailwindcss: 4.2.0 + '@tailwindcss/oxide-android-arm64': 4.2.1 + '@tailwindcss/oxide-darwin-arm64': 4.2.1 + '@tailwindcss/oxide-darwin-x64': 4.2.1 + '@tailwindcss/oxide-freebsd-x64': 4.2.1 + '@tailwindcss/oxide-linux-arm-gnueabihf': 4.2.1 + '@tailwindcss/oxide-linux-arm64-gnu': 4.2.1 + '@tailwindcss/oxide-linux-arm64-musl': 4.2.1 + '@tailwindcss/oxide-linux-x64-gnu': 4.2.1 + '@tailwindcss/oxide-linux-x64-musl': 4.2.1 + '@tailwindcss/oxide-wasm32-wasi': 4.2.1 + '@tailwindcss/oxide-win32-arm64-msvc': 4.2.1 + '@tailwindcss/oxide-win32-x64-msvc': 4.2.1 + + '@tailwindcss/vite@4.2.1(vite@6.4.1(@types/node@22.19.11)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))': + 
dependencies: + '@tailwindcss/node': 4.2.1 + '@tailwindcss/oxide': 4.2.1 + tailwindcss: 4.2.1 vite: 6.4.1(@types/node@22.19.11)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2) '@tanstack/query-core@5.90.20': {} @@ -5661,9 +5661,9 @@ snapshots: '@types/lodash.isequal@4.5.8': dependencies: - '@types/lodash': 4.17.23 + '@types/lodash': 4.17.24 - '@types/lodash@4.17.23': {} + '@types/lodash@4.17.24': {} '@types/node@12.20.55': {} @@ -5695,14 +5695,14 @@ snapshots: dependencies: '@types/node': 24.10.13 - '@typescript-eslint/eslint-plugin@8.56.0(@typescript-eslint/parser@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/eslint-plugin@8.56.1(@typescript-eslint/parser@8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': dependencies: '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/scope-manager': 8.56.0 - '@typescript-eslint/type-utils': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/utils': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.56.0 + '@typescript-eslint/parser': 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.56.1 + '@typescript-eslint/type-utils': 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/utils': 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.56.1 eslint: 9.39.3(jiti@2.6.1) ignore: 7.0.5 natural-compare: 1.4.0 @@ -5711,41 +5711,41 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/parser@8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@typescript-eslint/scope-manager': 8.56.0 - '@typescript-eslint/types': 8.56.0 - 
'@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3) - '@typescript-eslint/visitor-keys': 8.56.0 + '@typescript-eslint/scope-manager': 8.56.1 + '@typescript-eslint/types': 8.56.1 + '@typescript-eslint/typescript-estree': 8.56.1(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.56.1 debug: 4.4.3 eslint: 9.39.3(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/project-service@8.56.0(typescript@5.9.3)': + '@typescript-eslint/project-service@8.56.1(typescript@5.9.3)': dependencies: - '@typescript-eslint/tsconfig-utils': 8.56.0(typescript@5.9.3) - '@typescript-eslint/types': 8.56.0 + '@typescript-eslint/tsconfig-utils': 8.56.1(typescript@5.9.3) + '@typescript-eslint/types': 8.56.1 debug: 4.4.3 typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/scope-manager@8.56.0': + '@typescript-eslint/scope-manager@8.56.1': dependencies: - '@typescript-eslint/types': 8.56.0 - '@typescript-eslint/visitor-keys': 8.56.0 + '@typescript-eslint/types': 8.56.1 + '@typescript-eslint/visitor-keys': 8.56.1 - '@typescript-eslint/tsconfig-utils@8.56.0(typescript@5.9.3)': + '@typescript-eslint/tsconfig-utils@8.56.1(typescript@5.9.3)': dependencies: typescript: 5.9.3 - '@typescript-eslint/type-utils@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/type-utils@8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': dependencies: - '@typescript-eslint/types': 8.56.0 - '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/types': 8.56.1 + '@typescript-eslint/typescript-estree': 8.56.1(typescript@5.9.3) + '@typescript-eslint/utils': 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) debug: 4.4.3 eslint: 9.39.3(jiti@2.6.1) ts-api-utils: 2.4.0(typescript@5.9.3) @@ -5753,16 +5753,16 @@ snapshots: transitivePeerDependencies: - supports-color - 
'@typescript-eslint/types@8.56.0': {} + '@typescript-eslint/types@8.56.1': {} - '@typescript-eslint/typescript-estree@8.56.0(typescript@5.9.3)': + '@typescript-eslint/typescript-estree@8.56.1(typescript@5.9.3)': dependencies: - '@typescript-eslint/project-service': 8.56.0(typescript@5.9.3) - '@typescript-eslint/tsconfig-utils': 8.56.0(typescript@5.9.3) - '@typescript-eslint/types': 8.56.0 - '@typescript-eslint/visitor-keys': 8.56.0 + '@typescript-eslint/project-service': 8.56.1(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.56.1(typescript@5.9.3) + '@typescript-eslint/types': 8.56.1 + '@typescript-eslint/visitor-keys': 8.56.1 debug: 4.4.3 - minimatch: 9.0.6 + minimatch: 10.2.2 semver: 7.7.4 tinyglobby: 0.2.15 ts-api-utils: 2.4.0(typescript@5.9.3) @@ -5770,52 +5770,52 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': + '@typescript-eslint/utils@8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3)': dependencies: '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.3(jiti@2.6.1)) - '@typescript-eslint/scope-manager': 8.56.0 - '@typescript-eslint/types': 8.56.0 - '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.56.1 + '@typescript-eslint/types': 8.56.1 + '@typescript-eslint/typescript-estree': 8.56.1(typescript@5.9.3) eslint: 9.39.3(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/visitor-keys@8.56.0': + '@typescript-eslint/visitor-keys@8.56.1': dependencies: - '@typescript-eslint/types': 8.56.0 + '@typescript-eslint/types': 8.56.1 eslint-visitor-keys: 5.0.1 - '@typescript/native-preview-darwin-arm64@7.0.0-dev.20260222.1': + '@typescript/native-preview-darwin-arm64@7.0.0-dev.20260223.1': optional: true - '@typescript/native-preview-darwin-x64@7.0.0-dev.20260222.1': + '@typescript/native-preview-darwin-x64@7.0.0-dev.20260223.1': optional: true - 
'@typescript/native-preview-linux-arm64@7.0.0-dev.20260222.1': + '@typescript/native-preview-linux-arm64@7.0.0-dev.20260223.1': optional: true - '@typescript/native-preview-linux-arm@7.0.0-dev.20260222.1': + '@typescript/native-preview-linux-arm@7.0.0-dev.20260223.1': optional: true - '@typescript/native-preview-linux-x64@7.0.0-dev.20260222.1': + '@typescript/native-preview-linux-x64@7.0.0-dev.20260223.1': optional: true - '@typescript/native-preview-win32-arm64@7.0.0-dev.20260222.1': + '@typescript/native-preview-win32-arm64@7.0.0-dev.20260223.1': optional: true - '@typescript/native-preview-win32-x64@7.0.0-dev.20260222.1': + '@typescript/native-preview-win32-x64@7.0.0-dev.20260223.1': optional: true - '@typescript/native-preview@7.0.0-dev.20260222.1': + '@typescript/native-preview@7.0.0-dev.20260223.1': optionalDependencies: - '@typescript/native-preview-darwin-arm64': 7.0.0-dev.20260222.1 - '@typescript/native-preview-darwin-x64': 7.0.0-dev.20260222.1 - '@typescript/native-preview-linux-arm': 7.0.0-dev.20260222.1 - '@typescript/native-preview-linux-arm64': 7.0.0-dev.20260222.1 - '@typescript/native-preview-linux-x64': 7.0.0-dev.20260222.1 - '@typescript/native-preview-win32-arm64': 7.0.0-dev.20260222.1 - '@typescript/native-preview-win32-x64': 7.0.0-dev.20260222.1 + '@typescript/native-preview-darwin-arm64': 7.0.0-dev.20260223.1 + '@typescript/native-preview-darwin-x64': 7.0.0-dev.20260223.1 + '@typescript/native-preview-linux-arm': 7.0.0-dev.20260223.1 + '@typescript/native-preview-linux-arm64': 7.0.0-dev.20260223.1 + '@typescript/native-preview-linux-x64': 7.0.0-dev.20260223.1 + '@typescript/native-preview-win32-arm64': 7.0.0-dev.20260223.1 + '@typescript/native-preview-win32-x64': 7.0.0-dev.20260223.1 '@vitejs/plugin-basic-ssl@1.2.0(vite@6.4.1(@types/node@22.19.11)(jiti@2.6.1)(lightningcss@1.31.1)(yaml@2.8.2))': dependencies: @@ -6026,7 +6026,7 @@ snapshots: browserslist@4.28.1: dependencies: baseline-browser-mapping: 2.10.0 - caniuse-lite: 1.0.30001772 + 
caniuse-lite: 1.0.30001774 electron-to-chromium: 1.5.302 node-releases: 2.0.27 update-browserslist-db: 1.2.3(browserslist@4.28.1) @@ -6058,7 +6058,7 @@ snapshots: callsites@3.1.0: {} - caniuse-lite@1.0.30001772: {} + caniuse-lite@1.0.30001774: {} cborg@4.5.8: {} @@ -6302,7 +6302,7 @@ snapshots: '@eslint/config-array': 0.21.1 '@eslint/config-helpers': 0.4.2 '@eslint/core': 0.17.0 - '@eslint/eslintrc': 3.3.3 + '@eslint/eslintrc': 3.3.4 '@eslint/js': 9.39.3 '@eslint/plugin-kit': 0.4.1 '@humanfs/node': 0.16.7 @@ -7047,7 +7047,7 @@ snapshots: glob: 13.0.6 package-json-from-dist: 1.0.1 - rolldown-plugin-dts@0.20.0(@typescript/native-preview@7.0.0-dev.20260222.1)(rolldown@1.0.0-beta.58)(typescript@5.9.3): + rolldown-plugin-dts@0.20.0(@typescript/native-preview@7.0.0-dev.20260223.1)(rolldown@1.0.0-beta.58)(typescript@5.9.3): dependencies: '@babel/generator': 7.29.1 '@babel/parser': 7.29.0 @@ -7059,7 +7059,7 @@ snapshots: obug: 2.1.1 rolldown: 1.0.0-beta.58 optionalDependencies: - '@typescript/native-preview': 7.0.0-dev.20260222.1 + '@typescript/native-preview': 7.0.0-dev.20260223.1 typescript: 5.9.3 transitivePeerDependencies: - oxc-resolver @@ -7220,7 +7220,7 @@ snapshots: dependencies: '@pkgr/core': 0.2.9 - tailwindcss@4.2.0: {} + tailwindcss@4.2.1: {} tapable@2.3.0: {} @@ -7266,7 +7266,7 @@ snapshots: dependencies: typescript: 5.9.3 - tsdown@0.19.0-beta.3(@typescript/native-preview@7.0.0-dev.20260222.1)(synckit@0.11.12)(typescript@5.9.3): + tsdown@0.19.0-beta.3(@typescript/native-preview@7.0.0-dev.20260223.1)(synckit@0.11.12)(typescript@5.9.3): dependencies: ansis: 4.2.0 cac: 6.7.14 @@ -7277,7 +7277,7 @@ snapshots: obug: 2.1.1 picomatch: 4.0.3 rolldown: 1.0.0-beta.58 - rolldown-plugin-dts: 0.20.0(@typescript/native-preview@7.0.0-dev.20260222.1)(rolldown@1.0.0-beta.58)(typescript@5.9.3) + rolldown-plugin-dts: 0.20.0(@typescript/native-preview@7.0.0-dev.20260223.1)(rolldown@1.0.0-beta.58)(typescript@5.9.3) semver: 7.7.4 tinyexec: 1.0.2 tinyglobby: 0.2.15 @@ -7315,12 
+7315,12 @@ snapshots: typescript: 5.9.3 yaml: 2.8.2 - typescript-eslint@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3): + typescript-eslint@8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3): dependencies: - '@typescript-eslint/eslint-plugin': 8.56.0(@typescript-eslint/parser@8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/parser': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) - '@typescript-eslint/typescript-estree': 8.56.0(typescript@5.9.3) - '@typescript-eslint/utils': 8.56.0(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/eslint-plugin': 8.56.1(@typescript-eslint/parser@8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/parser': 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) + '@typescript-eslint/typescript-estree': 8.56.1(typescript@5.9.3) + '@typescript-eslint/utils': 8.56.1(eslint@9.39.3(jiti@2.6.1))(typescript@5.9.3) eslint: 9.39.3(jiti@2.6.1) typescript: 5.9.3 transitivePeerDependencies: diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index d2de38a..77c1b54 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,13 +1,15 @@ packages: - packages/* - apps/* - - ccc-dev/ccc/packages/* - - "!ccc-dev/ccc/packages/demo" - - "!ccc-dev/ccc/packages/docs" - - "!ccc-dev/ccc/packages/examples" - - "!ccc-dev/ccc/packages/faucet" - - "!ccc-dev/ccc/packages/playground" - - "!ccc-dev/ccc/packages/tests" + # @generated begin fork-workspaces + - ccc-fork/ccc/packages/* + - "!ccc-fork/ccc/packages/demo" + - "!ccc-fork/ccc/packages/docs" + - "!ccc-fork/ccc/packages/examples" + - "!ccc-fork/ccc/packages/faucet" + - "!ccc-fork/ccc/packages/playground" + - "!ccc-fork/ccc/packages/tests" + # @generated end fork-workspaces catalog: "@ckb-ccc/core": ^1.12.2