diff --git a/.changeset/add-asset-system.md b/.changeset/add-asset-system.md new file mode 100644 index 0000000..2756309 --- /dev/null +++ b/.changeset/add-asset-system.md @@ -0,0 +1,13 @@ +--- +"dev-workflows": minor +--- + +Add asset system for commands, templates, hooks, and presets + +- **Commands**: `devw add command/spec` downloads slash commands that deploy to `.claude/commands/` +- **Templates**: `devw add template/feature-spec` installs spec templates to `docs/specs/` +- **Hooks**: `devw add hook/auto-format` merges editor hooks into `.claude/settings.local.json` +- **Presets**: `devw add preset/spec-driven` installs a full workflow bundle (rules + commands + templates + hooks) +- **Init with preset**: `devw init --preset spec-driven` scaffolds a project with a complete workflow +- **Asset-aware commands**: `list`, `remove`, `doctor`, `watch`, and `compile` all support the new asset types +- **Content**: 4 slash commands (spec, plan, build, learn), 1 template (feature-spec), 1 hook (auto-format), 1 preset (spec-driven), 1 rule (workflow/spec-driven) diff --git a/README.md b/README.md index ffbcd74..0e4a833 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ Define rules once. Compile them everywhere. -A local-first CLI to define AI coding rules and compile them for every editor and agent. +A local-first CLI to define AI coding rules, slash commands, templates, and hooks — compiled for every editor and agent. 
--- @@ -14,21 +14,33 @@ A local-first CLI to define AI coding rules and compile them for every editor an ```bash npx dev-workflows init -devw add react-conventions +devw add typescript/strict devw compile ``` +Or bootstrap a full spec-driven workflow: + +```bash +devw init --preset spec-driven -y +``` + --- ## What it does ``` .dwf/rules/*.yml → devw compile → CLAUDE.md - .cursor/rules + .cursor/rules/devworkflows.mdc GEMINI.md + .windsurf/rules/devworkflows.md + .github/copilot-instructions.md + +.dwf/assets/commands/* → devw compile → .claude/commands/*.md +.dwf/assets/templates/* → devw compile → docs/specs/*.md +.dwf/assets/hooks/* → devw compile → .claude/settings.local.json ``` -You define rules in YAML. The compiler generates each editor's native format. Change a rule, recompile — every editor stays in sync. +You define rules in YAML and assets in Markdown/JSON. The compiler generates each editor's native format. Change a rule, recompile — every editor stays in sync. --- @@ -37,29 +49,64 @@ You define rules in YAML. The compiler generates each editor's native format. 
Ch | Command | Description | |---------|-------------| | `devw init` | Initialize a `.dwf/` ruleset in your project | -| `devw add <block>` | Install a prebuilt rule block | -| `devw remove <block>` | Remove a rule block | -| `devw compile` | Generate editor-specific rule files | +| `devw init --preset <name>` | Initialize with a preset (e.g., `spec-driven`) | +| `devw add <category>/<name>` | Install a rule from the registry | +| `devw add command/<name>` | Install a slash command | +| `devw add template/<name>` | Install a spec template | +| `devw add hook/<name>` | Install an editor hook | +| `devw add preset/<name>` | Install a preset (bundle of rules + assets) | +| `devw remove <category>/<name>` | Remove a rule or asset | +| `devw compile` | Generate editor configs and deploy assets | | `devw watch` | Watch `.dwf/` and recompile on changes | -| `devw doctor` | Validate config and detect rule drift | +| `devw doctor` | Validate config and detect drift | | `devw list rules` | List all active rules | -| `devw list blocks` | List installed blocks | +| `devw list assets` | List installed assets | | `devw list tools` | List configured editors | --- -## Rule Blocks +## Rules Registry + +Install rules from the registry with `devw add <category>/<name>`. 
+ +| Rule | Description | +|------|-------------| +| `typescript/strict` | Strict TypeScript patterns | +| `javascript/react` | React component and hook conventions | +| `javascript/nextjs` | Next.js App Router best practices | +| `css/tailwind` | Utility-first styling conventions | +| `testing/vitest` | Test naming and structure rules | +| `security/supabase-rls` | Database security and RLS rules | +| `security/auth-patterns` | Authentication best practices | +| `frontend/design-guidelines` | UI/UX design conventions | +| `frontend/accessibility` | Accessibility (a11y) rules | +| `frontend/performance` | Frontend performance patterns | +| `workflow/git-conventions` | Git commit and branch conventions | +| `workflow/debugging` | Systematic debugging methodology | +| `workflow/spec-driven` | Spec-driven development workflow | + +--- + +## Assets + +Beyond rules, dev-workflows manages **assets** — slash commands, templates, and hooks that power AI workflows. + +| Asset | Type | Description | +|-------|------|-------------| +| `command/spec` | Command | Generate a feature spec through guided questions | +| `command/plan` | Command | Create an implementation plan from a spec | +| `command/build` | Command | Execute a plan step by step with verification | +| `command/learn` | Command | Capture a lesson learned as a project rule | +| `template/feature-spec` | Template | Markdown template for feature specifications | +| `hook/auto-format` | Hook | Auto-format files after AI edits | + +### Presets -Prebuilt rule blocks you can stack. Install with `devw add `. +Presets bundle rules and assets into a single install. 
-| Block | Purpose | -|-------|---------| -| `typescript-strict` | Enforces strict TypeScript patterns | -| `react-conventions` | Common React component and hook rules | -| `nextjs-approuter` | Next.js App Router best practices | -| `tailwind` | Utility-first styling conventions | -| `supabase-rls` | Database security and RLS rules | -| `testing-basics` | Test naming and structure rules | +| Preset | Includes | +|--------|----------| +| `preset/spec-driven` | `workflow/spec-driven` rule + all 4 commands + feature-spec template + auto-format hook | --- @@ -72,7 +119,7 @@ Bridges generate editor-compatible rule files. One adapter per tool. | Claude Code | `CLAUDE.md` | | Cursor | `.cursor/rules/devworkflows.mdc` | | Gemini CLI | `GEMINI.md` | -| Windsurf | `.windsurfrules` | +| Windsurf | `.windsurf/rules/devworkflows.md` | | VS Code Copilot | `.github/copilot-instructions.md` | --- @@ -99,4 +146,4 @@ Issues and PRs welcome. If you find a bug or have an idea, [open an issue](https ## License -MIT \ No newline at end of file +MIT diff --git a/content/commands/build.md b/content/commands/build.md new file mode 100644 index 0000000..4c8181e --- /dev/null +++ b/content/commands/build.md @@ -0,0 +1,31 @@ +--- +name: build +description: "Execute a plan step by step with verification" +version: "0.1.0" +tool: claude +--- +You are a senior software engineer. Your job is to execute an implementation plan step by step. + +Follow this process: + +1. **Read the plan** provided by the user (a path to a plan file). + +2. **For each step in the plan:** + a. Announce which step you are starting + b. Make the code changes described + c. Run verification: tests, lint, and type checks + d. If verification passes, create a commit with the message from the plan + e. If verification fails, fix the issue before moving on + +3. 
**Rules:** + - Never skip a step + - Never combine steps into one commit + - Always run tests after each step + - If a step is unclear, ask for clarification before proceeding + - If a step fails after 3 attempts, stop and report the issue + +4. **After completing all steps:** + - Run the full test suite + - Report a summary: steps completed, tests passing, any issues + +Be methodical. Quality matters more than speed. diff --git a/content/commands/learn.md b/content/commands/learn.md new file mode 100644 index 0000000..79733ae --- /dev/null +++ b/content/commands/learn.md @@ -0,0 +1,32 @@ +--- +name: learn +description: "Capture a lesson learned and add it as a project rule" +version: "0.1.0" +tool: claude +--- +You are a knowledge capture assistant. Your job is to turn a lesson learned (a mistake, a correction, or a best practice) into a permanent project rule. + +Follow this process: + +1. **Ask the user** what they learned. This could be: + - A bug they found and how to avoid it + - A pattern that works well in this codebase + - A convention the AI should always follow + - A mistake the AI made that should not be repeated + +2. **Formulate a clear, actionable rule** from the lesson. The rule should: + - Be specific to this project + - Be written as an imperative instruction + - Include context about why it matters + - Be concise (1-3 sentences) + +3. **Append the rule** to the project's `CLAUDE.md` file (or equivalent) under a `## Lessons Learned` section. If the section does not exist, create it. + +Format: +``` +## Lessons Learned + +- <rule>. Context: <why it matters>. +``` + +Confirm with the user before writing. diff --git a/content/commands/plan.md b/content/commands/plan.md new file mode 100644 index 0000000..7a91232 --- /dev/null +++ b/content/commands/plan.md @@ -0,0 +1,45 @@ +--- +name: plan +description: "Generate an implementation plan from a spec" +version: "0.1.0" +tool: claude +--- +You are a senior software architect. 
Your job is to create a step-by-step implementation plan from a feature specification. + +Follow this process: + +1. **Read the spec** provided by the user (a path to a spec file in `docs/specs/`). + +2. **Analyze the codebase** to understand: + - Which files need to be created or modified + - Existing patterns to follow + - Dependencies and potential conflicts + - Test infrastructure + +3. **Generate the plan** with incremental, commit-ready chunks. Each step should: + - Be independently testable + - Build on the previous step + - Include specific file paths and function names + - Note what tests to write or update + +Use this format: + +```markdown +# Plan: <feature-name> + +**Spec:** <path-to-spec> +**Estimated steps:** N + +## Step 1: <title> +**Files:** `path/to/file.ts` +**Changes:** +- Description of change 1 +- Description of change 2 +**Tests:** Description of tests to add +**Commit message:** `type: description` + +## Step 2: <title> +... +``` + +Each step should be small enough to review in one sitting. Prefer many small steps over few large ones. diff --git a/content/commands/spec.md b/content/commands/spec.md new file mode 100644 index 0000000..20bd783 --- /dev/null +++ b/content/commands/spec.md @@ -0,0 +1,47 @@ +--- +name: spec +description: "Generate a feature specification through guided questions" +version: "0.1.0" +tool: claude +--- +You are a senior software architect. Your job is to create a clear, complete specification for a new feature. + +Follow this process: + +1. **Ask 3-5 clarifying questions** about the feature. Focus on: + - What problem does this solve? + - Who is the target user? + - What are the key constraints (performance, compatibility, etc.)? + - What does success look like? + +2. **Research the codebase** before writing. Look at: + - Existing patterns and conventions + - Related features that already exist + - Technology stack and dependencies + +3. 
**Generate the spec** in `docs/specs/<feature-name>.md` using this structure: + +```markdown +# Feature: <name> + +## Summary +One paragraph describing the feature and its purpose. + +## Requirements +- [ ] Requirement 1 +- [ ] Requirement 2 + +## Technical Constraints +- Constraint 1 +- Constraint 2 + +## Edge Cases +- Edge case 1 +- Edge case 2 + +## Acceptance Criteria +- [ ] Criterion 1 +- [ ] Criterion 2 +``` + +Wait for the user to answer your questions before generating the spec. Do not make assumptions. diff --git a/content/hooks/auto-format.json b/content/hooks/auto-format.json new file mode 100644 index 0000000..4977dfd --- /dev/null +++ b/content/hooks/auto-format.json @@ -0,0 +1,16 @@ +{ + "name": "auto-format", + "description": "Auto-format files after AI edits", + "version": "0.1.0", + "tool": "claude", + "settings": { + "hooks": { + "PostToolUse": [ + { + "matcher": "Write|Edit", + "command": "pnpm format || true" + } + ] + } + } +} diff --git a/content/presets/spec-driven.yml b/content/presets/spec-driven.yml new file mode 100644 index 0000000..ceac6e4 --- /dev/null +++ b/content/presets/spec-driven.yml @@ -0,0 +1,15 @@ +name: spec-driven +description: "Complete spec-driven AI development workflow" +version: "0.1.0" +includes: + rules: + - workflow/spec-driven + commands: + - spec + - plan + - build + - learn + templates: + - feature-spec + hooks: + - auto-format diff --git a/content/rules/README.md b/content/rules/README.md index aeba131..bd0e46b 100644 --- a/content/rules/README.md +++ b/content/rules/README.md @@ -18,6 +18,7 @@ Rules for AI coding agents, distributed via `devw add`. 
| `workflow/git-conventions` | Workflow | Git workflow and commit conventions | `devw add workflow/git-conventions` | | `workflow/debugging` | Workflow | Systematic debugging methodology | `devw add workflow/debugging` | | `security/auth-patterns` | Security | Authentication and authorization best practices | `devw add security/auth-patterns` | +| `workflow/spec-driven` | Workflow | Spec-driven development workflow | `devw add workflow/spec-driven` | ## Usage diff --git a/content/rules/workflow/spec-driven.md b/content/rules/workflow/spec-driven.md new file mode 100644 index 0000000..7f01209 --- /dev/null +++ b/content/rules/workflow/spec-driven.md @@ -0,0 +1,26 @@ +--- +name: spec-driven +description: "Spec-driven development workflow: spec, plan, build, ship" +version: "0.1.0" +scope: workflow +tags: [workflow, spec-driven, methodology] +--- + +## Feature Development + +- Every new feature MUST start with a spec. Use `/spec <feature-name>` to generate one through guided questions. Never jump straight to code. +- After the spec is approved, generate an implementation plan with `/plan docs/specs/<feature-name>.md`. The plan breaks work into commit-ready steps. +- Execute the plan step by step with `/build <plan-path>`. Each step is verified with tests and committed independently. +- When you learn something new (a mistake, a pattern, a convention), capture it with `/learn` so the knowledge persists. + +## Quality Gates + +- Never skip the spec phase for features that touch more than 3 files. Small fixes and typos can go directly to implementation. +- Every plan step must include what tests to write or update. No step is complete without passing verification. +- If a step fails verification after 3 attempts, stop and ask for human guidance instead of forcing a solution. + +## Specs + +- Specs live in `docs/specs/` and follow the feature-spec template. Keep them concise but complete. 
+- A good spec answers: what problem, for whom, what constraints, what success looks like, and what is explicitly out of scope. +- Update the spec if requirements change during implementation. The spec is the source of truth. diff --git a/content/templates/feature-spec.md b/content/templates/feature-spec.md new file mode 100644 index 0000000..3eb693b --- /dev/null +++ b/content/templates/feature-spec.md @@ -0,0 +1,41 @@ +--- +name: feature-spec +description: "Template for writing feature specifications" +version: "0.1.0" +output_path: docs/specs +--- +# Feature: [Feature Name] + +## Summary + +[One paragraph describing the feature, its purpose, and the problem it solves.] + +## Requirements + +- [ ] [Requirement 1] +- [ ] [Requirement 2] +- [ ] [Requirement 3] + +## Technical Constraints + +- [Constraint 1: e.g., must work with existing auth system] +- [Constraint 2: e.g., response time under 200ms] + +## Edge Cases + +- [Edge case 1: e.g., what happens when input is empty?] +- [Edge case 2: e.g., what happens with concurrent requests?] + +## Acceptance Criteria + +- [ ] [Criterion 1: measurable, testable outcome] +- [ ] [Criterion 2: measurable, testable outcome] +- [ ] [Criterion 3: measurable, testable outcome] + +## Out of Scope + +- [Thing explicitly not included in this feature] + +## Open Questions + +- [Question that needs to be resolved before implementation] diff --git a/docs/commands/add.mdx b/docs/commands/add.mdx index eb87814..ba86b7e 100644 --- a/docs/commands/add.mdx +++ b/docs/commands/add.mdx @@ -1,13 +1,13 @@ --- title: "devw add" -description: "Add rules from the dev-workflows registry" +description: "Add rules, commands, templates, hooks, and presets from the registry" --- ```bash -devw add [category/rule] +devw add [category/name] ``` -Adds rules from the official dev-workflows registry. Supports both interactive and direct modes. +Adds rules and assets from the official dev-workflows registry. Supports both interactive and direct modes. 
## Interactive Mode @@ -24,10 +24,26 @@ Rules already installed are shown with an `(already installed)` indicator. ## Direct Mode ```bash -devw add typescript/strict +devw add typescript/strict # add a rule +devw add command/spec # add a slash command +devw add template/feature-spec # add a spec template +devw add hook/auto-format # add an editor hook +devw add preset/spec-driven # add a preset (bundle) ``` -Downloads and installs a specific rule by path. If the rule already exists at the same version, reports "Already up to date". +Downloads and installs a specific rule or asset by path. If the rule already exists at the same version, reports "Already up to date". + +## Asset Types + +The `category` prefix determines the type: + +| Prefix | Type | Destination | +|--------|------|-------------| +| `command/` | Slash command | `.dwf/assets/commands/` → `.claude/commands/` | +| `template/` | Spec template | `.dwf/assets/templates/` → `docs/specs/` | +| `hook/` | Editor hook | `.dwf/assets/hooks/` → `.claude/settings.local.json` | +| `preset/` | Bundle | Installs multiple rules + assets at once | +| _anything else_ | Rule | `.dwf/rules/` → editor configs | ## Flags @@ -47,6 +63,12 @@ devw add # Install a specific rule devw add typescript/strict +# Install a slash command +devw add command/spec + +# Install a full workflow preset +devw add preset/spec-driven + # List all available rules devw add --list @@ -69,3 +91,22 @@ Rules are organized by category. Run `devw add --list` for the latest list. 
| `css/tailwind` | CSS | Utility-first Tailwind conventions | | `testing/vitest` | Testing | Vitest testing patterns | | `security/supabase-rls` | Security | Supabase RLS enforcement | +| `security/auth-patterns` | Security | Authentication best practices | +| `frontend/design-guidelines` | Frontend | UI/UX design conventions | +| `frontend/accessibility` | Frontend | Accessibility (a11y) rules | +| `frontend/performance` | Frontend | Frontend performance patterns | +| `workflow/git-conventions` | Workflow | Git commit and branch conventions | +| `workflow/debugging` | Workflow | Systematic debugging methodology | +| `workflow/spec-driven` | Workflow | Spec-driven development workflow | + +## Available Assets + +| Asset | Type | Description | +|-------|------|-------------| +| `command/spec` | Command | Generate a feature spec through guided questions | +| `command/plan` | Command | Create an implementation plan from a spec | +| `command/build` | Command | Execute a plan step by step with verification | +| `command/learn` | Command | Capture a lesson learned as a project rule | +| `template/feature-spec` | Template | Markdown template for feature specifications | +| `hook/auto-format` | Hook | Auto-format files after AI edits | +| `preset/spec-driven` | Preset | Complete spec-driven development workflow | diff --git a/docs/commands/compile.mdx b/docs/commands/compile.mdx index 48f5d79..6ca4673 100644 --- a/docs/commands/compile.mdx +++ b/docs/commands/compile.mdx @@ -7,7 +7,7 @@ description: "Compile .dwf/ rules into editor-specific config files" devw compile ``` -Reads your `.dwf/` rules and generates output for each configured tool using the appropriate bridge. +Reads your `.dwf/` rules and generates output for each configured tool using the appropriate bridge. Also deploys installed assets (commands, templates, hooks). 
## Output by Tool @@ -16,6 +16,18 @@ Reads your `.dwf/` rules and generates output for each configured tool using the | Claude Code | `CLAUDE.md` | | Cursor | `.cursor/rules/devworkflows.mdc` | | Gemini CLI | `GEMINI.md` | +| Windsurf | `.windsurf/rules/devworkflows.md` | +| VS Code Copilot | `.github/copilot-instructions.md` | + +## Asset Deployment + +During compilation, installed assets are also deployed: + +| Asset Type | Source | Output | +|------------|--------|--------| +| Commands | `.dwf/assets/commands/*.md` | `.claude/commands/*.md` (frontmatter stripped) | +| Templates | `.dwf/assets/templates/*.md` | `docs/specs/*.md` (frontmatter stripped) | +| Hooks | `.dwf/assets/hooks/*.json` | `.claude/settings.local.json` (deep-merged) | ## Flags @@ -27,7 +39,7 @@ Reads your `.dwf/` rules and generates output for each configured tool using the ## Markers -When compiling to markdown-based outputs (CLAUDE.md, GEMINI.md), the generated content is wrapped in `<!-- BEGIN dev-workflows -->` and `<!-- END dev-workflows -->` markers. +When compiling to markdown-based outputs (CLAUDE.md, GEMINI.md, .github/copilot-instructions.md), the generated content is wrapped in `<!-- BEGIN dev-workflows -->` and `<!-- END dev-workflows -->` markers. This means you can have **manual content** in your CLAUDE.md and dev-workflows will only replace the section between markers. Your hand-written rules are preserved. diff --git a/docs/commands/doctor.mdx b/docs/commands/doctor.mdx index 6f8433e..e007341 100644 --- a/docs/commands/doctor.mdx +++ b/docs/commands/doctor.mdx @@ -15,9 +15,12 @@ Runs a series of checks to validate your `.dwf/` setup. 2. `config.yml` is valid YAML with correct schema 3. Rule files are valid YAML 4. No duplicate rule IDs across files -5. All configured tools have available bridges -6. Symlinks are valid (if mode is `link`) -7. Compiled files are in sync with rules (hash comparison) +5. All scopes have valid format +6. 
All configured tools have available bridges +7. Symlinks are valid (if mode is `link`) +8. Pulled rule files exist for each config entry +9. Asset files exist for each config entry +10. Compiled files are in sync with rules (hash comparison) ## Output @@ -26,7 +29,10 @@ Runs a series of checks to validate your `.dwf/` setup. ✓ config.yml is valid ✓ Rule files are valid YAML (12 rules loaded) ✓ No duplicate rule IDs +✓ All scopes have valid format ✓ All configured tools have bridges - Symlink check skipped (mode: copy) +✓ Pulled rule files exist (3 entries) +✓ Asset files exist (6 entries) ✗ Compiled files out of sync — run "devw compile" ``` diff --git a/docs/commands/init.mdx b/docs/commands/init.mdx index 7a4489e..579690b 100644 --- a/docs/commands/init.mdx +++ b/docs/commands/init.mdx @@ -11,11 +11,13 @@ Creates the `.dwf/` directory structure with configuration and empty rule files. ## Behavior -1. Detects AI tools configured in the project (looks for `.cursor/`, `CLAUDE.md`, `GEMINI.md`) +1. Detects AI tools configured in the project (looks for `.cursor/`, `CLAUDE.md`, `GEMINI.md`, `.windsurf/`, `.github/copilot-instructions.md`) 2. Asks which tools to include (detected tools are pre-selected) 3. Asks output mode: `copy` (default) or `link` 4. Generates `.dwf/config.yml` + empty rule files with example comments -5. Adds `.dwf/.cache/` to `.gitignore` +5. Creates `.dwf/assets/` directory for commands, templates, and hooks +6. Adds `.dwf/.cache/` to `.gitignore` +7. If `--preset` is specified, installs the preset after initialization ## Generated Structure @@ -28,6 +30,7 @@ Creates the `.dwf/` directory structure with configuration and empty rule files. security.yml workflow.yml testing.yml + assets/ ``` ## Flags @@ -36,6 +39,7 @@ Creates the `.dwf/` directory structure with configuration and empty rule files. 
|------|-------------| | `--tools claude,cursor,gemini` | Skip interactive tool selection | | `--mode copy\|link` | Skip interactive mode selection | +| `--preset <name>` | Install a preset after initialization (e.g., `spec-driven`) | | `-y, --yes` | Accept all defaults | ## Examples @@ -49,4 +53,7 @@ devw init --tools claude,cursor --mode copy # Accept all defaults devw init -y + +# Initialize with the spec-driven workflow preset +devw init --preset spec-driven -y ``` diff --git a/docs/commands/list.mdx b/docs/commands/list.mdx index 8d23502..18120c4 100644 --- a/docs/commands/list.mdx +++ b/docs/commands/list.mdx @@ -1,6 +1,6 @@ --- title: "devw list" -description: "List rules, blocks, or tools" +description: "List rules, assets, or tools" --- ```bash @@ -14,12 +14,18 @@ Shows information about your current configuration. | Type | Description | |------|-------------| | `rules` | All active rules with scope, severity, and source (pulled vs manual) | -| `blocks` | Deprecated — shows migration message | +| `assets` | All installed assets (commands, templates, hooks) | +| `commands` | Installed slash commands only | +| `templates` | Installed spec templates only | +| `hooks` | Installed editor hooks only | | `tools` | Configured tools with output paths | +| `blocks` | Deprecated — shows migration message | ## Examples ```bash devw list rules +devw list assets +devw list commands devw list tools ``` diff --git a/docs/concepts/assets.mdx b/docs/concepts/assets.mdx new file mode 100644 index 0000000..ef8a330 --- /dev/null +++ b/docs/concepts/assets.mdx @@ -0,0 +1,110 @@ +--- +title: "Assets" +description: "Commands, templates, hooks, and presets for AI-powered workflows" +--- + +Assets extend dev-workflows beyond rules. They are static files (Markdown, JSON) that get placed in specific directories during compilation. + +## Asset Types + +### Commands + +Slash commands are Markdown files with prompts that AI editors (like Claude Code) can execute via `/command-name`. 
+ +```bash +devw add command/spec +devw compile +# → .claude/commands/spec.md +``` + +Commands are stored in `.dwf/assets/commands/` and deployed to `.claude/commands/` with frontmatter stripped. + +**Available commands:** + +| Command | Description | +|---------|-------------| +| `command/spec` | Generate a feature spec through guided questions | +| `command/plan` | Create an implementation plan from a spec | +| `command/build` | Execute a plan step by step with verification | +| `command/learn` | Capture a lesson learned as a project rule | + +### Templates + +Templates are Markdown files that provide structure for documents like feature specs. + +```bash +devw add template/feature-spec +devw compile +# → docs/specs/feature-spec.md +``` + +Templates are stored in `.dwf/assets/templates/` and deployed to the path specified in their `output_path` frontmatter field (defaults to `docs/specs/`). + +**Available templates:** + +| Template | Description | +|----------|-------------| +| `template/feature-spec` | Markdown template for feature specifications | + +### Hooks + +Hooks are JSON files that configure editor behavior (e.g., auto-formatting after edits). + +```bash +devw add hook/auto-format +devw compile +# → .claude/settings.local.json (deep-merged) +``` + +Hooks are stored in `.dwf/assets/hooks/` and their `settings` object is deep-merged into `.claude/settings.local.json`. Arrays are concatenated, objects are recursively merged. + +**Available hooks:** + +| Hook | Description | +|------|-------------| +| `hook/auto-format` | Auto-format files after AI edits (PostToolUse) | + +### Presets + +Presets are YAML manifests that bundle multiple rules and assets into a single install. + +```bash +devw add preset/spec-driven +``` + +This installs all rules, commands, templates, and hooks listed in the preset. 
+ +**Available presets:** + +| Preset | Includes | +|--------|----------| +| `preset/spec-driven` | `workflow/spec-driven` rule, 4 commands (spec, plan, build, learn), feature-spec template, auto-format hook | + +## Asset Lifecycle + +1. **Install**: `devw add command/spec` downloads the file to `.dwf/assets/commands/spec.md` and records it in `config.yml` +2. **Deploy**: `devw compile` copies the file (stripping frontmatter) to the output location +3. **Remove**: `devw remove command/spec` deletes the source file and removes the config entry +4. **Verify**: `devw doctor` checks that all registered assets have corresponding files on disk + +## Config Format + +Assets are tracked in `.dwf/config.yml`: + +```yaml +assets: + - type: command + name: spec + version: 0.1.0 + installed_at: "2026-02-20T00:00:00Z" + - type: template + name: feature-spec + version: 0.1.0 + installed_at: "2026-02-20T00:00:00Z" + - type: hook + name: auto-format + version: 0.1.0 + installed_at: "2026-02-20T00:00:00Z" +``` + +The `assets` field is optional and backwards-compatible. Configs without it default to an empty array. diff --git a/docs/docs.json b/docs/docs.json index 52463bd..20b1fd1 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -56,6 +56,7 @@ "group": "Concepts", "pages": [ "concepts/rules", + "concepts/assets", "concepts/bridges", "concepts/blocks", "concepts/modes" diff --git a/docs/index.mdx b/docs/index.mdx index 1835299..cf5adf0 100644 --- a/docs/index.mdx +++ b/docs/index.mdx @@ -5,7 +5,7 @@ description: "Write rules once, compile to every AI editor" # Write once, **run everywhere** -A single source of truth for your development rules. Compile to any AI editor. +A single source of truth for your development rules, slash commands, templates, and hooks. Compile to any AI editor. 
## The Problem @@ -19,7 +19,13 @@ npx dev-workflows add typescript/strict # install a rule from the registry npx dev-workflows compile # generate CLAUDE.md, .cursor/rules, GEMINI.md ``` -Define rules once in YAML, compile to each editor's native format. +Or bootstrap a complete spec-driven workflow: + +```bash +npx dev-workflows init --preset spec-driven -y +``` + +Define rules once in YAML, compile to each editor's native format. Install slash commands, spec templates, and editor hooks from the registry. <Columns cols={3}> <Card title="CLAUDE.md" icon="robot" href="/concepts/bridges"> @@ -58,6 +64,22 @@ Start with battle-tested rules instead of writing from scratch. Browse with `dev </Card> </Columns> +## Assets + +Beyond rules, install **slash commands**, **spec templates**, and **editor hooks** that power AI-assisted workflows. + +<Columns cols={3}> + <Card title="Commands" icon="terminal" href="/concepts/assets"> + /spec, /plan, /build, /learn + </Card> + <Card title="Templates" icon="file-lines" href="/concepts/assets"> + Feature spec template + </Card> + <Card title="Presets" icon="layer-group" href="/concepts/assets"> + Bundle rules + assets in one install + </Card> +</Columns> + **Open source · MIT license · 4 dependencies** [Get Started →](/quickstart) diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx index f4d5c3c..6e9f184 100644 --- a/docs/quickstart.mdx +++ b/docs/quickstart.mdx @@ -16,7 +16,12 @@ description: "Install dev-workflows and compile your first rules in 60 seconds" ``` This creates a `.dwf/` directory with `config.yml` and empty rule files for each scope: architecture, conventions, security, testing, and workflow. - The CLI detects existing AI tools in your project (CLAUDE.md, .cursor/, GEMINI.md) and pre-selects them. + The CLI detects existing AI tools in your project (CLAUDE.md, .cursor/, GEMINI.md, .windsurf/, .github/copilot-instructions.md) and pre-selects them. 
+ + **With a preset** — bootstrap a full spec-driven workflow in one command: + ```bash + npx dev-workflows init --preset spec-driven -y + ``` </Step> <Step title="Add rules from the registry (optional)"> @@ -24,24 +29,42 @@ description: "Install dev-workflows and compile your first rules in 60 seconds" npx dev-workflows add typescript/strict ``` Installs rules from the GitHub registry into your `.dwf/rules/` directory. Run `devw add --list` to see all available rules, or `devw add` for interactive selection. + + You can also install slash commands, templates, and hooks: + ```bash + devw add command/spec # slash command + devw add template/feature-spec # spec template + devw add hook/auto-format # editor hook + ``` </Step> <Step title="Compile"> ```bash npx dev-workflows compile ``` - Generates editor-specific config files: + Generates editor-specific config files and deploys assets: | Tool | Output | |------|--------| | Claude Code | `CLAUDE.md` | | Cursor | `.cursor/rules/devworkflows.mdc` | | Gemini CLI | `GEMINI.md` | + | Windsurf | `.windsurf/rules/devworkflows.md` | + | VS Code Copilot | `.github/copilot-instructions.md` | + + Assets are also deployed: + + | Asset Type | Output | + |------------|--------| + | Commands | `.claude/commands/*.md` | + | Templates | `docs/specs/*.md` | + | Hooks | `.claude/settings.local.json` | </Step> </Steps> ## What's Next - Write your own rules in `.dwf/rules/` → [Rules format](/concepts/rules) +- Learn about assets (commands, templates, hooks) → [Assets](/concepts/assets) - Validate your setup → `devw doctor` - Browse all commands → [Commands](/commands/init) diff --git a/packages/cli/src/bridges/types.ts b/packages/cli/src/bridges/types.ts index 2891a24..0f878b1 100644 --- a/packages/cli/src/bridges/types.ts +++ b/packages/cli/src/bridges/types.ts @@ -25,6 +25,22 @@ export interface ProjectConfig { mode: 'copy' | 'link'; blocks: string[]; pulled: PulledEntry[]; + assets: AssetEntry[]; +} + +export const ASSET_TYPE = { + 
Command: 'command', + Template: 'template', + Hook: 'hook', +} as const; + +export type AssetType = typeof ASSET_TYPE[keyof typeof ASSET_TYPE]; + +export interface AssetEntry { + type: AssetType; + name: string; + version: string; + installed_at: string; } export interface Bridge { diff --git a/packages/cli/src/commands/add.ts b/packages/cli/src/commands/add.ts index 30ac77d..996d910 100644 --- a/packages/cli/src/commands/add.ts +++ b/packages/cli/src/commands/add.ts @@ -4,14 +4,15 @@ import type { Command } from 'commander'; import chalk from 'chalk'; import { stringify, parse } from 'yaml'; import { select, checkbox, confirm } from '@inquirer/prompts'; -import { fetchRawContent, listDirectory } from '../utils/github.js'; +import { fetchRawContent, fetchContent, listDirectory } from '../utils/github.js'; import { convert } from '../core/converter.js'; +import { isAssetType, parseAssetFrontmatter } from '../core/assets.js'; import { fileExists } from '../utils/fs.js'; import { readConfig } from '../core/parser.js'; import * as cache from '../utils/cache.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; -import type { PulledEntry } from '../bridges/types.js'; +import type { PulledEntry, AssetEntry, AssetType } from '../bridges/types.js'; const KEBAB_RE = /^[a-z0-9]+(?:-[a-z0-9]+)*$/; @@ -192,6 +193,108 @@ export async function updateConfig(cwd: string, entry: PulledEntry): Promise<voi await writeFile(configPath, stringify(doc, { lineWidth: 0 }), 'utf-8'); } +export async function updateConfigAssets(cwd: string, entry: AssetEntry): Promise<void> { + const configPath = join(cwd, '.dwf', 'config.yml'); + const raw = await readFile(configPath, 'utf-8'); + const doc = parse(raw) as Record<string, unknown>; + + const assets = Array.isArray(doc['assets']) ? 
(doc['assets'] as AssetEntry[]) : []; + + const existingIdx = assets.findIndex((a) => a.type === entry.type && a.name === entry.name); + if (existingIdx >= 0) { + assets[existingIdx] = entry; + } else { + assets.push(entry); + } + + doc['assets'] = assets; + await writeFile(configPath, stringify(doc, { lineWidth: 0 }), 'utf-8'); +} + +function getAssetContentPath(type: AssetType, name: string): string { + const ext = type === 'hook' ? 'json' : 'md'; + return `${type}s/${name}.${ext}`; +} + +export async function downloadAndInstallAsset( + cwd: string, + type: AssetType, + name: string, + options: AddOptions, +): Promise<boolean> { + const source = `${type}/${name}`; + const ext = type === 'hook' ? 'json' : 'md'; + const fileName = `${name}.${ext}`; + const assetDir = join(cwd, '.dwf', 'assets', `${type}s`); + const filePath = join(assetDir, fileName); + + ui.info(`Downloading ${source}...`); + + let content: string; + try { + content = await fetchContent(getAssetContentPath(type, name)); + } catch (err) { + const msg = err instanceof Error ? 
err.message : String(err); + ui.error(msg); + process.exitCode = 1; + return false; + } + + let version = '0.1.0'; + if (type === 'hook') { + try { + const parsed = JSON.parse(content) as Record<string, unknown>; + if (typeof parsed['version'] === 'string') version = parsed['version']; + } catch { + // Use default version + } + } else { + const { frontmatter } = parseAssetFrontmatter(content); + version = frontmatter.version; + } + + if (await fileExists(filePath)) { + if (!options.force) { + ui.info(`${source} already exists locally`); + try { + const shouldOverwrite = await confirm({ + message: 'Overwrite?', + default: true, + }); + if (!shouldOverwrite) { + ui.error('Cancelled'); + return false; + } + } catch { + ui.error('Cancelled'); + return false; + } + } + } + + if (options.dryRun) { + ui.newline(); + ui.header('Dry run — would write:'); + ui.newline(); + console.log(chalk.dim(` .dwf/assets/${type}s/${fileName}`)); + return false; + } + + await mkdir(assetDir, { recursive: true }); + await writeFile(filePath, content, 'utf-8'); + + const entry: AssetEntry = { + type, + name, + version, + installed_at: new Date().toISOString(), + }; + await updateConfigAssets(cwd, entry); + + ui.success(`Added ${source} (v${version})`); + return true; +} + async function downloadAndInstall( cwd: string, category: string, @@ -419,6 +522,81 @@ async function runInteractive(cwd: string, options: AddOptions): Promise<void> { } } +interface PresetManifest { + name: string; + description: string; + version: string; + includes: { + rules?: string[]; + commands?: string[]; + templates?: string[]; + hooks?: string[]; + }; +} + +export async function installPreset( + cwd: string, + presetName: string, + options: AddOptions, +): Promise<boolean> { + ui.info(`Downloading preset ${presetName}...`); + + let content: string; + try { + content = await fetchContent(`presets/${presetName}.yml`); + } catch (err) { + const msg = err instanceof Error ? 
err.message : String(err); + ui.error(`Preset not found: ${msg}`); + process.exitCode = 1; + return false; + } + + let manifest: PresetManifest; + try { + manifest = parse(content) as PresetManifest; + } catch { + ui.error(`Invalid preset YAML: ${presetName}`); + process.exitCode = 1; + return false; + } + + ui.newline(); + ui.header(`Preset: ${manifest.name}`); + if (manifest.description) { + ui.info(manifest.description); + } + ui.newline(); + + const noCompileOptions: AddOptions = { ...options, noCompile: true }; + let anyAdded = false; + + const rules = manifest.includes.rules ?? []; + for (const rule of rules) { + const added = await downloadAndInstall(cwd, rule.split('/')[0] ?? '', rule.split('/')[1] ?? rule, noCompileOptions); + if (added) anyAdded = true; + } + + const commands = manifest.includes.commands ?? []; + for (const cmd of commands) { + const added = await downloadAndInstallAsset(cwd, 'command', cmd, noCompileOptions); + if (added) anyAdded = true; + } + + const templates = manifest.includes.templates ?? []; + for (const tmpl of templates) { + const added = await downloadAndInstallAsset(cwd, 'template', tmpl, noCompileOptions); + if (added) anyAdded = true; + } + + const hooks = manifest.includes.hooks ?? 
[]; + for (const hook of hooks) { + const added = await downloadAndInstallAsset(cwd, 'hook', hook, noCompileOptions); + if (added) anyAdded = true; + } + + return anyAdded; +} + async function runAdd(ruleArg: string | undefined, options: AddOptions): Promise<void> { if (options.list) { await runList(ruleArg); @@ -461,6 +639,25 @@ async function runAdd(ruleArg: string | undefined, options: AddOptions): Promise } const { category, name } = parsed; + + if (category === 'preset') { + const anyAdded = await installPreset(cwd, name, options); + if (anyAdded && !options.noCompile) { + const { runCompileFromAdd } = await import('./compile.js'); + await runCompileFromAdd(); + } + return; + } + + if (isAssetType(category)) { + const added = await downloadAndInstallAsset(cwd, category, name, options); + if (added && !options.noCompile) { + const { runCompileFromAdd } = await import('./compile.js'); + await runCompileFromAdd(); + } + return; + } + const added = await downloadAndInstall(cwd, category, name, options); if (added && !options.noCompile) { diff --git a/packages/cli/src/commands/compile.ts b/packages/cli/src/commands/compile.ts index 6ce71aa..7635ac1 100644 --- a/packages/cli/src/commands/compile.ts +++ b/packages/cli/src/commands/compile.ts @@ -4,6 +4,7 @@ import type { Command } from 'commander'; import chalk from 'chalk'; import { readConfig, readRules } from '../core/parser.js'; import { computeRulesHash, writeHash } from '../core/hash.js'; +import { deployAssets } from '../core/assets.js'; import type { Bridge } from '../bridges/types.js'; import { claudeBridge } from '../bridges/claude.js'; import { cursorBridge } from '../bridges/cursor.js'; @@ -32,6 +33,7 @@ export interface BridgeResult { export interface CompileResult { results: BridgeResult[]; activeRuleCount: number; + assetPaths: string[]; elapsedMs: number; } @@ -139,13 +141,17 @@ export async function executePipeline(options: PipelineOptions): Promise<Compile } } + let assetPaths: string[] = []; if 
(write) { const hash = computeRulesHash(activeRules); await writeHash(cwd, hash); + + const assetResult = await deployAssets(cwd, config); + assetPaths = assetResult.deployed; } const elapsedMs = performance.now() - startTime; - return { results, activeRuleCount: activeRules.length, elapsedMs }; + return { results, activeRuleCount: activeRules.length, assetPaths, elapsedMs }; } async function runCompile(options: CompileOptions): Promise<void> { @@ -182,11 +188,12 @@ async function runCompile(options: CompileOptions): Promise<void> { const result = await executePipeline({ cwd, tool: options.tool }); const writtenPaths = result.results.filter((r) => r.success).map((r) => r.outputPath); + const allPaths = [...writtenPaths, ...result.assetPaths]; ui.newline(); - ui.success(`Compiled ${String(result.activeRuleCount)} rules ${ICONS.arrow} ${String(writtenPaths.length)} file${writtenPaths.length !== 1 ? 's' : ''} ${ui.timing(result.elapsedMs)}`); + ui.success(`Compiled ${String(result.activeRuleCount)} rules ${ICONS.arrow} ${String(allPaths.length)} file${allPaths.length !== 1 ? 's' : ''} ${ui.timing(result.elapsedMs)}`); ui.newline(); - ui.list(writtenPaths); + ui.list(allPaths); } catch (err) { const message = err instanceof Error ? 
err.message : String(err); ui.error(message); diff --git a/packages/cli/src/commands/doctor.ts b/packages/cli/src/commands/doctor.ts index 52b3074..e6b8797 100644 --- a/packages/cli/src/commands/doctor.ts +++ b/packages/cli/src/commands/doctor.ts @@ -9,7 +9,7 @@ import { cursorBridge } from '../bridges/cursor.js'; import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; -import type { Bridge, ProjectConfig, PulledEntry, Rule } from '../bridges/types.js'; +import type { Bridge, ProjectConfig, PulledEntry, AssetEntry, Rule } from '../bridges/types.js'; import { fileExists } from '../utils/fs.js'; import { isValidScope } from '../core/schema.js'; import * as ui from '../utils/ui.js'; @@ -210,6 +210,32 @@ export async function checkPulledFilesExist(cwd: string, pulled: PulledEntry[]): return { passed: true, message: `Pulled rule files exist (${String(pulled.length)} entries)` }; } +export async function checkAssetFilesExist(cwd: string, assets: AssetEntry[]): Promise<CheckResult> { + if (assets.length === 0) { + return { passed: true, message: 'Asset files check skipped (no assets installed)', skipped: true }; + } + + const missing: string[] = []; + + for (const asset of assets) { + const ext = asset.type === 'hook' ? 
'json' : 'md'; + const fileName = `${asset.name}.${ext}`; + const filePath = join(cwd, '.dwf', 'assets', `${asset.type}s`, fileName); + if (!(await fileExists(filePath))) { + missing.push(`${asset.type}/${asset.name}`); + } + } + + if (missing.length > 0) { + return { + passed: false, + message: `Missing asset files: ${missing.join(', ')}`, + }; + } + + return { passed: true, message: `Asset files exist (${String(assets.length)} entries)` }; +} + export async function checkHashSync(cwd: string, rules: Rule[]): Promise<CheckResult> { const storedHash = await readStoredHash(cwd); if (storedHash === null) { @@ -303,7 +329,11 @@ async function runDoctor(): Promise<void> { const pulledResult = await checkPulledFilesExist(cwd, config!.pulled); results.push(pulledResult); - // Check 9: Hash sync (conditional on compiled files existing) + // Check 9: Asset files exist + const assetResult = await checkAssetFilesExist(cwd, config!.assets); + results.push(assetResult); + + // Check 10: Hash sync (conditional on compiled files existing) const hashResult = await checkHashSync(cwd, rules); results.push(hashResult); diff --git a/packages/cli/src/commands/init.ts b/packages/cli/src/commands/init.ts index 3c19934..c5ad522 100644 --- a/packages/cli/src/commands/init.ts +++ b/packages/cli/src/commands/init.ts @@ -13,6 +13,7 @@ export interface InitOptions { tools?: string; mode?: 'copy' | 'link'; yes?: boolean; + preset?: string; } import { BUILTIN_SCOPES } from '../core/schema.js'; @@ -130,9 +131,10 @@ async function runInit(options: InitOptions): Promise<void> { } const projectName = basename(cwd); - // Create .dwf/rules/ + // Create .dwf/rules/ and .dwf/assets/ const rulesDir = join(dwfDir, 'rules'); await mkdir(rulesDir, { recursive: true }); + await mkdir(join(dwfDir, 'assets'), { recursive: true }); // Write config.yml const config = { @@ -169,6 +171,17 @@ async function runInit(options: InitOptions): Promise<void> { console.log(` 2. 
Add a rule ${chalk.cyan('devw add <category>/<rule>')}`); console.log(` 3. Or write your own rules in ${chalk.cyan('.dwf/rules/')}`); console.log(` 4. When ready, compile ${chalk.cyan('devw compile')}`); + + if (options.preset) { + ui.newline(); + ui.info(`Installing preset: ${options.preset}...`); + const { installPreset } = await import('./add.js'); + const { runCompileFromAdd } = await import('./compile.js'); + const anyAdded = await installPreset(cwd, options.preset, { force: true }); + if (anyAdded) { + await runCompileFromAdd(); + } + } } export function registerInitCommand(program: Command): void { @@ -177,6 +190,7 @@ export function registerInitCommand(program: Command): void { .description('Initialize .dwf/ in the current project') .option('--tools <tools>', 'Comma-separated list of tools (claude,cursor,gemini)') .option('--mode <mode>', 'Output mode: copy or link') + .option('--preset <preset>', 'Install a preset after initialization (e.g., spec-driven)') .option('-y, --yes', 'Accept all defaults') .action((options: InitOptions) => runInit(options)); } diff --git a/packages/cli/src/commands/list.ts b/packages/cli/src/commands/list.ts index 01f5bc4..85a649a 100644 --- a/packages/cli/src/commands/list.ts +++ b/packages/cli/src/commands/list.ts @@ -9,6 +9,7 @@ import { geminiBridge } from '../bridges/gemini.js'; import { windsurfBridge } from '../bridges/windsurf.js'; import { copilotBridge } from '../bridges/copilot.js'; import type { Bridge } from '../bridges/types.js'; +import { ASSET_TYPE } from '../bridges/types.js'; import * as ui from '../utils/ui.js'; import { ICONS } from '../utils/ui.js'; @@ -88,9 +89,34 @@ async function listTools(): Promise<void> { } } +async function listAssets(typeFilter?: string): Promise<void> { + const cwd = process.cwd(); + if (!(await ensureConfig(cwd))) return; + + const config = await readConfig(cwd); + + const filtered = typeFilter + ? 
config.assets.filter((a) => a.type === typeFilter || `${a.type}s` === typeFilter) + : config.assets; + + if (filtered.length === 0) { + const label = typeFilter ?? 'assets'; + ui.warn(`No ${label} installed`); + ui.info('Run devw add command/<name> or devw add preset/<name> to install'); + return; + } + + const label = typeFilter ?? 'assets'; + ui.header(`Installed ${label} (${String(filtered.length)})`); + ui.newline(); + for (const asset of filtered) { + console.log(` ${chalk.dim(ICONS.bullet)} ${chalk.cyan(asset.type.padEnd(10))} ${chalk.white(asset.name.padEnd(20))} ${chalk.dim(`v${asset.version}`)}`); + } +} + async function runList(subcommand: string | undefined): Promise<void> { if (!subcommand) { - ui.error('Specify what to list', 'Usage: devw list <rules|blocks|tools>'); + ui.error('Specify what to list', 'Usage: devw list <rules|tools|assets|commands|templates|hooks>'); process.exitCode = 1; return; } @@ -105,8 +131,20 @@ async function runList(subcommand: string | undefined): Promise<void> { case 'tools': await listTools(); break; + case 'assets': + await listAssets(); + break; + case 'commands': + await listAssets(ASSET_TYPE.Command); + break; + case 'templates': + await listAssets(ASSET_TYPE.Template); + break; + case 'hooks': + await listAssets(ASSET_TYPE.Hook); + break; default: - ui.error(`Unknown list type "${subcommand}"`, 'Usage: devw list <rules|blocks|tools>'); + ui.error(`Unknown list type "${subcommand}"`, 'Usage: devw list <rules|tools|assets|commands|templates|hooks>'); process.exitCode = 1; } } @@ -114,7 +152,7 @@ async function runList(subcommand: string | undefined): Promise<void> { export function registerListCommand(program: Command): void { program .command('list') - .argument('[type]', 'What to list: rules, blocks, or tools') - .description('List rules, installed blocks, or configured tools') + .argument('[type]', 'What to list: rules, tools, assets, commands, templates, hooks') + .description('List rules, configured tools, or 
installed assets') .action((type: string | undefined) => runList(type)); } diff --git a/packages/cli/src/commands/remove.ts b/packages/cli/src/commands/remove.ts index 9ebaca4..957d7eb 100644 --- a/packages/cli/src/commands/remove.ts +++ b/packages/cli/src/commands/remove.ts @@ -5,9 +5,10 @@ import { parse, stringify } from 'yaml'; import { checkbox, confirm } from '@inquirer/prompts'; import { readConfig } from '../core/parser.js'; import { fileExists } from '../utils/fs.js'; +import { isAssetType, removeAsset } from '../core/assets.js'; import { validateInput } from './add.js'; import * as ui from '../utils/ui.js'; -import type { PulledEntry } from '../bridges/types.js'; +import type { PulledEntry, AssetEntry } from '../bridges/types.js'; async function removePulledEntry(cwd: string, path: string): Promise<void> { const configPath = join(cwd, '.dwf', 'config.yml'); @@ -20,6 +21,17 @@ async function removePulledEntry(cwd: string, path: string): Promise<void> { await writeFile(configPath, stringify(doc, { lineWidth: 0 }), 'utf-8'); } +async function removeAssetEntry(cwd: string, type: string, name: string): Promise<void> { + const configPath = join(cwd, '.dwf', 'config.yml'); + const raw = await readFile(configPath, 'utf-8'); + const doc = parse(raw) as Record<string, unknown>; + + const assets = Array.isArray(doc['assets']) ? 
(doc['assets'] as AssetEntry[]) : []; + doc['assets'] = assets.filter((a) => !(a.type === type && a.name === name)); + + await writeFile(configPath, stringify(doc, { lineWidth: 0 }), 'utf-8'); +} + async function removeRule(cwd: string, path: string): Promise<boolean> { const parts = path.split('/'); if (parts.length !== 2) return false; @@ -113,7 +125,31 @@ async function runRemove(ruleArg: string | undefined): Promise<void> { return; } - const source = `${parsed.category}/${parsed.name}`; + const { category, name } = parsed; + + if (isAssetType(category)) { + const installed = config.assets.find((a) => a.type === category && a.name === name); + if (!installed) { + ui.error( + `Asset "${category}/${name}" is not installed`, + config.assets.length > 0 + ? `Installed assets: ${config.assets.map((a) => `${a.type}/${a.name}`).join(', ')}` + : 'No assets installed', + ); + process.exitCode = 1; + return; + } + + await removeAsset(cwd, category, name); + await removeAssetEntry(cwd, category, name); + ui.success(`Removed ${category}/${name}`); + + const { runCompileFromAdd } = await import('./compile.js'); + await runCompileFromAdd(); + return; + } + + const source = `${category}/${name}`; const installed = config.pulled.find((p) => p.path === source); if (!installed) { ui.error( diff --git a/packages/cli/src/commands/watch.ts b/packages/cli/src/commands/watch.ts index c178927..e7a1ed0 100644 --- a/packages/cli/src/commands/watch.ts +++ b/packages/cli/src/commands/watch.ts @@ -44,7 +44,7 @@ async function runWatch(options: WatchOptions): Promise<void> { return; } - const watcher = chokidar.watch(['**/*.yml', '**/*.yaml'], { + const watcher = chokidar.watch(['**/*.yml', '**/*.yaml', 'assets/**/*.md', 'assets/**/*.json'], { cwd: dwfDir, ignoreInitial: true, awaitWriteFinish: { stabilityThreshold: 100, pollInterval: 50 }, diff --git a/packages/cli/src/core/assets.ts b/packages/cli/src/core/assets.ts new file mode 100644 index 0000000..eb4858c --- /dev/null +++ 
b/packages/cli/src/core/assets.ts @@ -0,0 +1,209 @@ +import { readFile, writeFile, readdir, mkdir, unlink } from 'node:fs/promises'; +import { join } from 'node:path'; +import { parse as parseYaml } from 'yaml'; +import type { AssetType, ProjectConfig } from '../bridges/types.js'; +import { ASSET_TYPE } from '../bridges/types.js'; +import { fileExists } from '../utils/fs.js'; +import { mergeSettingsFile, type JsonValue } from './settings-merge.js'; + +const ASSET_TYPE_VALUES = new Set<string>(Object.values(ASSET_TYPE)); + +export function isAssetType(category: string): category is AssetType { + return ASSET_TYPE_VALUES.has(category); +} + +export interface AssetFrontmatter { + name: string; + description: string; + version: string; + tool?: string; + output_path?: string; +} + +export function parseAssetFrontmatter(content: string): { frontmatter: AssetFrontmatter; body: string } { + const fmRegex = /^---\n([\s\S]*?)\n---\n?([\s\S]*)$/; + const match = fmRegex.exec(content); + + if (!match?.[1]) { + return { + frontmatter: { name: '', description: '', version: '0.1.0' }, + body: content, + }; + } + + const raw: unknown = parseYaml(match[1]); + if (!raw || typeof raw !== 'object') { + return { + frontmatter: { name: '', description: '', version: '0.1.0' }, + body: match[2] ?? '', + }; + } + + const fm = raw as Record<string, unknown>; + + return { + frontmatter: { + name: typeof fm['name'] === 'string' ? fm['name'] : '', + description: typeof fm['description'] === 'string' ? fm['description'] : '', + version: typeof fm['version'] === 'string' ? fm['version'] : '0.1.0', + tool: typeof fm['tool'] === 'string' ? fm['tool'] : undefined, + output_path: typeof fm['output_path'] === 'string' ? fm['output_path'] : undefined, + }, + body: match[2] ?? 
'', + }; +} + +export interface AssetFile { + type: AssetType; + name: string; + content: string; +} + +export async function readAssets(cwd: string): Promise<AssetFile[]> { + const assetsDir = join(cwd, '.dwf', 'assets'); + const assets: AssetFile[] = []; + + for (const type of Object.values(ASSET_TYPE)) { + const typeDir = join(assetsDir, `${type}s`); + if (!(await fileExists(typeDir))) continue; + + let entries: string[]; + try { + entries = await readdir(typeDir); + } catch { + continue; + } + + for (const file of entries) { + const filePath = join(typeDir, file); + try { + const content = await readFile(filePath, 'utf-8'); + const name = file.replace(/\.(md|json|yml|yaml)$/, ''); + assets.push({ type, name, content }); + } catch { + // Skip unreadable files + } + } + } + + return assets; +} + +export interface DeployResult { + deployed: string[]; +} + +export async function deployCommands(cwd: string, _config: ProjectConfig): Promise<DeployResult> { + const commandsDir = join(cwd, '.dwf', 'assets', 'commands'); + const outputDir = join(cwd, '.claude', 'commands'); + const deployed: string[] = []; + + if (!(await fileExists(commandsDir))) return { deployed }; + + let entries: string[]; + try { + entries = await readdir(commandsDir); + } catch { + return { deployed }; + } + + await mkdir(outputDir, { recursive: true }); + + for (const file of entries) { + if (!file.endsWith('.md')) continue; + const content = await readFile(join(commandsDir, file), 'utf-8'); + const { body } = parseAssetFrontmatter(content); + const outputPath = join(outputDir, file); + await writeFile(outputPath, body.trimStart(), 'utf-8'); + deployed.push(`.claude/commands/${file}`); + } + + return { deployed }; +} + +export async function deployTemplates(cwd: string, _config: ProjectConfig): Promise<DeployResult> { + const templatesDir = join(cwd, '.dwf', 'assets', 'templates'); + const deployed: string[] = []; + + if (!(await fileExists(templatesDir))) return { deployed }; + + let entries: 
string[]; + try { + entries = await readdir(templatesDir); + } catch { + return { deployed }; + } + + for (const file of entries) { + if (!file.endsWith('.md')) continue; + const content = await readFile(join(templatesDir, file), 'utf-8'); + const { frontmatter, body } = parseAssetFrontmatter(content); + const outputPath = frontmatter.output_path ?? 'docs/specs'; + const outputDir = join(cwd, outputPath); + await mkdir(outputDir, { recursive: true }); + await writeFile(join(outputDir, file), body.trimStart(), 'utf-8'); + deployed.push(`${outputPath}/${file}`); + } + + return { deployed }; +} + +export async function deployHooks(cwd: string, _config: ProjectConfig): Promise<DeployResult> { + const hooksDir = join(cwd, '.dwf', 'assets', 'hooks'); + const deployed: string[] = []; + + if (!(await fileExists(hooksDir))) return { deployed }; + + let entries: string[]; + try { + entries = await readdir(hooksDir); + } catch { + return { deployed }; + } + + for (const file of entries) { + if (!file.endsWith('.json')) continue; + const content = await readFile(join(hooksDir, file), 'utf-8'); + let parsed: unknown; + try { + parsed = JSON.parse(content); + } catch { + continue; + } + + if (!parsed || typeof parsed !== 'object') continue; + const hookDoc = parsed as Record<string, unknown>; + const settings = hookDoc['settings']; + if (!settings || typeof settings !== 'object' || Array.isArray(settings)) continue; + + await mergeSettingsFile(cwd, settings as Record<string, JsonValue>); + deployed.push('.claude/settings.local.json'); + } + + return { deployed: [...new Set(deployed)] }; +} + +export async function deployAssets(cwd: string, config: ProjectConfig): Promise<DeployResult> { + const allDeployed: string[] = []; + + const commandResult = await deployCommands(cwd, config); + allDeployed.push(...commandResult.deployed); + + const templateResult = await deployTemplates(cwd, config); + allDeployed.push(...templateResult.deployed); + + const hookResult = await 
deployHooks(cwd, config); + allDeployed.push(...hookResult.deployed); + + return { deployed: allDeployed }; +} + +export async function removeAsset(cwd: string, type: AssetType, name: string): Promise<boolean> { + const ext = type === 'hook' ? 'json' : 'md'; + const filePath = join(cwd, '.dwf', 'assets', `${type}s`, `${name}.${ext}`); + + if (!(await fileExists(filePath))) return false; + + await unlink(filePath); + return true; +} diff --git a/packages/cli/src/core/parser.ts b/packages/cli/src/core/parser.ts index e09910b..b18cf71 100644 --- a/packages/cli/src/core/parser.ts +++ b/packages/cli/src/core/parser.ts @@ -1,7 +1,8 @@ import { readFile, readdir } from 'node:fs/promises'; import { join } from 'node:path'; import { parse } from 'yaml'; -import type { Rule, ProjectConfig, PulledEntry } from '../bridges/types.js'; +import type { Rule, ProjectConfig, PulledEntry, AssetEntry, AssetType } from '../bridges/types.js'; +import { ASSET_TYPE } from '../bridges/types.js'; import { isValidScope } from './schema.js'; interface RawRule { @@ -68,6 +69,20 @@ export async function readConfig(cwd: string): Promise<ProjectConfig> { .filter((p) => p.path !== '') : []; + const assetTypeValues = new Set<string>(Object.values(ASSET_TYPE)); + const assetsRaw = doc['assets']; + const assets: AssetEntry[] = Array.isArray(assetsRaw) + ? assetsRaw + .filter((a): a is Record<string, unknown> => a !== null && typeof a === 'object') + .map((a) => ({ + type: (typeof a['type'] === 'string' ? a['type'] : '') as AssetType, + name: typeof a['name'] === 'string' ? a['name'] : '', + version: typeof a['version'] === 'string' ? a['version'] : '', + installed_at: typeof a['installed_at'] === 'string' ? 
a['installed_at'] : '', + })) + .filter((a) => a.name !== '' && assetTypeValues.has(a.type)) + : []; + return { version, project: { name: projectName, description: projectDescription }, @@ -75,6 +90,7 @@ export async function readConfig(cwd: string): Promise<ProjectConfig> { mode: modeRaw, blocks, pulled, + assets, }; } diff --git a/packages/cli/src/core/settings-merge.ts b/packages/cli/src/core/settings-merge.ts new file mode 100644 index 0000000..1d17dae --- /dev/null +++ b/packages/cli/src/core/settings-merge.ts @@ -0,0 +1,54 @@ +import { readFile, writeFile, mkdir } from 'node:fs/promises'; +import { join, dirname } from 'node:path'; +import { fileExists } from '../utils/fs.js'; + +export type JsonValue = string | number | boolean | null | JsonValue[] | { [key: string]: JsonValue }; + +export function deepMerge(target: Record<string, JsonValue>, source: Record<string, JsonValue>): Record<string, JsonValue> { + const result: Record<string, JsonValue> = { ...target }; + + for (const key of Object.keys(source)) { + const sourceVal = source[key]!; + const targetVal = result[key]; + + if (Array.isArray(sourceVal) && Array.isArray(targetVal)) { + result[key] = [...targetVal, ...sourceVal]; + } else if ( + sourceVal !== null && typeof sourceVal === 'object' && !Array.isArray(sourceVal) && + targetVal !== null && typeof targetVal === 'object' && !Array.isArray(targetVal) + ) { + result[key] = deepMerge( + targetVal as Record<string, JsonValue>, + sourceVal as Record<string, JsonValue>, + ); + } else { + result[key] = sourceVal; + } + } + + return result; +} + +export async function mergeSettingsFile( + cwd: string, + hookSettings: Record<string, JsonValue>, +): Promise<void> { + const settingsPath = join(cwd, '.claude', 'settings.local.json'); + + let existing: Record<string, JsonValue> = {}; + if (await fileExists(settingsPath)) { + try { + const raw = await readFile(settingsPath, 'utf-8'); + const parsed: unknown = JSON.parse(raw); + if (parsed && typeof parsed === 
'object' && !Array.isArray(parsed)) { + existing = parsed as Record<string, JsonValue>; + } + } catch { + // Corrupted file — start fresh + } + } + + const merged = deepMerge(existing, hookSettings); + await mkdir(dirname(settingsPath), { recursive: true }); + await writeFile(settingsPath, JSON.stringify(merged, null, 2) + '\n', 'utf-8'); +} diff --git a/packages/cli/src/utils/github.ts b/packages/cli/src/utils/github.ts index fcf75cf..996f342 100644 --- a/packages/cli/src/utils/github.ts +++ b/packages/cli/src/utils/github.ts @@ -1,6 +1,7 @@ const BRANCH = 'main'; -const RAW_BASE = `https://raw.githubusercontent.com/gpolanco/dev-workflows/${BRANCH}/content/rules`; -const API_BASE = 'https://api.github.com/repos/gpolanco/dev-workflows/contents/content/rules'; +const REPO = 'gpolanco/dev-workflows'; +const RAW_BASE = `https://raw.githubusercontent.com/${REPO}/${BRANCH}/content`; +const API_BASE = `https://api.github.com/repos/${REPO}/contents/content`; export class GitHubError extends Error { constructor( @@ -14,7 +15,7 @@ export class GitHubError extends Error { function handleResponseError(status: number, path: string): never { if (status === 404) { - throw new GitHubError(`Rule not found: ${path}`, 404); + throw new GitHubError(`Content not found: ${path}`, 404); } if (status === 403) { throw new GitHubError( @@ -25,24 +26,28 @@ function handleResponseError(status: number, path: string): never { throw new GitHubError(`GitHub request failed (HTTP ${String(status)})`, status); } -export async function fetchRawContent(path: string): Promise<string> { - const url = `${RAW_BASE}/${path}.md`; +export async function fetchContent(contentPath: string): Promise<string> { + const url = `${RAW_BASE}/${contentPath}`; let response: Response; try { response = await fetch(url); } catch (err) { const msg = err instanceof Error ? 
err.message : String(err); - throw new GitHubError(`Network error fetching rule: ${msg}`, 0); + throw new GitHubError(`Network error fetching content: ${msg}`, 0); } if (!response.ok) { - handleResponseError(response.status, path); + handleResponseError(response.status, contentPath); } return response.text(); } +export async function fetchRawContent(path: string): Promise<string> { + return fetchContent(`rules/${path}.md`); +} + interface GitHubContentsEntry { name: string; type: string; @@ -53,11 +58,8 @@ export interface DirectoryEntry { type: 'file' | 'dir'; } -export async function listDirectory(path?: string): Promise<DirectoryEntry[]> { - const segments = [API_BASE]; - if (path) segments.push(path); - const base = segments.join('/'); - const url = `${base}?ref=${BRANCH}`; +export async function listContentDirectory(contentPath: string): Promise<DirectoryEntry[]> { + const url = `${API_BASE}/${contentPath}?ref=${BRANCH}`; const headers: Record<string, string> = { Accept: 'application/vnd.github.v3+json', @@ -77,7 +79,7 @@ export async function listDirectory(path?: string): Promise<DirectoryEntry[]> { } if (!response.ok) { - handleResponseError(response.status, path ?? 'rules'); + handleResponseError(response.status, contentPath); } const data = (await response.json()) as GitHubContentsEntry[]; @@ -85,7 +87,16 @@ export async function listDirectory(path?: string): Promise<DirectoryEntry[]> { return data .filter((entry) => entry.type === 'file' || entry.type === 'dir') .map((entry) => ({ - name: entry.name.replace(/\.md$/, ''), + name: entry.name.replace(/\.md$/, '').replace(/\.json$/, '').replace(/\.yml$/, ''), type: entry.type === 'dir' ? ('dir' as const) : ('file' as const), })); } + +export async function listDirectory(path?: string): Promise<DirectoryEntry[]> { + const contentPath = path ? 
`rules/${path}` : 'rules'; + const entries = await listContentDirectory(contentPath); + return entries.map((entry) => ({ + name: entry.name.replace(/\.md$/, ''), + type: entry.type, + })); +} diff --git a/packages/cli/tests/bridges/copilot.test.ts b/packages/cli/tests/bridges/copilot.test.ts index 0653a2b..79e8085 100644 --- a/packages/cli/tests/bridges/copilot.test.ts +++ b/packages/cli/tests/bridges/copilot.test.ts @@ -21,6 +21,7 @@ const CONFIG: ProjectConfig = { mode: 'copy', blocks: [], pulled: [], + assets: [], }; describe('copilotBridge', () => { diff --git a/packages/cli/tests/bridges/windsurf.test.ts b/packages/cli/tests/bridges/windsurf.test.ts index cf83a09..2c7d239 100644 --- a/packages/cli/tests/bridges/windsurf.test.ts +++ b/packages/cli/tests/bridges/windsurf.test.ts @@ -21,6 +21,7 @@ const CONFIG: ProjectConfig = { mode: 'copy', blocks: [], pulled: [], + assets: [], }; describe('windsurfBridge', () => { diff --git a/packages/cli/tests/commands/doctor.test.ts b/packages/cli/tests/commands/doctor.test.ts index 3ca858d..5fd373d 100644 --- a/packages/cli/tests/commands/doctor.test.ts +++ b/packages/cli/tests/commands/doctor.test.ts @@ -213,6 +213,7 @@ blocks: [] mode: 'copy', blocks: [], pulled: [], + assets: [], }; const result = checkBridgesAvailable(config); @@ -227,6 +228,7 @@ blocks: [] mode: 'copy', blocks: [], pulled: [], + assets: [], }; const result = checkBridgesAvailable(config); @@ -241,6 +243,7 @@ blocks: [] mode: 'copy', blocks: [], pulled: [], + assets: [], }; const result = checkBridgesAvailable(config); @@ -258,6 +261,7 @@ blocks: [] mode: 'copy', blocks: [], pulled: [], + assets: [], }; const result = await checkSymlinks(tmpDir, config); @@ -273,6 +277,7 @@ blocks: [] mode: 'link', blocks: [], pulled: [], + assets: [], }; // Create a target file and a symlink pointing to it @@ -294,6 +299,7 @@ blocks: [] mode: 'link', blocks: [], pulled: [], + assets: [], }; // Create a symlink pointing to a non-existent target diff --git 
a/packages/cli/tests/core/assets.test.ts b/packages/cli/tests/core/assets.test.ts new file mode 100644 index 0000000..d25ddd1 --- /dev/null +++ b/packages/cli/tests/core/assets.test.ts @@ -0,0 +1,239 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtemp, rm, mkdir, writeFile, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { + isAssetType, + parseAssetFrontmatter, + readAssets, + deployCommands, + deployTemplates, + deployHooks, + deployAssets, + removeAsset, +} from '../../src/core/assets.js'; +import type { ProjectConfig } from '../../src/bridges/types.js'; +import { fileExists } from '../../src/utils/fs.js'; + +const CONFIG: ProjectConfig = { + version: '0.1', + project: { name: 'test' }, + tools: ['claude'], + mode: 'copy', + blocks: [], + pulled: [], + assets: [], +}; + +let tmpDir: string; + +beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'assets-test-')); +}); + +afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); +}); + +describe('isAssetType', () => { + it('returns true for command', () => { + assert.equal(isAssetType('command'), true); + }); + + it('returns true for template', () => { + assert.equal(isAssetType('template'), true); + }); + + it('returns true for hook', () => { + assert.equal(isAssetType('hook'), true); + }); + + it('returns false for rule', () => { + assert.equal(isAssetType('rule'), false); + }); + + it('returns false for arbitrary string', () => { + assert.equal(isAssetType('foobar'), false); + }); + + it('returns false for preset', () => { + assert.equal(isAssetType('preset'), false); + }); +}); + +describe('parseAssetFrontmatter', () => { + it('parses frontmatter from markdown', () => { + const content = `--- +name: spec +description: Generate a spec +version: "0.2.0" +tool: claude +--- +Body content here`; + + const { frontmatter, body } = 
parseAssetFrontmatter(content); + assert.equal(frontmatter.name, 'spec'); + assert.equal(frontmatter.description, 'Generate a spec'); + assert.equal(frontmatter.version, '0.2.0'); + assert.equal(frontmatter.tool, 'claude'); + assert.ok(body.includes('Body content here')); + }); + + it('returns defaults when no frontmatter', () => { + const content = 'Just body content'; + const { frontmatter, body } = parseAssetFrontmatter(content); + assert.equal(frontmatter.name, ''); + assert.equal(frontmatter.version, '0.1.0'); + assert.equal(body, 'Just body content'); + }); + + it('handles output_path', () => { + const content = `--- +name: feature-spec +description: Feature spec template +output_path: docs/specs +--- +Template body`; + + const { frontmatter } = parseAssetFrontmatter(content); + assert.equal(frontmatter.output_path, 'docs/specs'); + }); +}); + +describe('readAssets', () => { + it('returns empty array when no assets directory', async () => { + const assets = await readAssets(tmpDir); + assert.deepEqual(assets, []); + }); + + it('reads assets from all type directories', async () => { + const commandsDir = join(tmpDir, '.dwf', 'assets', 'commands'); + const hooksDir = join(tmpDir, '.dwf', 'assets', 'hooks'); + await mkdir(commandsDir, { recursive: true }); + await mkdir(hooksDir, { recursive: true }); + + await writeFile(join(commandsDir, 'spec.md'), '---\nname: spec\n---\nBody'); + await writeFile(join(hooksDir, 'auto-format.json'), '{"name":"auto-format"}'); + + const assets = await readAssets(tmpDir); + assert.equal(assets.length, 2); + + const command = assets.find((a) => a.type === 'command'); + assert.ok(command); + assert.equal(command.name, 'spec'); + + const hook = assets.find((a) => a.type === 'hook'); + assert.ok(hook); + assert.equal(hook.name, 'auto-format'); + }); +}); + +describe('deployCommands', () => { + it('deploys commands stripping frontmatter', async () => { + const commandsDir = join(tmpDir, '.dwf', 'assets', 'commands'); + await 
mkdir(commandsDir, { recursive: true }); + + await writeFile( + join(commandsDir, 'spec.md'), + '---\nname: spec\ndescription: Generate a spec\nversion: "0.1.0"\ntool: claude\n---\nYou are a spec generator.\nDo great things.', + ); + + const result = await deployCommands(tmpDir, CONFIG); + assert.equal(result.deployed.length, 1); + assert.equal(result.deployed[0], '.claude/commands/spec.md'); + + const output = await readFile(join(tmpDir, '.claude', 'commands', 'spec.md'), 'utf-8'); + assert.ok(!output.includes('---')); + assert.ok(output.includes('You are a spec generator.')); + }); + + it('returns empty when no commands dir', async () => { + const result = await deployCommands(tmpDir, CONFIG); + assert.deepEqual(result.deployed, []); + }); +}); + +describe('deployTemplates', () => { + it('deploys templates to output_path', async () => { + const templatesDir = join(tmpDir, '.dwf', 'assets', 'templates'); + await mkdir(templatesDir, { recursive: true }); + + await writeFile( + join(templatesDir, 'feature-spec.md'), + '---\nname: feature-spec\ndescription: Template\noutput_path: docs/specs\n---\n# Feature Spec\n\n## Summary', + ); + + const result = await deployTemplates(tmpDir, CONFIG); + assert.equal(result.deployed.length, 1); + assert.equal(result.deployed[0], 'docs/specs/feature-spec.md'); + + const output = await readFile(join(tmpDir, 'docs', 'specs', 'feature-spec.md'), 'utf-8'); + assert.ok(output.includes('# Feature Spec')); + assert.ok(!output.includes('---')); + }); +}); + +describe('deployHooks', () => { + it('merges hook settings into settings.local.json', async () => { + const hooksDir = join(tmpDir, '.dwf', 'assets', 'hooks'); + await mkdir(hooksDir, { recursive: true }); + + const hookContent = JSON.stringify({ + name: 'auto-format', + version: '0.1.0', + settings: { + hooks: { + PostToolUse: [{ matcher: 'Write|Edit', command: 'pnpm format || true' }], + }, + }, + }); + await writeFile(join(hooksDir, 'auto-format.json'), hookContent); + + const 
result = await deployHooks(tmpDir, CONFIG); + assert.equal(result.deployed.length, 1); + + const settings = JSON.parse( + await readFile(join(tmpDir, '.claude', 'settings.local.json'), 'utf-8'), + ); + assert.ok(Array.isArray(settings.hooks.PostToolUse)); + assert.equal(settings.hooks.PostToolUse[0].matcher, 'Write|Edit'); + }); +}); + +describe('deployAssets', () => { + it('deploys all asset types', async () => { + const commandsDir = join(tmpDir, '.dwf', 'assets', 'commands'); + await mkdir(commandsDir, { recursive: true }); + await writeFile(join(commandsDir, 'spec.md'), '---\nname: spec\n---\nBody'); + + const result = await deployAssets(tmpDir, CONFIG); + assert.ok(result.deployed.length > 0); + }); +}); + +describe('removeAsset', () => { + it('removes an existing asset file', async () => { + const commandsDir = join(tmpDir, '.dwf', 'assets', 'commands'); + await mkdir(commandsDir, { recursive: true }); + await writeFile(join(commandsDir, 'spec.md'), 'content'); + + const removed = await removeAsset(tmpDir, 'command', 'spec'); + assert.equal(removed, true); + assert.equal(await fileExists(join(commandsDir, 'spec.md')), false); + }); + + it('returns false for non-existent asset', async () => { + const removed = await removeAsset(tmpDir, 'command', 'nonexistent'); + assert.equal(removed, false); + }); + + it('uses .json extension for hooks', async () => { + const hooksDir = join(tmpDir, '.dwf', 'assets', 'hooks'); + await mkdir(hooksDir, { recursive: true }); + await writeFile(join(hooksDir, 'auto-format.json'), '{}'); + + const removed = await removeAsset(tmpDir, 'hook', 'auto-format'); + assert.equal(removed, true); + }); +}); diff --git a/packages/cli/tests/core/settings-merge.test.ts b/packages/cli/tests/core/settings-merge.test.ts new file mode 100644 index 0000000..5109983 --- /dev/null +++ b/packages/cli/tests/core/settings-merge.test.ts @@ -0,0 +1,103 @@ +import { describe, it, beforeEach, afterEach } from 'node:test'; +import assert from 
'node:assert/strict'; +import { mkdtemp, rm, mkdir, writeFile, readFile } from 'node:fs/promises'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; +import { deepMerge, mergeSettingsFile } from '../../src/core/settings-merge.js'; + +let tmpDir: string; + +beforeEach(async () => { + tmpDir = await mkdtemp(join(tmpdir(), 'settings-merge-test-')); +}); + +afterEach(async () => { + await rm(tmpDir, { recursive: true, force: true }); +}); + +describe('deepMerge', () => { + it('merges flat objects', () => { + const result = deepMerge({ a: 1, b: 2 }, { b: 3, c: 4 }); + assert.deepEqual(result, { a: 1, b: 3, c: 4 }); + }); + + it('concatenates arrays', () => { + const result = deepMerge({ items: [1, 2] }, { items: [3, 4] }); + assert.deepEqual(result, { items: [1, 2, 3, 4] }); + }); + + it('recursively merges nested objects', () => { + const target = { hooks: { PreToolUse: [{ matcher: 'Read' }] } }; + const source = { hooks: { PostToolUse: [{ matcher: 'Write' }] } }; + const result = deepMerge(target, source); + assert.deepEqual(result, { + hooks: { + PreToolUse: [{ matcher: 'Read' }], + PostToolUse: [{ matcher: 'Write' }], + }, + }); + }); + + it('overwrites primitives', () => { + const result = deepMerge({ name: 'old' }, { name: 'new' }); + assert.equal(result.name, 'new'); + }); + + it('handles source overwriting array with non-array', () => { + const result = deepMerge({ items: [1, 2] }, { items: 'replaced' }); + assert.equal(result.items, 'replaced'); + }); + + it('preserves keys not in source', () => { + const result = deepMerge({ a: 1, b: 2 }, { c: 3 }); + assert.equal(result.a, 1); + assert.equal(result.b, 2); + assert.equal(result.c, 3); + }); +}); + +describe('mergeSettingsFile', () => { + it('creates settings file when none exists', async () => { + await mergeSettingsFile(tmpDir, { + hooks: { PostToolUse: [{ matcher: 'Write', command: 'fmt' }] }, + }); + + const content = JSON.parse( + await readFile(join(tmpDir, '.claude', 
'settings.local.json'), 'utf-8'), + ); + assert.ok(Array.isArray(content.hooks.PostToolUse)); + }); + + it('merges with existing settings', async () => { + const settingsDir = join(tmpDir, '.claude'); + await mkdir(settingsDir, { recursive: true }); + await writeFile( + join(settingsDir, 'settings.local.json'), + JSON.stringify({ existing: true, hooks: { PreToolUse: [{ matcher: 'Read' }] } }), + ); + + await mergeSettingsFile(tmpDir, { + hooks: { PostToolUse: [{ matcher: 'Write', command: 'fmt' }] }, + }); + + const content = JSON.parse( + await readFile(join(settingsDir, 'settings.local.json'), 'utf-8'), + ); + assert.equal(content.existing, true); + assert.ok(Array.isArray(content.hooks.PreToolUse)); + assert.ok(Array.isArray(content.hooks.PostToolUse)); + }); + + it('handles corrupted existing file', async () => { + const settingsDir = join(tmpDir, '.claude'); + await mkdir(settingsDir, { recursive: true }); + await writeFile(join(settingsDir, 'settings.local.json'), 'not json{{{'); + + await mergeSettingsFile(tmpDir, { key: 'value' }); + + const content = JSON.parse( + await readFile(join(settingsDir, 'settings.local.json'), 'utf-8'), + ); + assert.equal(content.key, 'value'); + }); +});