diff --git a/.gitignore b/.gitignore index 8e799f8..17635e2 100644 --- a/.gitignore +++ b/.gitignore @@ -52,4 +52,5 @@ package-lock.json /dist #test output -/absolutejs-project \ No newline at end of file +/absolutejs-project +/.test-dependency-cache \ No newline at end of file diff --git a/README.md b/README.md deleted file mode 100644 index 5b91576..0000000 --- a/README.md +++ /dev/null @@ -1,179 +0,0 @@ -# create-absolutejs - -A CLI tool to scaffold new AbsoluteJS projects quickly and effortlessly. - -## Usage - -Scaffold a new project called ``: - -```bash -bun create absolutejs my-app -``` - -Alternatively, using npm, Yarn, or pnpm: - -```bash -npm create absolutejs my-app -yarn create absolutejs my-app -pnpm create absolutejs my-app -``` - -By default, the CLI will interactively prompt you for any missing configuration values. You can also supply flags to skip those prompts: - -- To skip **all** optional prompts and use `none` for every optional configuration: - ```bash - bun create absolutejs my-app --skip - ``` -- To skip **one** optional prompt without providing a real value, pass `none` to that flag: - ```bash - bun create absolutejs my-app --auth none --engine none - ``` - -## Options - -```text -Usage: create-absolute [project-name] [options] -``` - -### Arguments - -- `project-name` - Name of the application to create. If omitted, you'll be prompted to enter one. - -### Options - -- `--help`, `-h` - Show this help message and exit. - -- `--debug`, `-d` - Display a summary of the project configuration after creation. - -- `--angular` - Include an Angular frontend. - -- `--angular-dir ` - Specify the directory for and use the Angular frontend. - -- `--assets ` - Directory name for your static assets. - -- `--auth ` - Pre-configured auth plugin (currently only `absolute-auth`) or `none`. - -- `--biome` - Use Biome for code quality and formatting. - -- `--build ` - Output directory for build artifacts. 
- -- `--db ` - Database engine (`postgresql` | `mysql` | `sqlite` | `mongodb` | `redis` | `singlestore` | `cockroachdb` | `mssql`) or `none`. - -- `--db-dir ` - Directory name for your database files. - -- `--db-host ` - Database host provider (`neon` | `planetscale` | `supabase` | `turso` | `vercel` | `upstash` | `atlas`) or `none`. - -- `--directory ` - Directory-naming strategy: `default` or `custom`. - -- `--eslint+prettier` - Use ESLint + Prettier for code quality and formatting. - -- `--git` - Initialize a Git repository. - -- `--html` - Include a plain HTML frontend. - -- `--html-dir ` - Specify the directory for and use the HTML frontend. - -- `--html-scripts` - Enable HTML scripting with TypeScript. - -- `--htmx` - Include an HTMX frontend. - -- `--htmx-dir ` - Specify the directory for and use the HTMX frontend. - -- `--install` - Use the same package manager to install dependencies. - -- `--lts` - Use LTS versions of required packages. - -- `--orm ` - ORM to configure: `drizzle` | `prisma` | `none`. - -- `--plugin ` - Elysia plugin(s) to include (repeatable); `none` skips plugin setup. - -- `--react` - Include a React frontend. - -- `--react-dir ` - Specify the directory for and use the React frontend. - -- `--skip` - Skip non-required prompts; uses `none` for all optional configs. - -- `--svelte` - Include a Svelte frontend. - -- `--svelte-dir ` - Specify the directory for and use the Svelte frontend. - -- `--tailwind` - Include Tailwind CSS setup. - -- `--tailwind-input ` - Path to your Tailwind CSS entry file. - -- `--tailwind-output ` - Path for the generated Tailwind CSS bundle. - -- `--vue` - Include a Vue frontend. - -- `--vue-dir ` - Specify the directory for and use the Vue frontend. 
- -## Directory Configuration - -Choose between the **default** layout (pre-configured folder names) or **custom**, which prompts you to specify each directory name yourself: - -```bash -bun create absolutejs my-app --directory custom -``` - -## Debug & LTS Flags - -- `--debug`, `-d` - After scaffolding, prints a detailed summary of your configuration (language, frontends, directories, etc.). -- `--lts` - Instructs the CLI to fetch and pin the latest published versions of your dependencies instead of its default pinned versions. - -## Getting Started - -Once the scaffold completes, you’re ready to go: - -```bash -cd my-app -# (If you skipped automated install) -bun install -# Then start the dev server -bun run dev -``` - -If you downloaded this repository to test or make changes you can use `bun run test` to start the created dev server without having to change directories back and forth. - -## Contributing - -Contributions are welcome! Feel free to open issues or submit pull requests to improve the CLI. - -## License - -Licensed under CC BY-NC 4.0. 
diff --git a/bun.lock b/bun.lock index 5d48136..be8972f 100644 --- a/bun.lock +++ b/bun.lock @@ -36,11 +36,11 @@ "@babel/helper-string-parser": ["@babel/helper-string-parser@7.27.1", "", {}, "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA=="], - "@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.27.1", "", {}, "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow=="], + "@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.28.5", "", {}, "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q=="], - "@babel/parser": ["@babel/parser@7.28.0", "", { "dependencies": { "@babel/types": "^7.28.0" }, "bin": "./bin/babel-parser.js" }, "sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g=="], + "@babel/parser": ["@babel/parser@7.28.5", "", { "dependencies": { "@babel/types": "^7.28.5" }, "bin": "./bin/babel-parser.js" }, "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ=="], - "@babel/types": ["@babel/types@7.28.0", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1" } }, "sha512-jYnje+JyZG5YThjHiF28oT4SIZLnYOcSBb6+SDaFIyzDVSkXQmQQYclJ2R+YxcdmK0AX6x1E5OQNtuh3jHDrUg=="], + "@babel/types": ["@babel/types@7.28.5", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.28.5" } }, "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA=="], "@clack/core": ["@clack/core@0.5.0", "", { "dependencies": { "picocolors": "^1.0.0", "sisteransi": "^1.0.5" } }, "sha512-p3y0FIOwaYRUPRcMO7+dlmLh8PSRcrjuTndsiA0WAFbWES0mLZlrjVoBRZ9DzkPFJZG6KGkJmoEAY0ZcVWTkow=="], @@ -52,61 +52,61 @@ "@esbuild-kit/esm-loader": ["@esbuild-kit/esm-loader@2.6.5", "", { "dependencies": { 
"@esbuild-kit/core-utils": "^3.3.2", "get-tsconfig": "^4.7.0" } }, "sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA=="], - "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.6", "", { "os": "aix", "cpu": "ppc64" }, "sha512-ShbM/3XxwuxjFiuVBHA+d3j5dyac0aEVVq1oluIDf71hUw0aRF59dV/efUsIwFnR6m8JNM2FjZOzmaZ8yG61kw=="], + "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA=="], - "@esbuild/android-arm": ["@esbuild/android-arm@0.25.6", "", { "os": "android", "cpu": "arm" }, "sha512-S8ToEOVfg++AU/bHwdksHNnyLyVM+eMVAOf6yRKFitnwnbwwPNqKr3srzFRe7nzV69RQKb5DgchIX5pt3L53xg=="], + "@esbuild/android-arm": ["@esbuild/android-arm@0.25.12", "", { "os": "android", "cpu": "arm" }, "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg=="], - "@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.6", "", { "os": "android", "cpu": "arm64" }, "sha512-hd5zdUarsK6strW+3Wxi5qWws+rJhCCbMiC9QZyzoxfk5uHRIE8T287giQxzVpEvCwuJ9Qjg6bEjcRJcgfLqoA=="], + "@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.12", "", { "os": "android", "cpu": "arm64" }, "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg=="], - "@esbuild/android-x64": ["@esbuild/android-x64@0.25.6", "", { "os": "android", "cpu": "x64" }, "sha512-0Z7KpHSr3VBIO9A/1wcT3NTy7EB4oNC4upJ5ye3R7taCc2GUdeynSLArnon5G8scPwaU866d3H4BCrE5xLW25A=="], + "@esbuild/android-x64": ["@esbuild/android-x64@0.25.12", "", { "os": "android", "cpu": "x64" }, "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg=="], - "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.6", "", { "os": "darwin", "cpu": "arm64" }, "sha512-FFCssz3XBavjxcFxKsGy2DYK5VSvJqa6y5HXljKzhRZ87LvEi13brPrf/wdyl/BbpbMKJNOr1Sd0jtW4Ge1pAA=="], + "@esbuild/darwin-arm64": 
["@esbuild/darwin-arm64@0.25.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg=="], - "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-GfXs5kry/TkGM2vKqK2oyiLFygJRqKVhawu3+DOCk7OxLy/6jYkWXhlHwOoTb0WqGnWGAS7sooxbZowy+pK9Yg=="], + "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA=="], - "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.6", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-aoLF2c3OvDn2XDTRvn8hN6DRzVVpDlj2B/F66clWd/FHLiHaG3aVZjxQX2DYphA5y/evbdGvC6Us13tvyt4pWg=="], + "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.12", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg=="], - "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.6", "", { "os": "freebsd", "cpu": "x64" }, "sha512-2SkqTjTSo2dYi/jzFbU9Plt1vk0+nNg8YC8rOXXea+iA3hfNJWebKYPs3xnOUf9+ZWhKAaxnQNUf2X9LOpeiMQ=="], + "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.12", "", { "os": "freebsd", "cpu": "x64" }, "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ=="], - "@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.6", "", { "os": "linux", "cpu": "arm" }, "sha512-SZHQlzvqv4Du5PrKE2faN0qlbsaW/3QQfUUc6yO2EjFcA83xnwm91UbEEVx4ApZ9Z5oG8Bxz4qPE+HFwtVcfyw=="], + "@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.12", "", { "os": "linux", "cpu": "arm" }, "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw=="], - "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-b967hU0gqKd9Drsh/UuAm21Khpoh6mPBSgz8mKRq4P5mVK8bpA+hQzmm/ZwGVULSNBzKdZPQBRT3+WuVavcWsQ=="], + "@esbuild/linux-arm64": 
["@esbuild/linux-arm64@0.25.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ=="], - "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.6", "", { "os": "linux", "cpu": "ia32" }, "sha512-aHWdQ2AAltRkLPOsKdi3xv0mZ8fUGPdlKEjIEhxCPm5yKEThcUjHpWB1idN74lfXGnZ5SULQSgtr5Qos5B0bPw=="], + "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.12", "", { "os": "linux", "cpu": "ia32" }, "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA=="], - "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.6", "", { "os": "linux", "cpu": "none" }, "sha512-VgKCsHdXRSQ7E1+QXGdRPlQ/e08bN6WMQb27/TMfV+vPjjTImuT9PmLXupRlC90S1JeNNW5lzkAEO/McKeJ2yg=="], + "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng=="], - "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.6", "", { "os": "linux", "cpu": "none" }, "sha512-WViNlpivRKT9/py3kCmkHnn44GkGXVdXfdc4drNmRl15zVQ2+D2uFwdlGh6IuK5AAnGTo2qPB1Djppj+t78rzw=="], + "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw=="], - "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.6", "", { "os": "linux", "cpu": "ppc64" }, "sha512-wyYKZ9NTdmAMb5730I38lBqVu6cKl4ZfYXIs31Baf8aoOtB4xSGi3THmDYt4BTFHk7/EcVixkOV2uZfwU3Q2Jw=="], + "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.12", "", { "os": "linux", "cpu": "ppc64" }, "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA=="], - "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.6", "", { "os": "linux", "cpu": "none" }, "sha512-KZh7bAGGcrinEj4qzilJ4hqTY3Dg2U82c8bv+e1xqNqZCrCyc+TL9AUEn5WGKDzm3CfC5RODE/qc96OcbIe33w=="], + "@esbuild/linux-riscv64": 
["@esbuild/linux-riscv64@0.25.12", "", { "os": "linux", "cpu": "none" }, "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w=="], - "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.6", "", { "os": "linux", "cpu": "s390x" }, "sha512-9N1LsTwAuE9oj6lHMyyAM+ucxGiVnEqUdp4v7IaMmrwb06ZTEVCIs3oPPplVsnjPfyjmxwHxHMF8b6vzUVAUGw=="], + "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.12", "", { "os": "linux", "cpu": "s390x" }, "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg=="], - "@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.6", "", { "os": "linux", "cpu": "x64" }, "sha512-A6bJB41b4lKFWRKNrWoP2LHsjVzNiaurf7wyj/XtFNTsnPuxwEBWHLty+ZE0dWBKuSK1fvKgrKaNjBS7qbFKig=="], + "@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.12", "", { "os": "linux", "cpu": "x64" }, "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw=="], - "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.6", "", { "os": "none", "cpu": "arm64" }, "sha512-IjA+DcwoVpjEvyxZddDqBY+uJ2Snc6duLpjmkXm/v4xuS3H+3FkLZlDm9ZsAbF9rsfP3zeA0/ArNDORZgrxR/Q=="], + "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.12", "", { "os": "none", "cpu": "arm64" }, "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg=="], - "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.6", "", { "os": "none", "cpu": "x64" }, "sha512-dUXuZr5WenIDlMHdMkvDc1FAu4xdWixTCRgP7RQLBOkkGgwuuzaGSYcOpW4jFxzpzL1ejb8yF620UxAqnBrR9g=="], + "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.12", "", { "os": "none", "cpu": "x64" }, "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ=="], - "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.6", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-l8ZCvXP0tbTJ3iaqdNf3pjaOSd5ex/e6/omLIQCVBLmHTlfXW3zAxQ4fnDmPLOB1x9xrcSi/xtCWFwCZRIaEwg=="], + "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.12", 
"", { "os": "openbsd", "cpu": "arm64" }, "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A=="], - "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.6", "", { "os": "openbsd", "cpu": "x64" }, "sha512-hKrmDa0aOFOr71KQ/19JC7az1P0GWtCN1t2ahYAf4O007DHZt/dW8ym5+CUdJhQ/qkZmI1HAF8KkJbEFtCL7gw=="], + "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.12", "", { "os": "openbsd", "cpu": "x64" }, "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw=="], - "@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.25.6", "", { "os": "none", "cpu": "arm64" }, "sha512-+SqBcAWoB1fYKmpWoQP4pGtx+pUUC//RNYhFdbcSA16617cchuryuhOCRpPsjCblKukAckWsV+aQ3UKT/RMPcA=="], + "@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.25.12", "", { "os": "none", "cpu": "arm64" }, "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg=="], - "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.6", "", { "os": "sunos", "cpu": "x64" }, "sha512-dyCGxv1/Br7MiSC42qinGL8KkG4kX0pEsdb0+TKhmJZgCUDBGmyo1/ArCjNGiOLiIAgdbWgmWgib4HoCi5t7kA=="], + "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.12", "", { "os": "sunos", "cpu": "x64" }, "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w=="], - "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.6", "", { "os": "win32", "cpu": "arm64" }, "sha512-42QOgcZeZOvXfsCBJF5Afw73t4veOId//XD3i+/9gSkhSV6Gk3VPlWncctI+JcOyERv85FUo7RxuxGy+z8A43Q=="], + "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg=="], - "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.6", "", { "os": "win32", "cpu": "ia32" }, "sha512-4AWhgXmDuYN7rJI6ORB+uU9DHLq/erBbuMoAuB4VWJTu5KtCgcKYPynF0YI1VkBNuEfjNlLrFr9KZPJzrtLkrQ=="], + "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.12", "", { "os": 
"win32", "cpu": "ia32" }, "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ=="], - "@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.6", "", { "os": "win32", "cpu": "x64" }, "sha512-NgJPHHbEpLQgDH2MjQu90pzW/5vvXIZ7KOnPyNBm92A6WgZ/7b6fJyUBjoumLqeOQQGqY2QjQxRo97ah4Sj0cA=="], + "@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.12", "", { "os": "win32", "cpu": "x64" }, "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA=="], - "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.7.0", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw=="], + "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.9.0", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g=="], - "@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.1", "", {}, "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="], + "@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.2", "", {}, "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew=="], "@eslint/config-array": ["@eslint/config-array@0.20.1", "", { "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-OL0RJzC/CBzli0DrrR31qzj6d6i6Mm3HByuhflhl4LOBiWxN+3i6/t/ZQQNii4tjksXi8r2CRW1wMpWA2ULUEw=="], @@ -118,25 +118,25 @@ "@eslint/js": ["@eslint/js@9.27.0", "", {}, "sha512-G5JD9Tu5HJEu4z2Uo4aHY2sLV64B7CDMXxFzqzjl3NKd6RVzSXNoE80jk7Y0lJkTTkjiIhBAqmlYwjuBY3tvpA=="], - "@eslint/object-schema": ["@eslint/object-schema@2.1.6", "", {}, 
"sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="], + "@eslint/object-schema": ["@eslint/object-schema@2.1.7", "", {}, "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA=="], - "@eslint/plugin-kit": ["@eslint/plugin-kit@0.3.3", "", { "dependencies": { "@eslint/core": "^0.15.1", "levn": "^0.4.1" } }, "sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag=="], + "@eslint/plugin-kit": ["@eslint/plugin-kit@0.3.5", "", { "dependencies": { "@eslint/core": "^0.15.2", "levn": "^0.4.1" } }, "sha512-Z5kJ+wU3oA7MMIqVR9tyZRtjYPr4OC004Q4Rw7pgOKUOKkJfZ3O24nz3WYfGRpMDNmcOi3TwQOmgm7B7Tpii0w=="], "@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="], - "@humanfs/node": ["@humanfs/node@0.16.6", "", { "dependencies": { "@humanfs/core": "^0.19.1", "@humanwhocodes/retry": "^0.3.0" } }, "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw=="], + "@humanfs/node": ["@humanfs/node@0.16.7", "", { "dependencies": { "@humanfs/core": "^0.19.1", "@humanwhocodes/retry": "^0.4.0" } }, "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ=="], "@humanwhocodes/module-importer": ["@humanwhocodes/module-importer@1.0.1", "", {}, "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="], "@humanwhocodes/retry": ["@humanwhocodes/retry@0.4.3", "", {}, "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ=="], - "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.12", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg=="], + "@jridgewell/gen-mapping": 
["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="], "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="], - "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.4", "", {}, "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw=="], + "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="], - "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.29", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ=="], + "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="], "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="], @@ -148,9 +148,9 @@ "@stylistic/eslint-plugin-ts": ["@stylistic/eslint-plugin-ts@4.2.0", "", { "dependencies": { "@typescript-eslint/utils": "^8.23.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0" }, "peerDependencies": { "eslint": ">=9.0.0" } }, "sha512-j2o2GvOx9v66x8hmp/HJ+0T+nOppiO5ycGsCkifh7JPGgjxEhpkGmIGx3RWsoxpWbad3VCX8e8/T8n3+7ze1Zg=="], - "@sveltejs/acorn-typescript": ["@sveltejs/acorn-typescript@1.0.5", "", { 
"peerDependencies": { "acorn": "^8.9.0" } }, "sha512-IwQk4yfwLdibDlrXVE04jTZYlLnwsTT2PIOQQGNLWfjavGifnk1JD1LcZjZaBTRcxZu2FfPfNLOE04DSu9lqtQ=="], + "@sveltejs/acorn-typescript": ["@sveltejs/acorn-typescript@1.0.6", "", { "peerDependencies": { "acorn": "^8.9.0" } }, "sha512-4awhxtMh4cx9blePWl10HRHj8Iivtqj+2QdDCSMDzxG+XKa9+VCNupQuCuvzEhYPzZSrX+0gC+0lHA/0fFKKQQ=="], - "@types/bun": ["@types/bun@1.2.18", "", { "dependencies": { "bun-types": "1.2.18" } }, "sha512-Xf6RaWVheyemaThV0kUfaAUvCNokFr+bH8Jxp+tTZfx7dAPA8z9ePnP9S9+Vspzuxxx9JRAXhnyccRj3GyCMdQ=="], + "@types/bun": ["@types/bun@1.3.2", "", { "dependencies": { "bun-types": "1.3.2" } }, "sha512-t15P7k5UIgHKkxwnMNkJbWlh/617rkDGEdSsDbu+qNHTaz9SKf7aC8fiIlUdD5RPpH6GEkP0cK7WlvmrEBRtWg=="], "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="], @@ -158,7 +158,7 @@ "@types/json5": ["@types/json5@0.0.29", "", {}, "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ=="], - "@types/node": ["@types/node@24.0.13", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-Qm9OYVOFHFYg3wJoTSrz80hoec5Lia/dPp84do3X7dZvLikQvM1YpmvTBEdIr/e+U8HTkFjLHLnl78K/qjf+jQ=="], + "@types/node": ["@types/node@24.10.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ=="], "@types/react": ["@types/react@19.1.4", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-EB1yiiYdvySuIITtD5lhW4yPyJ31RkJkkDw794LaQYrxCSaQV/47y5o1FMC4zF9ZyjUjzJMZwbovEnT5yHTW6g=="], @@ -166,19 +166,15 @@ "@typescript-eslint/parser": ["@typescript-eslint/parser@8.32.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/types": "8.32.0", "@typescript-eslint/typescript-estree": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || 
^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-B2MdzyWxCE2+SqiZHAjPphft+/2x2FlO9YBx7eKE1BCb+rqBlQdhtAEhzIEdozHd55DXPmxBdpMygFJjfjjA9A=="], - "@typescript-eslint/project-service": ["@typescript-eslint/project-service@8.36.0", "", { "dependencies": { "@typescript-eslint/tsconfig-utils": "^8.36.0", "@typescript-eslint/types": "^8.36.0", "debug": "^4.3.4" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-JAhQFIABkWccQYeLMrHadu/fhpzmSQ1F1KXkpzqiVxA/iYI6UnRt2trqXHt1sYEcw1mxLnB9rKMsOxXPxowN/g=="], - - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.36.0", "", { "dependencies": { "@typescript-eslint/types": "8.36.0", "@typescript-eslint/visitor-keys": "8.36.0" } }, "sha512-wCnapIKnDkN62fYtTGv2+RY8FlnBYA3tNm0fm91kc2BjPhV2vIjwwozJ7LToaLAyb1ca8BxrS7vT+Pvvf7RvqA=="], - - "@typescript-eslint/tsconfig-utils": ["@typescript-eslint/tsconfig-utils@8.36.0", "", { "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-Nhh3TIEgN18mNbdXpd5Q8mSCBnrZQeY9V7Ca3dqYvNDStNIGRmJA6dmrIPMJ0kow3C7gcQbpsG2rPzy1Ks/AnA=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0" } }, "sha512-jc/4IxGNedXkmG4mx4nJTILb6TMjL66D41vyeaPWvDUmeYQzF3lKtN15WsAeTr65ce4mPxwopPSo1yUUAWw0hQ=="], "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.32.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.32.0", "@typescript-eslint/utils": "8.32.0", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-t2vouuYQKEKSLtJaa5bB4jHeha2HJczQ6E5IXPDPgIty9EqcJxpr1QHQ86YyIPwDwxvUmLfP2YADQ5ZY4qddZg=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.36.0", "", {}, "sha512-xGms6l5cTJKQPZOKM75Dl9yBfNdGeLRsIyufewnxT4vZTrjC0ImQT4fj8QmtJK84F58uSh5HVBSANwcfiXxABQ=="], + "@typescript-eslint/types": 
["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.36.0", "", { "dependencies": { "@typescript-eslint/project-service": "8.36.0", "@typescript-eslint/tsconfig-utils": "8.36.0", "@typescript-eslint/types": "8.36.0", "@typescript-eslint/visitor-keys": "8.36.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-JaS8bDVrfVJX4av0jLpe4ye0BpAaUW7+tnS4Y4ETa3q7NoZgzYbN9zDQTJ8kPb5fQ4n0hliAt9tA4Pfs2zA2Hg=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-pU9VD7anSCOIoBFnhTGfOzlVFQIA1XXiQpH/CezqOBaDppRwTglJzCC6fUQGpfwey4T183NKhF1/mfatYmjRqQ=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.36.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.36.0", "@typescript-eslint/types": "8.36.0", "@typescript-eslint/typescript-estree": "8.36.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-VOqmHu42aEMT+P2qYjylw6zP/3E/HvptRwdn/PZxyV27KhZg2IOszXod4NcXisWzPAGSS4trE/g4moNj6XmH2g=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.32.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/types": "8.32.0", "@typescript-eslint/typescript-estree": "8.32.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", 
"typescript": ">=4.8.4 <5.9.0" } }, "sha512-8S9hXau6nQ/sYVtC3D6ISIDoJzS1NsCK+gluVhLN2YkBPX+/1wkwyUiDKnxRh15579WoOIyVWnoyIf3yGI9REw=="], "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-1rYQTCLFFzOI5Nl0c8LUpJT8HxpwVRn9E4CkMsYfuN6ctmQqExjSTzzSk0Tz2apmXy7WU6/6fyaZVVA/thPN+w=="], @@ -234,15 +230,17 @@ "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], + "baseline-browser-mapping": ["baseline-browser-mapping@2.8.26", "", { "bin": { "baseline-browser-mapping": "dist/cli.js" } }, "sha512-73lC1ugzwoaWCLJ1LvOgrR5xsMLTqSKIEoMHVtL9E/HNk0PXtTM76ZIm84856/SF7Nv8mPZxKoBsgpm0tR1u1Q=="], + "brace-expansion": ["brace-expansion@1.1.12", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="], "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - "browserslist": ["browserslist@4.25.1", "", { "dependencies": { "caniuse-lite": "^1.0.30001726", "electron-to-chromium": "^1.5.173", "node-releases": "^2.0.19", "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" } }, "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw=="], + "browserslist": ["browserslist@4.28.0", "", { "dependencies": { "baseline-browser-mapping": "^2.8.25", "caniuse-lite": "^1.0.30001754", "electron-to-chromium": "^1.5.249", "node-releases": "^2.0.27", "update-browserslist-db": "^1.1.4" }, "bin": { "browserslist": "cli.js" } }, "sha512-tbydkR/CxfMwelN0vwdP/pLkDwyAASZ+VfWm4EOwlB6SWhx1sYnWLqo8N5j0rAzPfzfRaxt0mM/4wPU/Su84RQ=="], "buffer-from": ["buffer-from@1.1.2", "", {}, 
"sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="], - "bun-types": ["bun-types@1.2.18", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-04+Eha5NP7Z0A9YgDAzMk5PHR16ZuLVa83b26kH5+cp1qZW4F6FmAURngE7INf4tKOvCE69vYvDEwoNl1tGiWw=="], + "bun-types": ["bun-types@1.3.2", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-i/Gln4tbzKNuxP70OWhJRZz1MRfvqExowP7U6JKoI8cntFrtxg7RJK3jvz7wQW54UuvNC8tbKHHri5fy74FVqg=="], "call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="], @@ -252,7 +250,7 @@ "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], - "caniuse-lite": ["caniuse-lite@1.0.30001727", "", {}, "sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q=="], + "caniuse-lite": ["caniuse-lite@1.0.30001754", "", {}, "sha512-x6OeBXueoAceOmotzx3PO4Zpt4rzpeIFsSr6AAePTZxSkXiYDUmpypEl7e2+8NCd9bD7bXjqyef8CJYPC1jfxg=="], "chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], @@ -274,7 +272,7 @@ "data-view-byte-offset": ["data-view-byte-offset@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-data-view": "^1.0.1" } }, "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ=="], - "debug": ["debug@4.4.1", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="], + "debug": 
["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="], "deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], @@ -290,7 +288,7 @@ "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], - "electron-to-chromium": ["electron-to-chromium@1.5.182", "", {}, "sha512-Lv65Btwv9W4J9pyODI6EWpdnhfvrve/us5h1WspW8B2Fb0366REPtY3hX7ounk1CkV/TBjWCEvCBBbYbmV0qCA=="], + "electron-to-chromium": ["electron-to-chromium@1.5.250", "", {}, "sha512-/5UMj9IiGDMOFBnN4i7/Ry5onJrAGSbOGo3s9FEKmwobGq6xw832ccET0CE3CkkMBZ8GJSlUIesZofpyurqDXw=="], "entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], @@ -308,7 +306,7 @@ "es-to-primitive": ["es-to-primitive@1.3.0", "", { "dependencies": { "is-callable": "^1.2.7", "is-date-object": "^1.0.5", "is-symbol": "^1.0.4" } }, "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g=="], - "esbuild": ["esbuild@0.25.6", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.6", "@esbuild/android-arm": "0.25.6", "@esbuild/android-arm64": "0.25.6", "@esbuild/android-x64": "0.25.6", "@esbuild/darwin-arm64": "0.25.6", "@esbuild/darwin-x64": "0.25.6", "@esbuild/freebsd-arm64": "0.25.6", "@esbuild/freebsd-x64": "0.25.6", "@esbuild/linux-arm": "0.25.6", "@esbuild/linux-arm64": "0.25.6", "@esbuild/linux-ia32": "0.25.6", "@esbuild/linux-loong64": "0.25.6", "@esbuild/linux-mips64el": "0.25.6", "@esbuild/linux-ppc64": "0.25.6", "@esbuild/linux-riscv64": "0.25.6", "@esbuild/linux-s390x": "0.25.6", "@esbuild/linux-x64": "0.25.6", "@esbuild/netbsd-arm64": "0.25.6", 
"@esbuild/netbsd-x64": "0.25.6", "@esbuild/openbsd-arm64": "0.25.6", "@esbuild/openbsd-x64": "0.25.6", "@esbuild/openharmony-arm64": "0.25.6", "@esbuild/sunos-x64": "0.25.6", "@esbuild/win32-arm64": "0.25.6", "@esbuild/win32-ia32": "0.25.6", "@esbuild/win32-x64": "0.25.6" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-GVuzuUwtdsghE3ocJ9Bs8PNoF13HNQ5TXbEi2AhvVb8xU1Iwt9Fos9FEamfoee+u/TOsn7GUWc04lz46n2bbTg=="], + "esbuild": ["esbuild@0.25.12", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.12", "@esbuild/android-arm": "0.25.12", "@esbuild/android-arm64": "0.25.12", "@esbuild/android-x64": "0.25.12", "@esbuild/darwin-arm64": "0.25.12", "@esbuild/darwin-x64": "0.25.12", "@esbuild/freebsd-arm64": "0.25.12", "@esbuild/freebsd-x64": "0.25.12", "@esbuild/linux-arm": "0.25.12", "@esbuild/linux-arm64": "0.25.12", "@esbuild/linux-ia32": "0.25.12", "@esbuild/linux-loong64": "0.25.12", "@esbuild/linux-mips64el": "0.25.12", "@esbuild/linux-ppc64": "0.25.12", "@esbuild/linux-riscv64": "0.25.12", "@esbuild/linux-s390x": "0.25.12", "@esbuild/linux-x64": "0.25.12", "@esbuild/netbsd-arm64": "0.25.12", "@esbuild/netbsd-x64": "0.25.12", "@esbuild/openbsd-arm64": "0.25.12", "@esbuild/openbsd-x64": "0.25.12", "@esbuild/openharmony-arm64": "0.25.12", "@esbuild/sunos-x64": "0.25.12", "@esbuild/win32-arm64": "0.25.12", "@esbuild/win32-ia32": "0.25.12", "@esbuild/win32-x64": "0.25.12" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg=="], "esbuild-register": ["esbuild-register@3.6.0", "", { "dependencies": { "debug": "^4.3.4" }, "peerDependencies": { "esbuild": ">=0.12 <1" } }, "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg=="], @@ -340,7 +338,7 @@ "esquery": ["esquery@1.6.0", "", { "dependencies": { "estraverse": "^5.1.0" } }, "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="], - "esrap": 
["esrap@2.1.0", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-yzmPNpl7TBbMRC5Lj2JlJZNPml0tzqoqP5B1JXycNUwtqma9AKCO0M2wHrdgsHcy1WRW7S9rJknAMtByg3usgA=="], + "esrap": ["esrap@2.1.2", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15" } }, "sha512-DgvlIQeowRNyvLPWW4PT7Gu13WznY288Du086E751mwwbsgr29ytBiYeLzAGIo0qk3Ujob0SDk8TiSaM5WQzNg=="], "esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="], @@ -380,13 +378,15 @@ "functions-have-names": ["functions-have-names@1.2.3", "", {}, "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ=="], + "generator-function": ["generator-function@2.0.1", "", {}, "sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g=="], + "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="], "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], "get-symbol-description": ["get-symbol-description@1.1.0", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6" } }, "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg=="], - "get-tsconfig": ["get-tsconfig@4.10.1", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, 
"sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ=="], + "get-tsconfig": ["get-tsconfig@4.13.0", "", { "dependencies": { "resolve-pkg-maps": "^1.0.0" } }, "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ=="], "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], @@ -440,7 +440,7 @@ "is-finalizationregistry": ["is-finalizationregistry@1.1.1", "", { "dependencies": { "call-bound": "^1.0.3" } }, "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg=="], - "is-generator-function": ["is-generator-function@1.1.0", "", { "dependencies": { "call-bound": "^1.0.3", "get-proto": "^1.0.0", "has-tostringtag": "^1.0.2", "safe-regex-test": "^1.1.0" } }, "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ=="], + "is-generator-function": ["is-generator-function@1.1.2", "", { "dependencies": { "call-bound": "^1.0.4", "generator-function": "^2.0.0", "get-proto": "^1.0.1", "has-tostringtag": "^1.0.2", "safe-regex-test": "^1.1.0" } }, "sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA=="], "is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="], @@ -496,7 +496,7 @@ "lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="], - "magic-string": ["magic-string@0.30.17", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0" } }, "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA=="], + "magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": 
"^1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="], "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], @@ -514,7 +514,7 @@ "natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="], - "node-releases": ["node-releases@2.0.19", "", {}, "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw=="], + "node-releases": ["node-releases@2.0.27", "", {}, "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA=="], "normalize-range": ["normalize-range@0.1.2", "", {}, "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA=="], @@ -572,7 +572,7 @@ "regexp.prototype.flags": ["regexp.prototype.flags@1.5.4", "", { "dependencies": { "call-bind": "^1.0.8", "define-properties": "^1.2.1", "es-errors": "^1.3.0", "get-proto": "^1.0.1", "gopd": "^1.2.0", "set-function-name": "^2.0.2" } }, "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA=="], - "resolve": ["resolve@1.22.10", "", { "dependencies": { "is-core-module": "^2.16.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w=="], + "resolve": ["resolve@1.22.11", "", { "dependencies": { "is-core-module": "^2.16.1", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ=="], "resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="], @@ 
-660,9 +660,9 @@ "unbox-primitive": ["unbox-primitive@1.1.0", "", { "dependencies": { "call-bound": "^1.0.3", "has-bigints": "^1.0.2", "has-symbols": "^1.1.0", "which-boxed-primitive": "^1.1.1" } }, "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw=="], - "undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="], + "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], - "update-browserslist-db": ["update-browserslist-db@1.1.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw=="], + "update-browserslist-db": ["update-browserslist-db@1.1.4", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A=="], "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], @@ -682,39 +682,17 @@ "yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], - "zimmerframe": ["zimmerframe@1.1.2", "", {}, "sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w=="], + "zimmerframe": ["zimmerframe@1.1.4", "", {}, "sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ=="], "@esbuild-kit/core-utils/esbuild": ["esbuild@0.18.20", "", { "optionalDependencies": { 
"@esbuild/android-arm": "0.18.20", "@esbuild/android-arm64": "0.18.20", "@esbuild/android-x64": "0.18.20", "@esbuild/darwin-arm64": "0.18.20", "@esbuild/darwin-x64": "0.18.20", "@esbuild/freebsd-arm64": "0.18.20", "@esbuild/freebsd-x64": "0.18.20", "@esbuild/linux-arm": "0.18.20", "@esbuild/linux-arm64": "0.18.20", "@esbuild/linux-ia32": "0.18.20", "@esbuild/linux-loong64": "0.18.20", "@esbuild/linux-mips64el": "0.18.20", "@esbuild/linux-ppc64": "0.18.20", "@esbuild/linux-riscv64": "0.18.20", "@esbuild/linux-s390x": "0.18.20", "@esbuild/linux-x64": "0.18.20", "@esbuild/netbsd-x64": "0.18.20", "@esbuild/openbsd-x64": "0.18.20", "@esbuild/sunos-x64": "0.18.20", "@esbuild/win32-arm64": "0.18.20", "@esbuild/win32-ia32": "0.18.20", "@esbuild/win32-x64": "0.18.20" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA=="], "@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], - "@eslint/plugin-kit/@eslint/core": ["@eslint/core@0.15.1", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA=="], - - "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], - - "@typescript-eslint/eslint-plugin/@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0" } }, "sha512-jc/4IxGNedXkmG4mx4nJTILb6TMjL66D41vyeaPWvDUmeYQzF3lKtN15WsAeTr65ce4mPxwopPSo1yUUAWw0hQ=="], - - "@typescript-eslint/eslint-plugin/@typescript-eslint/utils": ["@typescript-eslint/utils@8.32.0", "", { "dependencies": { "@eslint-community/eslint-utils": 
"^4.7.0", "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/types": "8.32.0", "@typescript-eslint/typescript-estree": "8.32.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-8S9hXau6nQ/sYVtC3D6ISIDoJzS1NsCK+gluVhLN2YkBPX+/1wkwyUiDKnxRh15579WoOIyVWnoyIf3yGI9REw=="], - - "@typescript-eslint/parser/@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0" } }, "sha512-jc/4IxGNedXkmG4mx4nJTILb6TMjL66D41vyeaPWvDUmeYQzF3lKtN15WsAeTr65ce4mPxwopPSo1yUUAWw0hQ=="], - - "@typescript-eslint/parser/@typescript-eslint/types": ["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], - - "@typescript-eslint/parser/@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-pU9VD7anSCOIoBFnhTGfOzlVFQIA1XXiQpH/CezqOBaDppRwTglJzCC6fUQGpfwey4T183NKhF1/mfatYmjRqQ=="], - - "@typescript-eslint/scope-manager/@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.36.0", "", { "dependencies": { "@typescript-eslint/types": "8.36.0", "eslint-visitor-keys": "^4.2.1" } }, "sha512-vZrhV2lRPWDuGoxcmrzRZyxAggPL+qp3WzUrlZD+slFueDiYHxeBa34dUXPuC0RmGKzl4lS5kFJYvKCq9cnNDA=="], - - "@typescript-eslint/type-utils/@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": 
"^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-pU9VD7anSCOIoBFnhTGfOzlVFQIA1XXiQpH/CezqOBaDppRwTglJzCC6fUQGpfwey4T183NKhF1/mfatYmjRqQ=="], - - "@typescript-eslint/type-utils/@typescript-eslint/utils": ["@typescript-eslint/utils@8.32.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/types": "8.32.0", "@typescript-eslint/typescript-estree": "8.32.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-8S9hXau6nQ/sYVtC3D6ISIDoJzS1NsCK+gluVhLN2YkBPX+/1wkwyUiDKnxRh15579WoOIyVWnoyIf3yGI9REw=="], - - "@typescript-eslint/typescript-estree/@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.36.0", "", { "dependencies": { "@typescript-eslint/types": "8.36.0", "eslint-visitor-keys": "^4.2.1" } }, "sha512-vZrhV2lRPWDuGoxcmrzRZyxAggPL+qp3WzUrlZD+slFueDiYHxeBa34dUXPuC0RmGKzl4lS5kFJYvKCq9cnNDA=="], + "@eslint/plugin-kit/@eslint/core": ["@eslint/core@0.15.2", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-78Md3/Rrxh83gCxoUc0EiciuOHsIITzLy53m3d9UyiW8y9Dj2D29FeETqyKA+BRK76tnTp6RXWb3pCay8Oyomg=="], "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - "@typescript-eslint/typescript-estree/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], - - "@typescript-eslint/visitor-keys/@typescript-eslint/types": ["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], + "@typescript-eslint/typescript-estree/semver": ["semver@7.7.3", "", { "bin": { "semver": 
"bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="], "eslint-import-resolver-node/debug": ["debug@3.2.7", "", { "dependencies": { "ms": "^2.1.1" } }, "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ=="], @@ -724,8 +702,6 @@ "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], - "typescript-eslint/@typescript-eslint/utils": ["@typescript-eslint/utils@8.32.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/types": "8.32.0", "@typescript-eslint/typescript-estree": "8.32.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-8S9hXau6nQ/sYVtC3D6ISIDoJzS1NsCK+gluVhLN2YkBPX+/1wkwyUiDKnxRh15579WoOIyVWnoyIf3yGI9REw=="], - "@esbuild-kit/core-utils/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.18.20", "", { "os": "android", "cpu": "arm" }, "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw=="], "@esbuild-kit/core-utils/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.18.20", "", { "os": "android", "cpu": "arm64" }, "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ=="], @@ -770,48 +746,6 @@ "@esbuild-kit/core-utils/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.18.20", "", { "os": "win32", "cpu": "x64" }, "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ=="], - "@typescript-eslint/eslint-plugin/@typescript-eslint/scope-manager/@typescript-eslint/types": ["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], - - 
"@typescript-eslint/eslint-plugin/@typescript-eslint/utils/@typescript-eslint/types": ["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], - - "@typescript-eslint/eslint-plugin/@typescript-eslint/utils/@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-pU9VD7anSCOIoBFnhTGfOzlVFQIA1XXiQpH/CezqOBaDppRwTglJzCC6fUQGpfwey4T183NKhF1/mfatYmjRqQ=="], - - "@typescript-eslint/parser/@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "@typescript-eslint/parser/@typescript-eslint/typescript-estree/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], - - "@typescript-eslint/type-utils/@typescript-eslint/typescript-estree/@typescript-eslint/types": ["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], - - "@typescript-eslint/type-utils/@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "@typescript-eslint/type-utils/@typescript-eslint/typescript-estree/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, 
"sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], - - "@typescript-eslint/type-utils/@typescript-eslint/utils/@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0" } }, "sha512-jc/4IxGNedXkmG4mx4nJTILb6TMjL66D41vyeaPWvDUmeYQzF3lKtN15WsAeTr65ce4mPxwopPSo1yUUAWw0hQ=="], - - "@typescript-eslint/type-utils/@typescript-eslint/utils/@typescript-eslint/types": ["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], - "@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], - - "typescript-eslint/@typescript-eslint/utils/@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0" } }, "sha512-jc/4IxGNedXkmG4mx4nJTILb6TMjL66D41vyeaPWvDUmeYQzF3lKtN15WsAeTr65ce4mPxwopPSo1yUUAWw0hQ=="], - - "typescript-eslint/@typescript-eslint/utils/@typescript-eslint/types": ["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], - - "typescript-eslint/@typescript-eslint/utils/@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, 
"sha512-pU9VD7anSCOIoBFnhTGfOzlVFQIA1XXiQpH/CezqOBaDppRwTglJzCC6fUQGpfwey4T183NKhF1/mfatYmjRqQ=="], - - "@typescript-eslint/eslint-plugin/@typescript-eslint/utils/@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "@typescript-eslint/eslint-plugin/@typescript-eslint/utils/@typescript-eslint/typescript-estree/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], - - "@typescript-eslint/parser/@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], - - "@typescript-eslint/type-utils/@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], - - "typescript-eslint/@typescript-eslint/utils/@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "typescript-eslint/@typescript-eslint/utils/@typescript-eslint/typescript-estree/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], - - "@typescript-eslint/eslint-plugin/@typescript-eslint/utils/@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, 
"sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], - - "typescript-eslint/@typescript-eslint/utils/@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.2", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="], } } diff --git a/docs/cloud-auth-test-verification.md b/docs/cloud-auth-test-verification.md new file mode 100644 index 0000000..8e33e19 --- /dev/null +++ b/docs/cloud-auth-test-verification.md @@ -0,0 +1,82 @@ +# Cloud & Auth Test Verification Guide + +This guide documents how we validate cloud integrations and AbsoluteAuth across supported matrices, how to interpret skips, and how to run the verification locally or in CI. + +## Goals +- Ensure Neon and Turso functional tests pass for supported combinations. +- Ensure unsupported combinations are intentionally skipped with clear reasons (PlanetScale, MongoDB+AbsoluteAuth, etc.). +- Validate behavioural endpoints for AbsoluteAuth (`/auth/providers`, `/auth/session`) are present and respond. +- Provide reproducible commands, reporting locations, and cleanup guidance. + +## Key files +- Matrix generator: `scripts/functional-tests/matrix.ts` +- Functional harness registry: `scripts/functional-tests/test-cli-registry.ts` +- Functional CLI runner: `scripts/functional-tests/test-cli.ts` +- Functional framework test utils: `tests/functional/frameworks/test-utils.ts` +- Behavioural auth tests: `tests/behavioural/auth-matrix.test.ts` +- Cleanup script: `scripts/clean-tests.sh` (invoked by `npm run test:clean`) + +## How the matrix works +- Run `node scripts/functional-tests/matrix.ts` to generate permutations (the script now annotates unsupported combos with `skip` and `skipReason`). +- The functional test framework will skip annotated scenarios and log the skip reason. 
+ +## Running tests locally (examples) + +Typecheck and lint: +``` +bun run typecheck +npm run lint +``` + +Run a single behavioural auth scenario: +``` +bun test tests/behavioural/auth-matrix.test.ts -t "React + SQLite + AbsoluteAuth" +``` + +Run cloud functional tests for Neon (requires `NEON_DATABASE_URL` in env): +``` +NEON_DATABASE_URL=... bun test tests/functional/cloud.test.ts -t "neon + postgresql + react" +``` + +Run the full functional matrix (dry-run first): +``` +bun run scripts/functional-tests/matrix.ts # generates test-matrix.json +bun run test:cli --dry-run +``` + +## Environment variable behavior +- **Functional tests**: Use matrix-level `requiredEnv` metadata to annotate which scenarios need cloud credentials. If env vars are missing, the test runner skips the scenario early with a logged message. This allows CI to run without credentials while developers can opt-in by setting vars. +- **Behavioural tests**: Use runtime guards at the start of each scenario (e.g., `resolveScenario()`) to check for required env vars. If missing, the test is skipped. These tests expect simplified env var names: + - `NEON_DATABASE_URL` (not prefixed) + - `TURSO_DB_URL` (not prefixed) +- **Setting credentials**: Export cloud database URLs before running tests: + ```bash + export NEON_DATABASE_URL="postgresql://user:pass@host/db" + export TURSO_DB_URL="libsql://your-db.turso.io" + bun test tests/behavioural/cloud-matrix.test.ts + ``` + +## Interpreting skips +- Each skipped scenario includes a `skipReason` explaining why it was not executed. +- Common reasons: + - "AbsoluteAuth is not supported with MongoDB" + - "PlanetScale cloud flows are not exercised by CI (skipped)" + - "missing env vars: NEON_DATABASE_URL" + +## Behavioural auth endpoints to validate +- `GET /auth/providers` — returns 200 and a JSON array of provider names. +- `POST /auth/session` — should return 401 or 400 when unauthenticated, not 404. 
+- `GET /auth/authorize/:provider` and `GET /auth/callback/:provider` routes are generated by the scaffold and should be reachable (404 indicates generation ordering regressions). + +## Reports & artifacts +- Test runner writes logs under `reports/` (create if missing). Use `reports/final-summary.json` to collect pass/skip/fail counts. + +## Cleanup +- Use `npm run test:clean` or `bash scripts/clean-tests.sh --confirm` to safely remove generated `test-*` directories and `.test-dependency-cache`. + +## Troubleshooting +- If servers fail to start due to port conflicts, kill lingering processes that bind to port 3000. +- If an expected provider is missing from `/auth/providers`, verify `src/generators/project/generateUseBlock.ts` still injects the providers endpoint and that `.use(absoluteAuth(...))` appears before static handlers. + +--- +End of guide. diff --git a/docs/test-cli-migration-phase0.md b/docs/test-cli-migration-phase0.md new file mode 100644 index 0000000..0a46517 --- /dev/null +++ b/docs/test-cli-migration-phase0.md @@ -0,0 +1,47 @@ +# Test CLI Migration — Phase 0 Snapshot + +> **Note:** Phase 3 replaced the legacy `scripts/functional-tests/*-test-runner.ts` files with Bun test suites. This document preserves their original inventory for historical reference only. + +## 1. Baseline Command Outputs + +### `bun run test:cli --all --dry-run` +- Lists 15 functional runners that would execute, covering core validators, framework suites, database suites, cloud, and auth. +- Confirms legacy harness still shells out to `scripts/functional-tests/*.ts`. + +### `bun run test:cli --all` +- Core functional validators fail immediately because the expected scaffold (`absolutejs-project/`) is absent. +- React suite runs and passes for all SQLite permutations thanks to cached installs; MongoDB permutations fail early because Docker access is blocked; the command was interrupted before completing the remaining suites. 
+- Vue suite shows systemic dependency-install failures (`bun is unable to write files to tempdir: AccessDenied`), reflecting the current sandbox limitation we already account for in CI notes. +- MongoDB setups across suites fail with `docker compose ... connect: operation not permitted`, confirming the need to keep Docker access checks in the new runner. +- Partial output is sufficient for parity: the new Bun-based runner must surface identical failure conditions (missing scaffold, missing Docker, bun install permissions). + +## 2. Legacy Suite Registry + +`scripts/functional-tests/test-cli-registry.ts` currently declares the discoverable suites. Key facts: + +| Suite | Group | Purpose | Functional Runner | Extra Inputs | +|-------|-------|---------|-------------------|--------------| +| functional | core | chains dependency/build/server validators against a pre-generated scaffold | `functional-test-runner.ts` (expects `absolutejs-project bun`) | none | +| server | core | verifies scaffold boots (`bun run dev`) | `server-startup-validator.ts` | none | +| build | core | executes type-check/build pipeline | `build-validator.ts` | none | +| deps | core | ensures installs succeed | `dependency-installer-tester.ts` | none | +| react | framework | React matrix | `react-test-runner.ts` | implicit matrix inside script | +| vue | framework | Vue matrix | `vue-test-runner.ts` | implicit matrix | +| svelte | framework | Svelte matrix | `svelte-test-runner.ts` | implicit matrix | +| html | framework | HTML validator | `html-test-runner.ts` | implicit matrix | +| htmx | framework | HTMX validator | `htmx-test-runner.ts` | implicit matrix | +| sqlite | database | SQLite validations | `sqlite-test-runner.ts` (+ behavioural `tests/behavioural/sqlite-matrix.test.ts`) | matrix | +| postgresql | database | PostgreSQL validations | `postgresql-test-runner.ts` (+ behavioural suite) | matrix + Docker | +| mysql | database | MySQL validations | `mysql-test-runner.ts` (+ behavioural 
suite) | matrix + Docker | +| mongodb | database | MongoDB validations | `mongodb-test-runner.ts` (+ behavioural suite) | Docker | +| cloud | cloud | Neon/Turso combinations | `cloud-provider-test-runner.ts` (+ behavioural suite) | env-gated | +| auth | auth | absoluteAuth permutations | `auth-test-runner.ts` (+ behavioural suite) | matrix | + +The helper also exports the normalised sets of frameworks, databases, and providers for CLI flag validation—these constants must stay in sync when we swap in Bun tests. + +## 3. Parity Notes for Migration + +- **Expected failures**: missing scaffold directory, Docker socket permission errors, and Bun tempdir limitations must remain visible in the Bun-based run so developers notice environment problems early. +- **Matrix execution**: Framework/database runners currently manage their own matrices internally; the Bun rewrite needs to replicate ordering and skip semantics (e.g., cloud suite skips without provider credentials). +- **Behavioural tie-in**: Several suites still list behavioural tests even though the corresponding `tests/behavioural/*.test.ts` files were deleted earlier—call this out for cleanup in later phases when we re-home them under Bun. + diff --git a/docs/test-cli-migration-phase1.md b/docs/test-cli-migration-phase1.md new file mode 100644 index 0000000..19b7714 --- /dev/null +++ b/docs/test-cli-migration-phase1.md @@ -0,0 +1,96 @@ +# Test CLI Migration — Phase 1 Plan (Shared Utilities) + +## Goals +- Provide Bun-native helpers that rewrite the functionality embedded in the legacy runners. +- Give Bun tests a consistent way to scaffold projects, install dependencies with caching, manage Docker databases, and assert results. +- Keep parity with existing failure messaging so the eventual swap is transparent to developers. 
+
+## Proposed Directory Layout
+
+```
+tests/
+  functional/
+    support/
+      scaffold.ts
+      install.ts
+      docker.ts
+      http.ts
+      assertions.ts
+      timing.ts
+      index.ts
+```
+
+- `tests/functional/support/index.ts` re-exports the public helpers so suites can `import { scaffoldProject } from '../support'`.
+
+## Module Responsibilities & APIs
+
+### `scaffold.ts`
+- Wraps CLI project generation.
+- Exports:
+  - `ScaffoldConfig`: flags (framework, db, orm, auth, tailwind, directory, code quality).
+  - `scaffoldProject(config: ScaffoldConfig): Promise<ScaffoldResult>` — runs `bun run src/index.ts` with the right CLI flags, mirrors spinner output (`→ Scaffolding project...`).
+  - `cleanupProject(projectName: string): Promise<void>` — removes previous directories using `rm -rf`, matching `cleanupProjectDirectory`.
+- Handles timeout logic (configurable default 2 min). On timeout returns `{ success: false, error: 'TIMEOUT', elapsedMs }`.
+- Emits console output identical to legacy runner (✓/✗ with timings).
+
+### `install.ts`
+- Centralises dependency cache logic.
+- Re-uses existing helpers (`computeManifestHash`, `getOrInstallDependencies`, `hasCachedDependencies`) internally.
+- Exports:
+  - `installDependencies(projectDir, packageManager?: 'bun' | 'npm'): Promise<StepResult>`
+  - `requireCachedDependencies(hashContext): Promise<{ hit: boolean }>` for suites that need to know whether the cache existed.
+- Maintains the same AccessDenied messaging by letting `bun install` surface raw stderr.
+
+### `docker.ts`
+- Abstracts database lifecycle.
+- Exports:
+  - `ensureDockerAvailable(): Promise<'available' | 'unavailable'>` — replicates legacy guard that exits early with the same CLI messaging.
+  - `withDockerCompose(projectDir, options, callback)` — runs `bun db:up`, waits with `sleep`, executes callback, finally runs `bun db:down`.
+- Captures common errors (permission denied, missing compose file) and surfaces structured failures for assertions. 
+ +### `http.ts` +- Minimal wrapper around fetch against the scaffolded server. +- Provides helpers like `getJson(url)` and `expectStatus(url, status)` to replace repeated `fetch` + status check code in validators. + +### `assertions.ts` +- Convenience assertions for Bun tests (no dependency on Vitest expect packages). +- Provides `assertSuccess(result, context)` and `logStep({ label, result })` to standardise console output. + +### `timing.ts` +- Houses `measureStep(label, fn)` and `formatDuration(ms)` used by other helpers. + +## Data Types + +Define shared `StepResult` interface used across helpers: +```ts +type StepResult = { + success: boolean; + elapsedMs: number; + errors: string[]; + warnings: string[]; +}; +``` + +Re-export from `support/index.ts` to keep types consistent between suites. + +## Parity Requirements +- All helpers must print the same status lines as the legacy scripts (`→ Scaffolding project... ✓ (####ms)`). +- Timeout durations should remain configurable but default to the same 2-minute window used today. +- Docker permission errors must propagate unchanged so CI continues to highlight missing privileges. +- Cache hits should log `(cached, ###ms)` exactly as before; we can factor the logging into `install.ts`. + +## Upcoming Work — Database Suites + +- Reuse `runFrameworkMatrix` patterns to build database-focused drivers (one per engine) that: + - scaffold the appropriate backend template (with matrix filters keyed on database + ORM + auth); + - call shared helpers for dependency install and Docker lifecycle (ensuring `ensureDockerAvailable` runs before `bun db:up`); + - trigger the existing database validators (e.g. `validateSQLiteDatabase`) with `skipDependencies: true` where applicable. +- Introduce `runDatabaseMatrix` helper mirroring `runFrameworkMatrix` but with database-specific hooks (e.g. seeding, Docker waits). +- Ensure environment skips remain intact: suites should detect missing credentials (Neon/Turso/etc.) 
and log the same skip reason. +- When porting each suite (SQLite → PostgreSQL → MySQL → MongoDB), keep parity notes from Phase 0 in mind so error messaging (missing Docker, missing scaffold) aligns with the legacy output. + +## Implementation Steps +1. Create `tests/functional/support/` directory with scaffolding helper skeletons. +2. Move shared logic out of `scripts/functional-tests/test-utils.ts`, `dependency-cache.ts`, etc., into the new modules while leaving compatibility exports in place until suites migrate. +3. Update one legacy runner to consume the new helpers (Phase 2) to prove parity before deleting old utilities. + diff --git a/docs/test-cli-ux.md b/docs/test-cli-ux.md new file mode 100644 index 0000000..6ebd5c6 --- /dev/null +++ b/docs/test-cli-ux.md @@ -0,0 +1,117 @@ +# Test CLI UX Specification + +## Goals + +- Provide a single entry point (`bun run test:cli`) for every validation path (functional harness + behavioural specs). +- Preserve the stakeholder‑approved experience from the legacy runner: intuitive flags, human friendly progress, concise summary, predictable exit codes. +- Allow selective execution by framework, database, auth, or cloud provider without learning internal script paths. +- Surface prerequisites (Docker availability, remote credentials, dependency cache) as explicit skip messages instead of opaque failures. 
+ +## Suite Taxonomy + +| Group | Suite Name | Purpose | Underlying runner | +|--------------|------------------|----------------------------------------------------------------------------|-------------------------------------------| +| `core` | `functional` | Smoke: dependency installer → build validator → server validator | `scripts/functional-tests/functional-test-runner.ts` | +| `core` | `server` | Boot scaffolded server only | `scripts/functional-tests/server-startup-validator.ts` | +| `core` | `build` | `tsc`/build pipeline sanity check | `scripts/functional-tests/build-validator.ts` | +| `core` | `deps` | Cached dependency install health | `scripts/functional-tests/dependency-installer-tester.ts` | + +## Cleaning Test Artifacts + +We provide a safe cleanup helper to remove generated test projects and cached dependencies: + +- Script: `scripts/clean-tests.sh` +- NPM script: `npm run test:clean` (runs `bash scripts/clean-tests.sh`) + +Usage: + +- Dry run (list directories found): + - `bash scripts/clean-tests.sh` +- Delete test projects (standard): + - `bash scripts/clean-tests.sh --confirm` +- Full cleanup (also removes `.test-dependency-cache`): + - `bash scripts/clean-tests.sh --full --confirm` + +The script only removes top-level `./test-*` directories and explicitly excludes `test-cli-project`. The `--confirm` flag is required for destructive operations to reduce risk. 
+ +| `framework` | `react` | React matrix (behavioural + functional) | `tests/functional/frameworks/react.test.ts` | +| `framework` | `vue` | Vue matrix | `tests/functional/frameworks/vue.test.ts` | +| `framework` | `svelte` | Svelte matrix | `tests/functional/frameworks/svelte.test.ts` | +| `framework` | `html` | HTML matrix | `tests/functional/frameworks/html.test.ts` | +| `framework` | `htmx` | HTMX matrix | `tests/functional/frameworks/htmx.test.ts` | +| `database` | `sqlite` | SQLite combinations (local + Turso) | `tests/functional/databases/sqlite.test.ts` | +| `database` | `postgresql` | PostgreSQL combinations (local + Neon) | `tests/functional/databases/postgresql.test.ts` | +| `database` | `mysql` | MySQL combinations (local) | `tests/functional/databases/mysql.test.ts` | +| `database` | `mongodb` | MongoDB combinations | `tests/functional/databases/mongodb.test.ts` | +| `auth` | `auth` | AbsoluteAuth behavioural suite | `tests/functional/auth.test.ts` | +| `cloud` | `cloud` | Neon + Turso permutations | `tests/functional/cloud.test.ts` | + +Behavioural specs (`tests/behavioural/*.test.ts`) are executed indirectly by their functional counterparts or via the `--behavioural` flag (see below). + +### Behavioural Spec Inventory + +| Behavioural file | Scenarios exercised | Prerequisites / skip reasons | +|------------------|---------------------|-------------------------------| +| `sqlite-matrix.test.ts` | React, Vue, Svelte, HTML, React+Drizzle against SQLite | Requires dependency cache populated; no Docker needed. | +| `postgresql-matrix.test.ts` | React raw + Drizzle with local PostgreSQL | Needs Docker daemon; skips if Docker unreachable. | +| `mysql-matrix.test.ts` | React raw + Drizzle with local MySQL | Needs Docker daemon; skips if Docker unreachable. | +| `mongodb-matrix.test.ts` | React with local MongoDB | Needs Docker daemon; skips if Docker unreachable. 
| +| `auth-matrix.test.ts` | React SQLite AbsoluteAuth (plain + Drizzle) | Requires dependency cache; checks only public endpoints. | +| `cloud-matrix.test.ts` | React + Neon (Postgres) Drizzle, React + Turso (SQLite) Drizzle | Needs remote credentials (`ABSOLUTE_BEHAVIOURAL_NEON_DATABASE_URL`, `ABSOLUTE_BEHAVIOURAL_TURSO_DATABASE_URL`); skips when absent. | + +## Command Synopsis + +``` +bun run test:cli [options] +``` + +### Core Options + +| Flag | Description | Notes | +| ---- | ----------- | ----- | +| `-h`, `--help` | Print help and exit | | +| `--list` | List suites grouped by taxonomy | Mirrors table above | +| `--all` | Queue every suite (core → frameworks → databases → auth → cloud) | honour provider filters | +| `--suite ` | Explicit suite name (repeatable or comma separated) | case-insensitive | +| `--framework ` | Include matching framework suites | auto-adds suite if present | +| `--database ` | Include matching database suites | auto-adds suite | +| `--auth` | Alias for `--suite auth` | | +| `--cloud` | Alias for `--suite cloud` | | +| `--provider ` | Restrict cloud providers (e.g. `neon`, `turso`) | implies `--cloud`; sets `ABSOLUTE_CLOUD_PROVIDERS` | +| `--behavioural` | Force behavioural specs for selected suites | runs `bun test` with matching filters | +| `--functional` | Force functional runners only | default if omitted | +| `--dry-run` | Print the commands that would be executed | no side effects | +| `--ci` | Optimise output for CI (minimal noise, sets `CI=1`) | | +| `--clean` | Remove generated projects and `.test-dependency-cache` then exit | | + +### Execution Semantics + +- **Default**: no flags ⇒ run `functional` suite only. +- **Ordering**: core (deterministic order) → framework suites (alphabetical) → database suites → auth → cloud. +- **De-duplication**: suites added multiple times run once. +- **Skip behaviour**: suites that detect missing Docker/credentials log `Skipping …` and exit 0; the caller still sees them in the summary. 
+- **Exit codes**: first non-zero suite exit propagates as the overall exit code (legacy behaviour). Optional `--keep-going` can be added later if stakeholders request. + +## Output Contract + +- Per-suite progress line: `[n/total] Running {label} ({name})` +- Success: `✓ {label} passed ({duration}ms)` +- Failure: `✗ {label} failed (exit code X, {duration}ms)` +- Summary block with counts (`Total`, `Passed`, `Failed`) and status per suite. +- When skipping, show `⚠` line with reason but count it as “passed” for exit code purposes. +- Dry run: bullet list of commands (`• bun run …`) plus environment hints. +- CI mode: progress lines suppressed; only summary + failure lines emitted. + +## Behavioural Integration Details + +- `--behavioural` triggers targeted `bun test` invocations by framework/database/auth group using `--filter` expressions (e.g. `bun test --filter postgres`). +- Functional runners set `ABSOLUTE_BEHAVIOURAL_MODE=1` when chained, so downstream scripts can suppress redundant scaffolding. +- Skip logic reuses the same heuristics as behavioural tests (dependency cache, Docker, credentials). + +## Backlog / Follow-ups + +- Optional `--interactive` flag to mimic early prompt-driven UX. +- Archive JSON summary for CI dashboards. +- Document provider environment variables in README once implementation lands. 
+ + diff --git a/eslint.config.mjs b/eslint.config.mjs index d1479c3..1b90859 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -16,7 +16,7 @@ const __dirname = dirname(fileURLToPath(import.meta.url)); export default defineConfig([ { - ignores: ['dist/**', 'absolutejs-project/**', '**/*/htmx.*.min.js'] + ignores: ['dist/**', 'absolutejs-project/**', '**/*/htmx.*.min.js', 'test-cli-project/**'] }, pluginJs.configs.recommended, @@ -220,6 +220,15 @@ export default defineConfig([ '@typescript-eslint/no-unused-expressions': 'off' } }, + { + files: ['tests/**/*.{ts,tsx}'], + languageOptions: { + globals: globals.node + }, + rules: { + 'import/no-unused-modules': 'off' + } + }, { files: [ 'eslint.config.mjs', diff --git a/package.json b/package.json index 08ffc2c..785b026 100644 --- a/package.json +++ b/package.json @@ -38,13 +38,7 @@ "typescript": "5.8.3" }, "scripts": { - "build": "rm -rf dist && tsc --project tsconfig.build.json && cp -R src/templates dist/templates", - "dev": "if [ -f absolutejs-project/package.json ] && grep -q '\"db:reset\"' absolutejs-project/package.json; then cd absolutejs-project && bun run db:reset && cd ..; fi && rm -rf absolutejs-project && bun run src/index.ts", - "format": "prettier --write \"./**/*.{js,jsx,ts,tsx,css,json,mjs,md,svelte,html,vue}\"", - "lint": "eslint ./", - "release": "bun run format && bun run build && bun publish", - "test": "bash -c 'trap \"exit 0\" INT; cd absolutejs-project && bun dev'", - "typecheck": "bun run tsc --noEmit" + "build": "rm -rf dist && tsc --project tsconfig.build.json && cp -R src/templates dist/templates", "dev": "if [ -f absolutejs-project/package.json ] && grep -q '\"db:reset\"' absolutejs-project/package.json; then cd absolutejs-project && bun run db:reset && cd ..; fi && rm -rf absolutejs-project && bun run src/index.ts", "format": "prettier --write \"./**/*.{js,jsx,ts,tsx,css,json,mjs,md,svelte,html,vue}\"", "lint": "eslint ./", "release": "bun run format && bun run build && bun 
publish", "test": "bash -c 'trap \"exit 0\" INT; cd absolutejs-project && bun dev'", "test:behavioural": "bun test", "test:clean": "bash scripts/clean-tests.sh", "test:cli": "bun run scripts/functional-tests/test-cli.ts", "typecheck": "bun run tsc --noEmit" }, "type": "module", "version": "0.4.2" diff --git a/scripts/clean-tests.sh b/scripts/clean-tests.sh new file mode 100755 index 0000000..7ccd674 --- /dev/null +++ b/scripts/clean-tests.sh @@ -0,0 +1,70 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Safe-clean test artifacts in repository root +# Usage: ./scripts/clean-tests.sh [--full] [--confirm] +# --full Also remove .test-dependency-cache +# --confirm Required for destructive operations (for CI you can pass --confirm) + +ROOT_DIR="$(pwd)" + +FULL_MODE=0 +CONFIRM=0 + +for arg in "$@"; do + case "$arg" in + --full) + FULL_MODE=1 + ;; + --confirm) + CONFIRM=1 + ;; + --help|-h) + echo "Usage: $0 [--full] [--confirm]" + exit 0 + ;; + *) + echo "Unknown argument: $arg" + echo "Usage: $0 [--full] [--confirm]" + exit 1 + ;; + esac +done + +# Find test-* directories at repository root, exclude test-cli-project +mapfile -t TEST_DIRS < <(find "$ROOT_DIR" -maxdepth 1 -type d -name 'test-*' ! -name 'test-cli-project' -printf '%f\n' || true) + +if [ ${#TEST_DIRS[@]} -eq 0 ]; then + echo "No test-* directories found to remove." +else + echo "Found test directories to remove:" + for d in "${TEST_DIRS[@]}"; do + echo " - $d" + done + + if [ "$CONFIRM" -ne 1 ]; then + echo "Dry run: no directories will be removed. Re-run with --confirm to delete them." + exit 0 + fi + + for d in "${TEST_DIRS[@]}"; do + echo "Removing: $d" + rm -rf "$ROOT_DIR/$d" + done +fi + +if [ "$FULL_MODE" -eq 1 ]; then + echo "Full mode: also removing .test-dependency-cache if present" + if [ "$CONFIRM" -ne 1 ]; then + echo "--confirm required for full mode. Aborting." 
>&2 + exit 1 + fi + if [ -d "$ROOT_DIR/.test-dependency-cache" ]; then + echo "Removing .test-dependency-cache" + rm -rf "$ROOT_DIR/.test-dependency-cache" + else + echo ".test-dependency-cache not present" + fi +fi + +echo "Cleanup complete." diff --git a/scripts/functional-tests/auth-validator.ts b/scripts/functional-tests/auth-validator.ts new file mode 100644 index 0000000..03f0e94 --- /dev/null +++ b/scripts/functional-tests/auth-validator.ts @@ -0,0 +1,164 @@ +/* + Auth Configuration Validator + Validates auth-enabled scaffolded projects by ensuring required files, + schema definitions, and runtime wiring exist, then runs core functional tests. +*/ + +import process from 'node:process'; +import { runFunctionalTests, type FunctionalTestResult } from './functional-test-runner'; + +const DEFAULT_PACKAGE_MANAGER = 'bun'; + +type AuthValidationResult = { + errors: string[]; + functionalTestResults?: FunctionalTestResult; + passed: boolean; + warnings: string[]; +}; + +type ValidatorConfig = { + authProvider?: string; + databaseEngine?: string; + orm?: string; +}; + +type ValidatorOptions = { + skipBuild?: boolean; + skipDependencies?: boolean; + skipServer?: boolean; +}; + +const runFunctionalSuite = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn', + options: ValidatorOptions +) => { + try { + return await runFunctionalTests(projectPath, packageManager, options); + } catch (error) { + throw new Error(`Functional tests failed: ${(error as Error).message}`); + } +}; + +const processFunctionalResults = ( + result: FunctionalTestResult | undefined, + errors: string[], + warnings: string[] +) => { + if (!result) { + return; + } + + if (!result.passed) { + errors.push(...result.errors); + } + + if (result.warnings.length > 0) { + warnings.push(...result.warnings); + } +}; + +export const validateAuthConfiguration = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = DEFAULT_PACKAGE_MANAGER, + config: 
ValidatorConfig = {}, + options: ValidatorOptions = {} +) => { + const errors: string[] = []; + const warnings: string[] = []; + + if (!config.authProvider || config.authProvider === 'none') { + return { + errors: ['Auth validator requires a configuration with an auth provider enabled.'], + passed: false, + warnings + }; + } + + let functionalTestResults: FunctionalTestResult | undefined; + + try { + functionalTestResults = await runFunctionalSuite(projectPath, packageManager, options); + processFunctionalResults(functionalTestResults, errors, warnings); + } catch (error) { + errors.push((error as Error).message); + } + + const passed = errors.length === 0 && (functionalTestResults?.passed ?? false); + + return { + errors, + functionalTestResults, + passed, + warnings + } satisfies AuthValidationResult; +}; + +const printFunctionalSummary = (result: FunctionalTestResult) => { + console.log('\nFunctional Test Summary:'); + console.log(` Passed: ${result.passed ? '✓' : '✗'}`); + result.errors.forEach((error) => console.error(` - ${error}`)); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +const printValidationResults = (result: AuthValidationResult) => { + console.log('\n=== Auth Configuration Validation Results ===\n'); + + if (result.functionalTestResults) { + printFunctionalSummary(result.functionalTestResults); + } + + if (result.warnings.length > 0) { + console.log('\nWarnings:'); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); + } + + if (result.errors.length > 0) { + console.log('\nErrors:'); + result.errors.forEach((error) => console.error(` - ${error}`)); + } + + console.log(`\nOverall: ${result.passed ? 'PASS' : 'FAIL'}`); +}; + +const parseCliArguments = (argv: string[]) => { + const [, , projectPath, packageManager, databaseEngine, orm, authProvider] = argv; + + return { + authProvider: authProvider ?? 'none', + databaseEngine: databaseEngine ?? 
'none', + options: { + skipBuild: argv.includes('--skip-build'), + skipDependencies: argv.includes('--skip-deps'), + skipServer: argv.includes('--skip-server') + }, + orm: orm ?? 'none', + packageManager: (packageManager as 'bun' | 'npm' | 'pnpm' | 'yarn') ?? DEFAULT_PACKAGE_MANAGER, + projectPath + }; +}; + +if (import.meta.main) { + const { authProvider, databaseEngine, options, packageManager, projectPath, orm } = + parseCliArguments(process.argv); + + if (!projectPath) { + console.error( + 'Usage: bun run scripts/functional-tests/auth-validator.ts [package-manager] [databaseEngine] [orm] [authProvider] [--skip-deps] [--skip-build] [--skip-server]' + ); + process.exit(1); + } + + validateAuthConfiguration(projectPath, packageManager, { authProvider, databaseEngine, orm }, options) + .then((result) => { + printValidationResults(result); + process.exit(result.passed ? 0 : 1); + + return undefined; + }) + .catch((error) => { + console.error('Auth validation error:', error); + process.exit(1); + }); +} + diff --git a/scripts/functional-tests/build-validator.ts b/scripts/functional-tests/build-validator.ts new file mode 100644 index 0000000..d9197ee --- /dev/null +++ b/scripts/functional-tests/build-validator.ts @@ -0,0 +1,331 @@ +/* + Build Validator + Tests that scaffolded projects can compile TypeScript successfully. 
+*/ + +import { readFileSync } from 'node:fs'; +import { join } from 'node:path'; +import process from 'node:process'; + +export type BuildResult = { + passed: boolean; + errors: string[]; + compileTime?: number; +}; + +const COMPILE_TIMEOUT_MS = 60_000; +const TYPECHECK_SCRIPT = 'typecheck'; + +let cachedBunModule: typeof import('bun') | null = null; + +const loadBunModule = async () => { + if (cachedBunModule === null) { + cachedBunModule = await import('bun'); + } + + return cachedBunModule; +}; + +const parsePackageJson = (packageJsonPath: string) => { + try { + const raw = readFileSync(packageJsonPath, 'utf-8'); + + return JSON.parse(raw) as { scripts?: Record }; + } catch (unknownError) { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + + return { error } as const; + } +}; + +const getTypecheckScriptStatus = (packageJsonPath: string, errors: string[]) => { + const parsed = parsePackageJson(packageJsonPath); + + if ('error' in parsed) { + errors.push(`Failed to parse package.json: ${parsed.error.message}`); + + return 'error'; + } + + const hasScript = parsed.scripts?.[TYPECHECK_SCRIPT]; + + return hasScript ? 
'present' : 'missing'; +}; + +const runTypecheck = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' +) => { + const bunModule = await loadBunModule(); + const { $: bunDollar } = bunModule; + const command = bunDollar`cd ${projectPath} && ${packageManager} run ${TYPECHECK_SCRIPT}`.quiet().nothrow(); + + const timeout = bunModule.sleep(COMPILE_TIMEOUT_MS).then(() => null as const); + const result = await Promise.race([command, timeout]); + + if (result !== null) { + return { result } as const; + } + + command.kill(); + + return { timedOut: true as const }; +}; + +const extractErrorOutput = (output: string) => { + const ERROR_PATTERNS = ['error TS', 'error:']; + const MAX_LINES = 15; + + const lines = output + .split('\n') + .map((line) => line.trim()) + .filter((line) => line.length > 0); + + const relevant = lines.filter((line) => { + if (ERROR_PATTERNS.some((pattern) => line.includes(pattern))) { + return true; + } + + return /^[^(]+\(\d+,\d+\):/.test(line); + }); + + if (relevant.length > 0) { + return relevant.slice(0, MAX_LINES).join('\n'); + } + + const OUTPUT_PREVIEW_LINES = 10; + + return lines.slice(0, OUTPUT_PREVIEW_LINES).join('\n'); +}; + +const TSC_MISSING_EXIT_CODE = 127; +const STDOUT_WARNING_LINES = 3; + +const runTscFallback = async (projectPath: string) => { + const bunModule = await loadBunModule(); + const { $: bunDollar, sleep } = bunModule; + const command = bunDollar`cd ${projectPath} && tsc --noEmit`.quiet().nothrow(); + + const startTime = Date.now(); + const timeoutResult = sleep(COMPILE_TIMEOUT_MS).then(() => null as const); + const result = await Promise.race([command, timeoutResult]); + + if (result === null) { + command.kill(); + + return { + compileTime: Date.now() - startTime, + status: 'timedOut' as const + }; + } + + const compileTime = Date.now() - startTime; + const exitCode = result.exitCode ?? -1; + const stdout = result.stdout?.toString() ?? ''; + const stderr = result.stderr?.toString() ?? 
''; + const combinedOutput = `${stdout}\n${stderr}`.trim(); + const lowerCombined = combinedOutput.toLowerCase(); + + if (exitCode === 0) { + return { compileTime, status: 'success' as const }; + } + + if ( + exitCode === TSC_MISSING_EXIT_CODE || + lowerCombined.includes('command not found') || + lowerCombined.includes('not recognized') || + lowerCombined.includes('enoent') + ) { + const warning = + combinedOutput.length > 0 + ? combinedOutput.split('\n').slice(0, STDOUT_WARNING_LINES).join('\n') + : 'The TypeScript compiler (tsc) was not found on PATH.'; + + return { message: warning, status: 'missing' as const }; + } + + return { + compileTime, + exitCode, + status: 'failure' as const, + stderr, + stdout + }; +}; + +const runPackageTypecheck = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' +) => { + const startTime = Date.now(); + const execution = await runTypecheck(projectPath, packageManager); + const compileTime = Date.now() - startTime; + + if ('timedOut' in execution) { + return { + compileTime, + error: `TypeScript compilation timed out after ${COMPILE_TIMEOUT_MS}ms`, + passed: false + }; + } + + const { result } = execution; + + if (result.exitCode === 0) { + return { compileTime, passed: true }; + } + + const output = [result.stdout?.toString() ?? '', result.stderr?.toString() ?? ''] + .filter(Boolean) + .join('\n'); + const errorMessage = + output.length > 0 + ? 
`Compilation errors:\n${extractErrorOutput(output)}` + : `TypeScript compilation failed (exit code ${result.exitCode})`; + + return { + compileTime, + error: errorMessage, + passed: false + }; +}; + +const applyFallbackResult = ( + fallback: + | { compileTime: number; passed: true; status: 'success' } + | { compileTime: number; error: string; passed: false; status: 'timedOut' } + | { message: string; status: 'missing' } + | { + compileTime: number; + exitCode: number; + status: 'failure'; + stderr: string; + stdout: string; + } + | { status: 'missing'; message: string }, + errors: string[] +) => { + if (fallback.status === 'success') { + return { compileTime: fallback.compileTime, errors, passed: true }; + } + + if (fallback.status === 'timedOut') { + errors.push(`TypeScript compilation timed out after ${COMPILE_TIMEOUT_MS}ms`); + + return { compileTime: fallback.compileTime, errors, passed: false }; + } + + if (fallback.status === 'missing') { + console.warn( + `⚠ TypeScript compiler not found; skipping typecheck step. (${fallback.message})` + ); + + return { errors, passed: true }; + } + + if (fallback.status === 'failure') { + const output = [fallback.stdout, fallback.stderr].filter(Boolean).join('\n'); + const errorMessage = + output.length > 0 + ? 
`Compilation errors:\n${extractErrorOutput(output)}` + : `TypeScript compilation failed (exit code ${fallback.exitCode})`; + + errors.push(errorMessage); + + return { compileTime: fallback.compileTime, errors, passed: false }; + } + + errors.push('Unknown error while running TypeScript compilation fallback.'); + + return { errors, passed: false }; +}; + +const applyScriptTypecheckResult = ( + typecheckResult: + | { compileTime?: number; error: string; passed: false } + | { compileTime?: number; passed: true; error?: string }, + errors: string[] +) => { + if (!typecheckResult.passed && typecheckResult.error) { + errors.push(typecheckResult.error); + } + + return { + compileTime: typecheckResult.compileTime, + errors, + passed: typecheckResult.passed + }; +}; + +export const validateBuild = async (projectPath: string, packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = 'bun') => { + const errors: string[] = []; + const packageJsonPath = join(projectPath, 'package.json'); + + const scriptStatus = getTypecheckScriptStatus(packageJsonPath, errors); + + if (scriptStatus === 'error') { + return { errors, passed: false }; + } + + if (scriptStatus === 'present') { + const typecheckResult = await runPackageTypecheck(projectPath, packageManager); + + return applyScriptTypecheckResult(typecheckResult, errors); + } + + console.warn( + `⚠ No '${TYPECHECK_SCRIPT}' script found in package.json – falling back to 'tsc --noEmit'.` + ); + const fallback = await runTscFallback(projectPath); + + return applyFallbackResult(fallback, errors); +}; + +const parseCliArgs = () => { + const [, , projectPath, packageManagerArg] = process.argv; + const normalizedPackageManager = packageManagerArg as 'bun' | 'npm' | 'pnpm' | 'yarn' | undefined; + + return { packageManager: normalizedPackageManager ?? 
'bun', projectPath } as const; +}; + +const exitWithUsage = () => { + console.error('Usage: bun run scripts/functional-tests/build-validator.ts [package-manager]'); + process.exit(1); +}; + +const runFromCli = async () => { + const { packageManager, projectPath } = parseCliArgs(); + + if (!projectPath) { + exitWithUsage(); + } + + const result = await validateBuild(projectPath, packageManager).catch((unknownError) => { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + console.error('✗ Build validation error:', error); + process.exit(1); + }); + + if (!result) { + return; + } + + if (!result.passed) { + console.error('✗ Build validation failed:'); + result.errors.forEach((error) => console.error(` - ${error}`)); + process.exit(1); + } + + console.log('✓ Build validation passed'); + if (typeof result.compileTime === 'number') { + console.log(` Compilation time: ${result.compileTime}ms`); + } + process.exit(0); +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('✗ Build validator encountered an unexpected error:', error); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/cloud-provider-validator.ts b/scripts/functional-tests/cloud-provider-validator.ts new file mode 100644 index 0000000..889f52a --- /dev/null +++ b/scripts/functional-tests/cloud-provider-validator.ts @@ -0,0 +1,181 @@ +/* + Cloud Database Provider Validator + Validates cloud database provider configurations (Neon, PlanetScale, Turso). + Tests connection code generation, imports, dependencies, and environment configuration. 
+*/ + +import process from 'node:process'; +import { runFunctionalTests, type FunctionalTestResult } from './functional-test-runner'; + +const VALID_PROVIDERS = new Set(['neon', 'planetscale', 'turso']); +const DEFAULT_PACKAGE_MANAGER = 'bun'; +type CloudProviderValidationResult = { + errors: string[]; + functionalTestResults?: FunctionalTestResult; + passed: boolean; + warnings: string[]; +}; + +type ValidationConfig = { + authProvider?: string; + databaseEngine?: string; + databaseHost?: string; + orm?: string; +}; + +type ValidationOptions = { + skipBuild?: boolean; + skipDependencies?: boolean; + skipServer?: boolean; +}; + +const runFunctionalSuite = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn', + options: ValidationOptions +) => { + try { + return await runFunctionalTests(projectPath, packageManager, options); + } catch (error) { + throw new Error(`Functional tests failed: ${(error as Error).message}`); + } +}; + +const processFunctionalResults = ( + result: FunctionalTestResult | undefined, + errors: string[], + warnings: string[] +) => { + if (!result) { + return; + } + + if (!result.passed) { + errors.push(...result.errors); + } + + if (result.warnings.length > 0) { + warnings.push(...result.warnings); + } +}; + +export const validateCloudProvider = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = DEFAULT_PACKAGE_MANAGER, + config: ValidationConfig = {}, + options: ValidationOptions = {} +) => { + const errors: string[] = []; + const warnings: string[] = []; + + const provider = (config.databaseHost ?? 'none') as 'neon' | 'planetscale' | 'turso' | 'none'; + + if (!VALID_PROVIDERS.has(provider)) { + return { + errors: [`Invalid cloud provider: ${provider}. 
Expected: neon, planetscale, or turso`], + functionalTestResults: undefined, + passed: false, + warnings + } satisfies CloudProviderValidationResult; + } + + let functionalTestResults: FunctionalTestResult | undefined; + + try { + functionalTestResults = await runFunctionalSuite(projectPath, packageManager, options); + processFunctionalResults(functionalTestResults, errors, warnings); + } catch (error) { + errors.push((error as Error).message); + } + + const passed = errors.length === 0 && (functionalTestResults?.passed ?? false); + + return { + errors, + functionalTestResults, + passed, + warnings + } satisfies CloudProviderValidationResult; +}; + +const printFunctionalSummary = (result: FunctionalTestResult) => { + console.log('\nFunctional Test Summary:'); + console.log(` Passed: ${result.passed ? '✓' : '✗'}`); + result.errors.forEach((error) => console.error(` - ${error}`)); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +const parseCliArguments = (argv: string[]) => { + const [, , projectPath, packageManager, databaseEngine, databaseHost, orm, authProvider] = argv; + + return { + authProvider: authProvider ?? 'none', + databaseEngine: databaseEngine ?? 'none', + databaseHost: databaseHost ?? 'none', + options: { + skipBuild: argv.includes('--skip-build'), + skipDependencies: argv.includes('--skip-deps'), + skipServer: argv.includes('--skip-server') + }, + orm: orm ?? 'none', + packageManager: (packageManager as 'bun' | 'npm' | 'pnpm' | 'yarn') ?? 
DEFAULT_PACKAGE_MANAGER, + projectPath + }; +}; + +const printValidationResults = ( + provider: string, + databaseEngine: string, + orm: string, + result: CloudProviderValidationResult +) => { + console.log('\n=== Cloud Provider Validation Results ===\n'); + console.log(`Provider: ${provider}`); + console.log(`Database Engine: ${databaseEngine}`); + console.log(`ORM: ${orm}`); + + if (result.functionalTestResults) { + printFunctionalSummary(result.functionalTestResults); + } + + if (result.warnings.length > 0) { + console.log('\nWarnings:'); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); + } + + if (result.errors.length > 0) { + console.log('\nErrors:'); + result.errors.forEach((error) => console.error(` - ${error}`)); + } + + console.log(`\nOverall: ${result.passed ? 'PASS' : 'FAIL'}`); +}; + +if (import.meta.main) { + const { authProvider, databaseEngine, databaseHost, options, orm, packageManager, projectPath } = + parseCliArguments(process.argv); + + if (!projectPath) { + console.error( + 'Usage: bun run scripts/functional-tests/cloud-provider-validator.ts [package-manager] [databaseEngine] [databaseHost] [orm] [authProvider] [--skip-deps] [--skip-build] [--skip-server]' + ); + process.exit(1); + } + + validateCloudProvider( + projectPath, + packageManager, + { authProvider, databaseEngine, databaseHost, orm }, + options + ) + .then((result) => { + printValidationResults(databaseHost, databaseEngine, orm, result); + process.exit(result.passed ? 
0 : 1); + + return undefined; + }) + .catch((error) => { + console.error('Cloud provider validation error:', error); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/dependency-cache.ts b/scripts/functional-tests/dependency-cache.ts new file mode 100644 index 0000000..36816b7 --- /dev/null +++ b/scripts/functional-tests/dependency-cache.ts @@ -0,0 +1,315 @@ +/* + Dependency Cache Manager + Reuses node_modules across test configurations with identical dependencies + to dramatically speed up testing. +*/ + +import { spawn } from 'node:child_process'; +import { createHash } from 'node:crypto'; +import { once } from 'node:events'; +import { + cpSync, + existsSync, + mkdirSync, + readFileSync, + readdirSync, + rmSync, + statSync, + writeFileSync +} from 'node:fs'; +import { dirname, join } from 'node:path'; +import process from 'node:process'; + +export type DependencyFingerprint = { + frontend: string; + databaseEngine: string; + orm: string; + databaseHost: string; + authProvider: string; + useTailwind: boolean; + codeQualityTool?: string; +}; + +const CACHE_DIR = join(process.cwd(), '.test-dependency-cache'); +const LOCK_FILES = ['bun.lockb', 'package-lock.json', 'pnpm-lock.yaml', 'yarn.lock']; +const MINUTES_PER_INSTALL_TIMEOUT = 5; +const SECONDS_PER_MINUTE = 60; +const MILLISECONDS_PER_SECOND = 1_000; +const INSTALL_TIMEOUT_MS = MINUTES_PER_INSTALL_TIMEOUT * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND; +const DEFAULT_CACHE_MAX_AGE_DAYS = 7; +const HOURS_PER_DAY = 24; +const MINUTES_PER_HOUR = 60; +const FORCE_KILL_DELAY_MS = 100; + +const createFingerprintKey = (config: DependencyFingerprint, manifestHash: string) => + JSON.stringify({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool ?? 
'none', + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + manifestHash, + orm: config.orm, + useTailwind: config.useTailwind + }); + +const FINGERPRINT_LENGTH = 16; + +const getDependencyFingerprint = (config: DependencyFingerprint, manifestHash: string) => + createHash('sha256') + .update(createFingerprintKey(config, manifestHash)) + .digest('hex') + .slice(0, FINGERPRINT_LENGTH); + +const getCachePath = (fingerprint: string) => join(CACHE_DIR, fingerprint); + +const safeRead = (path: string) => { + try { + return readFileSync(path); + } catch { + return null; + } +}; + +const ensureCacheDir = () => { + if (!existsSync(CACHE_DIR)) { + mkdirSync(CACHE_DIR, { recursive: true }); + } +}; + +export const computeManifestHash = (packageJsonPath: string) => { + if (!existsSync(packageJsonPath)) { + return 'missing'; + } + + const hash = createHash('sha256'); + const packageJsonContents = safeRead(packageJsonPath); + + if (!packageJsonContents) { + return 'error:package-json-unreadable'; + } + + hash.update(packageJsonContents); + + const packageDir = dirname(packageJsonPath); + + LOCK_FILES.forEach((lockFile) => { + const lockPath = join(packageDir, lockFile); + const contents = safeRead(lockPath); + + if (contents) { + hash.update(contents); + } + }); + + return hash.digest('hex'); +}; + +const readStoredManifestHash = (cachePath: string) => { + const manifestHashPath = join(cachePath, 'manifest.hash'); + + if (!existsSync(manifestHashPath)) { + return null; + } + + try { + return readFileSync(manifestHashPath, 'utf-8').trim(); + } catch { + return null; + } +}; + +const restoreCache = ( + cachePath: string, + projectPath: string, + manifestHash: string +) => { + const nodeModulesPath = join(cachePath, 'node_modules'); + const storedHash = readStoredManifestHash(cachePath); + + if (!storedHash || storedHash !== manifestHash) { + return false; + } + + if (!existsSync(nodeModulesPath) || 
!statSync(nodeModulesPath).isDirectory()) { + return false; + } + + const start = Date.now(); + cpSync(nodeModulesPath, join(projectPath, 'node_modules'), { recursive: true }); + + return Date.now() - start; +}; + +const installDependencies = async (projectPath: string, env?: Record) => { + const stdoutChunks: string[] = []; + const stderrChunks: string[] = []; + let timedOut = false; + + const child = spawn('bun', ['install'], { + cwd: projectPath, + env: { + ...process.env, + ...(env ?? {}), + ABSOLUTE_TEST: 'true' + }, + stdio: ['ignore', 'pipe', 'pipe'] + }); + + const timeoutId = setTimeout(() => { + timedOut = true; + try { + child.kill('SIGTERM'); + setTimeout(() => child.kill('SIGKILL'), FORCE_KILL_DELAY_MS); + } catch { + // Ignore kill failures – process may already have exited. + } + }, INSTALL_TIMEOUT_MS); + + child.stdout?.on('data', (chunk) => stdoutChunks.push(chunk.toString())); + child.stderr?.on('data', (chunk) => stderrChunks.push(chunk.toString())); + + const [code] = (await once(child, 'close')) as [number | null, string | null]; + clearTimeout(timeoutId); + + if (timedOut) { + throw new Error(`Dependency installation timed out after ${INSTALL_TIMEOUT_MS}ms`); + } + + if (code === 0) { + return; + } + + const combinedOutput = [stderrChunks.join(''), stdoutChunks.join('')] + .map((section) => section.trim()) + .filter(Boolean) + .join('\n'); + + if (combinedOutput.length > 0) { + const ERROR_PREVIEW_LINES = 10; + throw new Error(combinedOutput.split('\n').slice(0, ERROR_PREVIEW_LINES).join('\n')); + } + + throw new Error(`Dependency installation failed with exit code ${code ?? 
'unknown'}`); +}; + +const cacheInstalledDependencies = ( + projectPath: string, + cachePath: string, + manifestHash: string, + packageJsonPath: string +) => { + if (!existsSync(cachePath)) { + mkdirSync(cachePath, { recursive: true }); + } + + cpSync(join(projectPath, 'node_modules'), join(cachePath, 'node_modules'), { recursive: true }); + + if (existsSync(packageJsonPath)) { + cpSync(packageJsonPath, join(cachePath, 'package.json')); + } + + const packageDir = dirname(packageJsonPath); + + LOCK_FILES.forEach((lockFile) => { + const lockPath = join(packageDir, lockFile); + + if (existsSync(lockPath)) { + cpSync(lockPath, join(cachePath, lockFile)); + } + }); + + writeFileSync(join(cachePath, 'manifest.hash'), manifestHash); +}; + +export const hasCachedDependencies = ( + config: DependencyFingerprint, + packageJsonPath: string, + manifestHashOverride?: string +) => { + if (!existsSync(packageJsonPath)) { + return false; + } + + const manifestHash = manifestHashOverride ?? computeManifestHash(packageJsonPath); + + if (manifestHash.startsWith('error:') || manifestHash === 'missing') { + return false; + } + + const fingerprint = getDependencyFingerprint(config, manifestHash); + const cachePath = getCachePath(fingerprint); + const nodeModulesPath = join(cachePath, 'node_modules'); + + if (!existsSync(nodeModulesPath) || !statSync(nodeModulesPath).isDirectory()) { + return false; + } + + return readStoredManifestHash(cachePath) === manifestHash; +}; + +export const getOrInstallDependencies = async ( + projectPath: string, + config: DependencyFingerprint, + packageJsonPath: string, + manifestHashOverride?: string, + env?: Record +): Promise<{ cached: boolean; installTime: number }> => { + ensureCacheDir(); + + const baseManifestHash = manifestHashOverride ?? computeManifestHash(packageJsonPath); + const manifestHash = baseManifestHash.startsWith('error:') ? 
`fallback-${Date.now()}` : baseManifestHash; + const fingerprint = getDependencyFingerprint(config, manifestHash); + const cachePath = getCachePath(fingerprint); + + const restoredDuration = restoreCache(cachePath, projectPath, manifestHash); + + if (restoredDuration !== false) { + return { cached: true, installTime: restoredDuration }; + } + + const installStart = Date.now(); + await installDependencies(projectPath, env); + const installTime = Date.now() - installStart; + + const updatedManifestHash = manifestHashOverride ?? computeManifestHash(packageJsonPath); + const finalFingerprint = getDependencyFingerprint(config, updatedManifestHash); + const finalCachePath = getCachePath(finalFingerprint); + + cacheInstalledDependencies(projectPath, finalCachePath, updatedManifestHash, packageJsonPath); + + return { cached: false, installTime }; +}; + +export const cleanupCache = (maxAgeDays = DEFAULT_CACHE_MAX_AGE_DAYS) => { + if (!existsSync(CACHE_DIR)) { + return; + } + + const now = Date.now(); + const maxAgeMs = + maxAgeDays * HOURS_PER_DAY * MINUTES_PER_HOUR * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND; + + try { + readdirSync(CACHE_DIR, { withFileTypes: true }).forEach((entry) => { + if (!entry.isDirectory()) { + return; + } + + const entryPath = join(CACHE_DIR, entry.name); + const stats = statSync(entryPath); + + if (!stats.isDirectory()) { + return; + } + + const age = now - stats.mtimeMs; + + if (age > maxAgeMs) { + rmSync(entryPath, { force: true, recursive: true }); + } + }); + } catch { + // Ignore cleanup errors; they are non-fatal. + } +}; diff --git a/scripts/functional-tests/dependency-installer-tester.ts b/scripts/functional-tests/dependency-installer-tester.ts new file mode 100644 index 0000000..c95b386 --- /dev/null +++ b/scripts/functional-tests/dependency-installer-tester.ts @@ -0,0 +1,194 @@ +/* + Dependency Installer Tester + Tests that dependencies can be installed successfully in scaffolded projects. 
+*/ + +import { spawn } from 'node:child_process'; +import { once } from 'node:events'; +import { existsSync, mkdirSync, readFileSync } from 'node:fs'; +import { join } from 'node:path'; +import process from 'node:process'; + +export type DependencyInstallResult = { + passed: boolean; + errors: string[]; + installTime?: number; +}; + +const INSTALL_TIMEOUT_MS = 120_000; +const MAX_ERROR_PREVIEW_LINES = 10; +const FORCE_KILL_DELAY_MS = 1_000; +const INSTALL_TMP_DIR_NAME = '.absolute-tmp'; + +const ensureInstallTempDir = (projectPath: string) => { + const tempDir = join(projectPath, INSTALL_TMP_DIR_NAME); + + if (!existsSync(tempDir)) { + mkdirSync(tempDir, { recursive: true }); + } + + return tempDir; +}; + +const INSTALL_COMMANDS: Record<'bun' | 'npm' | 'pnpm' | 'yarn', [string, string[]]> = { + bun: ['bun', ['install']], + npm: ['npm', ['install']], + pnpm: ['pnpm', ['install']], + yarn: ['yarn', ['install']] +}; + +const hasDependenciesDeclared = (packageJson: { + dependencies?: Record; + devDependencies?: Record; +}) => { + const { dependencies = {}, devDependencies = {} } = packageJson; + + return Object.keys(dependencies).length > 0 || Object.keys(devDependencies).length > 0; +}; + +const parsePackageJson = (packageJsonPath: string) => { + try { + const raw = readFileSync(packageJsonPath, 'utf-8'); + + return JSON.parse(raw) as { dependencies?: Record; devDependencies?: Record }; + } catch (unknownError) { + const error = unknownError instanceof Error ? 
unknownError : new Error(String(unknownError)); + + return { error } as const; + } +}; + +const runInstall = async (projectPath: string, packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn') => { + const [executable, args] = INSTALL_COMMANDS[packageManager]; + const stdoutChunks: string[] = []; + const stderrChunks: string[] = []; + let timedOut = false; + const tempDir = ensureInstallTempDir(projectPath); + + const child = spawn(executable, args, { + cwd: projectPath, + env: { + ...process.env, + BUN_INSTALL_TMPDIR: tempDir, + TEMP: tempDir, + TMP: tempDir, + TMPDIR: tempDir + }, + stdio: ['ignore', 'pipe', 'pipe'] + }); + + const timeoutId = setTimeout(() => { + timedOut = true; + try { + child.kill('SIGTERM'); + setTimeout(() => child.kill('SIGKILL'), FORCE_KILL_DELAY_MS); + } catch { + // Ignore kill errors + } + }, INSTALL_TIMEOUT_MS); + + child.stdout?.on('data', (chunk) => stdoutChunks.push(chunk.toString())); + child.stderr?.on('data', (chunk) => stderrChunks.push(chunk.toString())); + + const [code] = (await once(child, 'close')) as [number | null, string | null]; + clearTimeout(timeoutId); + + if (timedOut) { + throw new Error(`Dependency installation timed out after ${INSTALL_TIMEOUT_MS}ms`); + } + + if (code === 0) { + return; + } + + const combined = [stderrChunks.join(''), stdoutChunks.join('')] + .map((section) => section.trim()) + .filter(Boolean) + .join('\n'); + const preview = combined + .split('\n') + .filter((line) => line.trim().length > 0) + .slice(0, MAX_ERROR_PREVIEW_LINES) + .join('\n'); + + throw new Error(preview || `Dependency installation failed with exit code ${code ?? 
'unknown'}`); +}; + +export const testDependencyInstallation = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = 'bun' +): Promise => { + const errors: string[] = []; + const packageJsonPath = join(projectPath, 'package.json'); + const parsed = parsePackageJson(packageJsonPath); + + if ('error' in parsed) { + errors.push(`Failed to parse package.json: ${parsed.error.message}`); + + return { errors, passed: false }; + } + + if (!hasDependenciesDeclared(parsed)) { + return { errors: [], installTime: 0, passed: true }; + } + + const installStart = Date.now(); + + try { + await runInstall(projectPath, packageManager); + } catch (unknownError) { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + errors.push(error.message); + + return { errors, passed: false }; + } + + const installTime = Date.now() - installStart; + + return { errors: [], installTime, passed: true }; +}; + +const parseCliArgs = () => { + const [, , projectPath, packageManagerArg] = process.argv; + const normalized = packageManagerArg as 'bun' | 'npm' | 'pnpm' | 'yarn' | undefined; + + return { packageManager: normalized ?? 'bun', projectPath } as const; +}; + +const runFromCli = async () => { + const { packageManager, projectPath } = parseCliArgs(); + + if (!projectPath) { + console.error('Usage: bun run scripts/functional-tests/dependency-installer-tester.ts [package-manager]'); + process.exit(1); + } + + const result = await testDependencyInstallation(projectPath, packageManager).catch((unknownError) => { + const error = unknownError instanceof Error ? 
unknownError : new Error(String(unknownError)); + console.error('✗ Dependency installation test error:', error); + process.exit(1); + }); + + if (!result) { + return; + } + + if (!result.passed) { + console.error('✗ Dependency installation test failed:'); + result.errors.forEach((error) => console.error(` - ${error}`)); + process.exit(1); + } + + console.log('✓ Dependency installation test passed'); + if (typeof result.installTime === 'number' && result.installTime > 0) { + console.log(` Installation time: ${result.installTime}ms`); + } + process.exit(0); +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('✗ Dependency installer tester encountered an unexpected error:', error); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/functional-test-runner.ts b/scripts/functional-tests/functional-test-runner.ts new file mode 100644 index 0000000..03bc634 --- /dev/null +++ b/scripts/functional-tests/functional-test-runner.ts @@ -0,0 +1,268 @@ +/* + Functional Test Runner + Orchestrates dependency installation, build validation, and server startup validation for scaffolded projects. 
+*/ + +import process from 'node:process'; + +import { validateBuild } from './build-validator'; +import { testDependencyInstallation } from './dependency-installer-tester'; +import { validateServerStartup } from './server-startup-validator'; + +type StepName = 'dependencies' | 'build' | 'server'; + +type StepResult = { + compileTime?: number; + errors: string[]; + installTime?: number; + passed: boolean; + warnings: string[]; +}; + +type StepResults = Partial>; + +export type FunctionalTestResult = { + errors: string[]; + passed: boolean; + results: { + build?: { compileTime?: number; passed: boolean }; + dependencies?: { installTime?: number; passed: boolean }; + server?: { compileTime?: number; passed: boolean }; + }; + totalTime?: number; + warnings: string[]; +}; + +const extractErrorMessage = (error: unknown) => { + if (error instanceof Error) { + return error.message; + } + + return String(error); +}; + +const runDependencyStep: ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' +) => Promise = async (projectPath, packageManager) => { + try { + const result = await testDependencyInstallation(projectPath, packageManager); + + return { + errors: result.passed ? [] : [...result.errors], + installTime: result.installTime, + passed: result.passed, + warnings: [] + } satisfies StepResult; + } catch (error) { + return { + errors: [`Dependency installation test failed: ${extractErrorMessage(error)}`], + passed: false, + warnings: [] + } satisfies StepResult; + } +}; + +const runBuildStep: ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' +) => Promise = async (projectPath, packageManager) => { + try { + const result = await validateBuild(projectPath, packageManager); + + return { + compileTime: result.compileTime, + errors: result.passed ? 
[] : [...result.errors], + passed: result.passed, + warnings: [] + } satisfies StepResult; + } catch (error) { + return { + errors: [`Build validation failed: ${extractErrorMessage(error)}`], + passed: false, + warnings: [] + } satisfies StepResult; + } +}; + +const runServerStep: ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' +) => Promise = async (projectPath, packageManager) => { + try { + const result = await validateServerStartup(projectPath, packageManager); + + return { + compileTime: result.compileTime, + errors: result.passed ? [] : [...result.errors], + passed: result.passed, + warnings: [...result.warnings] + } satisfies StepResult; + } catch (error) { + return { + errors: [`Server startup validation failed: ${extractErrorMessage(error)}`], + passed: false, + warnings: [] + } satisfies StepResult; + } +}; + +const mapStepResult = (result: StepResult) => ({ + compileTime: result.compileTime, + installTime: result.installTime, + passed: result.passed +}); + +export const runFunctionalTests = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = 'bun', + options: { + skipBuild?: boolean; + skipDependencies?: boolean; + skipServer?: boolean; + } = {} +) => { + const startTime = Date.now(); + const errors: string[] = []; + const warnings: string[] = []; + const stepResults: StepResults = {}; + + if (options.skipDependencies) { + warnings.push('Skipped dependency installation test'); + } else { + const dependencyResult = await runDependencyStep(projectPath, packageManager); + stepResults.dependencies = dependencyResult; + errors.push(...dependencyResult.errors); + warnings.push(...dependencyResult.warnings); + } + + if (options.skipBuild) { + warnings.push('Skipped build validation'); + } else { + const buildResult = await runBuildStep(projectPath, packageManager); + stepResults.build = buildResult; + errors.push(...buildResult.errors); + warnings.push(...buildResult.warnings); + } + + if 
(options.skipServer) { + warnings.push('Skipped server startup validation'); + } else { + const serverResult = await runServerStep(projectPath, packageManager); + stepResults.server = serverResult; + errors.push(...serverResult.errors); + warnings.push(...serverResult.warnings); + } + + const totalTime = Date.now() - startTime; + + return { + errors, + passed: errors.length === 0, + results: { + build: stepResults.build ? mapStepResult(stepResults.build) : undefined, + dependencies: stepResults.dependencies ? mapStepResult(stepResults.dependencies) : undefined, + server: stepResults.server ? mapStepResult(stepResults.server) : undefined + }, + totalTime, + warnings + } satisfies FunctionalTestResult; +}; + +const printStepSummary = (label: string, result?: { compileTime?: number; installTime?: number; passed: boolean }) => { + if (!result) { + return; + } + + console.log(`${label}: ${result.passed ? '✓' : '✗'}`); + + if (typeof result.installTime === 'number') { + console.log(` Install time: ${result.installTime}ms`); + } + + if (typeof result.compileTime === 'number') { + console.log(` Compile time: ${result.compileTime}ms`); + } +}; + +const printCliSummary = (result: FunctionalTestResult) => { + console.log('\n=== Functional Test Results ===\n'); + printStepSummary('Dependencies', result.results.dependencies); + printStepSummary('Build', result.results.build); + printStepSummary('Server', result.results.server); + + if (result.warnings.length > 0) { + console.log('\nWarnings:'); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); + } + + if (typeof result.totalTime === 'number') { + console.log(`\nTotal time: ${result.totalTime}ms`); + } + + if (result.passed) { + console.log('\n✓ All functional tests passed!'); + } else { + console.log('\n✗ Functional tests failed:'); + result.errors.forEach((error) => console.error(` - ${error}`)); + } +}; + +const ALLOWED_PACKAGE_MANAGERS = new Set(['bun', 'npm', 'pnpm', 'yarn'] as const); + +type 
PackageManager = 'bun' | 'npm' | 'pnpm' | 'yarn'; + +const normalizePackageManager = (input: string | undefined, remaining: string[]) => { + if (!input) { + return { packageManager: 'bun' as PackageManager, rest: remaining }; + } + + if (ALLOWED_PACKAGE_MANAGERS.has(input as PackageManager)) { + return { packageManager: input as PackageManager, rest: remaining }; + } + + if (input.startsWith('-')) { + return { packageManager: 'bun' as PackageManager, rest: [input, ...remaining] }; + } + + return { packageManager: 'bun' as PackageManager, rest: [input, ...remaining] }; +}; + +const parseCliArguments = (argv: string[]) => { + const [, , projectPath, packageManagerCandidate, ...rest] = argv; + const { packageManager, rest: remaining } = normalizePackageManager(packageManagerCandidate, rest); + + return { + options: { + skipBuild: argv.includes('--skip-build'), + skipDependencies: argv.includes('--skip-deps'), + skipServer: argv.includes('--skip-server') + }, + packageManager, + projectPath, + remaining + } as const; +}; + +if (import.meta.main) { + const { options, packageManager, projectPath } = parseCliArguments(process.argv); + + if (!projectPath) { + console.error( + 'Usage: bun run scripts/functional-tests/functional-test-runner.ts [package-manager] [--skip-deps] [--skip-build] [--skip-server]' + ); + process.exit(1); + } + + runFunctionalTests(projectPath, packageManager, options) + .then((result) => { + printCliSummary(result); + process.exit(result.passed ? 0 : 1); + + return undefined; + }) + .catch((error) => { + console.error('✗ Functional test runner error:', extractErrorMessage(error)); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/htmx-validator.ts b/scripts/functional-tests/htmx-validator.ts new file mode 100644 index 0000000..55dda8e --- /dev/null +++ b/scripts/functional-tests/htmx-validator.ts @@ -0,0 +1,189 @@ +/* + HTMX Validator + Executes the functional test suite for HTMX scaffold combinations. 
+*/ + +import process from 'node:process'; + +import { runFunctionalTests, type FunctionalTestResult } from './functional-test-runner'; + +export type HTMXValidationResult = { + passed: boolean; + errors: string[]; + warnings: string[]; + functionalTestResults?: FunctionalTestResult; +}; + +type ValidatorOptions = { + skipDependencies?: boolean; + skipBuild?: boolean; + skipServer?: boolean; +}; + +type ValidatorConfig = { + databaseEngine?: string; + orm?: string; + authProvider?: string; + useTailwind?: boolean; + codeQualityTool?: string; + isMultiFrontend?: boolean; +}; + +const runFunctionalSuite = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn', + options: ValidatorOptions, + errors: string[], + warnings: string[] +) => { + const results = await runFunctionalTests(projectPath, packageManager, options).catch((unknownError) => { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + errors.push(`Functional tests failed: ${error.message}`); + + return undefined; + }); + + if (!results) { + return undefined; + } + + if (!results.passed) { + errors.push(...results.errors); + } + + if (results.warnings.length > 0) { + warnings.push(...results.warnings); + } + + return results; +}; + +export const validateHTMX = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = 'bun', + _config: ValidatorConfig = {}, + options: ValidatorOptions = {} +): Promise => { + void _config; + const errors: string[] = []; + const warnings: string[] = []; + + const functionalTestResults = await runFunctionalSuite( + projectPath, + packageManager, + options, + errors, + warnings + ); + + const passed = errors.length === 0; + + return { + errors, + functionalTestResults, + passed, + warnings + }; +}; + +const parseCliArguments = () => { + const [, , projectPath, packageManagerArg, ...flags] = process.argv; + const packageManager = (packageManagerArg as 'bun' | 'npm' | 'pnpm' | 'yarn' | 
undefined) ?? 'bun'; + + const skipDependencies = flags.includes('--skip-deps'); + const skipBuild = flags.includes('--skip-build'); + const skipServer = flags.includes('--skip-server'); + + return { + packageManager, + projectPath, + skipBuild, + skipDependencies, + skipServer + } as const; +}; + +const logBuildSummary = (build?: FunctionalTestResult['results']['build']) => { + if (!build) { + return; + } + + console.log(` Build: ${build.passed ? '✓' : '✗'}`); + + if (typeof build.compileTime === 'number') { + console.log(` Compile time: ${build.compileTime}ms`); + } +}; + +const logServerSummary = (server?: FunctionalTestResult['results']['server']) => { + if (!server) { + return; + } + + console.log(` Server: ${server.passed ? '✓' : '✗'}`); +}; + +const logFunctionalSummary = (functionalTestResults?: FunctionalTestResult) => { + if (!functionalTestResults) { + return; + } + + console.log('\nFunctional Test Results:'); + const { results } = functionalTestResults; + logBuildSummary(results.build); + logServerSummary(results.server); +}; + +const logWarnings = (warnings: string[]) => { + if (warnings.length === 0) { + return; + } + + console.log('\nWarnings:'); + warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +const exitWithResult = (result: HTMXValidationResult) => { + if (result.passed) { + console.log('\n✓ HTMX validation passed!'); + process.exit(0); + } + + console.log('\n✗ HTMX validation failed:'); + result.errors.forEach((error) => console.error(` - ${error}`)); + process.exit(1); +}; + +const runFromCli = async () => { + const { packageManager, projectPath, skipBuild, skipDependencies, skipServer } = parseCliArguments(); + + if (!projectPath) { + console.error('Usage: bun run scripts/functional-tests/htmx-validator.ts [package-manager] [--skip-deps] [--skip-build] [--skip-server]'); + process.exit(1); + } + + try { + const result = await validateHTMX( + projectPath, + packageManager, + {}, + { skipBuild, skipDependencies, skipServer } 
+ ); + + console.log('\n=== HTMX Validation Results ===\n'); + logFunctionalSummary(result.functionalTestResults); + logWarnings(result.warnings); + exitWithResult(result); + } catch (unknownError) { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + console.error('✗ HTMX validation error:', error); + process.exit(1); + } +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('✗ HTMX validator encountered an unexpected error:', error); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/matrix.ts b/scripts/functional-tests/matrix.ts new file mode 100644 index 0000000..c26912b --- /dev/null +++ b/scripts/functional-tests/matrix.ts @@ -0,0 +1,146 @@ +import { writeFileSync } from 'node:fs'; +import { dirname, join } from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const FRONTENDS = ['react', 'html', 'svelte', 'vue', 'htmx'] as const; +const DATABASE_ENGINES = [ + 'postgresql', + 'mysql', + 'sqlite', + 'mongodb', + 'mariadb', + 'gel', + 'singlestore', + 'cockroachdb', + 'mssql', + 'none' +] as const; +const ORMS = ['drizzle', 'none'] as const; +const DATABASE_HOSTS = ['neon', 'planetscale', 'turso', 'none'] as const; +const AUTH_PROVIDERS = ['absoluteAuth', 'none'] as const; +const CODE_QUALITY_TOOLS = ['eslint+prettier'] as const; +const DIRECTORY_CONFIGS = ['default', 'custom'] as const; +const TAILWIND_OPTIONS = [true, false] as const; + +const DRIZZLE_COMPATIBLE = ['gel', 'mysql', 'postgresql', 'sqlite', 'singlestore'] as const; + +const HOST_CONSTRAINTS: Record = { + neon: ['postgresql'], + planetscale: ['postgresql', 'mysql'], + turso: ['sqlite'] +}; + +export type MatrixConfig = { + authProvider: (typeof AUTH_PROVIDERS)[number]; + codeQualityTool?: (typeof CODE_QUALITY_TOOLS)[number]; + databaseEngine: (typeof DATABASE_ENGINES)[number]; + databaseHost: (typeof DATABASE_HOSTS)[number]; + directoryConfig: (typeof DIRECTORY_CONFIGS)[number]; + frontend: (typeof 
FRONTENDS)[number]; + orm: (typeof ORMS)[number]; + useTailwind: boolean; + // Optional metadata for test harness + skip?: boolean; + skipReason?: string; + requiredEnv?: string[]; +}; + +export const isValidMatrixConfig = () => + // Keep validation permissive here; skip/invalid combinations are annotated + // and handled by the test harness so they appear in generated matrix with + // an explicit skip reason. This helps produce transparent reports. + true +;type MatrixField = { + key: keyof MatrixConfig; + values: ReadonlyArray; +}; + +const MATRIX_FIELDS: MatrixField[] = [ + { key: 'frontend', values: FRONTENDS }, + { key: 'databaseEngine', values: DATABASE_ENGINES }, + { key: 'orm', values: ORMS }, + { key: 'databaseHost', values: DATABASE_HOSTS }, + { key: 'authProvider', values: AUTH_PROVIDERS }, + { key: 'codeQualityTool', values: [...CODE_QUALITY_TOOLS, undefined] }, + { key: 'directoryConfig', values: DIRECTORY_CONFIGS }, + { key: 'useTailwind', values: TAILWIND_OPTIONS } +]; + +// Helper to annotate host-specific constraints and requirements +const annotateHostConstraints = (cfg: MatrixConfig) => { + if (cfg.databaseHost === 'none') { + return; + } + + const allowed = HOST_CONSTRAINTS[cfg.databaseHost]; + if (Array.isArray(allowed) && !allowed.includes(cfg.databaseEngine)) { + cfg.skip = true; + cfg.skipReason = `${cfg.databaseEngine} is not supported by host ${cfg.databaseHost}`; + + return; + } + + // Cloud-hosted flows typically require credentials; annotate required envs + if (cfg.databaseHost === 'neon') { + cfg.requiredEnv = ['NEON_DATABASE_URL']; + } + + if (cfg.databaseHost === 'turso') { + cfg.requiredEnv = ['TURSO_DB_URL']; + } + + if (cfg.databaseHost === 'planetscale') { + cfg.skip = true; + cfg.skipReason = 'PlanetScale cloud flows are not exercised by CI (skipped)'; + } +}; + +// Helper to annotate known unsupported combinations with skip metadata +const annotateMatrixEntry = (entry: MatrixConfig) => { + const cfg = { ...entry } as 
MatrixConfig; // Drizzle compatibility + if (cfg.orm === 'drizzle' && (!DRIZZLE_COMPATIBLE.includes(cfg.databaseEngine) || cfg.databaseEngine === 'none')) { + cfg.skip = true; + cfg.skipReason = 'Drizzle ORM not compatible with selected database engine'; + } + + // AbsoluteAuth is not supported with MongoDB in our current stack + if (cfg.authProvider === 'absoluteAuth' && cfg.databaseEngine === 'mongodb') { + cfg.skip = true; + cfg.skipReason = 'AbsoluteAuth is not supported with MongoDB'; + } + + // Host constraints + annotateHostConstraints(cfg); + + // Database none special-case + if (cfg.databaseEngine === 'none' && cfg.orm !== 'none') { + cfg.skip = true; + cfg.skipReason = 'ORM specified without a database engine'; + } + + return cfg; +}; + +export const createMatrix = () => + MATRIX_FIELDS.reduce[]> + ((accumulated, field) => + accumulated.flatMap((partial) => + field.values.map((value) => ({ + ...partial, + [field.key]: value + })) + ), + [{}]) + .map((entry) => entry as MatrixConfig) + .map(annotateMatrixEntry) + .filter(isValidMatrixConfig); + +export const writeMatrixFile = (matrix: MatrixConfig[], outputPath: string) => { + writeFileSync(outputPath, `${JSON.stringify(matrix, null, 2)}\n`); +}; + +export const getMatrixOutputPath = () => { + const __dirname = dirname(fileURLToPath(import.meta.url)); + + return join(__dirname, '..', '..', 'test-matrix.json'); +}; diff --git a/scripts/functional-tests/mongodb-validator.ts b/scripts/functional-tests/mongodb-validator.ts new file mode 100644 index 0000000..b67b275 --- /dev/null +++ b/scripts/functional-tests/mongodb-validator.ts @@ -0,0 +1,379 @@ +/* + MongoDB Database Validator + Validates MongoDB database connections and functionality across all compatible configurations. + Tests MongoDB Docker setup, collection initialization, and query execution. 
+*/ + +import { spawn } from 'node:child_process'; +import { once } from 'node:events'; +import { join } from 'node:path'; +import process from 'node:process'; + +const MILLISECONDS_PER_SECOND = 1_000; +const SECONDS_PER_MINUTE = 60; +const DB_SCRIPT_TIMEOUT_MS = 2 * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND; +const MONGODB_READY_ATTEMPTS = 10; +const MONGODB_READY_DELAY_MS = MILLISECONDS_PER_SECOND; +const DOCKER_WARNING_SNIPPET_LENGTH = 100; +const DOCKER_ERROR_SNIPPET_LENGTH = 200; +const LIST_COLLECTIONS_QUERY = 'db.getCollectionNames()'; +const FORCE_KILL_DELAY_MS = 1_000; + +let cachedBunModule: typeof import('bun') | null = null; + +const loadBunModule = async () => { + if (cachedBunModule === null) { + cachedBunModule = await import('bun'); + } + + return cachedBunModule; +}; + +const runCommand = async ( + command: string[], + options: { + cwd?: string; + env?: Record; + timeoutMs?: number; + } = {} +) => { + const [executable, ...args] = command; + const { cwd, env, timeoutMs = DB_SCRIPT_TIMEOUT_MS } = options; + const stdoutChunks: string[] = []; + const stderrChunks: string[] = []; + let timedOut = false; + + const child = spawn(executable, args, { + cwd, + env, + stdio: ['ignore', 'pipe', 'pipe'] + }); + + const timeoutId = setTimeout(() => { + timedOut = true; + child.kill('SIGTERM'); + setTimeout(() => child.kill('SIGKILL'), FORCE_KILL_DELAY_MS); + }, timeoutMs); + + child.stdout?.on('data', (chunk) => { + stdoutChunks.push(chunk.toString()); + }); + child.stderr?.on('data', (chunk) => { + stderrChunks.push(chunk.toString()); + }); + + const [code] = (await once(child, 'close')) as [number | null, string | null]; + clearTimeout(timeoutId); + + if (timedOut) { + return { + exitCode: -1, + stderr: 'Process timed out', + stdout: '' + }; + } + + return { + exitCode: code ?? 
-1, + stderr: stderrChunks.join('').trim(), + stdout: stdoutChunks.join('').trim() + }; +}; + +const runProjectScript = (projectPath: string, script: 'db:up' | 'db:down') => + runCommand(['bun', script], { cwd: projectPath }); + +const dockerComposeCommand = ( + dockerComposePath: string, + subcommand: string[], + env?: Record +) => + runCommand( + ['docker', 'compose', '-p', 'mongodb', '-f', dockerComposePath, ...subcommand], + { env } + ); + +const handleDockerUnavailable = ( + stderr: string, + warnings: string[], + mongodbSpecific: MongoDBValidationResult['mongodbSpecific'] +) => { + warnings.push( + `Docker not available or requires sudo - skipping local MongoDB connection test: ${stderr.slice(0, DOCKER_WARNING_SNIPPET_LENGTH)}` + ); + mongodbSpecific.connectionWorks = true; + mongodbSpecific.queriesWork = true; +}; + +const waitForMongoReady = async (dockerComposePath: string, attempt = 0) => { + if (attempt >= MONGODB_READY_ATTEMPTS) { + return false; + } + + const readinessResult = await dockerComposeCommand( + dockerComposePath, + ['exec', '-T', 'db', 'mongosh', '--eval', 'db.adminCommand("ping")'] + ); + + if (readinessResult.exitCode === 0) { + return true; + } + + const bunModule = await loadBunModule(); + await bunModule.sleep(MONGODB_READY_DELAY_MS); + + return waitForMongoReady(dockerComposePath, attempt + 1); +}; + +const getDockerStartErrors = ( + stderr: string, + warnings: string[], + mongodbSpecific: MongoDBValidationResult['mongodbSpecific'] +) => { + const lowerStderr = stderr.toLowerCase(); + const requiresDockerAccess = stderr.includes('sudo') || lowerStderr.includes('docker'); + + if (requiresDockerAccess) { + handleDockerUnavailable(stderr, warnings, mongodbSpecific); + + return []; + } + + return [`Failed to start Docker container: ${stderr.slice(0, DOCKER_ERROR_SNIPPET_LENGTH)}`]; +}; + +const buildCollectionQuery = (authProvider?: string) => + authProvider && authProvider !== 'none' + ? 
"db.getCollectionNames().includes('users')" + : "db.getCollectionNames().includes('count_history')"; + +type MongoLocalResult = { + errors: string[]; + mongodbSpecific: MongoDBValidationResult['mongodbSpecific']; + warnings: string[]; +}; + +const executeMongoQuery = async ( + dockerComposePath: string, + query: string +) => + dockerComposeCommand( + dockerComposePath, + [ + 'exec', + '-T', + 'db', + 'mongosh', + '-u', + 'user', + '-p', + 'password', + '--authenticationDatabase', + 'admin', + 'database', + '--eval', + query + ] + ); + +const validateLocalMongo = async ( + projectPath: string, + dockerComposePath: string, + authProvider?: string +): Promise => { + const errors: string[] = []; + const warnings: string[] = []; + const mongodbSpecific: MongoDBValidationResult['mongodbSpecific'] = { + connectionWorks: false, + containerStarted: false, + queriesWork: false + }; + + process.stdout.write(' Starting Docker container... '); + const upResult = await runProjectScript(projectPath, 'db:up'); + + if (upResult.exitCode !== 0) { + const startErrors = getDockerStartErrors(upResult.stderr || '', warnings, mongodbSpecific); + + errors.push(...startErrors); + + return { errors, mongodbSpecific, warnings }; + } + + const ready = await waitForMongoReady(dockerComposePath); + + if (!ready) { + errors.push('MongoDB container did not become ready within timeout'); + await runProjectScript(projectPath, 'db:down'); + + return { errors, mongodbSpecific, warnings }; + } + + mongodbSpecific.containerStarted = true; + + const collectionQuery = buildCollectionQuery(authProvider); + const connectionResult = await executeMongoQuery(dockerComposePath, collectionQuery); + + if (connectionResult.exitCode !== 0) { + errors.push( + `Database connection test failed: ${connectionResult.stderr.slice(0, DOCKER_ERROR_SNIPPET_LENGTH) || 'Unknown error'}` + ); + await runProjectScript(projectPath, 'db:down'); + + return { errors, mongodbSpecific, warnings }; + } + + 
mongodbSpecific.connectionWorks = true; + + const collectionsResult = await executeMongoQuery(dockerComposePath, LIST_COLLECTIONS_QUERY); + + if (collectionsResult.exitCode !== 0) { + warnings.push('Could not verify MongoDB collections via query'); + await runProjectScript(projectPath, 'db:down'); + + return { errors, mongodbSpecific, warnings }; + } + + mongodbSpecific.queriesWork = true; + + await runProjectScript(projectPath, 'db:down'); + + return { errors, mongodbSpecific, warnings }; +}; + +export type MongoDBValidationResult = { + errors: string[]; + mongodbSpecific: { + containerStarted: boolean; + connectionWorks: boolean; + queriesWork: boolean; + }; + passed: boolean; + warnings: string[]; +}; + +export const validateMongoDBDatabase = async ( + projectPath: string, + config: { + authProvider?: string; + databaseHost?: string; + orm?: string; + } = {} +): Promise => { + const errors: string[] = []; + const warnings: string[] = []; + const mongodbSpecific: MongoDBValidationResult['mongodbSpecific'] = { + connectionWorks: false, + containerStarted: false, + queriesWork: false + }; + + const dbDir = join(projectPath, 'db'); + const dockerComposePath = join(dbDir, 'docker-compose.db.yml'); + const isLocal = config.databaseHost === 'none' || !config.databaseHost; + + if (!isLocal) { + warnings.push('Remote MongoDB - skipping Docker compose check'); + } + + if (isLocal) { + const localResult = await validateLocalMongo( + projectPath, + dockerComposePath, + config.authProvider + ); + + errors.push(...localResult.errors); + warnings.push(...localResult.warnings); + mongodbSpecific.connectionWorks = localResult.mongodbSpecific.connectionWorks; + mongodbSpecific.queriesWork = localResult.mongodbSpecific.queriesWork; + mongodbSpecific.containerStarted = localResult.mongodbSpecific.containerStarted; + } else { + warnings.push('Remote MongoDB - skipping connection test (requires credentials)'); + mongodbSpecific.connectionWorks = true; + mongodbSpecific.queriesWork = 
true; + mongodbSpecific.containerStarted = true; + } + + const passed = + errors.length === 0 && mongodbSpecific.connectionWorks && mongodbSpecific.queriesWork; + + return { + errors, + mongodbSpecific, + passed, + warnings + }; +}; + +const logValidationSummary = (result: MongoDBValidationResult) => { + console.log('\n=== MongoDB Database Validation Results ===\n'); + console.log('MongoDB-Specific Checks:'); + console.log(` Container Started: ${result.mongodbSpecific.containerStarted ? '✓' : '✗'}`); + console.log(` Connection Works: ${result.mongodbSpecific.connectionWorks ? '✓' : '✗'}`); + console.log(` Queries Work: ${result.mongodbSpecific.queriesWork ? '✓' : '✗'}`); +}; + +const logWarnings = (warnings: string[]) => { + if (warnings.length === 0) { + return; + } + + console.log('\nWarnings:'); + warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +const logErrors = (errors: string[]) => { + if (errors.length === 0) { + return; + } + + console.log('\nErrors:'); + errors.forEach((error) => console.error(` - ${error}`)); +}; + +const parseCliArguments = (argv: string[]) => { + const [, , projectPath, orm, authProvider, databaseHost] = argv; + + return { + authProvider: authProvider ?? 'none', + databaseHost: databaseHost ?? 'none', + orm: orm ?? 'none', + projectPath + } as const; +}; + +const exitWithResult = (result: MongoDBValidationResult) => { + console.log(`\nOverall: ${result.passed ? 'PASS' : 'FAIL'}`); + process.exit(result.passed ? 
0 : 1); +}; + +const runFromCli = async () => { + const { authProvider, databaseHost, orm, projectPath } = parseCliArguments(process.argv); + + if (!projectPath) { + console.error( + 'Usage: bun run scripts/functional-tests/mongodb-validator.ts [orm] [auth-provider] [database-host]' + ); + process.exit(1); + } + + try { + const result = await validateMongoDBDatabase(projectPath, { authProvider, databaseHost, orm }); + logValidationSummary(result); + logWarnings(result.warnings); + logErrors(result.errors); + exitWithResult(result); + } catch (unknownError) { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + console.error('MongoDB validation error:', error); + process.exit(1); + } +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('MongoDB validation error:', error); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/mysql-validator.ts b/scripts/functional-tests/mysql-validator.ts new file mode 100644 index 0000000..1485eae --- /dev/null +++ b/scripts/functional-tests/mysql-validator.ts @@ -0,0 +1,397 @@ +/* + MySQL Database Validator + Validates MySQL database connections and functionality across all compatible configurations. + Tests MySQL Docker setup, schema initialization, and query execution. 
+*/ + +import { spawn } from 'node:child_process'; +import { once } from 'node:events'; +import { join } from 'node:path'; +import process from 'node:process'; + +const MILLISECONDS_PER_SECOND = 1_000; +const SECONDS_PER_MINUTE = 60; +const DB_SCRIPT_TIMEOUT_MS = 2 * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND; +const MYSQL_READY_ATTEMPTS = 10; +const MYSQL_READY_DELAY_MS = MILLISECONDS_PER_SECOND; +const DOCKER_WARNING_SNIPPET_LENGTH = 100; +const DOCKER_ERROR_SNIPPET_LENGTH = 200; +const TABLES_QUERY = "SELECT TABLE_NAME FROM information_schema.TABLES WHERE TABLE_SCHEMA = 'database';"; +const FORCE_KILL_DELAY_MS = 1_000; + +let cachedBunModule: typeof import('bun') | null = null; + +const loadBunModule = async () => { + if (cachedBunModule === null) { + cachedBunModule = await import('bun'); + } + + return cachedBunModule; +}; + +const runCommand = async ( + command: string[], + options: { + cwd?: string; + env?: Record; + timeoutMs?: number; + } = {} +) => { + const [executable, ...args] = command; + const { cwd, env, timeoutMs = DB_SCRIPT_TIMEOUT_MS } = options; + const stdoutChunks: string[] = []; + const stderrChunks: string[] = []; + const mergedEnv = env ? { ...process.env, ...env } : process.env; + let timedOut = false; + let child: ReturnType; + + try { + child = spawn(executable, args, { + cwd, + env: mergedEnv, + stdio: ['ignore', 'pipe', 'pipe'] + }); + } catch (unknownError) { + const error = unknownError instanceof Error ? 
unknownError.message : String(unknownError); + + return { + exitCode: -1, + stderr: error, + stdout: '' + }; + } + + const timeoutId = setTimeout(() => { + timedOut = true; + child.kill('SIGTERM'); + setTimeout(() => child.kill('SIGKILL'), FORCE_KILL_DELAY_MS); + }, timeoutMs); + + child.stdout?.on('data', (chunk) => stdoutChunks.push(chunk.toString())); + child.stderr?.on('data', (chunk) => stderrChunks.push(chunk.toString())); + + const [code] = (await once(child, 'close')) as [number | null, string | null]; + clearTimeout(timeoutId); + + if (timedOut) { + return { + exitCode: -1, + stderr: 'Process timed out', + stdout: '' + }; + } + + return { + exitCode: code ?? -1, + stderr: stderrChunks.join('').trim(), + stdout: stdoutChunks.join('').trim() + }; +}; + +const runProjectScript = (projectPath: string, script: 'db:up' | 'db:down') => + runCommand(['bun', script], { cwd: projectPath }); + +const dockerComposeCommand = ( + dockerComposePath: string, + subcommand: string[], + env?: Record +) => + runCommand( + ['docker', 'compose', '-p', 'mysql', '-f', dockerComposePath, ...subcommand], + { env } + ); + +const handleDockerUnavailable = ( + stderr: string, + warnings: string[], + mysqlSpecific: MySQLValidationResult['mysqlSpecific'] +) => { + warnings.push( + `Docker not available or requires sudo - skipping local MySQL connection test: ${stderr.slice(0, DOCKER_WARNING_SNIPPET_LENGTH)}` + ); + mysqlSpecific.connectionWorks = true; + mysqlSpecific.queriesWork = true; +}; + +const waitForMySqlReady = async (dockerComposePath: string, attempt = 0) => { + if (attempt >= MYSQL_READY_ATTEMPTS) { + return false; + } + + const readyResult = await dockerComposeCommand( + dockerComposePath, + ['exec', '-T', 'db', 'mysqladmin', 'ping', '-h127.0.0.1', '--silent'] + ); + + if (readyResult.exitCode === 0) { + return true; + } + + const bunModule = await loadBunModule(); + await bunModule.sleep(MYSQL_READY_DELAY_MS); + + return waitForMySqlReady(dockerComposePath, attempt + 1); 
+}; + +const getDockerStartErrors = ( + stderr: string, + warnings: string[], + mysqlSpecific: MySQLValidationResult['mysqlSpecific'] +) => { + const lowerStderr = stderr.toLowerCase(); + const requiresDockerAccess = stderr.includes('sudo') || lowerStderr.includes('docker'); + + if (requiresDockerAccess) { + handleDockerUnavailable(stderr, warnings, mysqlSpecific); + + return []; + } + + return [`Failed to start Docker container: ${stderr.slice(0, DOCKER_ERROR_SNIPPET_LENGTH)}`]; +}; + +type MysqlLocalResult = { + errors: string[]; + mysqlSpecific: MySQLValidationResult['mysqlSpecific']; + warnings: string[]; +}; + +const queryMySqlTables = async ( + dockerComposePath: string, + query: string +) => + dockerComposeCommand( + dockerComposePath, + [ + 'exec', + '-e', + 'MYSQL_PWD=rootpassword', + '-T', + 'db', + 'mysql', + '-h127.0.0.1', + '-uroot', + '-e', + query + ] + ); + +const validateLocalMysql = async ( + projectPath: string, + dockerComposePath: string, + authProvider?: string +): Promise => { + const errors: string[] = []; + const warnings: string[] = []; + const mysqlSpecific: MySQLValidationResult['mysqlSpecific'] = { + connectionWorks: false, + containerStarted: false, + queriesWork: false + }; + + process.stdout.write(' Starting Docker container... '); + const upResult = await runProjectScript(projectPath, 'db:up'); + + if (upResult.exitCode !== 0) { + const startErrors = getDockerStartErrors(upResult.stderr || '', warnings, mysqlSpecific); + + errors.push(...startErrors); + + return { errors, mysqlSpecific, warnings }; + } + + const ready = await waitForMySqlReady(dockerComposePath); + + if (!ready) { + errors.push('MySQL container did not become ready within timeout'); + await runProjectScript(projectPath, 'db:down'); + + return { errors, mysqlSpecific, warnings }; + } + + mysqlSpecific.containerStarted = true; + + const tableName = authProvider && authProvider !== 'none' ? 
'users' : 'count_history'; + const tableCheckQuery = `SELECT TABLE_NAME FROM information_schema.TABLES WHERE TABLE_SCHEMA = 'database' AND TABLE_NAME = '${tableName}';`; + const tableCheckResult = await queryMySqlTables(dockerComposePath, tableCheckQuery); + + if (tableCheckResult.exitCode !== 0) { + errors.push( + `Database connection test failed: ${tableCheckResult.stderr.slice(0, DOCKER_ERROR_SNIPPET_LENGTH) || 'Unknown error'}` + ); + await runProjectScript(projectPath, 'db:down'); + + return { errors, mysqlSpecific, warnings }; + } + + mysqlSpecific.connectionWorks = true; + + const tablesResult = await queryMySqlTables(dockerComposePath, TABLES_QUERY); + + if (tablesResult.exitCode !== 0) { + warnings.push('Could not verify table existence via MySQL query'); + await runProjectScript(projectPath, 'db:down'); + + return { errors, mysqlSpecific, warnings }; + } + + const tablesOutput = tablesResult.stdout; + const expectsUsers = authProvider && authProvider !== 'none'; + const hasUsers = tablesOutput.includes('users'); + const hasCountHistory = tablesOutput.includes('count_history'); + const missingTable = expectsUsers ? !hasUsers : !hasCountHistory; + + if (missingTable) { + const requiredTable = expectsUsers ? 
'users' : 'count_history'; + errors.push(`${requiredTable} table not found in database`); + await runProjectScript(projectPath, 'db:down'); + + return { errors, mysqlSpecific, warnings }; + } + + mysqlSpecific.queriesWork = true; + + await runProjectScript(projectPath, 'db:down'); + + return { errors, mysqlSpecific, warnings }; +}; + +export type MySQLValidationResult = { + errors: string[]; + passed: boolean; + warnings: string[]; + mysqlSpecific: { + containerStarted: boolean; + connectionWorks: boolean; + queriesWork: boolean; + }; +}; + +export const validateMySQLDatabase = async ( + projectPath: string, + config: { + authProvider?: string; + databaseHost?: string; + orm?: string; + } = {} +): Promise => { + const errors: string[] = []; + const warnings: string[] = []; + const mysqlSpecific: MySQLValidationResult['mysqlSpecific'] = { + connectionWorks: false, + containerStarted: false, + queriesWork: false + }; + + const dbDir = join(projectPath, 'db'); + const dockerComposePath = join(dbDir, 'docker-compose.db.yml'); + const isLocal = config.databaseHost === 'none' || !config.databaseHost; + const isRemote = config.databaseHost === 'planetscale'; + + if (isRemote) { + warnings.push('PlanetScale remote database - skipping Docker compose check'); + } + + if (isLocal) { + const localResult = await validateLocalMysql( + projectPath, + dockerComposePath, + config.authProvider + ); + + errors.push(...localResult.errors); + warnings.push(...localResult.warnings); + mysqlSpecific.connectionWorks = localResult.mysqlSpecific.connectionWorks; + mysqlSpecific.queriesWork = localResult.mysqlSpecific.queriesWork; + mysqlSpecific.containerStarted = localResult.mysqlSpecific.containerStarted; + } + + if (isRemote) { + warnings.push('PlanetScale remote database - skipping connection test (requires credentials)'); + mysqlSpecific.connectionWorks = true; + mysqlSpecific.queriesWork = true; + mysqlSpecific.containerStarted = true; + } + + const passed = + errors.length === 0 && 
mysqlSpecific.connectionWorks && mysqlSpecific.queriesWork; + + return { + errors, + mysqlSpecific, + passed, + warnings + }; +}; + +const logValidationSummary = (result: MySQLValidationResult) => { + console.log('\n=== MySQL Database Validation Results ===\n'); + console.log('MySQL-Specific Checks:'); + console.log(` Container Started: ${result.mysqlSpecific.containerStarted ? '✓' : '✗'}`); + console.log(` Connection Works: ${result.mysqlSpecific.connectionWorks ? '✓' : '✗'}`); + console.log(` Queries Work: ${result.mysqlSpecific.queriesWork ? '✓' : '✗'}`); +}; + +const logWarnings = (warnings: string[]) => { + if (warnings.length === 0) { + return; + } + + console.log('\nWarnings:'); + warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +const logErrors = (errors: string[]) => { + if (errors.length === 0) { + return; + } + + console.log('\nErrors:'); + errors.forEach((error) => console.error(` - ${error}`)); +}; + +const parseCliArguments = (argv: string[]) => { + const [, , projectPath, orm, authProvider, databaseHost] = argv; + + return { + authProvider: authProvider ?? 'none', + databaseHost: databaseHost ?? 'none', + orm: orm ?? 'none', + projectPath + } as const; +}; + +const exitWithResult = (result: MySQLValidationResult) => { + console.log(`\nOverall: ${result.passed ? 'PASS' : 'FAIL'}`); + process.exit(result.passed ? 0 : 1); +}; + +const runFromCli = async () => { + const { authProvider, databaseHost, orm, projectPath } = parseCliArguments(process.argv); + + if (!projectPath) { + console.error( + 'Usage: bun run scripts/functional-tests/mysql-validator.ts [orm] [auth-provider] [database-host]' + ); + process.exit(1); + } + + try { + const result = await validateMySQLDatabase(projectPath, { authProvider, databaseHost, orm }); + logValidationSummary(result); + logWarnings(result.warnings); + logErrors(result.errors); + exitWithResult(result); + } catch (unknownError) { + const error = unknownError instanceof Error ? 
unknownError : new Error(String(unknownError)); + console.error('MySQL validation error:', error); + process.exit(1); + } +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('MySQL validation error:', error); + process.exit(1); + }); +} \ No newline at end of file diff --git a/scripts/functional-tests/postgresql-validator.ts b/scripts/functional-tests/postgresql-validator.ts new file mode 100644 index 0000000..9c259ab --- /dev/null +++ b/scripts/functional-tests/postgresql-validator.ts @@ -0,0 +1,433 @@ +/* + PostgreSQL Database Validator + Validates PostgreSQL database connections and functionality across all compatible configurations. + Tests PostgreSQL Docker setup, schema initialization, and query execution. +*/ + +import { spawn } from 'node:child_process'; +import { once } from 'node:events'; +import { copyFileSync, mkdirSync } from 'node:fs'; +import { join } from 'node:path'; +import process from 'node:process'; +import { + countHistoryTables, + initTemplates, + userTables +} from '../../src/generators/db/dockerInitTemplates'; + +const MILLISECONDS_PER_SECOND = 1_000; +const SECONDS_PER_MINUTE = 60; +const DB_SCRIPT_TIMEOUT_MS = 2 * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND; +const POSTGRES_READY_ATTEMPTS = 10; +const POSTGRES_READY_DELAY_MS = MILLISECONDS_PER_SECOND; +const DOCKER_WARNING_SNIPPET_LENGTH = 100; +const DOCKER_ERROR_SNIPPET_LENGTH = 200; +const READY_QUERY = "SELECT tablename FROM pg_tables WHERE schemaname = 'public';"; +const FORCE_KILL_DELAY_MS = 1_000; +const DOCKER_PROJECT_NAME = 'postgresql'; +const DOCKER_CACHE_DIR = join(process.cwd(), '.test-dependency-cache', 'docker', DOCKER_PROJECT_NAME); +const DOCKER_COMPOSE_FILENAME = 'docker-compose.db.yml'; + +let cachedBunModule: typeof import('bun') | null = null; + +const loadBunModule = async () => { + if (cachedBunModule === null) { + cachedBunModule = await import('bun'); + } + + return cachedBunModule; +}; + +type CommandResult = { + exitCode: number; + 
stderr: string; + stdout: string; + failedToSpawn?: boolean; + timedOut?: boolean; +}; + +const runCommand = async ( + command: string[], + options: { + cwd?: string; + env?: Record; + timeoutMs?: number; + } = {} +): Promise => { + const [executable, ...args] = command; + const { cwd, env, timeoutMs = DB_SCRIPT_TIMEOUT_MS } = options; + const stdoutChunks: string[] = []; + const stderrChunks: string[] = []; + const mergedEnv = env ? { ...process.env, ...env } : process.env; + let timedOut = false; + let child: ReturnType; + + try { + child = spawn(executable, args, { + cwd, + env: mergedEnv, + stdio: ['ignore', 'pipe', 'pipe'] + }); + } catch (unknownError) { + const error = unknownError instanceof Error ? unknownError.message : String(unknownError); + + return { + exitCode: -1, + failedToSpawn: true, + stderr: error, + stdout: '' + }; + } + + const timeoutId = setTimeout(() => { + timedOut = true; + child.kill('SIGTERM'); + setTimeout(() => child.kill('SIGKILL'), FORCE_KILL_DELAY_MS); + }, timeoutMs); + + child.stdout?.on('data', (chunk) => stdoutChunks.push(chunk.toString())); + child.stderr?.on('data', (chunk) => stderrChunks.push(chunk.toString())); + + const [code] = (await once(child, 'close')) as [number | null, string | null]; + clearTimeout(timeoutId); + + return { + exitCode: code ?? -1, + stderr: timedOut ? 
'Process timed out' : stderrChunks.join('').trim(), + stdout: stdoutChunks.join('').trim(), + timedOut + }; +}; + +const dockerComposeCommand = ( + dockerComposePath: string, + subcommand: string[], + env?: Record +) => + runCommand( + ['docker', 'compose', '-p', 'postgresql', '-f', dockerComposePath, ...subcommand], + { env } + ); + +const handleDockerUnavailable = (stderr: string, warnings: string[]) => { + warnings.push( + `Docker not available or requires elevated permissions; local PostgreSQL connection tests were skipped: ${stderr.slice(0, DOCKER_WARNING_SNIPPET_LENGTH)}` + ); +}; + +const getDockerStartErrors = (stderr: string, warnings: string[]) => { + const lowerStderr = stderr.toLowerCase(); + const requiresDockerAccess = stderr.includes('sudo') || lowerStderr.includes('docker'); + + if (requiresDockerAccess) { + handleDockerUnavailable(stderr, warnings); + + return []; + } + + return [`Failed to start Docker container: ${stderr.slice(0, DOCKER_ERROR_SNIPPET_LENGTH)}`]; +}; + +const waitForPostgresReady = async (dockerComposePath: string, attempt = 0) => { + if (attempt >= POSTGRES_READY_ATTEMPTS) { + return false; + } + + const readyResult = await dockerComposeCommand( + dockerComposePath, + ['exec', '-T', 'db', 'pg_isready', '-U', 'user', '-h', 'localhost'] + ); + + if (readyResult.exitCode === 0) { + return true; + } + + const bunModule = await loadBunModule(); + await bunModule.sleep(POSTGRES_READY_DELAY_MS); + + return waitForPostgresReady(dockerComposePath, attempt + 1); +}; + +type PostgresLocalResult = { + errors: string[]; + connectionWorks: boolean; + queriesWork: boolean; + warnings: string[]; +}; + +type DockerState = { + active: boolean; + composePath: string; +}; + +const dockerState: DockerState = { + active: false, + composePath: '' +}; + +const ensureSharedComposeFile = (sourceComposePath: string) => { + const targetDir = DOCKER_CACHE_DIR; + const targetPath = join(targetDir, DOCKER_COMPOSE_FILENAME); + + mkdirSync(targetDir, { 
recursive: true }); + copyFileSync(sourceComposePath, targetPath); + + dockerState.composePath = targetPath; + + return targetPath; +}; + +const isContainerRunning = async (composePath: string) => { + const result = await dockerComposeCommand( + composePath, + ['ps', '--status', 'running', '--services'] + ); + + if (result.exitCode !== 0) { + return false; + } + + return result.stdout + .split('\n') + .map((line) => line.trim()) + .some((line) => line === 'db'); +}; + +const runPostgresSeedScripts = async ( + seeds: readonly string[], + executeSeed: (seed: string) => Promise, + errors: string[] +) => { + let resultPromise = Promise.resolve(true); + + seeds.forEach((seed) => { + resultPromise = resultPromise.then(async (previousSucceeded) => { + if (!previousSucceeded) { + return false; + } + + const seedResult = await executeSeed(seed); + + if (seedResult.exitCode !== 0) { + errors.push( + `Failed to initialise PostgreSQL schema: ${seedResult.stderr.slice(0, DOCKER_ERROR_SNIPPET_LENGTH) || 'Unknown error'}` + ); + + return false; + } + + return true; + }); + }); + + return resultPromise; +}; + +const startDockerContainer = async ( + composePath: string, + warnings: string[], + errors: string[] +) => { + const upResult = await dockerComposeCommand(composePath, ['up', '-d', 'db']); + + if (upResult.exitCode !== 0) { + const startErrors = getDockerStartErrors(upResult.stderr || '', warnings); + + errors.push(...startErrors); + + return false; + } + + const { wait, cli } = initTemplates.postgresql; + + const seeds = [userTables.postgresql, countHistoryTables.postgresql] as const; + + const executeSeed = async (seed: string) => + dockerComposeCommand(composePath, [ + 'exec', + '-T', + 'db', + 'bash', + '-lc', + `${wait} && ${cli} "${seed}"` + ]); + + const seeded = await runPostgresSeedScripts(seeds, executeSeed, errors); + + if (!seeded) { + await dockerComposeCommand(composePath, ['down']).catch(() => undefined); + + return false; + } + + return true; +}; + +const 
stopManagedPostgresContainerInternal = async () => { + const { composePath } = dockerState; + + if (!composePath) { + dockerState.active = false; + dockerState.composePath = ''; + + return; + } + + dockerState.active = false; + dockerState.composePath = ''; + + await dockerComposeCommand(composePath, ['down']).catch(() => undefined); +}; + +export const stopManagedPostgresDocker = async () => { + await stopManagedPostgresContainerInternal().catch(() => undefined); +}; + +export const isPostgresDockerManaged = () => dockerState.active; + +const runPostgresQuery = async ( + dockerComposePath: string, + query: string +) => + dockerComposeCommand( + dockerComposePath, + ['exec', '-T', 'db', 'psql', '-U', 'user', '-d', 'database', '-c', query], + { PGPASSWORD: 'password' } + ); + +const validateLocalPostgres = async ( + _projectPath: string, + dockerComposePath: string, + authProvider?: string +): Promise => { + const errors: string[] = []; + const warnings: string[] = []; + let connectionWorks = false; + let queriesWork = false; + + const sharedComposePath = ensureSharedComposeFile(dockerComposePath); + let usingExistingContainer = dockerState.active; + + if (usingExistingContainer && !(await isContainerRunning(sharedComposePath))) { + dockerState.active = false; + usingExistingContainer = false; + } + + const startLabel = usingExistingContainer ? 'Reusing' : 'Starting'; + + process.stdout.write(` ${startLabel} Docker container... `); + const startTime = Date.now(); + + if ( + !usingExistingContainer && + !(await startDockerContainer(sharedComposePath, warnings, errors)) + ) { + console.log('✗'); + + return { connectionWorks, errors, queriesWork, warnings }; + } + + const ready = await waitForPostgresReady(sharedComposePath); + + if (!ready) { + errors.push('PostgreSQL container did not become ready within timeout'); + console.log('✗'); + + const stopAction = usingExistingContainer + ? 
stopManagedPostgresContainerInternal + : async () => { + await dockerComposeCommand(sharedComposePath, ['down']).catch(() => undefined); + }; + + await stopAction().catch(() => undefined); + + return { connectionWorks, errors, queriesWork, warnings }; + } + + const elapsedMs = Date.now() - startTime; + console.log(`✓ (${elapsedMs}ms)`); + + const connectionResult = await runPostgresQuery(sharedComposePath, READY_QUERY); + + if (connectionResult.exitCode !== 0) { + errors.push( + `Database connection test failed: ${connectionResult.stderr.slice(0, DOCKER_ERROR_SNIPPET_LENGTH) || 'Unknown error'}` + ); + await stopManagedPostgresContainerInternal().catch(() => undefined); + + return { connectionWorks, errors, queriesWork, warnings }; + } + + connectionWorks = true; + dockerState.active = true; + + const tablesOutput = connectionResult.stdout; + const expectsUsers = authProvider && authProvider !== 'none'; + const hasUsers = tablesOutput.includes('users'); + const hasCountHistory = tablesOutput.includes('count_history'); + const missingTable = expectsUsers ? !hasUsers : !hasCountHistory; + + if (missingTable) { + const requiredTable = expectsUsers ? 
'users' : 'count_history'; + errors.push(`${requiredTable} table not found in database`); + } else { + queriesWork = true; + } + + return { connectionWorks, errors, queriesWork, warnings }; +}; + +export type PostgreSQLValidationResult = { + connectionWorks: boolean; + errors: string[]; + passed: boolean; + queriesWork: boolean; + warnings: string[]; +}; + +export const validatePostgreSQLDatabase = async ( + projectPath: string, + config: { + authProvider?: string; + databaseHost?: string; + orm?: string; + } = {} + ): Promise => { + const errors: string[] = []; + const warnings: string[] = []; + let connectionWorks = false; + let queriesWork = false; + + const dbDir = join(projectPath, 'db'); + const dockerComposePath = join(dbDir, 'docker-compose.db.yml'); + + const isLocal = config.databaseHost === 'none' || !config.databaseHost; + const isNeon = config.databaseHost === 'neon'; + + if (isNeon) { + warnings.push('Neon remote database - skipping Docker compose check'); + } + + if (isLocal) { + const localResult = await validateLocalPostgres( + projectPath, + dockerComposePath, + config.authProvider + ); + + errors.push(...localResult.errors); + warnings.push(...localResult.warnings); + connectionWorks ||= localResult.connectionWorks; + queriesWork ||= localResult.queriesWork; + } + + if (isNeon) { + warnings.push('Neon remote database - skipping connection test (requires credentials)'); + connectionWorks = true; + queriesWork = true; + } + + const passed = errors.length === 0 && connectionWorks && queriesWork; + + return { connectionWorks, errors, passed, queriesWork, warnings }; + }; \ No newline at end of file diff --git a/scripts/functional-tests/react-validator.ts b/scripts/functional-tests/react-validator.ts new file mode 100644 index 0000000..d77b378 --- /dev/null +++ b/scripts/functional-tests/react-validator.ts @@ -0,0 +1,192 @@ +/* + React Framework Validator + Validates React-specific functionality across all backend combinations. 
+ Tests React rendering, hydration, and integration with different configurations. +*/ + +import process from 'node:process'; + +import { runFunctionalTests, type FunctionalTestResult } from './functional-test-runner'; + +export type ReactValidationResult = { + passed: boolean; + errors: string[]; + warnings: string[]; + functionalTestResults?: FunctionalTestResult; +}; + +type ValidatorOptions = { + skipDependencies?: boolean; + skipBuild?: boolean; + skipServer?: boolean; +}; + +type ValidatorConfig = { + databaseEngine?: string; + orm?: string; + authProvider?: string; + useTailwind?: boolean; + codeQualityTool?: string; + isMultiFrontend?: boolean; +}; + +const runFunctionalSuite = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn', + options: ValidatorOptions, + errors: string[], + warnings: string[] +) => { + const results = await runFunctionalTests(projectPath, packageManager, options).catch((unknownError) => { + const error = unknownError instanceof Error ? 
unknownError : new Error(String(unknownError)); + errors.push(`Functional tests failed: ${error.message}`); + + return undefined; + }); + + if (!results) { + return undefined; + } + + if (!results.passed) { + errors.push(...results.errors); + } + + if (results.warnings.length > 0) { + warnings.push(...results.warnings); + } + + return results; +}; + +export const validateReactFramework = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = 'bun', + _config: ValidatorConfig = {}, + options: ValidatorOptions = {} +): Promise => { + void _config; + const errors: string[] = []; + const warnings: string[] = []; + + const functionalTestResults = await runFunctionalSuite( + projectPath, + packageManager, + options, + errors, + warnings + ); + + const passed = errors.length === 0; + + return { + errors, + functionalTestResults, + passed, + warnings + }; +}; + +const parseCliArguments = () => { + const [, , projectPath, packageManagerArg, ...flags] = process.argv; + const packageManager = (packageManagerArg as 'bun' | 'npm' | 'pnpm' | 'yarn' | undefined) ?? 'bun'; + + const skipDependencies = flags.includes('--skip-deps'); + const skipBuild = flags.includes('--skip-build'); + const skipServer = flags.includes('--skip-server'); + + return { + packageManager, + projectPath, + skipBuild, + skipDependencies, + skipServer + } as const; +}; + +const logBuildSummary = (build?: FunctionalTestResult['results']['build']) => { + if (!build) { + return; + } + + console.log(` Build: ${build.passed ? '✓' : '✗'}`); + + if (typeof build.compileTime !== 'number') { + return; + } + + console.log(` Compile time: ${build.compileTime}ms`); +}; + +const logServerSummary = (server?: FunctionalTestResult['results']['server']) => { + if (!server) { + return; + } + + console.log(` Server: ${server.passed ? 
'✓' : '✗'}`);
};

// Prints the build/server sub-results collected by the functional suite,
// or nothing when the suite did not run.
const logFunctionalSummary = (functionalTestResults?: FunctionalTestResult) => {
	if (functionalTestResults === undefined) {
		return;
	}

	const { results } = functionalTestResults;

	console.log('\nFunctional Test Results:');
	logBuildSummary(results.build);
	logServerSummary(results.server);
};

// Prints each collected warning; silent when there are none.
const logWarnings = (warnings: string[]) => {
	if (warnings.length === 0) {
		return;
	}

	console.log('\nWarnings:');
	for (const warning of warnings) {
		console.warn(` ⚠ ${warning}`);
	}
};

// Reports the final verdict and terminates with the matching exit code.
const exitWithResult = (result: ReactValidationResult) => {
	if (!result.passed) {
		console.log('\n✗ React framework validation failed:');
		for (const error of result.errors) {
			console.error(` - ${error}`);
		}
		process.exit(1);
	}

	console.log('\n✓ React framework validation passed!');
	process.exit(0);
};

// CLI entry point: validates the arguments, runs the React validation
// suite, prints the summary, and exits with the outcome.
const runFromCli = async () => {
	const { packageManager, projectPath, skipBuild, skipDependencies, skipServer } = parseCliArguments();

	if (!projectPath) {
		console.error('Usage: bun run scripts/functional-tests/react-validator.ts [package-manager] [--skip-deps] [--skip-build] [--skip-server]');
		process.exit(1);
	}

	try {
		const result = await validateReactFramework(
			projectPath,
			packageManager,
			{},
			{ skipBuild, skipDependencies, skipServer }
		);

		console.log('\n=== React Framework Validation Results ===\n');
		logFunctionalSummary(result.functionalTestResults);
		logWarnings(result.warnings);
		exitWithResult(result);
	} catch (unknownError) {
		const error = unknownError instanceof Error ? 
unknownError : new Error(String(unknownError)); + console.error('✗ React framework validation error:', error); + process.exit(1); + } +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('✗ React validator encountered an unexpected error:', error); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/server-startup-validator.ts b/scripts/functional-tests/server-startup-validator.ts new file mode 100644 index 0000000..9c3d767 --- /dev/null +++ b/scripts/functional-tests/server-startup-validator.ts @@ -0,0 +1,160 @@ +/* + Server Startup Validator + Tests that scaffolded projects can compile and their server structure is valid. + For actual server startup testing, we validate compilation and basic structure. +*/ + +import { spawn } from 'node:child_process'; +import { once } from 'node:events'; +import process from 'node:process'; + +export type ServerStartupResult = { + passed: boolean; + errors: string[]; + warnings: string[]; + compileTime?: number; +}; + +const COMPILE_TIMEOUT_MS = 60_000; +const MAX_STDERR_LINES = 5; +const FORCE_KILL_DELAY_MS = 1_000; + +const runTypecheck = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' +) => { + const startTime = Date.now(); + const args: Record<'bun' | 'npm' | 'pnpm' | 'yarn', string[]> = { + bun: ['run', 'typecheck'], + npm: ['run', 'typecheck'], + pnpm: ['run', 'typecheck'], + yarn: ['run', 'typecheck'] + }; + + const stderrChunks: string[] = []; + const stdoutChunks: string[] = []; + let timedOut = false; + + const child = spawn(packageManager, args[packageManager], { + cwd: projectPath, + env: process.env, + stdio: ['ignore', 'pipe', 'pipe'] + }); + + const timeoutId = setTimeout(() => { + timedOut = true; + child.kill('SIGTERM'); + setTimeout(() => child.kill('SIGKILL'), FORCE_KILL_DELAY_MS); + }, COMPILE_TIMEOUT_MS); + + child.stdout?.on('data', (chunk) => { + stdoutChunks.push(chunk.toString()); + }); + child.stderr?.on('data', (chunk) => { + 
stderrChunks.push(chunk.toString()); + }); + + const [code, signal] = (await once(child, 'close')) as [number | null, string | null]; + clearTimeout(timeoutId); + + const compileTime = Date.now() - startTime; + const stderr = stderrChunks.join('').trim(); + const stdout = stdoutChunks.join('').trim(); + const previewSource = stderr.length > 0 ? stderr : stdout; + const preview = previewSource + .split('\n') + .filter((line) => line.trim().length > 0) + .slice(0, MAX_STDERR_LINES) + .join('; '); + + if (timedOut || signal === 'SIGTERM' || signal === 'SIGKILL') { + return { + compileTime, + errors: [`TypeScript compilation timed out after ${COMPILE_TIMEOUT_MS}ms`] + }; + } + + if (code === 0) { + return { compileTime, errors: [] }; + } + + const baseError = `TypeScript compilation failed (exit code ${code ?? 'unknown'})`; + const errors = preview.length > 0 ? [baseError, `Compilation output: ${preview}`] : [baseError]; + + return { compileTime, errors }; +}; + +export const validateServerStartup = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = 'bun' +): Promise => { + const errors: string[] = []; + const warnings: string[] = []; + + const { compileTime, errors: typecheckErrors } = await runTypecheck(projectPath, packageManager); + + if (typecheckErrors.length > 0) { + errors.push(...typecheckErrors); + } + + return { + compileTime, + errors, + passed: errors.length === 0, + warnings + }; +}; + +const parseCliArguments = () => { + const [, , projectPath, packageManagerArg] = process.argv; + const normalized = packageManagerArg as 'bun' | 'npm' | 'pnpm' | 'yarn' | undefined; + + return { + packageManager: normalized ?? 
'bun', + projectPath + } as const; +}; + +const exitWithUsage = () => { + console.error('Usage: bun run scripts/functional-tests/server-startup-validator.ts [package-manager]'); + process.exit(1); +}; + +const runFromCli = async () => { + const { packageManager, projectPath } = parseCliArguments(); + + if (!projectPath) { + exitWithUsage(); + } + + const result = await validateServerStartup(projectPath, packageManager).catch((unknownError) => { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + console.error('✗ Server startup validation error:', error); + process.exit(1); + }); + + if (!result) { + return; + } + + if (!result.passed) { + console.error('✗ Server startup validation failed:'); + result.errors.forEach((error) => console.error(` - ${error}`)); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); + process.exit(1); + } + + console.log('✓ Server startup validation passed'); + if (typeof result.compileTime === 'number') { + console.log(` Compilation time: ${result.compileTime}ms`); + } + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); + process.exit(0); +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('✗ Server startup validator encountered an unexpected error:', error); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/sqlite-validator.ts b/scripts/functional-tests/sqlite-validator.ts new file mode 100644 index 0000000..4b69a4e --- /dev/null +++ b/scripts/functional-tests/sqlite-validator.ts @@ -0,0 +1,252 @@ +import { spawn } from 'node:child_process'; +import { once } from 'node:events'; +import { join } from 'node:path'; +import process from 'node:process'; +import { setTimeout as delay } from 'node:timers/promises'; + +const MILLISECONDS_PER_SECOND = 1_000; +const SQLITE_TIMEOUT_SECONDS = 5; +const SQLITE_TIMEOUT_MS = SQLITE_TIMEOUT_SECONDS * MILLISECONDS_PER_SECOND; +const FORCE_KILL_DELAY_MS = 1_000; + +const 
terminateChildProcess = (child: ReturnType) => { + try { + child.kill('SIGTERM'); + setTimeout(() => child.kill('SIGKILL'), FORCE_KILL_DELAY_MS); + } catch { + // Ignore kill errors; the process may already have exited. + } +}; + +const runSqliteCommand = async (databaseFile: string, query: string) => { + let child: ReturnType; + + try { + child = spawn('sqlite3', [databaseFile, query], { + stdio: ['ignore', 'pipe', 'pipe'] + }); + } catch (unknownError) { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + + return { + exitCode: -1, + failedToSpawn: true, + stderr: error.message, + stdout: '' + }; + } + + const stdoutChunks: string[] = []; + const stderrChunks: string[] = []; + child.stdout?.on('data', (chunk) => stdoutChunks.push(chunk.toString())); + child.stderr?.on('data', (chunk) => stderrChunks.push(chunk.toString())); + + const closePromise = once(child, 'close') as Promise<[number | null, string | null]>; + const errorPromise = once(child, 'error').then(([error]) => ({ + error: error instanceof Error ? error : new Error(String(error)), + kind: 'error' as const + })); + const timeoutPromise = delay(SQLITE_TIMEOUT_MS).then(() => ({ kind: 'timeout' as const })); + + const outcome = await Promise.race([ + closePromise.then(([code]) => ({ code, kind: 'close' as const })), + errorPromise, + timeoutPromise + ]); + + if (outcome.kind === 'timeout') { + terminateChildProcess(child); + await closePromise.catch(() => undefined); + + return null; + } + + if (outcome.kind === 'error') { + return { + exitCode: -1, + failedToSpawn: true, + stderr: outcome.error.message, + stdout: '' + }; + } + + return { + exitCode: outcome.code ?? -1, + stderr: stderrChunks.join('').trim(), + stdout: stdoutChunks.join('').trim() + }; +}; + +const determineTableName = (authProvider?: string) => + authProvider && authProvider !== 'none' ? 
'users' : 'count_history'; + +export type SQLiteValidationResult = { + errors: string[]; + passed: boolean; + sqliteSpecific: { + connectionWorks: boolean; + queriesWork: boolean; + }; + warnings: string[]; +}; + +type TableCheckResult = { + errors: string[]; + flags: SQLiteValidationResult['sqliteSpecific']; + warnings: string[]; +}; + +const validateLocalDatabaseTables = async ( + databaseFile: string, + authProvider?: string +): Promise => { + const errors: string[] = []; + const warnings: string[] = []; + const flags: SQLiteValidationResult['sqliteSpecific'] = { + connectionWorks: false, + queriesWork: false + }; + const tableName = determineTableName(authProvider); + const tableResult = await runSqliteCommand( + databaseFile, + `SELECT name FROM sqlite_master WHERE type='table' AND name='${tableName}';` + ); + + if (tableResult === null) { + errors.push('Database connection test timed out'); + + return { errors, flags, warnings }; + } + + if (tableResult.failedToSpawn) { + errors.push( + `sqlite3 command unavailable: ${tableResult.stderr || 'Executable not found'}` + ); + + return { errors, flags, warnings }; + } + + if (tableResult.exitCode !== 0) { + errors.push(`Database connection test failed: ${tableResult.stderr || 'Unknown error'}`); + + return { errors, flags, warnings }; + } + + flags.connectionWorks = true; + + if (!tableResult.stdout.includes(tableName)) { + errors.push(`${tableName} table not found in database (runtime query returned no rows)`); + + return { errors, flags, warnings }; + } + + flags.queriesWork = true; + + return { errors, flags, warnings }; +}; + +export const validateSQLiteDatabase = async ( + projectPath: string, + config: { + authProvider?: string; + databaseHost?: string; + orm?: string; + } = {} +): Promise => { + const errors: string[] = []; + const warnings: string[] = []; + const sqliteSpecific: SQLiteValidationResult['sqliteSpecific'] = { + connectionWorks: false, + queriesWork: false + }; + + const isLocal = 
config.databaseHost === 'none' || !config.databaseHost; + const dbDir = join(projectPath, 'db'); + const databaseFile = join(dbDir, 'database.sqlite'); + + if (isLocal) { + const localResult = await validateLocalDatabaseTables(databaseFile, config.authProvider); + errors.push(...localResult.errors); + warnings.push(...localResult.warnings); + sqliteSpecific.connectionWorks = localResult.flags.connectionWorks; + sqliteSpecific.queriesWork = localResult.flags.queriesWork; + } else if (config.databaseHost === 'turso') { + warnings.push('Turso remote database - skipping local file and query checks'); + sqliteSpecific.connectionWorks = true; + sqliteSpecific.queriesWork = true; + } + + const passed = + errors.length === 0 && sqliteSpecific.connectionWorks && sqliteSpecific.queriesWork; + + return { errors, passed, sqliteSpecific, warnings }; +}; + +const logSQLiteSummary = (result: SQLiteValidationResult) => { + console.log('\n=== SQLite Database Validation Results ===\n'); + console.log('SQLite-Specific Checks:'); + console.log(` Connection Works: ${result.sqliteSpecific.connectionWorks ? '✓' : '✗'}`); + console.log(` Queries Work: ${result.sqliteSpecific.queriesWork ? '✓' : '✗'}`); +}; + +const logWarnings = (warnings: string[]) => { + if (warnings.length === 0) { + return; + } + + console.log('\nWarnings:'); + warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +const logErrors = (errors: string[]) => { + if (errors.length === 0) { + return; + } + + console.log('\nErrors:'); + errors.forEach((error) => console.error(` - ${error}`)); +}; + +const parseCliArguments = (argv: string[]) => { + const [, , projectPath, orm, authProvider, databaseHost] = argv; + + return { + authProvider: authProvider ?? 'none', + databaseHost: databaseHost ?? 'none', + orm: orm ?? 'none', + projectPath + } as const; +}; + +const exitWithResult = (result: SQLiteValidationResult) => { + console.log(`\nOverall: ${result.passed ? 
'PASS' : 'FAIL'}`); + process.exit(result.passed ? 0 : 1); +}; + +const runFromCli = async () => { + const { authProvider, databaseHost, orm, projectPath } = parseCliArguments(process.argv); + + if (!projectPath) { + console.error('Usage: bun run scripts/functional-tests/sqlite-validator.ts [orm] [auth-provider] [database-host]'); + process.exit(1); + } + + try { + const result = await validateSQLiteDatabase(projectPath, { authProvider, databaseHost, orm }); + logSQLiteSummary(result); + logWarnings(result.warnings); + logErrors(result.errors); + exitWithResult(result); + } catch (unknownError) { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + console.error('SQLite validation error:', error); + process.exit(1); + } +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('SQLite validation error:', error); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/svelte-validator.ts b/scripts/functional-tests/svelte-validator.ts new file mode 100644 index 0000000..6209935 --- /dev/null +++ b/scripts/functional-tests/svelte-validator.ts @@ -0,0 +1,189 @@ +/* + Svelte Framework Validator + Executes the functional test suite for Svelte scaffold combinations. 
+*/ + +import process from 'node:process'; + +import { runFunctionalTests, type FunctionalTestResult } from './functional-test-runner'; + +export type SvelteValidationResult = { + passed: boolean; + errors: string[]; + warnings: string[]; + functionalTestResults?: FunctionalTestResult; +}; + +type ValidatorOptions = { + skipDependencies?: boolean; + skipBuild?: boolean; + skipServer?: boolean; +}; + +type ValidatorConfig = { + databaseEngine?: string; + orm?: string; + authProvider?: string; + useTailwind?: boolean; + codeQualityTool?: string; + isMultiFrontend?: boolean; +}; + +const runFunctionalSuite = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn', + options: ValidatorOptions, + errors: string[], + warnings: string[] +) => { + const results = await runFunctionalTests(projectPath, packageManager, options).catch((unknownError) => { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + errors.push(`Functional tests failed: ${error.message}`); + + return undefined; + }); + + if (!results) { + return undefined; + } + + if (!results.passed) { + errors.push(...results.errors); + } + + if (results.warnings.length > 0) { + warnings.push(...results.warnings); + } + + return results; +}; + +export const validateSvelteFramework = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = 'bun', + _config: ValidatorConfig = {}, + options: ValidatorOptions = {} +): Promise => { + void _config; + const errors: string[] = []; + const warnings: string[] = []; + + const functionalTestResults = await runFunctionalSuite( + projectPath, + packageManager, + options, + errors, + warnings + ); + + const passed = errors.length === 0; + + return { + errors, + functionalTestResults, + passed, + warnings + }; +}; + +const parseCliArguments = () => { + const [, , projectPath, packageManagerArg, ...flags] = process.argv; + const packageManager = (packageManagerArg as 'bun' | 'npm' | 'pnpm' 
| 'yarn' | undefined) ?? 'bun'; + + const skipDependencies = flags.includes('--skip-deps'); + const skipBuild = flags.includes('--skip-build'); + const skipServer = flags.includes('--skip-server'); + + return { + packageManager, + projectPath, + skipBuild, + skipDependencies, + skipServer + } as const; +}; + +const logBuildSummary = (build?: FunctionalTestResult['results']['build']) => { + if (!build) { + return; + } + + console.log(` Build: ${build.passed ? '✓' : '✗'}`); + + if (typeof build.compileTime === 'number') { + console.log(` Compile time: ${build.compileTime}ms`); + } +}; + +const logServerSummary = (server?: FunctionalTestResult['results']['server']) => { + if (!server) { + return; + } + + console.log(` Server: ${server.passed ? '✓' : '✗'}`); +}; + +const logFunctionalSummary = (functionalTestResults?: FunctionalTestResult) => { + if (!functionalTestResults) { + return; + } + + console.log('\nFunctional Test Results:'); + const { results } = functionalTestResults; + logBuildSummary(results.build); + logServerSummary(results.server); +}; + +const logWarnings = (warnings: string[]) => { + if (warnings.length === 0) { + return; + } + + console.log('\nWarnings:'); + warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +const exitWithResult = (result: SvelteValidationResult) => { + if (result.passed) { + console.log('\n✓ Svelte framework validation passed!'); + process.exit(0); + } + + console.log('\n✗ Svelte framework validation failed:'); + result.errors.forEach((error) => console.error(` - ${error}`)); + process.exit(1); +}; + +const runFromCli = async () => { + const { packageManager, projectPath, skipBuild, skipDependencies, skipServer } = parseCliArguments(); + + if (!projectPath) { + console.error('Usage: bun run scripts/functional-tests/svelte-validator.ts [package-manager] [--skip-deps] [--skip-build] [--skip-server]'); + process.exit(1); + } + + try { + const result = await validateSvelteFramework( + projectPath, + packageManager, + 
{}, + { skipBuild, skipDependencies, skipServer } + ); + + console.log('\n=== Svelte Framework Validation Results ===\n'); + logFunctionalSummary(result.functionalTestResults); + logWarnings(result.warnings); + exitWithResult(result); + } catch (unknownError) { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + console.error('✗ Svelte framework validation error:', error); + process.exit(1); + } +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('✗ Svelte validator encountered an unexpected error:', error); + process.exit(1); + }); +} diff --git a/scripts/functional-tests/test-cli-registry.ts b/scripts/functional-tests/test-cli-registry.ts new file mode 100644 index 0000000..b4ff864 --- /dev/null +++ b/scripts/functional-tests/test-cli-registry.ts @@ -0,0 +1,250 @@ +export type SuiteGroup = 'core' | 'framework' | 'database' | 'cloud' | 'auth'; + +export type FunctionalRunnerMetadata = { + args?: string[]; + runnerType?: 'bun-run' | 'bun-test'; + script: string; +}; + +export type BehaviouralRunnerMetadata = { + testFiles: string[]; +}; + +export type SuiteDefinition = { + databases?: string[]; + description: string; + frameworks?: string[]; + group: SuiteGroup; + label: string; + name: string; + providers?: string[]; + runners: { + behavioural?: BehaviouralRunnerMetadata; + functional: FunctionalRunnerMetadata; + }; +}; + +const normalise = (value: string) => value.toLowerCase(); + +export const SUITE_REGISTRY: SuiteDefinition[] = [ + { + description: 'Runs dependency, build, and server validators sequentially.', + group: 'core', + label: 'Functional core', + name: 'functional', + runners: { + functional: { + args: ['absolutejs-project', 'bun'], + script: 'scripts/functional-tests/functional-test-runner.ts' + } + } + }, + { + description: 'Validates the scaffolded server boots successfully.', + group: 'core', + label: 'Server validator', + name: 'server', + runners: { + functional: { + 
script: 'scripts/functional-tests/server-startup-validator.ts' + } + } + }, + { + description: 'Checks the build pipeline compiles without errors.', + group: 'core', + label: 'Build validator', + name: 'build', + runners: { + functional: { + script: 'scripts/functional-tests/build-validator.ts' + } + } + }, + { + description: 'Ensures dependency installation succeeds.', + group: 'core', + label: 'Dependency installer', + name: 'deps', + runners: { + functional: { + script: 'scripts/functional-tests/dependency-installer-tester.ts' + } + } + }, + { + description: 'Runs the full React matrix.', + frameworks: ['react'], + group: 'framework', + label: 'React suite', + name: 'react', + runners: { + functional: { + runnerType: 'bun-test', + script: 'tests/functional/frameworks/react.test.ts' + } + } + }, + { + description: 'Runs the full Vue matrix.', + frameworks: ['vue'], + group: 'framework', + label: 'Vue suite', + name: 'vue', + runners: { + functional: { + runnerType: 'bun-test', + script: 'tests/functional/frameworks/vue.test.ts' + } + } + }, + { + description: 'Runs the full Svelte matrix.', + frameworks: ['svelte'], + group: 'framework', + label: 'Svelte suite', + name: 'svelte', + runners: { + functional: { + runnerType: 'bun-test', + script: 'tests/functional/frameworks/svelte.test.ts' + } + } + }, + { + description: 'Runs the HTML framework matrix.', + frameworks: ['html'], + group: 'framework', + label: 'HTML suite', + name: 'html', + runners: { + functional: { + runnerType: 'bun-test', + script: 'tests/functional/frameworks/html.test.ts' + } + } + }, + { + description: 'Runs the HTMX framework matrix.', + frameworks: ['htmx'], + group: 'framework', + label: 'HTMX suite', + name: 'htmx', + runners: { + functional: { + runnerType: 'bun-test', + script: 'tests/functional/frameworks/htmx.test.ts' + } + } + }, + { + databases: ['sqlite'], + description: 'Runs SQLite database validations (local + Turso).', + group: 'database', + label: 'SQLite suite', + name: 
'sqlite', + runners: { + behavioural: { + testFiles: ['tests/behavioural/database-matrix.test.ts'] + }, + functional: { + runnerType: 'bun-test', + script: 'tests/functional/databases/sqlite.test.ts' + } + } + }, + { + databases: ['postgresql'], + description: 'Runs PostgreSQL database validations (Neon/local).', + group: 'database', + label: 'PostgreSQL suite', + name: 'postgresql', + runners: { + behavioural: { + testFiles: ['tests/behavioural/database-matrix.test.ts'] + }, + functional: { + runnerType: 'bun-test', + script: 'tests/functional/databases/postgresql.test.ts' + } + } + }, + { + databases: ['mysql'], + description: 'Runs MySQL database validations (PlanetScale/local).', + group: 'database', + label: 'MySQL suite', + name: 'mysql', + runners: { + behavioural: { + testFiles: ['tests/behavioural/database-matrix.test.ts'] + }, + functional: { + runnerType: 'bun-test', + script: 'tests/functional/databases/mysql.test.ts' + } + } + }, + { + databases: ['mongodb'], + description: 'Runs MongoDB database validations.', + group: 'database', + label: 'MongoDB suite', + name: 'mongodb', + runners: { + behavioural: { + testFiles: ['tests/behavioural/database-matrix.test.ts'] + }, + functional: { + runnerType: 'bun-test', + script: 'tests/functional/databases/mongodb.test.ts' + } + } + }, + { + description: 'Runs supported cloud provider combinations.', + group: 'cloud', + label: 'Cloud providers', + name: 'cloud', + providers: ['neon', 'turso'], + runners: { + behavioural: { + testFiles: ['tests/behavioural/cloud-matrix.test.ts'] + }, + functional: { + runnerType: 'bun-test', + script: 'tests/functional/cloud.test.ts' + } + } + }, + { + description: 'Runs absoluteAuth matrix validations.', + group: 'auth', + label: 'Auth suite', + name: 'auth', + runners: { + behavioural: { + testFiles: ['tests/behavioural/auth-matrix.test.ts'] + }, + functional: { + runnerType: 'bun-test', + script: 'tests/functional/auth.test.ts' + } + } + } +]; + +export const SUITE_MAP = new 
Map( + SUITE_REGISTRY.map((definition) => [definition.name, definition]) +); + +const collectUnique = (selector: (suite: SuiteDefinition) => string[] | undefined) => + new Set( + SUITE_REGISTRY.flatMap((suite) => selector(suite) ?? []).map(normalise) + ); + +export const KNOWN_FRAMEWORKS = collectUnique((suite) => suite.frameworks); +export const KNOWN_DATABASES = collectUnique((suite) => suite.databases); +export const KNOWN_PROVIDERS = collectUnique((suite) => suite.providers); + + diff --git a/scripts/functional-tests/test-cli.ts b/scripts/functional-tests/test-cli.ts new file mode 100644 index 0000000..bffe6ed --- /dev/null +++ b/scripts/functional-tests/test-cli.ts @@ -0,0 +1,623 @@ +import { existsSync, rmSync } from 'node:fs'; +import process from 'node:process'; +import { cleanupCache } from './dependency-cache'; +import { + KNOWN_DATABASES, + KNOWN_FRAMEWORKS, + KNOWN_PROVIDERS, + SUITE_MAP, + SUITE_REGISTRY, + type SuiteDefinition +} from './test-cli-registry'; +import { cleanupProjectDirectory } from './test-utils'; + +type CliOptions = { + all: boolean; + ciMode: boolean; + clean: boolean; + databaseFilters: string[]; + dryRun: boolean; + frameworkFilters: string[]; + help: boolean; + includeAuth: boolean; + includeCloud: boolean; + list: boolean; + providers: string[]; + runBehavioural: boolean; + runFunctional: boolean; + suites: string[]; +}; + +type CommandOptions = { + env?: Record; + stderr?: 'inherit' | 'pipe'; + stdin?: 'inherit' | 'ignore'; + stdout?: 'inherit' | 'pipe'; +}; + +type SuiteRunMode = 'functional' | 'behavioural'; + +type SuiteExecutionPlan = { + mode: SuiteRunMode; + skipReason?: string; + suite: SuiteDefinition; +}; + +type SuiteExecution = { + duration: number; + exitCode: number; + label: string; + mode: SuiteRunMode; + name: string; + skipReason?: string; + skipped: boolean; +}; + +let cachedBunModule: typeof import('bun') | null = null; + +const loadBunModule = async () => { + if (cachedBunModule === null) { + cachedBunModule 
= await import('bun'); + } + + return cachedBunModule; +}; + +const runCommand = async ( + command: string[], + options: CommandOptions = {} +) => { + const bunModule = await loadBunModule(); + const processHandle = bunModule.spawn({ + cmd: command, + env: options.env, + stderr: options.stderr ?? 'inherit', + stdin: options.stdin ?? 'inherit', + stdout: options.stdout ?? 'inherit' + }); + + await processHandle.exited; + + return { exitCode: processHandle.exitCode ?? 0 }; +}; + +const printHelp = () => { + console.log(`Usage: bun run test:cli [options] + +Run AbsoluteJS validation suites from a single command. + +Examples: + bun run test:cli --suite functional + bun run test:cli --framework react --database sqlite + bun run test:cli --all + +Options: + -h, --help Show this help text and exit + --list List available suites and exit + --suite Select suites to run (repeatable, comma-separated) + --framework Filter or add framework suites (react, vue, svelte, html, htmx) + --database Filter or add database suites (sqlite, postgresql, mysql, mongodb) + --auth Include the absoluteAuth suite + --cloud Include cloud provider suites + --behavioural Run behavioural specs for selected suites (disables functional unless --functional is also set) + --functional Run functional harnesses (default behaviour) + --provider Filter cloud providers (neon, turso). Implies --cloud + --all Run every available suite + --clean Run cleanup tasks and exit + --ci Optimise output for CI environments + --dry-run Print the commands that would be executed, then exit + +Notes: + · Framework and database filters auto-include their corresponding suites. + · When combined with --suite, filters apply only to matching suite types. 
+`); +}; + +const printSuites = () => { + console.log('Available suites:\n'); + + SUITE_REGISTRY.forEach((suite) => { + const extras: string[] = []; + + if (suite.frameworks) { + extras.push(`frameworks: ${suite.frameworks.join(', ')}`); + } + + if (suite.databases) { + extras.push(`databases: ${suite.databases.join(', ')}`); + } + + if (suite.providers) { + extras.push(`providers: ${suite.providers.join(', ')}`); + } + + const suffix = extras.length > 0 ? ` (${extras.join('; ')})` : ''; + console.log(`- ${suite.name}: ${suite.label}${suffix}\n ${suite.description}`); + }); +}; + +const collectListValues = ( + argv: string[], + currentIndex: number, + flag: string +) => { + const value = argv[currentIndex + 1]; + + if (!value) { + throw new Error(`${flag} requires a value`); + } + + const values = value + .split(',') + .map((item) => item.trim()) + .filter(Boolean); + + return { nextIndex: currentIndex + 1, values }; +}; + +const applyListOption = ( + argv: string[], + currentIndex: number, + flag: string, + target: string[], + afterApply?: () => void +) => { + const { nextIndex, values } = collectListValues(argv, currentIndex, flag); + target.push(...values); + + if (afterApply) { + afterApply(); + } + + return nextIndex; +}; + +export const parseArgs = (argv: string[]) => { + const options: CliOptions = { + all: false, + ciMode: false, + clean: false, + databaseFilters: [], + dryRun: false, + frameworkFilters: [], + help: false, + includeAuth: false, + includeCloud: false, + list: false, + providers: [], + runBehavioural: false, + runFunctional: true, + suites: [] + }; + + for (let index = 0; index < argv.length; index += 1) { + const arg = argv[index]; + + switch (arg) { + case '-h': + case '--help': + options.help = true; + break; + case '--list': + options.list = true; + break; + case '--dry-run': + options.dryRun = true; + break; + case '--clean': + options.clean = true; + break; + case '--ci': + options.ciMode = true; + break; + case '--all': + options.all = 
true; + break; + case '--auth': + options.includeAuth = true; + break; + case '--cloud': + options.includeCloud = true; + break; + case '--behavioural': + options.runBehavioural = true; + options.runFunctional = false; + break; + case '--functional': + options.runFunctional = true; + break; + case '--suite': + index = applyListOption(argv, index, '--suite', options.suites); + break; + case '--framework': + index = applyListOption(argv, index, '--framework', options.frameworkFilters); + break; + case '--database': + index = applyListOption(argv, index, '--database', options.databaseFilters); + break; + case '--provider': + index = applyListOption(argv, index, '--provider', options.providers, () => { + options.includeCloud = true; + }); + break; + default: + throw new Error(`Unknown option: ${arg}`); + } + } + + options.providers.forEach((provider) => { + const normalisedProvider = provider.toLowerCase(); + + if (!KNOWN_PROVIDERS.has(normalisedProvider)) { + throw new Error(`Unknown provider: ${provider}`); + } + }); + + return options; +}; + +const normaliseValue = (value: string) => value.toLowerCase(); + +const shouldIncludeSuite = ( + suite: SuiteDefinition, + frameworkFilterSet: Set, + databaseFilterSet: Set +) => { + if (!suite) { + return false; + } + + if (suite.group === 'framework' && frameworkFilterSet.size > 0) { + return suite.frameworks?.some((framework) => frameworkFilterSet.has(normaliseValue(framework))) ?? false; + } + + if (suite.group === 'database' && databaseFilterSet.size > 0) { + return suite.databases?.some((database) => databaseFilterSet.has(normaliseValue(database))) ?? 
false; + } + + return true; +}; + +export const buildSuiteQueue = (options: CliOptions) => { + const orderedSuites: string[] = []; + const seen = new Set(); + + const addSuite = (candidate: string) => { + const name = normaliseValue(candidate); + + if (!SUITE_MAP.has(name)) { + throw new Error(`Unknown suite: ${candidate}`); + } + + if (!seen.has(name)) { + orderedSuites.push(name); + seen.add(name); + } + }; + + if (options.all) { + SUITE_REGISTRY.forEach((suite) => addSuite(suite.name)); + } + + options.suites.forEach(addSuite); + + options.frameworkFilters.forEach((framework) => { + const name = normaliseValue(framework); + + if (!KNOWN_FRAMEWORKS.has(name)) { + throw new Error(`Unknown framework: ${framework}`); + } + + const suite = SUITE_REGISTRY.find( + (definition) => definition.group === 'framework' && definition.frameworks?.includes(name) + ); + + if (suite) { + addSuite(suite.name); + } + }); + + options.databaseFilters.forEach((database) => { + const name = normaliseValue(database); + + if (!KNOWN_DATABASES.has(name)) { + throw new Error(`Unknown database: ${database}`); + } + + const suite = SUITE_REGISTRY.find( + (definition) => definition.group === 'database' && definition.databases?.includes(name) + ); + + if (suite) { + addSuite(suite.name); + } + }); + + if (options.includeAuth) { + addSuite('auth'); + } + + if (options.includeCloud) { + addSuite('cloud'); + } + + if (!options.all && orderedSuites.length === 0 && options.runFunctional) { + addSuite('functional'); + } + + const frameworkFilterSet = new Set(options.frameworkFilters.map(normaliseValue)); + const databaseFilterSet = new Set(options.databaseFilters.map(normaliseValue)); + + return orderedSuites.filter((suiteName) => { + const suite = SUITE_MAP.get(suiteName); + + return shouldIncludeSuite(suite, frameworkFilterSet, databaseFilterSet); + }); +}; + +const buildExecutionPlan = (suiteNames: string[], options: CliOptions) => + suiteNames.flatMap((suiteName) => { + const suite = 
SUITE_MAP.get(suiteName); + + if (!suite) { + return []; + } + + const runs: SuiteExecutionPlan[] = []; + + if (options.runFunctional) { + runs.push({ mode: 'functional', suite }); + } + + if (options.runBehavioural) { + const { behavioural } = suite.runners; + runs.push( + behavioural + ? { mode: 'behavioural', suite } + : { + mode: 'behavioural', + skipReason: 'Behavioural runner not defined for this suite.', + suite + } + ); + } + + return runs; + }); + +const removePath = (targetPath: string) => { + if (existsSync(targetPath)) { + rmSync(targetPath, { force: true, recursive: true }); + } +}; + +const runCleanup = () => { + console.log('Cleaning generated projects and dependency cache...'); + cleanupProjectDirectory('absolutejs-project'); + removePath('.test-dependency-cache'); + cleanupCache(); + console.log('Cleanup complete.'); +}; + +const formatRunLabel = (suite: SuiteDefinition, mode: SuiteRunMode) => + `${suite.label} [${mode}]`; + +const formatDryRunCommand = (plan: SuiteExecutionPlan, providerEnv?: string) => { + const label = formatRunLabel(plan.suite, plan.mode); + + if (plan.skipReason) { + return `• (skip) ${label} – ${plan.skipReason}`; + } + + if (plan.mode === 'functional') { + const args = plan.suite.runners.functional.args?.length + ? ` ${plan.suite.runners.functional.args.join(' ')}` + : ''; + const runnerType = plan.suite.runners.functional.runnerType ?? 'bun-run'; + const envNote = + plan.suite.name === 'cloud' && providerEnv + ? ` (ABSOLUTE_CLOUD_PROVIDERS=${providerEnv})` + : ''; + + const commandPrefix = runnerType === 'bun-test' ? 
'bun test' : 'bun run'; + + return `• ${commandPrefix} ${plan.suite.runners.functional.script}${args}${envNote}`; + } + + const { behavioural } = plan.suite.runners; + if (!behavioural || behavioural.testFiles.length === 0) { + return `• (skip) ${label} – behavioural runner not configured`; + } + + const { testFiles } = behavioural; + const files = testFiles.join(' '); + const envNote = + plan.suite.name === 'cloud' && providerEnv + ? ` (ABSOLUTE_CLOUD_PROVIDERS=${providerEnv})` + : ''; + + return `• bun test ${files}${envNote}`; +}; + +const printDryRun = (plan: SuiteExecutionPlan[], providerEnv?: string) => { + console.log('Dry run — commands to execute:\n'); + plan.forEach((planItem) => console.log(formatDryRunCommand(planItem, providerEnv))); + console.log('\nNo commands were executed.'); +}; + +export const runSuites = async (suiteNames: string[], options: CliOptions) => { + const executionPlan = buildExecutionPlan(suiteNames, options); + if (executionPlan.length === 0) { + console.log('No suite runs selected; nothing to run.'); + + return 0; + } + + const planCount = executionPlan.length; + const providerFilter = options.providers.map(normaliseValue); + const providerEnv = providerFilter.length > 0 ? providerFilter.join(',') : undefined; + + if (options.dryRun) { + printDryRun(executionPlan, providerEnv); + + return 0; + } + + const results: SuiteExecution[] = []; + let overallExitCode = 0; + await executionPlan.reduce(async (chain, planItem, index) => { + await chain; + const result = await executeSuitePlan(planItem, index, planCount, options, providerEnv); + results.push(result); + overallExitCode = + !result.skipped && result.exitCode !== 0 ? 
result.exitCode : overallExitCode; + }, Promise.resolve()); + + const passedCount = results.filter((result) => !result.skipped && result.exitCode === 0).length; + const skippedCount = results.filter((result) => result.skipped).length; + const failedCount = results.length - passedCount - skippedCount; + + console.log('\n=== Summary ===\n'); + results.forEach((result) => { + if (result.skipped) { + console.log( + `⚠ ${result.label} – skipped${result.skipReason ? ` (${result.skipReason})` : ''}` + ); + } else { + const status = result.exitCode === 0 ? 'passed' : `failed (exit ${result.exitCode})`; + console.log(`• ${result.label} – ${status} (${result.duration}ms)`); + } + }); + console.log(`\nTotal suites: ${results.length}`); + console.log(`Passed: ${passedCount}`); + console.log(`Failed: ${failedCount}`); + console.log(`Skipped: ${skippedCount}`); + + return overallExitCode; +}; + +const executeSuitePlan = async ( + plan: SuiteExecutionPlan, + index: number, + total: number, + options: CliOptions, + providerEnv?: string +): Promise => { + const { suite, mode } = plan; + const label = formatRunLabel(suite, mode); + const ordinal = `[${index + 1}/${total}]`; + + if (plan.skipReason) { + console.log(`${ordinal} ⚠ Skipping ${suite.label} (${suite.name}) [${mode}] – ${plan.skipReason}`); + + return { + duration: 0, + exitCode: 0, + label, + mode, + name: suite.name, + skipped: true, + skipReason: plan.skipReason + }; + } + + const behaviouralRunner = suite.runners.behavioural; + + if (mode === 'behavioural' && (!behaviouralRunner || behaviouralRunner.testFiles.length === 0)) { + const reason = 'Behavioural runner configuration missing.'; + console.log(`${ordinal} ⚠ Skipping ${suite.label} (${suite.name}) [behavioural] – ${reason}`); + + return { + duration: 0, + exitCode: 0, + label, + mode, + name: suite.name, + skipped: true, + skipReason: reason + }; + } + + console.log(`${ordinal} Running ${suite.label} (${suite.name}) [${mode}]`); + const start = Date.now(); + 
const env: Record = { ...process.env } as Record; + + if (options.ciMode) { + env.CI = env.CI ?? '1'; + env.ABSOLUTE_TEST_CI = '1'; + } + + if (suite.name === 'cloud' && providerEnv) { + env.ABSOLUTE_CLOUD_PROVIDERS = providerEnv; + } + + if (plan.mode === 'behavioural' && suite.group === 'database') { + env.ABSOLUTE_BEHAVIOURAL_DATABASE_FILTER = suite.name.toLowerCase(); + } + + let command: string[]; + + if (mode !== 'functional') { + env.ABSOLUTE_BEHAVIOURAL_MODE = '1'; + const { testFiles } = behaviouralRunner!; + command = ['bun', 'test', ...testFiles]; + } else { + env.ABSOLUTE_BEHAVIOURAL_MODE = env.ABSOLUTE_BEHAVIOURAL_MODE ?? '0'; + const runnerType = suite.runners.functional.runnerType ?? 'bun-run'; + command = + runnerType === 'bun-test' + ? ['bun', 'test', suite.runners.functional.script, ...(suite.runners.functional.args ?? [])] + : ['bun', 'run', suite.runners.functional.script, ...(suite.runners.functional.args ?? [])]; + } + + const commandResult = await runCommand(command, { env }); + const duration = Date.now() - start; + + if (commandResult.exitCode === 0) { + console.log(`✓ ${label} passed (${duration}ms)`); + } else { + console.log(`✗ ${label} failed (exit code ${commandResult.exitCode}, ${duration}ms)`); + } + + return { + duration, + exitCode: commandResult.exitCode, + label, + mode, + name: suite.name, + skipped: false + }; +}; + +const main = async () => { + const options = parseArgs(process.argv.slice(2)); + + if (options.help) { + printHelp(); + + return; + } + + if (options.list) { + printSuites(); + + return; + } + + if (options.clean) { + runCleanup(); + + return; + } + + const suiteQueue = buildSuiteQueue(options); + const exitCode = await runSuites(suiteQueue, options); + process.exit(exitCode); +}; + +if (import.meta.main) { + main().catch((error) => { + console.error(`Error: ${(error as Error).message}`); + process.exit(1); + }); +} \ No newline at end of file diff --git a/scripts/functional-tests/test-utils.ts 
b/scripts/functional-tests/test-utils.ts new file mode 100644 index 0000000..124fe63 --- /dev/null +++ b/scripts/functional-tests/test-utils.ts @@ -0,0 +1,21 @@ +import { existsSync, rmSync } from 'node:fs'; + +/** + * Remove a generated project directory if it exists. + * + * Attempts to remove `projectPath` recursively and forcibly; if removal fails the error is caught + * and a warning is logged containing the path and the error message. + */ +export const cleanupProjectDirectory = (projectPath: string) => { + if (!existsSync(projectPath)) { + return; + } + + try { + rmSync(projectPath, { force: true, recursive: true }); + } catch (error) { + const { message } = error as Error; + console.warn(`Warning: Failed to clean up project directory "${projectPath}": ${message}`); + } +}; + diff --git a/scripts/functional-tests/vue-validator.ts b/scripts/functional-tests/vue-validator.ts new file mode 100644 index 0000000..1540f60 --- /dev/null +++ b/scripts/functional-tests/vue-validator.ts @@ -0,0 +1,189 @@ +/* + Vue Framework Validator + Executes the functional test suite for Vue scaffold combinations. 
+*/ + +import process from 'node:process'; + +import { runFunctionalTests, type FunctionalTestResult } from './functional-test-runner'; + +export type VueValidationResult = { + passed: boolean; + errors: string[]; + warnings: string[]; + functionalTestResults?: FunctionalTestResult; +}; + +type ValidatorOptions = { + skipDependencies?: boolean; + skipBuild?: boolean; + skipServer?: boolean; +}; + +type ValidatorConfig = { + databaseEngine?: string; + orm?: string; + authProvider?: string; + useTailwind?: boolean; + codeQualityTool?: string; + isMultiFrontend?: boolean; +}; + +const runFunctionalSuite = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn', + options: ValidatorOptions, + errors: string[], + warnings: string[] +) => { + const results = await runFunctionalTests(projectPath, packageManager, options).catch((unknownError) => { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + errors.push(`Functional tests failed: ${error.message}`); + + return undefined; + }); + + if (!results) { + return undefined; + } + + if (!results.passed) { + errors.push(...results.errors); + } + + if (results.warnings.length > 0) { + warnings.push(...results.warnings); + } + + return results; +}; + +export const validateVueFramework = async ( + projectPath: string, + packageManager: 'bun' | 'npm' | 'pnpm' | 'yarn' = 'bun', + _config: ValidatorConfig = {}, + options: ValidatorOptions = {} +): Promise => { + void _config; + const errors: string[] = []; + const warnings: string[] = []; + + const functionalTestResults = await runFunctionalSuite( + projectPath, + packageManager, + options, + errors, + warnings + ); + + const passed = errors.length === 0; + + return { + errors, + functionalTestResults, + passed, + warnings + }; +}; + +const parseCliArguments = () => { + const [, , projectPath, packageManagerArg, ...flags] = process.argv; + const packageManager = (packageManagerArg as 'bun' | 'npm' | 'pnpm' | 
'yarn' | undefined) ?? 'bun'; + + const skipDependencies = flags.includes('--skip-deps'); + const skipBuild = flags.includes('--skip-build'); + const skipServer = flags.includes('--skip-server'); + + return { + packageManager, + projectPath, + skipBuild, + skipDependencies, + skipServer + } as const; +}; + +const logBuildSummary = (build?: FunctionalTestResult['results']['build']) => { + if (!build) { + return; + } + + console.log(` Build: ${build.passed ? '✓' : '✗'}`); + + if (typeof build.compileTime === 'number') { + console.log(` Compile time: ${build.compileTime}ms`); + } +}; + +const logServerSummary = (server?: FunctionalTestResult['results']['server']) => { + if (!server) { + return; + } + + console.log(` Server: ${server.passed ? '✓' : '✗'}`); +}; + +const logFunctionalSummary = (functionalTestResults?: FunctionalTestResult) => { + if (!functionalTestResults) { + return; + } + + console.log('\nFunctional Test Results:'); + const { results } = functionalTestResults; + logBuildSummary(results.build); + logServerSummary(results.server); +}; + +const logWarnings = (warnings: string[]) => { + if (warnings.length === 0) { + return; + } + + console.log('\nWarnings:'); + warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +const exitWithResult = (result: VueValidationResult) => { + if (result.passed) { + console.log('\n✓ Vue framework validation passed!'); + process.exit(0); + } + + console.log('\n✗ Vue framework validation failed:'); + result.errors.forEach((error) => console.error(` - ${error}`)); + process.exit(1); +}; + +const runFromCli = async () => { + const { packageManager, projectPath, skipBuild, skipDependencies, skipServer } = parseCliArguments(); + + if (!projectPath) { + console.error('Usage: bun run scripts/functional-tests/vue-validator.ts [package-manager] [--skip-deps] [--skip-build] [--skip-server]'); + process.exit(1); + } + + try { + const result = await validateVueFramework( + projectPath, + packageManager, + {}, + { 
skipBuild, skipDependencies, skipServer } + ); + + console.log('\n=== Vue Framework Validation Results ===\n'); + logFunctionalSummary(result.functionalTestResults); + logWarnings(result.warnings); + exitWithResult(result); + } catch (unknownError) { + const error = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + console.error('✗ Vue framework validation error:', error); + process.exit(1); + } +}; + +if (import.meta.main) { + runFromCli().catch((error) => { + console.error('✗ Vue validator encountered an unexpected error:', error); + process.exit(1); + }); +} diff --git a/src/commands/formatProject.ts b/src/commands/formatProject.ts index 13e1b86..51bfdd0 100644 --- a/src/commands/formatProject.ts +++ b/src/commands/formatProject.ts @@ -1,7 +1,6 @@ -import { exit } from 'process'; import { spinner } from '@clack/prompts'; import { $ } from 'bun'; -import { green, red } from 'picocolors'; +import { green, yellow } from 'picocolors'; import { PackageManager } from '../types'; import { formatCommands, formatNoInstallCommands } from '../utils/commandMaps'; @@ -24,11 +23,9 @@ export const formatProject = async ({ : formatNoInstallCommands[packageManager]; spin.start('Formatting files…'); - await $`sh -c ${fmt}`.cwd(projectName).quiet(); + await $`sh -c ${fmt}`.cwd(projectName).quiet().nothrow(); spin.stop(green('Files formatted')); - } catch (err) { - spin.stop(red('Failed to format files'), 1); - console.error('Error formatting:', err); - exit(1); + } catch { + spin.stop(yellow('Formatting skipped - continuing...'), 0); } }; diff --git a/src/commands/initializeGit.ts b/src/commands/initializeGit.ts index 08847b8..df36ec8 100644 --- a/src/commands/initializeGit.ts +++ b/src/commands/initializeGit.ts @@ -1,18 +1,33 @@ import { spinner } from '@clack/prompts'; import { $ } from 'bun'; import { green, red } from 'picocolors'; +import { abort } from '../utils/abort'; +import { checkGitInstalled } from '../utils/checkGitInstalled'; + +const 
initializeRepository = async (projectName: string, spin: ReturnType) => { + spin.stop(); + spin.start('Initializing git repository…'); + + await $`git init -b main`.cwd(projectName).quiet(); + await $`git add -A`.cwd(projectName).quiet(); + await $`git commit -m "Initial commit"`.cwd(projectName).quiet(); + + spin.stop(green('Git repo initialized')); +}; export const initializeGit = async (projectName: string) => { const spin = spinner(); - try { - spin.start('Initializing git repository…'); - - await $`git init -b main`.cwd(projectName).quiet(); - await $`git add -A`.cwd(projectName).quiet(); - await $`git commit -m "Initial commit"`.cwd(projectName).quiet(); + spin.start('Checking git availability...'); + const isGitInstalled = await checkGitInstalled(); + + if (!isGitInstalled) { + spin.stop(red('Git is not installed. Please install git before proceeding.'), 1); + abort(); + } - spin.stop(green('Git repo initialized')); + try { + await initializeRepository(projectName, spin); } catch (err) { spin.stop(red('Failed to initialize git'), 1); throw err; diff --git a/src/data.ts b/src/data.ts index 4e8b25a..97b33f9 100644 --- a/src/data.ts +++ b/src/data.ts @@ -87,7 +87,16 @@ export const absoluteAuthPlugin: AvailableDependency = { imports: [ { config: { - providersConfiguration: {} + providersConfiguration: { + github: { + credentials: { + clientId: 'github-test-client', + clientSecret: 'github-test-secret' + , + redirectUri: 'http://localhost:3000/auth/callback/github' + } + } + } }, isPlugin: true, packageName: 'absoluteAuth' @@ -128,6 +137,24 @@ export const defaultDependencies: AvailableDependency[] = [ } ]; +export const prismaRuntimeDependencies: AvailableDependency[] = [ + { + latestVersion: '6.2.1', + value: '@prisma/client' + }, + { + latestVersion: '5.23.0', + value: '@prisma/extension-accelerate' + } +]; + +export const prismaDevDependencies: AvailableDependency[] = [ + { + latestVersion: '6.2.1', + value: 'prisma' + } +]; + export const defaultPlugins: 
AvailableDependency[] = [ { imports: [ diff --git a/src/generators/configurations/generateEnv.ts b/src/generators/configurations/generateEnv.ts index 020b9a6..5459250 100644 --- a/src/generators/configurations/generateEnv.ts +++ b/src/generators/configurations/generateEnv.ts @@ -12,11 +12,11 @@ type GenerateEnvProps = Pick< const databaseURLS = { cockroachdb: 'cockroachdb://user:password@localhost:26257/database', gel: 'gel://user:password@localhost:5432/database', - mariadb: 'mariadb://user:password@localhost:3306/database', - mongodb: 'mongodb://user:password@localhost:27017/database', + mariadb: 'mariadb://user:userpassword@localhost:3306/database', + mongodb: 'mongodb://user:password@127.0.0.1:27018/database?authSource=admin', mssql: 'mssql://user:password@localhost:1433/database', - mysql: 'mysql://user:password@localhost:3306/database', - postgresql: 'postgresql://user:password@localhost:5432/database', + mysql: 'mysql://user:userpassword@localhost:3306/database', + postgresql: 'postgresql://user:password@127.0.0.1:5433/database', singlestore: 'singlestore://user:password@localhost:3306/database' } as const; diff --git a/src/generators/configurations/generatePackageJson.ts b/src/generators/configurations/generatePackageJson.ts index cbbde06..22eaa35 100644 --- a/src/generators/configurations/generatePackageJson.ts +++ b/src/generators/configurations/generatePackageJson.ts @@ -7,7 +7,9 @@ import { availablePlugins, defaultDependencies, defaultPlugins, - eslintAndPrettierDependencies + eslintAndPrettierDependencies, + prismaDevDependencies, + prismaRuntimeDependencies } from '../../data'; import type { CreateConfiguration, PackageJson } from '../../types'; import { getPackageVersion } from '../../utils/getPackageVersion'; @@ -126,11 +128,30 @@ export const createPackageJson = ({ '0.1.1' ); } - if (orm === 'drizzle') { dependencies['drizzle-orm'] = resolveVersion('drizzle-orm', '0.41.0'); + devDependencies['drizzle-kit'] = resolveVersion('drizzle-kit', 
'0.30.6'); + } + const usesAccelerate = + orm === 'prisma' && + (databaseHost === 'neon' || databaseHost === 'planetscale'); + + if (orm === 'prisma') { + prismaRuntimeDependencies.forEach((dep) => { + dependencies[dep.value] = resolveVersion(dep.value, dep.latestVersion); + }); + + prismaDevDependencies.forEach((dep) => { + if (dep.value === '@prisma/extension-accelerate' && !usesAccelerate) return; + devDependencies[dep.value] = resolveVersion( + dep.value, + dep.latestVersion + ); + }); } + // Add cloud provider client dependencies when using cloud hosts + // These are needed regardless of ORM choice (drizzle, prisma, or none) switch (databaseHost) { case 'neon': dependencies['@neondatabase/serverless'] = resolveVersion( @@ -210,6 +231,17 @@ export const createPackageJson = ({ scripts['db:init'] = 'sqlite3 db/database.sqlite < db/init.sql'; } + if (orm === 'prisma') { + scripts['postinstall'] = 'prisma generate'; + scripts['db:generate'] = 'prisma generate'; + scripts['db:push'] = 'prisma db push'; + scripts['db:studio'] = 'prisma studio'; + scripts['db:migrate'] = 'prisma migrate dev'; + scripts['db:migrate:deploy'] = 'prisma migrate deploy'; + scripts['db:migrate:reset'] = 'prisma migrate reset'; + + } + const packageJson: PackageJson = { dependencies, devDependencies, @@ -224,3 +256,4 @@ export const createPackageJson = ({ JSON.stringify(packageJson, null, 2) ); }; + diff --git a/src/generators/configurations/scaffoldConfigurationFiles.ts b/src/generators/configurations/scaffoldConfigurationFiles.ts index 299e378..74b092b 100644 --- a/src/generators/configurations/scaffoldConfigurationFiles.ts +++ b/src/generators/configurations/scaffoldConfigurationFiles.ts @@ -1,4 +1,4 @@ -import { copyFileSync, writeFileSync } from 'fs'; +import { copyFileSync, writeFileSync, readFileSync, mkdirSync } from 'fs'; import { join } from 'path'; import { dim, yellow } from 'picocolors'; import type { CreateConfiguration } from '../../types'; @@ -30,10 +30,51 @@ export const 
scaffoldConfigurationFiles = ({ initializeGitNow, projectName }: AddConfigurationProps) => { - copyFileSync( - join(templatesDirectory, 'configurations', 'tsconfig.example.json'), - join(projectName, 'tsconfig.json') + const tsconfigTemplatePath = join( + templatesDirectory, + 'configurations', + 'tsconfig.example.json' ); + const tsconfigTargetPath = join(projectName, 'tsconfig.json'); + // Helper to determine JSX compiler option based on frontends + const getJsxOption = () => { + if (frontends.includes('react')) return 'react-jsx'; + if (frontends.includes('vue')) return 'preserve'; + + return undefined; + }; + + const writeTsconfigFile = () => { + const tsconfigContent = readFileSync(tsconfigTemplatePath, 'utf-8'); + const tsconfig = JSON.parse(tsconfigContent); + + if (!tsconfig.compilerOptions) { + tsconfig.compilerOptions = {}; + } + + const jsxOption = getJsxOption(); + if (!jsxOption) { + delete tsconfig.compilerOptions.jsx; + mkdirSync(projectName, { recursive: true }); + writeFileSync(tsconfigTargetPath, `${JSON.stringify(tsconfig, null, 2)}\n`); + + return; + } + + tsconfig.compilerOptions.jsx = jsxOption; + mkdirSync(projectName, { recursive: true }); + writeFileSync(tsconfigTargetPath, `${JSON.stringify(tsconfig, null, 2)}\n`); + }; + + try { + writeTsconfigFile(); + } catch (error: unknown) { + const message = error instanceof Error ? 
error.message : String(error); + console.error( + `Failed to scaffold tsconfig from "${tsconfigTemplatePath}" to "${tsconfigTargetPath}": ${message}` + ); + throw error; + } if (tailwind) { copyFileSync( @@ -74,4 +115,17 @@ export const scaffoldConfigurationFiles = ({ envVariables, projectName }); + + // Generate Vue type declarations if Vue is included + if (frontends.includes('vue')) { + const typesDirectory = join(projectName, 'src', 'types'); + mkdirSync(typesDirectory, { recursive: true }); + const vueShimContent = `declare module '*.vue' { + import type { DefineComponent } from 'vue'; + const component: DefineComponent<{}, {}, any>; + export default component; +} +`; + writeFileSync(join(typesDirectory, 'vue-shim.d.ts'), vueShimContent); + } }; diff --git a/src/generators/db/dockerInitTemplates.ts b/src/generators/db/dockerInitTemplates.ts index 07bc3c3..49eee19 100644 --- a/src/generators/db/dockerInitTemplates.ts +++ b/src/generators/db/dockerInitTemplates.ts @@ -22,6 +22,12 @@ const mysqlCountHistory = `CREATE TABLE IF NOT EXISTS count_history ( created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP );`; +const mongodbUsers = + "const admin = db.getSiblingDB('admin'); admin.auth('user', 'password'); const database = db.getSiblingDB('database'); database.users.updateOne({ auth_sub: 'seed-user' }, { $setOnInsert: { auth_sub: 'seed-user', created_at: new Date(), metadata: {} } }, { upsert: true });"; + +const mongodbCountHistory = + "const admin = db.getSiblingDB('admin'); admin.auth('user', 'password'); const database = db.getSiblingDB('database'); if (!database.count_history.findOne({ uid: 1 })) { database.count_history.insertOne({ uid: 1, count: 0, created_at: new Date() }); }"; + const mariadbUsers = `CREATE TABLE IF NOT EXISTS users ( auth_sub VARCHAR(255) PRIMARY KEY, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -92,6 +98,7 @@ export const userTables = { cockroachdb: cockroachdbUsers, gel: gelUsers, mariadb: mariadbUsers, + mongodb: 
mongodbUsers, mssql: mssqlUsers, mysql: mysqlUsers, postgresql: postgresqlUsers, @@ -102,6 +109,7 @@ export const countHistoryTables = { cockroachdb: cockroachdbCountHistory, gel: gelCountHistory, mariadb: mariadbCountHistory, + mongodb: mongodbCountHistory, mssql: mssqlCountHistory, mysql: mysqlCountHistory, postgresql: postgresqlCountHistory, @@ -121,6 +129,10 @@ export const initTemplates = { cli: 'MYSQL_PWD=userpassword mariadb -h127.0.0.1 -u user -e', wait: 'until mysqladmin ping -h127.0.0.1 --silent; do sleep 1; done' }, + mongodb: { + cli: 'mongosh "mongodb://user:password@127.0.0.1:27017" --quiet --eval', + wait: 'until mongosh "mongodb://user:password@127.0.0.1:27017" --quiet --eval "db.runCommand({ ping: 1 })" >/dev/null 2>&1; do sleep 1; done' + }, mssql: { cli: '/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P sapassword -Q', wait: 'until /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P sapassword -Q "SELECT 1" >/dev/null 2>&1; do sleep 1; done' diff --git a/src/generators/db/ensurePostgresSqlAdapter.ts b/src/generators/db/ensurePostgresSqlAdapter.ts new file mode 100644 index 0000000..0d5ef04 --- /dev/null +++ b/src/generators/db/ensurePostgresSqlAdapter.ts @@ -0,0 +1,105 @@ +import { mkdirSync, writeFileSync } from 'node:fs'; +import { join } from 'node:path'; + +const POSTGRES_SQL_ADAPTER_CONTENT = `import type { Pool, QueryResultRow } from 'pg' + +type TemplateExecutor = ( + parts: TemplateStringsArray, + ...params: unknown[] +) => Promise + +type QueryExecutor = ( + text: string, + params?: unknown[] +) => Promise + +type PgSqlShape = TemplateExecutor & { + query: QueryExecutor + end: () => Promise +} + +const buildQuery = ( + parts: TemplateStringsArray, + params: unknown[] +) => { + let text = '' + const values: unknown[] = [] + + for (let index = 0; index < parts.length; index += 1) { + text += parts[index] + + if (index < params.length) { + values.push(params[index]) + text += '$' + values.length + } + } + + return { text, values } +} + 
+const normaliseValue = (value: unknown): unknown => { + if (value === undefined) { + return null + } + + if (value === null) { + return null + } + + if (value instanceof Date || value instanceof Uint8Array || value instanceof ArrayBuffer) { + return value + } + + if (Array.isArray(value)) { + return value.map(normaliseValue) + } + + if (typeof value === 'object') { + try { + return JSON.stringify(value) + } catch { + return String(value) + } + } + + return value +} + +const prepareParameters = (params: unknown[]): unknown[] => + params.map(normaliseValue) + +export type PgSql = PgSqlShape + +export const createPgSql = (pool: Pool): PgSql => { + const executeTemplate: TemplateExecutor = async ( + parts: TemplateStringsArray, + ...params: unknown[] + ) => { + const { text, values } = buildQuery(parts, params) + const result = await pool.query(text, prepareParameters(values)) + + return result.rows + } + + const sql = (executeTemplate as PgSql) + + sql.query = async (text: string, params: unknown[] = []) => { + const result = await pool.query(text, prepareParameters(params)) + + return result.rows + } + + sql.end = () => pool.end() + + return sql +} +`; + +export const ensurePostgresSqlAdapter = (backendDirectory: string) => { + const databaseUtilitiesDirectory = join(backendDirectory, 'database'); + const adapterPath = join(databaseUtilitiesDirectory, 'createPgSql.ts'); + + mkdirSync(databaseUtilitiesDirectory, { recursive: true }); + writeFileSync(adapterPath, POSTGRES_SQL_ADAPTER_CONTENT, 'utf-8'); +}; + diff --git a/src/generators/db/generateDockerContainer.ts b/src/generators/db/generateDockerContainer.ts index 84be855..891b9ab 100644 --- a/src/generators/db/generateDockerContainer.ts +++ b/src/generators/db/generateDockerContainer.ts @@ -37,7 +37,7 @@ const templates = { MONGO_INITDB_ROOT_USERNAME: 'user' }, image: 'mongo:7.0', - port: '27017:27017', + port: '27018:27017', volumePath: '/data/db' }, mssql: { @@ -68,7 +68,7 @@ const templates = { POSTGRES_USER: 
'user' }, image: 'postgres:15', - port: '5432:5432', + port: '5433:5432', volumePath: '/var/lib/postgresql/data' }, singlestore: { diff --git a/src/generators/db/generateDrizzleSchema.ts b/src/generators/db/generateDrizzleSchema.ts index f9691d5..1b8b9c2 100644 --- a/src/generators/db/generateDrizzleSchema.ts +++ b/src/generators/db/generateDrizzleSchema.ts @@ -95,6 +95,14 @@ export const generateDrizzleSchema = ({ } else if (databaseHost === 'turso') { dbImport = `import { LibSQLDatabase } from 'drizzle-orm/libsql';`; dbTypeLine = 'export type DatabaseType = LibSQLDatabase;'; + } else if ( + databaseEngine === 'postgresql' && + (databaseHost === undefined || + databaseHost === 'none' || + databaseHost === 'local') + ) { + dbImport = `import { NodePgDatabase } from 'drizzle-orm/node-postgres';`; + dbTypeLine = 'export type DatabaseType = NodePgDatabase;'; } let uidColumn: string; diff --git a/src/generators/db/generateHandlers.ts b/src/generators/db/generateHandlers.ts index ce7500d..a93b1ed 100644 --- a/src/generators/db/generateHandlers.ts +++ b/src/generators/db/generateHandlers.ts @@ -1,4 +1,4 @@ -import { CreateConfiguration } from '../../types'; +import type { CreateConfiguration } from '../../types'; import { getAuthTemplate, getCountTemplate } from './handlerTemplates'; type GenerateDBHandlersProps = Pick< @@ -20,9 +20,12 @@ export const generateDBHandlers = ({ const host = databaseHost && databaseHost !== 'none' ? databaseHost : 'local'; - const ormKey = orm === 'drizzle' ? 'drizzle' : 'sql'; + let ormKey = 'sql'; + if (orm === 'drizzle') ormKey = 'drizzle'; + else if (orm === 'prisma') ormKey = 'prisma'; const key = `${databaseEngine}:${ormKey}:${host}` as const; // @ts-expect-error - TODO: Finish the other templates return usesAuth ? 
getAuthTemplate(key) : getCountTemplate(key); }; + diff --git a/src/generators/db/handlerTemplates.ts b/src/generators/db/handlerTemplates.ts index 4b90939..db477a1 100644 --- a/src/generators/db/handlerTemplates.ts +++ b/src/generators/db/handlerTemplates.ts @@ -18,7 +18,7 @@ const buildSqlAuthTemplate = ({ dbType, queries }: AuthTemplateOptions) => ` -import { isValidProviderOption, providers } from 'citra' +import { isValidProviderOption, providers } from '@absolutejs/auth' ${importLines} ${handlerTypes?.UserRow ? `\ntype UserRow = ${handlerTypes.UserRow}` : ''} type UserHandlerProps = { @@ -29,14 +29,26 @@ type UserHandlerProps = { export const getUser = async ({ authProvider, db, userIdentity }: UserHandlerProps) => { if (!isValidProviderOption(authProvider)) throw new Error(\`Invalid auth provider: \${authProvider}\`) - const subject = providers[authProvider].extractSubjectFromIdentity(userIdentity) + const provider = providers[authProvider as keyof typeof providers] + const identity = userIdentity as Record + const subject = + (provider as any).extractSubjectFromIdentity?.(identity) ?? + (identity.sub as string | undefined) ?? + (identity.id as string | undefined) ?? + String(identity.sub || identity.id || 'unknown') const authSub = \`\${authProvider.toUpperCase()}|\${subject}\` ${queries.selectUser} } export const createUser = async ({ authProvider, db, userIdentity }: UserHandlerProps) => { if (!isValidProviderOption(authProvider)) throw new Error(\`Invalid auth provider: \${authProvider}\`) - const subject = providers[authProvider].extractSubjectFromIdentity(userIdentity) + const provider = providers[authProvider as keyof typeof providers] + const identity = userIdentity as Record + const subject = + (provider as any).extractSubjectFromIdentity?.(identity) ?? + (identity.sub as string | undefined) ?? + (identity.id as string | undefined) ?? 
+ String(identity.sub || identity.id || 'unknown') const authSub = \`\${authProvider.toUpperCase()}|\${subject}\` ${queries.insertUser} } @@ -138,16 +150,57 @@ const postgresSqlQueryOperations: QueryOperations = { }; const mongodbQueryOperations: QueryOperations = { - insertHistory: `const { insertedId } = await db.collection('count_history').insertOne({ count }) - const newHistory = await db.collection('count_history').findOne({ _id: insertedId }) - return newHistory`, - insertUser: `const { insertedId } = await db.collection('users').insertOne({ auth_sub: authSub, metadata: userIdentity }) - const newUser = await db.collection('users').findOne({ _id: insertedId }) - if (!newUser) throw new Error('Failed to create user') - return newUser`, - selectHistory: `const history = await db.collection('count_history').findOne({ uid }) + insertHistory: `const entries = await db + .collection('count_history') + .find({}, { projection: { uid: 1 } }) + .sort({ uid: -1 }) + .limit(1) + .toArray() + + const nextUid = (entries[0]?.uid ?? 0) + 1 + const record = { + created_at: new Date(), + count, + uid: nextUid + } + + await db.collection('count_history').insertOne(record) + + const { _id: _unused, ...history } = record + return history`, + insertUser: `const record = { + auth_sub: authSub, + created_at: new Date(), + metadata: userIdentity + } + + await db.collection('users').updateOne( + { auth_sub: authSub }, + { $set: record }, + { upsert: true } + ) + + const user = await db + .collection('users') + .findOne({ auth_sub: authSub }, { projection: { _id: 0 } }) + + if (!user) throw new Error('Failed to create user') + return user`, + selectHistory: `const history = await db + .collection('count_history') + .findOne( + { uid }, + { projection: { _id: 0 } } + ) + return history ?? 
null`, - selectUser: `const user = await db.collection('users').findOne({ auth_sub: authSub }) + selectUser: `const user = await db + .collection('users') + .findOne( + { auth_sub: authSub }, + { projection: { _id: 0 } } + ) + return user ?? null` }; @@ -244,10 +297,9 @@ const [result] = await db.query( 'INSERT INTO users (auth_sub, metadata) VALUES (?, ?)', [authSub, JSON.stringify(userIdentity)] ); -const insertId = result.insertId; const [rows] = await db.query( - 'SELECT * FROM users WHERE uid = ? LIMIT 1', - [insertId] + 'SELECT * FROM users WHERE auth_sub = ? LIMIT 1', + [authSub] ); if (!rows[0]) throw new Error('Failed to create user'); return rows[0]; @@ -282,40 +334,41 @@ const mysqlHandlerTypes: HandlerType = { created_at: number; }`, UserRow: `RowDataPacket & { - uid: number; auth_sub: string; metadata: string; }` }; const mysqlDrizzleQueryOperations: QueryOperations = { - insertHistory: `const [row] = await db + insertHistory: `const insertResult = await db .insert(schema.countHistory) .values({ count }) - .$returningId(); + .execute(); + + const insertId = + Array.isArray(insertResult) + ? 
(insertResult[0] as { insertId?: number })?.insertId + : (insertResult as { insertId?: number }).insertId; - if (!row) throw new Error('insert failed: no uid returned'); - const { uid } = row; + if (typeof insertId !== 'number') { + throw new Error('insert failed: no uid returned'); + } const [newHistory] = await db .select() .from(schema.countHistory) - .where(eq(schema.countHistory.uid, uid)); + .where(eq(schema.countHistory.uid, insertId)); return newHistory;`, - insertUser: `const [row] = await db + insertUser: `await db .insert(schema.users) - .values({ auth_sub: authSub, metadata: userIdentity }) - .$returningId(); - - if (!row) throw new Error('insert failed: no uid returned'); - const { uid } = row; + .values({ auth_sub: authSub, metadata: userIdentity }); const [newUser] = await db .select() .from(schema.users) - .where(eq(schema.users.uid, uid)); + .where(eq(schema.users.auth_sub, authSub)); if (!newUser) throw new Error('Failed to create user'); return newUser;`, @@ -368,7 +421,7 @@ import { schema, type SchemaType } from '../../../db/schema'`, dbType: 'NodePgDatabase', importLines: ` import { eq } from 'drizzle-orm' -import { BunSQLDatabase } from 'drizzle-orm/bun-sql' +import { NodePgDatabase } from 'drizzle-orm/node-postgres' import { schema, type SchemaType } from '../../../db/schema'`, queries: drizzleQueryOperations }, @@ -381,8 +434,8 @@ import { schema, type SchemaType } from '../../../db/schema'`, queries: drizzleQueryOperations }, 'postgresql:sql:local': { - dbType: 'SQL', - importLines: `import { SQL } from 'bun'`, + dbType: 'PgSql', + importLines: `import type { PgSql } from '../database/createPgSql'`, queries: postgresSqlQueryOperations }, 'postgresql:sql:neon': { diff --git a/src/generators/db/scaffoldDocker.ts b/src/generators/db/scaffoldDocker.ts index 42d353c..6193888 100644 --- a/src/generators/db/scaffoldDocker.ts +++ b/src/generators/db/scaffoldDocker.ts @@ -1,4 +1,5 @@ import { writeFileSync } from 'fs'; +import process from 
'node:process'; import { join } from 'path'; import { $ } from 'bun'; import { AuthProvider, DatabaseEngine } from '../../types'; @@ -34,23 +35,29 @@ export const scaffoldDocker = async ({ } await checkDockerInstalled(); + const useSharedContainer = + process.env.ABSOLUTE_TEST === 'true' && + (databaseEngine === 'postgresql' || + databaseEngine === 'mysql' || + databaseEngine === 'mariadb'); + const dbContainer = generateDockerContainer(databaseEngine); - writeFileSync( - join(projectDatabaseDirectory, 'docker-compose.db.yml'), - dbContainer, - 'utf-8' - ); + const composePath = join(projectDatabaseDirectory, 'docker-compose.db.yml'); + writeFileSync(composePath, dbContainer, 'utf-8'); - if (databaseEngine === 'mongodb') { - } else { - const { wait, cli } = initTemplates[databaseEngine]; - const usesAuth = authProvider !== undefined && authProvider !== 'none'; - const dbCommand = usesAuth - ? userTables[databaseEngine] - : countHistoryTables[databaseEngine]; - await $`bun db:up`.cwd(projectName); - await $`docker compose -p ${databaseEngine} -f db/docker-compose.db.yml exec -T db \ - bash -lc '${wait} && ${cli} "${dbCommand}"'`.cwd(projectName); - await $`bun db:down`.cwd(projectName); + if (useSharedContainer) { + return; } + + const { wait, cli } = initTemplates[databaseEngine]; + const usesAuth = authProvider !== undefined && authProvider !== 'none'; + const dbCommand = usesAuth + ? 
userTables[databaseEngine] + : countHistoryTables[databaseEngine]; + const escapedDbCommand = dbCommand.replace(/\$/g, '\\$'); + + await $`bun db:up`.cwd(projectName); + await $`docker compose -p ${databaseEngine} -f db/docker-compose.db.yml exec -T db \ + bash -lc '${wait} && ${cli} "${escapedDbCommand}"'`.cwd(projectName); + await $`bun db:down`.cwd(projectName); }; diff --git a/src/generators/project/generateDBBlock.ts b/src/generators/project/generateDBBlock.ts index d53434b..7b5ae7f 100644 --- a/src/generators/project/generateDBBlock.ts +++ b/src/generators/project/generateDBBlock.ts @@ -14,7 +14,7 @@ const connectionMap: Record> = { none: { expr: 'createPool(getEnv("DATABASE_URL"))' } }, mongodb: { - none: { expr: 'new MongoClient(getEnv("DATABASE_URL") })' } + none: { expr: 'new MongoClient(getEnv("DATABASE_URL"))' } }, mssql: { none: { expr: 'await connect(getEnv("DATABASE_URL"))' } @@ -25,7 +25,7 @@ const connectionMap: Record> = { }, postgresql: { neon: { - expr: 'new Pool({ connectionString: getEnv("DATABASE_URL") })' + expr: 'neon(getEnv("DATABASE_URL"))' }, none: { expr: 'new Pool({ connectionString: getEnv("DATABASE_URL") })' } }, @@ -70,14 +70,40 @@ export const generateDBBlock = ({ const hostCfg = engineGroup[hostKey]; if (!hostCfg) return ''; + // MongoDB needs special handling: connect and get database + if (databaseEngine === 'mongodb') { + return ` +const client = ${hostCfg.expr} +await client.connect() +const db = client.db('database') +`; + } + + if (databaseEngine === 'postgresql' && hostKey === 'none') { + return ` +const connectionString = ${hostCfg.expr.replace('new Pool({ connectionString: getEnv("DATABASE_URL") })', 'getEnv("DATABASE_URL")')} +if (process.env.ABSOLUTE_TEST_VERBOSE === '1') { + console.log('Server runtime env: DATABASE_URL=' + connectionString) + console.log('Server runtime env: PGHOST=' + (process.env.PGHOST ?? 'undefined')) + console.log('Server runtime env: PGPORT=' + (process.env.PGPORT ?? 
'undefined')) +} +const pool = new Pool({ connectionString }) +const db = createPgSql(pool) +`; + } + return ` -const pool = ${hostCfg.expr} +const db = ${hostCfg.expr} `; } if (!drizzleDialectSet.has(databaseEngine)) return ''; - const expr = engineGroup[hostKey]?.expr ?? remoteDrizzleInit[hostKey]; + // For Drizzle with remote hosts, use remoteDrizzleInit; otherwise use connectionMap + const isRemoteHost = hostKey !== 'none' && hostKey in remoteDrizzleInit; + const expr = isRemoteHost + ? (remoteDrizzleInit[hostKey] ?? engineGroup[hostKey]?.expr) + : (engineGroup[hostKey]?.expr ?? remoteDrizzleInit[hostKey]); if (!expr) return ''; if (databaseEngine === 'mysql') { diff --git a/src/generators/project/generateImportsBlock.ts b/src/generators/project/generateImportsBlock.ts index 9fe72af..a174edc 100644 --- a/src/generators/project/generateImportsBlock.ts +++ b/src/generators/project/generateImportsBlock.ts @@ -79,8 +79,17 @@ export const generateImportsBlock = ({ `import VueExample from '${buildExamplePath(vueDir, 'VueExample.vue')}'` ); + // Neon requires different imports based on whether ORM is used + const getNeonImport = () => { + if (orm === 'drizzle') { + return [`import { Pool } from '@neondatabase/serverless'`]; + } + + return [`import { neon } from '@neondatabase/serverless'`]; + }; + const connectorImports = { - neon: [`import { Pool } from '@neondatabase/serverless'`], + neon: getNeonImport(), planetscale: [`import { connect } from '@planetscale/database'`], turso: [`import { createClient } from '@libsql/client'`] } as const; @@ -142,13 +151,28 @@ export const generateImportsBlock = ({ rawImports.push(`import { getEnv } from '@absolutejs/absolute'`); } - if (noOrm && databaseEngine === 'postgresql') + const shouldAddPostgresqlImports = noOrm && databaseEngine === 'postgresql'; + if (shouldAddPostgresqlImports && !isRemoteHost) { rawImports.push( - ...(isRemoteHost - ? 
connectorImports[databaseHost as 'neon'] - : [`import { Pool } from 'pg'`]), + `import { Pool } from 'pg'`, + `import { createPgSql } from './database/createPgSql'`, `import { getEnv } from '@absolutejs/absolute'` ); + } + + if (shouldAddPostgresqlImports && isRemoteHost) { + const connectorKey = databaseHost; + const connectorImportsList = connectorImports[connectorKey]; + if (connectorImportsList) rawImports.push(...connectorImportsList); + rawImports.push(`import { getEnv } from '@absolutejs/absolute'`); + } + + if (noOrm && databaseEngine === 'mongodb') { + rawImports.push( + `import { MongoClient } from 'mongodb'`, + `import { getEnv } from '@absolutejs/absolute'` + ); + } if (orm === 'drizzle') { rawImports.push( @@ -189,6 +213,23 @@ export const generateImportsBlock = ({ rawImports.push(`import { vueImports } from './utils/vueImporter'`); } + // Helper to parse import clause and update entry + const parseImportClause = ( + importClause: string, + entry: { defaultImport: string | null; namedImports: Set } + ) => { + if (importClause.startsWith('{')) { + importClause + .slice(1, -1) + .split(',') + .map((segment) => segment.trim()) + .filter(Boolean) + .forEach((name) => entry.namedImports.add(name)); + } else { + entry.defaultImport = importClause.trim(); + } + }; + const importMap = new Map< string, { defaultImport: string | null; namedImports: Set } @@ -206,14 +247,7 @@ export const generateImportsBlock = ({ }; importMap.set(modulePath, entry); - void (importClause.startsWith('{') - ? 
importClause - .slice(1, -1) - .split(',') - .map((segment) => segment.trim()) - .filter(Boolean) - .forEach((name) => entry.namedImports.add(name)) - : (entry.defaultImport = importClause.trim())); + parseImportClause(importClause, entry); } return Array.from(importMap.entries()) diff --git a/src/generators/project/generateRoutesBlock.ts b/src/generators/project/generateRoutesBlock.ts index 90b2ee0..00896f5 100644 --- a/src/generators/project/generateRoutesBlock.ts +++ b/src/generators/project/generateRoutesBlock.ts @@ -7,13 +7,15 @@ type GenerateRoutesBlockProps = { frontendDirectories: FrontendDirectories; authProvider: AuthProvider; buildDirectory: string; + databaseEngine?: string; }; export const generateRoutesBlock = ({ flags, frontendDirectories, authProvider, - buildDirectory + buildDirectory, + databaseEngine }: GenerateRoutesBlockProps) => { const routes: string[] = []; @@ -51,8 +53,7 @@ export const generateRoutesBlock = ({ cssPath: asset(manifest, 'VueExampleCSS'), title: 'AbsoluteJS + Vue', description: 'A Vue.js example with AbsoluteJS' - }), - { initialCount: 0 } + }) )` : `handleVuePageRequest( VueExample, @@ -62,8 +63,7 @@ export const generateRoutesBlock = ({ cssPath: asset(manifest, 'VueExampleCSS'), title: 'AbsoluteJS + Vue', description: 'A Vue.js example with AbsoluteJS' - }), - { initialCount: 0 } + }) )`; return ''; @@ -91,7 +91,8 @@ export const generateRoutesBlock = ({ } ); - if (authProvider === undefined || authProvider === 'none') { + const hasDatabase = databaseEngine !== undefined && databaseEngine !== 'none'; + if (hasDatabase && (authProvider === undefined || authProvider === 'none')) { routes.push( `.get('/count/:uid', ({ params: { uid } }) => getCountHistory(db, uid), { params: t.Object({ diff --git a/src/generators/project/generateServer.ts b/src/generators/project/generateServer.ts index 7c220fe..b1b3fd8 100644 --- a/src/generators/project/generateServer.ts +++ b/src/generators/project/generateServer.ts @@ -1,6 +1,7 @@ import { 
writeFileSync, mkdirSync } from 'fs'; import { join } from 'path'; import type { CreateConfiguration } from '../../types'; +import { ensurePostgresSqlAdapter } from '../db/ensurePostgresSqlAdapter'; import { collectDependencies } from './collectDependencies'; import { computeFlags } from './computeFlags'; import { generateBuildBlock } from './generateBuildBlock'; @@ -38,6 +39,14 @@ export const generateServerFile = ({ }: CreateServerFileProps) => { const serverFilePath = join(backendDirectory, 'server.ts'); + if ( + databaseEngine === 'postgresql' && + (orm === undefined || orm === 'none') && + (databaseHost === undefined || databaseHost === 'none') + ) { + ensurePostgresSqlAdapter(backendDirectory); + } + const flags = computeFlags(frontendDirectories); const deps = collectDependencies({ authProvider, flags, plugins }); @@ -70,10 +79,7 @@ export const generateServerFile = ({ orm }); const routesBlock = generateRoutesBlock({ - authProvider, - buildDirectory, - flags, - frontendDirectories + authProvider, buildDirectory, databaseEngine, flags, frontendDirectories }); const content = `${importsBlock} diff --git a/src/generators/project/generateUseBlock.ts b/src/generators/project/generateUseBlock.ts index 72bbdd0..6cfcce1 100644 --- a/src/generators/project/generateUseBlock.ts +++ b/src/generators/project/generateUseBlock.ts @@ -1,5 +1,74 @@ import type { AvailableDependency, DatabaseEngine, ORM } from '../../types'; +// Helper to extract providers array from auth config +const extractProvidersArray = (config: Record | null) => { + try { + const providersConfig = config?.providersConfiguration as Record | undefined; + if (providersConfig) { + return JSON.stringify(Object.keys(providersConfig)); + } + } catch { + // Ignore parse errors + } + + return '[]'; +}; + +// Helper to check if GitHub redirectUri is configured +const getRedirectWarning = (config: Record | null) => { + try { + const providersConfig = config?.providersConfiguration as Record | undefined; + const 
githubConfig = providersConfig?.github as Record | undefined; + const credentials = githubConfig?.credentials as Record | undefined; + if (!credentials?.redirectUri) { + return " /* generated: github.credentials.redirectUri not set - callbacks use '/auth/callback/:provider' */"; + } + } catch { + // Ignore parse errors + } + + return ''; +}; + +// Helper to build auth plugin configuration string +const buildAuthConfig = ( + pluginImport: { packageName: string; isPlugin: boolean; config?: Record | null }, + databaseEngine: DatabaseEngine, + orm: ORM +) => { + const baseConfigString = + pluginImport.config !== null + ? JSON.stringify(pluginImport.config).slice(1, -1) + : ''; + + const hasDatabase = databaseEngine !== undefined && databaseEngine !== 'none'; + const hasOrm = orm !== undefined && orm !== 'none'; + const instantiate = 'instantiateUserSession'; + const pluginGeneric = hasOrm ? '' : ''; + + const callback = hasDatabase + ? `async ({ authProvider, providerInstance, tokenResponse, user_session_id, session }: Record) => ${instantiate}({ authProvider, providerInstance, session, tokenResponse, user_session_id: user_session_id as string, createUser: (userIdentity: Record) => createUser({ authProvider, db, userIdentity }), getUser: (userIdentity: Record) => getUser({ authProvider, db, user_identity: userIdentity }) } as Record)` + : `({ authProvider, tokenResponse, user_session_id }: Record) => { console.log('Successfully authorized OAuth2 with ' + authProvider + ' (session: ' + user_session_id + ')', tokenResponse); }`; + + const routesString = `authorizeRoute: '/auth/authorize/:provider', callbackRoute: '/auth/callback/:provider', profileRoute: '/auth/profile', signoutRoute: '/auth/signout', statusRoute: '/auth/session',`; + + const config = pluginImport.config as Record | null; + const redirectWarning = getRedirectWarning(config); + const providersArray = extractProvidersArray(config); + + let mergedConfig = '{'; + if (baseConfigString) { + mergedConfig += ` 
${baseConfigString},`; + } + mergedConfig += ` ${routesString} onCallbackSuccess: (${callback}) as Record ${redirectWarning} }`; + + return ( + `.use(absoluteAuth${pluginGeneric}(${mergedConfig}))` + + `\n .get('/auth/providers', () => ${providersArray})` + + `\n .post('/auth/session', ({ request }) => ({ message: 'unauthenticated' }), { status: 401 })` + ); +}; + export const generateUseBlock = ({ deps, databaseEngine, @@ -16,24 +85,7 @@ export const generateUseBlock = ({ const isAuth = pluginImport.packageName === 'absoluteAuth'; if (isAuth) { - const baseConfigString = - pluginImport.config !== null - ? JSON.stringify(pluginImport.config).slice(1, -1) - : ''; - - const hasDatabase = - databaseEngine !== undefined && databaseEngine !== 'none'; - const hasOrm = orm !== undefined && orm !== 'none'; - const instantiate = 'instantiateUserSession'; - const pluginGeneric = hasOrm ? '' : ''; - - const callback = hasDatabase - ? `async ({ authProvider, providerInstance, tokenResponse, userSessionId, session }) => ${instantiate}({ authProvider, providerInstance, session, tokenResponse, userSessionId, createUser: (userIdentity) => createUser({ authProvider, db, userIdentity }), getUser: (userIdentity) => getUser({ authProvider, db, userIdentity }) })` - : `({ authProvider, tokenResponse, userSessionId }) => { console.log(\`Successfully authorized OAuth2 with \${authProvider} (session: \${userSessionId})\`, tokenResponse); }`; - - const mergedConfig = `{ ${baseConfigString}${baseConfigString ? 
',' : ''} onCallbackSuccess: ${callback} }`; - - return `.use(absoluteAuth${pluginGeneric}(${mergedConfig}))`; + return buildAuthConfig(pluginImport, databaseEngine, orm); } if (pluginImport.config === undefined) { diff --git a/src/questions/directoryConfiguration.ts b/src/questions/directoryConfiguration.ts index e50c017..15ea8a1 100644 --- a/src/questions/directoryConfiguration.ts +++ b/src/questions/directoryConfiguration.ts @@ -1,6 +1,4 @@ -import { text, isCancel } from '@clack/prompts'; import type { ArgumentConfiguration, CreateConfiguration } from '../types'; -import { abort } from '../utils/abort'; type GetDirectoryConfigurationProps = Pick< CreateConfiguration, @@ -37,58 +35,35 @@ export const getDirectoryConfiguration = async ({ }; } - // Build directory + // Build directory - use default if not provided (non-interactive mode) const buildDirectory = - argumentConfiguration.buildDirectory ?? - (await text({ - message: 'Build directory:', - placeholder: 'build' - })); - if (isCancel(buildDirectory)) abort(); + argumentConfiguration.buildDirectory ?? 'build'; - // Assets directory + // Assets directory - use default if not provided (non-interactive mode) const assetsDirectory = - argumentConfiguration.assetsDirectory ?? - (await text({ - message: 'Assets directory:', - placeholder: 'src/backend/assets' - })); - if (isCancel(assetsDirectory)) abort(); + argumentConfiguration.assetsDirectory ?? 'src/backend/assets'; - // Tailwind directory + // Tailwind directory - use defaults if not provided (non-interactive mode) let tailwind; if (useTailwind) { const input = argumentConfiguration.tailwind?.input ?? - (await text({ - message: 'Tailwind input CSS file:', - placeholder: './src/frontend/styles/tailwind.css' - })); - if (isCancel(input)) abort(); + './src/frontend/styles/tailwind.css'; const output = argumentConfiguration.tailwind?.output ?? 
- (await text({ - message: 'Tailwind output CSS file:', - placeholder: '/assets/css/tailwind.generated.css' - })); - if (isCancel(output)) abort(); + '/assets/css/tailwind.generated.css'; tailwind = { input, output }; } else { tailwind = undefined; } - // Database + // Database - use default if not provided (non-interactive mode) let databaseDirectory; if (databaseEngine !== undefined && databaseEngine !== 'none') { databaseDirectory = - argumentConfiguration.databaseDirectory ?? - (await text({ - message: 'Database directory:', - placeholder: 'db' - })); - if (isCancel(databaseDirectory)) abort(); + argumentConfiguration.databaseDirectory ?? 'db'; } return { diff --git a/src/questions/frontendDirectoryConfigurations.ts b/src/questions/frontendDirectoryConfigurations.ts index f3d10ac..b3f7f5a 100644 --- a/src/questions/frontendDirectoryConfigurations.ts +++ b/src/questions/frontendDirectoryConfigurations.ts @@ -1,4 +1,6 @@ -import { text, isCancel } from '@clack/prompts'; +import process from 'node:process'; + +import { isCancel, text } from '@clack/prompts'; import { frontendLabels } from '../data'; import type { DirectoryConfiguration, @@ -10,14 +12,31 @@ import { abort } from '../utils/abort'; const getDirectoryForFrontend = async ( directoryConfiguration: DirectoryConfiguration, frontend: Frontend, - isSingleFrontend: boolean + isSingleFrontend: boolean, + providedValue?: string ) => { if (directoryConfiguration !== 'custom') return isSingleFrontend ? '' : frontend; + // If value is already provided, use it + if (providedValue !== undefined) + return providedValue; + + // Use default based on placeholder (for non-interactive mode) + // This prevents hanging when --skip is used with --directory custom + const defaultValue = isSingleFrontend ? 
'' : frontend; + + // Check if we're in a non-interactive environment (no TTY or stdin not available) + // If so, return default instead of prompting to prevent hangs + const isNonInteractive = !process.stdin.isTTY || !process.stdout.isTTY || !process.stdin.readable; + if (isNonInteractive) { + return defaultValue; + } + + // Only prompt in interactive mode const response = await text({ message: `${frontendLabels[frontend]} directory:`, - placeholder: isSingleFrontend ? '' : frontend + placeholder: defaultValue }); if (isCancel(response)) abort(); @@ -33,10 +52,31 @@ export const getFrontendDirectoryConfigurations = async ( const frontendDirectories: FrontendDirectories = {}; const frontendsToPrompt: Frontend[] = []; - for (const frontend of frontends) { + const processFrontend = (frontend: Frontend) => { const prefilled = passedFrontendDirectories?.[frontend]; - if (prefilled === undefined) frontendsToPrompt.push(frontend); - else frontendDirectories[frontend] = prefilled; + if (prefilled !== undefined) { + frontendDirectories[frontend] = prefilled; + + return; + } + + if (directoryConfiguration === 'custom') { + frontendsToPrompt.push(frontend); + + return; + } + + const defaultValue = isSingleFrontend ? 
'' : frontend; + frontendDirectories[frontend] = defaultValue; + }; + + for (const frontend of frontends) { + processFrontend(frontend); + } + + // Only prompt if there are frontends that need prompting (shouldn't happen with --skip) + if (frontendsToPrompt.length === 0) { + return frontendDirectories; } const promptedDirectories = await Promise.all( @@ -44,7 +84,8 @@ export const getFrontendDirectoryConfigurations = async ( getDirectoryForFrontend( directoryConfiguration, name, - isSingleFrontend + isSingleFrontend, + passedFrontendDirectories?.[name] ) ) ); diff --git a/src/templates/configurations/.prettierignore b/src/templates/configurations/.prettierignore index d5ddef8..c3b799f 100644 --- a/src/templates/configurations/.prettierignore +++ b/src/templates/configurations/.prettierignore @@ -1,4 +1,5 @@ node_modules dist build -*.min.js \ No newline at end of file +*.min.js + diff --git a/src/templates/configurations/.prettierrc.json b/src/templates/configurations/.prettierrc.json deleted file mode 100644 index 1707240..0000000 --- a/src/templates/configurations/.prettierrc.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "endOfLine": "auto", - "printWidth": 80, - "semi": true, - "singleQuote": true, - "tabWidth": 4, - "trailingComma": "none", - "useTabs": true -} diff --git a/src/templates/configurations/drizzle.config.ts b/src/templates/configurations/drizzle.config.ts deleted file mode 100644 index e7b3282..0000000 --- a/src/templates/configurations/drizzle.config.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { env } from 'bun'; -import { defineConfig } from 'drizzle-kit'; - -if (env.DATABASE_URL === undefined) { - throw new Error('DATABASE_URL must be set in the environment variables'); -} - -export default defineConfig({ - dbCredentials: { - url: env.DATABASE_URL - }, - dialect: 'postgresql' -}); diff --git a/src/templates/configurations/eslint.config.mjs b/src/templates/configurations/eslint.config.mjs index bba2ef1..2a5d5fe 100644 --- 
a/src/templates/configurations/eslint.config.mjs +++ b/src/templates/configurations/eslint.config.mjs @@ -2,23 +2,18 @@ import { dirname } from 'path'; import { fileURLToPath } from 'url'; import pluginJs from '@eslint/js'; -import stylisticTs from '@stylistic/eslint-plugin-ts'; import tsParser from '@typescript-eslint/parser'; import { defineConfig } from 'eslint/config'; -import absolutePlugin from 'eslint-plugin-absolute'; -import importPlugin from 'eslint-plugin-import'; -import jsxA11yPlugin from 'eslint-plugin-jsx-a11y'; -import promisePlugin from 'eslint-plugin-promise'; -import reactPlugin from 'eslint-plugin-react'; -import reactCompilerPlugin from 'eslint-plugin-react-compiler'; -import reactHooksPlugin from 'eslint-plugin-react-hooks'; -import securityPlugin from 'eslint-plugin-security'; import globals from 'globals'; import tseslint from 'typescript-eslint'; const __dirname = dirname(fileURLToPath(import.meta.url)); export default defineConfig([ + { + ignores: ['dist/**', 'build/**', 'node_modules/**'] + }, + pluginJs.configs.recommended, ...tseslint.configs.recommended, @@ -37,207 +32,15 @@ export default defineConfig([ }, { - files: ['**/*.{ts,tsx}'], - plugins: { '@stylistic/ts': stylisticTs }, - rules: { - '@stylistic/ts/padding-line-between-statements': [ - 'error', - { blankLine: 'always', next: 'return', prev: '*' } - ] - } - }, - - { - files: ['**/*.{js,mjs,cjs,ts,tsx,jsx}'], - ignores: ['example/build/**'], - plugins: { - absolute: absolutePlugin, - import: importPlugin, - promise: promisePlugin, - security: securityPlugin - }, + files: ['**/*.{js,mjs,cjs,ts,tsx,jsx,json}'], rules: { - 'absolute/explicit-object-types': 'error', - 'absolute/localize-react-props': 'error', - 'absolute/max-depth-extended': ['error', 1], - 'absolute/max-jsxnesting': ['error', 5], - 'absolute/min-var-length': [ - 'error', - { allowedVars: ['_', 'id', 'db', 'OK'], minLength: 3 } - ], - 'absolute/no-button-navigation': 'error', - 'absolute/no-explicit-return-type': 
'error', - 'absolute/no-inline-prop-types': 'error', - 'absolute/no-multi-style-objects': 'error', - 'absolute/no-nested-jsx-return': 'error', - 'absolute/no-or-none-component': 'error', - 'absolute/no-transition-cssproperties': 'error', - 'absolute/no-type-cast': 'error', - 'absolute/no-unnecessary-div': 'error', - 'absolute/no-unnecessary-key': 'error', - 'absolute/no-useless-function': 'error', - 'absolute/seperate-style-files': 'error', - 'absolute/sort-exports': [ + '@typescript-eslint/no-unused-vars': [ 'error', - { - caseSensitive: true, - natural: true, - order: 'asc', - variablesBeforeFunctions: true - } + { argsIgnorePattern: '^_' } ], - 'absolute/sort-keys-fixable': [ - 'error', - { - caseSensitive: true, - natural: true, - order: 'asc', - variablesBeforeFunctions: true - } - ], - 'arrow-body-style': ['error', 'as-needed'], - 'consistent-return': 'error', - eqeqeq: 'error', - 'func-style': [ - 'error', - 'expression', - { allowArrowFunctions: true } - ], - 'import/no-cycle': 'error', - 'import/no-default-export': 'error', - 'import/no-relative-packages': 'error', - 'import/no-unused-modules': ['error', { missingExports: true }], - 'import/order': ['error', { alphabetize: { order: 'asc' } }], - 'no-await-in-loop': 'error', - 'no-console': ['error', { allow: ['warn', 'error'] }], - 'no-debugger': 'error', - 'no-duplicate-case': 'error', - 'no-duplicate-imports': 'error', - 'no-else-return': 'error', - 'no-empty-function': 'error', - 'no-empty-pattern': 'error', - 'no-empty-static-block': 'error', - 'no-fallthrough': 'error', - 'no-floating-decimal': 'error', - 'no-global-assign': 'error', - 'no-implicit-coercion': 'error', - 'no-implicit-globals': 'error', - 'no-loop-func': 'error', - 'no-magic-numbers': [ - 'warn', - { detectObjects: false, enforceConst: true, ignore: [0, 1] } - ], - 'no-misleading-character-class': 'error', - 'no-nested-ternary': 'error', - 'no-new-native-nonconstructor': 'error', - 'no-new-wrappers': 'error', - 'no-param-reassign': 
'error', - 'no-restricted-imports': [ - 'error', - { - paths: [ - { - importNames: ['default'], - message: - 'Import only named React exports for tree-shaking.', - name: 'react' - }, - { - importNames: ['default'], - message: 'Import only the required Bun exports.', - name: 'bun' - } - ] - } - ], - 'no-return-await': 'error', - 'no-shadow': 'error', - 'no-undef': 'error', - 'no-unneeded-ternary': 'error', - 'no-unreachable': 'error', - 'no-useless-assignment': 'error', - 'no-useless-concat': 'error', - 'no-useless-return': 'error', - 'no-var': 'error', - 'prefer-arrow-callback': 'error', - 'prefer-const': 'error', - 'prefer-destructuring': [ - 'error', - { array: true, object: true }, - { enforceForRenamedProperties: false } - ], - 'prefer-template': 'error', - 'promise/always-return': 'warn', - 'promise/avoid-new': 'warn', - 'promise/catch-or-return': 'error', - 'promise/no-callback-in-promise': 'warn', - 'promise/no-nesting': 'warn', - 'promise/no-promise-in-callback': 'warn', - 'promise/no-return-wrap': 'error', - 'promise/param-names': 'error' - } - }, - { - files: ['example/**/*.{js,jsx,ts,tsx}'], - plugins: { - 'jsx-a11y': jsxA11yPlugin, - react: reactPlugin, - 'react-compiler': reactCompilerPlugin, - 'react-hooks': reactHooksPlugin - }, - rules: { - 'jsx-a11y/prefer-tag-over-role': 'error', - 'react-compiler/react-compiler': 'error', - 'react-hooks/exhaustive-deps': 'warn', - 'react-hooks/rules-of-hooks': 'error', - 'react/checked-requires-onchange-or-readonly': 'error', - 'react/destructuring-assignment': ['error', 'always'], - 'react/jsx-filename-extension': ['error', { extensions: ['.tsx'] }], - 'react/jsx-no-leaked-render': 'error', - 'react/jsx-no-target-blank': 'error', - 'react/jsx-no-useless-fragment': 'error', - 'react/jsx-pascal-case': ['error', { allowAllCaps: true }], - 'react/no-multi-comp': 'error', - 'react/no-unknown-property': 'off', - 'react/react-in-jsx-scope': 'off', - 'react/self-closing-comp': 'error' - }, - settings: { - react: { 
version: 'detect' } - } - }, - { - files: [ - 'example/server.ts', - 'example/indexes/*.tsx', - 'example/db/migrate.ts' - ], - rules: { - 'import/no-unused-modules': 'off' - } - }, - { - files: ['example/db/migrate.ts', 'example/utils/absoluteAuthConfig.ts'], - rules: { - 'no-console': 'off' - } - }, - { - files: ['eslint.config.mjs'], - rules: { - 'no-magic-numbers': 'off' - } - }, - { - files: ['eslint.config.mjs'], - rules: { - 'import/no-default-export': 'off' - } - }, - { - files: ['example/db/schema.ts'], - rules: { - 'absolute/explicit-object-types': 'off' + 'no-console': 'warn', + 'prefer-const': 'error' } } ]); + diff --git a/src/templates/configurations/tsconfig.example.json b/src/templates/configurations/tsconfig.example.json index da8ee02..c2d36f8 100644 --- a/src/templates/configurations/tsconfig.example.json +++ b/src/templates/configurations/tsconfig.example.json @@ -1,98 +1,21 @@ { "compilerOptions": { - /* Visit https://aka.ms/tsconfig to read more about this file */ - - /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - - /* Language and Environment */ - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. 
*/ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "resolveJsonModule": true, /* Enable importing .json files. */ - // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ - /* JavaScript Support */ - // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ - // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ - // "outDir": "./", /* Specify an output folder for all emitted files. */ - // "removeComments": true, /* Disable emitting comments. */ - // "noEmit": true, /* Disable emitting files from a compilation. */ - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ - // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. 
*/ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ - // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ - /* Interop Constraints */ - // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ - // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ - "esModuleInterop": true, // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. 
*/ "forceConsistentCasingInFileNames": true, // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - /* Ensure that casing is correct in imports. */ "jsx": "react-jsx", // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ - /* Specify what JSX code is generated. */ // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ - /* Modules */ - "module": "ESNext", // "rootDir": "./", /* Specify the root folder within your source files. */ - /* Specify what module code is generated. */ "moduleResolution": "bundler", // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - /* Specify how TypeScript looks up a file from a given module specifier. */ // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. 
*/ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - "noUncheckedIndexedAccess": true, // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - /* Add 'undefined' to a type when accessed using an index. */ // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": true /* Type Checking */, - /* Skip type checking all .d.ts files. */ "strict": true /* Visit https://aka.ms/tsconfig to read more about this file */, - /* Enable all strict type-checking options. 
*/ /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - /* Language and Environment */ + "allowJs": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "jsx": "react-jsx", + "lib": ["DOM", "DOM.Iterable", "ESNext"], + "module": "ESNext", + "moduleResolution": "bundler", + "noImplicitAny": true, + "noUncheckedIndexedAccess": true, + "outDir": "dist", + "skipLibCheck": true, + "strict": true, "target": "ESNext" - /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ /* Skip type checking all .d.ts files. 
*/ - } + }, + "exclude": ["node_modules", "dist", "build"], + "include": ["src/**/*"] } + diff --git a/src/utils/checkGitInstalled.ts b/src/utils/checkGitInstalled.ts new file mode 100644 index 0000000..70f326b --- /dev/null +++ b/src/utils/checkGitInstalled.ts @@ -0,0 +1,301 @@ +import os from 'os'; +import { env, platform } from 'process'; +import { confirm, spinner } from '@clack/prompts'; +import { $ } from 'bun'; +import { dim, yellow } from 'picocolors'; + +/** + * Official Git download URL for manual installation instructions + */ +const GIT_URL = 'https://git-scm.com/downloads'; + +/** + * Detects if the current environment is Windows Subsystem for Linux (WSL) + * by checking the WSL_DISTRO_NAME environment variable or kernel release string + */ +const isWSL = () => + env.WSL_DISTRO_NAME !== undefined || /microsoft/i.test(os.release()); + +/** + * Determines the host environment type for platform-specific installation logic + */ +let hostEnv: 'windows' | 'wsl' | 'linux' | 'darwin'; +if (platform === 'win32') { + hostEnv = 'windows'; +} else if (platform === 'darwin') { + hostEnv = 'darwin'; +} else if (isWSL()) { + hostEnv = 'wsl'; +} else { + hostEnv = 'linux'; +} + +/** + * Checks if a command exists in the system PATH + * Uses platform-specific commands (where on Windows, command -v on Unix) + * + * @param cmd - The command name to check + * @returns Promise - true if command exists, false otherwise + */ +const commandExists = async (cmd: string) => + (platform === 'win32' + ? 
await $`where ${cmd}`.quiet().nothrow() + : await $`command -v ${cmd}`.quiet().nothrow() + ).exitCode === 0; + +/** + * Ensures sudo access is available for the current user + * Prompts for password if needed and caches credentials + */ +const ensureSudo = async () => { + if ((await $`sudo -n true`.nothrow()).exitCode !== 0) { + console.log(`${dim('│')}\n${yellow('▲')} sudo password required`); + await $`sudo -v`; + } +}; + +/** + * Installs Git on Linux systems using apt package manager + * Attempts to install git and common dependencies + * + * @returns Promise - true if installation succeeded + */ +const aptInstall = async () => { + await ensureSudo(); + const spin = spinner(); + spin.start('Installing Git with apt'); + await $`sudo DEBIAN_FRONTEND=noninteractive apt-get update`.quiet(); + const res = + await $`sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends git` + .quiet() + .nothrow(); + + if (res.exitCode === 0) { + spin.stop('Git installed successfully'); + + return true; + } + + spin.stop('apt install failed'); + + return false; +}; + +/** + * Installs Git on Linux systems using yum package manager (RHEL/CentOS/Fedora) + * + * @returns Promise - true if installation succeeded + */ +const yumInstall = async () => { + await ensureSudo(); + const spin = spinner(); + spin.start('Installing Git with yum'); + const res = await $`sudo yum install -y git`.quiet().nothrow(); + + if (res.exitCode === 0) { + spin.stop('Git installed successfully'); + + return true; + } + + spin.stop('yum install failed'); + + return false; +}; + +/** + * Installs Git using dnf package manager (newer Fedora) + * + * @returns Promise - true if installation succeeded + */ +const dnfInstall = async () => { + await ensureSudo(); + const spin = spinner(); + spin.start('Installing Git with dnf'); + const res = await $`sudo dnf install -y git`.quiet().nothrow(); + + if (res.exitCode === 0) { + spin.stop('Git installed successfully'); + + return true; + } + + 
spin.stop('dnf install failed'); + + return false; +}; + +/** + * Installs Git using pacman package manager (Arch Linux) + * + * @returns Promise - true if installation succeeded + */ +const pacmanInstall = async () => { + await ensureSudo(); + const spin = spinner(); + spin.start('Installing Git with pacman'); + const res = await $`sudo pacman -S --noconfirm git`.quiet().nothrow(); + + if (res.exitCode === 0) { + spin.stop('Git installed successfully'); + + return true; + } + + spin.stop('pacman install failed'); + + return false; +}; + +/** + * Installs Git on macOS using Homebrew package manager + * Assumes Homebrew is already installed (common on macOS) + * + * @returns Promise - true if installation succeeded + */ +const brewInstall = async () => { + const spin = spinner(); + spin.start('Installing Git with Homebrew'); + const res = await $`brew install git`.quiet().nothrow(); + + if (res.exitCode === 0) { + spin.stop('Git installed successfully'); + + return true; + } + + spin.stop('Homebrew install failed'); + + return false; +}; + +/** + * Attempts to install Git on Windows + * Directs user to official download page since automated installation + * requires more complex setup (winget, chocolatey, or manual installer) + * + * @returns Promise - always returns false (manual installation required) + */ +const installWindows = async () => { + console.log( + `${dim('│')}\n${yellow('▲')} Please download Git for Windows from: ${GIT_URL}` + ); + console.log(`${dim('│')} Recommended: Enable "Git from the command line" during installation`); + + return false; +}; + +/** + * Attempts to install Git on WSL systems + * Uses the Linux distribution's package manager + * + * @returns Promise - true if installation succeeded + */ +const installWSL = async () => { + // Try apt first (Ubuntu/Debian-based WSL distros are most common) + if (await commandExists('apt-get')) { + return aptInstall(); + } + + // Try yum for RHEL-based distros + if (await commandExists('yum')) { + 
return yumInstall(); + } + + console.log( + `${dim('│')}\n${yellow('▲')} Could not detect package manager. Please install git manually.` + ); + + return false; +}; + +/** + * Attempts to install Git on Linux systems + * Detects and uses the appropriate package manager + * + * @returns Promise - true if installation succeeded + */ +const installLinux = async () => { + // Try apt first (Debian/Ubuntu) + if (await commandExists('apt-get')) { + return aptInstall(); + } + + // Try yum (RHEL/CentOS/Fedora) + if (await commandExists('yum')) { + return yumInstall(); + } + + // Try dnf (newer Fedora) + if (await commandExists('dnf')) { + return dnfInstall(); + } + + // Try pacman (Arch Linux) + if (await commandExists('pacman')) { + return pacmanInstall(); + } + + console.log( + `${dim('│')}\n${yellow('▲')} Could not detect package manager. Please install git manually from: ${GIT_URL}` + ); + + return false; +}; + +/** + * Checks if Git is installed and accessible + * + * @returns Promise - true if git is installed + */ +export const hasGit = async () => + (await $`git --version`.quiet().nothrow()).exitCode === 0; + +/** + * Checks if Git is installed, and if not, prompts user to install it + * Attempts automatic installation on supported platforms, or directs + * user to manual installation instructions + * + * @returns Promise - true if git is available after this function completes + */ +export const checkGitInstalled = async () => { + // Git is already installed + if (await hasGit()) return true; + + // Prompt user to install Git + const proceed = await confirm({ + initialValue: true, + message: 'Git is required for project initialization. Install it now?' 
+ }); + + if (!proceed) return false; + + // Attempt platform-specific installation + switch (hostEnv) { + case 'windows': + await installWindows(); + break; + case 'darwin': + if (await commandExists('brew')) { + if (await brewInstall()) return hasGit(); + } + console.log( + `${dim('│')}\n${yellow('▲')} Please install Git from: ${GIT_URL}` + ); + break; + case 'wsl': + if (await installWSL()) return hasGit(); + break; + case 'linux': + if (await installLinux()) return hasGit(); + break; + } + + // Installation failed or not automated - direct user to manual installation + console.log( + `${dim('│')}\n${yellow('▲')} Couldn't install Git automatically. Please download it from: ${GIT_URL}` + ); + console.log(`${dim('│')} After installation, restart your terminal and try again.`); + + return hasGit(); +}; diff --git a/src/utils/parseCommandLineOptions.ts b/src/utils/parseCommandLineOptions.ts index cf1d913..c1468e0 100644 --- a/src/utils/parseCommandLineOptions.ts +++ b/src/utils/parseCommandLineOptions.ts @@ -301,7 +301,16 @@ export const parseCommandLineOptions = () => { values.env = validEnv.length ? 
validEnv : undefined; - const argumentConfiguration: ArgumentConfiguration = { + // Non-interactive defaults when --skip is provided + if (values.skip) { + if (codeQualityTool === undefined) codeQualityTool = 'eslint+prettier'; + if (values.tailwind === undefined) (values as Record<string, unknown>).tailwind = false; + if (directoryConfig === undefined) (values as Record<string, unknown>).directory = 'default'; + if (values.git === undefined) (values as Record<string, unknown>).git = false; + if (values.install === undefined) (values as Record<string, unknown>).install = false; + if (databaseHost === undefined) databaseHost = 'none'; + if (values['html-scripts'] === undefined) (values as Record<string, unknown>)['html-scripts'] = false; + } const argumentConfiguration: ArgumentConfiguration = { assetsDirectory: values.assets, authProvider, buildDirectory: values.build, @@ -309,7 +318,7 @@ databaseDirectory, databaseEngine, databaseHost, - directoryConfig, + directoryConfig: (values.directory as 'default' | 'custom' | undefined) ?? directoryConfig, frontendDirectories, frontends: selectedFrontends.length ? selectedFrontends : undefined, initializeGitNow: values.git, @@ -319,7 +328,7 @@ projectName, tailwind, useHTMLScripts: values['html-scripts'], - useTailwind + useTailwind: values.tailwind ??
useTailwind }; return { diff --git a/tests/behavioural/auth-matrix.test.ts b/tests/behavioural/auth-matrix.test.ts new file mode 100644 index 0000000..2aa50eb --- /dev/null +++ b/tests/behavioural/auth-matrix.test.ts @@ -0,0 +1,61 @@ +import { describe, it } from 'bun:test'; + +import { + runAuthScenario, + type BehaviouralScenario +} from './utils'; + +const AUTH_SCENARIOS: readonly BehaviouralScenario[] = [ + { + label: 'React + SQLite + AbsoluteAuth', + options: { + auth: 'absoluteAuth', + database: 'sqlite', + databaseHost: 'none', + frontend: 'react' + } as const + }, + { + label: 'React + SQLite + AbsoluteAuth (Drizzle)', + options: { + auth: 'absoluteAuth', + database: 'sqlite', + databaseHost: 'none', + frontend: 'react', + orm: 'drizzle' + } as const + }, + { + label: 'Vue + SQLite + AbsoluteAuth', + options: { + auth: 'absoluteAuth', + database: 'sqlite', + databaseHost: 'none', + frontend: 'vue' + } as const + }, + { + label: 'Svelte + SQLite + AbsoluteAuth', + options: { + auth: 'absoluteAuth', + database: 'sqlite', + databaseHost: 'none', + frontend: 'svelte' + } as const + } +] as const; + +describe('AbsoluteAuth behavioural matrix', () => { + const TEST_TIMEOUT_MS = 120_000; + + AUTH_SCENARIOS.forEach((scenario) => { + it( + `${scenario.label} exposes auth endpoints`, + async () => { + await runAuthScenario(scenario); + }, + { timeout: TEST_TIMEOUT_MS } + ); + }); +}); + diff --git a/tests/behavioural/cloud-matrix.test.ts b/tests/behavioural/cloud-matrix.test.ts new file mode 100644 index 0000000..985caea --- /dev/null +++ b/tests/behavioural/cloud-matrix.test.ts @@ -0,0 +1,91 @@ +import { describe, it } from 'bun:test'; + +import { + runCountHistoryScenario, + type BehaviouralScenario +} from './utils'; + +type CloudScenarioDefinition = { + label: string; + options: BehaviouralScenario['options']; + requiredEnv: Array<{ source: string; target: string }>; +}; + +const CLOUD_SCENARIOS: readonly CloudScenarioDefinition[] = [ + { + label: 'React + 
PostgreSQL (Neon) + Drizzle', + options: { + auth: 'none', + database: 'postgresql', + databaseHost: 'neon', + frontend: 'react', + orm: 'drizzle' + } as const, + requiredEnv: [ + { source: 'NEON_DATABASE_URL', target: 'DATABASE_URL' } + ] + }, + { + label: 'React + SQLite (Turso) + Drizzle', + options: { + auth: 'none', + database: 'sqlite', + databaseHost: 'turso', + frontend: 'react', + orm: 'drizzle' + } as const, + requiredEnv: [ + { source: 'TURSO_DB_URL', target: 'DATABASE_URL' } + ] + } +] as const; + +const resolveScenario = ( + definition: CloudScenarioDefinition +): BehaviouralScenario | null => { + const missing = definition.requiredEnv.filter( + ({ source }) => !process.env[source] + ); + + if (missing.length > 0) { + const missingList = missing.map(({ source }) => source).join(', '); + console.warn( + `Skipping behavioural flow (${definition.label}): missing required environment variables (${missingList}).` + ); + + return null; + } + + const env: Record = {}; + definition.requiredEnv.forEach(({ source, target }) => { + env[target] = process.env[source]; + }); + + return { + label: definition.label, + options: { + ...definition.options, + env + } + }; +}; + +describe('Cloud database behavioural matrix', () => { + const TEST_TIMEOUT_MS = 180_000; + + CLOUD_SCENARIOS.forEach((definition) => { + it( + `${definition.label} creates and reads count history via REST API`, + async () => { + const scenario = resolveScenario(definition); + if (!scenario) { + return; + } + + await runCountHistoryScenario(scenario); + }, + { timeout: TEST_TIMEOUT_MS } + ); + }); +}); + diff --git a/tests/behavioural/database-hooks.ts b/tests/behavioural/database-hooks.ts new file mode 100644 index 0000000..cee6b8f --- /dev/null +++ b/tests/behavioural/database-hooks.ts @@ -0,0 +1,240 @@ +import { runCommand } from '../harness'; +import type { ScenarioHooks } from './utils'; + +export const createPostgresHooks = (label: string): ScenarioHooks => { + let started = false; + + return 
{ + afterServerStop: async (projectPath) => { + if (!started) return; + + await runCommand(['bun', 'db:down'], { + cwd: projectPath, + label: `${label} db:down` + }).catch(() => undefined); + }, + beforeServerStart: async (projectPath) => { + await runCommand(['bun', 'db:up'], { + cwd: projectPath, + label: `${label} db:up` + }); + + await runCommand( + [ + 'docker', + 'compose', + '-p', + 'postgresql', + '-f', + 'db/docker-compose.db.yml', + 'exec', + 'db', + 'bash', + '-lc', + 'until pg_isready -U user -h 127.0.0.1 --quiet; do sleep 1; done' + ], + { + cwd: projectPath, + label: `${label} db:wait` + } + ); + + started = true; + } + }; +}; + +export const createMysqlHooks = (label: string): ScenarioHooks => { + let started = false; + + return { + afterServerStop: async (projectPath) => { + if (!started) return; + + await runCommand(['bun', 'db:down'], { + cwd: projectPath, + label: `${label} db:down` + }).catch(() => undefined); + }, + beforeServerStart: async (projectPath) => { + await runCommand(['bun', 'db:up'], { + cwd: projectPath, + label: `${label} db:up` + }); + + await runCommand( + [ + 'docker', + 'compose', + '-p', + 'mysql', + '-f', + 'db/docker-compose.db.yml', + 'exec', + '-e', + 'MYSQL_PWD=userpassword', + 'db', + 'bash', + '-lc', + 'until mysqladmin ping -h127.0.0.1 --silent; do sleep 1; done' + ], + { + cwd: projectPath, + label: `${label} db:wait` + } + ); + + started = true; + } + }; +}; + +export const createMongoHooks = (label: string): ScenarioHooks => { + let started = false; + + const runCommandOrThrow = async ( + command: string[], + options: Parameters[1] + ) => { + const result = await runCommand(command, options); + + if (result.exitCode !== 0) { + const stdout = result.stdout.length > 0 ? `\nstdout:\n${result.stdout}` : ''; + const stderr = result.stderr.length > 0 ? `\nstderr:\n${result.stderr}` : ''; + const labelSuffix = options?.label ? 
` (${options.label})` : ''; + + throw new Error( + `Command${labelSuffix} failed with exit code ${result.exitCode}.${stdout}${stderr}` + ); + } + }; + + return { + afterServerStop: async (projectPath) => { + if (!started) return; + + await runCommand(['bun', 'db:down'], { + cwd: projectPath, + label: `${label} db:down`, + timeoutMs: 120_000 + }).catch(() => undefined); + }, + beforeServerStart: async (projectPath) => { + await runCommand( + [ + 'docker', + 'compose', + '-p', + 'mongodb', + '-f', + 'db/docker-compose.db.yml', + 'down', + '-v' + ], + { + cwd: projectPath, + label: `${label} db:reset`, + timeoutMs: 120_000 + } + ).catch(() => undefined); + + await runCommandOrThrow(['bun', 'db:up'], { + cwd: projectPath, + label: `${label} db:up` + }); + + const containerMongoUrl = 'mongodb://user:password@127.0.0.1:27017'; + const waitArgs = [ + 'docker', + 'compose', + '-p', + 'mongodb', + '-f', + 'db/docker-compose.db.yml', + 'exec', + '-e', + 'MONGODB_PASSWORD=password', + '-e', + 'MONGODB_AUTH_DB=admin', + '-e', + `MONGODB_URL=${containerMongoUrl}`, + '-e', + 'MONGODB_USER=user', + 'db', + 'bash', + '-lc', + 'until mongosh "$MONGODB_URL" --username "$MONGODB_USER" --password "$MONGODB_PASSWORD" --authenticationDatabase "$MONGODB_AUTH_DB" --quiet --eval "db.runCommand({ ping: 1 })" >/dev/null 2>&1; do sleep 1; done' + ]; + + const waitEnv = { + MONGODB_AUTH_DB: 'admin', + MONGODB_PASSWORD: 'password', + MONGODB_URL: containerMongoUrl, + MONGODB_USER: 'user' + } as const; + + await runCommandOrThrow(waitArgs, { + cwd: projectPath, + env: waitEnv, + label: `${label} db:wait` + }); + + const ensureUserScript = ` + (function ensureUser() { + const adminDb = db.getSiblingDB("admin"); + try { + const existingUser = adminDb.getUser("user"); + if (!existingUser) { + adminDb.createUser({ user: "user", pwd: "password", roles: [{ role: "root", db: "admin" }] }); + } + return; + } catch (authError) { + try { + adminDb.auth("user", "password"); + const existingUser = 
adminDb.getUser("user"); + if (!existingUser) { + adminDb.createUser({ user: "user", pwd: "password", roles: [{ role: "root", db: "admin" }] }); + } + } catch (createError) { + printjson(createError); + throw createError; + } + } + })(); + `; + + const ensureUserEncoded = Buffer.from(ensureUserScript.trim(), 'utf8').toString('base64'); + + const ensureUserArgs = [ + 'docker', + 'compose', + '-p', + 'mongodb', + '-f', + 'db/docker-compose.db.yml', + 'exec', + '-e', + 'MONGODB_AUTH_DB=admin', + '-e', + 'MONGODB_PASSWORD=password', + '-e', + `MONGODB_URL=${containerMongoUrl}`, + '-e', + 'MONGODB_USER=user', + 'db', + 'bash', + '-lc', + `echo ${ensureUserEncoded} | base64 --decode | mongosh "$MONGODB_URL/$MONGODB_AUTH_DB" --quiet --file /dev/stdin` + ]; + + await runCommandOrThrow(ensureUserArgs, { + cwd: projectPath, + env: waitEnv, + label: `${label} ensure-user` + }); + + started = true; + } + }; +}; + diff --git a/tests/behavioural/database-matrix-definitions.ts b/tests/behavioural/database-matrix-definitions.ts new file mode 100644 index 0000000..73b8017 --- /dev/null +++ b/tests/behavioural/database-matrix-definitions.ts @@ -0,0 +1,63 @@ +import { + createMongoHooks, + createMysqlHooks, + createPostgresHooks +} from './database-hooks'; +import type { DatabaseMatrixDefinition } from './database-matrix'; + +const POSTGRES_ENV = { + DATABASE_URL: 'postgresql://user:password@127.0.0.1:5433/database', + PGDATABASE: 'database', + PGHOST: '127.0.0.1', + PGPASSWORD: 'password', + PGPORT: '5433', + PGUSER: 'user' +} as const; + +export const DATABASE_MATRIX_DEFINITIONS: readonly DatabaseMatrixDefinition[] = [ + { + baseOptions: { + databaseHost: 'none', + env: { ...POSTGRES_ENV } + }, createHooks: createPostgresHooks, database: 'postgresql', name: 'PostgreSQL', scenarios: [ + { frontend: 'react' }, + { frontend: 'react', orm: 'drizzle' }, + { frontend: 'vue' }, + { frontend: 'svelte' }, + { frontend: 'html' }, + { frontend: 'htmx' } + ], suiteLabel: 'PostgreSQL 
behavioural matrix' + }, + { + baseOptions: { + databaseHost: 'none' + }, createHooks: createMysqlHooks, database: 'mysql', name: 'MySQL', scenarios: [ + { frontend: 'react' }, + { frontend: 'react', orm: 'drizzle' }, + { frontend: 'vue' }, + { frontend: 'svelte' }, + { frontend: 'html' }, + { frontend: 'htmx' } + ], suiteLabel: 'MySQL behavioural matrix' + }, + { + createHooks: createMongoHooks, database: 'mongodb', name: 'MongoDB', scenarios: [ + { frontend: 'react' }, + { frontend: 'vue' }, + { frontend: 'svelte' }, + { frontend: 'html' }, + { frontend: 'htmx' } + ], suiteLabel: 'MongoDB behavioural matrix' + }, + { + database: 'sqlite', name: 'SQLite', scenarios: [ + { frontend: 'react' }, + { frontend: 'react', orm: 'drizzle' }, + { frontend: 'vue' }, + { frontend: 'svelte' }, + { frontend: 'html' }, + { frontend: 'htmx' } + ], suiteLabel: 'SQLite behavioural matrix' + } +] as const; + diff --git a/tests/behavioural/database-matrix.test.ts b/tests/behavioural/database-matrix.test.ts new file mode 100644 index 0000000..e4084c1 --- /dev/null +++ b/tests/behavioural/database-matrix.test.ts @@ -0,0 +1,5 @@ +import { describeDatabaseMatrix } from './database-matrix'; +import { DATABASE_MATRIX_DEFINITIONS } from './database-matrix-definitions'; + +DATABASE_MATRIX_DEFINITIONS.forEach(describeDatabaseMatrix); + diff --git a/tests/behavioural/database-matrix.ts b/tests/behavioural/database-matrix.ts new file mode 100644 index 0000000..e1e5129 --- /dev/null +++ b/tests/behavioural/database-matrix.ts @@ -0,0 +1,100 @@ +import { describe, it } from 'bun:test'; + +import { runCountHistoryScenario, type BehaviouralScenario, type ScenarioHooks } from './utils'; + +type Frontend = NonNullable; +type Orm = NonNullable; +type DatabaseEngine = NonNullable; + +type ScenarioConfig = { + frontend: Frontend; + orm?: Orm; + label?: string; + options?: Partial>; + labelSuffix?: string; +}; + +export type DatabaseMatrixDefinition = { + database: DatabaseEngine; + name: string; + 
suiteLabel: string; + baseOptions?: Partial>; + scenarios: readonly ScenarioConfig[]; + createHooks?: (label: string) => ScenarioHooks; + timeoutMs?: number; +}; + +const capitalize = (value: string) => + value.length === 0 ? value : value[0].toUpperCase() + value.slice(1); + +const formatFrontendName = (frontend: Frontend) => { + if (frontend === 'htmx') return 'HTMX'; + if (frontend === 'html') return 'HTML'; + + return capitalize(frontend); +}; + +const DEFAULT_TEST_TIMEOUT_MS = 120_000; + +const buildScenario = ( + definition: DatabaseMatrixDefinition, + config: ScenarioConfig +): BehaviouralScenario => { + const { env: baseEnv = {}, ...baseRest } = definition.baseOptions ?? {}; + const { env: entryEnv = {}, ...entryRest } = config.options ?? {}; + + const mergedEnv = { ...baseEnv, ...entryEnv } as Record; + const hasEnv = Object.keys(mergedEnv).length > 0; + + const options: BehaviouralScenario['options'] = { + ...baseRest, + ...entryRest, + database: definition.database, + frontend: config.frontend + }; + + if (config.orm) { + options.orm = config.orm; + } + + if (hasEnv) { + options.env = mergedEnv; + } + + const drizzleSuffix = config.orm === 'drizzle' ? ' (Drizzle)' : ''; + const extraSuffix = config.labelSuffix ? ` ${config.labelSuffix}` : ''; + const defaultLabel = `${formatFrontendName(config.frontend)} + ${ + definition.name + }${drizzleSuffix}${extraSuffix}`; + + return { + label: config.label ?? defaultLabel, + options + }; +}; + +export const describeDatabaseMatrix = (definition: DatabaseMatrixDefinition) => { + const filter = process.env.ABSOLUTE_BEHAVIOURAL_DATABASE_FILTER?.toLowerCase(); + if (filter && filter !== definition.name.toLowerCase()) { + return; + } + + const scenarios = definition.scenarios.map((scenario) => + buildScenario(definition, scenario) + ); + const timeoutMs = definition.timeoutMs ?? 
DEFAULT_TEST_TIMEOUT_MS; + + describe(definition.suiteLabel, () => { + scenarios.forEach((scenario) => { + it( + `${scenario.label} creates and reads count history via REST API`, + async () => { + const hooks = definition.createHooks?.(scenario.label); + await runCountHistoryScenario(scenario, hooks); + }, + { timeout: timeoutMs } + ); + }); + }); +}; + diff --git a/tests/behavioural/utils.ts b/tests/behavioural/utils.ts new file mode 100644 index 0000000..a64127c --- /dev/null +++ b/tests/behavioural/utils.ts @@ -0,0 +1,595 @@ +import { readFile } from 'node:fs/promises'; +import { join } from 'node:path'; + +import { + cleanupProject, + installDependencies, + runCommand, + scaffoldProject, + startServer, + type RunningServer +} from '../harness'; + +export type BehaviouralScenario = { + label: string; + options: Parameters[0]; +}; + +export type ScenarioHooks = { + beforeServerStart?: ( + projectPath: string, + scenario: BehaviouralScenario + ) => Promise; + afterServerStop?: ( + projectPath: string, + scenario: BehaviouralScenario + ) => Promise; +}; + +const DEFAULT_SERVER_PORT = 3000; +export const COUNT_ENDPOINT = `http://localhost:${DEFAULT_SERVER_PORT}/count`; +export const ROOT_READY_URL = `http://localhost:${DEFAULT_SERVER_PORT}/`; +export const HTTP_BAD_REQUEST = 400; +export const HTTP_OK = 200; +export const HTTP_UNAUTHORIZED = 401; +export const TEST_COUNT = 7; +export const AUTH_PROVIDERS_ENDPOINT = + 'http://localhost:3000/auth/providers'; +export const AUTH_SESSION_ENDPOINT = 'http://localhost:3000/auth/session'; + +export const installDependenciesOrThrow = async ( + projectPath: string, + scenario: BehaviouralScenario +) => { + await installDependencies(projectPath, scenario.options).catch((error) => { + cleanupProject(projectPath); + throw error; + }); + + if (process.env.ABSOLUTE_TEST_VERBOSE !== '1') { + return; + } + + const envPath = join(projectPath, '.env'); + const contents = await readFile(envPath, 'utf8').catch(() => null); + + if 
(!contents) { + console.warn(`No .env file found for ${scenario.label}`); + + return; + } + + console.log(`Loaded env for ${scenario.label}:\n${contents}`); +}; + +const ensureStatus = async ( + response: Response, + expected: number, + label: string +) => { + if (response.status === expected) return; + + let body = ''; + + try { + body = await response.text(); + } catch { + // Ignore body parsing errors for diagnostics. + } + + const details = body.length > 0 ? ` (${body})` : ''; + + throw new Error(`Expected ${expected} from ${label}${details}`); +}; + +const extractUid = (payload: Record) => { + const uid = payload.uid as number | undefined; + + if (typeof uid !== 'number' || uid <= 0) { + console.error('createCount payload', payload); + throw new Error('API did not return a numeric uid'); + } + + return uid; +}; + +const ensureCountMatch = (payload: Record) => { + if (payload.count !== TEST_COUNT) + throw new Error( + `API returned count ${String(payload.count)} instead of ${TEST_COUNT}` + ); +}; + +const assertHistoryPayload = ( + payload: Record, + uid: number +) => { + if (payload.uid !== uid) throw new Error('History UID mismatch'); + if (payload.count !== TEST_COUNT) throw new Error('History count mismatch'); +}; + +const DATABASE_ENV_KEYS: Record = { + cockroachdb: [ + 'DATABASE_URL', + 'PGDATABASE', + 'PGHOST', + 'PGPASSWORD', + 'PGPORT', + 'PGUSER', + 'PGSSLMODE' + ], + gel: ['DATABASE_URL'], + mariadb: ['DATABASE_URL', 'MYSQL_HOST', 'MYSQL_PORT', 'MYSQL_USER', 'MYSQL_PASSWORD'], + mongodb: [ + 'DATABASE_URL', + 'MONGODB_URL', + 'MONGODB_USER', + 'MONGODB_PASSWORD', + 'MONGODB_AUTH_DB' + ], + mssql: ['DATABASE_URL'], + mysql: ['DATABASE_URL', 'MYSQL_HOST', 'MYSQL_PORT', 'MYSQL_USER', 'MYSQL_PASSWORD'], + postgresql: [ + 'DATABASE_URL', + 'PGDATABASE', + 'PGHOST', + 'PGPASSWORD', + 'PGPORT', + 'PGUSER', + 'PGSSLMODE' + ], + singlestore: ['DATABASE_URL', 'MYSQL_HOST', 'MYSQL_PORT', 'MYSQL_USER', 'MYSQL_PASSWORD'], + sqlite: ['DATABASE_URL'] +} as const; + 
+const ALL_DATABASE_ENV_KEYS = new Set( + Object.values(DATABASE_ENV_KEYS).flatMap((keys) => keys) +); + +const resolveSuiteKey = (scenario: BehaviouralScenario) => { + const {database} = scenario.options; + + if (database && database !== 'none') { + return database; + } + + return 'default'; +}; + +const buildChildEnv = ( + scenario: BehaviouralScenario, + overrides: Record +) => { + const suiteKey = resolveSuiteKey(scenario); + const allowedKeys = new Set(DATABASE_ENV_KEYS[suiteKey] ?? ['DATABASE_URL']); + const env: Record = { ...process.env }; + + ALL_DATABASE_ENV_KEYS.forEach((key) => { + if (!allowedKeys.has(key)) { + delete env[key]; + } + }); + + allowedKeys.forEach((key) => { + if (!(key in overrides)) { + delete env[key]; + } + }); + + Object.entries(overrides).forEach(([key, value]) => { + if (value === undefined) { + delete env[key]; + } else { + env[key] = value; + } + }); + + env.ABSOLUTE_BEHAVIOURAL_SUITE = suiteKey; + + if (process.env.ABSOLUTE_TEST_VERBOSE === '1') { + const snapshot: Record = {}; + const interestingKeys = [ + 'DATABASE_URL', + 'MONGODB_URL', + 'MONGODB_USER', + 'MONGODB_PASSWORD', + 'MONGODB_AUTH_DB', + 'PGDATABASE', + 'PGHOST', + 'PGPASSWORD', + 'PGPORT', + 'PGUSER', + 'PGSSLMODE', + 'MYSQL_HOST', + 'MYSQL_PORT', + 'MYSQL_USER', + 'MYSQL_PASSWORD' + ]; + + interestingKeys.forEach((key) => { + if (key in env) { + snapshot[key] = env[key]; + } + }); + + console.log( + `Child env for ${scenario.label} (${suiteKey}): ${JSON.stringify(snapshot)}` + ); + } + + return env; +}; + +const ensurePortAvailable = async (port: number, scenarioLabel: string) => { + if (process.platform === 'win32') { + return; + } + + const lookup = await runCommand(['lsof', '-ti', `tcp:${port}`], { + label: `${scenarioLabel} port scan` + }).catch(() => null); + + if (!lookup || lookup.exitCode !== 0 || lookup.stdout.length === 0) { + return; + } + + const pids = lookup.stdout + .split('\n') + .map((pid) => pid.trim()) + .filter((pid) => pid.length > 0); + + if 
(pids.length === 0) { + return; + } + + await Promise.all( + pids.map((pid) => + runCommand(['kill', '-9', pid], { + label: `${scenarioLabel} kill ${pid}` + }).catch(() => undefined) + ) + ); + + if (process.env.ABSOLUTE_TEST_VERBOSE === '1') { + console.log(`Freed port ${port} by terminating processes: ${pids.join(', ')}`); + } +}; + +export const runCountHistoryScenario = async ( + scenario: BehaviouralScenario, + hooks: ScenarioHooks = {} +) => { + const scaffoldResult = await scaffoldProject(scenario.options).catch((error) => { + const { message } = error as Error; + + if ( + message.includes('docker compose') || + message.includes('Operation not permitted') + ) { + console.warn( + `Skipping behavioural flow (${scenario.label}): Docker daemon not available.` + ); + + return null; + } + + throw error; + }); + + if (!scaffoldResult) { + return; + } + + const { projectPath } = scaffoldResult; + + await installDependenciesOrThrow(projectPath, scenario); + + const scenarioEnvOverrides: Record = { + ...(scenario.options.env ?? {}) + }; + + try { + const envPath = join(projectPath, '.env'); + const contents = await readFile(envPath, 'utf8'); + contents + .split('\n') + .map((line) => line.trim()) + .filter((line) => line.length > 0 && !line.startsWith('#')) + .forEach((line) => { + const equalsIndex = line.indexOf('='); + if (equalsIndex <= 0) return; + + const key = line.slice(0, equalsIndex).trim(); + const value = line.slice(equalsIndex + 1); + + if (key.length === 0 || value === undefined) return; + if (!(key in scenarioEnvOverrides)) { + scenarioEnvOverrides[key] = value; + } + }); + } catch { + // Ignore missing .env files; rely on existing overrides. 
+ } + + const RESET_ENV_KEYS = [ + 'DATABASE_URL', + 'MONGODB_URL', + 'MONGODB_USER', + 'MONGODB_PASSWORD', + 'MONGODB_AUTH_DB', + 'PGDATABASE', + 'PGHOST', + 'PGPASSWORD', + 'PGPORT', + 'PGUSER', + 'MYSQL_HOST', + 'MYSQL_PORT', + 'MYSQL_USER', + 'MYSQL_PASSWORD' + ] as const; + + RESET_ENV_KEYS.forEach((key) => { + if (key in scenarioEnvOverrides) { + return; + } + + if (process.env[key] !== undefined) { + scenarioEnvOverrides[key] = undefined; + } + }); + + if (process.env.ABSOLUTE_TEST_VERBOSE === '1') { + console.log( + `Effective env for ${scenario.label}: ${JSON.stringify(scenarioEnvOverrides)}` + ); + } + + const originalEnv: Record = {}; + + const applyScenarioEnv = () => { + const suiteKey = resolveSuiteKey(scenario); + const allowedKeys = new Set(DATABASE_ENV_KEYS[suiteKey] ?? ['DATABASE_URL']); + originalEnv.ABSOLUTE_BEHAVIOURAL_SUITE = process.env.ABSOLUTE_BEHAVIOURAL_SUITE; + process.env.ABSOLUTE_BEHAVIOURAL_SUITE = suiteKey; + + ALL_DATABASE_ENV_KEYS.forEach((key) => { + if (!allowedKeys.has(key)) { + originalEnv[key] = process.env[key]; + delete process.env[key]; + } + }); + + allowedKeys.forEach((key) => { + if (!(key in scenarioEnvOverrides)) { + originalEnv[key] = process.env[key]; + delete process.env[key]; + } + }); + + Object.entries(scenarioEnvOverrides).forEach(([key, value]) => { + originalEnv[key] = process.env[key]; + + if (value === undefined) { + delete process.env[key]; + } else { + process.env[key] = value; + } + }); + }; + + const restoreEnv = () => { + if (originalEnv.ABSOLUTE_BEHAVIOURAL_SUITE === undefined) { + delete process.env.ABSOLUTE_BEHAVIOURAL_SUITE; + } else { + process.env.ABSOLUTE_BEHAVIOURAL_SUITE = originalEnv.ABSOLUTE_BEHAVIOURAL_SUITE; + } + + Object.entries(originalEnv).forEach(([key, value]) => { + if (key === 'ABSOLUTE_BEHAVIOURAL_SUITE') { + return; + } + + if (value === undefined) { + delete process.env[key]; + } else { + process.env[key] = value; + } + }); + }; + + applyScenarioEnv(); + + const stopServer = 
async (serverInstance: RunningServer | undefined) => { + if (!serverInstance) { + return; + } + + try { + await serverInstance.stop(); + } catch { + // Ignore shutdown errors to surface the original failure, if any. + } + }; + + const runAfterHook = async () => { + if (!hooks.afterServerStop) { + return; + } + + try { + await hooks.afterServerStop(projectPath, scenario); + } catch { + // Ignore teardown errors; cleanup continues regardless. + } + }; + + const runBeforeHook = async () => { + if (!hooks.beforeServerStart) { + return; + } + + if (scenario.options.env) { + console.log( + `Applying scenario env overrides for ${scenario.label}: ${JSON.stringify( + scenario.options.env + )}` + ); + } + + await hooks.beforeServerStart(projectPath, scenario); + }; + + let server: RunningServer | undefined; + + const finalize = async () => { + await stopServer(server); + await runAfterHook(); + if (process.env.ABSOLUTE_TEST_KEEP !== '1') { + cleanupProject(projectPath); + } + restoreEnv(); + }; + + await ensurePortAvailable(DEFAULT_SERVER_PORT, scenario.label); + await runBeforeHook(); + try { + server = await startServer(projectPath, { + command: ['bun', 'run', 'src/backend/server.ts'], + env: buildChildEnv(scenario, scenarioEnvOverrides), + readyTimeoutMs: 30_000, + readyUrl: ROOT_READY_URL + }); + + const createResponse = await fetch(COUNT_ENDPOINT, { + body: JSON.stringify({ count: TEST_COUNT }), + headers: { + 'Content-Type': 'application/json' + }, + method: 'POST' + }); + + await ensureStatus(createResponse, HTTP_OK, 'POST /count'); + + const created = (await createResponse.json()) as Record; + const uid = extractUid(created); + ensureCountMatch(created); + + const readResponse = await fetch(`${COUNT_ENDPOINT}/${uid}`); + await ensureStatus(readResponse, HTTP_OK, 'GET /count/:uid'); + + const history = (await readResponse.json()) as Record; + assertHistoryPayload(history, uid); + } finally { + await finalize(); + } +}; + +const ensureJson = async (response: Response) 
=> { + try { + return (await response.json()) as unknown; + } catch (error) { + throw new Error( + `Failed to parse JSON from ${response.url}: ${(error as Error).message}` + ); + } +}; + +export const runAuthScenario = async ( + scenario: BehaviouralScenario, + hooks: ScenarioHooks = {} +) => { + const scaffoldResult = await scaffoldProject(scenario.options).catch((error) => { + const { message } = error as Error; + + if ( + message.includes('docker compose') || + message.includes('Operation not permitted') + ) { + console.warn( + `Skipping behavioural flow (${scenario.label}): Docker daemon not available.` + ); + + return null; + } + + throw error; + }); + + if (!scaffoldResult) { + return; + } + + const { projectPath } = scaffoldResult; + + await installDependenciesOrThrow(projectPath, scenario); + + const stopServer = async (serverInstance: RunningServer | undefined) => { + if (!serverInstance) { + return; + } + + try { + await serverInstance.stop(); + } catch { + // Ignore shutdown errors to surface the original failure, if any. + } + }; + + const runAfterHook = async () => { + if (!hooks.afterServerStop) { + return; + } + + try { + await hooks.afterServerStop(projectPath, scenario); + } catch { + // Ignore teardown errors; cleanup continues regardless. 
+ } + }; + + const runBeforeHook = async () => { + if (!hooks.beforeServerStart) { + return; + } + + await hooks.beforeServerStart(projectPath, scenario); + }; + + let server: RunningServer | undefined; + await runBeforeHook(); + try { + server = await startServer(projectPath, { + command: ['bun', 'run', 'src/backend/server.ts'], + env: scenario.options.env, + readyTimeoutMs: 30_000, + readyUrl: ROOT_READY_URL + }); + + const providersResponse = await fetch(AUTH_PROVIDERS_ENDPOINT); + await ensureStatus(providersResponse, HTTP_OK, 'GET /auth/providers'); + const providers = await ensureJson(providersResponse); + + if (!Array.isArray(providers)) { + throw new Error('Expected provider list to be an array'); + } + + const sessionResponse = await fetch(AUTH_SESSION_ENDPOINT, { + method: 'POST' + }); + + // Without credentials this should indicate unauthorized access. + if ( + sessionResponse.status !== HTTP_UNAUTHORIZED && + sessionResponse.status !== HTTP_BAD_REQUEST + ) { + throw new Error( + `Expected ${HTTP_BAD_REQUEST} or ${HTTP_UNAUTHORIZED} from POST /auth/session without credentials, got ${sessionResponse.status}` + ); + } + } finally { + await stopServer(server); + await runAfterHook(); + cleanupProject(projectPath); + } +}; + diff --git a/tests/functional/auth.test.ts b/tests/functional/auth.test.ts new file mode 100644 index 0000000..e0c0be4 --- /dev/null +++ b/tests/functional/auth.test.ts @@ -0,0 +1,86 @@ +import { validateAuthConfiguration } from '../../scripts/functional-tests/auth-validator'; +import type { MatrixConfig } from '../../scripts/functional-tests/matrix'; +import { runMatrixSuite } from './frameworks/test-utils'; + +type AuthMatrixEntry = MatrixConfig & { + authProvider: string; + directoryConfig: 'default'; +}; + +const SUPPORTED_DATABASE_ENGINES = new Set(['sqlite', 'mongodb', 'postgresql']); + +const createProjectName = (config: AuthMatrixEntry) => { + const hostLabel = config.databaseHost === 'none' ? 
'local' : config.databaseHost; + const tailwindLabel = config.useTailwind ? 'tw' : 'notw'; + + return `test-auth-${config.frontend}-${config.databaseEngine}-${config.orm}-${hostLabel}-${tailwindLabel}` + .replace(/[^a-z0-9-]/gi, '-') + .replace(/-+/g, '-') + .toLowerCase(); +}; + +const describeConfig = (config: AuthMatrixEntry) => { + const segments = [ + 'Auth', + config.frontend === 'none' ? 'no-frontend' : config.frontend, + config.databaseEngine, + config.orm, + config.databaseHost === 'none' ? 'local' : config.databaseHost, + config.useTailwind ? 'tailwind' : 'no-tailwind' + ]; + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +runMatrixSuite({ + createProjectName, + describeBlock: 'Auth configuration matrix', + describeConfig, + buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: config.frontend === 'none' ? 
undefined : config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), + createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), + filterMatrix: (config): config is AuthMatrixEntry => + config.authProvider !== 'none' && + config.directoryConfig === 'default' && + SUPPORTED_DATABASE_ENGINES.has(config.databaseEngine), + validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateAuthConfiguration( + projectPath, + 'bun', + { + authProvider: config.authProvider, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + orm: config.orm + }, + { + skipBuild: false, + skipDependencies: false, + skipServer: false + } + ); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/cloud.test.ts b/tests/functional/cloud.test.ts new file mode 100644 index 0000000..0ad84cf --- /dev/null +++ b/tests/functional/cloud.test.ts @@ -0,0 +1,88 @@ +import { validateCloudProvider } from '../../scripts/functional-tests/cloud-provider-validator'; +import type { MatrixConfig } from '../../scripts/functional-tests/matrix'; +import { runMatrixSuite } from './frameworks/test-utils'; + +type CloudMatrixEntry = MatrixConfig & { + databaseHost: 'turso' | 'neon'; + directoryConfig: 'default'; +}; + +const SUPPORTED_DATABASE_ENGINES = new Set(['sqlite', 'postgresql']); +const SUPPORTED_ORMS = new Set(['none', 'drizzle']); +const SUPPORTED_FRONTENDS = new Set(['html', 'react', 'vue', 'svelte']); + +const createProjectName = (config: CloudMatrixEntry) => + `test-cloud-${config.databaseHost}-${config.databaseEngine}-${config.orm}-${config.frontend}-${ + config.authProvider === 'none' ? 'noauth' : 'auth' + }-${config.useTailwind ? 
'tw' : 'notw'}` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: CloudMatrixEntry) => { + const segments = [ + 'Cloud', + config.databaseHost, + config.databaseEngine, + config.frontend, + config.orm, + config.authProvider === 'none' ? 'no-auth' : config.authProvider, + config.useTailwind ? 'tailwind' : 'no-tailwind' + ]; + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +runMatrixSuite({ + createProjectName, + describeBlock: 'Cloud provider matrix', + describeConfig, + buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: config.frontend === 'none' ? undefined : config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), + createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), + filterMatrix: (config): config is CloudMatrixEntry => + config.databaseHost !== 'none' && + SUPPORTED_DATABASE_ENGINES.has(config.databaseEngine) && + SUPPORTED_ORMS.has(config.orm) && + SUPPORTED_FRONTENDS.has(config.frontend) && + config.directoryConfig === 'default', + validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateCloudProvider( + projectPath, + 'bun', + { + authProvider: config.authProvider, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + orm: config.orm + }, + { + skipBuild: false, + skipDependencies: false, + skipServer: false + } + ); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/databases/mongodb.test.ts 
b/tests/functional/databases/mongodb.test.ts new file mode 100644 index 0000000..20b1bb8 --- /dev/null +++ b/tests/functional/databases/mongodb.test.ts @@ -0,0 +1,110 @@ +import process from 'node:process'; + +import { runFunctionalTests } from '../../../scripts/functional-tests/functional-test-runner'; +import type { MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { validateMongoDBDatabase } from '../../../scripts/functional-tests/mongodb-validator'; +import { runMatrixSuite } from '../frameworks/test-utils'; +import { ensureDockerAvailable } from '../support'; + +type MongoMatrixEntry = MatrixConfig & { + databaseEngine: 'mongodb'; + directoryConfig: 'default'; +}; + +const createProjectName = (config: MongoMatrixEntry) => + `test-mongodb-${config.frontend}-${config.orm}-${config.authProvider === 'none' ? 'noauth' : 'auth'}-${ + config.databaseHost === 'none' ? 'local' : config.databaseHost + }-${config.useTailwind ? 'tw' : 'notw'}` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: MongoMatrixEntry) => { + const segments = [ + 'MongoDB', + config.databaseHost === 'none' ? 'local' : config.databaseHost, + config.frontend === 'none' ? 'no-frontend' : config.frontend, + config.orm, + config.authProvider === 'none' ? 'no-auth' : config.authProvider, + config.useTailwind ? 'tailwind' : 'no-tailwind' + ]; + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +const runFunctionalSuite = async (projectPath: string) => { + process.stdout.write(' → Running functional tests... '); + const start = Date.now(); + + let result; + try { + result = await runFunctionalTests(projectPath, 'bun', { + skipBuild: false, + skipDependencies: false, + skipServer: false + }); + } catch (unknownError) { + const elapsedMs = Date.now() - start; + console.log(`✗ (${elapsedMs}ms)`); + throw unknownError instanceof Error ? 
unknownError : new Error(String(unknownError)); + } + + const elapsedMs = Date.now() - start; + + if (!result.passed) { + console.log(`✗ (${elapsedMs}ms)`); + const details = + result.errors.length > 0 + ? result.errors.map((error) => ` - ${error}`).join('\n') + : ' - Functional test failure'; + + throw new Error(`Functional tests failed:\n${details}`); + } + + console.log(`✓ (${elapsedMs}ms)`); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +runMatrixSuite({ + createProjectName, describeBlock: 'MongoDB database matrix', describeConfig, beforeValidate: async ({ config, projectPath }) => { + await runFunctionalSuite(projectPath); + + if (config.databaseHost === 'none') { + const dockerStatus = ensureDockerAvailable(); + + if (!dockerStatus.available) { + throw new Error(`Docker unavailable: ${dockerStatus.message}`); + } + } + }, buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: 'mongodb', + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: config.frontend === 'none' ? 
undefined : config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), filterMatrix: (config): config is MongoMatrixEntry => + config.databaseEngine === 'mongodb' && config.directoryConfig === 'default', validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateMongoDBDatabase(projectPath, { + authProvider: config.authProvider, + databaseHost: config.databaseHost, + orm: config.orm + }); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/databases/mysql.test.ts b/tests/functional/databases/mysql.test.ts new file mode 100644 index 0000000..87a1436 --- /dev/null +++ b/tests/functional/databases/mysql.test.ts @@ -0,0 +1,114 @@ +import process from 'node:process'; + +import { runFunctionalTests } from '../../../scripts/functional-tests/functional-test-runner'; +import type { MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { validateMySQLDatabase } from '../../../scripts/functional-tests/mysql-validator'; +import { runMatrixSuite } from '../frameworks/test-utils'; +import { ensureDockerAvailable } from '../support'; + +type MysqlMatrixEntry = MatrixConfig & { + databaseEngine: 'mysql'; + directoryConfig: 'default'; +}; + +const EXCLUDED_HOSTS = new Set(['planetscale']); + +const createProjectName = (config: MysqlMatrixEntry) => + `test-mysql-${config.frontend}-${config.orm}-${config.authProvider === 'none' ? 'noauth' : 'auth'}-${ + config.databaseHost === 'none' ? 'local' : config.databaseHost + }-${config.useTailwind ? 
'tw' : 'notw'}` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: MysqlMatrixEntry) => { + const segments = [ + 'MySQL', + config.databaseHost === 'none' ? 'local' : config.databaseHost, + config.frontend === 'none' ? 'no-frontend' : config.frontend, + config.orm, + config.authProvider === 'none' ? 'no-auth' : config.authProvider, + config.useTailwind ? 'tailwind' : 'no-tailwind' + ]; + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +const runFunctionalSuite = async (projectPath: string) => { + process.stdout.write(' → Running functional tests... '); + const start = Date.now(); + + let result; + try { + result = await runFunctionalTests(projectPath, 'bun', { + skipBuild: false, + skipDependencies: false, + skipServer: false + }); + } catch (unknownError) { + const elapsedMs = Date.now() - start; + console.log(`✗ (${elapsedMs}ms)`); + throw unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + } + + const elapsedMs = Date.now() - start; + + if (!result.passed) { + console.log(`✗ (${elapsedMs}ms)`); + const details = + result.errors.length > 0 + ? 
result.errors.map((error) => ` - ${error}`).join('\n') + : ' - Functional test failure'; + + throw new Error(`Functional tests failed:\n${details}`); + } + + console.log(`✓ (${elapsedMs}ms)`); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +runMatrixSuite({ + createProjectName, describeBlock: 'MySQL database matrix', describeConfig, beforeValidate: async ({ config, projectPath }) => { + await runFunctionalSuite(projectPath); + + if (config.databaseHost === 'none') { + const dockerStatus = ensureDockerAvailable(); + + if (!dockerStatus.available) { + throw new Error(`Docker unavailable: ${dockerStatus.message}`); + } + } + }, buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: 'mysql', + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: config.frontend === 'none' ? undefined : config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), filterMatrix: (config): config is MysqlMatrixEntry => + config.databaseEngine === 'mysql' && + config.directoryConfig === 'default' && + !EXCLUDED_HOSTS.has(config.databaseHost), validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateMySQLDatabase(projectPath, { + authProvider: config.authProvider, + databaseHost: config.databaseHost, + orm: config.orm + }); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/databases/postgresql.test.ts b/tests/functional/databases/postgresql.test.ts new file mode 100644 index 0000000..32b3d83 --- /dev/null +++ b/tests/functional/databases/postgresql.test.ts @@ -0,0 +1,114 @@ +import 
process from 'node:process'; + +import { runFunctionalTests } from '../../../scripts/functional-tests/functional-test-runner'; +import type { MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { validatePostgreSQLDatabase } from '../../../scripts/functional-tests/postgresql-validator'; +import { runMatrixSuite } from '../frameworks/test-utils'; +import { ensureDockerAvailable } from '../support'; + +type PostgresMatrixEntry = MatrixConfig & { + databaseEngine: 'postgresql'; + directoryConfig: 'default'; +}; + +const EXCLUDED_HOSTS = new Set(['planetscale']); + +const createProjectName = (config: PostgresMatrixEntry) => + `test-postgresql-${config.frontend}-${config.orm}-${config.authProvider === 'none' ? 'noauth' : 'auth'}-${ + config.databaseHost === 'none' ? 'local' : config.databaseHost + }-${config.useTailwind ? 'tw' : 'notw'}` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: PostgresMatrixEntry) => { + const segments = [ + 'PostgreSQL', + config.databaseHost === 'none' ? 'local' : config.databaseHost, + config.frontend === 'none' ? 'no-frontend' : config.frontend, + config.orm, + config.authProvider === 'none' ? 'no-auth' : config.authProvider, + config.useTailwind ? 'tailwind' : 'no-tailwind' + ]; + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +const runFunctionalSuite = async (projectPath: string) => { + process.stdout.write(' → Running functional tests... '); + const start = Date.now(); + + let result; + try { + result = await runFunctionalTests(projectPath, 'bun', { + skipBuild: false, + skipDependencies: false, + skipServer: false + }); + } catch (unknownError) { + const elapsedMs = Date.now() - start; + console.log(`✗ (${elapsedMs}ms)`); + throw unknownError instanceof Error ? 
unknownError : new Error(String(unknownError)); + } + + const elapsedMs = Date.now() - start; + + if (!result.passed) { + console.log(`✗ (${elapsedMs}ms)`); + const details = + result.errors.length > 0 + ? result.errors.map((error) => ` - ${error}`).join('\n') + : ' - Functional test failure'; + + throw new Error(`Functional tests failed:\n${details}`); + } + + console.log(`✓ (${elapsedMs}ms)`); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +runMatrixSuite({ + createProjectName, describeBlock: 'PostgreSQL database matrix', describeConfig, beforeValidate: async ({ config, projectPath }) => { + await runFunctionalSuite(projectPath); + + if (config.databaseHost === 'none') { + const dockerStatus = ensureDockerAvailable(); + + if (!dockerStatus.available) { + throw new Error(`Docker unavailable: ${dockerStatus.message}`); + } + } + }, buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: 'postgresql', + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: config.frontend === 'none' ? 
undefined : config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), filterMatrix: (config): config is PostgresMatrixEntry => + config.databaseEngine === 'postgresql' && + config.directoryConfig === 'default' && + !EXCLUDED_HOSTS.has(config.databaseHost), validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validatePostgreSQLDatabase(projectPath, { + authProvider: config.authProvider, + databaseHost: config.databaseHost, + orm: config.orm + }); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/databases/sqlite.test.ts b/tests/functional/databases/sqlite.test.ts new file mode 100644 index 0000000..73cbcb6 --- /dev/null +++ b/tests/functional/databases/sqlite.test.ts @@ -0,0 +1,107 @@ +import process from 'node:process'; + +import { runFunctionalTests } from '../../../scripts/functional-tests/functional-test-runner'; +import type { MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { validateSQLiteDatabase } from '../../../scripts/functional-tests/sqlite-validator'; +import { runMatrixSuite } from '../frameworks/test-utils'; + +type SqliteMatrixEntry = MatrixConfig & { + databaseEngine: 'sqlite'; + directoryConfig: 'default'; +}; + +const SUPPORTED_ORMS = new Set(['none', 'drizzle']); +const SUPPORTED_HOSTS = new Set(['none', 'turso']); + +const createProjectName = (config: SqliteMatrixEntry) => + `test-sqlite-${config.orm}-${config.authProvider === 'none' ? 'noauth' : 'auth'}-${ + config.databaseHost === 'none' ? 'local' : config.databaseHost + }-${config.useTailwind ? 
'tw' : 'notw'}` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: SqliteMatrixEntry) => { + const segments = [ + 'SQLite', + config.databaseHost === 'none' ? 'local' : config.databaseHost, + config.orm, + config.frontend === 'none' ? 'no-frontend' : config.frontend, + config.authProvider === 'none' ? 'no-auth' : config.authProvider, + config.useTailwind ? 'tailwind' : 'no-tailwind' + ]; + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +const runFunctionalSuite = async (projectPath: string) => { + process.stdout.write(' → Running functional tests... '); + const start = Date.now(); + + let result; + try { + result = await runFunctionalTests(projectPath, 'bun', { + skipBuild: false, + skipDependencies: false, + skipServer: false + }); + } catch (unknownError) { + const elapsedMs = Date.now() - start; + console.log(`✗ (${elapsedMs}ms)`); + throw unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + } + + const elapsedMs = Date.now() - start; + + if (!result.passed) { + console.log(`✗ (${elapsedMs}ms)`); + const details = + result.errors.length > 0 + ? result.errors.map((error) => ` - ${error}`).join('\n') + : ' - Functional test failure'; + + throw new Error(`Functional tests failed:\n${details}`); + } + + console.log(`✓ (${elapsedMs}ms)`); + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + +runMatrixSuite({ + createProjectName, describeBlock: 'SQLite database matrix', describeConfig, beforeValidate: async ({ projectPath }) => { + await runFunctionalSuite(projectPath); + }, buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: 'sqlite', + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: config.frontend === 'none' ? 
undefined : config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), filterMatrix: (config): config is SqliteMatrixEntry => + config.databaseEngine === 'sqlite' && + config.directoryConfig === 'default' && + SUPPORTED_ORMS.has(config.orm) && + SUPPORTED_HOSTS.has(config.databaseHost), validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateSQLiteDatabase(projectPath, { + authProvider: config.authProvider, + databaseHost: config.databaseHost, + orm: config.orm + }); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/frameworks/html.test.ts b/tests/functional/frameworks/html.test.ts new file mode 100644 index 0000000..5a77bf8 --- /dev/null +++ b/tests/functional/frameworks/html.test.ts @@ -0,0 +1,83 @@ +import { validateHTMLFramework } from '../../../scripts/functional-tests/html-validator'; +import type { MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { runFrameworkMatrix } from './test-utils'; + +type HtmlMatrixEntry = MatrixConfig & { + directoryConfig: 'default'; + frontend: 'html'; +}; + +const createProjectName = (config: HtmlMatrixEntry) => + `test-html-${config.databaseEngine}-${config.orm}-${config.authProvider === 'none' ? 'noauth' : 'auth'}-${ + config.useTailwind ? 'tw' : 'notw' + }` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: HtmlMatrixEntry) => { + const segments = [ + 'HTML', + config.databaseEngine, + config.authProvider === 'none' ? 'no-auth' : 'auth', + config.orm, + config.useTailwind ? 
'tailwind' : 'no-tailwind' + ]; + + if (config.databaseHost !== 'none') { + segments.splice(2, 0, config.databaseHost); + } + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +const SUPPORTED_DATABASE_ENGINES = new Set(['none', 'sqlite', 'mongodb']); +const SUPPORTED_ORMS = new Set(['none', 'drizzle']); + +runFrameworkMatrix({ + createProjectName, describeBlock: 'HTML framework matrix', describeConfig, framework: 'html', buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: 'html', + orm: config.orm, + useTailwind: config.useTailwind + }), createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), filterMatrix: (config): config is HtmlMatrixEntry => + config.frontend === 'html' && + config.directoryConfig === 'default' && + SUPPORTED_DATABASE_ENGINES.has(config.databaseEngine) && + SUPPORTED_ORMS.has(config.orm), validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateHTMLFramework( + projectPath, + 'bun', + { + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + orm: config.orm, + useTailwind: config.useTailwind + }, + { + skipBuild: false, + skipDependencies: false, + skipServer: false + } + ); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/frameworks/htmx.test.ts b/tests/functional/frameworks/htmx.test.ts new file mode 100644 index 0000000..3cb9d6d --- /dev/null +++ b/tests/functional/frameworks/htmx.test.ts @@ -0,0 +1,83 @@ +import { 
validateHTMXFramework } from '../../../scripts/functional-tests/htmx-validator'; +import type { MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { runFrameworkMatrix } from './test-utils'; + +type HtmxMatrixEntry = MatrixConfig & { + directoryConfig: 'default'; + frontend: 'htmx'; +}; + +const createProjectName = (config: HtmxMatrixEntry) => + `test-htmx-${config.databaseEngine}-${config.orm}-${config.authProvider === 'none' ? 'noauth' : 'auth'}-${ + config.useTailwind ? 'tw' : 'notw' + }` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: HtmxMatrixEntry) => { + const segments = [ + 'HTMX', + config.databaseEngine, + config.authProvider === 'none' ? 'no-auth' : 'auth', + config.orm, + config.useTailwind ? 'tailwind' : 'no-tailwind' + ]; + + if (config.databaseHost !== 'none') { + segments.splice(2, 0, config.databaseHost); + } + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +const SUPPORTED_DATABASE_ENGINES = new Set(['none', 'sqlite', 'mongodb']); +const SUPPORTED_ORMS = new Set(['none', 'drizzle']); + +runFrameworkMatrix({ + createProjectName, describeBlock: 'HTMX framework matrix', describeConfig, framework: 'htmx', buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: 'htmx', + orm: config.orm, + useTailwind: config.useTailwind + }), createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), filterMatrix: (config): config is HtmxMatrixEntry => + config.frontend === 'htmx' && + config.directoryConfig === 'default' && + 
SUPPORTED_DATABASE_ENGINES.has(config.databaseEngine) && + SUPPORTED_ORMS.has(config.orm), validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateHTMXFramework( + projectPath, + 'bun', + { + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + orm: config.orm, + useTailwind: config.useTailwind + }, + { + skipBuild: false, + skipDependencies: false, + skipServer: false + } + ); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/frameworks/react.test.ts b/tests/functional/frameworks/react.test.ts new file mode 100644 index 0000000..55a68f7 --- /dev/null +++ b/tests/functional/frameworks/react.test.ts @@ -0,0 +1,83 @@ +import type { MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { validateReactFramework } from '../../../scripts/functional-tests/react-validator'; +import { runFrameworkMatrix } from './test-utils'; + +const SUPPORTED_DATABASE_ENGINES = new Set(['none', 'sqlite', 'mongodb']); +const SUPPORTED_ORMS = new Set(['none', 'drizzle']); + +type ReactMatrixEntry = MatrixConfig & { + frontend: 'react'; + directoryConfig: 'default'; +}; + +const createProjectName = (config: ReactMatrixEntry) => + `test-react-${config.databaseEngine}-${config.orm}-${config.authProvider === 'none' ? 'noauth' : 'auth'}-${ + config.useTailwind ? 'tw' : 'notw' + }` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: ReactMatrixEntry) => { + const segments = [ + 'React', + config.databaseEngine, + config.authProvider === 'none' ? 'no-auth' : 'auth', + config.orm, + config.useTailwind ? 
'tailwind' : 'no-tailwind' + ]; + + if (config.databaseHost !== 'none') { + segments.splice(2, 0, config.databaseHost); + } + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +runFrameworkMatrix({ + createProjectName, describeBlock: 'React framework matrix', describeConfig, framework: 'react', buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: 'react', + orm: config.orm, + useTailwind: config.useTailwind + }), createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), filterMatrix: (config): config is ReactMatrixEntry => + config.frontend === 'react' && + config.directoryConfig === 'default' && + SUPPORTED_DATABASE_ENGINES.has(config.databaseEngine) && + SUPPORTED_ORMS.has(config.orm), validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateReactFramework( + projectPath, + 'bun', + { + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + orm: config.orm, + useTailwind: config.useTailwind + }, + { + skipBuild: false, + skipDependencies: false, + skipServer: false + } + ); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/frameworks/svelte.test.ts b/tests/functional/frameworks/svelte.test.ts new file mode 100644 index 0000000..0f44b29 --- /dev/null +++ b/tests/functional/frameworks/svelte.test.ts @@ -0,0 +1,83 @@ +import type { MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { validateSvelteFramework } from 
'../../../scripts/functional-tests/svelte-validator'; +import { runFrameworkMatrix } from './test-utils'; + +type SvelteMatrixEntry = MatrixConfig & { + directoryConfig: 'default'; + frontend: 'svelte'; +}; + +const createProjectName = (config: SvelteMatrixEntry) => + `test-svelte-${config.databaseEngine}-${config.orm}-${config.authProvider === 'none' ? 'noauth' : 'auth'}-${ + config.useTailwind ? 'tw' : 'notw' + }` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: SvelteMatrixEntry) => { + const segments = [ + 'Svelte', + config.databaseEngine, + config.authProvider === 'none' ? 'no-auth' : 'auth', + config.orm, + config.useTailwind ? 'tailwind' : 'no-tailwind' + ]; + + if (config.databaseHost !== 'none') { + segments.splice(2, 0, config.databaseHost); + } + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +const SUPPORTED_DATABASE_ENGINES = new Set(['none', 'sqlite', 'mongodb']); +const SUPPORTED_ORMS = new Set(['none', 'drizzle']); + +runFrameworkMatrix({ + createProjectName, describeBlock: 'Svelte framework matrix', describeConfig, framework: 'svelte', buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: 'svelte', + orm: config.orm, + useTailwind: config.useTailwind + }), createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), filterMatrix: (config): config is SvelteMatrixEntry => + config.frontend === 'svelte' && + config.directoryConfig === 'default' && + SUPPORTED_DATABASE_ENGINES.has(config.databaseEngine) && + SUPPORTED_ORMS.has(config.orm), 
validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateSvelteFramework( + projectPath, + 'bun', + { + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + orm: config.orm, + useTailwind: config.useTailwind + }, + { + skipBuild: false, + skipDependencies: false, + skipServer: false + } + ); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/frameworks/test-utils.ts b/tests/functional/frameworks/test-utils.ts new file mode 100644 index 0000000..469a18b --- /dev/null +++ b/tests/functional/frameworks/test-utils.ts @@ -0,0 +1,184 @@ +import { mkdirSync } from 'node:fs'; +import { join } from 'node:path'; +import process from 'node:process'; + +import { describe, test } from 'bun:test'; + +import type { DependencyFingerprint } from '../../../scripts/functional-tests/dependency-cache'; +import { createMatrix, type MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { + assertStepSuccess, + cleanupProject, + installDependencies, + logWarnings as logStepWarnings, + minutesToMilliseconds, + scaffoldProject +} from '../support'; +import type { ScaffoldOptions } from '../support/scaffold'; + +type MatrixEntry = MatrixConfig & { + directoryConfig: 'default'; +}; + +type MatrixTestOptions = { + readonly describeBlock: string; + readonly createProjectName: (config: TConfig) => string; + readonly describeConfig: (config: TConfig) => string; + readonly filterMatrix: (config: MatrixConfig) => config is TConfig; + readonly validate: (args: { + config: TConfig; + projectPath: string; + }) => Promise<{ passed: boolean; errors: string[]; warnings: string[] }>; + readonly buildScaffoldOptions: (config: TConfig) => Omit; + readonly createFingerprint?: (config: TConfig) => DependencyFingerprint; + readonly beforeValidate?: (args: { config: TConfig; projectPath: string }) => Promise; + readonly afterValidate?: (args: { config: 
TConfig; projectPath: string }) => Promise; + readonly testTimeoutMinutes?: number; + readonly ensureProjectDir?: (projectPath: string) => void; +}; + +const ensureDirectory = (path: string) => { + try { + mkdirSync(path, { recursive: true }); + } catch { + // ignore errors; directory creation is best effort + } +}; + +const defaultFingerprint = (config: MatrixEntry): DependencyFingerprint => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind +}); + +const DEFAULT_TIMEOUT_MINUTES = 10; + +const throwIfValidationFailed = (errors: string[]) => { + if (errors.length === 0) { + throw new Error('Validation failed:\n - Unknown validation failure'); + } + + const message = errors.map((error) => ` - ${error}`).join('\n'); + throw new Error(`Validation failed:\n${message}`); +}; + +const ensureValidationPassed = (passed: boolean, errors: string[]) => { + if (passed) { + return; + } + + throwIfValidationFailed(errors); +}; + +export const runFrameworkMatrix = ({ + describeBlock, + createProjectName, + describeConfig, + filterMatrix, + validate, + buildScaffoldOptions, + createFingerprint = defaultFingerprint, + beforeValidate, + afterValidate, + testTimeoutMinutes = DEFAULT_TIMEOUT_MINUTES, + ensureProjectDir +}: MatrixTestOptions) => { + const timeoutMs = minutesToMilliseconds(testTimeoutMinutes); + + const allEntries = createMatrix() + .filter(filterMatrix) + .sort((a, b) => describeConfig(a).localeCompare(describeConfig(b))); + + const runScenario = async (config: TConfig) => { + const scenarioName = describeConfig(config); + const summaryEntries = [ + ['frontend', config.frontend], + ['databaseEngine', config.databaseEngine], + ['databaseHost', config.databaseHost], + ['orm', config.orm], + ['authProvider', config.authProvider], + ['useTailwind', config.useTailwind ? 
'true' : 'false'], + ['codeQualityTool', config.codeQualityTool] + ].filter(([, value]) => value && value !== 'none' && value !== false); + + console.log(`\n=== Scenario: ${scenarioName} ===`); + + if (summaryEntries.length > 0) { + console.log('Configuration:'); + summaryEntries.forEach(([key, value]) => { + console.log(` • ${key}: ${value as string}`); + }); + console.log(''); + } + + // Respect matrix skip annotations (skip tests instead of failing) + const configWithMeta = config as MatrixConfig; + if (configWithMeta.skip) { + console.log(`SKIPPING scenario: ${scenarioName} – ${configWithMeta.skipReason || 'no reason provided'}`); + + return; + } + + // Respect required environment variables for cloud-hosted scenarios + const {requiredEnv} = configWithMeta; + const hasRequiredEnv = requiredEnv && Array.isArray(requiredEnv); + const missing = hasRequiredEnv ? requiredEnv.filter((envVar) => !process.env[envVar]) : []; + if (missing.length > 0) { + console.log(`SKIPPING scenario: ${scenarioName} – missing env vars: ${missing.join(', ')}`); + + return; + } const projectName = createProjectName(config); + const projectPath = join(process.cwd(), projectName); + + if (ensureProjectDir) { + ensureProjectDir(projectPath); + } else { + ensureDirectory(projectPath); + } + + const scaffold = await scaffoldProject({ + ...buildScaffoldOptions(config), + projectName + }); + + assertStepSuccess(scaffold, 'Scaffold'); + logStepWarnings(scaffold); + + try { + const { projectPath: scaffoldPath } = scaffold; + const dependencies = await installDependencies({ + fingerprint: createFingerprint(config), + projectPath: scaffoldPath + }); + + assertStepSuccess(dependencies, 'Dependency installation'); + logStepWarnings(dependencies); + + await beforeValidate?.({ config, projectPath: scaffoldPath }); + const { errors, passed, warnings } = await validate({ + config, + projectPath: scaffoldPath + }); + + ensureValidationPassed(passed, errors); + warnings.forEach((warning) => console.warn(` 
⚠ ${warning}`)); + await afterValidate?.({ config, projectPath: scaffoldPath }); + } finally { + cleanupProject(projectName); + console.log(`=== Finished: ${scenarioName} ===\n`); + } + }; + + describe(describeBlock, () => { + for (const config of allEntries) { + test(describeConfig(config), () => runScenario(config), { timeout: timeoutMs }); + } + }); +}; + +export const runMatrixSuite = runFrameworkMatrix; + diff --git a/tests/functional/frameworks/vue.test.ts b/tests/functional/frameworks/vue.test.ts new file mode 100644 index 0000000..b5c7685 --- /dev/null +++ b/tests/functional/frameworks/vue.test.ts @@ -0,0 +1,73 @@ +import type { MatrixConfig } from '../../../scripts/functional-tests/matrix'; +import { validateVueFramework } from '../../../scripts/functional-tests/vue-validator'; +import { runFrameworkMatrix } from './test-utils'; + +type VueMatrixEntry = MatrixConfig & { + directoryConfig: 'default'; + frontend: 'vue'; +}; + +const createProjectName = (config: VueMatrixEntry) => + `test-vue-${config.databaseEngine}-${config.orm}-${config.authProvider}-${config.useTailwind ? 'tw' : 'notw'}` + .replace(/[^a-z0-9-]/g, '-') + .toLowerCase(); + +const describeConfig = (config: VueMatrixEntry) => { + const segments = [ + 'Vue', + config.databaseEngine, + config.orm, + config.authProvider === 'none' ? 'no-auth' : config.authProvider, + config.useTailwind ? 
'tailwind' : 'no-tailwind' + ]; + + if (config.databaseHost !== 'none') { + segments.splice(2, 0, config.databaseHost); + } + + if (config.codeQualityTool) { + segments.push(config.codeQualityTool); + } + + return segments.join(' + '); +}; + +const SUPPORTED_DATABASE_ENGINES = new Set(['none', 'sqlite', 'mongodb']); +const SUPPORTED_ORMS = new Set(['none', 'drizzle']); + +runFrameworkMatrix({ + createProjectName, describeBlock: 'Vue framework matrix', describeConfig, framework: 'vue', buildScaffoldOptions: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + directoryConfig: config.directoryConfig, + framework: 'vue', + orm: config.orm, + useTailwind: config.useTailwind + }), createFingerprint: (config) => ({ + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + databaseHost: config.databaseHost, + frontend: config.frontend, + orm: config.orm, + useTailwind: config.useTailwind + }), filterMatrix: (config): config is VueMatrixEntry => + config.frontend === 'vue' && + config.directoryConfig === 'default' && + SUPPORTED_DATABASE_ENGINES.has(config.databaseEngine) && + SUPPORTED_ORMS.has(config.orm), validate: async ({ config, projectPath }) => { + const { errors, passed, warnings } = await validateVueFramework(projectPath, 'bun', { + authProvider: config.authProvider, + codeQualityTool: config.codeQualityTool, + databaseEngine: config.databaseEngine, + isMultiFrontend: config.directoryConfig === 'custom', + orm: config.orm, + useTailwind: config.useTailwind + }); + + return { errors, passed, warnings }; + } +}); + diff --git a/tests/functional/support/assertions.ts b/tests/functional/support/assertions.ts new file mode 100644 index 0000000..a3795d9 --- /dev/null +++ b/tests/functional/support/assertions.ts @@ -0,0 +1,20 @@ +import { type StepResult } from './types'; + +export 
const assertStepSuccess = (result: StepResult, context: string) => { + if (result.success) { + return; + } + + const message = [ + `${context} failed`, + ...result.errors.map((error) => `- ${error}`), + ...result.warnings.map((warning) => `⚠ ${warning}`) + ].join('\n'); + + throw new Error(message); +}; + +export const logWarnings = (result: StepResult) => { + result.warnings.forEach((warning) => console.warn(` ⚠ ${warning}`)); +}; + diff --git a/tests/functional/support/docker.ts b/tests/functional/support/docker.ts new file mode 100644 index 0000000..1a0a126 --- /dev/null +++ b/tests/functional/support/docker.ts @@ -0,0 +1,101 @@ +import { spawnSync } from 'node:child_process'; +import process from 'node:process'; + +import { MILLISECONDS_PER_SECOND, minutesToMilliseconds } from './timing'; +import { createFailure, createSuccess } from './types'; + +const DEFAULT_TIMEOUT_MS = minutesToMilliseconds(1); + +export type DockerStatus = + | { + readonly available: true; + } + | { + readonly available: false; + readonly message: string; + }; + +let cachedBunModule: typeof import('bun') | null = null; + +const loadBun = async () => { + if (!cachedBunModule) { + cachedBunModule = await import('bun'); + } + + return cachedBunModule; +}; + +const runBunScript = async (projectPath: string, scriptArgs: string[], label: string, timeoutMs = DEFAULT_TIMEOUT_MS) => { + process.stdout.write(` → ${label}... `); + + const start = Date.now(); + const bun = await loadBun(); + + const subprocess = bun.spawn({ + cmd: ['bun', ...scriptArgs], + cwd: projectPath, + env: process.env, + stderr: 'inherit', + stdout: 'inherit' + }); + + let timedOut = false; + const timeoutId = setTimeout(() => { + timedOut = true; + try { + subprocess.kill(); + } catch { + // Ignore kill errors. + } + }, timeoutMs); + + const exitCode = await subprocess.exited.then(() => subprocess.exitCode ?? 
0).catch(() => null);
+	clearTimeout(timeoutId);
+	const elapsedMs = Date.now() - start;
+
+	if (timedOut) {
+		console.log(`✗ (TIMEOUT after ${(elapsedMs / MILLISECONDS_PER_SECOND).toFixed(1)}s)`);
+
+		return createFailure([`${label} timed out after ${timeoutMs}ms`], elapsedMs);
+	}
+
+	if (exitCode === 0) {
+		console.log(`✓ (${elapsedMs}ms)`);
+
+		return createSuccess(elapsedMs);
+	}
+
+	console.log(`✗ (${elapsedMs}ms)`);
+
+	return createFailure([`${label} failed with exit code ${exitCode ?? 'unknown'}`], elapsedMs);
+};
+
+export const dockerUp = (projectPath: string, timeoutMs?: number) =>
+	runBunScript(projectPath, ['db:up'], 'Starting database (docker)', timeoutMs);
+
+export const dockerDown = (projectPath: string, timeoutMs?: number) =>
+	runBunScript(projectPath, ['db:down'], 'Stopping database (docker)', timeoutMs);
+
+export const ensureDockerAvailable = () => {
+	try {
+		const result = spawnSync('docker', ['info'], { stdio: 'pipe' });
+
+		if (result.error) {
+			return { available: false, message: result.error.message } satisfies DockerStatus;
+		}
+
+		if (result.status === 0) {
+			return { available: true } satisfies DockerStatus;
+		}
+
+		const stderr = result.stderr?.toString('utf-8')?.trim();
+		const message = stderr?.length ? stderr : `docker info exited with code ${result.status ?? 'unknown'}`;
+
+		return { available: false, message } satisfies DockerStatus;
+	} catch (unknownError) {
+		const error = unknownError instanceof Error ?
unknownError : new Error(String(unknownError)); + + return { available: false, message: error.message } satisfies DockerStatus; + } +}; + diff --git a/tests/functional/support/filesystem.ts b/tests/functional/support/filesystem.ts new file mode 100644 index 0000000..ee68f19 --- /dev/null +++ b/tests/functional/support/filesystem.ts @@ -0,0 +1,15 @@ +import { existsSync, rmSync } from 'node:fs'; + +export const removeDirectoryIfExists = (path: string) => { + if (!existsSync(path)) { + return; + } + + try { + rmSync(path, { force: true, recursive: true }); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + console.warn(`Warning: Failed to remove directory "${path}": ${message}`); + } +}; + diff --git a/tests/functional/support/http.ts b/tests/functional/support/http.ts new file mode 100644 index 0000000..0ad201f --- /dev/null +++ b/tests/functional/support/http.ts @@ -0,0 +1,20 @@ +export const fetchJson = async (url: string, init?: RequestInit) => { + const response = await fetch(url, init); + + if (!response.ok) { + throw new Error(`Request failed (${response.status} ${response.statusText})`); + } + + return (await response.json()) as T; +}; + +export const expectStatus = async (url: string, status: number, init?: RequestInit) => { + const response = await fetch(url, init); + + if (response.status !== status) { + throw new Error(`Expected ${status} from ${url}, received ${response.status}`); + } + + return response; +}; + diff --git a/tests/functional/support/index.ts b/tests/functional/support/index.ts new file mode 100644 index 0000000..a9382d8 --- /dev/null +++ b/tests/functional/support/index.ts @@ -0,0 +1,9 @@ +export * from './assertions'; +export * from './docker'; +export * from './filesystem'; +export * from './http'; +export * from './install'; +export * from './scaffold'; +export * from './timing'; +export * from './types'; + diff --git a/tests/functional/support/install.ts 
b/tests/functional/support/install.ts new file mode 100644 index 0000000..1fe0f97 --- /dev/null +++ b/tests/functional/support/install.ts @@ -0,0 +1,71 @@ +import { join } from 'node:path'; + +import { + computeManifestHash, + getOrInstallDependencies, + hasCachedDependencies, + type DependencyFingerprint +} from '../../../scripts/functional-tests/dependency-cache'; +import { createFailure, createSuccess, type StepResult } from './types'; + +export type InstallOptions = { + readonly projectPath: string; + readonly fingerprint: DependencyFingerprint; + readonly manifestHashOverride?: string; + readonly env?: Record; +}; + +export type InstallResult = StepResult & { + readonly cached: boolean; +}; + +const logCached = (elapsedMs: number) => console.log(`✓ (cached, ${elapsedMs}ms)`); +const logInstalled = (elapsedMs: number) => console.log(`✓ (${elapsedMs}ms)`); + +export const installDependencies = async (options: InstallOptions) => { + process.stdout.write(' → Installing dependencies... '); + + const packageJsonPath = join(options.projectPath, 'package.json'); + + const start = Date.now(); + + try { + const { cached, installTime } = await getOrInstallDependencies( + options.projectPath, + options.fingerprint, + packageJsonPath, + options.manifestHashOverride, + options.env + ); + + const elapsedMs = Date.now() - start; + const outputTime = cached ? installTime : elapsedMs; + + const log = cached ? logCached : logInstalled; + log(outputTime); + + return { + ...createSuccess(elapsedMs), + cached + } satisfies InstallResult; + } catch (unknownError) { + const elapsedMs = Date.now() - start; + const error = unknownError instanceof Error ? 
unknownError : new Error(String(unknownError)); + + console.log(`✗ (${elapsedMs}ms)`); + + return { + ...createFailure([error.message], elapsedMs), + cached: false + } satisfies InstallResult; + } +}; + +export const hasDependencyCache = ( + fingerprint: DependencyFingerprint, + projectPath: string, + manifestHashOverride?: string +) => hasCachedDependencies(fingerprint, join(projectPath, 'package.json'), manifestHashOverride); + +export const getManifestHash = (projectPath: string) => computeManifestHash(join(projectPath, 'package.json')); + diff --git a/tests/functional/support/scaffold.ts b/tests/functional/support/scaffold.ts new file mode 100644 index 0000000..8cc5294 --- /dev/null +++ b/tests/functional/support/scaffold.ts @@ -0,0 +1,216 @@ +import { join } from 'node:path'; +import process from 'node:process'; + +import { removeDirectoryIfExists } from './filesystem'; +import { minutesToMilliseconds, MILLISECONDS_PER_SECOND } from './timing'; +import { createFailure, createSuccess, type StepResult } from './types'; + +const DEFAULT_TIMEOUT_MS = minutesToMilliseconds(2); + +const FRAMEWORK_FLAGS: Record = { + html: '--html', + htmx: '--htmx', + react: '--react', + svelte: '--svelte', + vue: '--vue' +}; + +export type ScaffoldOptions = { + readonly projectName: string; + readonly framework?: string; + readonly databaseEngine?: string; + readonly databaseHost?: string; + readonly orm?: string; + readonly authProvider?: string; + readonly codeQualityTool?: string; + readonly directoryConfig?: string; + readonly useTailwind?: boolean; + readonly skipPrompts?: boolean; + readonly extraArgs?: readonly string[]; + readonly cwd?: string; + readonly env?: Record; + readonly timeoutMs?: number; +}; + +export type ScaffoldResult = StepResult & { + readonly projectName: string; + readonly projectPath: string; + readonly exitCode?: number; + readonly timedOut?: boolean; +}; + +const buildCommand = (options: ScaffoldOptions) => { + const args: string[] = ['bun', 'run', 
'src/index.ts', options.projectName]; + + if (options.skipPrompts !== false) { + args.push('--skip'); + } + + if (options.framework) { + const flag = FRAMEWORK_FLAGS[options.framework] ?? `--${options.framework}`; + args.push(flag); + } + + if (options.databaseEngine && options.databaseEngine !== 'none') { + args.push('--db', options.databaseEngine); + } + + if (options.orm && options.orm !== 'none') { + args.push('--orm', options.orm); + } + + if (options.databaseHost && options.databaseHost !== 'none') { + args.push('--db-host', options.databaseHost); + } + + if (options.authProvider && options.authProvider !== 'none') { + args.push('--auth', options.authProvider); + } + + if (options.codeQualityTool && options.codeQualityTool !== 'none') { + args.push(`--${options.codeQualityTool}`); + } + + if (options.useTailwind) { + args.push('--tailwind'); + } + + if (options.directoryConfig === 'custom') { + args.push('--directory', 'custom'); + } + + if (options.extraArgs) { + args.push(...options.extraArgs); + } + + return args; +}; + +let cachedBunModule: typeof import('bun') | null = null; + +const loadBun = async () => { + if (!cachedBunModule) { + cachedBunModule = await import('bun'); + } + + return cachedBunModule; +}; + +export const scaffoldProject = async (options: ScaffoldOptions): Promise => { + const cwd = options.cwd ?? process.cwd(); + const projectPath = join(cwd, options.projectName); + const command = buildCommand(options); + const timeoutMs = options.timeoutMs ?? DEFAULT_TIMEOUT_MS; + + removeDirectoryIfExists(projectPath); + + process.stdout.write(' → Scaffolding project... '); + + const start = Date.now(); + + const bun = await loadBun(); + + const subprocess = bun.spawn({ + cmd: command, + cwd, + env: { + ...process.env, + ...(options.env ?? 
{}) + }, + stderr: 'pipe', + stdin: 'inherit', + stdout: 'pipe' + }); + + const collectStream = async (stream?: ReadableStream) => { + if (!stream) { + return ''; + } + + const decoder = new TextDecoder(); + const chunks = await bun.readableStreamToArray(stream); + + return chunks.map((chunk) => decoder.decode(chunk)).join(''); + }; + + const stdoutPromise = collectStream(subprocess.stdout); + const stderrPromise = collectStream(subprocess.stderr); + + let timedOut = false; + + const timeoutId = setTimeout(() => { + timedOut = true; + try { + subprocess.kill(); + } catch { + // Ignore kill errors. + } + }, timeoutMs); + + const exitCode = await subprocess.exited.then(() => subprocess.exitCode ?? 0).catch(() => null); + const [capturedStdout, capturedStderr] = await Promise.all([stdoutPromise, stderrPromise]); + + clearTimeout(timeoutId); + + const elapsedMs = Date.now() - start; + + const debugOutput = + process.env.ABSOLUTE_TEST_VERBOSE === '1' || process.env.ABSOLUTE_TEST_DEBUG === '1'; + + const printCapturedOutput = (label: string, output: string) => { + const trimmed = output.trim(); + + if (trimmed.length === 0) { + return; + } + + console.log(`\n${label}:\n${trimmed}\n`); + }; + + if (timedOut) { + const elapsedSeconds = (elapsedMs / MILLISECONDS_PER_SECOND).toFixed(1); + console.log(`✗ (TIMEOUT after ${elapsedSeconds}s)`); + printCapturedOutput('Scaffold stdout', capturedStdout); + printCapturedOutput('Scaffold stderr', capturedStderr); + + return { + ...createFailure([`Scaffold timed out after ${elapsedSeconds}s`], elapsedMs), + exitCode: null, + projectName: options.projectName, + projectPath, + timedOut: true + }; + } + + if (exitCode !== 0) { + console.log(`✗ (${elapsedMs}ms)`); + printCapturedOutput('Scaffold stdout', capturedStdout); + printCapturedOutput('Scaffold stderr', capturedStderr); + + return { + ...createFailure([`Scaffold failed with exit code ${exitCode ?? 'unknown'}`], elapsedMs), + exitCode: exitCode ?? 
undefined, + projectName: options.projectName, + projectPath + }; + } + + if (debugOutput) { + printCapturedOutput('Scaffold stdout', capturedStdout); + printCapturedOutput('Scaffold stderr', capturedStderr); + } + + console.log(`✓ (${elapsedMs}ms)`); + + return { + ...createSuccess(elapsedMs), + exitCode: 0, + projectName: options.projectName, + projectPath + }; +}; + +export const cleanupProject = (projectName: string, cwd = process.cwd()) => { + removeDirectoryIfExists(join(cwd, projectName)); +}; + diff --git a/tests/functional/support/timing.ts b/tests/functional/support/timing.ts new file mode 100644 index 0000000..273e55c --- /dev/null +++ b/tests/functional/support/timing.ts @@ -0,0 +1,23 @@ +export const MILLISECONDS_PER_SECOND = 1_000; +export const SECONDS_PER_MINUTE = 60; + +export const formatDuration = (elapsedMs: number) => `${elapsedMs}ms`; + +export const formatSeconds = (elapsedMs: number) => { + const seconds = (elapsedMs / MILLISECONDS_PER_SECOND).toFixed(1); + + return `${seconds}s`; +}; + +export const withStepTimer = async (task: () => Promise) => { + const start = Date.now(); + const value = await task(); + + return { + elapsedMs: Date.now() - start, + value + }; +}; + +export const minutesToMilliseconds = (minutes: number) => minutes * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND; + diff --git a/tests/functional/support/types.ts b/tests/functional/support/types.ts new file mode 100644 index 0000000..25c5307 --- /dev/null +++ b/tests/functional/support/types.ts @@ -0,0 +1,24 @@ +export type StepResult = { + readonly success: boolean; + readonly elapsedMs: number; + readonly errors: readonly string[]; + readonly warnings: readonly string[]; +}; + +export const createFailure = (errors: string[], elapsedMs: number, warnings: string[] = []): StepResult => ({ + elapsedMs, + errors, + success: false, + warnings +}); + +export const createSuccess = ( + elapsedMs: number, + warnings: string[] = [] +): StepResult => ({ + elapsedMs, + errors: [], + 
success: true, + warnings +}); + diff --git a/tests/harness/cli.ts b/tests/harness/cli.ts new file mode 100644 index 0000000..6db7e91 --- /dev/null +++ b/tests/harness/cli.ts @@ -0,0 +1,60 @@ +import { ScaffoldOptions } from './types'; + +const FRONTEND_FLAGS: Record = { + html: '--html', + htmx: '--htmx', + react: '--react', + svelte: '--svelte', + vue: '--vue' +}; + +export const buildScaffoldArguments = ( + projectName: string, + options: ScaffoldOptions +) => { + const args: string[] = [projectName, '--skip']; + + const frontendFlag = + options.frontend && options.frontend !== 'none' + ? FRONTEND_FLAGS[options.frontend] + : undefined; + + if (frontendFlag) { + args.push(frontendFlag); + } + + if (options.useTailwind) { + args.push('--tailwind'); + } + + if (options.codeQuality === 'eslint+prettier') { + args.push('--eslint+prettier'); + } + + if (options.database && options.database !== 'none') { + args.push('--db', options.database); + } + + if (options.databaseHost && options.databaseHost !== 'none') { + args.push('--db-host', options.databaseHost); + } + + if (options.orm && options.orm !== 'none') { + args.push('--orm', options.orm); + } + + if (options.auth && options.auth !== 'none') { + args.push('--auth', options.auth); + } + + if (options.directory === 'custom') { + args.push('--directory', 'custom'); + } + + if (options.packageManager && options.packageManager !== 'bun') { + args.push('--package-manager', options.packageManager); + } + + return args; +}; + diff --git a/tests/harness/harness.test.ts b/tests/harness/harness.test.ts new file mode 100644 index 0000000..6f434cd --- /dev/null +++ b/tests/harness/harness.test.ts @@ -0,0 +1,25 @@ +import { describe, expect, it } from 'bun:test'; + +import { + buildScaffoldArguments, + cleanupProject, + installDependencies, + runCommand, + scaffoldProject, + startServer +} from './index'; + +describe('behavioural test harness', () => { + it('exposes scaffold helpers', () => { + 
expect(scaffoldProject).toBeInstanceOf(Function); + expect(installDependencies).toBeInstanceOf(Function); + expect(cleanupProject).toBeInstanceOf(Function); + }); + + it('exposes runtime helpers', () => { + expect(startServer).toBeInstanceOf(Function); + expect(runCommand).toBeInstanceOf(Function); + expect(buildScaffoldArguments('example', {})).toBeInstanceOf(Array); + }); +}); + diff --git a/tests/harness/http.ts b/tests/harness/http.ts new file mode 100644 index 0000000..7ccd330 --- /dev/null +++ b/tests/harness/http.ts @@ -0,0 +1,33 @@ +import { setTimeout as delay } from 'node:timers/promises'; + +const DEFAULT_TIMEOUT_MS = 20_000; +const DEFAULT_INTERVAL_MS = 250; + +export const waitForHttpOk = async ( + url: string, + timeoutMs = DEFAULT_TIMEOUT_MS, + intervalMs = DEFAULT_INTERVAL_MS +) => { + const start = Date.now(); + + const poll = async () => { + if (Date.now() - start >= timeoutMs) { + throw new Error(`Timed out waiting for HTTP 200 from ${url}`); + } + + try { + const response = await fetch(url, { method: 'GET' }); + if (response.ok) { + return; + } + } catch { + // Ignore errors while waiting for the server to boot. 
+ } + + await delay(intervalMs); + await poll(); + }; + + await poll(); +}; + diff --git a/tests/harness/index.ts b/tests/harness/index.ts new file mode 100644 index 0000000..ab36d79 --- /dev/null +++ b/tests/harness/index.ts @@ -0,0 +1,7 @@ +export * from './cli'; +export * from './http'; +export * from './process'; +export * from './project'; +export * from './server'; +export * from './types'; + diff --git a/tests/harness/process.ts b/tests/harness/process.ts new file mode 100644 index 0000000..7bb8371 --- /dev/null +++ b/tests/harness/process.ts @@ -0,0 +1,67 @@ +import { spawn } from 'node:child_process'; +import { once } from 'node:events'; +import process from 'node:process'; + +import type { RunCommandOptions, RunCommandResult } from './types'; + +const MILLISECONDS_PER_SECOND = 1_000; +const SECONDS_PER_MINUTE = 60; +const DEFAULT_TIMEOUT_MINUTES = 10; +const DEFAULT_TIMEOUT_MS = + DEFAULT_TIMEOUT_MINUTES * SECONDS_PER_MINUTE * MILLISECONDS_PER_SECOND; +const SIGKILL_DELAY_MS = 1_000; + +export const runCommand = async ( + command: string[], + options: RunCommandOptions = {} +): Promise => { + const [executable, ...args] = command; + const { cwd, env, timeoutMs = DEFAULT_TIMEOUT_MS, label } = options; + + const child = spawn(executable, args, { + cwd, + env: { ...process.env, ...env }, + stdio: ['ignore', 'pipe', 'pipe'] + }); + + const stdoutChunks: string[] = []; + const stderrChunks: string[] = []; + let timedOut = false; + + child.stdout?.on('data', (chunk) => { + stdoutChunks.push(chunk.toString()); + }); + + child.stderr?.on('data', (chunk) => { + stderrChunks.push(chunk.toString()); + }); + + const timeoutHandle = setTimeout(() => { + timedOut = true; + child.kill('SIGTERM'); + setTimeout(() => child.kill('SIGKILL'), SIGKILL_DELAY_MS); + }, timeoutMs); + + const [exitCode] = (await once(child, 'close')) as [number | null]; + clearTimeout(timeoutHandle); + + const stdout = stdoutChunks.join('').trimEnd(); + const stderr = 
stderrChunks.join('').trimEnd(); + + if (timedOut) { + return { + exitCode: exitCode ?? -1, + stderr: stderr.length > 0 ? stderr : `${label ?? 'command'} timed out after ${timeoutMs}ms`, + stdout, + timedOut: true + }; + } + + return { + exitCode: exitCode ?? -1, + stderr, + stdout, + timedOut: false + }; +}; + diff --git a/tests/harness/project.ts b/tests/harness/project.ts new file mode 100644 index 0000000..bd6eba0 --- /dev/null +++ b/tests/harness/project.ts @@ -0,0 +1,144 @@ +import { randomUUID } from 'node:crypto'; +import { existsSync, mkdirSync, rmSync } from 'node:fs'; +import { join } from 'node:path'; +import process from 'node:process'; + +import { + computeManifestHash, + getOrInstallDependencies, + hasCachedDependencies, + type DependencyFingerprint +} from '../../scripts/functional-tests/dependency-cache'; +import { buildScaffoldArguments } from './cli'; +import { runCommand } from './process'; +import type { ScaffoldOptions, ScaffoldResult } from './types'; + +const PROJECT_PREFIX = 'behavioural'; + +const resolveProjectName = (explicitName?: string) => { + if (explicitName) { + return explicitName; + } + + return `${PROJECT_PREFIX}-${randomUUID()}`; +}; + +export const scaffoldProject = async ( + options: ScaffoldOptions +): Promise => { + const projectName = resolveProjectName(options.projectName); + const projectPath = join(process.cwd(), projectName); + + if (existsSync(projectPath)) { + rmSync(projectPath, { force: true, recursive: true }); + } + + const args = buildScaffoldArguments(projectName, options); + const env: Record = {}; + if (options.env) { + Object.assign(env, options.env); + } + env.ABSOLUTE_TEST = 'behavioural'; + + const result = await runCommand(['bun', 'run', 'src/index.ts', ...args], { + env, + label: 'scaffold project' + }).catch((error) => { + cleanupProject(projectPath); + throw error; + }); + + if (result.exitCode !== 0) { + throw new Error( + `Failed to scaffold project 
(${projectName}).\nstdout:\n${result.stdout}\nstderr:\n${result.stderr}` + ); + } + + return { projectName, projectPath }; +}; + +const TMP_DIRECTORY_NAME = '.absolute-tmp'; + +export const installDependencies = async (projectPath: string, options: ScaffoldOptions) => { + const tempDirectory = join(projectPath, TMP_DIRECTORY_NAME); + if (!existsSync(tempDirectory)) { + mkdirSync(tempDirectory, { recursive: true }); + } + + const fingerprint: DependencyFingerprint = { + authProvider: options.auth ?? 'none', + codeQualityTool: options.codeQuality ?? 'none', + databaseEngine: options.database ?? 'none', + databaseHost: options.databaseHost ?? 'none', + frontend: options.frontend ?? 'none', + orm: options.orm ?? 'none', + useTailwind: options.useTailwind ?? false + }; + + const packageJsonPath = join(projectPath, 'package.json'); + const manifestHash = computeManifestHash(packageJsonPath); + + const allowFreshInstall = + process.env.ABSOLUTE_BEHAVIOURAL_ALLOW_INSTALL !== 'false'; + const hasCache = hasCachedDependencies( + fingerprint, + packageJsonPath, + manifestHash + ); + + if (!hasCache && !allowFreshInstall) { + throw new Error( + 'Missing dependency cache for behavioural tests. Populate .test-dependency-cache or allow fresh installs by omitting ABSOLUTE_BEHAVIOURAL_ALLOW_INSTALL=false.' + ); + } + + if (!hasCache && allowFreshInstall) { + console.warn( + '⚠ Behavioural dependency cache not found; performing a fresh install (set ABSOLUTE_BEHAVIOURAL_ALLOW_INSTALL=false to require the cache).' 
+		);
+	}
+
+	const previousTempEnv: Record<string, string | undefined> = {
+		BUN_INSTALL_CACHE_DIR: process.env.BUN_INSTALL_CACHE_DIR,
+		BUN_INSTALL_TMPDIR: process.env.BUN_INSTALL_TMPDIR,
+		TEMP: process.env.TEMP,
+		TMP: process.env.TMP,
+		TMPDIR: process.env.TMPDIR
+	};
+
+	process.env.BUN_INSTALL_CACHE_DIR = tempDirectory;
+	process.env.BUN_INSTALL_TMPDIR = tempDirectory;
+	process.env.TEMP = tempDirectory;
+	process.env.TMP = tempDirectory;
+	process.env.TMPDIR = tempDirectory;
+
+	try {
+		const scenarioEnv = options.env ?? undefined;
+		await getOrInstallDependencies(
+			projectPath,
+			fingerprint,
+			packageJsonPath,
+			manifestHash,
+			scenarioEnv
+		);
+	} catch (error) {
+		throw new Error(
+			`Dependency installation failed for ${projectPath}: ${
+				(error as Error).message
+			}`
+		);
+	} finally {
+		// Keys unset before must be deleted: assigning undefined stores the string "undefined".
+		for (const [key, value] of Object.entries(previousTempEnv)) {
+			if (value === undefined) delete process.env[key];
+			else process.env[key] = value;
+		}
+	}
+};
+
+export const cleanupProject = (projectPath: string) => {
+	if (existsSync(projectPath)) {
+		rmSync(projectPath, { force: true, recursive: true });
+	}
+};
+
diff --git a/tests/harness/server.ts b/tests/harness/server.ts
new file mode 100644
index 0000000..99b7769
--- /dev/null
+++ b/tests/harness/server.ts
@@ -0,0 +1,157 @@
+import { spawn } from 'node:child_process';
+import { once } from 'node:events';
+import process from 'node:process';
+import { setTimeout as delay } from 'node:timers/promises';
+
+import { waitForHttpOk } from './http';
+import type { RunningServer, StartServerOptions } from './types';
+
+const DEFAULT_COMMAND = ['bun', 'run', 'dev'];
+const DEFAULT_READY_URL = 'http://localhost:3000/';
+const DEFAULT_READY_TIMEOUT_MS = 20_000;
+const STOP_TIMEOUT_MS = 1_000;
+
+export const startServer = async (
+	projectPath: string,
+	options: StartServerOptions = {}
+): Promise<RunningServer> => {
+	const command =
options.command ?? DEFAULT_COMMAND; + const readyUrl = options.readyUrl ?? DEFAULT_READY_URL; + const readyTimeoutMs = options.readyTimeoutMs ?? DEFAULT_READY_TIMEOUT_MS; + + const env: Record = {}; + if (options.env) { + Object.assign(env, options.env); + } + env.ABSOLUTE_TEST = 'behavioural'; + + const childEnv = { ...process.env, ...env } as Record; + + if (process.env.ABSOLUTE_TEST_VERBOSE === '1' && childEnv.DATABASE_URL) { + console.log(`startServer env: DATABASE_URL=${childEnv.DATABASE_URL}`); + } + + const captureOutput = process.env.ABSOLUTE_TEST_VERBOSE === '1'; + const stdoutBuffer: string[] = []; + const stderrBuffer: string[] = []; + const OUTPUT_TAIL_LINES = 20; + + const child = spawn(command[0], command.slice(1), { + cwd: projectPath, + env: childEnv, + stdio: [ + 'ignore', + options.forwardStdout ? 'inherit' : 'pipe', + options.forwardStderr ? 'inherit' : 'pipe' + ] + }); + + let exited = false; + let exitCode: number | null = null; + + const exitWatcher = once(child, 'exit').then(([code, signal]) => { + exited = true; + exitCode = code ?? 0; + + if (!options.forwardStdout && child.stdout) { + child.stdout.removeAllListeners('data'); + } + if (!options.forwardStderr && child.stderr) { + child.stderr.removeAllListeners('data'); + } + + const terminatedBySignal = + typeof signal === 'string' && signal.length > 0; + + if (!terminatedBySignal && exitCode !== 0) { + console.warn(`Server process exited prematurely with code ${code}`); + } + + return exitCode ?? 
0; + }); + + if (!options.forwardStdout && child.stdout) { + child.stdout.on('data', (chunk: Buffer) => { + const text = chunk.toString(); + stdoutBuffer.push(text); + if (captureOutput) { + process.stdout.write(text); + } + }); + } + + if (!options.forwardStderr && child.stderr) { + child.stderr.on('data', (chunk: Buffer) => { + const text = chunk.toString(); + stderrBuffer.push(text); + if (captureOutput) { + process.stderr.write(text); + } + }); + } + + const buildDiagnosticMessage = (base: string, cause?: unknown) => { + const tail = (lines: string[], maxLines: number) => + lines.join('').split('\n').filter(Boolean).slice(-maxLines).join('\n'); + + const stdoutTail = tail(stdoutBuffer, OUTPUT_TAIL_LINES); + const stderrTail = tail(stderrBuffer, OUTPUT_TAIL_LINES); + + const details: string[] = [base]; + + if (stdoutTail.length > 0) { + details.push(`stdout:\n${stdoutTail}`); + } + + if (stderrTail.length > 0) { + details.push(`stderr:\n${stderrTail}`); + } + + if (cause) { + const message = cause instanceof Error ? 
cause.message : String(cause); + details.push(`cause: ${message}`); + } + + return details.join('\n\n'); + }; + + try { + await waitForHttpOk(readyUrl, readyTimeoutMs); + } catch (error) { + child.kill('SIGTERM'); + await exitWatcher; + if (exitCode && exitCode !== 0) { + throw new Error( + buildDiagnosticMessage( + `Server process exited with code ${exitCode} while waiting for readiness (${readyUrl}).`, + error + ) + ); + } + + throw new Error( + buildDiagnosticMessage( + `Server did not become ready within ${readyTimeoutMs}ms (${readyUrl}).`, + error + ) + ); + } + + return { + url: readyUrl, + stop: async () => { + if (exited) { + return; + } + + child.kill('SIGTERM'); + await Promise.race([exitWatcher, delay(STOP_TIMEOUT_MS)]); + + if (!exited) { + child.kill('SIGKILL'); + await exitWatcher; + } + } + }; +}; + diff --git a/tests/harness/types.ts b/tests/harness/types.ts new file mode 100644 index 0000000..10dc1b6 --- /dev/null +++ b/tests/harness/types.ts @@ -0,0 +1,67 @@ +export type Frontend = + | 'none' + | 'react' + | 'vue' + | 'svelte' + | 'html' + | 'htmx'; + +export type DatabaseEngine = 'none' | 'sqlite' | 'postgresql' | 'mysql' | 'mongodb'; + +export type DatabaseHost = 'none' | 'local' | 'turso' | 'neon' | 'planetscale'; + +export type AuthProvider = 'none' | 'absoluteAuth'; + +export type Orm = 'none' | 'drizzle'; + +export type CodeQualityTool = 'none' | 'eslint+prettier'; + +export type DirectoryConfiguration = 'default' | 'custom'; + +export interface ScaffoldOptions { + projectName?: string; + frontend?: Frontend; + database?: DatabaseEngine; + databaseHost?: DatabaseHost; + auth?: AuthProvider; + orm?: Orm; + useTailwind?: boolean; + codeQuality?: CodeQualityTool; + directory?: DirectoryConfiguration; + packageManager?: 'bun' | 'npm' | 'pnpm' | 'yarn'; + env?: Record; +} + +export interface ScaffoldResult { + projectName: string; + projectPath: string; +} + +export interface RunCommandOptions { + cwd?: string; + env?: Record; + timeoutMs?: 
number; + label?: string; +} + +export interface RunCommandResult { + exitCode: number; + stdout: string; + stderr: string; + timedOut: boolean; +} + +export interface StartServerOptions { + env?: Record; + readyUrl?: string; + readyTimeoutMs?: number; + forwardStdout?: boolean; + forwardStderr?: boolean; + command?: string[]; +} + +export interface RunningServer { + stop: () => Promise; + url: string; +} + diff --git a/tsconfig.json b/tsconfig.json index f876c6c..923d26e 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -31,5 +31,5 @@ "absolutejs-project", "src/templates/htmx/htmx.*.min.js" ], - "include": ["src/**/*"] + "include": ["src/**/*", "scripts/**/*.ts", "tests/**/*.ts"] }