diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100755 index 00000000..44817a43 --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,9 @@ +#!/usr/bin/env sh +set -e + +if [ "$SKIP_TEST_COMPLIANCE" = "1" ]; then + echo "Skipping unit-test compliance (SKIP_TEST_COMPLIANCE=1)." + exit 0 +fi + +node ./scripts/precommit-compliance.mjs diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 40ddf8b3..6e5b2cd7 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -7,8 +7,35 @@ on: permissions: contents: write jobs: - build-and-deploy: + build: runs-on: ubuntu-latest + steps: + - name: Checkout ๐Ÿ›Ž๏ธ + uses: actions/checkout@v3 + + - name: Setup Node โœจ + uses: actions/setup-node@v3 + with: + node-version: 20.x + + - name: Install Dependencies ๐Ÿ”ง + run: + yarn install + + - name: Build ๐Ÿ”ง + run: + yarn run build + + - name: Post Build Actions ๐Ÿ› ๏ธ + run: + yarn run github-post-build + env: + NODE_ENV: production + + deploy: + runs-on: ubuntu-latest + needs: build + if: github.event_name == 'push' && github.ref == 'refs/heads/main' environment: github-pages steps: - name: Checkout ๐Ÿ›Ž๏ธ diff --git a/.github/workflows/unit-test-compliance.yml b/.github/workflows/unit-test-compliance.yml new file mode 100644 index 00000000..4a786928 --- /dev/null +++ b/.github/workflows/unit-test-compliance.yml @@ -0,0 +1,26 @@ +name: Unit Test Compliance + +on: + push: + branches: + - main + - "codex/**" + pull_request: {} + +jobs: + unit-test-compliance: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Node + uses: actions/setup-node@v4 + with: + node-version: 20.x + + - name: Install dependencies + run: yarn install --frozen-lockfile + + - name: Run unit + component compliance + run: yarn test:compliance:quiet diff --git a/.gitignore b/.gitignore index 47395e55..6b8b0cf7 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,8 @@ node_modules .data/* docs/ yarn.lock 
+dist/ +coverage/ # local env files .env.local diff --git a/CITATION.bib b/CITATION.bib new file mode 100644 index 00000000..e44e4fba --- /dev/null +++ b/CITATION.bib @@ -0,0 +1,9 @@ +@misc{solidcockpit_2026, + author = {Crum, Elias}, + title = {{Solid Cockpit}}, + year = {2026}, + version = {1.0.0}, + publisher = {GitHub}, + howpublished = {\\url{https://github.com/KNowledgeOnWebScale/solid-cockpit}}, + note = {Software. Web app: \\url{https://knowledgeonwebscale.github.io/solid-cockpit}. Accessed: 2026-03-04} +} diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 00000000..eda3b99b --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,18 @@ +cff-version: 1.2.0 +message: "If you use Solid Cockpit in academic work, please cite it using the metadata below." +title: "Solid Cockpit" +type: software +version: "1.0.0" +date-released: 2026-03-04 +license: "MIT" +authors: + - family-names: "Crum" + given-names: "Elias" +repository-code: "https://github.com/KNowledgeOnWebScale/solid-cockpit" +url: "https://knowledgeonwebscale.github.io/solid-cockpit" +keywords: + - Solid + - Linked Data + - SPARQL + - Pod + - Privacy diff --git a/README.md b/README.md index bfd33411..3f7915cb 100644 --- a/README.md +++ b/README.md @@ -1,168 +1,337 @@ ![Solid Cockpit Header Logo](./src/assets/full-sc-logo.png "SC Logo") +# Solid Cockpit + +![Version](https://img.shields.io/badge/version-1.0.0-blue) +![Web App Tag](https://img.shields.io/badge/web--app--tag-web--app--v1.0.0-0a7ea4) +![Vue](https://img.shields.io/badge/vue-3.2.13-42b883) +![Vite](https://img.shields.io/badge/vite-6.2.3-646cff) +![License](https://img.shields.io/badge/license-MIT-green) +Solid Cockpit is a web application for interacting with Solid Pods: authentication, data upload, query execution, and privacy management. ---- +This application was developed in the context of the CHIST-ERA TRIPLE project. 
## Table of Contents -- [Ways to Get a Solid Pod](#ways-to-get-a-solid-pod) -- [Contribute](#contribute) -- [Development Notes](#development-notes) -- [TRIPLE Guide](#triple-guide) -- [Solid Pod VoID File Generation Guide](#solid-pod-void-file-generation-guide) -- [Contact](#contact) -# Solid Cockpit +- [Users](#users) +- [Developers](#developers) -This repository contains all code and guides for the Solid Cockpit web application. Solid Cockpit is an intuitive web application for accessing, editing, and interacting with Solid Pods. +## Users -This applicaiton was originally made for the CHIST-ERA TRIPLE project. For additional getting started guides and information about this project please see ... . +### What Solid Cockpit Does -**To use this App, you need to already have a Solid Pod!** +Main capabilities: +- Solid Pod login and registration flow +- File upload and pod resource operations +- SPARQL querying over Solid Pods and endpoints +- Query caching support +- Privacy/ACL access management +### Accessing the App -## Ways to Get a Solid Pod: +Public deployment: -- [Community Solid Server](https://communitysolidserver.github.io/CommunitySolidServer/latest/): For hosting a Solid Pod on your local machine or on a custom server. -- [Solidcommunity.net](https://solidcommunity.net/): A community-run Solid server (very easy). -- [Other Solid Pod Hosting Services](https://solidproject.org/for-developers#hosted-pod-services): A catologue of other services that offer Solid Pod hosting. +- +If you want to run locally, see [Developers](#developers). +### Ways to Get a Solid Pod -## Contribute +- [Community Solid Server](https://communitysolidserver.github.io/CommunitySolidServer/latest/): host a Pod locally or on your own server +- [Solidcommunity.net](https://solidcommunity.net/): community-hosted Solid Pods +- [Other Solid Pod Hosting Services](https://solidproject.org/for-developers#hosted-pod-services) -Code contributions are welcome! 
Please commit any pull requests against the `main` branch. +### Usage Guides -Issue reporting and other general feedback are welcome. Please use GitHub Issues open an issue or GitHub Discussions where applicable. +TRIPLE onboarding guide: +- [TRIPLE-guide.md](./TRIPLE-guide.md) +Solid Pod VoID file generation: -## Development Notes: +- + +Example: + +```bash +mvn package +java -jar target/void-generator-0.7-SNAPSHOT-uber.jar \ + --from-solid-pod [URL-to-solid-pod] \ + --void-file void.ttl \ + -i [URL-to-solid-pod]/void.ttl \ + --repository [URL-to-solid-pod] +``` -#### General issues: +Then upload `void.ttl` to the pod root using the app's `Data Upload` page. -- If weird things start to happen with athn issues, clear browser history and cookies and it should fix things. +### Citation ---- +If you use this tool in an academic publication, you can cite: +`Crum, E. (2026). Solid Cockpit (Version 1.0.0) [Software]. GitHub. https://github.com/KNowledgeOnWebScale/solid-cockpit` +BibTeX: -## TRIPLE Guide -๐Ÿ‘‹ Welcome to the TRIPLE Getting Started Guide! This document will help you set up a Solid Pod and introduce you to the features of the Solid Cockpit app. Follow the steps carefully to get up and running ๐Ÿš€ +```bibtex +@misc{solidcockpit_2026, + author = {Crum, Elias}, + title = {{Solid Cockpit}}, + year = {2026}, + version = {1.0.0}, + publisher = {GitHub}, + howpublished = {\url{https://github.com/KNowledgeOnWebScale/solid-cockpit}}, + note = {Software. Web app: \url{https://knowledgeonwebscale.github.io/solid-cockpit}. Accessed: 2026-03-04} +} +``` +You can also use: +- `CITATION.cff` for GitHub-style citation metadata +- `CITATION.bib` for direct BibTeX import -### ๐ŸŽฏ Objectives of This Guide +### Contact and Support -1. Setting up a Solid Pod (that is accessible via the internet) -2. Logging into your Pod at any time (after setting it up) -3. Understanding the features and functionalities of the Solid Cockpit app. -4. 
Overview of future capabilities, such as managing pod contents, adjusting data privacy, and querying data using SPARQL. +- Discussions: +- Issues: +## Developers +### Tech Stack -### ๐ŸŒ Setting Up a Solid Pod (Online) +- Vue 3 + Vite 6 +- TypeScript +- Vuetify 3 +- Pinia +- Inrupt Solid client/auth libraries +- Comunica SPARQL engine -To **set up** your Solid Pod using the TRIPLE platform, follow these steps: +### Prerequisites -1. **๐Ÿ”— Navigate to our Solid Pod Hosting website** - - Go to [https://triple.ilabt.imec.be/](https://triple.ilabt.imec.be/) - - Use the following credentials to enter the site: - - **Password**: `triple` - -2. **๐Ÿ” Sign Up to make a Pod** - - Click **Sign Up** (assuming this is your first time) - -3. **๐Ÿ› ๏ธ Create Your Solid Pod** - - Once you are on the **Your Account** page, click on **Create Pod**. - - Enter a name for your new pod and click **Create Pod**. - - After creating your pod, return to the home page by clicking **Back**. +- Node.js 20+ +- Yarn 1.x (repo currently uses `yarn.lock`) -### ๐Ÿ”‘ Logging into your Solid Pod (after creation) -1. **Navigate to our Solid Pod Hosting website** - - Go to [https://triple.ilabt.imec.be/](https://triple.ilabt.imec.be/) - - Use the following credentials to enter the site: - - **Password**: `triple` +### Local Setup -2. **Login using your credentials** - - Click **Login** (assuming you have already made an account) +Install dependencies: -3. **Register your Pod on your new WebID card** - - Navigate to the "Home" page of the [Solid-Cockpit Webpage](https://knowledgeonwebscale.github.io/solid-cockpit/home) - - Login to your Solid Pod using the "https://triple.ilabt.imec.be/" provider - - CLICK the "REGISTER POD" button (not necessary to enter any PodURL) - - Thats it. Now you are connected to your new Pod!! 
+```bash +yarn install +``` -### ๐Ÿ›ซ Using the Solid Cockpit App +Run locally: -The Solid Cockpit app provides several functionalities for managing and interacting with your Solid Pod. +```bash +yarn dev +``` -#### โœจ Available Features +Build production assets: -1. ๐Ÿ“ **Data Upload** - - Upload data to your Pod. - - When uploading RDF data, file validity can be assessed before uploading. - - Specify and modify the metadata of uploaded files. +```bash +yarn build +``` -2. ๐Ÿ—‚๏ธ **Data Browser** (under construction) - - View, modify, move, and delete the contents of your Pod. - - Explore and edit the container structure of your Pod. +Preview production build: -3. ๐Ÿ” **Data Query** - - An editor to write SPARQL queries to interact with data in your Pod, the Pods of others, and SPARQL Endpoints. - - A user input to designate the sources for the queries. - - Options that impact the the output formats, query execution, and other relevant parameters. - -4. ๐Ÿ”’ **Data Privacy Management** - - View the current privacy settings for your data (Read, Write, and Append). - - Add or change privacy settings to containers and resources in your Pod. - - Give and receive notifications related to new access rights to the data of others. 
+```bash +yarn serve +``` +### Scripts + +| Script | Description | +| --- | --- | +| `yarn dev` | Start Vite development server | +| `yarn build` | Build production assets into `dist/` | +| `yarn serve` | Preview the production build locally | +| `yarn test:unit` | Run unit tests (Node built-in runner + TS loader) | +| `yarn test:unit:watch` | Run unit tests in watch mode | +| `yarn test:unit:coverage` | Run unit tests with coverage report generation | +| `yarn test:unit:compliance` | Enforce unit-test + coverage thresholds | +| `yarn test:unit:compliance:quiet` | Enforce unit coverage with concise output | +| `yarn test:component` | Run Vue component tests (`.vue`) via Vitest | +| `yarn test:component:watch` | Run Vue component tests in watch mode | +| `yarn test:component:coverage` | Run Vue component tests with coverage | +| `yarn test:component:compliance` | Enforce Vue component test coverage thresholds | +| `yarn test:component:compliance:quiet` | Enforce component coverage with concise output | +| `yarn test:compliance` | Run full (unit + component) compliance checks | +| `yarn test:compliance:quiet` | Run full compliance checks with concise output | +| `yarn hooks:install` | Configure local git hooks path (`.githooks`) | +| `yarn github-post-build` | Create route-compatible `index.html` copies in `dist/` | +| `yarn deploy` | Publish `dist/` to GitHub Pages | + +### Testing and Coverage + +Unit test suite: + +- Location: `tests/unit/` +- Command: `yarn test:unit` + +Component test suite: + +- Location: `tests/components/` +- Command: `yarn test:component` +- Includes focused `ThemeSwitch` + `TheFooter` tests and full `.vue` smoke mounts in `tests/components/AllComponentsSmoke.test.ts` + +Coverage tracker: + +```bash +yarn test:unit:coverage +``` +This command: -### ๐Ÿ”ฎ In the Near Future +- runs the unit test suite with Node coverage enabled +- writes machine-readable output to `coverage/unit-coverage-summary.json` +- writes a readable summary to 
`coverage/unit-coverage-summary.txt` -- Improve the functionality of the Pod Browser for displaying pod contents effectively. -- Ability to query using the SIB SPARQL Editor (with auto-complete + class visualization + example queries) -- Enhance privacy management display ++ include sharedWithMe.ttl // sharedWithOthers.ttl +Compliance thresholds (gating): -> Stay tuned for future updates that will bring more features and improvements to the Solid Cockpit app, making it more powerful and easier to use. +- line coverage: `98%` +- branch coverage: `90%` +- function coverage: `100%` +Tracked files: +- `src/components/fileUploadUtils.ts` +- `src/components/mime_types.js` +- `src/components/queryPodUtils.ts` +- `src/components/z3-headers.ts` -### ๐Ÿค Support and Further Information +Advisory (non-gating) coverage is also reported for: -If you encounter issues during setup or usage, please consult our support resources or reach out to our support team for assistance. +- `src/components/login.ts` +- `src/components/getData.ts` +- `src/components/privacyEdit.ts` ---- +Override thresholds with env vars: +- `UNIT_COVERAGE_LINES` +- `UNIT_COVERAGE_BRANCHES` +- `UNIT_COVERAGE_FUNCS` -## Solid Pod VoID File Generation Guide +### Git Workflow -### Tool Download +Commit-time compliance check: -Please navigate to https://github.com/JervenBolleman/void-generator/tree/solid-pod-support for more information about local dependencies and run guides. +- Hook file: `.githooks/pre-commit` +- Command run by hook: `node ./scripts/precommit-compliance.mjs` +- Hook output is intentionally concise on pass and detailed on failure. 
-### Local command to execute +Install hooks locally: +```bash +yarn hooks:install ``` -mvn package -java -jar target/void-generator-0.7-SNAPSHOT-uber.jar --from-solid-pod [URL-to-solid-pod] --void-file void.ttl -i [URL-to-solid-pod]/void.ttl --repository [URL-to-solid-pod] +If automatic hook setup is blocked in your environment: + +```bash +git config --local core.hooksPath .githooks ``` -### After VoID File Generation +CI compliance check: + +- Workflow: `.github/workflows/unit-test-compliance.yml` +- Enforces unit tests, component tests, and coverage thresholds. + +### Web-App Version Tags + +Current app version: -- Upload the VoID file to your Solid Pod (to the root directory [i.e. www.yourwebid.com/pod]) via the `Data Upload` page of Solid Cockpit +- `package.json` version: `1.0.0` +- web-app release tag convention: `web-app-v` +- current computed web-app tag: `web-app-v1.0.0` +In-app visibility: ---- +- Footer displays semantic version (`vX.Y.Z`) and computed release tag (`web-app-vX.Y.Z`) +- Values are injected at build time from `package.json` via Vite defines -## Contact +Recommended release workflow: -๐Ÿ™ We hope you find this guide helpful in setting up and exploring your Solid Pod. For questions or comments: [go here](https://github.com/KNowledgeOnWebScale/solid-cockpit/discussions). To report bugs or problems: [try here](https://github.com/KNowledgeOnWebScale/solid-cockpit/issues). +1. Update version: +```bash +yarn version --new-version X.Y.Z +``` + +2. Build and validate: + +```bash +yarn test:unit +yarn build +``` + +3. Create and push release tags: +```bash +git tag vX.Y.Z +git tag web-app-vX.Y.Z +git push origin vX.Y.Z web-app-vX.Y.Z +``` -We hope you enjoy piloting your Solid Pod with Solid Cockpit! 
+### Deployment + +GitHub Pages deployment setup: + +- `vite.config.js` uses `/solid-cockpit/` base path for production +- `yarn github-post-build` prepares route folders in `dist/` +- `yarn deploy` publishes `dist/` via `gh-pages` + +### Dependency Versions + +Direct dependency versions currently declared in `package.json`. + +Runtime dependencies: + +| Package | Version | +| --- | --- | +| `@comunica/context-entries` | `^4.2.0` | +| `@comunica/logger-pretty` | `^4.2.0` | +| `@comunica/query-sparql` | `^4.3.0` | +| `@comunica/query-sparql-solid` | `^4.0.2` | +| `@inrupt/solid-client` | `2.1.2` | +| `@inrupt/solid-client-authn-browser` | `3.1.0` | +| `@inrupt/solid-client-authn-node` | `^3.1.0` | +| `@triply/yasqe` | `^4.2.28` | +| `@triply/yasr` | `^4.2.28` | +| `@vitejs/plugin-vue` | `^5.2.3` | +| `@vue/eslint-config-typescript` | `^9.1.0` | +| `actor-query-process-remote-cache` | `^0.1.0` | +| `core-js` | `^3.8.3` | +| `fs` | `^0.0.1-security` | +| `pinia` | `^2.3.1` | +| `query-sparql-remote-cache` | `^0.0.9` | +| `sparqljs` | `^3.7.3` | +| `vite` | `^6.2.3` | +| `vue` | `^3.2.13` | +| `vue-router` | `^4.5.1` | +| `vuetify` | `^3.5.14` | +| `z3-solver` | `^4.15.3` | + +Development dependencies: + +| Package | Version | +| --- | --- | +| `@tsconfig/node20` | `^20.1.5` | +| `@typescript-eslint/eslint-plugin` | `^5.4.0` | +| `@typescript-eslint/parser` | `^5.4.0` | +| `@vitest/coverage-v8` | `2.1.9` | +| `@vue/test-utils` | `2.4.6` | +| `eslint` | `^7.32.0` | +| `eslint-config-prettier` | `^8.3.0` | +| `eslint-plugin-prettier` | `^4.0.0` | +| `eslint-plugin-vue` | `^8.0.3` | +| `gh-pages` | `^5.0.0` | +| `jsdom` | `24.1.3` | +| `prettier` | `^2.4.1` | +| `typescript` | `^5.0.0` | +| `vitest` | `2.1.9` | + +### Contributing + +- Open pull requests against the `main` branch +- Use GitHub Issues for bug reports +- Use GitHub Discussions for broader questions and ideas diff --git a/main.yml b/main.yml deleted file mode 100644 index 0554291a..00000000 --- a/main.yml 
+++ /dev/null @@ -1,38 +0,0 @@ -name: Build and Deploy -on: - push: - branches: - - main - pull_request: {} -permissions: - contents: write -jobs: - build-and-deploy: - runs-on: ubuntu-latest - env: - VITE_BASE_URL: /solid-cockpit/ - steps: - - name: Checkout ๐Ÿ›Ž๏ธ - uses: actions/checkout@v3 - - - name: Setup Node โœจ - uses: actions/setup-node@v3 - with: - node-version: 20.x - cache: npm - - - name: Install and Build ๐Ÿ”ง - run: | - yarn i - yarn run build - - - name: Add 404 fallback - run: cp dist/index.html dist/404.html - - - name: Deploy ๐Ÿš€ - uses: JamesIves/github-pages-deploy-action@releases/v4 - with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - BRANCH: pages - FOLDER: dist - CLEAN: true diff --git a/package.json b/package.json index 4f9ef9b7..b3eafb65 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,20 @@ "dev": "vite", "build": "vite build", "serve": "vite preview", + "test:unit": "node --test --import ./tests/register-ts-loader.mjs ./tests/unit/*.test.ts", + "test:unit:watch": "node --test --watch --import ./tests/register-ts-loader.mjs ./tests/unit/*.test.ts", + "test:unit:coverage": "node ./scripts/unit-coverage.mjs", + "test:unit:compliance": "node ./scripts/unit-coverage.mjs --enforce", + "test:unit:compliance:quiet": "node ./scripts/unit-coverage.mjs --enforce --quiet", + "test:component": "vitest run --config ./vitest.config.ts", + "test:component:watch": "vitest --config ./vitest.config.ts", + "test:component:coverage": "vitest run --coverage --config ./vitest.config.ts", + "test:component:compliance": "vitest run --coverage --config ./vitest.config.ts", + "test:component:compliance:quiet": "node ./node_modules/vitest/vitest.mjs run --coverage --config ./vitest.config.ts --reporter=dot --coverage.reporter=json-summary --coverage.reportsDirectory=coverage/component", + "test:compliance": "yarn test:unit:compliance && yarn test:component:compliance", + "test:compliance:quiet": "node ./scripts/precommit-compliance.mjs", + 
"hooks:install": "node ./scripts/setup-git-hooks.mjs", + "prepare": "node ./scripts/setup-git-hooks.mjs", "github-post-build": "node github-post-build-script.mjs", "deploy": "gh-pages -d dist" }, @@ -43,12 +57,16 @@ "@tsconfig/node20": "^20.1.5", "@typescript-eslint/eslint-plugin": "^5.4.0", "@typescript-eslint/parser": "^5.4.0", + "@vitest/coverage-istanbul": "2.1.9", + "@vue/test-utils": "2.4.6", "eslint": "^7.32.0", "eslint-config-prettier": "^8.3.0", "eslint-plugin-prettier": "^4.0.0", "eslint-plugin-vue": "^8.0.3", "gh-pages": "^5.0.0", + "jsdom": "24.1.3", "prettier": "^2.4.1", - "typescript": "^5.0.0" + "typescript": "^5.0.0", + "vitest": "2.1.9" } } diff --git a/scripts/precommit-compliance.mjs b/scripts/precommit-compliance.mjs new file mode 100644 index 00000000..fa5bc7d0 --- /dev/null +++ b/scripts/precommit-compliance.mjs @@ -0,0 +1,36 @@ +import { spawnSync } from "node:child_process"; + +function runStep(label, command, args) { + const result = spawnSync(command, args, { encoding: "utf8" }); + + if (result.status !== 0) { + console.error(`\n${label}: FAIL`); + if (result.stdout) process.stdout.write(result.stdout); + if (result.stderr) process.stderr.write(result.stderr); + process.exit(result.status ?? 
1); + } + + if (result.stdout) process.stdout.write(result.stdout); + if (result.stderr) process.stderr.write(result.stderr); +} + +console.log("Running pre-commit compliance checks..."); + +runStep("Unit compliance", process.execPath, [ + "./scripts/unit-coverage.mjs", + "--enforce", + "--quiet", +]); + +runStep("Component compliance", process.execPath, [ + "./node_modules/vitest/vitest.mjs", + "run", + "--coverage", + "--config", + "./vitest.config.ts", + "--reporter=dot", + "--coverage.reporter=json-summary", + "--coverage.reportsDirectory=coverage/component", +]); + +console.log("All pre-commit compliance checks passed."); diff --git a/scripts/setup-git-hooks.mjs b/scripts/setup-git-hooks.mjs new file mode 100644 index 00000000..f8e1837d --- /dev/null +++ b/scripts/setup-git-hooks.mjs @@ -0,0 +1,23 @@ +import { chmodSync, existsSync } from "node:fs"; +import { execSync } from "node:child_process"; + +const preCommitHookPath = ".githooks/pre-commit"; + +if (!existsSync(".git")) { + console.log("Skipping git hook installation (.git directory not found)."); + process.exit(0); +} + +if (!existsSync(preCommitHookPath)) { + console.warn(`Pre-commit hook not found at ${preCommitHookPath}; skipping installation.`); + process.exit(0); +} + +try { + execSync("git config --local core.hooksPath .githooks", { stdio: "ignore" }); + chmodSync(preCommitHookPath, 0o755); + console.log("Git hooks installed (core.hooksPath=.githooks)."); +} catch (error) { + console.warn("Could not configure git hooks automatically in this environment."); + console.warn("Run `git config --local core.hooksPath .githooks` manually if needed."); +} diff --git a/scripts/unit-coverage.mjs b/scripts/unit-coverage.mjs new file mode 100644 index 00000000..eb85f35b --- /dev/null +++ b/scripts/unit-coverage.mjs @@ -0,0 +1,176 @@ +import { mkdirSync, readdirSync, writeFileSync } from "node:fs"; +import { spawnSync } from "node:child_process"; + +const args = new Set(process.argv.slice(2)); +const enforce = 
args.has("--enforce"); +const quiet = args.has("--quiet"); + +const lineThreshold = Number(process.env.UNIT_COVERAGE_LINES ?? "98"); +const branchThreshold = Number(process.env.UNIT_COVERAGE_BRANCHES ?? "90"); +const funcThreshold = Number(process.env.UNIT_COVERAGE_FUNCS ?? "100"); + +const trackedFiles = [ + "src/components/fileUploadUtils.ts", + "src/components/mime_types.js", + "src/components/queryPodUtils.ts", + "src/components/z3-headers.ts", +]; +const advisoryFiles = [ + "src/components/login.ts", + "src/components/getData.ts", + "src/components/privacyEdit.ts", +]; + +const testFiles = readdirSync("tests/unit") + .filter((fileName) => fileName.endsWith(".test.ts")) + .sort() + .map((fileName) => `./tests/unit/${fileName}`); + +const nodeResult = spawnSync( + "node", + [ + "--test", + "--experimental-test-coverage", + "--import", + "./tests/register-ts-loader.mjs", + ...testFiles, + ], + { encoding: "utf8" } +); + +if (!quiet) { + if (nodeResult.stdout) process.stdout.write(nodeResult.stdout); + if (nodeResult.stderr) process.stderr.write(nodeResult.stderr); +} + +if (nodeResult.status !== 0) { + if (quiet) { + if (nodeResult.stdout) process.stdout.write(nodeResult.stdout); + if (nodeResult.stderr) process.stderr.write(nodeResult.stderr); + } + process.exit(nodeResult.status ?? 
1); +} + +const coverageRegex = + /^#\s(.+?)\s\|\s([\d.]+)\s\|\s([\d.]+)\s\|\s([\d.]+)\s\|\s?(.*)$/; + +const metricsByFile = new Map(); +for (const line of nodeResult.stdout.split("\n")) { + if (!line.startsWith("# ")) continue; + if (line.includes("start of coverage report")) continue; + if (line.includes("end of coverage report")) continue; + if (line.includes("file | line % | branch % | funcs %")) continue; + if (line.startsWith("# all files")) continue; + + const match = line.match(coverageRegex); + if (!match) continue; + + const [, file, linePct, branchPct, funcPct, uncovered] = match; + metricsByFile.set(file.trim(), { + file: file.trim(), + linePct: Number(linePct), + branchPct: Number(branchPct), + funcPct: Number(funcPct), + uncovered: uncovered.trim(), + }); +} + +const trackedMetrics = trackedFiles.map((file) => metricsByFile.get(file)).filter(Boolean); +const missingFiles = trackedFiles.filter((file) => !metricsByFile.has(file)); +const advisoryMetrics = advisoryFiles + .map((file) => metricsByFile.get(file)) + .filter(Boolean); +const missingAdvisoryFiles = advisoryFiles.filter((file) => !metricsByFile.has(file)); + +const summary = { + generatedAt: new Date().toISOString(), + thresholds: { + linePct: lineThreshold, + branchPct: branchThreshold, + funcPct: funcThreshold, + }, + trackedFiles, + advisoryFiles, + metrics: trackedMetrics, + advisoryMetrics, + missingFiles, + missingAdvisoryFiles, +}; + +mkdirSync("coverage", { recursive: true }); +writeFileSync( + "coverage/unit-coverage-summary.json", + JSON.stringify(summary, null, 2) + "\n", + "utf8" +); + +const average = trackedMetrics.reduce( + (acc, metric) => { + acc.linePct += metric.linePct; + acc.branchPct += metric.branchPct; + acc.funcPct += metric.funcPct; + return acc; + }, + { linePct: 0, branchPct: 0, funcPct: 0 } +); + +if (trackedMetrics.length > 0) { + average.linePct /= trackedMetrics.length; + average.branchPct /= trackedMetrics.length; + average.funcPct /= trackedMetrics.length; +} 
+ +let reportText = "Unit Coverage (Tracked Files)\n"; +reportText += `Thresholds -> lines: ${lineThreshold}%, branches: ${branchThreshold}%, functions: ${funcThreshold}%\n`; +for (const metric of trackedMetrics) { + reportText += `- ${metric.file}: lines=${metric.linePct.toFixed(2)} branches=${metric.branchPct.toFixed(2)} funcs=${metric.funcPct.toFixed(2)}\n`; +} +reportText += `Average: lines=${average.linePct.toFixed(2)} branches=${average.branchPct.toFixed(2)} funcs=${average.funcPct.toFixed(2)}\n`; +if (missingFiles.length > 0) { + reportText += `Missing from coverage report: ${missingFiles.join(", ")}\n`; +} +if (advisoryMetrics.length > 0 && !quiet) { + reportText += "Advisory Coverage (non-gating)\n"; + for (const metric of advisoryMetrics) { + reportText += `- ${metric.file}: lines=${metric.linePct.toFixed(2)} branches=${metric.branchPct.toFixed(2)} funcs=${metric.funcPct.toFixed(2)}\n`; + } +} +if (missingAdvisoryFiles.length > 0 && !quiet) { + reportText += `Missing advisory files from coverage report: ${missingAdvisoryFiles.join(", ")}\n`; +} + +writeFileSync("coverage/unit-coverage-summary.txt", reportText, "utf8"); +console.log(reportText.trimEnd()); + +if (!enforce) { + process.exit(0); +} + +const failures = []; +for (const metric of trackedMetrics) { + if (metric.linePct < lineThreshold) { + failures.push(`${metric.file}: line coverage ${metric.linePct}% < ${lineThreshold}%`); + } + if (metric.branchPct < branchThreshold) { + failures.push( + `${metric.file}: branch coverage ${metric.branchPct}% < ${branchThreshold}%` + ); + } + if (metric.funcPct < funcThreshold) { + failures.push(`${metric.file}: function coverage ${metric.funcPct}% < ${funcThreshold}%`); + } +} + +for (const missingFile of missingFiles) { + failures.push(`${missingFile}: no coverage metrics found`); +} + +if (failures.length > 0) { + console.error("\nCoverage compliance failed:"); + failures.forEach((failure) => console.error(`- ${failure}`)); + process.exit(1); +} + +if (quiet) 
{ + console.log("Unit coverage compliance: PASS"); +} diff --git a/src/components/ContainerNav.vue b/src/components/ContainerNav.vue index f40eddc3..1e61f7be 100644 --- a/src/components/ContainerNav.vue +++ b/src/components/ContainerNav.vue @@ -1,29 +1,78 @@ diff --git a/src/components/PodBrowser.vue b/src/components/PodBrowser.vue index 11972c8b..739f9b81 100644 --- a/src/components/PodBrowser.vue +++ b/src/components/PodBrowser.vue @@ -3,78 +3,142 @@ href="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/css/materialize.min.css" rel="stylesheet" /> - -
- Pod Data Browser -
+
+ +
+
+ Pod Data Browser +

+ Browse containers and resources in your pod, inspect item details, and + manage content from one place. +

+
+
-
- {{ deletedItemType }} deleted successfully! - -
+
+ {{ deletedItemType }} deleted successfully! + +
-
- -
+
+ +
- -
-
- +
+ +
diff --git a/src/components/PodRegistration.vue b/src/components/PodRegistration.vue index 67a2a25c..d0ab15bf 100644 --- a/src/components/PodRegistration.vue +++ b/src/components/PodRegistration.vue @@ -1,103 +1,101 @@ diff --git a/src/components/Styling/TheFooter.vue b/src/components/Styling/TheFooter.vue index 0adf5fcd..0c4b62db 100644 --- a/src/components/Styling/TheFooter.vue +++ b/src/components/Styling/TheFooter.vue @@ -18,6 +18,10 @@ Last Modified: {{ lastModified }} | + + Version: v{{ appVersion }} + + |
+ +
+ + {{ loggedIn ? "Connected" : "Signed out" }} +
@@ -123,6 +128,7 @@ export default { menu: false as boolean, message: false as boolean, notloggedOut: false as boolean, + loginStatusIntervalId: null as number | null, }; }, computed: { @@ -160,10 +166,16 @@ export default { this.loginCheck(); // Perform login check on component mount // Regularly check login status - setInterval(() => { + this.loginStatusIntervalId = window.setInterval(() => { this.loginCheck(); }, 30000); // Check every 30 seconds }, + beforeUnmount() { + if (this.loginStatusIntervalId !== null) { + window.clearInterval(this.loginStatusIntervalId); + this.loginStatusIntervalId = null; + } + }, }; @@ -190,14 +202,41 @@ export default { } .theme-change { - margin-left: auto; + margin-left: 0; } .header-container .header-right { display: flex; align-items: center; + gap: 0.85rem; margin-left: auto; } +.session-indicator { + display: inline-flex; + align-items: center; + gap: 0.45rem; + padding: 0.38rem 0.65rem; + border-radius: 999px; + background: var(--panel-elev); + color: var(--text-muted); + font-family: "Oxanium", monospace; + font-size: 0.82rem; + font-weight: 600; + white-space: nowrap; +} +.session-indicator.active { + background: color-mix(in srgb, var(--primary) 12%, transparent); + color: var(--text-secondary); +} +.session-dot { + width: 8px; + height: 8px; + border-radius: 999px; + background: var(--gray-500); +} +.session-indicator.active .session-dot { + background: var(--success); +} .header-container .account { display: flex; @@ -285,4 +324,27 @@ export default { border: 2px solid var(--main-white); cursor: pointer; } +@media (max-width: 760px) { + .header-container { + margin: 12px; + } + .header-container h1 { + font-size: 26pt; + } + .header-container img { + width: 64px; + margin-left: 12px; + } + .header-container .header-right { + gap: 0.6rem; + } + .session-indicator { + font-size: 0.76rem; + padding: 0.32rem 0.55rem; + } + .header-container .account { + padding: 8px; + margin-right: 8px; + } +} diff --git 
a/src/components/fileUpload.ts b/src/components/fileUpload.ts index 5e31b694..dcec063a 100644 --- a/src/components/fileUpload.ts +++ b/src/components/fileUpload.ts @@ -1,5 +1,6 @@ import { WithResourceInfo, + getFile, overwriteFile, saveFileInContainer, createContainerAt, @@ -11,10 +12,18 @@ import { deleteFile, deleteSolidDataset, getContainedResourceUrlAll, + getUrl, getUrlAll, } from "@inrupt/solid-client"; import { fetch } from "@inrupt/solid-client-authn-browser"; -import { mimeTypes } from "./mime_types.js"; +import { + getMimeType, + alreadyExistsCheck, + uploadSuccess, + derefrenceFile, +} from "./fileUploadUtils"; + +export { getMimeType, alreadyExistsCheck, uploadSuccess, derefrenceFile }; /** * Iterates through a FileList and uploads files to a Solid Pod via the uploadToPod() inrupt method. @@ -47,17 +56,6 @@ export async function handleFiles( return outputList; } -/** - * Converts a file extension into a MIME Type for use in HTTP PUT requests. - * The function relies on the hash map contained in the file 'mime_types.js'. - * - * @param fileExtension The file extension string of the file for which the MIME Type should be found. - * @returns The MIME Type string of the file of interest or 'application/octet-stream' if not in the hash map. - */ -export function getMimeType(fileExtension: string) { - return mimeTypes[fileExtension.toLowerCase()] || "application/octet-stream"; -} - /** * Takes in a File and uploads it to a Solid Pod using the @inrupt/solid-client method overwriteFile(). * @@ -136,6 +134,174 @@ export async function deleteContainer(containerUrl: string): Promise { } } +/** + * Moves a resource or container to another container inside the user's pod. + * + * Resources are copied to the destination and then deleted from the source. + * Containers are recreated recursively at the destination and then removed. + * + * @param sourceUrl The current URL of the resource or container. 
+ * @param destinationContainerUrl The target container URL where the item should be moved. + * @param podUrl The root pod URL used to validate and create intermediate containers. + * @returns A Promise that resolves to the new URL when successful, or "error" when the move fails. + */ +export async function movePodItem( + sourceUrl: string, + destinationContainerUrl: string, + podUrl: string +): Promise { + try { + if (!destinationContainerUrl.endsWith("/")) { + destinationContainerUrl = `${destinationContainerUrl}/`; + } + await ensureDirectoriesExist(podUrl, destinationContainerUrl, fetch); + + if (sourceUrl.endsWith("/")) { + return await moveContainerToPod(sourceUrl, destinationContainerUrl, podUrl); + } + + return await moveResourceToPod(sourceUrl, destinationContainerUrl, podUrl); + } catch (error) { + console.error(`Error moving ${sourceUrl} to ${destinationContainerUrl}:`, error); + return "error"; + } +} + +async function moveResourceToPod( + sourceUrl: string, + destinationContainerUrl: string, + podUrl: string +): Promise { + await ensureDirectoriesExist(podUrl, destinationContainerUrl, fetch); + const sourceFile = await getFile(sourceUrl, { fetch }); + const fileName = sourceUrl.split("/").pop() || sourceFile.name || "resource"; + const fileToMove = new File([sourceFile], fileName, { + type: sourceFile.type || "application/octet-stream", + }); + const targetUrl = `${destinationContainerUrl}${fileName}`; + + const savedFile = await overwriteFile(targetUrl, fileToMove, { + contentType: fileToMove.type, + fetch, + }); + await deleteFromPod(sourceUrl); + + return savedFile.internal_resourceInfo.sourceIri; +} + +async function moveContainerToPod( + sourceContainerUrl: string, + destinationContainerUrl: string, + podUrl: string +): Promise { + await ensureDirectoriesExist(podUrl, destinationContainerUrl, fetch); + const containerName = + sourceContainerUrl.split("/").filter(Boolean).pop() || "container"; + const targetContainerUrl = 
`${destinationContainerUrl}${containerName}/`; + + try { + await createContainerAt(targetContainerUrl, { fetch }); + } catch (error) { + // The destination container may already exist, which is safe to continue with. + } + + const sourceDataset = await getSolidDataset(sourceContainerUrl, { fetch }); + const containedResources = getContainedResourceUrlAll(sourceDataset); + + for (const resourceUrl of containedResources) { + await movePodItem(resourceUrl, targetContainerUrl, podUrl); + } + + await deleteSolidDataset(sourceContainerUrl, { fetch }); + return targetContainerUrl; +} + +/** + * Renames a resource or container within its current parent container. + * + * The implementation reuses the same copy-then-delete strategy as move operations, + * but targets a new sibling name inside the source parent container. + * + * @param sourceUrl The current URL of the resource or container. + * @param newName The new item name without path segments. + * @param podUrl The root pod URL used to validate any intermediate container handling. + * @returns A Promise that resolves to the renamed URL when successful, or "error" when the rename fails. + */ +export async function renamePodItem( + sourceUrl: string, + newName: string, + podUrl: string +): Promise { + const sanitizedName = newName.trim().replace(/^\/+|\/+$/g, ""); + if (!sanitizedName || sanitizedName.includes("/")) { + return "error"; + } + + const isContainer = sourceUrl.endsWith("/"); + const normalizedSourceUrl = isContainer ? 
sourceUrl.slice(0, -1) : sourceUrl; + const parentContainerUrl = + normalizedSourceUrl.substring(0, normalizedSourceUrl.lastIndexOf("/") + 1); + + try { + if (isContainer) { + return await renameContainerInPod(sourceUrl, parentContainerUrl, sanitizedName, podUrl); + } + + return await renameResourceInPod(sourceUrl, parentContainerUrl, sanitizedName, podUrl); + } catch (error) { + console.error(`Error renaming ${sourceUrl} to ${sanitizedName}:`, error); + return "error"; + } +} + +async function renameResourceInPod( + sourceUrl: string, + parentContainerUrl: string, + newName: string, + podUrl: string +): Promise { + await ensureDirectoriesExist(podUrl, parentContainerUrl, fetch); + const sourceFile = await getFile(sourceUrl, { fetch }); + const fileToRename = new File([sourceFile], newName, { + type: sourceFile.type || "application/octet-stream", + }); + const targetUrl = `${parentContainerUrl}${newName}`; + + const savedFile = await overwriteFile(targetUrl, fileToRename, { + contentType: fileToRename.type, + fetch, + }); + await deleteFromPod(sourceUrl); + + return savedFile.internal_resourceInfo.sourceIri; +} + +async function renameContainerInPod( + sourceContainerUrl: string, + parentContainerUrl: string, + newName: string, + podUrl: string +): Promise { + await ensureDirectoriesExist(podUrl, parentContainerUrl, fetch); + const targetContainerUrl = `${parentContainerUrl}${newName}/`; + + try { + await createContainerAt(targetContainerUrl, { fetch }); + } catch (error) { + // The destination container may already exist, which is safe to continue with. 
+ } + + const sourceDataset = await getSolidDataset(sourceContainerUrl, { fetch }); + const containedResources = getContainedResourceUrlAll(sourceDataset); + + for (const resourceUrl of containedResources) { + await movePodItem(resourceUrl, targetContainerUrl, podUrl); + } + + await deleteSolidDataset(sourceContainerUrl, { fetch }); + return targetContainerUrl; +} + /** * Deletes a Query Hash Thing from queries.ttl file using the @inrupt/solid-client method removeThing(). * @@ -149,25 +315,43 @@ export async function deleteThing( ): Promise { const SD_ENDPOINT = "http://www.w3.org/ns/sparql-service-description#endpoint"; + const RDF_REST = "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest"; + const RDF_NIL = "http://www.w3.org/1999/02/22-rdf-syntax-ns#nil"; + const PROV_WAS_GENERATED_BY = "http://www.w3.org/ns/prov#wasGeneratedBy"; const removalTarget = `${queriesttlUrl}#${targetHash}`; try { - let dataset = await getSolidDataset(removalTarget, { fetch }); + let dataset = await getSolidDataset(queriesttlUrl, { fetch }); - // Get the sources Thing from the desired query and delete it + // Remove the entry's linked RDF list and provenance node before deleting the entry itself. const entry = getThing(dataset, removalTarget); - const endpointUrls = getUrlAll(entry, SD_ENDPOINT); + if (!entry) { + return false; + } + + const sourceListHead = getUrl(entry, SD_ENDPOINT); + const provenanceActivity = getUrl(entry, PROV_WAS_GENERATED_BY); let removed = dataset; - console.log(removed); - if (endpointUrls.length != 0) { - for (const oUrl of endpointUrls) { - // a) Remove the object URL from the entryโ€™s sd:endpoint values - removed = removeThing(removed, oUrl); + + let currentListNode = sourceListHead; + while (currentListNode && currentListNode !== RDF_NIL) { + const listThing = getThing(removed, currentListNode); + const nextListNode = listThing ? 
getUrl(listThing, RDF_REST) : null; + removed = removeThing(removed, currentListNode); + currentListNode = nextListNode; + } + + if (provenanceActivity) { + removed = removeThing(removed, provenanceActivity); + } + + const endpointUrls = getUrlAll(entry, SD_ENDPOINT); + if (endpointUrls.length !== 0) { + for (const endpointUrl of endpointUrls) { + removed = removeThing(removed, endpointUrl); } } - // delete the hash Thing removed = removeThing(removed, removalTarget); - console.log(removed); await saveSolidDatasetAt(queriesttlUrl, removed, { fetch }); return true; @@ -291,57 +475,3 @@ export async function alreadyExists( return false; } } - -/** - * Takes in a file name and returns whether it already exists in the specified container or not. - * - * @param uploadMessage the file name (could also be "already exists") - * - * @return A boolean representing whether the file to be uploaded alreay exists in the current directory - */ -export function alreadyExistsCheck(uploadMessage: string): boolean { - if (uploadMessage == "already exists") { - return true; - } else { - return false; - } -} - -/** - * Checks if the files uploaded from submitUpload() have .name properties (which proves upload was success). 
- * - * @param uploadedFiles is a list of files obtained from the upload process - * - * @returns a boolean value that indicated if the file uploads have been successful or not - */ -export function uploadSuccess(uploadedFiles: string[]): boolean { - let success = false; - uploadedFiles.forEach((up: string) => { - if (up !== undefined || up !== "error") { - success = true; - } else { - success = false; - } - }); - return success; -} - -/** - * Function that returns different bits of information about a file - * - * @param inputFile the file that info is to be determined from - * - * @returns the file NAME, the file SIZE, and the file's URI (URL) - */ -export function derefrenceFile(inputFile: File & WithResourceInfo): string[] { - try { - return [ - inputFile.name, - String(inputFile.size), - inputFile.internal_resourceInfo.sourceIri, - ]; - } catch (error) { - console.error("Error", error); - return ["error"]; - } -} diff --git a/src/components/fileUploadUtils.ts b/src/components/fileUploadUtils.ts new file mode 100644 index 00000000..85b65446 --- /dev/null +++ b/src/components/fileUploadUtils.ts @@ -0,0 +1,55 @@ +import { WithResourceInfo } from "@inrupt/solid-client"; +import { mimeTypes } from "./mime_types.js"; + +/** + * Converts a file extension into a MIME Type for use in HTTP requests. + * + * @param fileExtension The file extension string of the file for which the MIME Type should be found. + * @returns The MIME Type string of the file of interest or "application/octet-stream" if not in the hash map. + */ +export function getMimeType(fileExtension: string): string { + return mimeTypes[fileExtension.toLowerCase()] || "application/octet-stream"; +} + +/** + * Checks whether an upload result indicates that the file already exists. + * + * @param uploadMessage Upload result text + * @returns true when the upload message equals "already exists", false otherwise. 
+ */ +export function alreadyExistsCheck(uploadMessage: string): boolean { + return uploadMessage === "already exists"; +} + +/** + * Checks whether all uploaded file status values indicate success. + * + * @param uploadedFiles list of upload result strings + * @returns true if every item is a non-error string and the list is non-empty. + */ +export function uploadSuccess(uploadedFiles: string[]): boolean { + if (uploadedFiles.length === 0) { + return false; + } + + return uploadedFiles.every((up) => up !== undefined && up !== "error"); +} + +/** + * Function that returns different bits of information about a file. + * + * @param inputFile the file that info is to be determined from + * @returns the file NAME, the file SIZE, and the file's URI (URL) + */ +export function derefrenceFile(inputFile: File & WithResourceInfo): string[] { + try { + return [ + inputFile.name, + String(inputFile.size), + inputFile.internal_resourceInfo.sourceIri, + ]; + } catch (error) { + console.error("Error", error); + return ["error"]; + } +} diff --git a/src/components/login.ts b/src/components/login.ts index 74319ff5..d7740a1f 100644 --- a/src/components/login.ts +++ b/src/components/login.ts @@ -51,6 +51,7 @@ export async function logOut(): Promise { return session.info.isLoggedIn } catch (error) { console.error('Error:', error); + return session.info.isLoggedIn } } else { return session.info.isLoggedIn @@ -118,4 +119,3 @@ export async function handleRedirectAfterPageLoad(): Promise { } } - diff --git a/src/components/queryPod.ts b/src/components/queryPod.ts index ef33c501..7ac7f765 100644 --- a/src/components/queryPod.ts +++ b/src/components/queryPod.ts @@ -13,16 +13,61 @@ import { buildThing, setThing, Thing, + getThing, getThingAll, SolidDataset, getFile, getUrl, + getStringNoLocale, getDatetime, + removeThing, + setStringNoLocale, + setDatetime, } from "@inrupt/solid-client"; import { fetch } from "@inrupt/solid-client-authn-browser"; -import { Parser as SparqlParser } from 
"sparqljs"; -import { changeAclPublic, generateAcl, Permissions } from "./privacyEdit"; -import { fetchPermissionsData } from "./getData"; +import { + stopQuery, + cleanSourcesUrlsForCache, + generateHash, + generateSeededHash, + parseSparqlQuery, + cleanSourcesUrls as cleanSourcesUrlsInternal, + type ComunicaSources as UtilityComunicaSources, +} from "./queryPodUtils"; + +export { stopQuery, cleanSourcesUrlsForCache, generateHash, generateSeededHash, parseSparqlQuery }; + +const RDF_TYPE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; +const RDF_FIRST = "http://www.w3.org/1999/02/22-rdf-syntax-ns#first"; +const RDF_REST = "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest"; +const RDF_NIL = "http://www.w3.org/1999/02/22-rdf-syntax-ns#nil"; +const DCT_CREATED = "http://purl.org/dc/terms/created"; +const DCT_MODIFIED = "http://purl.org/dc/terms/modified"; +const DCT_TITLE = "http://purl.org/dc/terms/title"; +const DCT_DESCRIPTION = "http://purl.org/dc/terms/description"; +const TQ_QUERY = "http://www.w3.org/2001/sw/DataAccess/tests/test-query#query"; +const TQ_QUERY_FORM = + "http://www.w3.org/2001/sw/DataAccess/tests/test-query#QueryForm"; +const TQ_QUERY_SELECT = + "http://www.w3.org/2001/sw/DataAccess/tests/test-query#QuerySelect"; +const TM_RESULT = + "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#result"; +const SH = "http://www.w3.org/ns/shacl#"; +const SH_SELECT = `${SH}select`; +const SH_SPARQL_EXECUTABLE = `${SH}SPARQLExecutable`; +const SD_ENDPOINT = "http://www.w3.org/ns/sparql-service-description#endpoint"; +const SPEX = "https://purl.expasy.org/sparql-examples/ontology#"; +const PROV = "http://www.w3.org/ns/prov#"; +const PROV_ACTIVITY = `${PROV}Activity`; +const PROV_USED = `${PROV}used`; +const PROV_MODIFIED = `${PROV}modified`; +const PROV_WAS_GENERATED_BY = `${PROV}wasGeneratedBy`; +const QVMC = "https://vocab.example/qvmc#"; +const QVMC_INDEX = `${QVMC}Index`; +const QVMC_STATUS = `${QVMC}status`; +const QVMC_LINKED_QUERY = 
`${QVMC}linkedQuery`; +const LDP_RDF_SOURCE = "http://www.w3.org/ns/ldp#RDFSource"; +const CACHE_STATUS_CURRENT = "current"; export interface QueryResultJson { head: { vars: string[] }; @@ -39,16 +84,28 @@ export interface CacheOutput { provenanceOutput: ProvenanceData | null; resultsOutput: QueryResultJson; } -export interface ComunicaSources { - value: string; - context?: Record; -} +export type ComunicaSources = UtilityComunicaSources; export interface CachedQuery { hash: string; + title?: string; queryFile: string; resultsFile: string; sourceUrls: string[]; created: string; + modified?: string; + status?: string; +} + +export interface QueryCacheEntryInput { + hash: string; + query: string; + queryFileUrl: string; + resultsFileUrl: string; + sources: string[]; + status?: "current" | "stale" | "failed"; + title?: string; + description?: string; + linkedQueryHash?: string | null; } export type FetchLike = ( @@ -67,19 +124,6 @@ export interface CoiFetchOptions { onError?: (e: unknown) => void; } -/** - * Stops an ongoing query by destroying a main thread binding stream. - * @param bindingStream - The Comunica bindings stream to destroy. - */ -export function stopQuery(bindingStream: any) { - if (bindingStream && typeof bindingStream.destroy === "function") { - // Destroy the main thread binding stream - bindingStream.destroy(); - return true; - } - return false; -} - /** * Cleans an array of source URLs by removing angle brackets ("<" and ">") * Also turns string[] into a ComunicaSources[], meaning Solid sources are given auth context. @@ -88,47 +132,63 @@ export function stopQuery(bindingStream: any) { * @returns A new array of cleaned source URLs without angle brackets. 
*/ export function cleanSourcesUrls(dirtySources: string[]): ComunicaSources[] { - const betterSources: string[] = []; - const comunicasources: ComunicaSources[] = []; - - dirtySources.forEach((url) => { - // removes unwanted URI indicating characters - if (url.startsWith("<") && url.endsWith(">")) { - betterSources.push(url.slice(1, -1)); - } else { - betterSources.push(url); - } - }); - betterSources.forEach((url) => { - // TODO: fix --> currently very dumb way of doing this - if (url.includes("sparql") || url.includes("endpoint")) { - comunicasources.push({ value: url }); - } else { - comunicasources.push({ value: url, context: { fetch: fetch } }); - } - }); - return comunicasources; + return cleanSourcesUrlsInternal(dirtySources, fetch); } /** - * Cleans an array of source URLs by removing angle brackets ("<" and ">") - * Also turns string[] into a ComunicaSources[], meaning Solid sources are given auth context. - * - * @param dirtySources - An array of source URLs, some of which may be enclosed in angle brackets. - * @returns A new array of cleaned source URLs without angle brackets. + * Builds a deterministic 10-character cache entry identifier from the query + * text and normalized source selection. This aligns the client-side cache key + * more closely with the spec's stable opaque token guidance. 
*/ -export function cleanSourcesUrlsForCache(dirtySources: string[]): string[] { - const betterSourcesCache: string[] = []; +export function buildCacheEntryHash( + query: string, + sources: string[], + requestHeaders: string[] = [] +): string { + const normalizedQuery = query.trim().replace(/\s+/g, " "); + const normalizedSources = Array.from( + new Set(cleanSourcesUrlsForCache(sources).map((source) => source.trim())) + ).sort(); + const normalizedHeaders = [...requestHeaders].sort(); + + return generateSeededHash( + JSON.stringify({ + query: normalizedQuery, + sources: normalizedSources, + headers: normalizedHeaders, + }), + 10 + ); +} - dirtySources.forEach((url) => { - // removes unwanted URI indicating characters - if (url.startsWith("<") && url.endsWith(">")) { - betterSourcesCache.push(url.slice(1, -1)); - } else { - betterSourcesCache.push(url); - } +function getIndexResourceUrl(containerUrl: string, fileName = "queries.ttl"): string { + return `${containerUrl}${fileName}`; +} + +function getQueryEntryUrl( + containerUrl: string, + hash: string, + fileName = "queries.ttl" +): string { + return `${getIndexResourceUrl(containerUrl, fileName)}#${hash}`; +} + +function validateCacheSources(sources: string[]): string[] { + const cleanedSources = cleanSourcesUrlsForCache(sources) + .map((source) => source.trim()) + .filter((source) => source.length > 0); + + if (cleanedSources.length === 0) { + throw new Error( + "The query cache specification requires one or more source URIs." + ); + } + + cleanedSources.forEach((source) => { + new URL(source); }); - return betterSourcesCache; + + return cleanedSources; } /** @@ -386,67 +446,6 @@ export async function executeQueryInMainThread( } } -/** - * Generates a unique 6-character hash using alphanumeric characters. - * @param length - Length for the hash - * @returns A string representing the hash. 
- */ -export function generateHash(length: number): string { - const charset = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - let hash = ""; - for (let i = 0; i < length; i++) { - const randomIndex = Math.floor(Math.random() * charset.length); - hash += charset.charAt(randomIndex); - } - return hash; -} - -/** - * Generates a deterministic alphanumeric hash of given length using a seed. - * @param seedValue - The string used to seed the hash generation - * @param length - Length of the output hash (default: 10) - * @returns A deterministic hash string - */ -export function generateSeededHash(seedValue: string, length = 10): string { - const charset = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - const seed = stringToSeed(seedValue); - const random = mulberry32(seed); - - let hash = ""; - for (let i = 0; i < length; i++) { - const index = Math.floor(random() * charset.length); - hash += charset.charAt(index); - } - return hash; -} - -/** - * Creates a seeded pseudorandom number generator. - * Mulberry32 is fast and well-suited for small tasks like this. - */ -function mulberry32(seed: number): () => number { - return function () { - seed |= 0; - seed = (seed + 0x6d2b79f5) | 0; - let t = Math.imul(seed ^ (seed >>> 15), 1 | seed); - t = (t + Math.imul(t ^ (t >>> 7), 61 | t)) ^ t; - return ((t ^ (t >>> 14)) >>> 0) / 4294967296; - }; -} - -/** - * Converts a string into a numeric seed. - */ -function stringToSeed(str: string): number { - let hash = 0; - for (let i = 0; i < str.length; i++) { - hash = (Math.imul(31, hash) + str.charCodeAt(i)) | 0; - } - return hash >>> 0; -} - /** * Ensures that a container exists at the given URL. * If the container does not exist, it creates one. 
@@ -481,23 +480,6 @@ export async function ensureCacheContainer( if (providedCache === podUrl) { await createContainerAt(cacheUrl, { fetch }); - // TODO: Change this to just initialize acl with default permissions - // Set public read permissions for the cache container - const publicRead: Permissions = { - read: true, - append: false, - write: false, - control: false, - }; - let aclDataset = await fetchPermissionsData(cacheUrl); - if (aclDataset == null) { - console.warn("Initializing an ACL for your inbox/ container..."); - await generateAcl(cacheUrl, webId); - await changeAclPublic(cacheUrl, publicRead); - } else { - await changeAclPublic(cacheUrl, publicRead); - } - console.log(`Query Cache container was created at ${cacheUrl}`); return cacheUrl; } else { @@ -515,13 +497,10 @@ export async function ensureCacheContainer( * - nodes: An array of all list node Things (to be added to your dataset). */ function buildRdfList(sources: string[]): { head: Thing; nodes: Thing[] } { - const RDF_FIRST = "http://www.w3.org/1999/02/22-rdf-syntax-ns#first"; - const RDF_REST = "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest"; - const RDF_NIL = "http://www.w3.org/1999/02/22-rdf-syntax-ns#nil"; - if (sources.length === 0) { - // For an empty list, return rdf:nil. - return { head: createThing({ url: RDF_NIL }), nodes: [] }; + throw new Error( + "Cannot create a cache entry without at least one endpoint source." + ); } // Create a blank node for the current list element. @@ -538,6 +517,162 @@ function buildRdfList(sources: string[]): { head: Thing; nodes: Thing[] } { return { head: listNode, nodes: [listNode, ...restList.nodes] }; } +function removeReferencedRdfList( + dataset: SolidDataset, + headUrl: string | null +): SolidDataset { + let updatedDataset = dataset; + let currentNodeUrl = headUrl; + + while (currentNodeUrl && currentNodeUrl !== RDF_NIL) { + const currentNode = getThing(updatedDataset, currentNodeUrl); + const nextNodeUrl = currentNode ? 
getUrl(currentNode, RDF_REST) : null; + updatedDataset = removeThing(updatedDataset, currentNodeUrl); + currentNodeUrl = nextNodeUrl; + } + + return updatedDataset; +} + +function removeQueryEntryArtifacts( + dataset: SolidDataset, + entryUrl: string +): { dataset: SolidDataset; createdAt: Date | null } { + const entryThing = getThing(dataset, entryUrl); + if (!entryThing) { + return { dataset, createdAt: null }; + } + + const createdAt = getDatetime(entryThing, DCT_CREATED) ?? null; + const sourceListHeadUrl = getUrl(entryThing, SD_ENDPOINT); + const provenanceActivityUrl = getUrl(entryThing, PROV_WAS_GENERATED_BY); + + let updatedDataset = removeReferencedRdfList(dataset, sourceListHeadUrl); + + if (provenanceActivityUrl) { + updatedDataset = removeThing(updatedDataset, provenanceActivityUrl); + } + + updatedDataset = removeThing(updatedDataset, entryUrl); + return { dataset: updatedDataset, createdAt }; +} + +function upsertIndexMetadata( + dataset: SolidDataset, + containerUrl: string, + modifiedAt: Date, + fileName = "queries.ttl" +): SolidDataset { + const indexThingUrl = `${getIndexResourceUrl(containerUrl, fileName)}#index`; + let indexThing = createThing({ url: indexThingUrl }); + indexThing = buildThing(indexThing) + .addUrl(RDF_TYPE, QVMC_INDEX) + .addUrl(RDF_TYPE, LDP_RDF_SOURCE) + .addStringNoLocale(DCT_TITLE, "SPARQL Query Materialization Container") + .addDatetime(DCT_MODIFIED, modifiedAt) + .build(); + + return setThing(dataset, indexThing); +} + +/** + * Creates or replaces a spec-shaped `queries.ttl` entry after the concrete + * `.rq` and results files already exist in the container. 
+ */ +export async function upsertQueryCacheEntry( + containerUrl: string, + entry: QueryCacheEntryInput, + fileName = "queries.ttl" +): Promise { + const cleanedSources = validateCacheSources(entry.sources); + const entryUrl = getQueryEntryUrl(containerUrl, entry.hash, fileName); + const serviceSources = parseSparqlQuery(entry.query); + const modifiedAt = new Date(); + + let dataset: SolidDataset; + try { + dataset = await getSolidDataset(getIndexResourceUrl(containerUrl, fileName), { + fetch, + }); + } catch { + dataset = createSolidDataset(); + } + + const { dataset: withoutExistingEntry, createdAt } = removeQueryEntryArtifacts( + dataset, + entryUrl + ); + + const { head: sourceListHead, nodes: sourceListNodes } = + buildRdfList(cleanedSources); + + let generationActivity = createThing(); + generationActivity = buildThing(generationActivity) + .addUrl(RDF_TYPE, PROV_ACTIVITY) + .addUrl(PROV_USED, entry.queryFileUrl) + .addIri(PROV_USED, sourceListHead.url) + .addDatetime(PROV_MODIFIED, modifiedAt) + .build(); + + let queryThing = createThing({ url: entryUrl }); + queryThing = buildThing(queryThing) + .addUrl(RDF_TYPE, TQ_QUERY_FORM) + .addUrl(RDF_TYPE, TQ_QUERY_SELECT) + .addIri(RDF_TYPE, SH_SPARQL_EXECUTABLE) + .addUrl(TQ_QUERY, entry.queryFileUrl) + .addStringNoLocale(SH_SELECT, entry.query) + .addUrl(TM_RESULT, entry.resultsFileUrl) + .addIri(SD_ENDPOINT, sourceListHead.url) + .addDatetime(DCT_CREATED, createdAt ?? modifiedAt) + .addDatetime(DCT_MODIFIED, modifiedAt) + .addStringNoLocale(QVMC_STATUS, entry.status ?? 
CACHE_STATUS_CURRENT) + .addIri(PROV_WAS_GENERATED_BY, generationActivity.url) + .build(); + + if (entry.title) { + queryThing = buildThing(queryThing) + .addStringNoLocale(DCT_TITLE, entry.title) + .build(); + } + + if (entry.description) { + queryThing = buildThing(queryThing) + .addStringNoLocale(DCT_DESCRIPTION, entry.description) + .build(); + } + + if (entry.linkedQueryHash) { + queryThing = buildThing(queryThing) + .addIri( + QVMC_LINKED_QUERY, + getQueryEntryUrl(containerUrl, entry.linkedQueryHash, fileName) + ) + .build(); + } + + serviceSources.forEach((source) => { + queryThing = buildThing(queryThing).addUrl(`${SPEX}federatesWith`, source).build(); + }); + + let updatedDataset = upsertIndexMetadata( + withoutExistingEntry, + containerUrl, + modifiedAt, + fileName + ); + updatedDataset = setThing(updatedDataset, generationActivity); + updatedDataset = setThing(updatedDataset, queryThing); + sourceListNodes.forEach((node) => { + updatedDataset = setThing(updatedDataset, node); + }); + + await saveSolidDatasetAt(getIndexResourceUrl(containerUrl, fileName), updatedDataset, { + fetch, + }); + + return entry.hash; +} + /** * Creates and uploads a Turtle file (Queries.ttl) into the container. * The function takes an array of source URLs and formats them into Turtle statements. 
@@ -562,116 +697,20 @@ export async function createQueriesTTL( sources: string[], fileName = "queries.ttl" ): Promise { - // Initiatize query cache variables - const hash = generateHash(6); - const queryFile = `${hash}.rq`; - const queryResult = `${hash}.json`; - - // prefixes - const TYPE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"; - const CREATED = "http://purl.org/dc/terms/created"; - const QUERYprop = - "http://www.w3.org/2001/sw/DataAccess/tests/test-query#query"; - const sh = "http://www.w3.org/ns/shacl#"; - const QUERYsuperclass = - "http://www.w3.org/2001/sw/DataAccess/tests/test-query#QueryForm"; - const QUERYSELsubclass = - "http://www.w3.org/2001/sw/DataAccess/tests/test-query#QuerySelect"; - // TODO: create conditional to allow for labelling of non-select queries... - const QUERYCONsubclass = - "http://www.w3.org/2001/sw/DataAccess/tests/test-query#QueryConstruct"; - const QUERYDESCsubclass = - "http://www.w3.org/2001/sw/DataAccess/tests/test-query#QueryDescribe"; - const QUERYASKsubclass = - "http://www.w3.org/2001/sw/DataAccess/tests/test-query#QueryAsk "; - const RESULT = - "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#result"; - const SOURCE = "http://www.w3.org/ns/sparql-service-description#endpoint"; - const OWL = "http://www.w3.org/2002/07/owl#"; - const SCHEMA = "https://schema.org/"; - const SPEX = "https://purl.expasy.org/sparql-examples/ontology#"; - - const cleanedSources: string[] = cleanSourcesUrlsForCache(sources); - - // parse input query string using SPARQLjs - const serviceSources = parseSparqlQuery(query); - - // Saves RDF data as queries.ttl - let dataset: SolidDataset, message: string; - try { - // Try to retrieve the dataset (container) and save updated dataset - dataset = await getSolidDataset(containerUrl + fileName, { fetch }); - message = `UPDATED queries.ttl which now includes: ${hash}`; - } catch (error) { - dataset = createSolidDataset(); - message = `CREATED queries.ttl with first query: ${hash}`; - } - 
- // Tried to get fancy with shacl prefixes here but not necessary :( - // const prefixes: string[] = Object.entries(parsedQuery.prefixes).map( - // ([prefix, namespace]) => { - // // Add prefixes using SHACL's `sh:declare` - // let prefixDeclaration: Thing = createThing({ url: `_:${hash}_prefixes` }); - // prefixDeclaration = buildThing(prefixDeclaration) - // .addIri(`${sh}declare`, `_:prefix_${prefix}`) - // .build(); - // // Add actual prefix urls SHACL's `sh:prefix` and `sh:namespace` - // let prefixContent = createThing({ url: `_:${hash}_prefixes` }); - // prefixContent = buildThing(prefixContent) - // .addStringNoLocale(`${sh}prefix`, prefix) - // .addUrl(`${sh}namespace`, namespace) - // .build(); - - // dataset = setThing(dataset, prefixDeclaration); - // dataset = setThing(dataset, prefixContent); - // return `_:${hash}_prefixes`; - // } - // ); - - // Create the RDF List of sources - const { head: sourceListHead, nodes: sourceListNodes } = - buildRdfList(cleanedSources); - - // Create a Thing for the new query cache - const subjectUri = `${containerUrl + fileName}#${hash}`; - let newQueryThing: Thing = createThing({ url: subjectUri }); - newQueryThing = buildThing(newQueryThing) - // Specify the query hash. - .addUrl(`${TYPE}`, `${QUERYsuperclass}`) - .addUrl(`${TYPE}`, `${QUERYSELsubclass}`) - .addIri(`${TYPE}`, `${sh}SPARQLExecutable`) - // Add the query file - .addUrl(`${QUERYprop}`, `${containerUrl}${queryFile}`) - // add sh:prefixes - // .addIri(`${sh}prefixes}`, prefixes[0]) - // add query body - // TODO: fix this so the query is enclosed in """ """ not " " ... - .addStringNoLocale(`${sh}select`, query) - // Add the results file name - .addUrl(`${RESULT}`, `${containerUrl}${queryResult}`) - // Add sources as an RDF list - .addIri(`${SOURCE}`, sourceListHead.url) - - // Add date of query execution. 
- .addDatetime(`${CREATED}`, new Date()) - .build(); - - // Adds any SERVICE description sources to query entry - if (serviceSources.length > 0) { - serviceSources.forEach((source) => { - newQueryThing = buildThing(newQueryThing) - .addUrl(`${SPEX}federatesWith`, source) - .build(); - }); - } - - // Adds query sources to query entry - dataset = setThing(dataset, newQueryThing); - sourceListNodes.forEach((node) => { - dataset = setThing(dataset, node); - }); - await saveSolidDatasetAt(containerUrl + fileName, dataset, { fetch }); - console.log(message); + const hash = buildCacheEntryHash(query, sources); + await upsertQueryCacheEntry( + containerUrl, + { + hash, + query, + queryFileUrl: `${containerUrl}${hash}.rq`, + // Keep the legacy .json extension for compatibility with current SDK usage. + resultsFileUrl: `${containerUrl}${hash}.json`, + sources, + status: CACHE_STATUS_CURRENT, + }, + fileName + ); return hash; } @@ -719,48 +758,6 @@ export async function uploadQueryFile( } } -/** - * Parses a SPARQL query file for SERVICE clauses and returns any federation source URLs. - * - * @param queryString The SPARQL query as a string. - * @returns An object containing the prefixes and the query body. 
- */ -export function parseSparqlQuery(queryString: string): string[] { - const parser = new SparqlParser(); - - try { - // Parse the SPARQL query string into a structured object - const parsedQuery = parser.parse(queryString); - - // Initialize an array to store service sources - const serviceSources: string[] = []; - - // Helper function to recursively search for SERVICE clauses - function findServiceClauses(pattern: any) { - if (pattern.type === "service" || pattern.type === "SERVICE") { - // Add the service source (URL) to the array - serviceSources.push(pattern.name.value); - } else if (pattern.type === "group" || pattern.type === "union") { - // Recursively check patterns in groups or unions - pattern.patterns.forEach(findServiceClauses); - } else if (pattern.type === "optional") { - // Recursively check optional patterns - findServiceClauses(pattern.pattern); - } - } - - // Start searching for SERVICE clauses in the query's WHERE clause - if (parsedQuery.type === "query" && parsedQuery.where) { - parsedQuery.where.forEach(findServiceClauses); - } - - return serviceSources; - } catch (error) { - console.error("Error parsing SPARQL query for SERVICE clauses:", error); - return []; - } -} - /** * Creates and uploads a JSON file (e.g., hash1.sparqljson) into the container. * @@ -829,13 +826,44 @@ export async function getStoredTtl(resourceUrl: string): Promise { export interface QueryEntry { hash: string; + title?: string; queryFile: string; resultsFile: string; sourceUrls: string[]; created: string; + modified?: string; + status?: string; } -// TODO: Fix THIS +/** + * Updates only the user-facing title for a cached query entry. The underlying + * hash and file locations remain unchanged. 
+ */ +export async function renameCachedQueryEntry( + ttlFileUrl: string, + targetHash: string, + title: string +): Promise { + const entryUrl = `${ttlFileUrl}#${targetHash}`; + try { + let dataset = await getSolidDataset(ttlFileUrl, { fetch }); + const entryThing = getThing(dataset, entryUrl); + if (!entryThing) { + return false; + } + + let renamedThing = setStringNoLocale(entryThing, DCT_TITLE, title.trim()); + renamedThing = setDatetime(renamedThing, DCT_MODIFIED, new Date()); + dataset = setThing(dataset, renamedThing); + await saveSolidDatasetAt(ttlFileUrl, dataset, { fetch }); + return true; + } catch (error) { + console.error(`Could not rename cached query ${targetHash}:`, error); + return false; + } +} + + /** * Retrieves all query entries from a Queries.ttl file. * @@ -855,52 +883,42 @@ export interface QueryEntry { export async function getCachedQueries( ttlFileUrl: string ): Promise { - // Load the dataset from the TTL file. const dataset: SolidDataset = await getSolidDataset(ttlFileUrl, { fetch }); const things: Thing[] = getThingAll(dataset); const queryEntries: QueryEntry[] = []; - let i = 0; things.forEach((thing) => { - // Extract the hash from the Thingโ€™s URL fragment. - i += 1; + const queryFile = getUrl(thing, TQ_QUERY); + const resultsFile = getUrl(thing, TM_RESULT); + if (!queryFile || !resultsFile) { + return; + } + const thingUrl = thing.url; const hash = thingUrl.includes("#") ? 
thingUrl.split("#")[1] : ""; - if (hash.length < 7) { - const created = - getDatetime(thing, "http://purl.org/dc/terms/created")?.toISOString() || - "N/A"; - const queryFile = - getUrl( - thing, - "http://www.w3.org/2001/sw/DataAccess/tests/test-query#query" - ) || "N/A"; - const resultsFile = - getUrl( - thing, - "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#result" - ) || "N/A"; - // For dereferencing RDF source list - const sourceListUrl = getUrl( - thing, - "http://www.w3.org/ns/sparql-service-description#endpoint" - ); - let sourceUrls: string[] = []; - if (sourceListUrl) { - const sourceListHash = sourceListUrl.includes("#") - ? sourceListUrl.split("#")[1] - : ""; - sourceUrls = rdfListSources(sourceListHash, things, i); - } - queryEntries.push({ - hash, - queryFile, - resultsFile, - sourceUrls, - created, - }); + if (!hash) { + return; } + + const created = getDatetime(thing, DCT_CREATED)?.toISOString() || "N/A"; + const modified = getDatetime(thing, DCT_MODIFIED)?.toISOString() || created; + const status = getStringNoLocale(thing, QVMC_STATUS) || CACHE_STATUS_CURRENT; + const title = getStringNoLocale(thing, DCT_TITLE) || hash; + const sourceListUrl = getUrl(thing, SD_ENDPOINT); + const sourceUrls = rdfListSources(sourceListUrl, dataset); + + queryEntries.push({ + hash, + title, + queryFile, + resultsFile, + sourceUrls, + created, + modified, + status, + }); }); + return queryEntries; } @@ -912,29 +930,26 @@ export async function getCachedQueries( * @returns An array of extracted source URLs. 
*/ function rdfListSources( - rdfHash: string, - things: Thing[], - index: number + listHeadUrl: string | null, + dataset: SolidDataset ): string[] { - const RDF_FIRST = "http://www.w3.org/1999/02/22-rdf-syntax-ns#first"; - const RDF_REST = "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest"; - const RDF_NIL = "http://www.w3.org/1999/02/22-rdf-syntax-ns#nil"; - const extractedUrls: string[] = []; - let currentNodeHash = `#${rdfHash}`; - - while (index < things.length) { - if (currentNodeHash.length < 8) break; + let currentNodeUrl = listHeadUrl; + const visited = new Set(); - const url = getUrl(things[index], RDF_FIRST); - if (url) extractedUrls.push(url); + while (currentNodeUrl && currentNodeUrl !== RDF_NIL && !visited.has(currentNodeUrl)) { + visited.add(currentNodeUrl); + const currentNode = getThing(dataset, currentNodeUrl); + if (!currentNode) break; - const nextNodeHash = getUrl(things[index], RDF_REST); - if (!nextNodeHash || nextNodeHash === RDF_NIL) break; // Stop at rdf:nil + const url = getUrl(currentNode, RDF_FIRST); + if (url) { + extractedUrls.push(url); + } - currentNodeHash = nextNodeHash; - index += 1; + currentNodeUrl = getUrl(currentNode, RDF_REST); } + return extractedUrls; } diff --git a/src/components/queryPodUtils.ts b/src/components/queryPodUtils.ts new file mode 100644 index 00000000..982c1922 --- /dev/null +++ b/src/components/queryPodUtils.ts @@ -0,0 +1,160 @@ +import { Parser as SparqlParser } from "sparqljs"; + +export interface ComunicaSources { + value: string; + context?: Record; +} + +/** + * Stops an ongoing query by destroying a main thread binding stream. + * @param bindingStream The Comunica bindings stream to destroy. 
+ */ +export function stopQuery(bindingStream: any): boolean { + if (bindingStream && typeof bindingStream.destroy === "function") { + bindingStream.destroy(); + return true; + } + return false; +} + +/** + * Cleans an array of source URLs by removing angle brackets ("<" and ">") + * and adapting the output to Comunica source objects. + * + * @param dirtySources input source URLs + * @param authenticatedFetch authenticated fetch function for Solid sources + * @returns cleaned Comunica source objects + */ +export function cleanSourcesUrls( + dirtySources: string[], + authenticatedFetch: typeof fetch +): ComunicaSources[] { + return dirtySources.map((url) => { + const cleanUrl = + url.startsWith("<") && url.endsWith(">") ? url.slice(1, -1) : url; + + if (cleanUrl.includes("sparql") || cleanUrl.includes("endpoint")) { + return { value: cleanUrl }; + } + + return { value: cleanUrl, context: { fetch: authenticatedFetch } }; + }); +} + +/** + * Cleans an array of source URLs by removing angle brackets ("<" and ">"). + * + * @param dirtySources input source URLs + * @returns cleaned source URL strings + */ +export function cleanSourcesUrlsForCache(dirtySources: string[]): string[] { + return dirtySources.map((url) => + url.startsWith("<") && url.endsWith(">") ? url.slice(1, -1) : url + ); +} + +/** + * Generates a unique alphanumeric hash. + * @param length length for the hash + * @returns random hash string + */ +export function generateHash(length: number): string { + const charset = + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + let hash = ""; + for (let i = 0; i < length; i++) { + const randomIndex = Math.floor(Math.random() * charset.length); + hash += charset.charAt(randomIndex); + } + return hash; +} + +/** + * Generates a deterministic alphanumeric hash of given length using a seed. 
+ * @param seedValue the seed value + * @param length output hash length (default: 10) + * @returns deterministic hash string + */ +export function generateSeededHash(seedValue: string, length = 10): string { + const charset = + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + const seed = stringToSeed(seedValue); + const random = mulberry32(seed); + + let hash = ""; + for (let i = 0; i < length; i++) { + const index = Math.floor(random() * charset.length); + hash += charset.charAt(index); + } + return hash; +} + +/** + * Creates a seeded pseudorandom number generator. + * Mulberry32 is fast and well-suited for small tasks like this. + */ +function mulberry32(seed: number): () => number { + return function () { + seed |= 0; + seed = (seed + 0x6d2b79f5) | 0; + let t = Math.imul(seed ^ (seed >>> 15), 1 | seed); + t = (t + Math.imul(t ^ (t >>> 7), 61 | t)) ^ t; + return ((t ^ (t >>> 14)) >>> 0) / 4294967296; + }; +} + +/** + * Converts a string into a numeric seed. + */ +function stringToSeed(str: string): number { + let hash = 0; + for (let i = 0; i < str.length; i++) { + hash = (Math.imul(31, hash) + str.charCodeAt(i)) | 0; + } + return hash >>> 0; +} + +/** + * Parses a SPARQL query for SERVICE clauses and returns federation sources. + * + * @param queryString SPARQL query string + * @returns list of SERVICE endpoint URLs + */ +export function parseSparqlQuery(queryString: string): string[] { + const parser = new SparqlParser(); + + try { + const parsedQuery = parser.parse(queryString); + const serviceSources: string[] = []; + + function findServiceClauses(pattern: any): void { + if (!pattern || typeof pattern !== "object") { + return; + } + + const patternType = + typeof pattern.type === "string" ? 
pattern.type.toLowerCase() : ""; + + if (patternType === "service" && pattern.name?.value) { + serviceSources.push(pattern.name.value); + } + + if (Array.isArray(pattern.patterns)) { + pattern.patterns.forEach(findServiceClauses); + } + + if (Array.isArray(pattern.where)) { + pattern.where.forEach(findServiceClauses); + } + } + + if (parsedQuery.type === "query" && parsedQuery.where) { + parsedQuery.where.forEach(findServiceClauses); + } + + return serviceSources; + } catch (error) { + console.error("Error parsing SPARQL query for SERVICE clauses:", error); + return []; + } +} diff --git a/src/main.ts b/src/main.ts index 87c291aa..f348a37f 100644 --- a/src/main.ts +++ b/src/main.ts @@ -3,6 +3,7 @@ import { createApp } from 'vue' import App from './App.vue'; import router from './router'; import { createPinia } from 'pinia'; +import { useAuthStore } from "./stores/auth"; import 'vuetify/styles'; import { createVuetify } from 'vuetify'; @@ -45,9 +46,10 @@ const vuetify = createVuetify({ }, }) -const app = createApp(App).use(vuetify); +const app = createApp(App); +app.use(pinia); app.use(router); -app.use(createPinia()); +app.use(vuetify); // Set theme based on data-theme attribute const observer = new MutationObserver(() => { @@ -64,4 +66,13 @@ if (initialTheme) { vuetify.theme.global.name.value = initialTheme; } -app.mount('#app'); \ No newline at end of file +async function bootstrap() { + const authStore = useAuthStore(pinia); + await authStore.initializeAuth(); + await router.isReady(); + app.mount('#app'); +} + +bootstrap().catch((error) => { + console.error("Application bootstrap failed:", error); +}); diff --git a/src/navigation.ts b/src/navigation.ts new file mode 100644 index 00000000..3a5fca1a --- /dev/null +++ b/src/navigation.ts @@ -0,0 +1,11 @@ +export const LOGGED_OUT_NAV_ITEMS = ["Home", "Query"] as const; + +export const LOGGED_IN_NAV_ITEMS = [ + "Home", + "Data Upload", + "Pod Browser", + "Query", + "Data Privacy", +] as const; + +export const 
PUBLIC_ROUTE_NAMES = ["Home", "Login Page", "Query"] as const; diff --git a/src/router.ts b/src/router.ts index 4ce647e4..e4433f2c 100644 --- a/src/router.ts +++ b/src/router.ts @@ -10,6 +10,7 @@ import EditPrivacy from "./components/EditPrivacy.vue"; import NotFound from "./components/Styling/NotFound.vue"; import { useAuthStore } from "./stores/auth"; +import { PUBLIC_ROUTE_NAMES } from "./navigation"; /** * The router here allows for navigation between different functional pages of the TRIPLE App @@ -56,29 +57,24 @@ const router = createRouter({ }); /** - * A timeout here is necessary because the isLoggedin() function relies on an async function handleRedirectAfterPageLoad() to return boolean - * The result is the routing of the web page flow from the login page to the functional parts of the app - * (and returning to the login page if logged out at any point) + * Initialize auth/session state before running protected-route checks. */ -const publicPages = ["Home", "Login Page", "Query"]; - -setTimeout(() => { +router.beforeEach(async (to) => { const authStore = useAuthStore(); - router.beforeEach(async (to, from, next) => { + if (!authStore.authReady || authStore.authLoading) { + await authStore.initializeAuth(); + } + + if (PUBLIC_ROUTE_NAMES.includes(to.name as (typeof PUBLIC_ROUTE_NAMES)[number])) { + return true; + } - // make sure the user is authenticated - if (publicPages.includes(to.name as string)) { - // Always allow public pages - return next(); - } - // If not logged in, redirect to login - if (!authStore.loggedIn) { - return next({ name: "Login Page" }); - } - // Otherwise allow navigation - next(); - }); -}, 100); + if (!authStore.loggedIn) { + return { name: "Home" }; + } + + return true; +}); /* router.afterEach(function (to, from) { // sending analytics data diff --git a/src/shims-vue.d.ts b/src/shims-vue.d.ts index 2b97bd96..6d1b5945 100644 --- a/src/shims-vue.d.ts +++ b/src/shims-vue.d.ts @@ -3,3 +3,6 @@ declare module '*.vue' { const 
component: DefineComponent<{}, {}, any> export default component } + +declare const __APP_VERSION__: string; +declare const __APP_RELEASE_TAG__: string; diff --git a/src/stores/auth.ts b/src/stores/auth.ts index 698d30e3..30e45302 100644 --- a/src/stores/auth.ts +++ b/src/stores/auth.ts @@ -1,20 +1,47 @@ import { defineStore } from "pinia"; -import { isLoggedin, currentWebId } from "../components/login"; +import { + isLoggedin, + currentWebId, + handleRedirectAfterPageLoad, +} from "../components/login"; + +let authInitPromise: Promise | null = null; export const useAuthStore = defineStore("auth", { state: () => ({ loggedIn: false, webId: "" as string, selectedPodUrl: "" as string, + authReady: false, + authLoading: false, }), actions: { - initializeAuth() { - this.loggedIn = isLoggedin(); - this.webId = currentWebId(); + async initializeAuth(force = false) { + if (!force && this.authReady) { + return; + } + + if (!force && authInitPromise) { + return authInitPromise; + } + + this.authLoading = true; + authInitPromise = (async () => { + await handleRedirectAfterPageLoad(); + this.loggedIn = isLoggedin(); + this.webId = currentWebId() ?? 
""; + this.authReady = true; + })().finally(() => { + this.authLoading = false; + authInitPromise = null; + }); + + return authInitPromise; }, setAuth(loggedIn: boolean, webId: string) { this.loggedIn = loggedIn; this.webId = webId; + this.authReady = true; }, setSelectedPodUrl(podUrl: string) { this.selectedPodUrl = podUrl; // Action to set selected Pod URL @@ -23,6 +50,7 @@ export const useAuthStore = defineStore("auth", { this.loggedIn = false; this.webId = ""; this.selectedPodUrl = ""; // Clear selected Pod URL + this.authReady = true; }, }, }); diff --git a/tests/components/AllComponentsSmoke.test.ts b/tests/components/AllComponentsSmoke.test.ts new file mode 100644 index 00000000..32c0c5d2 --- /dev/null +++ b/tests/components/AllComponentsSmoke.test.ts @@ -0,0 +1,367 @@ +import { RouterLinkStub, mount, shallowMount } from "@vue/test-utils"; +import { createPinia } from "pinia"; +import { nextTick } from "vue"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; +import TheFooter from "../../src/components/Styling/TheFooter.vue"; +import ThemeSwitch from "../../src/components/Styling/ThemeSwitch.vue"; +import FunctionSelector from "../../src/components/Styling/FunctionSelector.vue"; +import { useAuthStore } from "../../src/stores/auth"; + +vi.mock("../../src/components/login.ts", () => ({ + session: { + info: { + isLoggedIn: false, + webId: "https://user.example/profile/card#me", + }, + }, + startLogin: vi.fn(async () => ""), + isLoggedin: vi.fn(() => false), + currentWebId: vi.fn(() => "https://user.example/profile/card#me"), + getPodURLs: vi.fn(async () => ["https://pod.example/"]), + redirectToHomepage: vi.fn(), + redirectToLogin: vi.fn(), + logOut: vi.fn(async () => false), + handleRedirectAfterPageLoad: vi.fn(async () => {}), +})); + +vi.mock("../../src/components/getData.ts", () => ({ + webIdDataset: vi.fn(async () => {}), + fetchData: vi.fn(async (url: string) => ({ + internal_resourceInfo: { + sourceIri: url, + linkedResources: { 
+ type: "resource", + describedby: `${url}.meta`, + }, + }, + })), + fetchPermissionsData: vi.fn(async () => ({})), + fetchAclAgents: vi.fn(async () => ({ + "https://friend.example/profile/card#me": { + read: true, + append: false, + write: false, + control: false, + }, + })), + fetchPublicAccess: vi.fn(async () => ({ + read: true, + append: false, + write: false, + control: false, + })), +})); + +vi.mock("../../src/components/fileUpload.ts", () => ({ + handleFiles: vi.fn(async () => [{ name: "mock-file.ttl" }]), + uploadSuccess: vi.fn(() => true), + alreadyExistsCheck: vi.fn(() => false), + deleteFromPod: vi.fn(async () => true), + deleteContainer: vi.fn(async () => true), + deleteThing: vi.fn(async () => true), +})); + +vi.mock("../../src/components/privacyEdit.ts", () => ({ + checkUrl: vi.fn(() => false), + generateAcl: vi.fn(async () => true), + changeAclAgent: vi.fn(async () => true), + changeAclPublic: vi.fn(async () => true), + createInboxWithACL: vi.fn(async () => true), + updateSharedWithMe: vi.fn(async () => true), + updateSharedWithOthers: vi.fn(async () => true), + getSharedWithOthers: vi.fn(async () => []), + getSharedWithMe: vi.fn(async () => ({ + lastAccessed: "2025-01-01T00:00:00.000Z", + sharedItems: [], + })), + saveNewAccessTime: vi.fn(async () => true), +})); + +vi.mock("../../src/components/queryPod.ts", () => ({ + ensureCacheContainer: vi.fn(async (_pod: string, _webId: string, base: string) => `${base}querycache/`), + createQueriesTTL: vi.fn(async () => "hash-abc"), + uploadQueryFile: vi.fn(async () => "hash-abc.rq"), + uploadResults: vi.fn(async () => "hash-abc.json"), + getStoredTtl: vi.fn(async () => false), + fetchQueryFileData: vi.fn(async () => "SELECT * WHERE { ?s ?p ?o }"), + getCachedQueries: vi.fn(async () => []), + executeQueryWithPodConnected: vi.fn(async () => "no-cache"), + fetchSparqlJsonFileData: vi.fn(async () => ({ + head: { vars: [] }, + results: { bindings: [] }, + })), + stopQuery: vi.fn(() => true), + cleanSourcesUrls: 
vi.fn(() => []), + executeQueryInMainThread: vi.fn(async () => ({ + provenanceOutput: null, + resultsOutput: { + head: { vars: [] }, + results: { bindings: [] }, + }, + })), +})); + +vi.mock("@inrupt/solid-client", () => ({ + getContainedResourceUrlAll: vi.fn(() => [ + "https://pod.example/", + "https://pod.example/container/", + "https://pod.example/container/file.ttl", + ]), + internal_AclRule: {}, +})); + +vi.mock("@triply/yasqe", () => { + class YasqeMock { + private value = ""; + + constructor(_element: Element | null, _options: Record) {} + + setValue(value: string) { + this.value = value; + } + + getValue() { + return this.value; + } + + setCursor(_cursor: Record) {} + + focus() {} + + destroy() {} + + on(_event: string, _callback: (instance: { getValue: () => string }) => void) {} + } + + return { + default: YasqeMock, + }; +}); + +vi.mock("@triply/yasr", () => { + class YasrMock { + constructor(_element: Element | null, _options: Record) {} + + setResponse(_response: unknown, _prefixes: Record) {} + + destroy() {} + } + + return { + default: YasrMock, + }; +}); + +const componentModules = import.meta.glob("../../src/components/**/*.vue"); +const componentEntries = Object.entries(componentModules).sort(([a], [b]) => + a.localeCompare(b) +); + +const flushPromises = async () => { + await Promise.resolve(); + await Promise.resolve(); +}; + +function makeProps(path: string): Record { + if (path.endsWith("/Styling/SharedWith.vue")) { + return { + currentOperation: "sharedWithMe", + currentPod: "https://pod.example/", + currentWebId: "https://user.example/profile/card#me", + }; + } + + if (path.endsWith("/ContainerNav.vue")) { + return { + currentPod: "https://pod.example/", + }; + } + + if (path.endsWith("/LandingPage.vue")) { + return { + currPod: "https://pod.example/", + }; + } + + return {}; +} + +describe("Focused Styling Component Tests", () => { + it("FunctionSelector only shows Home and Query when logged out", () => { + const pinia = createPinia(); + 
const authStore = useAuthStore(pinia); + authStore.setAuth(false, ""); + + const wrapper = shallowMount(FunctionSelector, { + global: { + plugins: [pinia], + config: { + compilerOptions: { + isCustomElement: (tag) => tag.startsWith("v-"), + }, + }, + }, + }); + + expect((wrapper.vm as unknown as { items: string[] }).items).toEqual(["Home", "Query"]); + }); + + it("FunctionSelector shows all functional pages when logged in", () => { + const pinia = createPinia(); + const authStore = useAuthStore(pinia); + authStore.setAuth(true, "https://user.example/profile/card#me"); + + const wrapper = shallowMount(FunctionSelector, { + global: { + plugins: [pinia], + config: { + compilerOptions: { + isCustomElement: (tag) => tag.startsWith("v-"), + }, + }, + }, + }); + + expect((wrapper.vm as unknown as { items: string[] }).items).toEqual([ + "Home", + "Data Upload", + "Pod Browser", + "Query", + "Data Privacy", + ]); + }); + + it("ThemeSwitch initializes from saved localStorage theme", async () => { + localStorage.setItem("app-theme", "light"); + + const wrapper = mount(ThemeSwitch); + await nextTick(); + + const button = wrapper.get("button.theme-switch"); + expect(button.attributes("data-mode")).toBe("light"); + expect(button.attributes("aria-checked")).toBe("false"); + expect(document.documentElement.getAttribute("data-theme")).toBe("light"); + }); + + it("ThemeSwitch defaults to dark and toggles theme on click", async () => { + const wrapper = mount(ThemeSwitch); + await nextTick(); + + const button = wrapper.get("button.theme-switch"); + expect(button.attributes("data-mode")).toBe("dark"); + expect(localStorage.getItem("app-theme")).toBe("dark"); + expect(document.documentElement.getAttribute("data-theme")).toBe("dark"); + + await button.trigger("click"); + expect(button.attributes("data-mode")).toBe("light"); + expect(button.attributes("aria-checked")).toBe("false"); + expect(localStorage.getItem("app-theme")).toBe("light"); + 
expect(document.documentElement.getAttribute("data-theme")).toBe("light");
+  });
+
+  it("ThemeSwitch toggles theme on keyboard handlers", async () => {
+    const wrapper = mount(ThemeSwitch);
+    await nextTick();
+
+    const button = wrapper.get("button.theme-switch");
+    expect(button.attributes("data-mode")).toBe("dark");
+
+    await button.trigger("keydown.enter");
+    expect(button.attributes("data-mode")).toBe("light");
+
+    await button.trigger("keydown.space");
+    expect(button.attributes("data-mode")).toBe("dark");
+  });
+
+  it("TheFooter renders version metadata and shows last modified date after fetch", async () => {
+    const fetchMock = vi.fn(async () => ({
+      json: async () => [
+        {
+          commit: { committer: { date: "2026-02-20T10:20:30.000Z" } },
+        },
+      ],
+    }));
+    vi.stubGlobal("fetch", fetchMock as unknown as typeof fetch);
+
+    const wrapper = mount(TheFooter);
+    await vi.waitFor(() => {
+      expect(wrapper.text()).toContain("Last Modified: 2026-02-20");
+    });
+
+    expect(wrapper.text()).toContain("Version: v1.0.0");
+    expect(fetchMock).toHaveBeenCalledOnce();
+  });
+
+  it("TheFooter keeps last-modified hidden when commit API returns no entries", async () => {
+    const fetchMock = vi.fn(async () => ({
+      json: async () => [],
+    }));
+    vi.stubGlobal("fetch", fetchMock as unknown as typeof fetch);
+
+    const wrapper = mount(TheFooter);
+    await flushPromises();
+
+    expect(wrapper.text()).not.toContain("Last Modified:");
+  });
+
+  it("TheFooter handles fetch failures without crashing", async () => {
+    const fetchMock = vi.fn(async () => {
+      throw new Error("network failure");
+    });
+    const errorSpy = vi.spyOn(console, "error").mockImplementation(() => {});
+    vi.stubGlobal("fetch", fetchMock as unknown as typeof fetch);
+
+    const wrapper = mount(TheFooter);
+    await flushPromises();
+
+    expect(wrapper.exists()).toBe(true);
+    expect(wrapper.text()).not.toContain("Last Modified:");
+    expect(errorSpy).toHaveBeenCalled();
+}); + +describe("All Vue Components Smoke Tests", () => { + beforeEach(() => { + vi.useFakeTimers(); + vi.stubGlobal("fetch", vi.fn(async () => ({ json: async () => [] }))); + vi.stubGlobal("alert", vi.fn()); + vi.stubGlobal("confirm", vi.fn(() => false)); + vi.stubGlobal("open", vi.fn()); + vi.spyOn(console, "warn").mockImplementation(() => {}); + }); + + afterEach(() => { + vi.runOnlyPendingTimers(); + vi.clearAllTimers(); + vi.useRealTimers(); + vi.unstubAllGlobals(); + }); + + for (const [path, loadComponent] of componentEntries) { + it(`mounts ${path}`, async () => { + const module = await loadComponent(); + const component = module.default; + + const wrapper = shallowMount(component, { + props: makeProps(path), + global: { + plugins: [createPinia()], + config: { + compilerOptions: { + isCustomElement: (tag) => tag.startsWith("v-"), + }, + }, + stubs: { + RouterLink: RouterLinkStub, + RouterView: true, + transition: false, + }, + }, + }); + + expect(wrapper.exists()).toBe(true); + wrapper.unmount(); + }); + } +}); diff --git a/tests/components/PodBrowserFeatures.test.ts b/tests/components/PodBrowserFeatures.test.ts new file mode 100644 index 00000000..78c2742f --- /dev/null +++ b/tests/components/PodBrowserFeatures.test.ts @@ -0,0 +1,200 @@ +import { mount } from "@vue/test-utils"; +import { createPinia } from "pinia"; +import { nextTick } from "vue"; +import { describe, expect, it, vi, beforeEach } from "vitest"; +import PodBrowser from "../../src/components/PodBrowser.vue"; +import { useAuthStore } from "../../src/stores/auth"; + +const { + mockUrls, + getContainedResourceUrlAllMock, + fetchDataMock, + getSolidDatasetMock, + getFileMock, + movePodItemMock, + renamePodItemMock, +} = vi.hoisted(() => { + const mockUrls = [ + "https://pod.example/", + "https://pod.example/docs/", + "https://pod.example/docs/report.ttl", + "https://pod.example/image.png", + ]; + + return { + mockUrls, + getContainedResourceUrlAllMock: vi.fn(() => mockUrls), + fetchDataMock: 
vi.fn(async (url: string) => ({ + internal_resourceInfo: { + sourceIri: url, + linkedResources: { + describedby: `${url}.meta`, + }, + }, + })), + getSolidDatasetMock: vi.fn(async () => ({})), + getFileMock: vi.fn(async (url: string) => ({ + name: url.split("/").pop() || "file.ttl", + type: "text/turtle", + size: 2048, + lastModified: Date.UTC(2026, 2, 25), + })), + movePodItemMock: vi.fn(async () => "https://pod.example/archive/report.ttl"), + renamePodItemMock: vi.fn(async () => "https://pod.example/docs/renamed.ttl"), + }; +}); + +vi.mock("../../src/components/getData.ts", () => ({ + fetchData: fetchDataMock, +})); + +vi.mock("../../src/components/login.ts", () => ({ + currentWebId: vi.fn(() => "https://user.example/profile/card#me"), + getPodURLs: vi.fn(async () => ["https://pod.example/"]), +})); + +vi.mock("../../src/components/fileUpload.ts", () => ({ + deleteFromPod: vi.fn(async () => true), + deleteContainer: vi.fn(async () => true), + movePodItem: movePodItemMock, + renamePodItem: renamePodItemMock, +})); + +vi.mock("../../src/components/privacyEdit.ts", () => ({ + checkUrl: vi.fn(() => false), +})); + +vi.mock("@inrupt/solid-client", () => ({ + getContainedResourceUrlAll: getContainedResourceUrlAllMock, + getSolidDataset: getSolidDatasetMock, + getFile: getFileMock, +})); + +vi.mock("@inrupt/solid-client-authn-browser", () => ({ + fetch: vi.fn(), +})); + +const flushPromises = async () => { + await Promise.resolve(); + await Promise.resolve(); + await nextTick(); +}; + +function mountBrowser() { + const pinia = createPinia(); + const authStore = useAuthStore(pinia); + authStore.setAuth(true, "https://user.example/profile/card#me"); + authStore.setSelectedPodUrl("https://pod.example/"); + + return mount(PodBrowser, { + global: { + plugins: [pinia], + config: { + compilerOptions: { + isCustomElement: (tag) => tag.startsWith("v-"), + }, + }, + stubs: { + PodRegistration: true, + PodBrowserGuide: true, + ContainerNav: { + template: '
<div>nav</div>
', + }, + }, + }, + }); +} + +describe("PodBrowser features", () => { + beforeEach(() => { + vi.clearAllMocks(); + vi.stubGlobal("confirm", vi.fn(() => false)); + vi.stubGlobal("alert", vi.fn()); + }); + + it("keeps filters hidden by default and filters items by type and search", async () => { + const wrapper = mountBrowser(); + await flushPromises(); + + expect(wrapper.find(".filters-panel").exists()).toBe(false); + expect((wrapper.vm as unknown as { filteredUrls: string[] }).filteredUrls).toHaveLength(4); + + await wrapper.get(".filter-toggle").trigger("click"); + expect(wrapper.find(".filters-panel").exists()).toBe(true); + + const chips = wrapper.findAll(".filter-chip"); + await chips[1].trigger("click"); + expect((wrapper.vm as unknown as { filteredUrls: string[] }).filteredUrls).toEqual([ + "https://pod.example/", + "https://pod.example/docs/", + ]); + + await wrapper.get("#itemSearch").setValue("report"); + expect((wrapper.vm as unknown as { filteredUrls: string[] }).filteredUrls).toEqual([]); + + await chips[0].trigger("click"); + expect((wrapper.vm as unknown as { filteredUrls: string[] }).filteredUrls).toEqual([ + "https://pod.example/docs/report.ttl", + ]); + + await wrapper.get(".filter-reset").trigger("click"); + expect((wrapper.vm as unknown as { filteredUrls: string[] }).filteredUrls).toHaveLength(4); + }); + + it("renders collapsed move and rename panels when item details are expanded", async () => { + const wrapper = mountBrowser(); + await flushPromises(); + + await wrapper.findAll(".item-toggle")[0].trigger("click"); + await flushPromises(); + + expect(wrapper.find(".move-card").exists()).toBe(false); + expect(wrapper.find(".rename-card").exists()).toBe(false); + expect(wrapper.text()).toContain("Move item"); + expect(wrapper.text()).toContain("Rename item"); + expect(wrapper.text()).toContain("Delete item"); + }); + + it("supports move destination modes and calls move helper", async () => { + const wrapper = mountBrowser(); + await 
flushPromises(); + + await wrapper.findAll(".item-toggle")[2].trigger("click"); + await flushPromises(); + + await wrapper.findAll(".action-toggle")[0].trigger("click"); + expect(wrapper.find(".move-card").exists()).toBe(true); + + const modeButtons = wrapper.findAll(".move-mode-switch button"); + await modeButtons[0].trigger("click"); + const moveInput = wrapper.get(".move-input"); + await moveInput.setValue("https://pod.example/archive/"); + await wrapper.get(".move-btn").trigger("click"); + + expect(movePodItemMock).toHaveBeenCalledWith( + "https://pod.example/image.png", + "https://pod.example/archive/", + "https://pod.example/" + ); + }); + + it("supports rename panel and calls rename helper", async () => { + const wrapper = mountBrowser(); + await flushPromises(); + + await wrapper.findAll(".item-toggle")[2].trigger("click"); + await flushPromises(); + + await wrapper.findAll(".action-toggle")[1].trigger("click"); + expect(wrapper.find(".rename-card").exists()).toBe(true); + + await wrapper.get(".rename-input").setValue("renamed.ttl"); + await wrapper.get(".rename-btn").trigger("click"); + + expect(renamePodItemMock).toHaveBeenCalledWith( + "https://pod.example/image.png", + "renamed.ttl", + "https://pod.example/" + ); + }); +}); diff --git a/tests/components/setup.ts b/tests/components/setup.ts new file mode 100644 index 00000000..5d3dedde --- /dev/null +++ b/tests/components/setup.ts @@ -0,0 +1,7 @@ +import { afterEach, vi } from "vitest"; + +afterEach(() => { + document.documentElement.removeAttribute("data-theme"); + localStorage.clear(); + vi.restoreAllMocks(); +}); diff --git a/tests/register-ts-loader.mjs b/tests/register-ts-loader.mjs new file mode 100644 index 00000000..d460315e --- /dev/null +++ b/tests/register-ts-loader.mjs @@ -0,0 +1,4 @@ +import { register } from "node:module"; +import { pathToFileURL } from "node:url"; + +register("./tests/ts-esbuild-loader.mjs", pathToFileURL("./")); diff --git a/tests/ts-esbuild-loader.mjs 
b/tests/ts-esbuild-loader.mjs new file mode 100644 index 00000000..a8883399 --- /dev/null +++ b/tests/ts-esbuild-loader.mjs @@ -0,0 +1,64 @@ +import { readFile } from "node:fs/promises"; +import path from "node:path"; +import { fileURLToPath, pathToFileURL } from "node:url"; +import { transform } from "esbuild"; + +const EXTENSIONS = [".ts", ".tsx", ".js", ".mjs"]; + +export async function resolve(specifier, context, defaultResolve) { + try { + return await defaultResolve(specifier, context, defaultResolve); + } catch (error) { + const isRelativeOrAbsolute = + specifier.startsWith("./") || + specifier.startsWith("../") || + specifier.startsWith("/"); + + if (!isRelativeOrAbsolute || path.extname(specifier)) { + throw error; + } + + const parentPath = context.parentURL + ? path.dirname(fileURLToPath(context.parentURL)) + : process.cwd(); + + for (const ext of EXTENSIONS) { + const candidatePath = specifier.startsWith("/") + ? `${specifier}${ext}` + : path.join(parentPath, `${specifier}${ext}`); + + try { + return await defaultResolve( + pathToFileURL(candidatePath).href, + context, + defaultResolve + ); + } catch { + // Continue checking the next extension. + } + } + + throw error; + } +} + +export async function load(url, context, defaultLoad) { + if (url.endsWith(".ts") || url.endsWith(".tsx")) { + const source = await readFile(new URL(url), "utf8"); + const loader = url.endsWith(".tsx") ? 
"tsx" : "ts"; + const { code } = await transform(source, { + loader, + format: "esm", + target: "es2020", + sourcemap: "inline", + }); + + return { + format: "module", + source: code, + shortCircuit: true, + }; + } + + return defaultLoad(url, context, defaultLoad); +} diff --git a/tests/unit/fileUpload.test.ts b/tests/unit/fileUpload.test.ts new file mode 100644 index 00000000..2cdbfa70 --- /dev/null +++ b/tests/unit/fileUpload.test.ts @@ -0,0 +1,54 @@ +import assert from "node:assert/strict"; +import test from "node:test"; +import { + alreadyExistsCheck, + derefrenceFile, + getMimeType, + uploadSuccess, +} from "../../src/components/fileUploadUtils.ts"; +import { mimeTypes } from "../../src/components/mime_types.js"; + +test("getMimeType resolves known MIME types case-insensitively", () => { + assert.equal(getMimeType(".TTL"), mimeTypes[".ttl"]); +}); + +test("getMimeType falls back to application/octet-stream for unknown extensions", () => { + assert.equal(getMimeType(".unknown-ext"), "application/octet-stream"); +}); + +test("alreadyExistsCheck only returns true for the explicit marker string", () => { + assert.equal(alreadyExistsCheck("already exists"), true); + assert.equal(alreadyExistsCheck("uploaded"), false); +}); + +test("uploadSuccess returns true only when every upload item is successful", () => { + assert.equal(uploadSuccess(["https://pod.example/file1.ttl"]), true); + assert.equal(uploadSuccess(["https://pod.example/file1.ttl", "error"]), false); + assert.equal(uploadSuccess([]), false); +}); + +test("derefrenceFile returns file name, size, and source IRI", () => { + const inputFile = { + name: "example.ttl", + size: 128, + internal_resourceInfo: { + sourceIri: "https://pod.example/example.ttl", + }, + } as any; + + assert.deepEqual(derefrenceFile(inputFile), [ + "example.ttl", + "128", + "https://pod.example/example.ttl", + ]); +}); + +test("derefrenceFile returns an error sentinel when file metadata is malformed", () => { + const oldError = 
console.error; + console.error = () => {}; + try { + assert.deepEqual(derefrenceFile({} as any), ["error"]); + } finally { + console.error = oldError; + } +}); diff --git a/tests/unit/getData.test.ts b/tests/unit/getData.test.ts new file mode 100644 index 00000000..85272b76 --- /dev/null +++ b/tests/unit/getData.test.ts @@ -0,0 +1,41 @@ +import assert from "node:assert/strict"; +import test from "node:test"; +import { + webIdDataset, + fetchData, + fetchPermissionsData, + fetchAclAgents, + fetchPublicAccess, +} from "../../src/components/getData.ts"; + +test("webIdDataset throws for invalid WebID URL input", async () => { + await assert.rejects(() => webIdDataset("not-a-url", ""), /Invalid URL/); +}); + +test("fetchData throws for invalid resource URL input", async () => { + await assert.rejects(() => fetchData("not-a-url"), /Invalid URL/); +}); + +test("fetchPermissionsData returns null when ACL lookup fails", async () => { + assert.equal(await fetchPermissionsData("not-a-url"), null); +}); + +test("fetchAclAgents returns null when ACL agent lookup fails", async () => { + const oldError = console.error; + console.error = () => {}; + try { + assert.equal(await fetchAclAgents("not-a-url"), null); + } finally { + console.error = oldError; + } +}); + +test("fetchPublicAccess returns null when public ACL lookup fails", async () => { + const oldError = console.error; + console.error = () => {}; + try { + assert.equal(await fetchPublicAccess("not-a-url"), null); + } finally { + console.error = oldError; + } +}); diff --git a/tests/unit/login.test.ts b/tests/unit/login.test.ts new file mode 100644 index 00000000..25999a54 --- /dev/null +++ b/tests/unit/login.test.ts @@ -0,0 +1,148 @@ +import assert from "node:assert/strict"; +import test, { afterEach, beforeEach } from "node:test"; +import { + session, + startLogin, + logOut, + isLoggedin, + currentWebId, + getPodURLs, + redirectToHomepage, + redirectToLogin, +} from "../../src/components/login.ts"; + +function 
createSessionStorageMock() { + const store = new Map(); + return { + setItem(key: string, value: string) { + store.set(key, String(value)); + }, + getItem(key: string) { + return store.has(key) ? store.get(key) : null; + }, + removeItem(key: string) { + store.delete(key); + }, + clear() { + store.clear(); + }, + }; +} + +const originalWindow = (globalThis as any).window; +const originalSessionStorage = (globalThis as any).sessionStorage; +const originalLogin = session.login; +const originalLogout = session.logout; +const originalSessionInfo = { ...session.info } as any; +const originalConsoleError = console.error; + +beforeEach(() => { + (globalThis as any).window = { + location: { href: "https://example.org/solid-cockpit/login" }, + }; + (globalThis as any).sessionStorage = createSessionStorageMock(); + + session.login = originalLogin; + session.logout = originalLogout; + session.info.isLoggedIn = false; + (session.info as any).webId = originalSessionInfo.webId; +}); + +afterEach(() => { + (globalThis as any).window = originalWindow; + (globalThis as any).sessionStorage = originalSessionStorage; + + session.login = originalLogin; + session.logout = originalLogout; + session.info.isLoggedIn = originalSessionInfo.isLoggedIn; + (session.info as any).webId = originalSessionInfo.webId; + console.error = originalConsoleError; +}); + +test("startLogin stores redirect and calls session.login when logged out", async () => { + let callCount = 0; + session.login = (async (options: any) => { + callCount += 1; + assert.equal(options.oidcIssuer, "https://issuer.example"); + assert.equal(options.redirectUrl, "https://example.org/solid-cockpit/login"); + assert.equal(options.clientName, "Solid Cockpit"); + }) as any; + + const status = await startLogin("https://issuer.example"); + assert.equal(status, ""); + assert.equal(callCount, 1); + assert.equal( + (globalThis as any).sessionStorage.getItem("postLoginRedirect"), + "https://example.org/solid-cockpit/login" + ); +}); + 
+test("startLogin is a no-op when already logged in", async () => { + session.info.isLoggedIn = true; + let callCount = 0; + session.login = (async () => { + callCount += 1; + }) as any; + + const status = await startLogin("https://issuer.example"); + assert.equal(status, ""); + assert.equal(callCount, 0); +}); + +test("startLogin returns error status when session.login fails", async () => { + session.login = (async () => { + throw new Error("login failed"); + }) as any; + console.error = () => {}; + + const status = await startLogin("https://issuer.example"); + assert.equal(status, "error"); +}); + +test("logOut returns updated login state when logout succeeds", async () => { + session.info.isLoggedIn = true; + session.logout = (async () => { + session.info.isLoggedIn = false; + }) as any; + + assert.equal(await logOut(), false); +}); + +test("logOut returns current login state when logout fails", async () => { + session.info.isLoggedIn = true; + session.logout = (async () => { + throw new Error("logout failed"); + }) as any; + console.error = () => {}; + + assert.equal(await logOut(), true); +}); + +test("logOut returns false when already logged out", async () => { + session.info.isLoggedIn = false; + assert.equal(await logOut(), false); +}); + +test("isLoggedin and currentWebId reflect session info", () => { + session.info.isLoggedIn = true; + (session.info as any).webId = "https://pod.example/profile/card#me"; + + assert.equal(isLoggedin(), true); + assert.equal(currentWebId(), "https://pod.example/profile/card#me"); +}); + +test("getPodURLs returns null when pod URL lookup fails", async () => { + (session.info as any).webId = undefined; + assert.equal(await getPodURLs(), null); +}); + +test("redirect helpers set location href under /solid-cockpit", () => { + (globalThis as any).window.location.href = "https://example.org/current/page"; + + redirectToHomepage(); + assert.equal((globalThis as any).window.location.href, "https://example.org/solid-cockpit/"); + + 
(globalThis as any).window.location.href = "https://example.org/current/page"; + redirectToLogin(); + assert.equal((globalThis as any).window.location.href, "https://example.org/solid-cockpit/login"); +}); diff --git a/tests/unit/privacyEdit.test.ts b/tests/unit/privacyEdit.test.ts new file mode 100644 index 00000000..2c1fd05a --- /dev/null +++ b/tests/unit/privacyEdit.test.ts @@ -0,0 +1,90 @@ +import assert from "node:assert/strict"; +import test from "node:test"; +import { + addDatetime, + createSolidDataset, + createThing, + getDatetime, + getThing, + getThingAll, + setThing, +} from "@inrupt/solid-client"; +import { + checkUrl, + createNewAcl, + getCurrentRdfDateTime, + recordLastAccessTime, + saveNewAccessTime, +} from "../../src/components/privacyEdit.ts"; + +const DCT_MODIFIED = "http://purl.org/dc/terms/modified"; + +test("getCurrentRdfDateTime returns a valid ISO timestamp string", () => { + const timestamp = getCurrentRdfDateTime(); + assert.equal(Number.isNaN(Date.parse(timestamp)), false); + assert.match(timestamp, /\d{4}-\d{2}-\d{2}T/); +}); + +test("checkUrl validates URL input and compares against the current WebID", () => { + assert.equal(checkUrl("https://pod.example/profile/card#me", "https://pod.example/profile/card#me"), true); + assert.equal(checkUrl("https://pod.example/profile/card#me", "https://pod.example/other#me"), false); + assert.equal(checkUrl("not-a-url", "https://pod.example/profile/card#me"), true); +}); + +test("createNewAcl builds a frozen ACL dataset with expected metadata", () => { + const targetResource = { + ...createSolidDataset(), + internal_resourceInfo: { + sourceIri: "https://pod.example/resource", + aclUrl: "https://pod.example/resource.acl", + isRawData: false, + linkedResources: {}, + }, + } as any; + + const acl = createNewAcl(targetResource); + assert.equal(Object.isFrozen(acl), true); + assert.equal(acl.internal_accessTo, "https://pod.example/resource"); + assert.equal(acl.internal_resourceInfo.sourceIri, 
"https://pod.example/resource.acl"); +}); + +test("recordLastAccessTime creates a last-access Thing when absent", () => { + const fileUrl = "https://pod.example/inbox/sharedWithMe.ttl"; + const dataset = createSolidDataset(); + + const updated = recordLastAccessTime(dataset, fileUrl); + const things = getThingAll(updated); + + assert.equal(things.length, 1); + assert.equal(Boolean(getDatetime(things[0], DCT_MODIFIED)), true); +}); + +test("recordLastAccessTime updates existing last-access Thing when present", () => { + const fileUrl = "https://pod.example/inbox/sharedWithMe.ttl"; + const oldDate = new Date("2020-01-01T00:00:00.000Z"); + + let dataset = createSolidDataset(); + let lastAccessThing = addDatetime( + createThing({ url: `${fileUrl}#lastAccess` }), + DCT_MODIFIED, + oldDate + ); + dataset = setThing(dataset, lastAccessThing); + + const updated = recordLastAccessTime(dataset, fileUrl); + const updatedThing = getThing(updated, `${fileUrl}#lastAccess`); + const updatedDate = updatedThing ? 
getDatetime(updatedThing, DCT_MODIFIED) : null;
+
+  assert.ok(updatedDate);
+  assert.notEqual(updatedDate?.toISOString(), oldDate.toISOString());
+});
+
+test("saveNewAccessTime returns false when the target pod URL is invalid", async () => {
+  const oldError = console.error;
+  console.error = () => {};
+  try {
+    assert.equal(await saveNewAccessTime("not-a-url"), false);
+  } finally {
+    console.error = oldError;
+  }
+});
diff --git a/tests/unit/queryPod.test.ts b/tests/unit/queryPod.test.ts
new file mode 100644
index 00000000..a02de64a
--- /dev/null
+++ b/tests/unit/queryPod.test.ts
@@ -0,0 +1,104 @@
+import assert from "node:assert/strict";
+import test from "node:test";
+import {
+  cleanSourcesUrls,
+  cleanSourcesUrlsForCache,
+  generateHash,
+  generateSeededHash,
+  parseSparqlQuery,
+  stopQuery,
+} from "../../src/components/queryPodUtils.ts";
+
+test("stopQuery destroys a binding stream when destroy() exists", () => {
+  let wasDestroyed = false;
+  const mockBindingStream = {
+    destroy: () => {
+      wasDestroyed = true;
+    },
+  };
+
+  assert.equal(stopQuery(mockBindingStream), true);
+  assert.equal(wasDestroyed, true);
+});
+
+test("stopQuery returns false for null or non-destroyable streams", () => {
+  assert.equal(stopQuery(null), false);
+  assert.equal(stopQuery({}), false);
+});
+
+test("cleanSourcesUrls strips angle brackets and applies auth context to non-endpoint URLs", () => {
+  const fakeFetch = (() =>
+    Promise.resolve(new Response("ok"))) as unknown as typeof fetch;
+  const cleaned = cleanSourcesUrls([
+    "<https://pod.example/data.ttl>",
+    "https://query.example/sparql",
+    "https://query.example/endpoint",
+  ], fakeFetch);
+
+  assert.equal(cleaned[0].value, "https://pod.example/data.ttl");
+  assert.equal(typeof cleaned[0].context?.fetch, "function");
+  assert.equal(cleaned[1].value, "https://query.example/sparql");
+  assert.equal(cleaned[1].context, undefined);
+  assert.equal(cleaned[2].context, undefined);
+});
+
+test("cleanSourcesUrlsForCache strips angle brackets without changing 
URL order", () => {
+  assert.deepEqual(
+    cleanSourcesUrlsForCache([
+      "<https://pod.example/data.ttl>",
+      "https://query.example/sparql",
+    ]),
+    ["https://pod.example/data.ttl", "https://query.example/sparql"]
+  );
+});
+
+test("generateHash returns alphanumeric hashes of the requested length", () => {
+  const hash = generateHash(12);
+  assert.equal(hash.length, 12);
+  assert.match(hash, /^[A-Za-z0-9]+$/);
+});
+
+test("generateSeededHash is deterministic for a seed and length", () => {
+  const hashA = generateSeededHash("triple-seed", 16);
+  const hashB = generateSeededHash("triple-seed", 16);
+  const hashC = generateSeededHash("different-seed", 16);
+
+  assert.equal(hashA, hashB);
+  assert.notEqual(hashA, hashC);
+  assert.equal(hashA.length, 16);
+});
+
+test("generateSeededHash uses default output length when omitted", () => {
+  assert.equal(generateSeededHash("default-length-seed").length, 10);
+});
+
+test("parseSparqlQuery extracts SERVICE endpoints from nested query patterns", () => {
+  const query = `
+    SELECT * WHERE {
+      SERVICE <https://service.one/sparql> { ?s ?p ?o . }
+      OPTIONAL {
+        SERVICE <https://service.two/endpoint> { ?a ?b ?c . }
+      }
+    }
+  `;
+  const serviceSources = parseSparqlQuery(query);
+
+  assert.deepEqual(serviceSources.sort(), [
+    "https://service.one/sparql",
+    "https://service.two/endpoint",
+  ]);
+});
+
+test("parseSparqlQuery returns an empty list for invalid SPARQL input", () => {
+  const oldError = console.error;
+  console.error = () => {};
+  try {
+    assert.deepEqual(parseSparqlQuery("THIS IS NOT SPARQL"), []);
+  } finally {
+    console.error = oldError;
+  }
+});
+
+test("parseSparqlQuery returns no services for update statements", () => {
+  assert.deepEqual(parseSparqlQuery("INSERT DATA { <https://ex.org/s> <https://ex.org/p> <https://ex.org/o> . 
}"), []); +}); diff --git a/tests/unit/z3-headers.test.ts b/tests/unit/z3-headers.test.ts new file mode 100644 index 00000000..f3b17db5 --- /dev/null +++ b/tests/unit/z3-headers.test.ts @@ -0,0 +1,74 @@ +import assert from "node:assert/strict"; +import test from "node:test"; +import { createCoiFetch } from "../../src/components/z3-headers.ts"; + +test("createCoiFetch injects isolation and CORS headers by default", async () => { + const baseFetch = async () => + new Response("ok", { + status: 200, + headers: { "Content-Type": "text/plain" }, + }); + + const wrappedFetch = createCoiFetch(baseFetch as typeof fetch); + const response = await wrappedFetch("https://example.org/resource"); + + assert.equal(response.headers.get("Cross-Origin-Embedder-Policy"), "require-corp"); + assert.equal(response.headers.get("Cross-Origin-Resource-Policy"), "cross-origin"); + assert.equal(response.headers.get("Cross-Origin-Opener-Policy"), "same-origin"); + assert.equal(response.headers.get("Access-Control-Allow-Origin"), "*"); +}); + +test("createCoiFetch enforces credentialless mode for no-cors requests", async () => { + let receivedRequest: Request | undefined; + const baseFetch = async (input: RequestInfo | URL) => { + receivedRequest = input as Request; + return new Response("ok", { status: 200 }); + }; + + const wrappedFetch = createCoiFetch(baseFetch as typeof fetch, { + coepCredentialless: true, + }); + + const request = new Request("https://example.org/resource", { + mode: "no-cors", + credentials: "include", + }); + const response = await wrappedFetch(request); + + assert.equal(receivedRequest?.credentials, "omit"); + assert.equal(response.headers.get("Cross-Origin-Embedder-Policy"), "credentialless"); + assert.equal(response.headers.get("Cross-Origin-Resource-Policy"), null); +}); + +test("createCoiFetch skips ACAO header when noCors option is enabled", async () => { + const baseFetch = async () => new Response("ok", { status: 200 }); + const wrappedFetch = 
createCoiFetch(baseFetch as typeof fetch, { noCors: true }); + const response = await wrappedFetch("https://example.org/resource"); + + assert.equal(response.headers.get("Access-Control-Allow-Origin"), null); +}); + +test("createCoiFetch returns opaque responses unchanged when passthroughOpaque is enabled", async () => { + const opaqueResponse = { status: 0 } as Response; + const baseFetch = async () => opaqueResponse; + const wrappedFetch = createCoiFetch(baseFetch as typeof fetch, { + passthroughOpaque: true, + }); + + const response = await wrappedFetch("https://example.org/resource"); + assert.equal(response, opaqueResponse); +}); + +test("createCoiFetch surfaces invalid request combinations from Request construction", async () => { + const baseFetch = async () => new Response("ok", { status: 200 }); + const wrappedFetch = createCoiFetch(baseFetch as typeof fetch); + + await assert.rejects( + () => + wrappedFetch("https://example.org/resource", { + mode: "cors", + cache: "only-if-cached", + }), + /only-if-cached/ + ); +}); diff --git a/vite.config.js b/vite.config.js index f9250457..419dd81b 100644 --- a/vite.config.js +++ b/vite.config.js @@ -1,12 +1,23 @@ import { defineConfig } from "vite"; import vue from "@vitejs/plugin-vue"; import { fileURLToPath, URL } from "node:url"; +import { readFileSync } from "node:fs"; + +const packageJson = JSON.parse( + readFileSync(new URL("./package.json", import.meta.url), "utf-8") +); +const appVersion = packageJson.version ?? 
"0.0.0"; +const appReleaseTag = `web-app-v${appVersion}`; // https://vitejs.dev/config/ export default defineConfig(({ command }) => { const isDev = command === "serve"; return { plugins: [vue()], + define: { + __APP_VERSION__: JSON.stringify(appVersion), + __APP_RELEASE_TAG__: JSON.stringify(appReleaseTag), + }, resolve: { alias: { "@": fileURLToPath(new URL("./src", import.meta.url)), diff --git a/vitest.config.ts b/vitest.config.ts new file mode 100644 index 00000000..ea79f1b7 --- /dev/null +++ b/vitest.config.ts @@ -0,0 +1,44 @@ +import { defineConfig } from "vitest/config"; +import vue from "@vitejs/plugin-vue"; +import { fileURLToPath, URL } from "node:url"; +import { readFileSync } from "node:fs"; + +const packageJson = JSON.parse( + readFileSync(new URL("./package.json", import.meta.url), "utf-8") +); +const appVersion = packageJson.version ?? "0.0.0"; +const appReleaseTag = `web-app-v${appVersion}`; + +export default defineConfig({ + plugins: [vue()], + define: { + __APP_VERSION__: JSON.stringify(appVersion), + __APP_RELEASE_TAG__: JSON.stringify(appReleaseTag), + }, + resolve: { + alias: { + "@": fileURLToPath(new URL("./src", import.meta.url)), + }, + }, + test: { + environment: "jsdom", + include: ["tests/components/**/*.test.ts"], + setupFiles: ["./tests/components/setup.ts"], + coverage: { + provider: "istanbul", + reporter: ["text", "json-summary"], + include: [ + "src/components/Styling/ThemeSwitch.vue", + "src/components/Styling/TheFooter.vue", + "src/components/Styling/useTheme.ts", + "src/components/PodBrowser.vue", + ], + thresholds: { + lines: 70, + statements: 70, + functions: 70, + branches: 60, + }, + }, + }, +});