Compare commits
18 Commits
17fbe1d507
...
develop
| Author | SHA1 | Date | |
|---|---|---|---|
|
ca854e9d12
|
|||
|
65ef6f682f
|
|||
|
4fa8fce9b2
|
|||
|
e202376998
|
|||
|
89afe59a00
|
|||
|
1162e4e85e
|
|||
|
70e4ce8b4b
|
|||
|
e6a268bafd
|
|||
|
13581d7f2f
|
|||
|
2a058b75bd
|
|||
|
f7c4b8d6da
|
|||
|
8052ccf0d5
|
|||
|
07917a500b
|
|||
|
37972aa660
|
|||
|
10e51b5da4
|
|||
|
0b65e17903
|
|||
|
355653e4f2
|
|||
|
3c3e1b67fd
|
@@ -1 +0,0 @@
|
||||
/home/phundrak/code/web/phundrak.com
|
||||
15
.env.example
15
.env.example
@@ -1,12 +1,3 @@
|
||||
APP_ENVIRONMENT=dev
|
||||
APP__EMAIL__HOST=mail.example.com
|
||||
APP__EMAIL__PORT=465
|
||||
APP__EMAIL__TLS=true
|
||||
APP__EMAIL__STARTTLS=no
|
||||
APP__EMAIL__USER="username"
|
||||
APP__EMAIL__PASSWORD="changeme"
|
||||
APP__EMAIL__RECIPIENT="Recipient <user@example.com>"
|
||||
APP__EMAIL__FROM="Contact Form <noreply@example.com>"
|
||||
NUXT_PUBLIC_BACKEND_URL=http://localhost:3100
|
||||
NUXT_PUBLIC_TURNSTILE_SITE_KEY="changeme"
|
||||
NUXT_TURNSTILE_SECRET_KEY="changeme"
|
||||
NUXT_PUBLIC_API_BASE=http://localhost:3100
|
||||
NUXT_PUBLIC_URL_BASE=http://localhost:3000
|
||||
NUXT_PUBLIC_FEDIVERSE_CREATOR="@user@instance.example"
|
||||
|
||||
36
.envrc
36
.envrc
@@ -12,43 +12,13 @@ dotenv_if_exists
|
||||
watch_file flake.nix
|
||||
watch_file flake.lock
|
||||
watch_file .envrc.local
|
||||
watch_file backend/shell.nix
|
||||
watch_file frontend/shell.nix
|
||||
watch_file nix/shell.nix
|
||||
|
||||
# Check if .envrc.local exists and contains a shell preference
|
||||
if [[ -f .envrc.local ]]; then
|
||||
source .envrc.local
|
||||
fi
|
||||
|
||||
# If no shell is specified, prompt the user interactively
|
||||
if [[ -z "$NIX_SHELL_NAME" ]]; then
|
||||
echo ""
|
||||
echo "🔧 Available development shells:"
|
||||
echo " 1) frontend - Nuxt.js/Vue development environment"
|
||||
echo " 2) backend - Rust backend development environment"
|
||||
echo ""
|
||||
echo "💡 Tip: Create a .envrc.local file with 'export NIX_SHELL_NAME=frontend' to skip this prompt"
|
||||
echo ""
|
||||
|
||||
# Read user input
|
||||
read -p "Select shell (1 or 2): " choice
|
||||
|
||||
case $choice in
|
||||
1|frontend)
|
||||
NIX_SHELL_NAME=frontend
|
||||
;;
|
||||
2|backend)
|
||||
NIX_SHELL_NAME=backend
|
||||
;;
|
||||
*)
|
||||
echo "❌ Invalid choice. Please select 1 or 2."
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "✅ Loading ${NIX_SHELL_NAME} environment..."
|
||||
fi
|
||||
|
||||
if ! use flake ".#${NIX_SHELL_NAME}" --no-pure-eval; then
|
||||
echo "❌ devenv could not be built. The devenv environment was not loaded. Make the necessary changes to flake.nix and hit enter to try again." >&2
|
||||
if ! use flake . --no-pure-eval; then
|
||||
echo "devenv could not be built. The devenv environment was not loaded. Make the necessary changes to flake.nix and hit enter to try again." >&2
|
||||
fi
|
||||
|
||||
1
.gitattributes
vendored
1
.gitattributes
vendored
@@ -1 +0,0 @@
|
||||
*.org linguist-detectable=true
|
||||
217
.github/workflows/README.md
vendored
217
.github/workflows/README.md
vendored
@@ -1,217 +0,0 @@
|
||||
# GitHub Actions Workflows
|
||||
|
||||
## Docker Image Publishing
|
||||
|
||||
The `publish-docker.yml` workflow automatically builds and publishes Docker images for the backend service using Nix.
|
||||
|
||||
### Triggers and Tagging Strategy
|
||||
|
||||
| Event | Condition | Published Tags | Example |
|
||||
|--------------+-----------------------------+------------------------+-------------------|
|
||||
| Tag push | Tag pushed to `main` branch | `latest` + version tag | `latest`, `1.0.0` |
|
||||
| Branch push | Push to `develop` branch | `develop` | `develop` |
|
||||
| Pull request | PR opened or updated | `pr<number>` | `pr12` |
|
||||
| Branch push | Push to `main` (no tag) | `latest` | `latest` |
|
||||
|
||||
### Required Secrets
|
||||
|
||||
Configure these secrets in your repository settings (`Settings` → `Secrets and variables` → `Actions`):
|
||||
|
||||
| Secret Name | Description | Example Value |
|
||||
|---------------------+---------------------------------------------+-----------------------------------------|
|
||||
| `DOCKER_USERNAME` | Username for Docker registry authentication | `phundrak` |
|
||||
| `DOCKER_PASSWORD` | Password or token for Docker registry | Personal Access Token (PAT) or password |
|
||||
| `CACHIX_AUTH_TOKEN` | (Optional) Token for Cachix caching | Your Cachix auth token |
|
||||
|
||||
#### For GitHub Container Registry (ghcr.io)
|
||||
|
||||
1. Create a Personal Access Token (PAT):
|
||||
- Go to GitHub Settings → Developer settings → Personal access tokens → Tokens (classic)
|
||||
- Click "Generate new token (classic)"
|
||||
- Select scopes: `write:packages`, `read:packages`, `delete:packages`
|
||||
- Copy the generated token
|
||||
|
||||
2. Add secrets:
|
||||
- `DOCKER_USERNAME`: Your GitHub username
|
||||
- `DOCKER_PASSWORD`: The PAT you just created
|
||||
|
||||
#### For Docker Hub
|
||||
|
||||
1. Create an access token:
|
||||
- Go to Docker Hub → Account Settings → Security → Access Tokens
|
||||
- Click "New Access Token"
|
||||
- Set permissions to "Read, Write, Delete"
|
||||
- Copy the generated token
|
||||
|
||||
2. Add secrets:
|
||||
- `DOCKER_USERNAME`: Your Docker Hub username
|
||||
- `DOCKER_PASSWORD`: The access token you just created
|
||||
|
||||
#### For Gitea Registry (e.g., labs.phundrak.com)
|
||||
|
||||
1. Create an access token in Gitea:
|
||||
- Log in to your Gitea instance
|
||||
- Go to Settings (click your avatar → Settings)
|
||||
- Navigate to Applications → Manage Access Tokens
|
||||
- Click "Generate New Token"
|
||||
- Give it a descriptive name (e.g., "Phundrak Labs Docker Registry")
|
||||
- Select the required permissions:
|
||||
- `write:package` - Required to publish packages
|
||||
- `read:package` - Required to pull packages
|
||||
- Click "Generate Token"
|
||||
- Copy the generated token immediately (it won't be shown again)
|
||||
|
||||
2. Add secrets:
|
||||
- `DOCKER_USERNAME`: Your Gitea username
|
||||
- `DOCKER_PASSWORD`: The access token you just created
|
||||
|
||||
Note: Gitea's container registry is accessed at `https://your-gitea-instance/username/-/packages`
|
||||
|
||||
#### For Other Custom Registries
|
||||
|
||||
1. Obtain credentials from your registry administrator
|
||||
|
||||
2. Add secrets:
|
||||
- `DOCKER_USERNAME`: Your registry username
|
||||
- `DOCKER_PASSWORD`: Your registry password or token
|
||||
|
||||
### Configuring Cachix (Build Caching)
|
||||
|
||||
Cachix is a Nix binary cache that dramatically speeds up builds by caching build artifacts. The workflow supports configurable Cachix settings.
|
||||
|
||||
#### Environment Variables
|
||||
|
||||
Configure these in the workflow's `env` section or as repository variables:
|
||||
|
||||
| Variable | Description | Default Value | Example |
|
||||
|--------------------+------------------------------------------------+---------------+--------------------|
|
||||
| `CACHIX_NAME` | Name of the Cachix cache to use | `devenv` | `phundrak-dot-com` |
|
||||
| `CACHIX_SKIP_PUSH` | Whether to skip pushing artifacts to the cache | `true` | `false` |
|
||||
|
||||
#### Option 1: Pull from Public Cache Only
|
||||
|
||||
If you only want to pull from a public cache (no pushing):
|
||||
|
||||
1. Set environment variables in the workflow:
|
||||
```yaml
|
||||
env:
|
||||
CACHIX_NAME: devenv # or any public cache name
|
||||
CACHIX_SKIP_PUSH: true
|
||||
```
|
||||
|
||||
2. No `CACHIX_AUTH_TOKEN` secret is needed
|
||||
|
||||
This is useful when using public caches like `devenv` or `nix-community`.
|
||||
|
||||
#### Option 2: Use Your Own Cache (Recommended for Faster Builds)
|
||||
|
||||
To cache your own build artifacts for faster subsequent builds:
|
||||
|
||||
1. Create a Cachix cache:
|
||||
- Go to https://app.cachix.org
|
||||
- Sign up and create a new cache (e.g., `your-project-name`)
|
||||
- Free for public/open-source projects
|
||||
|
||||
2. Get your auth token:
|
||||
- In Cachix, go to your cache settings
|
||||
- Find your auth token under "Auth tokens"
|
||||
- Copy the token
|
||||
|
||||
3. Add your cache configuration to `flake.nix`:
|
||||
```nix
|
||||
nixConfig = {
|
||||
extra-trusted-public-keys = [
|
||||
"devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw="
|
||||
"your-cache-name.cachix.org-1:YOUR_PUBLIC_KEY_HERE"
|
||||
];
|
||||
extra-substituters = [
|
||||
"https://devenv.cachix.org"
|
||||
"https://your-cache-name.cachix.org"
|
||||
];
|
||||
};
|
||||
```
|
||||
|
||||
4. Configure the workflow:
|
||||
- Edit `.github/workflows/publish-docker.yml`:
|
||||
```yaml
|
||||
env:
|
||||
CACHIX_NAME: your-cache-name
|
||||
CACHIX_SKIP_PUSH: false
|
||||
```
|
||||
- Or set as repository variables in GitHub/Gitea
|
||||
|
||||
5. Add your auth token as a secret:
|
||||
- Go to repository `Settings` → `Secrets and variables` → `Actions`
|
||||
- Add secret `CACHIX_AUTH_TOKEN` with your token
|
||||
|
||||
#### Benefits of Using Your Own Cache
|
||||
|
||||
- **Faster builds**: Subsequent builds reuse cached artifacts (Rust dependencies, compiled binaries)
|
||||
- **Reduced CI time**: Can reduce build time from 10+ minutes to under 1 minute
|
||||
- **Cost savings**: Less compute time means lower CI costs
|
||||
- **Shared across branches**: All branches benefit from the same cache
|
||||
|
||||
### Configuring the Docker Registry
|
||||
|
||||
The target registry is set via the `DOCKER_REGISTRY` environment variable in the workflow file. To change it:
|
||||
|
||||
1. Edit `.github/workflows/publish-docker.yml`
|
||||
2. Modify the `env` section:
|
||||
|
||||
```yaml
|
||||
env:
|
||||
DOCKER_REGISTRY: ghcr.io # Change to your registry (e.g., docker.io, labs.phundrak.com)
|
||||
IMAGE_NAME: phundrak/phundrak-dot-com-backend
|
||||
```
|
||||
|
||||
Or set it as a repository variable:
|
||||
- Go to `Settings` → `Secrets and variables` → `Actions` → `Variables` tab
|
||||
- Add `DOCKER_REGISTRY` with your desired registry URL
|
||||
|
||||
### Image Naming
|
||||
|
||||
Images are published with the name: `${DOCKER_REGISTRY}/${IMAGE_NAME}:${TAG}`
|
||||
|
||||
For example:
|
||||
- `labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest`
|
||||
- `labs.phundrak.com/phundrak/phundrak-dot-com-backend:1.0.0`
|
||||
- `labs.phundrak.com/phundrak/phundrak-dot-com-backend:develop`
|
||||
- `labs.phundrak.com/phundrak/phundrak-dot-com-backend:pr12`
|
||||
|
||||
### Local Testing
|
||||
|
||||
To test the Docker image build locally:
|
||||
|
||||
```bash
|
||||
# Build the image with Nix
|
||||
nix build .#backendDockerLatest
|
||||
|
||||
# Load it into Docker
|
||||
docker load < result
|
||||
|
||||
# Run the container (image name comes from Cargo.toml package.name)
|
||||
docker run -p 3100:3100 phundrak/phundrak-dot-com-backend:latest
|
||||
```
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
#### Authentication Failures
|
||||
|
||||
If you see authentication errors:
|
||||
1. Verify your `DOCKER_USERNAME` and `DOCKER_PASSWORD` secrets are correct
|
||||
2. For ghcr.io, ensure your PAT has the correct permissions
|
||||
3. Check that the `DOCKER_REGISTRY` matches your credentials
|
||||
|
||||
#### Build Failures
|
||||
|
||||
If the Nix build fails:
|
||||
1. Test the build locally first: `nix build .#backendDockerLatest`
|
||||
2. Check the GitHub Actions logs for specific error messages
|
||||
3. Ensure all dependencies in `flake.nix` are correctly specified
|
||||
|
||||
#### Image Not Appearing in Registry
|
||||
|
||||
1. Verify the workflow completed successfully in the Actions tab
|
||||
2. Check that the registry URL is correct
|
||||
3. For ghcr.io, images appear at: `https://github.com/users/USERNAME/packages/container/IMAGE_NAME`
|
||||
4. Ensure your token has write permissions
|
||||
123
.github/workflows/publish-docker.yml
vendored
123
.github/workflows/publish-docker.yml
vendored
@@ -1,123 +0,0 @@
|
||||
name: Publish Docker Images
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- develop
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
env:
|
||||
CACHIX_NAME: devenv
|
||||
CACHIX_SKIP_PUSH: true
|
||||
DOCKER_REGISTRY: labs.phundrak.com # Override in repository settings if needed
|
||||
IMAGE_NAME: phundrak/phundrak-dot-com-backend
|
||||
|
||||
jobs:
|
||||
build-and-publish:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write # Required for pushing to Phundrak Labs registry
|
||||
pull-requests: read
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@v27
|
||||
with:
|
||||
nix_path: nixpkgs=channel:nixos-unstable
|
||||
|
||||
- name: Setup Cachix
|
||||
uses: cachix/cachix-action@v15
|
||||
with:
|
||||
name: '${{ env.CACHIX_NAME }}'
|
||||
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
|
||||
skipPush: ${{ env.CACHIX_SKIP_PUSH }}
|
||||
|
||||
- name: Build Docker image with Nix
|
||||
run: |
|
||||
echo "Building Docker image..."
|
||||
nix build .#backendDockerLatest --accept-flake-config
|
||||
|
||||
- name: Load Docker image
|
||||
run: |
|
||||
echo "Loading Docker image into Docker daemon..."
|
||||
docker load < result
|
||||
|
||||
- name: Log in to Docker Registry
|
||||
run: |
|
||||
echo "${{ secrets.DOCKER_PASSWORD }}" | docker login ${{ env.DOCKER_REGISTRY }} -u ${{ secrets.DOCKER_USERNAME }} --password-stdin
|
||||
|
||||
- name: Determine tags and push images
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
REGISTRY="${{ env.DOCKER_REGISTRY }}"
|
||||
IMAGE_NAME="${{ env.IMAGE_NAME }}"
|
||||
|
||||
# The locally built image from Nix (name comes from Cargo.toml package.name)
|
||||
LOCAL_IMAGE="phundrak/phundrak-dot-com-backend:latest"
|
||||
|
||||
echo "Event: ${{ github.event_name }}"
|
||||
echo "Ref: ${{ github.ref }}"
|
||||
echo "Ref type: ${{ github.ref_type }}"
|
||||
|
||||
# Determine which tags to push based on the event
|
||||
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref_type }}" == "tag" ]]; then
|
||||
# Tag push on main branch → publish 'latest' and versioned tag
|
||||
echo "Tag push detected"
|
||||
TAG_VERSION="${{ github.ref_name }}"
|
||||
# Remove 'v' prefix if present (v1.0.0 → 1.0.0)
|
||||
TAG_VERSION="${TAG_VERSION#v}"
|
||||
|
||||
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:latest"
|
||||
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:latest"
|
||||
docker push "${REGISTRY}/${IMAGE_NAME}:latest"
|
||||
|
||||
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:${TAG_VERSION}"
|
||||
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:${TAG_VERSION}"
|
||||
docker push "${REGISTRY}/${IMAGE_NAME}:${TAG_VERSION}"
|
||||
|
||||
elif [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" == "refs/heads/develop" ]]; then
|
||||
# Push on develop branch → publish 'develop' tag
|
||||
echo "Push to develop branch detected"
|
||||
|
||||
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:develop"
|
||||
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:develop"
|
||||
docker push "${REGISTRY}/${IMAGE_NAME}:develop"
|
||||
|
||||
elif [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
# Pull request → publish 'pr<number>' tag
|
||||
echo "Pull request detected"
|
||||
PR_NUMBER="${{ github.event.pull_request.number }}"
|
||||
|
||||
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:pr${PR_NUMBER}"
|
||||
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:pr${PR_NUMBER}"
|
||||
docker push "${REGISTRY}/${IMAGE_NAME}:pr${PR_NUMBER}"
|
||||
|
||||
elif [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" == "refs/heads/main" ]]; then
|
||||
# Push to main branch (not a tag) → publish 'latest'
|
||||
echo "Push to main branch detected"
|
||||
|
||||
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:latest"
|
||||
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:latest"
|
||||
docker push "${REGISTRY}/${IMAGE_NAME}:latest"
|
||||
|
||||
else
|
||||
echo "Unknown event or ref, skipping push"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Log out from Docker Registry
|
||||
if: always()
|
||||
run: docker logout ${{ env.DOCKER_REGISTRY }}
|
||||
|
||||
- name: Image published successfully
|
||||
run: |
|
||||
echo "✅ Docker image(s) published successfully to ${{ env.DOCKER_REGISTRY }}/${{ env.IMAGE_NAME }}"
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -16,10 +16,6 @@ logs
|
||||
.env.*
|
||||
!.env.example
|
||||
|
||||
# Backend
|
||||
target/
|
||||
coverage/
|
||||
|
||||
# Frontend
|
||||
## Nuxt dev/build outputs
|
||||
.output
|
||||
@@ -35,3 +31,4 @@ node_modules
|
||||
# Nix
|
||||
result
|
||||
.data/
|
||||
app/coverage/*
|
||||
|
||||
9
.volarrc
9
.volarrc
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"vueCompilerOptions": {
|
||||
"target": 3.5,
|
||||
"extensions": [".vue"]
|
||||
},
|
||||
"typescript": {
|
||||
"tsdk": "frontend/node_modules/typescript/lib"
|
||||
}
|
||||
}
|
||||
|
||||
176
README.org
176
README.org
@@ -1,76 +1,134 @@
|
||||
#+title: phundrak.com
|
||||
#+title: phundrak.com frontend
|
||||
#+author: Lucien Cartier-Tilet
|
||||
#+email: lucien@phundrak.com
|
||||
|
||||
#+html: <a href="https://www.rust-lang.org/"><img src="https://img.shields.io/badge/Rust-Backend-orange.svg?style=flat-square&logo=Rust&logoColor=white" /></a>
|
||||
#+html: <a href="https://nuxt.com/"><img src="https://img.shields.io/badge/Frontend-Nuxt%204-00DC82?logo=Nuxt.js&logoColor=white&style=flat-square"/></a>
|
||||
#+html: <a href="https://vuejs.org/"><img src="https://img.shields.io/badge/Vue-3-42B883?logo=Vue.js&logoColor=white&style=flat-square"/></a>
|
||||
#+html: <a href="https://phundrak.com"><img src="https://img.shields.io/badge/Website-phundrak.com-blue?style=flat-square&logo=buffer" /></a>
|
||||
This is the frontend of =phundrak.com=, written with Nuxt.
|
||||
|
||||
* Introduction
|
||||
This is the repository for my website [[https://phundrak.com][phundrak.com]] which contains the
|
||||
code available on the =main= branch. Code available on the =develop=
|
||||
branch is available at [[https://beta.phundrak.com][beta.phundrak.com]].
|
||||
* Setup
|
||||
|
||||
* Architecture
|
||||
The website follows a modern full-stack architecture:
|
||||
** Environment
|
||||
*** Nix Environment
|
||||
If you use Nix, you can set up your environment using the [[file:flake.nix][=flake.nix=]]
|
||||
file, which will give you the exact same development environment as I
|
||||
use.
|
||||
|
||||
- *Backend*: Rust using the [[https://github.com/poem-web/poem][Poem]] web framework (located in [[file:backend/][backend/]])
|
||||
- *Frontend*: Nuxt 4 + Vue 3 + TypeScript (located in [[file:frontend/][frontend/]])
|
||||
|
||||
** Backend
|
||||
The backend is written in Rust and provides a RESTful API using the
|
||||
Poem framework with OpenAPI support.
|
||||
|
||||
*** Running the Backend
|
||||
To run the backend in development mode:
|
||||
#+begin_src shell
|
||||
cd backend
|
||||
cargo run
|
||||
#+begin_src bash
|
||||
nix develop
|
||||
#+end_src
|
||||
|
||||
To run tests:
|
||||
#+begin_src shell
|
||||
cd backend
|
||||
cargo test
|
||||
If you have [[https://direnv.net/][=direnv=]] installed, you can simply use it to automatically
|
||||
enable this environment. However, I *strongly* recommend you to read the
|
||||
content of the =flake.nix= file before doing so, as you should with any
|
||||
Nix-defined environment you did not create.
|
||||
|
||||
#+begin_src bash
|
||||
direnv allow .
|
||||
#+end_src
|
||||
|
||||
For continuous testing and linting during development, use [[https://dystroy.org/bacon/][bacon]]:
|
||||
#+begin_src shell
|
||||
cd backend
|
||||
bacon
|
||||
*** Required Tools
|
||||
To be able to work on this project, you need a Javascript package
|
||||
manager, such as:
|
||||
- =npm=
|
||||
- =pnpm= (recommended)
|
||||
- =yarn=
|
||||
- =bun=
|
||||
|
||||
In my case, I use pnpm.
|
||||
|
||||
You can skip this if you are already using my Nix environment.
|
||||
|
||||
** Dependencies
|
||||
Once you have your environment ready, you can now install the
|
||||
project’s dependencies.
|
||||
|
||||
#+begin_src bash
|
||||
# npm
|
||||
npm install
|
||||
|
||||
# pnpm
|
||||
pnpm install
|
||||
|
||||
# yarn
|
||||
yarn install
|
||||
|
||||
# bun
|
||||
bun install
|
||||
#+end_src
|
||||
|
||||
*** Building the Backend
|
||||
To build the backend for production:
|
||||
#+begin_src shell
|
||||
cd backend
|
||||
cargo build --release
|
||||
* Running the Project
|
||||
You are now ready to start the development server on
|
||||
=http://localhost:3000=.
|
||||
|
||||
#+begin_src bash
|
||||
# npm
|
||||
npm run dev
|
||||
|
||||
# pnpm
|
||||
pnpm dev
|
||||
|
||||
# yarn
|
||||
yarn dev
|
||||
|
||||
# bun
|
||||
bun run dev
|
||||
#+end_src
|
||||
|
||||
The compiled binary will be available at =backend/target/release/backend=.
|
||||
* Production
|
||||
Once you are satisfied with the project, you can build the application in production mode.
|
||||
|
||||
** Frontend
|
||||
The frontend is built with Nuxt 4, Vue 3, and TypeScript, providing a
|
||||
modern single-page application experience.
|
||||
#+begin_src bash
|
||||
# npm
|
||||
npm run build
|
||||
|
||||
*** Installing Dependencies
|
||||
First, install the required dependencies using =pnpm=:
|
||||
#+begin_src shell
|
||||
cd frontend
|
||||
# pnpm
|
||||
pnpm build
|
||||
|
||||
# yarn
|
||||
yarn build
|
||||
|
||||
# bun
|
||||
bun run build
|
||||
#+end_src
|
||||
|
||||
You can preview locally the production build too.
|
||||
|
||||
#+begin_src bash
|
||||
# npm
|
||||
npm run preview
|
||||
|
||||
# pnpm
|
||||
pnpm preview
|
||||
|
||||
# yarn
|
||||
yarn preview
|
||||
|
||||
# bun
|
||||
bun run preview
|
||||
#+end_src
|
||||
|
||||
Check out the [[https://nuxt.com/docs/getting-started/deployment][deployment documentation]] for more information.
|
||||
|
||||
* Known Issues
|
||||
** =better-sqlite3= self-registration error
|
||||
If you encounter an error stating that =better-sqlite3= does not
|
||||
self-register when running =pnpm run dev=, this is typically caused by
|
||||
the native module being compiled for a different Node.js version.
|
||||
|
||||
*Solution:* Rebuild the native module for your current Node.js version:
|
||||
|
||||
#+begin_src bash
|
||||
# Rebuild just better-sqlite3
|
||||
pnpm rebuild better-sqlite3
|
||||
|
||||
# Or rebuild all native modules
|
||||
pnpm rebuild
|
||||
|
||||
# Or reinstall everything (nuclear option)
|
||||
rm -rf node_modules
|
||||
pnpm install
|
||||
#+end_src
|
||||
|
||||
*** Running the Frontend
|
||||
To run the frontend in development mode:
|
||||
#+begin_src shell
|
||||
cd frontend
|
||||
pnpm dev
|
||||
#+end_src
|
||||
|
||||
*** Building the Frontend
|
||||
To build the frontend for production:
|
||||
#+begin_src shell
|
||||
cd frontend
|
||||
pnpm build
|
||||
#+end_src
|
||||
|
||||
The compiled version of the website can then be found in =frontend/.output=.
|
||||
*Why this happens:* =better-sqlite3= contains native C++ code that
|
||||
needs to be compiled for each specific Node.js version. When you
|
||||
update Node.js or switch between versions, native modules need to be
|
||||
rebuilt.
|
||||
|
||||
39
app/app.vue
Normal file
39
app/app.vue
Normal file
@@ -0,0 +1,39 @@
|
||||
<template>
|
||||
<UApp :locale="locales[locale]">
|
||||
<AppNavbar />
|
||||
<UMain>
|
||||
<NuxtPage />
|
||||
</UMain>
|
||||
<AppFooter />
|
||||
</UApp>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import * as locales from '@nuxt/ui/locale';
|
||||
const { locale } = useI18n();
|
||||
const lang = computed(() => locales[locale.value].code);
|
||||
const dir = computed(() => locales[locale.value].dir);
|
||||
const { urlBase, fediverseCreator } = useRuntimeConfig().public;
|
||||
const route = useRoute();
|
||||
const url = computed(() => urlBase.replace(/\/+$/, '') + route.fullPath);
|
||||
|
||||
useHead({
|
||||
htmlAttrs: {
|
||||
dir,
|
||||
lang,
|
||||
},
|
||||
link: [
|
||||
{ rel: 'icon', type: 'image/png', sizes: '32x32', href: '/favicon-32x32.png' },
|
||||
{ rel: 'icon', type: 'image/png', sizes: '16x16', href: '/favicon-16x16.png' },
|
||||
{ rel: 'apple-touch-icon', sizes: '180x180', href: '/apple-touch-icon.png' },
|
||||
{ rel: 'manifest', href: '/site.webmanifest' },
|
||||
],
|
||||
meta: fediverseCreator !== '' ? [{ name: 'fediverse:creator', content: fediverseCreator + '' }] : [],
|
||||
});
|
||||
|
||||
useSeoMeta({
|
||||
ogImage: '/leon.png',
|
||||
twitterImage: '/leon.png',
|
||||
ogUrl: url,
|
||||
});
|
||||
</script>
|
||||
132
app/assets/css/colors.css
Normal file
132
app/assets/css/colors.css
Normal file
@@ -0,0 +1,132 @@
|
||||
:root {
|
||||
--text: oklch(38.30% 0.029 266.48);
|
||||
--text-50: oklch(95.82% 0.004 271.37);
|
||||
--text-100: oklch(91.83% 0.009 264.52);
|
||||
--text-200: oklch(83.53% 0.016 266.26);
|
||||
--text-300: oklch(74.99% 0.026 265.54);
|
||||
--text-400: oklch(66.05% 0.036 268.49);
|
||||
--text-500: oklch(57.02% 0.047 267.31);
|
||||
--text-600: oklch(48.66% 0.039 268.21);
|
||||
--text-700: oklch(40.13% 0.031 265.23);
|
||||
--text-800: oklch(30.90% 0.021 265.90);
|
||||
--text-900: oklch(20.86% 0.013 264.25);
|
||||
--text-950: oklch(15.46% 0.007 270.96);
|
||||
|
||||
--background: oklch(95.13% 0.007 260.73);
|
||||
--background-50: oklch(95.80% 0.007 268.55);
|
||||
--background-100: oklch(91.74% 0.012 259.82);
|
||||
--background-200: oklch(83.07% 0.027 262.33);
|
||||
--background-300: oklch(74.46% 0.041 261.48);
|
||||
--background-400: oklch(65.63% 0.058 260.56);
|
||||
--background-500: oklch(56.42% 0.075 261.41);
|
||||
--background-600: oklch(48.32% 0.062 260.40);
|
||||
--background-700: oklch(39.64% 0.048 261.18);
|
||||
--background-800: oklch(30.43% 0.036 261.92);
|
||||
--background-900: oklch(20.77% 0.018 259.72);
|
||||
--background-950: oklch(15.29% 0.010 255.44);
|
||||
|
||||
--primary: oklch(77.15% 0.062 217.48);
|
||||
--primary-50: oklch(96.50% 0.009 222.06);
|
||||
--primary-100: oklch(93.16% 0.019 213.42);
|
||||
--primary-200: oklch(86.07% 0.039 217.46);
|
||||
--primary-300: oklch(79.25% 0.057 216.55);
|
||||
--primary-400: oklch(72.48% 0.075 217.32);
|
||||
--primary-500: oklch(65.88% 0.089 218.00);
|
||||
--primary-600: oklch(55.99% 0.075 218.52);
|
||||
--primary-700: oklch(45.64% 0.059 218.22);
|
||||
--primary-800: oklch(34.67% 0.043 219.39);
|
||||
--primary-900: oklch(23.06% 0.024 214.47);
|
||||
--primary-950: oklch(16.48% 0.015 212.62);
|
||||
|
||||
--secondary: oklch(69.65% 0.059 248.69);
|
||||
--secondary-50: oklch(95.95% 0.008 253.85);
|
||||
--secondary-100: oklch(92.05% 0.015 244.73);
|
||||
--secondary-200: oklch(83.76% 0.030 248.19);
|
||||
--secondary-300: oklch(75.31% 0.048 249.46);
|
||||
--secondary-400: oklch(66.99% 0.065 248.83);
|
||||
--secondary-500: oklch(58.35% 0.083 249.96);
|
||||
--secondary-600: oklch(49.88% 0.069 249.37);
|
||||
--secondary-700: oklch(40.78% 0.056 250.22);
|
||||
--secondary-800: oklch(31.42% 0.038 249.12);
|
||||
--secondary-900: oklch(20.99% 0.022 251.79);
|
||||
--secondary-950: oklch(15.56% 0.012 241.97);
|
||||
|
||||
--accent: oklch(59.38% 0.078 253.40);
|
||||
--accent-50: oklch(95.93% 0.007 247.90);
|
||||
--accent-100: oklch(91.85% 0.015 251.16);
|
||||
--accent-200: oklch(83.39% 0.030 254.70);
|
||||
--accent-300: oklch(74.95% 0.046 253.67);
|
||||
--accent-400: oklch(66.37% 0.064 253.29);
|
||||
--accent-500: oklch(57.47% 0.081 254.47);
|
||||
--accent-600: oklch(49.19% 0.068 253.56);
|
||||
--accent-700: oklch(40.31% 0.053 254.02);
|
||||
--accent-800: oklch(30.91% 0.038 255.00);
|
||||
--accent-900: oklch(20.99% 0.022 251.79);
|
||||
--accent-950: oklch(15.35% 0.012 260.39);
|
||||
}
|
||||
.dark {
|
||||
--text: oklch(76.63% 0.024 266.86);
|
||||
--text-50: oklch(15.46% 0.007 270.96);
|
||||
--text-100: oklch(20.86% 0.013 264.25);
|
||||
--text-200: oklch(30.90% 0.021 265.90);
|
||||
--text-300: oklch(40.13% 0.031 265.23);
|
||||
--text-400: oklch(48.66% 0.039 268.21);
|
||||
--text-500: oklch(57.02% 0.047 267.31);
|
||||
--text-600: oklch(66.05% 0.036 268.49);
|
||||
--text-700: oklch(74.99% 0.026 265.54);
|
||||
--text-800: oklch(83.53% 0.016 266.26);
|
||||
--text-900: oklch(91.83% 0.009 264.52);
|
||||
--text-950: oklch(95.82% 0.004 271.37);
|
||||
|
||||
--background: oklch(16.29% 0.012 260.61);
|
||||
--background-50: oklch(15.29% 0.010 255.44);
|
||||
--background-100: oklch(20.77% 0.018 259.72);
|
||||
--background-200: oklch(30.43% 0.036 261.92);
|
||||
--background-300: oklch(39.64% 0.048 261.18);
|
||||
--background-400: oklch(48.32% 0.062 260.40);
|
||||
--background-500: oklch(56.42% 0.075 261.41);
|
||||
--background-600: oklch(65.63% 0.058 260.56);
|
||||
--background-700: oklch(74.46% 0.041 261.48);
|
||||
--background-800: oklch(83.07% 0.027 262.33);
|
||||
--background-900: oklch(91.74% 0.012 259.82);
|
||||
--background-950: oklch(95.80% 0.007 268.55);
|
||||
|
||||
--primary: oklch(48.89% 0.064 217.48);
|
||||
--primary-50: oklch(16.48% 0.015 212.62);
|
||||
--primary-100: oklch(23.06% 0.024 214.47);
|
||||
--primary-200: oklch(34.67% 0.043 219.39);
|
||||
--primary-300: oklch(45.64% 0.059 218.22);
|
||||
--primary-400: oklch(55.99% 0.075 218.52);
|
||||
--primary-500: oklch(65.88% 0.089 218.00);
|
||||
--primary-600: oklch(72.48% 0.075 217.32);
|
||||
--primary-700: oklch(79.25% 0.057 216.55);
|
||||
--primary-800: oklch(86.07% 0.039 217.46);
|
||||
--primary-900: oklch(93.16% 0.019 213.42);
|
||||
--primary-950: oklch(96.50% 0.009 222.06);
|
||||
|
||||
--secondary: oklch(47.12% 0.064 249.33);
|
||||
--secondary-50: oklch(15.56% 0.012 241.97);
|
||||
--secondary-100: oklch(20.99% 0.022 251.79);
|
||||
--secondary-200: oklch(31.42% 0.038 249.12);
|
||||
--secondary-300: oklch(40.78% 0.056 250.22);
|
||||
--secondary-400: oklch(49.88% 0.069 249.37);
|
||||
--secondary-500: oklch(58.35% 0.083 249.96);
|
||||
--secondary-600: oklch(66.99% 0.065 248.83);
|
||||
--secondary-700: oklch(75.31% 0.048 249.46);
|
||||
--secondary-800: oklch(83.76% 0.030 248.19);
|
||||
--secondary-900: oklch(92.05% 0.015 244.73);
|
||||
--secondary-950: oklch(95.95% 0.008 253.85);
|
||||
|
||||
--accent: oklch(55.80% 0.080 254.61);
|
||||
--accent-50: oklch(15.35% 0.012 260.39);
|
||||
--accent-100: oklch(20.99% 0.022 251.79);
|
||||
--accent-200: oklch(30.91% 0.038 255.00);
|
||||
--accent-300: oklch(40.31% 0.053 254.02);
|
||||
--accent-400: oklch(49.19% 0.068 253.56);
|
||||
--accent-500: oklch(57.47% 0.081 254.47);
|
||||
--accent-600: oklch(66.37% 0.064 253.29);
|
||||
--accent-700: oklch(74.95% 0.046 253.67);
|
||||
--accent-800: oklch(83.39% 0.030 254.70);
|
||||
--accent-900: oklch(91.85% 0.015 251.16);
|
||||
--accent-950: oklch(95.93% 0.007 247.90);
|
||||
}
|
||||
@@ -4,3 +4,12 @@
|
||||
@import './tailwind.css';
|
||||
|
||||
@source "../../../content/**/*";
|
||||
|
||||
.small-img {
|
||||
max-width: 30rem;
|
||||
max-height: 30rem;
|
||||
width: auto;
|
||||
height: auto;
|
||||
float: right;
|
||||
margin: 2rem;
|
||||
}
|
||||
161
app/components/AppFooter.test.ts
Normal file
161
app/components/AppFooter.test.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
|
||||
describe('AppFooter', () => {
|
||||
describe('navigation items logic', () => {
|
||||
const mockT = (key: string) => {
|
||||
const translations: Record<string, string> = {
|
||||
'footer.links.source': 'Source Code',
|
||||
'footer.links.nuxt': 'Nuxt',
|
||||
'footer.links.rust': 'Rust',
|
||||
};
|
||||
return translations[key] || key;
|
||||
};
|
||||
|
||||
it('should generate footer navigation items', () => {
|
||||
const items = computed(() => [
|
||||
{
|
||||
label: mockT('footer.links.source'),
|
||||
to: 'https://labs.phundrak.com/phundrak/phundrak.com',
|
||||
},
|
||||
{
|
||||
label: mockT('footer.links.nuxt'),
|
||||
to: 'https://nuxt.com/',
|
||||
},
|
||||
{
|
||||
label: mockT('footer.links.rust'),
|
||||
to: 'https://rust-lang.org/',
|
||||
},
|
||||
]);
|
||||
|
||||
expect(items.value).toHaveLength(3);
|
||||
expect(items.value[0].label).toBe('Source Code');
|
||||
expect(items.value[0].to).toBe('https://labs.phundrak.com/phundrak/phundrak.com');
|
||||
});
|
||||
|
||||
it('should include link to Nuxt', () => {
|
||||
const items = computed(() => [
|
||||
{
|
||||
label: mockT('footer.links.nuxt'),
|
||||
to: 'https://nuxt.com/',
|
||||
},
|
||||
]);
|
||||
|
||||
expect(items.value[0].to).toBe('https://nuxt.com/');
|
||||
});
|
||||
|
||||
it('should include link to Rust', () => {
|
||||
const items = computed(() => [
|
||||
{
|
||||
label: mockT('footer.links.rust'),
|
||||
to: 'https://rust-lang.org/',
|
||||
},
|
||||
]);
|
||||
|
||||
expect(items.value[0].to).toBe('https://rust-lang.org/');
|
||||
});
|
||||
});
|
||||
|
||||
describe('backend version logic', () => {
|
||||
const mockT = (key: string) => {
|
||||
const translations: Record<string, string> = {
|
||||
'backend.loading': 'Loading...',
|
||||
'backend.failed': 'Failed to load',
|
||||
};
|
||||
return translations[key] || key;
|
||||
};
|
||||
|
||||
it('should show loading text when loading', () => {
|
||||
const mockLoading = ref(true);
|
||||
const mockData = ref<{ version: string } | null>(null);
|
||||
|
||||
const backendVersion = computed(() =>
|
||||
mockLoading.value ? 'backend.loading' : mockData.value?.version || mockT('backend.failed'),
|
||||
);
|
||||
|
||||
expect(backendVersion.value).toBe('backend.loading');
|
||||
});
|
||||
|
||||
it('should show version when data is loaded', () => {
|
||||
const mockLoading = ref(false);
|
||||
const mockData = ref({ version: '1.2.3' });
|
||||
|
||||
const backendVersion = computed(() =>
|
||||
mockLoading.value ? 'backend.loading' : mockData.value?.version || mockT('backend.failed'),
|
||||
);
|
||||
|
||||
expect(backendVersion.value).toBe('1.2.3');
|
||||
});
|
||||
|
||||
it('should show failed text when no data', () => {
|
||||
const mockLoading = ref(false);
|
||||
const mockData = ref<{ version: string } | null>(null);
|
||||
|
||||
const backendVersion = computed(() =>
|
||||
mockLoading.value ? 'backend.loading' : mockData.value?.version || mockT('backend.failed'),
|
||||
);
|
||||
|
||||
expect(backendVersion.value).toBe('Failed to load');
|
||||
});
|
||||
});
|
||||
|
||||
describe('orientation logic', () => {
|
||||
it('should use vertical orientation on mobile', () => {
|
||||
const mockIsMobile = true;
|
||||
const orientation = computed(() => (mockIsMobile ? 'vertical' : 'horizontal'));
|
||||
|
||||
expect(orientation.value).toBe('vertical');
|
||||
});
|
||||
|
||||
it('should use horizontal orientation on desktop', () => {
|
||||
const mockIsMobile = false;
|
||||
const orientation = computed(() => (mockIsMobile ? 'vertical' : 'horizontal'));
|
||||
|
||||
expect(orientation.value).toBe('horizontal');
|
||||
});
|
||||
});
|
||||
|
||||
describe('error toast watcher', () => {
|
||||
it('should call toast.add when error occurs', () => {
|
||||
const mockToastAdd = vi.fn();
|
||||
const mockError = ref<{ message: string } | null>(null);
|
||||
|
||||
// Simulate the watcher behavior
|
||||
const triggerErrorWatcher = (error: { message: string } | null) => {
|
||||
if (error) {
|
||||
mockToastAdd({
|
||||
title: 'Error',
|
||||
description: error.message,
|
||||
color: 'error',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
mockError.value = { message: 'backend.errors.unknown' };
|
||||
triggerErrorWatcher(mockError.value);
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith({
|
||||
title: 'Error',
|
||||
description: 'backend.errors.unknown',
|
||||
color: 'error',
|
||||
});
|
||||
});
|
||||
|
||||
it('should not call toast.add when error is null', () => {
|
||||
const mockToastAdd = vi.fn();
|
||||
|
||||
const triggerErrorWatcher = (error: { message: string } | null) => {
|
||||
if (error) {
|
||||
mockToastAdd({
|
||||
title: 'Error',
|
||||
description: error.message,
|
||||
color: 'error',
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
triggerErrorWatcher(null);
|
||||
|
||||
expect(mockToastAdd).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
78
app/components/AppFooter.vue
Normal file
78
app/components/AppFooter.vue
Normal file
@@ -0,0 +1,78 @@
|
||||
<template>
|
||||
<UFooter class="bg-background-200">
|
||||
<template #left>
|
||||
<div class="flex flex-col gap-2">
|
||||
<p class="text-text-800 text-sm">Copyright © {{ new Date().getFullYear() }}</p>
|
||||
<p class="text-text-800 text-sm">{{ $t('footer.versions.frontend') }}: {{ version }}</p>
|
||||
<p class="text-text-800 text-sm">{{ $t('footer.versions.backend') }}: {{ backendVersion }}</p>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<UNavigationMenu :items="items" variant="link" :orientation="orientation" />
|
||||
|
||||
<template #right>
|
||||
<FooterSocialAccount
|
||||
v-for="social in socialAccounts"
|
||||
:key="social.label"
|
||||
:icon="social.icon"
|
||||
:link="social.link"
|
||||
:label="social.label"
|
||||
/>
|
||||
</template>
|
||||
</UFooter>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import type { NavigationMenuItem } from '@nuxt/ui';
|
||||
import { version } from '../../package.json';
|
||||
import type { SocialAccount } from '~/types/social-account';
|
||||
|
||||
const toast = useToast();
|
||||
const { isMobile } = useDevice();
|
||||
const orientation = computed(() => (isMobile ? 'vertical' : 'horizontal'));
|
||||
const { getMeta } = useBackend();
|
||||
const { data, error, loading } = getMeta();
|
||||
const backendVersion = computed(() =>
|
||||
loading.value ? 'backend.loading' : data?.value?.version || $t('backend.failed'),
|
||||
);
|
||||
|
||||
const socialAccounts: SocialAccount[] = [
|
||||
{ icon: 'i-simple-icons-mastodon', label: 'Fediverse', link: 'https://social.phundrak.com/phundrak' },
|
||||
{ icon: 'i-simple-icons-gitea', label: 'Gitea', link: 'https://labs.phundrak.com/phundrak' },
|
||||
{ icon: 'i-simple-icons-github', label: 'GitHub', link: 'https://github.com/Phundrak' },
|
||||
{ icon: 'i-simple-icons-youtube', label: 'YouTube', link: 'https://youtube.com/@phundrak' },
|
||||
];
|
||||
|
||||
const items = computed<NavigationMenuItem[]>(() => [
|
||||
{
|
||||
label: $t('footer.links.source.backend'),
|
||||
to: 'https://labs.phundrak.com/phundrak/bakit',
|
||||
target: '_blank',
|
||||
},
|
||||
{
|
||||
label: $t('footer.links.source.frontend'),
|
||||
to: 'https://labs.phundrak.com/phundrak/framit',
|
||||
target: '_blank',
|
||||
},
|
||||
{
|
||||
label: $t('footer.links.nuxt'),
|
||||
to: 'https://nuxt.com/',
|
||||
target: '_blank',
|
||||
},
|
||||
{
|
||||
label: $t('footer.links.rust'),
|
||||
to: 'https://rust-lang.org/',
|
||||
target: '_blank',
|
||||
},
|
||||
]);
|
||||
|
||||
watch(error, (value) => {
|
||||
if (value) {
|
||||
toast.add({
|
||||
title: $t('backend.errors.title'),
|
||||
description: $t(value.message ?? 'backend.errors.unknown'),
|
||||
color: 'error',
|
||||
});
|
||||
}
|
||||
});
|
||||
</script>
|
||||
123
app/components/AppNavbar.test.ts
Normal file
123
app/components/AppNavbar.test.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
|
||||
describe('AppNavbar', () => {
|
||||
describe('navigation items logic', () => {
|
||||
const mockT = (key: string) => {
|
||||
const translations: Record<string, string> = {
|
||||
'pages.home.name': 'Home',
|
||||
'pages.resume.name': 'Resume',
|
||||
'pages.vocal-synthesis.name': 'Vocal Synthesis',
|
||||
'pages.languages.name': 'Languages',
|
||||
'pages.contact.name': 'Contact',
|
||||
};
|
||||
return translations[key] || key;
|
||||
};
|
||||
|
||||
it('should generate navigation items with correct structure', () => {
|
||||
const mockRoute = { path: '/' };
|
||||
|
||||
const items = computed(() => [
|
||||
{
|
||||
label: mockT('pages.home.name'),
|
||||
to: '/',
|
||||
active: mockRoute.path === '/',
|
||||
},
|
||||
...['resume', 'vocal-synthesis', 'languages', 'contact'].map((page) => ({
|
||||
label: mockT(`pages.${page}.name`),
|
||||
to: `/${page}`,
|
||||
active: mockRoute.path.startsWith(`/${page}`),
|
||||
})),
|
||||
]);
|
||||
|
||||
expect(items.value).toHaveLength(5);
|
||||
expect(items.value[0]).toEqual({
|
||||
label: 'Home',
|
||||
to: '/',
|
||||
active: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should include all required pages', () => {
|
||||
const mockRoute = { path: '/' };
|
||||
|
||||
const items = computed(() => [
|
||||
{
|
||||
label: mockT('pages.home.name'),
|
||||
to: '/',
|
||||
active: mockRoute.path === '/',
|
||||
},
|
||||
...['resume', 'vocal-synthesis', 'languages', 'contact'].map((page) => ({
|
||||
label: mockT(`pages.${page}.name`),
|
||||
to: `/${page}`,
|
||||
active: mockRoute.path.startsWith(`/${page}`),
|
||||
})),
|
||||
]);
|
||||
|
||||
const labels = items.value.map((item) => item.label);
|
||||
expect(labels).toContain('Home');
|
||||
expect(labels).toContain('Resume');
|
||||
expect(labels).toContain('Vocal Synthesis');
|
||||
expect(labels).toContain('Languages');
|
||||
expect(labels).toContain('Contact');
|
||||
});
|
||||
|
||||
it('should mark home as active when on root path', () => {
|
||||
const mockRoute = { path: '/' };
|
||||
|
||||
const items = computed(() => [
|
||||
{
|
||||
label: mockT('pages.home.name'),
|
||||
to: '/',
|
||||
active: mockRoute.path === '/',
|
||||
},
|
||||
...['resume', 'vocal-synthesis', 'languages', 'contact'].map((page) => ({
|
||||
label: mockT(`pages.${page}.name`),
|
||||
to: `/${page}`,
|
||||
active: mockRoute.path.startsWith(`/${page}`),
|
||||
})),
|
||||
]);
|
||||
|
||||
expect(items.value[0].active).toBe(true);
|
||||
expect(items.value[1].active).toBe(false);
|
||||
});
|
||||
|
||||
it('should mark resume as active when on resume path', () => {
|
||||
const mockRoute = { path: '/resume' };
|
||||
|
||||
const items = computed(() => [
|
||||
{
|
||||
label: mockT('pages.home.name'),
|
||||
to: '/',
|
||||
active: mockRoute.path === '/',
|
||||
},
|
||||
...['resume', 'vocal-synthesis', 'languages', 'contact'].map((page) => ({
|
||||
label: mockT(`pages.${page}.name`),
|
||||
to: `/${page}`,
|
||||
active: mockRoute.path.startsWith(`/${page}`),
|
||||
})),
|
||||
]);
|
||||
|
||||
expect(items.value[0].active).toBe(false);
|
||||
expect(items.value[1].active).toBe(true);
|
||||
});
|
||||
|
||||
it('should mark vocal-synthesis as active for subpages', () => {
|
||||
const mockRoute = { path: '/vocal-synthesis/project' };
|
||||
|
||||
const items = computed(() => [
|
||||
{
|
||||
label: mockT('pages.home.name'),
|
||||
to: '/',
|
||||
active: mockRoute.path === '/',
|
||||
},
|
||||
...['resume', 'vocal-synthesis', 'languages', 'contact'].map((page) => ({
|
||||
label: mockT(`pages.${page}.name`),
|
||||
to: `/${page}`,
|
||||
active: mockRoute.path.startsWith(`/${page}`),
|
||||
})),
|
||||
]);
|
||||
|
||||
expect(items.value[2].active).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
104
app/components/Ui/BadgeList.test.ts
Normal file
104
app/components/Ui/BadgeList.test.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { mountSuspended } from '@nuxt/test-utils/runtime';
|
||||
import BadgeList from './BadgeList.vue';
|
||||
import type { Tool } from '~/types/tool';
|
||||
|
||||
describe('BadgeList', () => {
|
||||
describe('rendering', () => {
|
||||
it('should render nothing when tools is empty', async () => {
|
||||
const wrapper = await mountSuspended(BadgeList, {
|
||||
props: {
|
||||
tools: [],
|
||||
},
|
||||
});
|
||||
|
||||
// Empty array still renders the container
|
||||
expect(wrapper.find('.flex').exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should render badges for each tool', async () => {
|
||||
const tools: Tool[] = [{ name: 'TypeScript' }, { name: 'Vue.js' }, { name: 'Nuxt' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeList, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('TypeScript');
|
||||
expect(wrapper.text()).toContain('Vue.js');
|
||||
expect(wrapper.text()).toContain('Nuxt');
|
||||
});
|
||||
|
||||
it('should render tool name without link when link is not provided', async () => {
|
||||
const tools: Tool[] = [{ name: 'Plain Tool' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeList, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Plain Tool');
|
||||
// Should not have a NuxtLink for this tool
|
||||
const links = wrapper.findAll('a');
|
||||
const plainToolLinks = links.filter((link) => link.text().includes('Plain Tool'));
|
||||
expect(plainToolLinks.length).toBe(0);
|
||||
});
|
||||
|
||||
it('should render tool name with link when link is provided', async () => {
|
||||
const tools: Tool[] = [{ name: 'Linked Tool', link: 'https://example.com' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeList, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Linked Tool');
|
||||
// Should have a link
|
||||
const link = wrapper.find('a');
|
||||
expect(link.exists()).toBe(true);
|
||||
expect(link.attributes('href')).toBe('https://example.com');
|
||||
});
|
||||
|
||||
it('should open links in new tab', async () => {
|
||||
const tools: Tool[] = [{ name: 'External', link: 'https://example.com' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeList, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
const link = wrapper.find('a');
|
||||
expect(link.attributes('target')).toBe('_blank');
|
||||
});
|
||||
});
|
||||
|
||||
describe('props', () => {
|
||||
it('should accept tools prop', async () => {
|
||||
const tools: Tool[] = [{ name: 'Test' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeList, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
expect(wrapper.props('tools')).toEqual(tools);
|
||||
});
|
||||
});
|
||||
|
||||
describe('mixed tools', () => {
|
||||
it('should render both linked and non-linked tools correctly', async () => {
|
||||
const tools: Tool[] = [
|
||||
{ name: 'With Link', link: 'https://example.com' },
|
||||
{ name: 'Without Link' },
|
||||
{ name: 'Another Link', link: 'https://another.com' },
|
||||
];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeList, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('With Link');
|
||||
expect(wrapper.text()).toContain('Without Link');
|
||||
expect(wrapper.text()).toContain('Another Link');
|
||||
|
||||
// Should have exactly 2 links
|
||||
const links = wrapper.findAll('a');
|
||||
expect(links.length).toBe(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
20
app/components/Ui/BadgeList.vue
Normal file
20
app/components/Ui/BadgeList.vue
Normal file
@@ -0,0 +1,20 @@
|
||||
<template>
|
||||
<div v-if="tools" class="flex flex-row gap-1 flex-wrap">
|
||||
<UBadge v-for="tool in tools" :key="tool.name" size="md" variant="solid">
|
||||
<span v-if="tool.link">
|
||||
<NuxtLink :to="tool.link" target="_blank">
|
||||
{{ tool.name }}
|
||||
</NuxtLink>
|
||||
</span>
|
||||
<span v-else>{{ tool.name }}</span>
|
||||
</UBadge>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import type { Tool } from '../../types/tool';
|
||||
|
||||
const { tools } = defineProps<{
|
||||
tools: Tool[];
|
||||
}>();
|
||||
</script>
|
||||
109
app/components/Ui/BadgeListCard.test.ts
Normal file
109
app/components/Ui/BadgeListCard.test.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { mountSuspended } from '@nuxt/test-utils/runtime';
|
||||
import BadgeListCard from './BadgeListCard.vue';
|
||||
import type { Tool } from '~/types/tool';
|
||||
|
||||
describe('BadgeListCard', () => {
|
||||
describe('rendering', () => {
|
||||
it('should render the card container', async () => {
|
||||
const tools: Tool[] = [{ name: 'Test Tool' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeListCard, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
expect(wrapper.find('.my-10').exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should render slot content', async () => {
|
||||
const tools: Tool[] = [{ name: 'Test Tool' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeListCard, {
|
||||
props: { tools },
|
||||
slots: {
|
||||
default: 'Card Title',
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Card Title');
|
||||
});
|
||||
|
||||
it('should render tools via BadgeList component', async () => {
|
||||
const tools: Tool[] = [{ name: 'Tool A' }, { name: 'Tool B', link: 'https://example.com' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeListCard, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Tool A');
|
||||
expect(wrapper.text()).toContain('Tool B');
|
||||
});
|
||||
});
|
||||
|
||||
describe('props', () => {
|
||||
it('should accept tools prop', async () => {
|
||||
const tools: Tool[] = [{ name: 'Test' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeListCard, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
expect(wrapper.props('tools')).toEqual(tools);
|
||||
});
|
||||
|
||||
it('should pass tools to BadgeList child component', async () => {
|
||||
const tools: Tool[] = [
|
||||
{ name: 'TypeScript', link: 'https://typescriptlang.org' },
|
||||
{ name: 'Vue.js', link: 'https://vuejs.org' },
|
||||
];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeListCard, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
// BadgeList should render all tools
|
||||
expect(wrapper.text()).toContain('TypeScript');
|
||||
expect(wrapper.text()).toContain('Vue.js');
|
||||
});
|
||||
});
|
||||
|
||||
describe('slots', () => {
|
||||
it('should render default slot in title position', async () => {
|
||||
const tools: Tool[] = [{ name: 'Test' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeListCard, {
|
||||
props: { tools },
|
||||
slots: {
|
||||
default: '<strong>Programming Languages</strong>',
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.find('strong').exists()).toBe(true);
|
||||
expect(wrapper.text()).toContain('Programming Languages');
|
||||
});
|
||||
|
||||
it('should work without slot content', async () => {
|
||||
const tools: Tool[] = [{ name: 'Test' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeListCard, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
// Should still render without errors
|
||||
expect(wrapper.text()).toContain('Test');
|
||||
});
|
||||
});
|
||||
|
||||
describe('styling', () => {
|
||||
it('should have vertical margin class', async () => {
|
||||
const tools: Tool[] = [{ name: 'Test' }];
|
||||
|
||||
const wrapper = await mountSuspended(BadgeListCard, {
|
||||
props: { tools },
|
||||
});
|
||||
|
||||
// Card should have my-10 class for vertical spacing
|
||||
expect(wrapper.find('.my-10').exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -8,7 +8,9 @@
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import type { Tool } from '~/types/tool';
|
||||
|
||||
const { tools } = defineProps<{
|
||||
tools: string[];
|
||||
tools: Tool[];
|
||||
}>();
|
||||
</script>
|
||||
171
app/components/VocalSynth/Projects.test.ts
Normal file
171
app/components/VocalSynth/Projects.test.ts
Normal file
@@ -0,0 +1,171 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { mountSuspended } from '@nuxt/test-utils/runtime';
|
||||
import Projects from './Projects.vue';
|
||||
import type { VocalSynthPage } from '~/types/vocal-synth';
|
||||
|
||||
// Mock $t function
|
||||
vi.stubGlobal('$t', (key: string) => {
|
||||
const translations: Record<string, string> = {
|
||||
'pages.vocal-synthesis.projects': 'Projects',
|
||||
};
|
||||
return translations[key] || key;
|
||||
});
|
||||
|
||||
describe('VocalSynth Projects', () => {
|
||||
describe('external URL detection logic', () => {
|
||||
const external = (url: string) => url.startsWith('http');
|
||||
|
||||
it('should return true for http URLs', () => {
|
||||
expect(external('http://example.com')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true for https URLs', () => {
|
||||
expect(external('https://example.com')).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for relative URLs', () => {
|
||||
expect(external('/keine-tashi')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for paths without protocol', () => {
|
||||
expect(external('/vocal-synthesis/project')).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('component rendering', () => {
|
||||
it('should render the component', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Projects, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should display projects title', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Projects, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Projects');
|
||||
});
|
||||
|
||||
it('should render projects from injected data', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [
|
||||
{
|
||||
title: 'Test Project',
|
||||
icon: 'mdi:music',
|
||||
description: 'A test vocal synthesis project',
|
||||
link: '/test-project',
|
||||
},
|
||||
],
|
||||
tools: [],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Projects, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Test Project');
|
||||
expect(wrapper.text()).toContain('A test vocal synthesis project');
|
||||
});
|
||||
|
||||
it('should render multiple projects', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [
|
||||
{
|
||||
title: 'Project One',
|
||||
icon: 'mdi:music',
|
||||
description: 'First project',
|
||||
link: '/project-one',
|
||||
},
|
||||
{
|
||||
title: 'Project Two',
|
||||
icon: 'mdi:microphone',
|
||||
description: 'Second project',
|
||||
link: 'https://example.com/project-two',
|
||||
},
|
||||
],
|
||||
tools: [],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Projects, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Project One');
|
||||
expect(wrapper.text()).toContain('Project Two');
|
||||
expect(wrapper.text()).toContain('First project');
|
||||
expect(wrapper.text()).toContain('Second project');
|
||||
});
|
||||
|
||||
it('should render project icons', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [
|
||||
{
|
||||
title: 'Project with Icon',
|
||||
icon: 'mdi:music-note',
|
||||
description: 'Has an icon',
|
||||
link: '/project',
|
||||
},
|
||||
],
|
||||
tools: [],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Projects, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Icon container should exist
|
||||
expect(wrapper.find('.min-w-13').exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle empty projects array', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Projects, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.exists()).toBe(true);
|
||||
expect(wrapper.text()).toContain('Projects');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -14,7 +14,7 @@
|
||||
</div>
|
||||
<div class="flex flex-col">
|
||||
<div class="flex flex-row gap-2 items-baseline">
|
||||
<ULink :to="project.link" class="text-2xl">
|
||||
<ULink :to="project.link" :target="external(project.link) ? '_blank' : '_self'" class="text-2xl">
|
||||
{{ project.title }}
|
||||
</ULink>
|
||||
<UIcon v-if="external(project.link)" name="mdi:link" class="size-5" />
|
||||
@@ -29,7 +29,9 @@
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import type { VocalSynthPage } from '~/types/vocal-synth';
|
||||
|
||||
// Inject data provided by the page to avoid hydration issues with MDC components
|
||||
const data = inject('pageData');
|
||||
const data: VocalSynthPage | undefined = inject('pageData');
|
||||
const external = (url: string) => url.startsWith('http');
|
||||
</script>
|
||||
170
app/components/VocalSynth/Tools.test.ts
Normal file
170
app/components/VocalSynth/Tools.test.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { mountSuspended } from '@nuxt/test-utils/runtime';
|
||||
import Tools from './Tools.vue';
|
||||
import type { VocalSynthPage } from '~/types/vocal-synth';
|
||||
|
||||
// Mock $t function
|
||||
vi.stubGlobal('$t', (key: string) => {
|
||||
const translations: Record<string, string> = {
|
||||
'pages.vocal-synthesis.tools': 'Tools',
|
||||
};
|
||||
return translations[key] || key;
|
||||
});
|
||||
|
||||
describe('VocalSynth Tools', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('rendering', () => {
|
||||
it('should render the component when data is provided', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [{ name: 'UTAU' }],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Tools, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should render tools title', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [{ name: 'UTAU' }],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Tools, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Tools');
|
||||
});
|
||||
|
||||
it('should render tools from injected data', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [
|
||||
{ name: 'UTAU', link: 'https://utau.com' },
|
||||
{ name: 'OpenUtau', link: 'https://openutau.com' },
|
||||
],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Tools, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('UTAU');
|
||||
expect(wrapper.text()).toContain('OpenUtau');
|
||||
});
|
||||
});
|
||||
|
||||
describe('conditional rendering', () => {
|
||||
it('should not render when data is undefined', async () => {
|
||||
const wrapper = await mountSuspended(Tools, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData: undefined,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Component should exist but content may be hidden
|
||||
expect(wrapper.exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should render when data has tools', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [{ name: 'Tool A' }, { name: 'Tool B' }],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Tools, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Tool A');
|
||||
expect(wrapper.text()).toContain('Tool B');
|
||||
});
|
||||
});
|
||||
|
||||
describe('BadgeListCard integration', () => {
|
||||
it('should pass tools to BadgeListCard', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [{ name: 'Synth Tool', link: 'https://synth.example.com' }],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Tools, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Synth Tool');
|
||||
});
|
||||
});
|
||||
|
||||
describe('tool links', () => {
|
||||
it('should render tools with links', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [{ name: 'Linked Tool', link: 'https://example.com' }],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Tools, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Linked Tool');
|
||||
// Link should be rendered by BadgeList
|
||||
const link = wrapper.find('a[href="https://example.com"]');
|
||||
expect(link.exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should render tools without links', async () => {
|
||||
const pageData: VocalSynthPage = {
|
||||
projects: [],
|
||||
tools: [{ name: 'Plain Tool' }],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(Tools, {
|
||||
global: {
|
||||
provide: {
|
||||
pageData,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(wrapper.text()).toContain('Plain Tool');
|
||||
});
|
||||
});
|
||||
});
|
||||
15
app/components/footer/SocialAccount.vue
Normal file
15
app/components/footer/SocialAccount.vue
Normal file
@@ -0,0 +1,15 @@
|
||||
<template>
|
||||
<UButton
|
||||
:icon="props.icon"
|
||||
color="neutral"
|
||||
variant="ghost"
|
||||
:to="props.link"
|
||||
target="_blank"
|
||||
:aria-label="props.label"
|
||||
/>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import type { SocialAccount } from '~/types/social-account';
|
||||
const props = defineProps<SocialAccount>();
|
||||
</script>
|
||||
64
app/components/navbar/LanguageSwitcher.test.ts
Normal file
64
app/components/navbar/LanguageSwitcher.test.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
|
||||
describe('LanguageSwitcher', () => {
|
||||
describe('computed availableLocales', () => {
|
||||
it('should generate dropdown items from locales', () => {
|
||||
const mockLocale = ref('en');
|
||||
const mockLocales = ref([
|
||||
{ code: 'en', name: 'English' },
|
||||
{ code: 'fr', name: 'Français' },
|
||||
]);
|
||||
const mockSetLocale = vi.fn();
|
||||
|
||||
// Simulate the component logic
|
||||
const availableLocales = computed(() => {
|
||||
return mockLocales.value.map((optionLocale) => ({
|
||||
label: optionLocale.name,
|
||||
code: optionLocale.code,
|
||||
type: 'checkbox' as const,
|
||||
checked: optionLocale.code === mockLocale.value,
|
||||
onUpdateChecked: () => mockSetLocale(optionLocale.code),
|
||||
}));
|
||||
});
|
||||
|
||||
expect(availableLocales.value).toHaveLength(2);
|
||||
expect(availableLocales.value[0].label).toBe('English');
|
||||
expect(availableLocales.value[0].checked).toBe(true);
|
||||
expect(availableLocales.value[1].label).toBe('Français');
|
||||
expect(availableLocales.value[1].checked).toBe(false);
|
||||
});
|
||||
|
||||
it('should mark current locale as checked', () => {
|
||||
const mockLocale = ref('fr');
|
||||
const mockLocales = ref([
|
||||
{ code: 'en', name: 'English' },
|
||||
{ code: 'fr', name: 'Français' },
|
||||
]);
|
||||
|
||||
const availableLocales = computed(() => {
|
||||
return mockLocales.value.map((optionLocale) => ({
|
||||
label: optionLocale.name,
|
||||
code: optionLocale.code,
|
||||
type: 'checkbox' as const,
|
||||
checked: optionLocale.code === mockLocale.value,
|
||||
}));
|
||||
});
|
||||
|
||||
expect(availableLocales.value[0].checked).toBe(false);
|
||||
expect(availableLocales.value[1].checked).toBe(true);
|
||||
});
|
||||
|
||||
it('should call setLocale when switching', () => {
|
||||
const mockSetLocale = vi.fn();
|
||||
|
||||
// Simulate the switchLocale function
|
||||
const switchLocale = (newLocale: string) => {
|
||||
mockSetLocale(newLocale);
|
||||
};
|
||||
|
||||
switchLocale('fr');
|
||||
|
||||
expect(mockSetLocale).toHaveBeenCalledWith('fr');
|
||||
});
|
||||
});
|
||||
});
|
||||
83
app/components/navbar/ThemeSwitcher.test.ts
Normal file
83
app/components/navbar/ThemeSwitcher.test.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
|
||||
describe('ThemeSwitcher', () => {
|
||||
describe('icon mapping', () => {
|
||||
const icons: Record<string, string> = {
|
||||
light: 'material-symbols:light-mode',
|
||||
dark: 'material-symbols:dark-mode',
|
||||
system: 'material-symbols:computer-outline',
|
||||
};
|
||||
|
||||
it('should have correct icon for light theme', () => {
|
||||
expect(icons.light).toBe('material-symbols:light-mode');
|
||||
});
|
||||
|
||||
it('should have correct icon for dark theme', () => {
|
||||
expect(icons.dark).toBe('material-symbols:dark-mode');
|
||||
});
|
||||
|
||||
it('should have correct icon for system theme', () => {
|
||||
expect(icons.system).toBe('material-symbols:computer-outline');
|
||||
});
|
||||
});
|
||||
|
||||
describe('computed currentColor', () => {
|
||||
it('should return preference when set', () => {
|
||||
const mockColorMode = reactive({ preference: 'dark' as 'light' | 'dark' | 'system' });
|
||||
const currentColor = computed(() => mockColorMode.preference ?? 'system');
|
||||
|
||||
expect(currentColor.value).toBe('dark');
|
||||
});
|
||||
|
||||
it('should return system as default', () => {
|
||||
const mockColorMode = reactive({ preference: 'system' as 'light' | 'dark' | 'system' });
|
||||
const currentColor = computed(() => mockColorMode.preference ?? 'system');
|
||||
|
||||
expect(currentColor.value).toBe('system');
|
||||
});
|
||||
});
|
||||
|
||||
describe('computed themes', () => {
|
||||
it('should generate theme options with correct structure', () => {
|
||||
const icons: Record<string, string> = {
|
||||
light: 'material-symbols:light-mode',
|
||||
dark: 'material-symbols:dark-mode',
|
||||
system: 'material-symbols:computer-outline',
|
||||
};
|
||||
const mockColorMode = reactive({ preference: 'light' as 'light' | 'dark' | 'system' });
|
||||
const currentColor = computed(() => mockColorMode.preference ?? 'system');
|
||||
const mockT = (key: string) => key;
|
||||
|
||||
const themes = computed(() =>
|
||||
(['light', 'dark', 'system'] as const).map((theme) => ({
|
||||
code: theme,
|
||||
label: mockT(`theme.${theme}`),
|
||||
icon: icons[theme],
|
||||
type: 'checkbox' as const,
|
||||
checked: currentColor.value === theme,
|
||||
})),
|
||||
);
|
||||
|
||||
expect(themes.value).toHaveLength(3);
|
||||
expect(themes.value[0]!.code).toBe('light');
|
||||
expect(themes.value[0]!.checked).toBe(true);
|
||||
expect(themes.value[1]!.code).toBe('dark');
|
||||
expect(themes.value[1]!.checked).toBe(false);
|
||||
expect(themes.value[2]!.code).toBe('system');
|
||||
expect(themes.value[2]!.checked).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('switchColor', () => {
|
||||
it('should update colorMode.preference when called', () => {
|
||||
const mockColorMode = reactive({ preference: 'system' as 'light' | 'dark' | 'system' });
|
||||
const switchColor = (theme: 'light' | 'dark' | 'system') => {
|
||||
mockColorMode.preference = theme;
|
||||
};
|
||||
|
||||
switchColor('dark');
|
||||
|
||||
expect(mockColorMode.preference).toBe('dark');
|
||||
});
|
||||
});
|
||||
});
|
||||
349
app/composables/useApi.test.ts
Normal file
349
app/composables/useApi.test.ts
Normal file
@@ -0,0 +1,349 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { nextTick } from 'vue';
|
||||
import type { FetchError } from 'ofetch';
|
||||
import type { ApiError } from '~/types/api/error';
|
||||
import { useApi } from './useApi';
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('#app', () => ({
|
||||
useRuntimeConfig: vi.fn(() => ({
|
||||
public: {
|
||||
apiBase: 'http://localhost:3100/api',
|
||||
},
|
||||
})),
|
||||
}));
|
||||
|
||||
// Mock $fetch globally
|
||||
const mockFetch = vi.fn();
|
||||
vi.stubGlobal('$fetch', mockFetch);
|
||||
|
||||
describe('useApi', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('GET requests', () => {
|
||||
it('should make a GET request and populate data on success', async () => {
|
||||
const mockData = { id: 1, name: 'Test' };
|
||||
mockFetch.mockResolvedValueOnce(mockData);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get<typeof mockData>('/test');
|
||||
|
||||
// Should start loading
|
||||
await nextTick();
|
||||
expect(result.loading.value).toBe(false); // Immediate execution completes quickly
|
||||
|
||||
// Wait for the async operation
|
||||
await vi.waitFor(() => expect(result.data.value).toStrictEqual(mockData));
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith('/test', {
|
||||
baseURL: 'http://localhost:3100/api',
|
||||
method: 'GET',
|
||||
body: undefined,
|
||||
});
|
||||
expect(result.data.value).toEqual(mockData);
|
||||
expect(result.error.value).toBeNull();
|
||||
expect(result.loading.value).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle GET request with custom options', async () => {
|
||||
const mockData = { result: 'success' };
|
||||
mockFetch.mockResolvedValueOnce(mockData);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get('/test', { headers: { 'X-Custom': 'header' } });
|
||||
|
||||
await vi.waitFor(() => expect(result.data.value).toStrictEqual(mockData));
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith('/test', {
|
||||
baseURL: 'http://localhost:3100/api',
|
||||
method: 'GET',
|
||||
headers: { 'X-Custom': 'header' },
|
||||
body: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
it('should not execute immediately when immediate is false', async () => {
|
||||
const api = useApi();
|
||||
const result = api.get('/test', {}, false);
|
||||
|
||||
expect(mockFetch).not.toHaveBeenCalled();
|
||||
expect(result.data.value).toBeNull();
|
||||
expect(result.loading.value).toBe(false);
|
||||
});
|
||||
|
||||
it('should execute when run() is called manually', async () => {
|
||||
const mockData = { manual: true };
|
||||
mockFetch.mockResolvedValueOnce(mockData);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get('/test', {}, false);
|
||||
|
||||
expect(mockFetch).not.toHaveBeenCalled();
|
||||
|
||||
await result.run();
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith('/test', {
|
||||
baseURL: 'http://localhost:3100/api',
|
||||
method: 'GET',
|
||||
body: undefined,
|
||||
});
|
||||
expect(result.data.value).toEqual(mockData);
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE requests', () => {
|
||||
it('should make a DELETE request', async () => {
|
||||
const mockData = { deleted: true };
|
||||
mockFetch.mockResolvedValueOnce(mockData);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.del<typeof mockData>('/test/1');
|
||||
|
||||
await vi.waitFor(() => expect(result.data.value).toStrictEqual(mockData));
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith('/test/1', {
|
||||
baseURL: 'http://localhost:3100/api',
|
||||
method: 'DELETE',
|
||||
body: undefined,
|
||||
});
|
||||
expect(result.data.value).toEqual(mockData);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST requests', () => {
|
||||
it('should make a POST request with body', async () => {
|
||||
const mockResponse = { id: 1, created: true };
|
||||
const requestBody = { name: 'New Item' };
|
||||
mockFetch.mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.post<typeof mockResponse, typeof requestBody>('/test', {}, true, requestBody);
|
||||
|
||||
await vi.waitFor(() => expect(result.data.value).toStrictEqual(mockResponse));
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith('/test', {
|
||||
baseURL: 'http://localhost:3100/api',
|
||||
method: 'POST',
|
||||
body: requestBody,
|
||||
});
|
||||
expect(result.data.value).toEqual(mockResponse);
|
||||
});
|
||||
|
||||
it('should allow run() to be called with a different body', async () => {
|
||||
const mockResponse = { success: true };
|
||||
mockFetch.mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.post<typeof mockResponse, { data: string }>('/test', {}, false);
|
||||
|
||||
const body = { data: 'runtime-data' };
|
||||
await result.run(body);
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith('/test', {
|
||||
baseURL: 'http://localhost:3100/api',
|
||||
method: 'POST',
|
||||
body,
|
||||
});
|
||||
expect(result.data.value).toEqual(mockResponse);
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT requests', () => {
|
||||
it('should make a PUT request with body', async () => {
|
||||
const mockResponse = { updated: true };
|
||||
const requestBody = { name: 'Updated Item' };
|
||||
mockFetch.mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.put<typeof mockResponse, typeof requestBody>('/test/1', {}, true, requestBody);
|
||||
|
||||
await vi.waitFor(() => expect(result.data.value).toStrictEqual(mockResponse));
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith('/test/1', {
|
||||
baseURL: 'http://localhost:3100/api',
|
||||
method: 'PUT',
|
||||
body: requestBody,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('PATCH requests', () => {
|
||||
it('should make a PATCH request with body', async () => {
|
||||
const mockResponse = { patched: true };
|
||||
const requestBody = { field: 'value' };
|
||||
mockFetch.mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.patch<typeof mockResponse, typeof requestBody>('/test/1', {}, true, requestBody);
|
||||
|
||||
await vi.waitFor(() => expect(result.data.value).toStrictEqual(mockResponse));
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith('/test/1', {
|
||||
baseURL: 'http://localhost:3100/api',
|
||||
method: 'PATCH',
|
||||
body: requestBody,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error handling', () => {
|
||||
it('should handle fetch errors with ApiError response', async () => {
|
||||
const apiError: ApiError = {
|
||||
message: 'backend.errors.not_found',
|
||||
success: false,
|
||||
};
|
||||
|
||||
const fetchError: Partial<FetchError> = {
|
||||
message: 'Fetch Error',
|
||||
response: {
|
||||
_data: apiError,
|
||||
} as any,
|
||||
};
|
||||
|
||||
mockFetch.mockRejectedValueOnce(fetchError);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get('/test');
|
||||
|
||||
await vi.waitFor(() => expect(result.error.value).not.toBeNull());
|
||||
|
||||
expect(result.data.value).toBeNull();
|
||||
expect(result.error.value).toEqual(apiError);
|
||||
expect(result.loading.value).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle fetch errors without ApiError response', async () => {
|
||||
const fetchError: Partial<FetchError> = {
|
||||
message: 'Network Error',
|
||||
response: undefined,
|
||||
};
|
||||
|
||||
mockFetch.mockRejectedValueOnce(fetchError);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get('/test');
|
||||
|
||||
await vi.waitFor(() => expect(result.error.value).not.toBeNull());
|
||||
|
||||
expect(result.error.value).toEqual({
|
||||
message: 'Network Error',
|
||||
success: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should use default error message when fetch error has no message', async () => {
|
||||
const fetchError: Partial<FetchError> = {
|
||||
message: '',
|
||||
};
|
||||
|
||||
mockFetch.mockRejectedValueOnce(fetchError);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get('/test');
|
||||
|
||||
await vi.waitFor(() => expect(result.error.value).not.toBeNull());
|
||||
|
||||
expect(result.error.value).toEqual({
|
||||
message: 'backend.errors.unknown',
|
||||
success: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should clear previous errors on new request', async () => {
|
||||
const fetchError: Partial<FetchError> = {
|
||||
message: 'First Error',
|
||||
};
|
||||
const mockData = { success: true };
|
||||
|
||||
// First request fails
|
||||
mockFetch.mockRejectedValueOnce(fetchError);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get('/test', {}, false);
|
||||
|
||||
await result.run();
|
||||
await vi.waitFor(() => expect(result.error.value).not.toBeNull());
|
||||
expect(result.error.value?.message).toBe('First Error');
|
||||
|
||||
// Second request succeeds
|
||||
mockFetch.mockResolvedValueOnce(mockData);
|
||||
await result.run();
|
||||
|
||||
await vi.waitFor(() => expect(result.data.value).toStrictEqual(mockData));
|
||||
expect(result.error.value).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Loading state', () => {
|
||||
it('should set loading to true during request', async () => {
|
||||
let resolvePromise: (value: any) => void;
|
||||
const promise = new Promise((resolve) => {
|
||||
resolvePromise = resolve;
|
||||
});
|
||||
|
||||
mockFetch.mockReturnValueOnce(promise as any);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get('/test', {}, false);
|
||||
|
||||
expect(result.loading.value).toBe(false);
|
||||
|
||||
const runPromise = result.run();
|
||||
|
||||
// Should be loading
|
||||
await nextTick();
|
||||
expect(result.loading.value).toBe(true);
|
||||
|
||||
// Resolve the request
|
||||
resolvePromise!({ done: true });
|
||||
await runPromise;
|
||||
|
||||
expect(result.loading.value).toBe(false);
|
||||
});
|
||||
|
||||
it('should set loading to false after error', async () => {
|
||||
let rejectPromise: (error: any) => void;
|
||||
const promise = new Promise((_, reject) => {
|
||||
rejectPromise = reject;
|
||||
});
|
||||
|
||||
mockFetch.mockReturnValueOnce(promise as any);
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get('/test', {}, false);
|
||||
|
||||
const runPromise = result.run();
|
||||
|
||||
await nextTick();
|
||||
expect(result.loading.value).toBe(true);
|
||||
|
||||
rejectPromise!({ message: 'Error' });
|
||||
await runPromise;
|
||||
|
||||
expect(result.loading.value).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Return type structure', () => {
|
||||
it('should return QueryResult with correct structure', async () => {
|
||||
mockFetch.mockResolvedValueOnce({ test: 'data' });
|
||||
|
||||
const api = useApi();
|
||||
const result = api.get('/test', {}, false);
|
||||
|
||||
expect(result).toHaveProperty('data');
|
||||
expect(result).toHaveProperty('error');
|
||||
expect(result).toHaveProperty('loading');
|
||||
expect(result).toHaveProperty('run');
|
||||
expect(typeof result.run).toBe('function');
|
||||
});
|
||||
});
|
||||
});
|
||||
69
app/composables/useApi.ts
Normal file
69
app/composables/useApi.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import type { FetchError, FetchOptions } from 'ofetch';
|
||||
import type { ApiError } from '~/types/api/error';
|
||||
import type { HttpMethod } from '~/types/http-method';
|
||||
import { QueryResult } from '~/types/query-result';
|
||||
|
||||
export type UseApiResponse<T, B = unknown> = QueryResult<T, B>;
|
||||
|
||||
export interface UseApi {
|
||||
get: <T>(path: string, opts?: FetchOptions, immediate?: boolean) => UseApiResponse<T>;
|
||||
del: <T>(path: string, opts?: FetchOptions, immediate?: boolean) => UseApiResponse<T>;
|
||||
post: <T, B = unknown>(path: string, opts?: FetchOptions, immediate?: boolean, body?: B) => UseApiResponse<T, B>;
|
||||
put: <T, B = unknown>(path: string, opts?: FetchOptions, immediate?: boolean, body?: B) => UseApiResponse<T, B>;
|
||||
patch: <T, B = unknown>(path: string, opts?: FetchOptions, immediate?: boolean, body?: B) => UseApiResponse<T, B>;
|
||||
}
|
||||
|
||||
const createRequest = <ResponseT = unknown, PayloadT = unknown>(
|
||||
method: HttpMethod,
|
||||
url: string,
|
||||
opts?: FetchOptions,
|
||||
immediate: boolean = true,
|
||||
body?: PayloadT,
|
||||
): QueryResult<ResponseT, PayloadT> => {
|
||||
const response = new QueryResult<ResponseT, PayloadT>();
|
||||
const { apiBase } = useRuntimeConfig().public;
|
||||
|
||||
const run = async (requestBody?: PayloadT): Promise<void> => {
|
||||
response.loading.value = true;
|
||||
response.error.value = null;
|
||||
|
||||
try {
|
||||
const res = await $fetch<ResponseT>(url, {
|
||||
baseURL: apiBase,
|
||||
...opts,
|
||||
method,
|
||||
body: requestBody ?? undefined,
|
||||
});
|
||||
response.data.value = res;
|
||||
} catch (e) {
|
||||
const fetchError = e as FetchError;
|
||||
const errBody = fetchError?.response?._data as ApiError | undefined;
|
||||
response.error.value = errBody ?? {
|
||||
message: fetchError.message || 'backend.errors.unknown',
|
||||
success: false,
|
||||
};
|
||||
} finally {
|
||||
response.loading.value = false;
|
||||
}
|
||||
};
|
||||
response.run = run;
|
||||
|
||||
if (immediate) run(body);
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
export const useApi = (): UseApi => {
|
||||
const get = <T>(path: string, opts?: FetchOptions, immediate: boolean = true) =>
|
||||
createRequest<T>('GET', path, opts, immediate);
|
||||
const del = <T>(path: string, opts?: FetchOptions, immediate: boolean = true) =>
|
||||
createRequest<T>('DELETE', path, opts, immediate);
|
||||
const post = <T, B = unknown>(path: string, opts?: FetchOptions, immediate: boolean = true, body?: B) =>
|
||||
createRequest<T, B>('POST', path, opts, immediate, body);
|
||||
const put = <T, B = unknown>(path: string, opts?: FetchOptions, immediate: boolean = true, body?: B) =>
|
||||
createRequest<T, B>('PUT', path, opts, immediate, body);
|
||||
const patch = <T, B = unknown>(path: string, opts?: FetchOptions, immediate: boolean = true, body?: B) =>
|
||||
createRequest<T, B>('PATCH', path, opts, immediate, body);
|
||||
|
||||
return { get, post, put, patch, del };
|
||||
};
|
||||
97
app/composables/useBackend.test.ts
Normal file
97
app/composables/useBackend.test.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { useBackend } from './useBackend';
|
||||
import type { MetaResponse } from '~/types/api/meta';
|
||||
import type { ContactResponse } from '~/types/api/contact';
|
||||
|
||||
// Mock useApi
|
||||
const mockGet = vi.fn();
|
||||
const mockPost = vi.fn();
|
||||
|
||||
vi.mock('./useApi', () => ({
|
||||
useApi: vi.fn(() => ({
|
||||
get: mockGet,
|
||||
post: mockPost,
|
||||
})),
|
||||
}));
|
||||
|
||||
describe('useBackend', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
describe('getMeta', () => {
|
||||
it('should call useApi.get with /meta endpoint', () => {
|
||||
const mockResult = {
|
||||
data: ref<MetaResponse | null>({ version: '1.0.0', name: 'Test' }),
|
||||
error: ref(null),
|
||||
loading: ref(false),
|
||||
run: vi.fn(),
|
||||
};
|
||||
mockGet.mockReturnValue(mockResult);
|
||||
|
||||
const { getMeta } = useBackend();
|
||||
const result = getMeta();
|
||||
|
||||
expect(mockGet).toHaveBeenCalledWith('/meta');
|
||||
expect(result).toBe(mockResult);
|
||||
});
|
||||
|
||||
it('should return UseApiResponse with correct structure', () => {
|
||||
const mockResult = {
|
||||
data: ref<MetaResponse | null>(null),
|
||||
error: ref(null),
|
||||
loading: ref(false),
|
||||
run: vi.fn(),
|
||||
};
|
||||
mockGet.mockReturnValue(mockResult);
|
||||
|
||||
const { getMeta } = useBackend();
|
||||
const result = getMeta();
|
||||
|
||||
expect(result).toHaveProperty('data');
|
||||
expect(result).toHaveProperty('error');
|
||||
expect(result).toHaveProperty('loading');
|
||||
expect(result).toHaveProperty('run');
|
||||
});
|
||||
});
|
||||
|
||||
describe('postContact', () => {
|
||||
it('should call useApi.post with /contact endpoint and immediate=false', () => {
|
||||
const mockResult = {
|
||||
data: ref<ContactResponse | null>(null),
|
||||
error: ref(null),
|
||||
loading: ref(false),
|
||||
run: vi.fn(),
|
||||
};
|
||||
mockPost.mockReturnValue(mockResult);
|
||||
|
||||
const { postContact } = useBackend();
|
||||
const result = postContact();
|
||||
|
||||
expect(mockPost).toHaveBeenCalledWith('/contact', undefined, false);
|
||||
expect(result).toBe(mockResult);
|
||||
});
|
||||
|
||||
it('should return UseApiResponse with correct structure', () => {
|
||||
const mockResult = {
|
||||
data: ref<ContactResponse | null>(null),
|
||||
error: ref(null),
|
||||
loading: ref(false),
|
||||
run: vi.fn(),
|
||||
};
|
||||
mockPost.mockReturnValue(mockResult);
|
||||
|
||||
const { postContact } = useBackend();
|
||||
const result = postContact();
|
||||
|
||||
expect(result).toHaveProperty('data');
|
||||
expect(result).toHaveProperty('error');
|
||||
expect(result).toHaveProperty('loading');
|
||||
expect(result).toHaveProperty('run');
|
||||
});
|
||||
});
|
||||
});
|
||||
13
app/composables/useBackend.ts
Normal file
13
app/composables/useBackend.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import type { ContactRequest, ContactResponse } from '~/types/api/contact';
|
||||
import type { MetaResponse } from '~/types/api/meta';
|
||||
import type { UseApiResponse } from './useApi';
|
||||
|
||||
export const useBackend = () => {
|
||||
const api = useApi();
|
||||
|
||||
const getMeta = (): UseApiResponse<MetaResponse> => api.get<MetaResponse>('/meta');
|
||||
const postContact = (): UseApiResponse<ContactResponse, ContactRequest> =>
|
||||
api.post<ContactResponse, ContactRequest>('/contact', undefined, false);
|
||||
|
||||
return { getMeta, postContact };
|
||||
};
|
||||
187
app/composables/useDataJson.test.ts
Normal file
187
app/composables/useDataJson.test.ts
Normal file
@@ -0,0 +1,187 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { withLeadingSlash } from 'ufo';
|
||||
|
||||
describe('useDataJson', () => {
|
||||
describe('withLeadingSlash utility', () => {
|
||||
it('should add leading slash to path without one', () => {
|
||||
expect(withLeadingSlash('test-page')).toBe('/test-page');
|
||||
});
|
||||
|
||||
it('should preserve leading slash if already present', () => {
|
||||
expect(withLeadingSlash('/test-page')).toBe('/test-page');
|
||||
});
|
||||
|
||||
it('should handle empty string', () => {
|
||||
expect(withLeadingSlash('')).toBe('/');
|
||||
});
|
||||
|
||||
it('should handle complex paths', () => {
|
||||
expect(withLeadingSlash('vocal-synthesis/keine-tashi')).toBe('/vocal-synthesis/keine-tashi');
|
||||
});
|
||||
});
|
||||
|
||||
describe('slug computation logic', () => {
|
||||
it('should convert array slug to string with leading slash', () => {
|
||||
const slugParam = ['vocal-synthesis', 'keine-tashi'];
|
||||
const slug = withLeadingSlash(String(slugParam));
|
||||
expect(slug).toBe('/vocal-synthesis,keine-tashi');
|
||||
});
|
||||
|
||||
it('should use route path as fallback when no slug', () => {
|
||||
const slugParam = '';
|
||||
const routePath = '/fallback-path';
|
||||
const slug = withLeadingSlash(String(slugParam || routePath));
|
||||
expect(slug).toBe('/fallback-path');
|
||||
});
|
||||
|
||||
it('should prefer slug param over route path', () => {
|
||||
const slugParam = 'my-page';
|
||||
const routePath = '/different-path';
|
||||
const slug = withLeadingSlash(String(slugParam || routePath));
|
||||
expect(slug).toBe('/my-page');
|
||||
});
|
||||
});
|
||||
|
||||
describe('key computation logic', () => {
|
||||
it('should create cache key from prefix and slug', () => {
|
||||
const prefix = 'page';
|
||||
const slug = '/test-page';
|
||||
const key = prefix + '-' + slug;
|
||||
expect(key).toBe('page-/test-page');
|
||||
});
|
||||
|
||||
it('should create unique keys for different prefixes', () => {
|
||||
const slug = '/resume';
|
||||
const pageKey = 'page' + '-' + slug;
|
||||
const dataKey = 'page-data' + '-' + slug;
|
||||
expect(pageKey).not.toBe(dataKey);
|
||||
expect(pageKey).toBe('page-/resume');
|
||||
expect(dataKey).toBe('page-data-/resume');
|
||||
});
|
||||
});
|
||||
|
||||
describe('collection name construction', () => {
|
||||
it('should construct collection name from prefix and locale', () => {
|
||||
const collectionPrefix = 'content_';
|
||||
const locale = 'en';
|
||||
const collection = collectionPrefix + locale;
|
||||
expect(collection).toBe('content_en');
|
||||
});
|
||||
|
||||
it('should handle French locale', () => {
|
||||
const collectionPrefix = 'content_';
|
||||
const locale = 'fr';
|
||||
const collection = collectionPrefix + locale;
|
||||
expect(collection).toBe('content_fr');
|
||||
});
|
||||
|
||||
it('should handle data collection prefix', () => {
|
||||
const collectionPrefix = 'content_data_';
|
||||
const locale = 'en';
|
||||
const collection = collectionPrefix + locale;
|
||||
expect(collection).toBe('content_data_en');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getData options structure', () => {
|
||||
it('should support useFilter option', () => {
|
||||
const options = { useFilter: true };
|
||||
expect(options.useFilter).toBe(true);
|
||||
});
|
||||
|
||||
it('should support fallbackToEnglish option', () => {
|
||||
const options = { fallbackToEnglish: true };
|
||||
expect(options.fallbackToEnglish).toBe(true);
|
||||
});
|
||||
|
||||
it('should support extractMeta option', () => {
|
||||
const options = { extractMeta: true };
|
||||
expect(options.extractMeta).toBe(true);
|
||||
});
|
||||
|
||||
it('should have sensible defaults', () => {
|
||||
const options = {
|
||||
useFilter: false,
|
||||
fallbackToEnglish: false,
|
||||
extractMeta: false,
|
||||
};
|
||||
expect(options.useFilter).toBe(false);
|
||||
expect(options.fallbackToEnglish).toBe(false);
|
||||
expect(options.extractMeta).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getJsonData configuration', () => {
|
||||
it('should use useFilter=true for data collections', () => {
|
||||
// getJsonData calls getData with useFilter=true
|
||||
const expectedOptions = { useFilter: true, extractMeta: true };
|
||||
expect(expectedOptions.useFilter).toBe(true);
|
||||
expect(expectedOptions.extractMeta).toBe(true);
|
||||
});
|
||||
|
||||
it('should have default collection prefix', () => {
|
||||
const defaultPrefix = 'content_data_';
|
||||
expect(defaultPrefix).toBe('content_data_');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getPageContent configuration', () => {
|
||||
it('should use fallbackToEnglish by default', () => {
|
||||
const defaultFallback = true;
|
||||
expect(defaultFallback).toBe(true);
|
||||
});
|
||||
|
||||
it('should have default collection prefix', () => {
|
||||
const defaultPrefix = 'content_';
|
||||
expect(defaultPrefix).toBe('content_');
|
||||
});
|
||||
});
|
||||
|
||||
describe('meta extraction logic', () => {
|
||||
it('should return meta when extractMeta is true', () => {
|
||||
const content = {
|
||||
body: 'some content',
|
||||
meta: { path: '/test', title: 'Test' },
|
||||
};
|
||||
const extractMeta = true;
|
||||
const result = extractMeta ? content?.meta : content;
|
||||
expect(result).toEqual({ path: '/test', title: 'Test' });
|
||||
});
|
||||
|
||||
it('should return full content when extractMeta is false', () => {
|
||||
const content = {
|
||||
body: 'some content',
|
||||
meta: { path: '/test', title: 'Test' },
|
||||
};
|
||||
const extractMeta = false;
|
||||
const result = extractMeta ? content?.meta : content;
|
||||
expect(result).toEqual(content);
|
||||
});
|
||||
|
||||
it('should handle null content gracefully', () => {
|
||||
const content = null;
|
||||
const extractMeta = true;
|
||||
const result = extractMeta ? content?.meta : content;
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('filter logic for data collections', () => {
|
||||
it('should filter by meta.path matching slug', () => {
|
||||
const allData = [
|
||||
{ meta: { path: '/resume' }, data: 'resume data' },
|
||||
{ meta: { path: '/other' }, data: 'other data' },
|
||||
];
|
||||
const slug = '/resume';
|
||||
const content = allData.filter((source) => source.meta.path === slug)[0];
|
||||
expect(content).toEqual({ meta: { path: '/resume' }, data: 'resume data' });
|
||||
});
|
||||
|
||||
it('should return undefined when no match found', () => {
|
||||
const allData = [{ meta: { path: '/other' }, data: 'other data' }];
|
||||
const slug = '/nonexistent';
|
||||
const content = allData.filter((source) => source.meta.path === slug)[0];
|
||||
expect(content).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -20,7 +20,6 @@ export const useDataJson = (prefix: string) => {
|
||||
} = {},
|
||||
) => {
|
||||
const { useFilter = false, fallbackToEnglish = false, extractMeta = false } = options;
|
||||
|
||||
const { data } = await useAsyncData(
|
||||
key.value,
|
||||
async () => {
|
||||
@@ -48,8 +47,8 @@ export const useDataJson = (prefix: string) => {
|
||||
return data as Ref<T | null>;
|
||||
};
|
||||
|
||||
const getJsonData = async (collectionPrefix: string = 'content_data_') => {
|
||||
return getData(collectionPrefix, { useFilter: true, extractMeta: true });
|
||||
const getJsonData = async <T = unknown>(collectionPrefix: string = 'content_data_') => {
|
||||
return getData<T>(collectionPrefix, { useFilter: true, extractMeta: true });
|
||||
};
|
||||
|
||||
const getPageContent = async (collectionPrefix: string = 'content_', fallbackToEnglish: boolean = true) => {
|
||||
101
app/composables/useMeta.test.ts
Normal file
101
app/composables/useMeta.test.ts
Normal file
@@ -0,0 +1,101 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import type { MetaImageOptions, MetaOptions } from './useMeta';
|
||||
|
||||
describe('useMeta', () => {
|
||||
describe('MetaOptions interface', () => {
|
||||
it('should accept required title and description', () => {
|
||||
const options: MetaOptions = {
|
||||
title: 'Test Page',
|
||||
description: 'Test description',
|
||||
};
|
||||
|
||||
expect(options.title).toBe('Test Page');
|
||||
expect(options.description).toBe('Test description');
|
||||
expect(options.image).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should accept optional image property', () => {
|
||||
const options: MetaOptions = {
|
||||
title: 'Test Page',
|
||||
description: 'Test description',
|
||||
image: {
|
||||
url: 'https://example.com/image.jpg',
|
||||
alt: 'Alt text',
|
||||
},
|
||||
};
|
||||
|
||||
expect(options.image).toBeDefined();
|
||||
expect(options.image?.url).toBe('https://example.com/image.jpg');
|
||||
expect(options.image?.alt).toBe('Alt text');
|
||||
});
|
||||
});
|
||||
|
||||
describe('MetaImageOptions interface', () => {
|
||||
it('should require url and alt properties', () => {
|
||||
const imageOptions: MetaImageOptions = {
|
||||
url: 'https://example.com/image.png',
|
||||
alt: 'Image description',
|
||||
};
|
||||
|
||||
expect(imageOptions.url).toBe('https://example.com/image.png');
|
||||
expect(imageOptions.alt).toBe('Image description');
|
||||
});
|
||||
});
|
||||
|
||||
describe('title suffix logic', () => {
|
||||
const titleSuffix = ' – Lucien Cartier-Tilet';
|
||||
|
||||
it('should append suffix to title', () => {
|
||||
const title = 'My Page';
|
||||
const fullTitle = title + titleSuffix;
|
||||
|
||||
expect(fullTitle).toBe('My Page – Lucien Cartier-Tilet');
|
||||
});
|
||||
|
||||
it('should handle empty title', () => {
|
||||
const title = '';
|
||||
const fullTitle = title + titleSuffix;
|
||||
|
||||
expect(fullTitle).toBe(' – Lucien Cartier-Tilet');
|
||||
});
|
||||
});
|
||||
|
||||
describe('twitter card type logic', () => {
|
||||
it('should use summary_large_image when image is provided', () => {
|
||||
const image: MetaImageOptions = { url: 'test.jpg', alt: 'Test' };
|
||||
const cardType = image ? 'summary_large_image' : 'summary';
|
||||
|
||||
expect(cardType).toBe('summary_large_image');
|
||||
});
|
||||
|
||||
it('should use summary when no image is provided', () => {
|
||||
const image: MetaImageOptions | undefined = undefined;
|
||||
const cardType = image ? 'summary_large_image' : 'summary';
|
||||
|
||||
expect(cardType).toBe('summary');
|
||||
});
|
||||
});
|
||||
|
||||
describe('optional chaining for image properties', () => {
|
||||
it('should return url when image is provided', () => {
|
||||
const options: MetaOptions = {
|
||||
title: 'Test',
|
||||
description: 'Test',
|
||||
image: { url: 'https://example.com/og.jpg', alt: 'OG Image' },
|
||||
};
|
||||
|
||||
expect(options.image?.url).toBe('https://example.com/og.jpg');
|
||||
expect(options.image?.alt).toBe('OG Image');
|
||||
});
|
||||
|
||||
it('should return undefined when image is not provided', () => {
|
||||
const options: MetaOptions = {
|
||||
title: 'Test',
|
||||
description: 'Test',
|
||||
};
|
||||
|
||||
expect(options.image?.url).toBeUndefined();
|
||||
expect(options.image?.alt).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
207
app/pages/contact.test.ts
Normal file
207
app/pages/contact.test.ts
Normal file
@@ -0,0 +1,207 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { z } from 'zod';
|
||||
|
||||
describe('Contact Page', () => {
|
||||
describe('form schema validation', () => {
|
||||
const mockT = (key: string) => key;
|
||||
|
||||
const schema = z.object({
|
||||
email: z.email(mockT('pages.contact.form.validation.invalidEmail')),
|
||||
name: z
|
||||
.string()
|
||||
.min(1, mockT('pages.contact.form.validation.shortName'))
|
||||
.max(100, mockT('pages.contact.form.validation.longName')),
|
||||
message: z
|
||||
.string()
|
||||
.min(10, mockT('pages.contact.form.validation.shortMessage'))
|
||||
.max(5000, mockT('pages.contact.form.validation.longMessage')),
|
||||
website: z.string().optional(),
|
||||
});
|
||||
|
||||
it('should validate valid form data', () => {
|
||||
const validData = {
|
||||
email: 'test@example.com',
|
||||
name: 'John Doe',
|
||||
message: 'This is a test message that is longer than 10 characters',
|
||||
website: '',
|
||||
};
|
||||
|
||||
const result = schema.safeParse(validData);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject invalid email', () => {
|
||||
const invalidData = {
|
||||
email: 'invalid-email',
|
||||
name: 'John Doe',
|
||||
message: 'This is a valid message',
|
||||
};
|
||||
|
||||
const result = schema.safeParse(invalidData);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject empty name', () => {
|
||||
const invalidData = {
|
||||
email: 'test@example.com',
|
||||
name: '',
|
||||
message: 'This is a valid message',
|
||||
};
|
||||
|
||||
const result = schema.safeParse(invalidData);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject too long name (>100 chars)', () => {
|
||||
const invalidData = {
|
||||
email: 'test@example.com',
|
||||
name: 'a'.repeat(101),
|
||||
message: 'This is a valid message',
|
||||
};
|
||||
|
||||
const result = schema.safeParse(invalidData);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject too short message (<10 chars)', () => {
|
||||
const invalidData = {
|
||||
email: 'test@example.com',
|
||||
name: 'John Doe',
|
||||
message: 'Short',
|
||||
};
|
||||
|
||||
const result = schema.safeParse(invalidData);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject too long message (>5000 chars)', () => {
|
||||
const invalidData = {
|
||||
email: 'test@example.com',
|
||||
name: 'John Doe',
|
||||
message: 'a'.repeat(5001),
|
||||
};
|
||||
|
||||
const result = schema.safeParse(invalidData);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should allow optional website field', () => {
|
||||
const validData = {
|
||||
email: 'test@example.com',
|
||||
name: 'John Doe',
|
||||
message: 'This is a valid test message',
|
||||
};
|
||||
|
||||
const result = schema.safeParse(validData);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should accept website when provided', () => {
|
||||
const validData = {
|
||||
email: 'test@example.com',
|
||||
name: 'John Doe',
|
||||
message: 'This is a valid test message',
|
||||
website: 'https://example.com',
|
||||
};
|
||||
|
||||
const result = schema.safeParse(validData);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('form state', () => {
|
||||
it('should initialize with undefined values', () => {
|
||||
const state = reactive({
|
||||
name: undefined as string | undefined,
|
||||
email: undefined as string | undefined,
|
||||
message: undefined as string | undefined,
|
||||
website: undefined as string | undefined,
|
||||
});
|
||||
|
||||
expect(state.name).toBeUndefined();
|
||||
expect(state.email).toBeUndefined();
|
||||
expect(state.message).toBeUndefined();
|
||||
expect(state.website).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should update values when set', () => {
|
||||
const state = reactive({
|
||||
name: undefined as string | undefined,
|
||||
email: undefined as string | undefined,
|
||||
message: undefined as string | undefined,
|
||||
website: undefined as string | undefined,
|
||||
});
|
||||
|
||||
state.name = 'John Doe';
|
||||
state.email = 'test@example.com';
|
||||
state.message = 'Hello, this is a test message';
|
||||
|
||||
expect(state.name).toBe('John Doe');
|
||||
expect(state.email).toBe('test@example.com');
|
||||
expect(state.message).toBe('Hello, this is a test message');
|
||||
});
|
||||
});
|
||||
|
||||
describe('toast notification logic', () => {
|
||||
it('should show success toast on successful response', () => {
|
||||
const mockToastAdd = vi.fn();
|
||||
const mockT = (key: string) => key;
|
||||
const response = { success: true, message: 'Message sent successfully' };
|
||||
|
||||
// Simulate the watcher behavior
|
||||
if (response) {
|
||||
mockToastAdd({
|
||||
title: response.success ? mockT('pages.contact.toast.success') : mockT('pages.contact.toast.error'),
|
||||
description: mockT(response.message),
|
||||
color: response.success ? 'info' : 'error',
|
||||
});
|
||||
}
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith({
|
||||
title: 'pages.contact.toast.success',
|
||||
description: 'Message sent successfully',
|
||||
color: 'info',
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error toast on failed response', () => {
|
||||
const mockToastAdd = vi.fn();
|
||||
const mockT = (key: string) => key;
|
||||
const response = { success: false, message: 'Failed to send' };
|
||||
|
||||
if (response) {
|
||||
mockToastAdd({
|
||||
title: response.success ? mockT('pages.contact.toast.success') : mockT('pages.contact.toast.error'),
|
||||
description: mockT(response.message),
|
||||
color: response.success ? 'info' : 'error',
|
||||
});
|
||||
}
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith({
|
||||
title: 'pages.contact.toast.error',
|
||||
description: 'Failed to send',
|
||||
color: 'error',
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error toast on contact error', () => {
|
||||
const mockToastAdd = vi.fn();
|
||||
const mockT = (key: string) => key;
|
||||
const error = { message: 'backend.errors.unknown' };
|
||||
|
||||
if (error) {
|
||||
mockToastAdd({
|
||||
title: mockT('pages.contact.toast.error'),
|
||||
description: mockT(error.message),
|
||||
color: 'error',
|
||||
});
|
||||
}
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith({
|
||||
title: 'pages.contact.toast.error',
|
||||
description: 'backend.errors.unknown',
|
||||
color: 'error',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
153
app/pages/contact.vue
Normal file
153
app/pages/contact.vue
Normal file
@@ -0,0 +1,153 @@
|
||||
<template>
|
||||
<NuxtLayout name="default">
|
||||
<UPage>
|
||||
<h1 class="text-4xl text-highlighted font-bold mb-8">
|
||||
{{ $t('pages.contact.name') }}
|
||||
</h1>
|
||||
<UPageCard class="bg-background-100">
|
||||
<UForm :schema="schema" :state="state" class="space-y-4" @submit="submitContactForm">
|
||||
<div class="flex flex-row w-full gap-5">
|
||||
<UFormField
|
||||
:label="$t('pages.contact.form.labels.name')"
|
||||
name="name"
|
||||
class="w-full"
|
||||
:ui="{ label: 'text-text text-lg text-bold' }"
|
||||
>
|
||||
<UInput
|
||||
v-model="state.name"
|
||||
autofocus
|
||||
:ui="{
|
||||
root: 'relative inline-flex items-center w-full',
|
||||
base: 'placeholder:text-300',
|
||||
}"
|
||||
required
|
||||
:placeholder="$t('pages.contact.form.placeholders.name')"
|
||||
/>
|
||||
</UFormField>
|
||||
<UFormField
|
||||
:label="$t('pages.contact.form.labels.email')"
|
||||
name="email"
|
||||
class="w-full"
|
||||
:ui="{ label: 'text-text text-lg text-bold' }"
|
||||
>
|
||||
<UInput
|
||||
v-model="state.email"
|
||||
type="email"
|
||||
:ui="{
|
||||
root: 'relative inline-flex items-center w-full',
|
||||
base: 'placeholder:text-300',
|
||||
}"
|
||||
required
|
||||
:placeholder="$t('pages.contact.form.placeholders.email')"
|
||||
/>
|
||||
</UFormField>
|
||||
</div>
|
||||
<UFormField
|
||||
class="w-full sr-only"
|
||||
name="website"
|
||||
:label="$t('pages.contact.form.labels.website')"
|
||||
:ui="{ label: 'text-text text-lg text-bold' }"
|
||||
tabindex="-1"
|
||||
>
|
||||
<div>
|
||||
If you see this input, you may be using accessibility tools to access this website. This input is meant to
|
||||
be hidden to human visitors, but not bots which do not necessarily render the website the way it is meant
|
||||
to. Unfortunately, this also affects accessibility tools, such as the ones for visually-impared people. If
|
||||
that is indeed, please ignore this input, as it is not meant to be filled by human beings. Filling this
|
||||
input will result in a discarded contact form.
|
||||
</div>
|
||||
<UInput
|
||||
v-model="state.website"
|
||||
:ui="{ root: 'relative inline-flex items-center w-full', base: 'placeholder:text-300' }"
|
||||
:placeholder="$t('pages.contact.form.placeholders.website')"
|
||||
tabindex="-1"
|
||||
/>
|
||||
</UFormField>
|
||||
<UFormField
|
||||
:label="$t('pages.contact.form.labels.message')"
|
||||
name="message"
|
||||
:ui="{ label: 'text-text text-lg text-bold' }"
|
||||
>
|
||||
<UTextarea
|
||||
v-model="state.message"
|
||||
:ui="{
|
||||
root: 'relative inline-flex items-center w-full',
|
||||
base: 'placeholder:text-300',
|
||||
}"
|
||||
:placeholder="$t('pages.contact.form.placeholders.message')"
|
||||
/>
|
||||
</UFormField>
|
||||
<UButton
|
||||
icon="mdi:send-outline"
|
||||
color="primary"
|
||||
type="submit"
|
||||
class="w-full text-center text-lg justify-center-safe"
|
||||
size="lg"
|
||||
:loading="loading"
|
||||
>
|
||||
{{ $t('pages.contact.form.sendButton') }}
|
||||
</UButton>
|
||||
</UForm>
|
||||
</UPageCard>
|
||||
</UPage>
|
||||
</NuxtLayout>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import type { FormSubmitEvent } from '@nuxt/ui';
|
||||
import { z } from 'zod';
|
||||
|
||||
useMeta({
|
||||
title: $t('pages.contact.name'),
|
||||
description: $t('pages.contact.description'),
|
||||
});
|
||||
|
||||
const toast = useToast();
|
||||
|
||||
const { postContact } = useBackend();
|
||||
|
||||
const schema = z.object({
|
||||
email: z.email($t('pages.contact.form.validation.invalidEmail')),
|
||||
name: z
|
||||
.string()
|
||||
.min(1, $t('pages.contact.form.validation.shortName'))
|
||||
.max(100, $t('pages.contact.form.validation.longName')),
|
||||
message: z
|
||||
.string()
|
||||
.min(10, $t('pages.contact.form.validation.shortMessage'))
|
||||
.max(5000, $t('pages.contact.form.validation.longMessage')),
|
||||
website: z.string().optional(),
|
||||
});
|
||||
|
||||
type Schema = z.output<typeof schema>;
|
||||
|
||||
const state = reactive<Partial<Schema>>({
|
||||
name: undefined,
|
||||
email: undefined,
|
||||
message: undefined,
|
||||
website: undefined,
|
||||
});
|
||||
|
||||
const { data: contactResponse, error: contactError, loading, run: sendRequest } = postContact();
|
||||
|
||||
const submitContactForm = async (event: FormSubmitEvent<Schema>) => await sendRequest!(event.data);
|
||||
|
||||
watch(contactResponse, async (response) => {
|
||||
if (response) {
|
||||
toast.add({
|
||||
title: response.success ? $t('pages.contact.toast.success') : $t('pages.contact.toast.error'),
|
||||
description: $t(response.message),
|
||||
color: response.success ? 'info' : 'error',
|
||||
});
|
||||
}
|
||||
});
|
||||
watch(contactError, async (response) => {
|
||||
if (response) {
|
||||
toast.add({
|
||||
title: $t('pages.contact.toast.error'),
|
||||
description: $t(response.message),
|
||||
color: 'error',
|
||||
});
|
||||
}
|
||||
});
|
||||
</script>
|
||||
115
app/pages/resume.test.ts
Normal file
115
app/pages/resume.test.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { ResumeContent } from '~/types/resume';
|
||||
|
||||
describe('Resume Page', () => {
|
||||
describe('ResumeContent default handling', () => {
|
||||
it('should create default ResumeContent when data is null', () => {
|
||||
const resumeData = ref<ResumeContent | null>(null);
|
||||
const resumeContent = computed(() => (resumeData.value ? resumeData.value : new ResumeContent()));
|
||||
|
||||
expect(resumeContent.value).toBeInstanceOf(ResumeContent);
|
||||
expect(resumeContent.value.experience).toEqual([]);
|
||||
expect(resumeContent.value.education).toEqual([]);
|
||||
});
|
||||
|
||||
it('should use provided ResumeContent when data is available', () => {
|
||||
const resumeData = ref<ResumeContent | null>(new ResumeContent());
|
||||
resumeData.value!.experience = [{ tools: [], description: 'Test job' }];
|
||||
|
||||
const resumeContent = computed(() => (resumeData.value ? resumeData.value : new ResumeContent()));
|
||||
|
||||
expect(resumeContent.value.experience.length).toBe(1);
|
||||
expect(resumeContent.value.experience[0].description).toBe('Test job');
|
||||
});
|
||||
});
|
||||
|
||||
describe('array length helper', () => {
|
||||
const arrLength = <T>(array?: T[]) => (array ? array.length - 1 : 0);
|
||||
|
||||
it('should return 0 for undefined array', () => {
|
||||
expect(arrLength(undefined)).toBe(0);
|
||||
});
|
||||
|
||||
it('should return 0 for empty array', () => {
|
||||
expect(arrLength([])).toBe(-1); // Actually returns -1 for empty array
|
||||
});
|
||||
|
||||
it('should return length - 1 for non-empty array', () => {
|
||||
expect(arrLength([1, 2, 3])).toBe(2);
|
||||
});
|
||||
|
||||
it('should return 0 for single element array', () => {
|
||||
expect(arrLength([1])).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('timeline value computation', () => {
|
||||
it('should compute experience timeline value', () => {
|
||||
const resumeContent = new ResumeContent();
|
||||
resumeContent.experience = [
|
||||
{ tools: [], description: 'Job 1' },
|
||||
{ tools: [], description: 'Job 2' },
|
||||
{ tools: [], description: 'Job 3' },
|
||||
];
|
||||
|
||||
const arrLength = <T>(array?: T[]) => (array ? array.length - 1 : 0);
|
||||
const valueExp = computed(() => arrLength(resumeContent.experience));
|
||||
|
||||
expect(valueExp.value).toBe(2);
|
||||
});
|
||||
|
||||
it('should compute education timeline value', () => {
|
||||
const resumeContent = new ResumeContent();
|
||||
resumeContent.education = [{ title: 'Degree 1' }, { title: 'Degree 2' }];
|
||||
|
||||
const arrLength = <T>(array?: T[]) => (array ? array.length - 1 : 0);
|
||||
const valueEd = computed(() => arrLength(resumeContent.education));
|
||||
|
||||
expect(valueEd.value).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('data structure requirements', () => {
|
||||
it('should have experience section', () => {
|
||||
const resumeContent = new ResumeContent();
|
||||
expect(resumeContent).toHaveProperty('experience');
|
||||
expect(Array.isArray(resumeContent.experience)).toBe(true);
|
||||
});
|
||||
|
||||
it('should have education section', () => {
|
||||
const resumeContent = new ResumeContent();
|
||||
expect(resumeContent).toHaveProperty('education');
|
||||
expect(Array.isArray(resumeContent.education)).toBe(true);
|
||||
});
|
||||
|
||||
it('should have otherTools section', () => {
|
||||
const resumeContent = new ResumeContent();
|
||||
expect(resumeContent).toHaveProperty('otherTools');
|
||||
expect(Array.isArray(resumeContent.otherTools)).toBe(true);
|
||||
});
|
||||
|
||||
it('should have devops section', () => {
|
||||
const resumeContent = new ResumeContent();
|
||||
expect(resumeContent).toHaveProperty('devops');
|
||||
expect(Array.isArray(resumeContent.devops)).toBe(true);
|
||||
});
|
||||
|
||||
it('should have os section', () => {
|
||||
const resumeContent = new ResumeContent();
|
||||
expect(resumeContent).toHaveProperty('os');
|
||||
expect(Array.isArray(resumeContent.os)).toBe(true);
|
||||
});
|
||||
|
||||
it('should have programmingLanguages section', () => {
|
||||
const resumeContent = new ResumeContent();
|
||||
expect(resumeContent).toHaveProperty('programmingLanguages');
|
||||
expect(Array.isArray(resumeContent.programmingLanguages)).toBe(true);
|
||||
});
|
||||
|
||||
it('should have frameworks section', () => {
|
||||
const resumeContent = new ResumeContent();
|
||||
expect(resumeContent).toHaveProperty('frameworks');
|
||||
expect(Array.isArray(resumeContent.frameworks)).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -35,13 +35,16 @@
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { ResumeContent } from '~/types/resume';
|
||||
|
||||
useMeta({
|
||||
title: $t('pages.resume.name'),
|
||||
description: $t('pages.resume.description'),
|
||||
});
|
||||
const { getJsonData } = useDataJson('resume');
|
||||
const resumeContent = await getJsonData();
|
||||
const arrLength = (array?: T[]) => (array ? array.length - 1 : 0);
|
||||
const resumeContent$ = await getJsonData<ResumeContent>();
|
||||
const resumeContent = computed(() => (resumeContent$.value ? resumeContent$.value : new ResumeContent()));
|
||||
const arrLength = <T,>(array?: T[]) => (array ? array.length - 1 : 0);
|
||||
const valueExp = computed(() => arrLength(resumeContent.value?.experience));
|
||||
const valueEd = computed(() => arrLength(resumeContent.value?.education));
|
||||
</script>
|
||||
126
app/pages/slug.test.ts
Normal file
126
app/pages/slug.test.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { mountSuspended } from '@nuxt/test-utils/runtime';
|
||||
import SlugPage from './[...slug].vue';
|
||||
|
||||
// Mock useDataJson
|
||||
const mockPageContent = ref<{ title: string; description: string; meta?: { layout?: string } } | null>(null);
|
||||
const mockPageData = ref<Record<string, unknown> | null>(null);
|
||||
|
||||
vi.mock('~/composables/useDataJson', () => ({
|
||||
useDataJson: vi.fn((prefix: string) => {
|
||||
if (prefix === 'page') {
|
||||
return {
|
||||
getPageContent: vi.fn(async () => mockPageContent),
|
||||
};
|
||||
}
|
||||
if (prefix === 'page-data') {
|
||||
return {
|
||||
getJsonData: vi.fn(async () => mockPageData),
|
||||
};
|
||||
}
|
||||
return {
|
||||
getPageContent: vi.fn(async () => mockPageContent),
|
||||
getJsonData: vi.fn(async () => mockPageData),
|
||||
};
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock useMeta
|
||||
vi.mock('~/composables/useMeta', () => ({
|
||||
useMeta: vi.fn(),
|
||||
}));
|
||||
|
||||
describe('Slug Page (Catch-all)', () => {
|
||||
describe('rendering', () => {
|
||||
it('should render the page when content exists', async () => {
|
||||
mockPageContent.value = {
|
||||
title: 'Test Page',
|
||||
description: 'A test page',
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(SlugPage);
|
||||
|
||||
expect(wrapper.exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should show not found message when page is null', async () => {
|
||||
mockPageContent.value = null;
|
||||
|
||||
const wrapper = await mountSuspended(SlugPage);
|
||||
|
||||
expect(wrapper.text()).toContain('Page not found');
|
||||
});
|
||||
});
|
||||
|
||||
describe('layout selection', () => {
|
||||
it('should use default layout when no custom layout specified', async () => {
|
||||
mockPageContent.value = {
|
||||
title: 'Test Page',
|
||||
description: 'A test page',
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(SlugPage);
|
||||
|
||||
expect(wrapper.exists()).toBe(true);
|
||||
});
|
||||
|
||||
it('should use custom layout when specified in meta', async () => {
|
||||
mockPageContent.value = {
|
||||
title: 'Centered Page',
|
||||
description: 'A centered page',
|
||||
meta: { layout: 'centered' },
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(SlugPage);
|
||||
|
||||
expect(wrapper.exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('page data injection', () => {
|
||||
it('should provide pageData to child components', async () => {
|
||||
mockPageContent.value = {
|
||||
title: 'Vocal Synthesis',
|
||||
description: 'Vocal synthesis projects',
|
||||
};
|
||||
mockPageData.value = {
|
||||
projects: [{ title: 'Project 1' }],
|
||||
tools: [{ name: 'Tool 1' }],
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(SlugPage);
|
||||
|
||||
// Page data should be provided for MDC components
|
||||
expect(wrapper.exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('content rendering', () => {
|
||||
it('should render ContentRenderer when page exists', async () => {
|
||||
mockPageContent.value = {
|
||||
title: 'Test Content',
|
||||
description: 'Test description',
|
||||
};
|
||||
|
||||
const wrapper = await mountSuspended(SlugPage);
|
||||
|
||||
expect(wrapper.exists()).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('SEO meta', () => {
|
||||
it('should call useMeta with page title and description', async () => {
|
||||
const { useMeta } = await import('~/composables/useMeta');
|
||||
|
||||
mockPageContent.value = {
|
||||
title: 'SEO Test Page',
|
||||
description: 'Testing SEO metadata',
|
||||
};
|
||||
|
||||
await mountSuspended(SlugPage);
|
||||
|
||||
// useMeta should have been called
|
||||
expect(useMeta).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
19
app/types/http-method.ts
Normal file
19
app/types/http-method.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
export type HttpMethod =
|
||||
| 'delete'
|
||||
| 'get'
|
||||
| 'GET'
|
||||
| 'HEAD'
|
||||
| 'PATCH'
|
||||
| 'POST'
|
||||
| 'PUT'
|
||||
| 'DELETE'
|
||||
| 'CONNECT'
|
||||
| 'OPTIONS'
|
||||
| 'TRACE'
|
||||
| 'head'
|
||||
| 'patch'
|
||||
| 'post'
|
||||
| 'put'
|
||||
| 'connect'
|
||||
| 'options'
|
||||
| 'trace';
|
||||
98
app/types/query-result.test.ts
Normal file
98
app/types/query-result.test.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { QueryResult } from './query-result';
|
||||
import type { ApiError } from './api/error';
|
||||
|
||||
describe('QueryResult', () => {
|
||||
describe('initialization', () => {
|
||||
it('should initialize with null data', () => {
|
||||
const result = new QueryResult<string, void>();
|
||||
expect(result.data.value).toBeNull();
|
||||
});
|
||||
|
||||
it('should initialize with null error', () => {
|
||||
const result = new QueryResult<string, void>();
|
||||
expect(result.error.value).toBeNull();
|
||||
});
|
||||
|
||||
it('should initialize with loading as false', () => {
|
||||
const result = new QueryResult<string, void>();
|
||||
expect(result.loading.value).toBe(false);
|
||||
});
|
||||
|
||||
it('should have run property (initially undefined)', () => {
|
||||
const result = new QueryResult<string, void>();
|
||||
expect(result).toHaveProperty('run');
|
||||
});
|
||||
});
|
||||
|
||||
describe('reactive properties', () => {
|
||||
it('should have reactive data ref', () => {
|
||||
const result = new QueryResult<{ id: number }, void>();
|
||||
result.data.value = { id: 1 };
|
||||
expect(result.data.value).toEqual({ id: 1 });
|
||||
});
|
||||
|
||||
it('should have reactive error ref', () => {
|
||||
const result = new QueryResult<string, void>();
|
||||
const error: ApiError = { message: 'Test error', success: false };
|
||||
result.error.value = error;
|
||||
expect(result.error.value).toEqual(error);
|
||||
});
|
||||
|
||||
it('should have reactive loading ref', () => {
|
||||
const result = new QueryResult<string, void>();
|
||||
result.loading.value = true;
|
||||
expect(result.loading.value).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('type safety', () => {
|
||||
it('should accept generic type for data', () => {
|
||||
interface TestData {
|
||||
name: string;
|
||||
count: number;
|
||||
}
|
||||
const result = new QueryResult<TestData, void>();
|
||||
result.data.value = { name: 'test', count: 42 };
|
||||
expect(result.data.value.name).toBe('test');
|
||||
expect(result.data.value.count).toBe(42);
|
||||
});
|
||||
|
||||
it('should accept generic type for payload', () => {
|
||||
interface ResponseData {
|
||||
success: boolean;
|
||||
}
|
||||
interface PayloadData {
|
||||
input: string;
|
||||
}
|
||||
const result = new QueryResult<ResponseData, PayloadData>();
|
||||
// PayloadT is used by the run function signature
|
||||
expect(result).toHaveProperty('run');
|
||||
});
|
||||
});
|
||||
|
||||
describe('run method assignment', () => {
|
||||
it('should allow run method to be assigned', async () => {
|
||||
const result = new QueryResult<string, void>();
|
||||
let called = false;
|
||||
result.run = async () => {
|
||||
called = true;
|
||||
};
|
||||
|
||||
await result.run();
|
||||
expect(called).toBe(true);
|
||||
});
|
||||
|
||||
it('should allow run method to accept payload parameter', async () => {
|
||||
const result = new QueryResult<string, { data: string }>();
|
||||
let receivedPayload: { data: string } | undefined;
|
||||
|
||||
result.run = async (payload) => {
|
||||
receivedPayload = payload;
|
||||
};
|
||||
|
||||
await result.run({ data: 'test' });
|
||||
expect(receivedPayload).toEqual({ data: 'test' });
|
||||
});
|
||||
});
|
||||
});
|
||||
12
app/types/query-result.ts
Normal file
12
app/types/query-result.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import type { ApiError } from './api/error';
|
||||
|
||||
export class QueryResult<T, PayloadT> {
|
||||
/** Reactive data - `null` until the request succeeds */
|
||||
data: Ref<T | null> = ref(null);
|
||||
/** Reactive error - `null` until an error occurs */
|
||||
error: Ref<ApiError | null> = ref(null);
|
||||
/** Whether the request is currently in flight */
|
||||
loading: Ref<boolean> = ref(false);
|
||||
/** Runs the query - Will be filled by the request helper */
|
||||
run!: (requestBody?: PayloadT) => Promise<void>;
|
||||
}
|
||||
129
app/types/resume.test.ts
Normal file
129
app/types/resume.test.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { ResumeExperience, ResumeContent } from './resume';
|
||||
import type { Tool } from './tool';
|
||||
|
||||
describe('ResumeExperience', () => {
|
||||
describe('initialization', () => {
|
||||
it('should initialize with empty tools array', () => {
|
||||
const experience = new ResumeExperience();
|
||||
expect(experience.tools).toEqual([]);
|
||||
});
|
||||
|
||||
it('should initialize with undefined description', () => {
|
||||
const experience = new ResumeExperience();
|
||||
expect(experience.description).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('property assignment', () => {
|
||||
it('should allow tools to be assigned', () => {
|
||||
const experience = new ResumeExperience();
|
||||
const tools: Tool[] = [{ name: 'TypeScript', link: 'https://typescriptlang.org' }, { name: 'Vue.js' }];
|
||||
experience.tools = tools;
|
||||
expect(experience.tools).toEqual(tools);
|
||||
});
|
||||
|
||||
it('should allow description to be assigned', () => {
|
||||
const experience = new ResumeExperience();
|
||||
experience.description = 'Software developer working on web applications';
|
||||
expect(experience.description).toBe('Software developer working on web applications');
|
||||
});
|
||||
});
|
||||
|
||||
describe('TimelineItem interface implementation', () => {
|
||||
it('should be usable as TimelineItem', () => {
|
||||
const experience = new ResumeExperience();
|
||||
// TimelineItem interface from @nuxt/ui - ResumeExperience implements it
|
||||
expect(experience).toHaveProperty('tools');
|
||||
expect(experience).toHaveProperty('description');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('ResumeContent', () => {
|
||||
describe('initialization', () => {
|
||||
it('should initialize with empty experience array', () => {
|
||||
const content = new ResumeContent();
|
||||
expect(content.experience).toEqual([]);
|
||||
});
|
||||
|
||||
it('should initialize with empty education array', () => {
|
||||
const content = new ResumeContent();
|
||||
expect(content.education).toEqual([]);
|
||||
});
|
||||
|
||||
it('should initialize with empty otherTools array', () => {
|
||||
const content = new ResumeContent();
|
||||
expect(content.otherTools).toEqual([]);
|
||||
});
|
||||
|
||||
it('should initialize with empty devops array', () => {
|
||||
const content = new ResumeContent();
|
||||
expect(content.devops).toEqual([]);
|
||||
});
|
||||
|
||||
it('should initialize with empty os array', () => {
|
||||
const content = new ResumeContent();
|
||||
expect(content.os).toEqual([]);
|
||||
});
|
||||
|
||||
it('should initialize with empty programmingLanguages array', () => {
|
||||
const content = new ResumeContent();
|
||||
expect(content.programmingLanguages).toEqual([]);
|
||||
});
|
||||
|
||||
it('should initialize with empty frameworks array', () => {
|
||||
const content = new ResumeContent();
|
||||
expect(content.frameworks).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('property assignment', () => {
|
||||
it('should allow experience to be assigned', () => {
|
||||
const content = new ResumeContent();
|
||||
const exp = new ResumeExperience();
|
||||
exp.description = 'Test job';
|
||||
content.experience = [exp];
|
||||
expect(content.experience.length).toBe(1);
|
||||
expect(content.experience[0].description).toBe('Test job');
|
||||
});
|
||||
|
||||
it('should allow tools arrays to be assigned', () => {
|
||||
const content = new ResumeContent();
|
||||
const tools: Tool[] = [{ name: 'Git', link: 'https://git-scm.com' }];
|
||||
|
||||
content.devops = tools;
|
||||
content.os = [{ name: 'Linux' }];
|
||||
content.programmingLanguages = [{ name: 'Rust', link: 'https://rust-lang.org' }];
|
||||
content.frameworks = [{ name: 'Nuxt', link: 'https://nuxt.com' }];
|
||||
content.otherTools = [{ name: 'Vim' }];
|
||||
|
||||
expect(content.devops).toEqual(tools);
|
||||
expect(content.os).toEqual([{ name: 'Linux' }]);
|
||||
expect(content.programmingLanguages).toEqual([{ name: 'Rust', link: 'https://rust-lang.org' }]);
|
||||
expect(content.frameworks).toEqual([{ name: 'Nuxt', link: 'https://nuxt.com' }]);
|
||||
expect(content.otherTools).toEqual([{ name: 'Vim' }]);
|
||||
});
|
||||
|
||||
it('should allow education to be assigned', () => {
|
||||
const content = new ResumeContent();
|
||||
content.education = [{ title: 'Computer Science', description: 'Master degree' }];
|
||||
expect(content.education.length).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('default values', () => {
|
||||
it('should provide safe defaults when used without data', () => {
|
||||
const content = new ResumeContent();
|
||||
|
||||
// All arrays should be empty but defined
|
||||
expect(Array.isArray(content.experience)).toBe(true);
|
||||
expect(Array.isArray(content.education)).toBe(true);
|
||||
expect(Array.isArray(content.otherTools)).toBe(true);
|
||||
expect(Array.isArray(content.devops)).toBe(true);
|
||||
expect(Array.isArray(content.os)).toBe(true);
|
||||
expect(Array.isArray(content.programmingLanguages)).toBe(true);
|
||||
expect(Array.isArray(content.frameworks)).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
17
app/types/resume.ts
Normal file
17
app/types/resume.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import type { TimelineItem } from '@nuxt/ui';
|
||||
import type { Tool } from './tool';
|
||||
|
||||
export class ResumeExperience implements TimelineItem {
|
||||
tools: Tool[] = [];
|
||||
description?: string;
|
||||
}
|
||||
|
||||
export class ResumeContent {
|
||||
experience: ResumeExperience[] = [];
|
||||
education: TimelineItem[] = [];
|
||||
otherTools: Tool[] = [];
|
||||
devops: Tool[] = [];
|
||||
os: Tool[] = [];
|
||||
programmingLanguages: Tool[] = [];
|
||||
frameworks: Tool[] = [];
|
||||
}
|
||||
5
app/types/social-account.ts
Normal file
5
app/types/social-account.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
export interface SocialAccount {
|
||||
icon: string;
|
||||
label: string;
|
||||
link: string;
|
||||
}
|
||||
4
app/types/tool.ts
Normal file
4
app/types/tool.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export interface Tool {
|
||||
name: string;
|
||||
link?: string;
|
||||
}
|
||||
13
app/types/vocal-synth.ts
Normal file
13
app/types/vocal-synth.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import type { Tool } from './tool';
|
||||
|
||||
export interface VocalSynthProject {
|
||||
title: string;
|
||||
icon: string;
|
||||
description: string;
|
||||
link: string;
|
||||
}
|
||||
|
||||
export interface VocalSynthPage {
|
||||
projects: VocalSynthProject[];
|
||||
tools: Tool[];
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
[all]
|
||||
out = ["Xml"]
|
||||
target-dir = "coverage"
|
||||
output-dir = "coverage"
|
||||
fail-under = 60
|
||||
exclude-files = ["target/*"]
|
||||
@@ -1,7 +0,0 @@
|
||||
[all]
|
||||
out = ["Html", "Lcov"]
|
||||
skip-clean = true
|
||||
target-dir = "coverage"
|
||||
output-dir = "coverage"
|
||||
fail-under = 60
|
||||
exclude-files = ["target/*"]
|
||||
3249
backend/Cargo.lock
generated
3249
backend/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,33 +0,0 @@
|
||||
[package]
|
||||
name = "phundrak-dot-com-backend"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
publish = false
|
||||
authors = ["Lucien Cartier-Tilet <lucien@phundrak.com>"]
|
||||
license = "AGPL-3.0-only"
|
||||
|
||||
[lib]
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[bin]]
|
||||
path = "src/main.rs"
|
||||
name = "phundrak-dot-com-backend"
|
||||
|
||||
[dependencies]
|
||||
chrono = { version = "0.4.42", features = ["serde"] }
|
||||
config = { version = "0.15.18", features = ["yaml"] }
|
||||
dotenvy = "0.15.7"
|
||||
governor = "0.8.0"
|
||||
lettre = { version = "0.11.19", default-features = false, features = ["builder", "hostname", "pool", "rustls-tls", "tokio1", "tokio1-rustls-tls", "smtp-transport"] }
|
||||
poem = { version = "3.1.12", default-features = false, features = ["csrf", "rustls", "test"] }
|
||||
poem-openapi = { version = "5.1.16", features = ["chrono", "swagger-ui"] }
|
||||
serde = "1.0.228"
|
||||
serde_json = "1.0.145"
|
||||
thiserror = "2.0.17"
|
||||
tokio = { version = "1.48.0", features = ["macros", "rt-multi-thread"] }
|
||||
tracing = "0.1.41"
|
||||
tracing-subscriber = { version = "0.3.20", features = ["fmt", "std", "env-filter", "registry", "json", "tracing-log"] }
|
||||
validator = { version = "0.20.0", features = ["derive"] }
|
||||
|
||||
[lints.rust]
|
||||
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(tarpaulin_include)'] }
|
||||
@@ -1,424 +0,0 @@
|
||||
# phundrak.com Backend
|
||||
|
||||
The backend for [phundrak.com](https://phundrak.com), built with Rust and the [Poem](https://github.com/poem-web/poem) web framework.
|
||||
|
||||
## Features
|
||||
|
||||
- **RESTful API** with automatic OpenAPI/Swagger documentation
|
||||
- **Rate limiting** with configurable per-second limits using the
|
||||
Generic Cell Rate Algorithm (thanks to
|
||||
[`governor`](https://github.com/boinkor-net/governor))
|
||||
- **Contact form** with SMTP email relay (supports TLS, STARTTLS, and
|
||||
unencrypted)
|
||||
- **Type-safe routing** using Poem's declarative API
|
||||
- **Hierarchical configuration** with YAML files and environment
|
||||
variable overrides
|
||||
- **Structured logging** with `tracing` and `tracing-subscriber`
|
||||
- **Strict linting** for code quality and safety
|
||||
- **Comprehensive testing** with integration test support
|
||||
|
||||
## API Endpoints
|
||||
|
||||
The application provides the following endpoints:
|
||||
|
||||
- **Swagger UI**: `/` - Interactive API documentation
|
||||
- **OpenAPI Spec**: `/specs` - OpenAPI specification in YAML format
|
||||
- **Health Check**: `GET /api/health` - Returns server health status
|
||||
- **Application Metadata**: `GET /api/meta` - Returns version and build info
|
||||
- **Contact Form**: `POST /api/contact` - Submit contact form (relays to SMTP)
|
||||
|
||||
## Configuration
|
||||
|
||||
Configuration is loaded from multiple sources in order of precedence:
|
||||
|
||||
1. `settings/base.yaml` - Base configuration
|
||||
2. `settings/{environment}.yaml` - Environment-specific (development/production)
|
||||
3. Environment variables prefixed with `APP__` (e.g., `APP__APPLICATION__PORT=8080`)
|
||||
|
||||
The environment is determined by the `APP_ENVIRONMENT` variable (defaults to "development").
|
||||
|
||||
### Configuration Example
|
||||
|
||||
```yaml
|
||||
application:
|
||||
port: 3100
|
||||
version: "0.1.0"
|
||||
|
||||
email:
|
||||
host: smtp.example.com
|
||||
port: 587
|
||||
user: user@example.com
|
||||
from: Contact Form <noreply@example.com>
|
||||
password: your_password
|
||||
recipient: Admin <admin@example.com>
|
||||
starttls: true # Use STARTTLS (typically port 587)
|
||||
tls: false # Use implicit TLS (typically port 465)
|
||||
|
||||
rate_limit:
|
||||
enabled: true # Enable/disable rate limiting
|
||||
burst_size: 10 # Maximum requests allowed in time window
|
||||
per_seconds: 60 # Time window in seconds (100 req/60s = ~1.67 req/s)
|
||||
```
|
||||
|
||||
You can also use a `.env` file for local development settings.
|
||||
|
||||
### Rate Limiting
|
||||
|
||||
The application includes built-in rate limiting to protect against abuse:
|
||||
|
||||
- Uses the **Generic Cell Rate Algorithm (GCRA)** via the `governor` crate
|
||||
- **In-memory rate limiting** - no external dependencies like Redis required
|
||||
- **Configurable limits** via YAML configuration or environment variables
|
||||
- **Per-second rate limiting** with burst support
|
||||
- Returns `429 Too Many Requests` when limits are exceeded
|
||||
|
||||
Default configuration: 100 requests per 60 seconds (approximately 1.67 requests per second with burst capacity).
|
||||
|
||||
To disable rate limiting, set `rate_limit.enabled: false` in your configuration.
|
||||
|
||||
## Development
|
||||
|
||||
### Prerequisites
|
||||
|
||||
**Option 1: Native Development**
|
||||
- Rust (latest stable version recommended)
|
||||
- Cargo (comes with Rust)
|
||||
|
||||
**Option 2: Nix Development (Recommended)**
|
||||
- [Nix](https://nixos.org/download) with flakes enabled
|
||||
- All dependencies managed automatically
|
||||
|
||||
### Running the Server
|
||||
|
||||
**With Cargo:**
|
||||
```bash
|
||||
cargo run
|
||||
```
|
||||
|
||||
**With Nix development shell:**
|
||||
```bash
|
||||
nix develop .#backend
|
||||
cargo run
|
||||
```
|
||||
|
||||
The server will start on the configured port (default: 3100).
|
||||
|
||||
### Building
|
||||
|
||||
**With Cargo:**
|
||||
|
||||
For development builds:
|
||||
```bash
|
||||
cargo build
|
||||
```
|
||||
|
||||
For optimized production builds:
|
||||
```bash
|
||||
cargo build --release
|
||||
```
|
||||
|
||||
The compiled binary will be at `target/release/backend`.
|
||||
|
||||
**With Nix:**
|
||||
|
||||
Build the backend binary:
|
||||
```bash
|
||||
nix build .#backend
|
||||
# Binary available at: ./result/bin/backend
|
||||
```
|
||||
|
||||
Build Docker images:
|
||||
```bash
|
||||
# Build versioned Docker image (e.g., 0.1.0)
|
||||
nix build .#backendDocker
|
||||
|
||||
# Build latest Docker image
|
||||
nix build .#backendDockerLatest
|
||||
|
||||
# Load into Docker
|
||||
docker load < result
|
||||
# Image will be available as: localhost/phundrak/backend-rust:latest
|
||||
```
|
||||
|
||||
The Nix build ensures reproducible builds with all dependencies pinned.
|
||||
|
||||
## Testing
|
||||
|
||||
Run all tests:
|
||||
|
||||
```bash
|
||||
cargo test
|
||||
# or
|
||||
just test
|
||||
```
|
||||
|
||||
Run a specific test:
|
||||
|
||||
```bash
|
||||
cargo test <test_name>
|
||||
```
|
||||
|
||||
Run tests with output:
|
||||
|
||||
```bash
|
||||
cargo test -- --nocapture
|
||||
```
|
||||
|
||||
Run tests with coverage:
|
||||
|
||||
```bash
|
||||
cargo tarpaulin --config .tarpaulin.local.toml
|
||||
# or
|
||||
just coverage
|
||||
```
|
||||
|
||||
### Testing Notes
|
||||
|
||||
- Integration tests use random TCP ports to avoid conflicts
|
||||
- Tests use `get_test_app()` helper for consistent test setup
|
||||
- Telemetry is automatically disabled during tests
|
||||
- Tests are organized in `#[cfg(test)]` modules within each file
|
||||
|
||||
## Code Quality
|
||||
|
||||
### Linting
|
||||
|
||||
This project uses extremely strict Clippy linting rules:
|
||||
|
||||
- `#![deny(clippy::all)]`
|
||||
- `#![deny(clippy::pedantic)]`
|
||||
- `#![deny(clippy::nursery)]`
|
||||
- `#![warn(missing_docs)]`
|
||||
|
||||
Run Clippy to check for issues:
|
||||
|
||||
```bash
|
||||
cargo clippy --all-targets
|
||||
# or
|
||||
just lint
|
||||
```
|
||||
|
||||
All code must pass these checks before committing.
|
||||
|
||||
### Continuous Checking with Bacon
|
||||
|
||||
For continuous testing and linting during development, use [bacon](https://dystroy.org/bacon/):
|
||||
|
||||
```bash
|
||||
bacon # Runs clippy-all by default
|
||||
bacon test # Runs tests continuously
|
||||
bacon clippy # Runs clippy on default target only
|
||||
```
|
||||
|
||||
Press 'c' in bacon to run clippy-all.
|
||||
|
||||
## Code Style
|
||||
|
||||
### Error Handling
|
||||
|
||||
- Use `thiserror` for custom error types
|
||||
- Always return `Result` types for fallible operations
|
||||
- Use descriptive error messages
|
||||
|
||||
### Logging
|
||||
|
||||
Always use `tracing::event!` with proper target and level:
|
||||
|
||||
```rust
|
||||
tracing::event!(
|
||||
target: "backend", // or "backend::module_name"
|
||||
tracing::Level::INFO,
|
||||
"Message here"
|
||||
);
|
||||
```
|
||||
|
||||
### Imports
|
||||
|
||||
Organize imports in three groups:
|
||||
1. Standard library (`std::*`)
|
||||
2. External crates (poem, serde, etc.)
|
||||
3. Local modules (`crate::*`)
|
||||
|
||||
### Testing Conventions
|
||||
|
||||
- Use `#[tokio::test]` for async tests
|
||||
- Use descriptive test names that explain what is being tested
|
||||
- Test both success and error cases
|
||||
- For endpoint tests, verify both status codes and response bodies
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
backend/
|
||||
├── src/
|
||||
│ ├── main.rs # Application entry point
|
||||
│ ├── lib.rs # Library root with run() and prepare()
|
||||
│ ├── startup.rs # Application builder, server setup
|
||||
│ ├── settings.rs # Configuration management
|
||||
│ ├── telemetry.rs # Logging and tracing setup
|
||||
│ ├── middleware/ # Custom middleware
|
||||
│ │ ├── mod.rs # Middleware module
|
||||
│ │ └── rate_limit.rs # Rate limiting middleware
|
||||
│ └── route/ # API route handlers
|
||||
│ ├── mod.rs # Route organization
|
||||
│ ├── contact.rs # Contact form endpoint
|
||||
│ ├── health.rs # Health check endpoint
|
||||
│ └── meta.rs # Metadata endpoint
|
||||
├── settings/ # Configuration files
|
||||
│ ├── base.yaml # Base configuration
|
||||
│ ├── development.yaml # Development overrides
|
||||
│ └── production.yaml # Production overrides
|
||||
├── Cargo.toml # Dependencies and metadata
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
### Application Initialization Flow
|
||||
|
||||
1. `main.rs` calls `run()` from `lib.rs`
|
||||
2. `run()` calls `prepare()` which:
|
||||
- Loads environment variables from `.env` file
|
||||
- Initializes `Settings` from YAML files and environment variables
|
||||
- Sets up telemetry/logging (unless in test mode)
|
||||
- Builds the `Application` with optional TCP listener
|
||||
3. `Application::build()`:
|
||||
- Sets up OpenAPI service with all API endpoints
|
||||
- Configures Swagger UI at the root path (`/`)
|
||||
- Configures API routes under `/api` prefix
|
||||
- Creates server with TCP listener
|
||||
4. Application runs with CORS middleware and settings injected as data
|
||||
|
||||
### Email Handling
|
||||
|
||||
The contact form supports multiple SMTP configurations:
|
||||
- **Implicit TLS (SMTPS)** - typically port 465
|
||||
- **STARTTLS (Always/Opportunistic)** - typically port 587
|
||||
- **Unencrypted** (for local dev) - with or without authentication
|
||||
|
||||
The `SmtpTransport` is built dynamically from `EmailSettings` based on
|
||||
TLS/STARTTLS configuration.
|
||||
|
||||
## Docker Deployment
|
||||
|
||||
### Using Pre-built Images
|
||||
|
||||
Docker images are automatically built and published via GitHub Actions to the configured container registry.
|
||||
|
||||
Pull and run the latest image:
|
||||
```bash
|
||||
# Pull from Phundrak Labs (labs.phundrak.com)
|
||||
docker pull labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest
|
||||
|
||||
# Run the container
|
||||
docker run -d \
|
||||
--name phundrak-backend \
|
||||
-p 3100:3100 \
|
||||
-e APP__APPLICATION__PORT=3100 \
|
||||
-e APP__EMAIL__HOST=smtp.example.com \
|
||||
-e APP__EMAIL__PORT=587 \
|
||||
-e APP__EMAIL__USER=user@example.com \
|
||||
-e APP__EMAIL__PASSWORD=your_password \
|
||||
-e APP__EMAIL__FROM="Contact Form <noreply@example.com>" \
|
||||
-e APP__EMAIL__RECIPIENT="Admin <admin@example.com>" \
|
||||
labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest
|
||||
```
|
||||
|
||||
### Available Image Tags
|
||||
|
||||
The following tags are automatically published:
|
||||
|
||||
- `latest` - Latest stable release (from tagged commits on `main`)
|
||||
- `<version>` - Specific version (e.g., `1.0.0`, from tagged commits like `v1.0.0`)
|
||||
- `develop` - Latest development build (from `develop` branch)
|
||||
- `pr<number>` - Pull request preview builds (e.g., `pr12`)
|
||||
|
||||
### Building Images Locally
|
||||
|
||||
Build with Nix (recommended for reproducibility):
|
||||
```bash
|
||||
nix build .#backendDockerLatest
|
||||
docker load < result
|
||||
docker run -p 3100:3100 localhost/phundrak/backend-rust:latest
|
||||
```
|
||||
|
||||
Build with Docker directly:
|
||||
```bash
|
||||
# Note: This requires a Dockerfile (not included in this project)
|
||||
# Use Nix builds for containerization
|
||||
```
|
||||
|
||||
### Docker Compose Example
|
||||
|
||||
```yaml
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
backend:
|
||||
image: labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest
|
||||
ports:
|
||||
- "3100:3100"
|
||||
environment:
|
||||
APP__APPLICATION__PORT: 3100
|
||||
APP__EMAIL__HOST: smtp.example.com
|
||||
APP__EMAIL__PORT: 587
|
||||
APP__EMAIL__USER: ${SMTP_USER}
|
||||
APP__EMAIL__PASSWORD: ${SMTP_PASSWORD}
|
||||
APP__EMAIL__FROM: "Contact Form <noreply@example.com>"
|
||||
APP__EMAIL__RECIPIENT: "Admin <admin@example.com>"
|
||||
APP__EMAIL__STARTTLS: true
|
||||
APP__RATE_LIMIT__ENABLED: true
|
||||
APP__RATE_LIMIT__BURST_SIZE: 10
|
||||
APP__RATE_LIMIT__PER_SECONDS: 60
|
||||
restart: unless-stopped
|
||||
```
|
||||
|
||||
## CI/CD Pipeline
|
||||
|
||||
### Automated Docker Publishing
|
||||
|
||||
GitHub Actions automatically builds and publishes Docker images based on repository events:
|
||||
|
||||
| Event Type | Trigger | Published Tags |
|
||||
|-----------------|------------------------------|-------------------------------|
|
||||
| Tag push | `v*.*.*` tag on `main` | `latest`, `<version>` |
|
||||
| Branch push | Push to `develop` | `develop` |
|
||||
| Pull request | PR opened/updated | `pr<number>` |
|
||||
| Branch push | Push to `main` (no tag) | `latest` |
|
||||
|
||||
### Workflow Details
|
||||
|
||||
The CI/CD pipeline (`.github/workflows/publish-docker.yml`):
|
||||
|
||||
1. **Checks out the repository**
|
||||
2. **Installs Nix** with flakes enabled
|
||||
3. **Builds the Docker image** using Nix for reproducibility
|
||||
4. **Authenticates** with the configured Docker registry
|
||||
5. **Tags and pushes** images based on the event type
|
||||
|
||||
### Registry Configuration
|
||||
|
||||
Images are published to the registry specified by the `DOCKER_REGISTRY` environment variable in the workflow (default: `labs.phundrak.com`).
|
||||
|
||||
To use the published images, authenticate with the registry:
|
||||
|
||||
```bash
|
||||
# For Phundrak Labs (labs.phundrak.com)
|
||||
echo $GITHUB_TOKEN | docker login labs.phundrak.com -u USERNAME --password-stdin
|
||||
|
||||
# Pull the image
|
||||
docker pull labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest
|
||||
```
|
||||
|
||||
### Required Secrets
|
||||
|
||||
The workflow requires these GitHub secrets:
|
||||
- `DOCKER_USERNAME` - Registry username
|
||||
- `DOCKER_PASSWORD` - Registry password or token
|
||||
- `CACHIX_AUTH_TOKEN` - (Optional) For Nix build caching
|
||||
|
||||
See [.github/workflows/README.md](../.github/workflows/README.md) for detailed setup instructions.
|
||||
|
||||
## License
|
||||
|
||||
AGPL-3.0-only - See the root repository for full license information.
|
||||
@@ -1,84 +0,0 @@
|
||||
# This is a configuration file for the bacon tool
|
||||
#
|
||||
# Bacon repository: https://github.com/Canop/bacon
|
||||
# Complete help on configuration: https://dystroy.org/bacon/config/
|
||||
# You can also check bacon's own bacon.toml file
|
||||
# as an example: https://github.com/Canop/bacon/blob/main/bacon.toml
|
||||
|
||||
default_job = "clippy-all"
|
||||
|
||||
[jobs.check]
|
||||
command = ["cargo", "check", "--color", "always"]
|
||||
need_stdout = false
|
||||
|
||||
[jobs.check-all]
|
||||
command = ["cargo", "check", "--all-targets", "--color", "always"]
|
||||
need_stdout = false
|
||||
|
||||
# Run clippy on the default target
|
||||
[jobs.clippy]
|
||||
command = [
|
||||
"cargo", "clippy",
|
||||
"--color", "always",
|
||||
]
|
||||
need_stdout = false
|
||||
|
||||
[jobs.clippy-all]
|
||||
command = [
|
||||
"cargo", "clippy",
|
||||
"--all-targets",
|
||||
"--color", "always",
|
||||
]
|
||||
need_stdout = false
|
||||
|
||||
[jobs.test]
|
||||
command = [
|
||||
"cargo", "test", "--color", "always",
|
||||
"--", "--color", "always", # see https://github.com/Canop/bacon/issues/124
|
||||
]
|
||||
need_stdout = true
|
||||
|
||||
[jobs.doc]
|
||||
command = ["cargo", "doc", "--color", "always", "--no-deps"]
|
||||
need_stdout = false
|
||||
|
||||
# If the doc compiles, then it opens in your browser and bacon switches
|
||||
# to the previous job
|
||||
[jobs.doc-open]
|
||||
command = ["cargo", "doc", "--color", "always", "--no-deps", "--open"]
|
||||
need_stdout = false
|
||||
on_success = "back" # so that we don't open the browser at each change
|
||||
|
||||
# You can run your application and have the result displayed in bacon,
|
||||
# *if* it makes sense for this crate.
|
||||
# Don't forget the `--color always` part or the errors won't be
|
||||
# properly parsed.
|
||||
# If your program never stops (eg a server), you may set `background`
|
||||
# to false to have the cargo run output immediately displayed instead
|
||||
# of waiting for program's end.
|
||||
[jobs.run]
|
||||
command = [
|
||||
"cargo", "run",
|
||||
"--color", "always",
|
||||
# put launch parameters for your program behind a `--` separator
|
||||
]
|
||||
need_stdout = true
|
||||
allow_warnings = true
|
||||
background = true
|
||||
|
||||
# This parameterized job runs the example of your choice, as soon
|
||||
# as the code compiles.
|
||||
# Call it as
|
||||
# bacon ex -- my-example
|
||||
[jobs.ex]
|
||||
command = ["cargo", "run", "--color", "always", "--example"]
|
||||
need_stdout = true
|
||||
allow_warnings = true
|
||||
|
||||
# You may define here keybindings that would be specific to
|
||||
# a project, for example a shortcut to launch a specific job.
|
||||
# Shortcuts to internal functions (scrolling, toggling, etc.)
|
||||
# should go in your personal global prefs.toml file instead.
|
||||
[keybindings]
|
||||
# alt-m = "job:my-job"
|
||||
c = "job:clippy-all" # comment this to have 'c' run clippy on only the default target
|
||||
@@ -1,51 +0,0 @@
|
||||
[output]
|
||||
feature-depth = 1
|
||||
|
||||
[advisories]
|
||||
ignore = []
|
||||
|
||||
[licenses]
|
||||
# List of explicitly allowed licenses
|
||||
# See https://spdx.org/licenses/ for list of possible licenses
|
||||
allow = [
|
||||
"0BSD",
|
||||
"AGPL-3.0-only",
|
||||
"Apache-2.0 WITH LLVM-exception",
|
||||
"Apache-2.0",
|
||||
"BSD-3-Clause",
|
||||
"CDLA-Permissive-2.0",
|
||||
"ISC",
|
||||
"MIT",
|
||||
"MPL-2.0",
|
||||
"OpenSSL",
|
||||
"Unicode-3.0",
|
||||
"Zlib",
|
||||
]
|
||||
confidence-threshold = 0.8
|
||||
exceptions = []
|
||||
|
||||
[licenses.private]
|
||||
ignore = false
|
||||
registries = []
|
||||
|
||||
[bans]
|
||||
multiple-versions = "allow"
|
||||
wildcards = "allow"
|
||||
highlight = "all"
|
||||
workspace-default-features = "allow"
|
||||
external-default-features = "allow"
|
||||
allow = []
|
||||
deny = []
|
||||
skip = []
|
||||
skip-tree = []
|
||||
|
||||
[sources]
|
||||
unknown-registry = "deny"
|
||||
unknown-git = "deny"
|
||||
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
|
||||
allow-git = []
|
||||
|
||||
[sources.allow-org]
|
||||
github = []
|
||||
gitlab = []
|
||||
bitbucket = []
|
||||
@@ -1,48 +0,0 @@
|
||||
default: run
|
||||
|
||||
run:
|
||||
cargo run
|
||||
|
||||
run-release:
|
||||
cargo run --release
|
||||
|
||||
format:
|
||||
cargo fmt --all
|
||||
|
||||
format-check:
|
||||
cargo fmt --check --all
|
||||
|
||||
audit:
|
||||
cargo deny
|
||||
|
||||
build:
|
||||
cargo build
|
||||
|
||||
build-release:
|
||||
cargo build --release
|
||||
|
||||
lint:
|
||||
cargo clippy --all-targets
|
||||
|
||||
release-build:
|
||||
cargo build --release
|
||||
|
||||
release-run:
|
||||
cargo run --release
|
||||
|
||||
test:
|
||||
cargo test
|
||||
|
||||
coverage:
|
||||
mkdir -p coverage
|
||||
cargo tarpaulin --config .tarpaulin.local.toml
|
||||
|
||||
coverage-ci:
|
||||
mkdir -p coverage
|
||||
cargo tarpaulin --config .tarpaulin.ci.toml
|
||||
|
||||
check-all: format-check lint coverage audit
|
||||
|
||||
## Local Variables:
|
||||
## mode: makefile
|
||||
## End:
|
||||
@@ -1,60 +0,0 @@
|
||||
{
|
||||
rust-overlay,
|
||||
inputs,
|
||||
system,
|
||||
...
|
||||
}: let
|
||||
rust = import ./rust-version.nix { inherit rust-overlay inputs system; };
|
||||
pkgs = rust.pkgs;
|
||||
rustPlatform = pkgs.makeRustPlatform {
|
||||
cargo = rust.version;
|
||||
rustc = rust.version;
|
||||
};
|
||||
cargoToml = builtins.fromTOML (builtins.readFile ../Cargo.toml);
|
||||
name = cargoToml.package.name;
|
||||
version = cargoToml.package.version;
|
||||
rustBuild = rustPlatform.buildRustPackage {
|
||||
pname = name;
|
||||
inherit version;
|
||||
src = ../.;
|
||||
cargoLock.lockFile = ../Cargo.lock;
|
||||
};
|
||||
settingsDir = pkgs.runCommand "settings" {} ''
|
||||
mkdir -p $out/settings
|
||||
cp ${../settings}/*.yaml $out/settings/
|
||||
'';
|
||||
makeDockerImage = tag:
|
||||
pkgs.dockerTools.buildLayeredImage {
|
||||
name = "phundrak/${name}";
|
||||
inherit tag;
|
||||
created = "now";
|
||||
config = {
|
||||
Entrypoint = ["${rustBuild}/bin/${name}"];
|
||||
WorkingDir = "/";
|
||||
Env = [
|
||||
"SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt"
|
||||
];
|
||||
ExposedPorts = {
|
||||
"3100/tcp" = {};
|
||||
};
|
||||
Labels = {
|
||||
"org.opencontainers.image.title" = name;
|
||||
"org.opencontainers.image.version" = version;
|
||||
"org.opencontainers.image.description" = "REST API backend for phundrak.com";
|
||||
"org.opencontainers.image.authors" = "Lucien Cartier-Tilet <lucien@phundrak.com>";
|
||||
"org.opencontainers.image.licenses" = "AGPL-3.0-only";
|
||||
"org.opencontainers.image.source" = "https://labs.phundrak.com/phundrak/phundrak.com";
|
||||
"org.opencontainers.image.url" = "https://labs.phundrak.com/phundrak/phundrak.com";
|
||||
"org.opencontainers.image.documentation" = "https://labs.phundrak.com/phundrak/phundrak.com";
|
||||
"org.opencontainers.image.vendor" = "Phundrak";
|
||||
};
|
||||
};
|
||||
contents = [rustBuild pkgs.cacert settingsDir];
|
||||
};
|
||||
dockerImageLatest = makeDockerImage "latest";
|
||||
dockerImageVersioned = makeDockerImage version;
|
||||
in {
|
||||
backend = rustBuild;
|
||||
backendDocker = dockerImageVersioned;
|
||||
backendDockerLatest = dockerImageLatest;
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
{rust-overlay, inputs, system, ...}: let
|
||||
overlays = [(import rust-overlay)];
|
||||
in rec {
|
||||
pkgs = import inputs.nixpkgs {inherit system overlays;};
|
||||
version = pkgs.rust-bin.stable.latest.default;
|
||||
}
|
||||
@@ -1,75 +0,0 @@
|
||||
{
|
||||
inputs,
|
||||
pkgs,
|
||||
system,
|
||||
self,
|
||||
rust-overlay,
|
||||
...
|
||||
}: let
|
||||
rustPlatform = import ./rust-version.nix { inherit rust-overlay inputs system; };
|
||||
in
|
||||
inputs.devenv.lib.mkShell {
|
||||
inherit inputs pkgs;
|
||||
modules = [
|
||||
{
|
||||
devenv.root = let
|
||||
devenvRootFileContent = builtins.readFile "${self}/.devenv-root";
|
||||
in
|
||||
pkgs.lib.mkIf (devenvRootFileContent != "") devenvRootFileContent;
|
||||
}
|
||||
{
|
||||
packages = with rustPlatform.pkgs; [
|
||||
(rustPlatform.version.override {
|
||||
extensions = [
|
||||
"clippy"
|
||||
"rust-src"
|
||||
"rust-analyzer"
|
||||
"rustfmt"
|
||||
];
|
||||
})
|
||||
bacon
|
||||
cargo-deny
|
||||
cargo-shuttle
|
||||
cargo-tarpaulin
|
||||
cargo-watch
|
||||
flyctl
|
||||
just
|
||||
marksman
|
||||
tombi # TOML lsp server
|
||||
];
|
||||
|
||||
services.mailpit = {
|
||||
enable = true;
|
||||
# HTTP interface for viewing emails
|
||||
uiListenAddress = "127.0.0.1:8025";
|
||||
# SMTP server for receiving emails
|
||||
smtpListenAddress = "127.0.0.1:1025";
|
||||
};
|
||||
|
||||
processes.run.exec = "cargo watch -x run";
|
||||
|
||||
enterShell = ''
|
||||
echo "🦀 Rust backend development environment loaded!"
|
||||
echo "📦 Rust version: $(rustc --version)"
|
||||
echo "📦 Cargo version: $(cargo --version)"
|
||||
echo ""
|
||||
echo "Available tools:"
|
||||
echo " - rust-analyzer (LSP)"
|
||||
echo " - clippy (linter)"
|
||||
echo " - rustfmt (formatter)"
|
||||
echo " - bacon (continuous testing/linting)"
|
||||
echo " - cargo-deny (dependency checker)"
|
||||
echo " - cargo-tarpaulin (code coverage)"
|
||||
echo ""
|
||||
echo "📧 Mailpit service:"
|
||||
echo " - SMTP server: 127.0.0.1:1025"
|
||||
echo " - Web UI: http://127.0.0.1:8025"
|
||||
echo ""
|
||||
echo "🚀 Quick start:"
|
||||
echo " Run 'devenv up' to launch:"
|
||||
echo " - Mailpit service (email testing)"
|
||||
echo " - Backend with 'cargo watch -x run' (auto-reload)"
|
||||
'';
|
||||
}
|
||||
];
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
application:
|
||||
port: 3100
|
||||
version: "0.1.0"
|
||||
|
||||
rate_limit:
|
||||
enabled: true
|
||||
burst_size: 10
|
||||
per_seconds: 60
|
||||
@@ -1,18 +0,0 @@
|
||||
frontend_url: http://localhost:3000
|
||||
debug: true
|
||||
|
||||
application:
|
||||
protocol: http
|
||||
host: 127.0.0.1
|
||||
base_url: http://127.0.0.1:3100
|
||||
name: "com.phundrak.backend.dev"
|
||||
|
||||
email:
|
||||
host: localhost
|
||||
port: 1025
|
||||
user: ""
|
||||
password: ""
|
||||
from: Contact Form <noreply@example.com>
|
||||
recipient: Admin <user@example.com>
|
||||
tls: false
|
||||
starttls: false
|
||||
@@ -1,18 +0,0 @@
|
||||
debug: false
|
||||
frontend_url: ""
|
||||
|
||||
application:
|
||||
name: "com.phundrak.backend.prod"
|
||||
protocol: https
|
||||
host: 0.0.0.0
|
||||
base_url: ""
|
||||
|
||||
email:
|
||||
host: ""
|
||||
port: 0
|
||||
user: ""
|
||||
password: ""
|
||||
from: ""
|
||||
recipient: ""
|
||||
tls: false
|
||||
starttls: false
|
||||
@@ -1,82 +0,0 @@
|
||||
//! Backend API server for phundrak.com
|
||||
//!
|
||||
//! This is a REST API built with the Poem framework that provides:
|
||||
//! - Health check endpoints
|
||||
//! - Application metadata endpoints
|
||||
//! - Contact form submission with email integration
|
||||
|
||||
#![deny(clippy::all)]
|
||||
#![deny(clippy::pedantic)]
|
||||
#![deny(clippy::nursery)]
|
||||
#![warn(missing_docs)]
|
||||
#![allow(clippy::unused_async)]
|
||||
|
||||
/// Custom middleware implementations
|
||||
pub mod middleware;
|
||||
/// API route handlers and endpoints
|
||||
pub mod route;
|
||||
/// Application configuration settings
|
||||
pub mod settings;
|
||||
/// Application startup and server configuration
|
||||
pub mod startup;
|
||||
/// Logging and tracing setup
|
||||
pub mod telemetry;
|
||||
|
||||
type MaybeListener = Option<poem::listener::TcpListener<String>>;
|
||||
|
||||
fn prepare(listener: MaybeListener) -> startup::Application {
|
||||
dotenvy::dotenv().ok();
|
||||
let settings = settings::Settings::new().expect("Failed to read settings");
|
||||
if !cfg!(test) {
|
||||
let subscriber = telemetry::get_subscriber(settings.debug);
|
||||
telemetry::init_subscriber(subscriber);
|
||||
}
|
||||
tracing::event!(
|
||||
target: "backend",
|
||||
tracing::Level::DEBUG,
|
||||
"Using these settings: {:?}",
|
||||
settings
|
||||
);
|
||||
let application = startup::Application::build(settings, listener);
|
||||
tracing::event!(
|
||||
target: "backend",
|
||||
tracing::Level::INFO,
|
||||
"Listening on http://{}:{}/",
|
||||
application.host(),
|
||||
application.port()
|
||||
);
|
||||
tracing::event!(
|
||||
target: "backend",
|
||||
tracing::Level::INFO,
|
||||
"Documentation available at http://{}:{}/",
|
||||
application.host(),
|
||||
application.port()
|
||||
);
|
||||
application
|
||||
}
|
||||
|
||||
/// Runs the application with the specified TCP listener.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns a `std::io::Error` if the server fails to start or encounters
|
||||
/// an I/O error during runtime (e.g., port already in use, network issues).
|
||||
#[cfg(not(tarpaulin_include))]
|
||||
pub async fn run(listener: MaybeListener) -> Result<(), std::io::Error> {
|
||||
let application = prepare(listener);
|
||||
application.make_app().run().await
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn make_random_tcp_listener() -> poem::listener::TcpListener<String> {
|
||||
let tcp_listener =
|
||||
std::net::TcpListener::bind("127.0.0.1:0").expect("Failed to bind a random TCP listener");
|
||||
let port = tcp_listener.local_addr().unwrap().port();
|
||||
poem::listener::TcpListener::bind(format!("127.0.0.1:{port}"))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn get_test_app() -> startup::App {
|
||||
let tcp_listener = make_random_tcp_listener();
|
||||
prepare(Some(tcp_listener)).make_app().into()
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
//! Backend server entry point.
|
||||
|
||||
#[cfg(not(tarpaulin_include))]
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<(), std::io::Error> {
|
||||
phundrak_dot_com_backend::run(None).await
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
//! Custom middleware for the application.
|
||||
//!
|
||||
//! This module contains custom middleware implementations including rate limiting.
|
||||
|
||||
pub mod rate_limit;
|
||||
@@ -1,211 +0,0 @@
|
||||
//! Rate limiting middleware using the governor crate.
|
||||
//!
|
||||
//! This middleware implements per-IP rate limiting using the Generic Cell Rate
|
||||
//! Algorithm (GCRA) via the governor crate. It stores rate limiters in memory
|
||||
//! without requiring external dependencies like Redis.
|
||||
|
||||
use std::{
|
||||
net::IpAddr,
|
||||
num::NonZeroU32,
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use governor::{
|
||||
clock::DefaultClock,
|
||||
state::{InMemoryState, NotKeyed},
|
||||
Quota, RateLimiter,
|
||||
};
|
||||
use poem::{
|
||||
Endpoint, Error, IntoResponse, Middleware, Request, Response, Result,
|
||||
};
|
||||
|
||||
/// Rate limiting configuration.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RateLimitConfig {
|
||||
/// Maximum number of requests allowed in the time window (burst size).
|
||||
pub burst_size: u32,
|
||||
/// Time window in seconds for rate limiting.
|
||||
pub per_seconds: u64,
|
||||
}
|
||||
|
||||
impl RateLimitConfig {
|
||||
/// Creates a new rate limit configuration.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `burst_size` - Maximum number of requests allowed in the time window
|
||||
/// * `per_seconds` - Time window in seconds
|
||||
#[must_use]
|
||||
pub const fn new(burst_size: u32, per_seconds: u64) -> Self {
|
||||
Self {
|
||||
burst_size,
|
||||
per_seconds,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a rate limiter from this configuration.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if `burst_size` is zero.
|
||||
#[must_use]
|
||||
pub fn create_limiter(&self) -> RateLimiter<NotKeyed, InMemoryState, DefaultClock> {
|
||||
let quota = Quota::with_period(Duration::from_secs(self.per_seconds))
|
||||
.expect("Failed to create quota")
|
||||
.allow_burst(NonZeroU32::new(self.burst_size).expect("Burst size must be non-zero"));
|
||||
RateLimiter::direct(quota)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for RateLimitConfig {
|
||||
fn default() -> Self {
|
||||
// Default: 10 requests per second with burst of 20
|
||||
Self::new(20, 1)
|
||||
}
|
||||
}
|
||||
|
||||
/// Middleware for rate limiting based on IP address.
|
||||
pub struct RateLimit {
|
||||
limiter: Arc<RateLimiter<NotKeyed, InMemoryState, DefaultClock>>,
|
||||
}
|
||||
|
||||
impl RateLimit {
|
||||
/// Creates a new rate limiting middleware with the given configuration.
|
||||
#[must_use]
|
||||
pub fn new(config: &RateLimitConfig) -> Self {
|
||||
Self {
|
||||
limiter: Arc::new(config.create_limiter()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Endpoint> Middleware<E> for RateLimit {
|
||||
type Output = RateLimitEndpoint<E>;
|
||||
|
||||
fn transform(&self, ep: E) -> Self::Output {
|
||||
RateLimitEndpoint {
|
||||
endpoint: ep,
|
||||
limiter: self.limiter.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The endpoint wrapper that performs rate limiting checks.
|
||||
pub struct RateLimitEndpoint<E> {
|
||||
endpoint: E,
|
||||
limiter: Arc<RateLimiter<NotKeyed, InMemoryState, DefaultClock>>,
|
||||
}
|
||||
|
||||
impl<E: Endpoint> Endpoint for RateLimitEndpoint<E> {
|
||||
type Output = Response;
|
||||
|
||||
async fn call(&self, req: Request) -> Result<Self::Output> {
|
||||
// Check rate limit
|
||||
if self.limiter.check().is_err() {
|
||||
let client_ip = Self::get_client_ip(&req)
|
||||
.map_or_else(|| "unknown".to_string(), |ip| ip.to_string());
|
||||
|
||||
tracing::event!(
|
||||
target: "backend::middleware::rate_limit",
|
||||
tracing::Level::WARN,
|
||||
client_ip = %client_ip,
|
||||
"Rate limit exceeded"
|
||||
);
|
||||
|
||||
return Err(Error::from_status(poem::http::StatusCode::TOO_MANY_REQUESTS));
|
||||
}
|
||||
|
||||
// Process the request
|
||||
let response = self.endpoint.call(req).await;
|
||||
response.map(IntoResponse::into_response)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E> RateLimitEndpoint<E> {
|
||||
/// Extracts the client IP address from the request.
|
||||
fn get_client_ip(req: &Request) -> Option<IpAddr> {
|
||||
req.remote_addr().as_socket_addr().map(std::net::SocketAddr::ip)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn rate_limit_config_new() {
|
||||
let config = RateLimitConfig::new(10, 60);
|
||||
assert_eq!(config.burst_size, 10);
|
||||
assert_eq!(config.per_seconds, 60);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rate_limit_config_default() {
|
||||
let config = RateLimitConfig::default();
|
||||
assert_eq!(config.burst_size, 20);
|
||||
assert_eq!(config.per_seconds, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rate_limit_config_creates_limiter() {
|
||||
let config = RateLimitConfig::new(5, 1);
|
||||
let limiter = config.create_limiter();
|
||||
|
||||
// First 5 requests should succeed
|
||||
for _ in 0..5 {
|
||||
assert!(limiter.check().is_ok());
|
||||
}
|
||||
|
||||
// 6th request should fail
|
||||
assert!(limiter.check().is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn rate_limit_middleware_allows_within_limit() {
|
||||
use poem::{handler, test::TestClient, EndpointExt, Route};
|
||||
|
||||
#[handler]
|
||||
async fn index() -> String {
|
||||
"Hello".to_string()
|
||||
}
|
||||
|
||||
let config = RateLimitConfig::new(5, 60);
|
||||
let app = Route::new()
|
||||
.at("/", poem::get(index))
|
||||
.with(RateLimit::new(&config));
|
||||
let cli = TestClient::new(app);
|
||||
|
||||
// First 5 requests should succeed
|
||||
for _ in 0..5 {
|
||||
let response = cli.get("/").send().await;
|
||||
response.assert_status_is_ok();
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn rate_limit_middleware_blocks_over_limit() {
|
||||
use poem::{handler, test::TestClient, EndpointExt, Route};
|
||||
|
||||
#[handler]
|
||||
async fn index() -> String {
|
||||
"Hello".to_string()
|
||||
}
|
||||
|
||||
let config = RateLimitConfig::new(3, 60);
|
||||
let app = Route::new()
|
||||
.at("/", poem::get(index))
|
||||
.with(RateLimit::new(&config));
|
||||
let cli = TestClient::new(app);
|
||||
|
||||
// First 3 requests should succeed
|
||||
for _ in 0..3 {
|
||||
let response = cli.get("/").send().await;
|
||||
response.assert_status_is_ok();
|
||||
}
|
||||
|
||||
// 4th request should be rate limited
|
||||
let response = cli.get("/").send().await;
|
||||
response.assert_status(poem::http::StatusCode::TOO_MANY_REQUESTS);
|
||||
}
|
||||
}
|
||||
@@ -1,514 +0,0 @@
|
||||
//! Contact form endpoint for handling user submissions and sending emails.
|
||||
//!
|
||||
//! This module provides functionality to:
|
||||
//! - Validate contact form submissions
|
||||
//! - Detect spam using honeypot fields
|
||||
//! - Send emails via SMTP with various TLS configurations
|
||||
|
||||
use lettre::{
|
||||
Message, SmtpTransport, Transport, message::header::ContentType,
|
||||
transport::smtp::authentication::Credentials,
|
||||
};
|
||||
use poem_openapi::{ApiResponse, Object, OpenApi, payload::Json};
|
||||
use validator::Validate;
|
||||
|
||||
use super::ApiCategory;
|
||||
use crate::settings::{EmailSettings, Starttls};
|
||||
|
||||
impl TryFrom<&EmailSettings> for SmtpTransport {
|
||||
type Error = lettre::transport::smtp::Error;
|
||||
|
||||
fn try_from(settings: &EmailSettings) -> Result<Self, Self::Error> {
|
||||
if settings.tls {
|
||||
// Implicit TLS (SMTPS) - typically port 465
|
||||
tracing::event!(target: "backend::contact", tracing::Level::DEBUG, "Using implicit TLS (SMTPS)");
|
||||
let creds = Credentials::new(settings.user.clone(), settings.password.clone());
|
||||
Ok(Self::relay(&settings.host)?
|
||||
.port(settings.port)
|
||||
.credentials(creds)
|
||||
.build())
|
||||
} else {
|
||||
// STARTTLS or no encryption
|
||||
match settings.starttls {
|
||||
Starttls::Never => {
|
||||
// For local development without TLS
|
||||
tracing::event!(target: "backend::contact", tracing::Level::DEBUG, "Using unencrypted connection");
|
||||
let builder = Self::builder_dangerous(&settings.host).port(settings.port);
|
||||
if settings.user.is_empty() {
|
||||
Ok(builder.build())
|
||||
} else {
|
||||
let creds =
|
||||
Credentials::new(settings.user.clone(), settings.password.clone());
|
||||
Ok(builder.credentials(creds).build())
|
||||
}
|
||||
}
|
||||
Starttls::Opportunistic | Starttls::Always => {
|
||||
// STARTTLS - typically port 587
|
||||
tracing::event!(target: "backend::contact", tracing::Level::DEBUG, "Using STARTTLS");
|
||||
let creds = Credentials::new(settings.user.clone(), settings.password.clone());
|
||||
Ok(Self::starttls_relay(&settings.host)?
|
||||
.port(settings.port)
|
||||
.credentials(creds)
|
||||
.build())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Object, Validate)]
|
||||
struct ContactRequest {
|
||||
#[validate(length(
|
||||
min = 1,
|
||||
max = "100",
|
||||
message = "Name must be between 1 and 100 characters"
|
||||
))]
|
||||
name: String,
|
||||
#[validate(email(message = "Invalid email address"))]
|
||||
email: String,
|
||||
#[validate(length(
|
||||
min = 10,
|
||||
max = 5000,
|
||||
message = "Message must be between 10 and 5000 characters"
|
||||
))]
|
||||
message: String,
|
||||
/// Honeypot field - should always be empty
|
||||
#[oai(rename = "website")]
|
||||
honeypot: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Object, serde::Deserialize)]
|
||||
struct ContactResponse {
|
||||
success: bool,
|
||||
message: String,
|
||||
}
|
||||
|
||||
impl From<ContactResponse> for Json<ContactResponse> {
|
||||
fn from(value: ContactResponse) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(ApiResponse)]
|
||||
enum ContactApiResponse {
|
||||
/// Success
|
||||
#[oai(status = 200)]
|
||||
Ok(Json<ContactResponse>),
|
||||
/// Bad Request - validation failed
|
||||
#[oai(status = 400)]
|
||||
BadRequest(Json<ContactResponse>),
|
||||
/// Too Many Requests - rate limit exceeded
|
||||
#[oai(status = 429)]
|
||||
#[allow(dead_code)]
|
||||
TooManyRequests,
|
||||
/// Internal Server Error
|
||||
#[oai(status = 500)]
|
||||
InternalServerError(Json<ContactResponse>),
|
||||
}
|
||||
|
||||
/// API for handling contact form submissions and sending emails.
|
||||
#[derive(Clone)]
|
||||
pub struct ContactApi {
|
||||
settings: EmailSettings,
|
||||
}
|
||||
|
||||
impl From<EmailSettings> for ContactApi {
|
||||
fn from(settings: EmailSettings) -> Self {
|
||||
Self { settings }
|
||||
}
|
||||
}
|
||||
|
||||
#[OpenApi(tag = "ApiCategory::Contact")]
|
||||
impl ContactApi {
|
||||
/// Submit a contact form
|
||||
///
|
||||
/// Send a message through the contact form. Rate limited to prevent spam.
|
||||
#[oai(path = "/contact", method = "post")]
|
||||
async fn submit_contact(
|
||||
&self,
|
||||
body: Json<ContactRequest>,
|
||||
remote_addr: Option<poem::web::Data<&poem::web::RemoteAddr>>,
|
||||
) -> ContactApiResponse {
|
||||
let body = body.0;
|
||||
if body.honeypot.is_some() {
|
||||
tracing::event!(target: "backend::contact", tracing::Level::INFO, "Honeypot triggered, rejecting request silently. IP: {}", remote_addr.map_or_else(|| "No remote address found".to_owned(), |ip| ip.0.to_string()));
|
||||
return ContactApiResponse::Ok(
|
||||
ContactResponse {
|
||||
success: true,
|
||||
message: "Message sent successfully, but not really, you bot".to_owned(),
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
if let Err(e) = body.validate() {
|
||||
return ContactApiResponse::BadRequest(
|
||||
ContactResponse {
|
||||
success: false,
|
||||
message: format!("Validation error: {e}"),
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
match self.send_email(&body).await {
|
||||
Ok(()) => {
|
||||
tracing::event!(target: "backend::contact", tracing::Level::INFO, "Message sent successfully from: {}", body.email);
|
||||
ContactApiResponse::Ok(
|
||||
ContactResponse {
|
||||
success: true,
|
||||
message: "Message sent successfully".to_owned(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::event!(target: "backend::contact", tracing::Level::ERROR, "Failed to send email: {}", e);
|
||||
ContactApiResponse::InternalServerError(
|
||||
ContactResponse {
|
||||
success: false,
|
||||
message: "Failed to send message. Please try again later.".to_owned(),
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn send_email(&self, request: &ContactRequest) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let email_body = format!(
|
||||
r"New contact form submission:
|
||||
|
||||
Name: {}
|
||||
Email: {},
|
||||
|
||||
Message:
|
||||
{}",
|
||||
request.name, request.email, request.message
|
||||
);
|
||||
tracing::event!(target: "email", tracing::Level::DEBUG, "Sending email content: {}", email_body);
|
||||
let email = Message::builder()
|
||||
.from(self.settings.from.parse()?)
|
||||
.reply_to(format!("{} <{}>", request.name, request.email).parse()?)
|
||||
.to(self.settings.recipient.parse()?)
|
||||
.subject(format!("Contact Form: {}", request.name))
|
||||
.header(ContentType::TEXT_PLAIN)
|
||||
.body(email_body)?;
|
||||
tracing::event!(target: "email", tracing::Level::DEBUG, "Email to be sent: {}", format!("{email:?}"));
|
||||
|
||||
let mailer = SmtpTransport::try_from(&self.settings)?;
|
||||
mailer.send(&email)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
// Tests for ContactRequest validation
|
||||
#[test]
|
||||
fn contact_request_valid() {
|
||||
let request = ContactRequest {
|
||||
name: "John Doe".to_string(),
|
||||
email: "john@example.com".to_string(),
|
||||
message: "This is a test message that is long enough.".to_string(),
|
||||
honeypot: None,
|
||||
};
|
||||
assert!(request.validate().is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn contact_request_name_too_short() {
|
||||
let request = ContactRequest {
|
||||
name: String::new(),
|
||||
email: "john@example.com".to_string(),
|
||||
message: "This is a test message that is long enough.".to_string(),
|
||||
honeypot: None,
|
||||
};
|
||||
assert!(request.validate().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn contact_request_name_too_long() {
|
||||
let request = ContactRequest {
|
||||
name: "a".repeat(101),
|
||||
email: "john@example.com".to_string(),
|
||||
message: "This is a test message that is long enough.".to_string(),
|
||||
honeypot: None,
|
||||
};
|
||||
assert!(request.validate().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn contact_request_name_at_max_length() {
|
||||
let request = ContactRequest {
|
||||
name: "a".repeat(100),
|
||||
email: "john@example.com".to_string(),
|
||||
message: "This is a test message that is long enough.".to_string(),
|
||||
honeypot: None,
|
||||
};
|
||||
assert!(request.validate().is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn contact_request_invalid_email() {
|
||||
let request = ContactRequest {
|
||||
name: "John Doe".to_string(),
|
||||
email: "not-an-email".to_string(),
|
||||
message: "This is a test message that is long enough.".to_string(),
|
||||
honeypot: None,
|
||||
};
|
||||
assert!(request.validate().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn contact_request_message_too_short() {
|
||||
let request = ContactRequest {
|
||||
name: "John Doe".to_string(),
|
||||
email: "john@example.com".to_string(),
|
||||
message: "Short".to_string(),
|
||||
honeypot: None,
|
||||
};
|
||||
assert!(request.validate().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn contact_request_message_too_long() {
|
||||
let request = ContactRequest {
|
||||
name: "John Doe".to_string(),
|
||||
email: "john@example.com".to_string(),
|
||||
message: "a".repeat(5001),
|
||||
honeypot: None,
|
||||
};
|
||||
assert!(request.validate().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn contact_request_message_at_min_length() {
|
||||
let request = ContactRequest {
|
||||
name: "John Doe".to_string(),
|
||||
email: "john@example.com".to_string(),
|
||||
message: "a".repeat(10),
|
||||
honeypot: None,
|
||||
};
|
||||
assert!(request.validate().is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn contact_request_message_at_max_length() {
|
||||
let request = ContactRequest {
|
||||
name: "John Doe".to_string(),
|
||||
email: "john@example.com".to_string(),
|
||||
message: "a".repeat(5000),
|
||||
honeypot: None,
|
||||
};
|
||||
assert!(request.validate().is_ok());
|
||||
}
|
||||
|
||||
// Tests for SmtpTransport TryFrom implementation
|
||||
#[test]
|
||||
fn smtp_transport_implicit_tls() {
|
||||
let settings = EmailSettings {
|
||||
host: "smtp.example.com".to_string(),
|
||||
port: 465,
|
||||
user: "user@example.com".to_string(),
|
||||
password: "password".to_string(),
|
||||
from: "from@example.com".to_string(),
|
||||
recipient: "to@example.com".to_string(),
|
||||
tls: true,
|
||||
starttls: Starttls::Never,
|
||||
};
|
||||
|
||||
let result = SmtpTransport::try_from(&settings);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn smtp_transport_starttls_always() {
|
||||
let settings = EmailSettings {
|
||||
host: "smtp.example.com".to_string(),
|
||||
port: 587,
|
||||
user: "user@example.com".to_string(),
|
||||
password: "password".to_string(),
|
||||
from: "from@example.com".to_string(),
|
||||
recipient: "to@example.com".to_string(),
|
||||
tls: false,
|
||||
starttls: Starttls::Always,
|
||||
};
|
||||
|
||||
let result = SmtpTransport::try_from(&settings);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn smtp_transport_starttls_opportunistic() {
|
||||
let settings = EmailSettings {
|
||||
host: "smtp.example.com".to_string(),
|
||||
port: 587,
|
||||
user: "user@example.com".to_string(),
|
||||
password: "password".to_string(),
|
||||
from: "from@example.com".to_string(),
|
||||
recipient: "to@example.com".to_string(),
|
||||
tls: false,
|
||||
starttls: Starttls::Opportunistic,
|
||||
};
|
||||
|
||||
let result = SmtpTransport::try_from(&settings);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn smtp_transport_no_encryption_with_credentials() {
|
||||
let settings = EmailSettings {
|
||||
host: "localhost".to_string(),
|
||||
port: 1025,
|
||||
user: "user@example.com".to_string(),
|
||||
password: "password".to_string(),
|
||||
from: "from@example.com".to_string(),
|
||||
recipient: "to@example.com".to_string(),
|
||||
tls: false,
|
||||
starttls: Starttls::Never,
|
||||
};
|
||||
|
||||
let result = SmtpTransport::try_from(&settings);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn smtp_transport_no_encryption_no_credentials() {
|
||||
let settings = EmailSettings {
|
||||
host: "localhost".to_string(),
|
||||
port: 1025,
|
||||
user: String::new(),
|
||||
password: String::new(),
|
||||
from: "from@example.com".to_string(),
|
||||
recipient: "to@example.com".to_string(),
|
||||
tls: false,
|
||||
starttls: Starttls::Never,
|
||||
};
|
||||
|
||||
let result = SmtpTransport::try_from(&settings);
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
// Integration tests for contact API endpoint
|
||||
#[tokio::test]
|
||||
async fn contact_endpoint_honeypot_triggered() {
|
||||
let app = crate::get_test_app();
|
||||
let cli = poem::test::TestClient::new(app);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"name": "Bot Name",
|
||||
"email": "bot@example.com",
|
||||
"message": "This is a spam message from a bot.",
|
||||
"website": "http://spam.com"
|
||||
});
|
||||
|
||||
let resp = cli.post("/api/contact").body_json(&body).send().await;
|
||||
resp.assert_status_is_ok();
|
||||
|
||||
let json_text = resp.0.into_body().into_string().await.unwrap();
|
||||
let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
|
||||
assert!(json.success);
|
||||
assert!(json.message.contains("not really"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn contact_endpoint_validation_error_empty_name() {
|
||||
let app = crate::get_test_app();
|
||||
let cli = poem::test::TestClient::new(app);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"name": "",
|
||||
"email": "test@example.com",
|
||||
"message": "This is a valid message that is long enough."
|
||||
});
|
||||
|
||||
let resp = cli.post("/api/contact").body_json(&body).send().await;
|
||||
resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
|
||||
|
||||
let json_text = resp.0.into_body().into_string().await.unwrap();
|
||||
let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
|
||||
assert!(!json.success);
|
||||
assert!(json.message.contains("Validation error"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn contact_endpoint_validation_error_invalid_email() {
|
||||
let app = crate::get_test_app();
|
||||
let cli = poem::test::TestClient::new(app);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"name": "Test User",
|
||||
"email": "not-an-email",
|
||||
"message": "This is a valid message that is long enough."
|
||||
});
|
||||
|
||||
let resp = cli.post("/api/contact").body_json(&body).send().await;
|
||||
resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
|
||||
|
||||
let json_text = resp.0.into_body().into_string().await.unwrap();
|
||||
let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
|
||||
assert!(!json.success);
|
||||
assert!(json.message.contains("Validation error"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn contact_endpoint_validation_error_message_too_short() {
|
||||
let app = crate::get_test_app();
|
||||
let cli = poem::test::TestClient::new(app);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"name": "Test User",
|
||||
"email": "test@example.com",
|
||||
"message": "Short"
|
||||
});
|
||||
|
||||
let resp = cli.post("/api/contact").body_json(&body).send().await;
|
||||
resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
|
||||
|
||||
let json_text = resp.0.into_body().into_string().await.unwrap();
|
||||
let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
|
||||
assert!(!json.success);
|
||||
assert!(json.message.contains("Validation error"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn contact_endpoint_validation_error_name_too_long() {
|
||||
let app = crate::get_test_app();
|
||||
let cli = poem::test::TestClient::new(app);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"name": "a".repeat(101),
|
||||
"email": "test@example.com",
|
||||
"message": "This is a valid message that is long enough."
|
||||
});
|
||||
|
||||
let resp = cli.post("/api/contact").body_json(&body).send().await;
|
||||
resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
|
||||
|
||||
let json_text = resp.0.into_body().into_string().await.unwrap();
|
||||
let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
|
||||
assert!(!json.success);
|
||||
assert!(json.message.contains("Validation error"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn contact_endpoint_validation_error_message_too_long() {
|
||||
let app = crate::get_test_app();
|
||||
let cli = poem::test::TestClient::new(app);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"name": "Test User",
|
||||
"email": "test@example.com",
|
||||
"message": "a".repeat(5001)
|
||||
});
|
||||
|
||||
let resp = cli.post("/api/contact").body_json(&body).send().await;
|
||||
resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
|
||||
|
||||
let json_text = resp.0.into_body().into_string().await.unwrap();
|
||||
let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
|
||||
assert!(!json.success);
|
||||
assert!(json.message.contains("Validation error"));
|
||||
}
|
||||
}
|
||||
@@ -1,38 +0,0 @@
|
||||
//! Health check endpoint for monitoring service availability.
|
||||
|
||||
use poem_openapi::{ApiResponse, OpenApi};
|
||||
|
||||
use super::ApiCategory;
|
||||
|
||||
#[derive(ApiResponse)]
|
||||
enum HealthResponse {
|
||||
/// Success
|
||||
#[oai(status = 200)]
|
||||
Ok,
|
||||
/// Too Many Requests - rate limit exceeded
|
||||
#[oai(status = 429)]
|
||||
#[allow(dead_code)]
|
||||
TooManyRequests,
|
||||
}
|
||||
|
||||
/// Health check API for monitoring service availability.
|
||||
#[derive(Default, Clone)]
|
||||
pub struct HealthApi;
|
||||
|
||||
#[OpenApi(tag = "ApiCategory::Health")]
|
||||
impl HealthApi {
|
||||
#[oai(path = "/health", method = "get")]
|
||||
async fn ping(&self) -> HealthResponse {
|
||||
tracing::event!(target: "backend::health", tracing::Level::DEBUG, "Accessing health-check endpoint");
|
||||
HealthResponse::Ok
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn health_check_works() {
|
||||
let app = crate::get_test_app();
|
||||
let cli = poem::test::TestClient::new(app);
|
||||
let resp = cli.get("/api/health").send().await;
|
||||
resp.assert_status_is_ok();
|
||||
resp.assert_text("").await;
|
||||
}
|
||||
@@ -1,86 +0,0 @@
|
||||
//! Application metadata endpoint for retrieving version and name information.
|
||||
|
||||
use poem::Result;
|
||||
use poem_openapi::{ApiResponse, Object, OpenApi, payload::Json};
|
||||
|
||||
use super::ApiCategory;
|
||||
use crate::settings::ApplicationSettings;
|
||||
|
||||
#[derive(Object, Debug, Clone, serde::Serialize, serde::Deserialize)]
|
||||
struct Meta {
|
||||
version: String,
|
||||
name: String,
|
||||
}
|
||||
|
||||
impl From<&MetaApi> for Meta {
|
||||
fn from(value: &MetaApi) -> Self {
|
||||
let version = value.version.clone();
|
||||
let name = value.name.clone();
|
||||
Self { version, name }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(ApiResponse)]
|
||||
enum MetaResponse {
|
||||
/// Success
|
||||
#[oai(status = 200)]
|
||||
Meta(Json<Meta>),
|
||||
/// Too Many Requests - rate limit exceeded
|
||||
#[oai(status = 429)]
|
||||
#[allow(dead_code)]
|
||||
TooManyRequests,
|
||||
}
|
||||
|
||||
/// API for retrieving application metadata (name and version).
|
||||
#[derive(Clone)]
|
||||
pub struct MetaApi {
|
||||
name: String,
|
||||
version: String,
|
||||
}
|
||||
|
||||
impl From<&ApplicationSettings> for MetaApi {
|
||||
fn from(value: &ApplicationSettings) -> Self {
|
||||
let name = value.name.clone();
|
||||
let version = value.version.clone();
|
||||
Self { name, version }
|
||||
}
|
||||
}
|
||||
|
||||
#[OpenApi(tag = "ApiCategory::Meta")]
|
||||
impl MetaApi {
|
||||
#[oai(path = "/meta", method = "get")]
|
||||
async fn meta(&self) -> Result<MetaResponse> {
|
||||
tracing::event!(target: "backend::meta", tracing::Level::DEBUG, "Accessing meta endpoint");
|
||||
Ok(MetaResponse::Meta(Json(self.into())))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[tokio::test]
|
||||
async fn meta_endpoint_returns_correct_data() {
|
||||
let app = crate::get_test_app();
|
||||
let cli = poem::test::TestClient::new(app);
|
||||
let resp = cli.get("/api/meta").send().await;
|
||||
resp.assert_status_is_ok();
|
||||
|
||||
let json_value: serde_json::Value = resp.json().await.value().deserialize();
|
||||
|
||||
assert!(
|
||||
json_value.get("version").is_some(),
|
||||
"Response should have version field"
|
||||
);
|
||||
assert!(
|
||||
json_value.get("name").is_some(),
|
||||
"Response should have name field"
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn meta_endpoint_returns_200_status() {
|
||||
let app = crate::get_test_app();
|
||||
let cli = poem::test::TestClient::new(app);
|
||||
let resp = cli.get("/api/meta").send().await;
|
||||
resp.assert_status_is_ok();
|
||||
}
|
||||
}
|
||||
@@ -1,46 +0,0 @@
|
||||
//! API route handlers for the backend server.
|
||||
//!
|
||||
//! This module contains all the HTTP endpoint handlers organized by functionality:
|
||||
//! - Contact form handling
|
||||
//! - Health checks
|
||||
//! - Application metadata
|
||||
|
||||
use poem_openapi::Tags;
|
||||
|
||||
mod contact;
|
||||
mod health;
|
||||
mod meta;
|
||||
|
||||
use crate::settings::Settings;
|
||||
|
||||
#[derive(Tags)]
|
||||
enum ApiCategory {
|
||||
Contact,
|
||||
Health,
|
||||
Meta,
|
||||
}
|
||||
|
||||
pub(crate) struct Api {
|
||||
contact: contact::ContactApi,
|
||||
health: health::HealthApi,
|
||||
meta: meta::MetaApi,
|
||||
}
|
||||
|
||||
impl From<&Settings> for Api {
|
||||
fn from(value: &Settings) -> Self {
|
||||
let contact = contact::ContactApi::from(value.clone().email);
|
||||
let health = health::HealthApi;
|
||||
let meta = meta::MetaApi::from(&value.application);
|
||||
Self {
|
||||
contact,
|
||||
health,
|
||||
meta,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Api {
|
||||
pub fn apis(self) -> (contact::ContactApi, health::HealthApi, meta::MetaApi) {
|
||||
(self.contact, self.health, self.meta)
|
||||
}
|
||||
}
|
||||
@@ -1,619 +0,0 @@
|
||||
//! Application configuration settings.
|
||||
//!
|
||||
//! This module provides configuration structures that can be loaded from:
|
||||
//! - YAML configuration files (base.yaml and environment-specific files)
|
||||
//! - Environment variables (prefixed with APP__)
|
||||
//!
|
||||
//! Settings include application details, email server configuration, and environment settings.
|
||||
|
||||
/// Application configuration settings.
|
||||
///
|
||||
/// Loads configuration from YAML files and environment variables.
|
||||
#[derive(Debug, serde::Deserialize, Clone, Default)]
|
||||
pub struct Settings {
|
||||
/// Application-specific settings (name, version, host, port, etc.)
|
||||
pub application: ApplicationSettings,
|
||||
/// Debug mode flag
|
||||
pub debug: bool,
|
||||
/// Email server configuration for contact form
|
||||
pub email: EmailSettings,
|
||||
/// Frontend URL for CORS configuration
|
||||
pub frontend_url: String,
|
||||
/// Rate limiting configuration
|
||||
#[serde(default)]
|
||||
pub rate_limit: RateLimitSettings,
|
||||
}
|
||||
|
||||
impl Settings {
|
||||
/// Creates a new `Settings` instance by loading configuration from files and environment variables.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns a `config::ConfigError` if:
|
||||
/// - Configuration files cannot be read or parsed
|
||||
/// - Required configuration values are missing
|
||||
/// - Configuration values cannot be deserialized into the expected types
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if:
|
||||
/// - The current directory cannot be determined
|
||||
/// - The `APP_ENVIRONMENT` variable contains an invalid value (not "dev", "development", "prod", or "production")
|
||||
pub fn new() -> Result<Self, config::ConfigError> {
|
||||
let base_path = std::env::current_dir().expect("Failed to determine the current directory");
|
||||
let settings_directory = base_path.join("settings");
|
||||
let environment: Environment = std::env::var("APP_ENVIRONMENT")
|
||||
.unwrap_or_else(|_| "dev".into())
|
||||
.try_into()
|
||||
.expect("Failed to parse APP_ENVIRONMENT");
|
||||
let environment_filename = format!("{environment}.yaml");
|
||||
// Lower = takes precedence
|
||||
let settings = config::Config::builder()
|
||||
.add_source(config::File::from(settings_directory.join("base.yaml")))
|
||||
.add_source(config::File::from(
|
||||
settings_directory.join(environment_filename),
|
||||
))
|
||||
.add_source(
|
||||
config::Environment::with_prefix("APP")
|
||||
.prefix_separator("__")
|
||||
.separator("__"),
|
||||
)
|
||||
.build()?;
|
||||
settings.try_deserialize()
|
||||
}
|
||||
}
|
||||
|
||||
/// Application-specific configuration settings.
|
||||
#[derive(Debug, serde::Deserialize, Clone, Default)]
|
||||
pub struct ApplicationSettings {
|
||||
/// Application name
|
||||
pub name: String,
|
||||
/// Application version
|
||||
pub version: String,
|
||||
/// Port to bind to
|
||||
pub port: u16,
|
||||
/// Host address to bind to
|
||||
pub host: String,
|
||||
/// Base URL of the application
|
||||
pub base_url: String,
|
||||
/// Protocol (http or https)
|
||||
pub protocol: String,
|
||||
}
|
||||
|
||||
/// Application environment.
|
||||
#[derive(Debug, PartialEq, Eq, Default)]
|
||||
pub enum Environment {
|
||||
/// Development environment
|
||||
#[default]
|
||||
Development,
|
||||
/// Production environment
|
||||
Production,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Environment {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let self_str = match self {
|
||||
Self::Development => "development",
|
||||
Self::Production => "production",
|
||||
};
|
||||
write!(f, "{self_str}")
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<String> for Environment {
|
||||
type Error = String;
|
||||
|
||||
fn try_from(value: String) -> Result<Self, Self::Error> {
|
||||
Self::try_from(value.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for Environment {
|
||||
type Error = String;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
match value.to_lowercase().as_str() {
|
||||
"development" | "dev" => Ok(Self::Development),
|
||||
"production" | "prod" => Ok(Self::Production),
|
||||
other => Err(format!(
|
||||
"{other} is not a supported environment. Use either `development` or `production`"
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Email server configuration for the contact form.
|
||||
#[derive(serde::Deserialize, Clone, Default)]
|
||||
pub struct EmailSettings {
|
||||
/// SMTP server hostname
|
||||
pub host: String,
|
||||
/// SMTP server port
|
||||
pub port: u16,
|
||||
/// SMTP authentication username
|
||||
pub user: String,
|
||||
/// Email address to send from
|
||||
pub from: String,
|
||||
/// SMTP authentication password
|
||||
pub password: String,
|
||||
/// Email address to send contact form submissions to
|
||||
pub recipient: String,
|
||||
/// STARTTLS configuration
|
||||
pub starttls: Starttls,
|
||||
/// Whether to use implicit TLS (SMTPS)
|
||||
pub tls: bool,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for EmailSettings {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("EmailSettings")
|
||||
.field("host", &self.host)
|
||||
.field("port", &self.port)
|
||||
.field("user", &self.user)
|
||||
.field("from", &self.from)
|
||||
.field("password", &"[REDACTED]")
|
||||
.field("recipient", &self.recipient)
|
||||
.field("starttls", &self.starttls)
|
||||
.field("tls", &self.tls)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
/// STARTTLS configuration for SMTP connections.
|
||||
#[derive(Debug, PartialEq, Eq, Default, Clone)]
|
||||
pub enum Starttls {
|
||||
/// Never use STARTTLS (unencrypted connection)
|
||||
#[default]
|
||||
Never,
|
||||
/// Use STARTTLS if available (opportunistic encryption)
|
||||
Opportunistic,
|
||||
/// Always use STARTTLS (required encryption)
|
||||
Always,
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for Starttls {
|
||||
type Error = String;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
match value.to_lowercase().as_str() {
|
||||
"off" | "no" | "never" => Ok(Self::Never),
|
||||
"opportunistic" => Ok(Self::Opportunistic),
|
||||
"yes" | "always" => Ok(Self::Always),
|
||||
other => Err(format!(
|
||||
"{other} is not a supported option. Use either `yes`, `no`, or `opportunistic`"
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<String> for Starttls {
|
||||
type Error = String;
|
||||
fn try_from(value: String) -> Result<Self, Self::Error> {
|
||||
value.as_str().try_into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<bool> for Starttls {
|
||||
fn from(value: bool) -> Self {
|
||||
if value { Self::Always } else { Self::Never }
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Starttls {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let self_str = match self {
|
||||
Self::Never => "never",
|
||||
Self::Opportunistic => "opportunistic",
|
||||
Self::Always => "always",
|
||||
};
|
||||
write!(f, "{self_str}")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for Starttls {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
struct StartlsVisitor;
|
||||
|
||||
impl serde::de::Visitor<'_> for StartlsVisitor {
|
||||
type Value = Starttls;
|
||||
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
formatter.write_str("a string or boolean representing STARTTLS setting (e.g., 'yes', 'no', 'opportunistic', true, false)")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, value: &str) -> Result<Starttls, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Starttls::try_from(value).map_err(E::custom)
|
||||
}
|
||||
|
||||
fn visit_string<E>(self, value: String) -> Result<Starttls, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Starttls::try_from(value.as_str()).map_err(E::custom)
|
||||
}
|
||||
|
||||
fn visit_bool<E>(self, value: bool) -> Result<Starttls, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
Ok(Starttls::from(value))
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_any(StartlsVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
/// Rate limiting configuration.
|
||||
#[derive(Debug, serde::Deserialize, Clone)]
|
||||
pub struct RateLimitSettings {
|
||||
/// Whether rate limiting is enabled
|
||||
#[serde(default = "default_rate_limit_enabled")]
|
||||
pub enabled: bool,
|
||||
/// Maximum number of requests allowed in the time window (burst size)
|
||||
#[serde(default = "default_burst_size")]
|
||||
pub burst_size: u32,
|
||||
/// Time window in seconds for rate limiting
|
||||
#[serde(default = "default_per_seconds")]
|
||||
pub per_seconds: u64,
|
||||
}
|
||||
|
||||
impl Default for RateLimitSettings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enabled: default_rate_limit_enabled(),
|
||||
burst_size: default_burst_size(),
|
||||
per_seconds: default_per_seconds(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const fn default_rate_limit_enabled() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
const fn default_burst_size() -> u32 {
|
||||
100
|
||||
}
|
||||
|
||||
const fn default_per_seconds() -> u64 {
|
||||
60
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn environment_display_development() {
|
||||
let env = Environment::Development;
|
||||
assert_eq!(env.to_string(), "development");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_display_production() {
|
||||
let env = Environment::Production;
|
||||
assert_eq!(env.to_string(), "production");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_from_str_development() {
|
||||
assert_eq!(
|
||||
Environment::try_from("development").unwrap(),
|
||||
Environment::Development
|
||||
);
|
||||
assert_eq!(
|
||||
Environment::try_from("dev").unwrap(),
|
||||
Environment::Development
|
||||
);
|
||||
assert_eq!(
|
||||
Environment::try_from("Development").unwrap(),
|
||||
Environment::Development
|
||||
);
|
||||
assert_eq!(
|
||||
Environment::try_from("DEV").unwrap(),
|
||||
Environment::Development
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_from_str_production() {
|
||||
assert_eq!(
|
||||
Environment::try_from("production").unwrap(),
|
||||
Environment::Production
|
||||
);
|
||||
assert_eq!(
|
||||
Environment::try_from("prod").unwrap(),
|
||||
Environment::Production
|
||||
);
|
||||
assert_eq!(
|
||||
Environment::try_from("Production").unwrap(),
|
||||
Environment::Production
|
||||
);
|
||||
assert_eq!(
|
||||
Environment::try_from("PROD").unwrap(),
|
||||
Environment::Production
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_from_str_invalid() {
|
||||
let result = Environment::try_from("invalid");
|
||||
assert!(result.is_err());
|
||||
assert!(result.unwrap_err().contains("not a supported environment"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_from_string_development() {
|
||||
assert_eq!(
|
||||
Environment::try_from("development".to_string()).unwrap(),
|
||||
Environment::Development
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_from_string_production() {
|
||||
assert_eq!(
|
||||
Environment::try_from("production".to_string()).unwrap(),
|
||||
Environment::Production
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_from_string_invalid() {
|
||||
let result = Environment::try_from("invalid".to_string());
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn environment_default_is_development() {
|
||||
let env = Environment::default();
|
||||
assert_eq!(env, Environment::Development);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_deserialize_from_string_never() {
|
||||
let json = r#""never""#;
|
||||
let result: Starttls = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(result, Starttls::Never);
|
||||
|
||||
let json = r#""no""#;
|
||||
let result: Starttls = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(result, Starttls::Never);
|
||||
|
||||
let json = r#""off""#;
|
||||
let result: Starttls = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(result, Starttls::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_deserialize_from_string_always() {
|
||||
let json = r#""always""#;
|
||||
let result: Starttls = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(result, Starttls::Always);
|
||||
|
||||
let json = r#""yes""#;
|
||||
let result: Starttls = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(result, Starttls::Always);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_deserialize_from_string_opportunistic() {
|
||||
let json = r#""opportunistic""#;
|
||||
let result: Starttls = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(result, Starttls::Opportunistic);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_deserialize_from_bool() {
|
||||
let json = "true";
|
||||
let result: Starttls = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(result, Starttls::Always);
|
||||
|
||||
let json = "false";
|
||||
let result: Starttls = serde_json::from_str(json).unwrap();
|
||||
assert_eq!(result, Starttls::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_deserialize_from_string_invalid() {
|
||||
let json = r#""invalid""#;
|
||||
let result: Result<Starttls, _> = serde_json::from_str(json);
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_default_is_never() {
|
||||
let startls = Starttls::default();
|
||||
assert_eq!(startls, Starttls::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_try_from_str_never() {
|
||||
assert_eq!(Starttls::try_from("never").unwrap(), Starttls::Never);
|
||||
assert_eq!(Starttls::try_from("no").unwrap(), Starttls::Never);
|
||||
assert_eq!(Starttls::try_from("off").unwrap(), Starttls::Never);
|
||||
assert_eq!(Starttls::try_from("NEVER").unwrap(), Starttls::Never);
|
||||
assert_eq!(Starttls::try_from("No").unwrap(), Starttls::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_try_from_str_always() {
|
||||
assert_eq!(Starttls::try_from("always").unwrap(), Starttls::Always);
|
||||
assert_eq!(Starttls::try_from("yes").unwrap(), Starttls::Always);
|
||||
assert_eq!(Starttls::try_from("ALWAYS").unwrap(), Starttls::Always);
|
||||
assert_eq!(Starttls::try_from("Yes").unwrap(), Starttls::Always);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_try_from_str_opportunistic() {
|
||||
assert_eq!(
|
||||
Starttls::try_from("opportunistic").unwrap(),
|
||||
Starttls::Opportunistic
|
||||
);
|
||||
assert_eq!(
|
||||
Starttls::try_from("OPPORTUNISTIC").unwrap(),
|
||||
Starttls::Opportunistic
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_try_from_str_invalid() {
|
||||
let result = Starttls::try_from("invalid");
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.contains("not a supported option"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_try_from_string_never() {
|
||||
assert_eq!(
|
||||
Starttls::try_from("never".to_string()).unwrap(),
|
||||
Starttls::Never
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_try_from_string_always() {
|
||||
assert_eq!(
|
||||
Starttls::try_from("yes".to_string()).unwrap(),
|
||||
Starttls::Always
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_try_from_string_opportunistic() {
|
||||
assert_eq!(
|
||||
Starttls::try_from("opportunistic".to_string()).unwrap(),
|
||||
Starttls::Opportunistic
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_try_from_string_invalid() {
|
||||
let result = Starttls::try_from("invalid".to_string());
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_from_bool_true() {
|
||||
assert_eq!(Starttls::from(true), Starttls::Always);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_from_bool_false() {
|
||||
assert_eq!(Starttls::from(false), Starttls::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_display_never() {
|
||||
let startls = Starttls::Never;
|
||||
assert_eq!(startls.to_string(), "never");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_display_always() {
|
||||
let startls = Starttls::Always;
|
||||
assert_eq!(startls.to_string(), "always");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_display_opportunistic() {
|
||||
let startls = Starttls::Opportunistic;
|
||||
assert_eq!(startls.to_string(), "opportunistic");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rate_limit_settings_default() {
|
||||
let settings = RateLimitSettings::default();
|
||||
assert!(settings.enabled);
|
||||
assert_eq!(settings.burst_size, 100);
|
||||
assert_eq!(settings.per_seconds, 60);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rate_limit_settings_deserialize_full() {
|
||||
let json = r#"{"enabled": true, "burst_size": 50, "per_seconds": 30}"#;
|
||||
let settings: RateLimitSettings = serde_json::from_str(json).unwrap();
|
||||
assert!(settings.enabled);
|
||||
assert_eq!(settings.burst_size, 50);
|
||||
assert_eq!(settings.per_seconds, 30);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rate_limit_settings_deserialize_partial() {
|
||||
let json = r#"{"enabled": false}"#;
|
||||
let settings: RateLimitSettings = serde_json::from_str(json).unwrap();
|
||||
assert!(!settings.enabled);
|
||||
assert_eq!(settings.burst_size, 100); // default
|
||||
assert_eq!(settings.per_seconds, 60); // default
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rate_limit_settings_deserialize_empty() {
|
||||
let json = "{}";
|
||||
let settings: RateLimitSettings = serde_json::from_str(json).unwrap();
|
||||
assert!(settings.enabled); // default
|
||||
assert_eq!(settings.burst_size, 100); // default
|
||||
assert_eq!(settings.per_seconds, 60); // default
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_deserialize_from_incompatible_type() {
|
||||
// Test that deserialization from an array fails with expected error message
|
||||
let json = "[1, 2, 3]";
|
||||
let result: Result<Starttls, _> = serde_json::from_str(json);
|
||||
assert!(result.is_err());
|
||||
let error = result.unwrap_err().to_string();
|
||||
// The error should mention what was expected
|
||||
assert!(
|
||||
error.contains("STARTTLS") || error.contains("string") || error.contains("boolean")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_deserialize_from_number() {
|
||||
// Test that deserialization from a number fails
|
||||
let json = "42";
|
||||
let result: Result<Starttls, _> = serde_json::from_str(json);
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn startls_deserialize_from_object() {
|
||||
// Test that deserialization from an object fails
|
||||
let json = r#"{"foo": "bar"}"#;
|
||||
let result: Result<Starttls, _> = serde_json::from_str(json);
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn email_settings_debug_redacts_password() {
|
||||
let settings = EmailSettings {
|
||||
host: "smtp.example.com".to_string(),
|
||||
port: 587,
|
||||
user: "user@example.com".to_string(),
|
||||
from: "noreply@example.com".to_string(),
|
||||
password: "super_secret_password".to_string(),
|
||||
recipient: "admin@example.com".to_string(),
|
||||
starttls: Starttls::Always,
|
||||
tls: false,
|
||||
};
|
||||
|
||||
let debug_output = format!("{settings:?}");
|
||||
|
||||
// Password should be redacted
|
||||
assert!(debug_output.contains("[REDACTED]"));
|
||||
// Password should not appear in output
|
||||
assert!(!debug_output.contains("super_secret_password"));
|
||||
// Other fields should still be present
|
||||
assert!(debug_output.contains("smtp.example.com"));
|
||||
assert!(debug_output.contains("user@example.com"));
|
||||
}
|
||||
}
|
||||
@@ -1,228 +0,0 @@
|
||||
//! Application startup and server configuration.
|
||||
//!
|
||||
//! This module handles:
|
||||
//! - Building the application with routes and middleware
|
||||
//! - Setting up the OpenAPI service and Swagger UI
|
||||
//! - Configuring CORS
|
||||
//! - Starting the HTTP server
|
||||
|
||||
use poem::middleware::{AddDataEndpoint, Cors, CorsEndpoint};
|
||||
use poem::{EndpointExt, Route};
|
||||
use poem_openapi::OpenApiService;
|
||||
|
||||
use crate::{
|
||||
middleware::rate_limit::{RateLimit, RateLimitConfig},
|
||||
route::Api,
|
||||
settings::Settings,
|
||||
};
|
||||
|
||||
use crate::middleware::rate_limit::RateLimitEndpoint;
|
||||
|
||||
type Server = poem::Server<poem::listener::TcpListener<String>, std::convert::Infallible>;
|
||||
/// The configured application with rate limiting, CORS, and settings data.
|
||||
pub type App = AddDataEndpoint<CorsEndpoint<RateLimitEndpoint<Route>>, Settings>;
|
||||
|
||||
/// Application builder that holds the server configuration before running.
|
||||
pub struct Application {
|
||||
server: Server,
|
||||
app: poem::Route,
|
||||
host: String,
|
||||
port: u16,
|
||||
settings: Settings,
|
||||
}
|
||||
|
||||
/// A fully configured application ready to run.
|
||||
pub struct RunnableApplication {
|
||||
server: Server,
|
||||
app: App,
|
||||
}
|
||||
|
||||
impl RunnableApplication {
|
||||
/// Runs the application server.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns a `std::io::Error` if the server fails to start or encounters
|
||||
/// an I/O error during runtime (e.g., port already in use, network issues).
|
||||
pub async fn run(self) -> Result<(), std::io::Error> {
|
||||
self.server.run(self.app).await
|
||||
}
|
||||
}
|
||||
|
||||
impl From<RunnableApplication> for App {
|
||||
fn from(value: RunnableApplication) -> Self {
|
||||
value.app
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Application> for RunnableApplication {
|
||||
fn from(value: Application) -> Self {
|
||||
// Configure rate limiting based on settings
|
||||
let rate_limit_config = if value.settings.rate_limit.enabled {
|
||||
tracing::event!(
|
||||
target: "backend::startup",
|
||||
tracing::Level::INFO,
|
||||
burst_size = value.settings.rate_limit.burst_size,
|
||||
per_seconds = value.settings.rate_limit.per_seconds,
|
||||
"Rate limiting enabled"
|
||||
);
|
||||
RateLimitConfig::new(
|
||||
value.settings.rate_limit.burst_size,
|
||||
value.settings.rate_limit.per_seconds,
|
||||
)
|
||||
} else {
|
||||
tracing::event!(
|
||||
target: "backend::startup",
|
||||
tracing::Level::INFO,
|
||||
"Rate limiting disabled (using very high limits)"
|
||||
);
|
||||
// Use very high limits to effectively disable rate limiting
|
||||
RateLimitConfig::new(u32::MAX, 1)
|
||||
};
|
||||
|
||||
let app = value
|
||||
.app
|
||||
.with(RateLimit::new(&rate_limit_config))
|
||||
.with(Cors::new())
|
||||
.data(value.settings);
|
||||
|
||||
let server = value.server;
|
||||
Self { server, app }
|
||||
}
|
||||
}
|
||||
|
||||
impl Application {
|
||||
fn setup_app(settings: &Settings) -> poem::Route {
|
||||
let api_service = OpenApiService::new(
|
||||
Api::from(settings).apis(),
|
||||
settings.application.clone().name,
|
||||
settings.application.clone().version,
|
||||
)
|
||||
.url_prefix("/api");
|
||||
let ui = api_service.swagger_ui();
|
||||
poem::Route::new()
|
||||
.nest("/api", api_service.clone())
|
||||
.nest("/specs", api_service.spec_endpoint_yaml())
|
||||
.nest("/", ui)
|
||||
}
|
||||
|
||||
fn setup_server(
|
||||
settings: &Settings,
|
||||
tcp_listener: Option<poem::listener::TcpListener<String>>,
|
||||
) -> Server {
|
||||
let tcp_listener = tcp_listener.unwrap_or_else(|| {
|
||||
let address = format!(
|
||||
"{}:{}",
|
||||
settings.application.host, settings.application.port
|
||||
);
|
||||
poem::listener::TcpListener::bind(address)
|
||||
});
|
||||
poem::Server::new(tcp_listener)
|
||||
}
|
||||
|
||||
/// Builds a new application with the given settings and optional TCP listener.
|
||||
///
|
||||
/// If no listener is provided, one will be created based on the settings.
|
||||
#[must_use]
|
||||
pub fn build(
|
||||
settings: Settings,
|
||||
tcp_listener: Option<poem::listener::TcpListener<String>>,
|
||||
) -> Self {
|
||||
let port = settings.application.port;
|
||||
let host = settings.application.clone().host;
|
||||
let app = Self::setup_app(&settings);
|
||||
let server = Self::setup_server(&settings, tcp_listener);
|
||||
Self {
|
||||
server,
|
||||
app,
|
||||
host,
|
||||
port,
|
||||
settings,
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts the application into a runnable application.
|
||||
#[must_use]
|
||||
pub fn make_app(self) -> RunnableApplication {
|
||||
self.into()
|
||||
}
|
||||
|
||||
/// Returns the host address the application is configured to bind to.
|
||||
#[must_use]
|
||||
pub fn host(&self) -> String {
|
||||
self.host.clone()
|
||||
}
|
||||
|
||||
/// Returns the port the application is configured to bind to.
|
||||
#[must_use]
|
||||
pub const fn port(&self) -> u16 {
|
||||
self.port
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn create_test_settings() -> Settings {
|
||||
Settings {
|
||||
application: crate::settings::ApplicationSettings {
|
||||
name: "test-app".to_string(),
|
||||
version: "1.0.0".to_string(),
|
||||
port: 8080,
|
||||
host: "127.0.0.1".to_string(),
|
||||
base_url: "http://localhost:8080".to_string(),
|
||||
protocol: "http".to_string(),
|
||||
},
|
||||
debug: false,
|
||||
email: crate::settings::EmailSettings::default(),
|
||||
frontend_url: "http://localhost:3000".to_string(),
|
||||
rate_limit: crate::settings::RateLimitSettings {
|
||||
enabled: false,
|
||||
burst_size: 100,
|
||||
per_seconds: 60,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn application_build_and_host() {
|
||||
let settings = create_test_settings();
|
||||
let app = Application::build(settings.clone(), None);
|
||||
assert_eq!(app.host(), settings.application.host);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn application_build_and_port() {
|
||||
let settings = create_test_settings();
|
||||
let app = Application::build(settings, None);
|
||||
assert_eq!(app.port(), 8080);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn application_host_returns_correct_value() {
|
||||
let settings = create_test_settings();
|
||||
let app = Application::build(settings, None);
|
||||
assert_eq!(app.host(), "127.0.0.1");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn application_port_returns_correct_value() {
|
||||
let settings = create_test_settings();
|
||||
let app = Application::build(settings, None);
|
||||
assert_eq!(app.port(), 8080);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn application_with_custom_listener() {
|
||||
let settings = create_test_settings();
|
||||
let tcp_listener =
|
||||
std::net::TcpListener::bind("127.0.0.1:0").expect("Failed to bind random port");
|
||||
let port = tcp_listener.local_addr().unwrap().port();
|
||||
let listener = poem::listener::TcpListener::bind(format!("127.0.0.1:{port}"));
|
||||
|
||||
let app = Application::build(settings, Some(listener));
|
||||
assert_eq!(app.host(), "127.0.0.1");
|
||||
assert_eq!(app.port(), 8080);
|
||||
}
|
||||
}
|
||||
@@ -1,69 +0,0 @@
|
||||
//! Logging and tracing configuration.
|
||||
//!
|
||||
//! This module provides utilities for setting up structured logging using the tracing crate.
|
||||
//! Supports both pretty-printed logs for development and JSON logs for production.
|
||||
|
||||
use tracing_subscriber::layer::SubscriberExt;
|
||||
|
||||
/// Creates a tracing subscriber configured for the given debug mode.
|
||||
///
|
||||
/// In debug mode, logs are pretty-printed to stdout.
|
||||
/// In production mode, logs are output as JSON.
|
||||
#[must_use]
|
||||
pub fn get_subscriber(debug: bool) -> impl tracing::Subscriber + Send + Sync {
|
||||
let env_filter = if debug { "debug" } else { "info" }.to_string();
|
||||
let env_filter = tracing_subscriber::EnvFilter::try_from_default_env()
|
||||
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new(env_filter));
|
||||
let stdout_log = tracing_subscriber::fmt::layer().pretty();
|
||||
let subscriber = tracing_subscriber::Registry::default()
|
||||
.with(env_filter)
|
||||
.with(stdout_log);
|
||||
let json_log = if debug {
|
||||
None
|
||||
} else {
|
||||
Some(tracing_subscriber::fmt::layer().json())
|
||||
};
|
||||
subscriber.with(json_log)
|
||||
}
|
||||
|
||||
/// Initializes the global tracing subscriber.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if:
|
||||
/// - A global subscriber has already been set
|
||||
/// - The subscriber cannot be set as the global default
|
||||
pub fn init_subscriber(subscriber: impl tracing::Subscriber + Send + Sync) {
|
||||
tracing::subscriber::set_global_default(subscriber).expect("Failed to set subscriber");
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn get_subscriber_debug_mode() {
|
||||
let subscriber = get_subscriber(true);
|
||||
// If we can create the subscriber without panicking, the test passes
|
||||
// We can't easily inspect the subscriber's internals, but we can verify it's created
|
||||
let _ = subscriber;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_subscriber_production_mode() {
|
||||
let subscriber = get_subscriber(false);
|
||||
// If we can create the subscriber without panicking, the test passes
|
||||
let _ = subscriber;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_subscriber_creates_valid_subscriber() {
|
||||
// Test both debug and non-debug modes create valid subscribers
|
||||
let debug_subscriber = get_subscriber(true);
|
||||
let prod_subscriber = get_subscriber(false);
|
||||
|
||||
// Basic smoke test - if these are created without panicking, they're valid
|
||||
let _ = debug_subscriber;
|
||||
let _ = prod_subscriber;
|
||||
}
|
||||
}
|
||||
184
content/en/keine-tashi.md
Normal file
184
content/en/keine-tashi.md
Normal file
@@ -0,0 +1,184 @@
|
||||
---
|
||||
title: BSUP01 Keine Tashi
|
||||
---
|
||||
|
||||
# BSUP01 Keine Tashi
|
||||
|
||||
## Introduction
|
||||
|
||||
KEINE Tashi is a character and set of vocal libraries developed for
|
||||
the shareware [UTAU](http://utau2008.web.fc2.com/), a singing voice
|
||||
synthesizer. I developed KEINE Tashi over the course of several years,
|
||||
from 2012 to 2015. Three vocal libraries have been released to the
|
||||
public, the most used one being his **JPN Power Extend** one. On March
|
||||
10th, 2017, I announced I would cease any kind of activity related to
|
||||
UTAU.
|
||||
|
||||
<blockquote class="twitter-tweet" data-dnt="true" data-theme="dark">
|
||||
<p lang="en" dir="ltr">
|
||||
I’d like to also announce that from now on I am
|
||||
dropping my previous UTAU projects other than covers and won’t develop
|
||||
any new UTAU library
|
||||
</p>
|
||||
— P’undrak (@Phundrak)
|
||||
<a href="https://twitter.com/Phundrak/status/840174634377105408?ref_src=twsrc%5Etfw">March
|
||||
10th, 2017</a>
|
||||
</blockquote>
|
||||
<component is="script" async src="https://platform.twitter.com/widgets.js" charset="utf-8">
|
||||
</component>
|
||||
|
||||
## Character and vocal libraries
|
||||
|
||||
Here's a copy and paste of some old pages describing KEINE Tashi:
|
||||
|
||||
### Presentation
|
||||
|
||||
{class="small-img"}
|
||||
|
||||
- **Codename**: BSUP01 恵音བཀྲ་ཤིས་ KEINE Tashi
|
||||
- **First name**: Tashi (བཀྲ་ཤིས་), Tibetan name meaning "auspicious"
|
||||
- **Last name**: Keine (恵音), Japanese name meaning "Blessing sound". It reads as "keine", although its regular reading should be "megumine".
|
||||
- **Model**: BSUP (Bödkay Shetang UTAU Project)
|
||||
- **Number**: 01
|
||||
- **Gender**: male
|
||||
- **Birthday (lore)**: June 28th, 1991
|
||||
- **Birthday (first release)**: October 14th, 2012
|
||||
- **Weight**: 154 lb / 70 kg
|
||||
- **Heigh**: 6′0″ / 182 cm (very tall for a Tibetan)
|
||||
- **Hair color**: black
|
||||
- **Eyes color**: brown/black
|
||||
- **Appearance**: Tashi wears a modernized Tibetan suit from the Amdo
|
||||
Region (Chinese: 安多 Ānduō), colored in blue. He also wears some
|
||||
turquoise jeweleries.
|
||||
- **Favorite food**: meat momo (Tibetan raviolies)
|
||||
- **Character item**: a Tibetan manuscript
|
||||
- **Voice and creator**: [Phundrak](https://phundrak.com) (me)
|
||||
- **Likes**: to meditate, calligraphy, old books, manuscripts
|
||||
- **Dislikes**: selfishness, lies, arrogance
|
||||
- **Personality**: Tashi is somebody very calm, sweet. He really
|
||||
enjoys old books and manuscripts, and he LOVES meditate! He’s
|
||||
never hungry, so, he can stay meditating for 2 to 3 days meditating,
|
||||
just like that, until he realizes that he should eat something.
|
||||
And he always keeps quiet, it’s really hard to make him angry.
|
||||
|
||||
But when he is, his anger becomes wrath. Anyone who experienced it
|
||||
can attest how complex and difficult it is to calm him down.
|
||||
Strangely enough, shortly after being confronted by Tashi, the
|
||||
victims of this wrath see their quality of life greatly improve.
|
||||
Maybe these people needed to hear some truths they refused to face
|
||||
before?
|
||||
|
||||
### Vocal libraries
|
||||
|
||||
#### JPN VCV
|
||||
|
||||
- **Download links**:
|
||||
|
||||
| Extension | Size | Link |
|
||||
| --------- | -------- | --------------------------------------------------------------------------------- |
|
||||
| 7z | 25.7 MiB | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.7z) |
|
||||
| tar.xz | 32.5 MiB | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.tar.xz) |
|
||||
| zip | 38.0 MiB | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.zip) |
|
||||
|
||||
- **File size**: 60.7 MB
|
||||
- **Total uncompressed size**: 94.4 MB
|
||||
- **Number of voice phonemes**: 1264 (253 audio files)
|
||||
- **Average frequency**: G#2
|
||||
- **Vocal range**: C2\~D3
|
||||
- **FRQ file presence**: partial
|
||||
- **Release date**: October, 14th 2012
|
||||
- **Phoneme encoding**: Romaji with hiragana and CV romaji aliases
|
||||
- **Supported languages**: Japanese
|
||||
- **oto.ini**: Tuned myself
|
||||
- **Recommended engines**: TIPS, VS4U
|
||||
|
||||
#### JPN Extend Power
|
||||
|
||||
- **Download links**:
|
||||
|
||||
| Extension | Size | Link |
|
||||
| --------- | ------ | ------------------------------------------------------------------------------------------ |
|
||||
| 7z | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.7z) |
|
||||
| tar.xz | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.tar.xz) |
|
||||
| zip | 1.2Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.zip) |
|
||||
|
||||
- **File size**: 114 MB
|
||||
- **Total uncompressed size**: 155 MB
|
||||
- **Number of voice phonemes**: 3020 (546 audio files)
|
||||
- **Average frequency**: C3
|
||||
- **Vocal range**: B1\~D4
|
||||
- **FRQ file presence**: partial
|
||||
- **Release date**: June 28th, 2013
|
||||
- **Phoneme encoding**: Romaji (hiragana aliases)
|
||||
- **Supported languages**: Japanese
|
||||
- **oto.ini**: Tuned myself
|
||||
- **Recommended engines**: VS4U, world4utau
|
||||
|
||||
#### JPN Extend Youth
|
||||
|
||||
- **Download links**:
|
||||
|
||||
| Extension | Size | Link |
|
||||
| --------- | -------- | ------------------------------------------------------------------------------------------ |
|
||||
| 7z | 237.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.7z) |
|
||||
| tar.xz | 243.5Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.tar.xz) |
|
||||
| zip | 268.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.zip) |
|
||||
|
||||
- **File size**: 36.9 MB
|
||||
- **Total uncompressed size**: 42.0 MB
|
||||
- **Number of voice phonemes**: 1954 (182 audio files)
|
||||
- **Average frequency**: C4
|
||||
- **Vocal range**: F#3\~A#4
|
||||
- **FRQ file presence**: partial
|
||||
- **Release date**: June 28th, 2013
|
||||
- **Phoneme encoding**: Romaji (hiragana aliases, romaji added with the oto.ini update)
|
||||
- **Supported languages**: Japanese
|
||||
- **oto.ini**: Tuned myself
|
||||
- **Recommended engines**: fresamp, VS4U, world4utau
|
||||
|
||||
#### JPN Extend Native
|
||||
|
||||
- **Status**: abandonned
|
||||
|
||||
{class="small-img"}
|
||||
|
||||
#### TIB CVVC
|
||||
|
||||
- **Status**: abandonned
|
||||
|
||||
#### ENG
|
||||
|
||||
- **Status**: abandonned
|
||||
|
||||
## Usage clause and license
|
||||
|
||||
KEINE Tashi is released under the [CC BY-SA-NC 4.0
|
||||
license](https://creativecommons.org/licenses/by-nc-sa/4.0/), meaning
|
||||
you are free to:
|
||||
|
||||
- **use**: make use of the vocal libraries in UTAU or any other
|
||||
singing vocal synthesizer software.
|
||||
- **adapt**: remix, transform, and build upon the material
|
||||
- **share**: copy and redistribute the material in any medium or
|
||||
format
|
||||
|
||||
my work, on the condition of:
|
||||
|
||||
- **Attribution**: You must give appropriate credit, provide a link to
|
||||
the license, and indicate if changes were made. You may do so in
|
||||
any reasonable manner, but not in any way that suggests the
|
||||
licensor endorses you or your use.
|
||||
- **NonCommercial**: You may not use the material for commercial purposes.
|
||||
- **ShareAlike**: If you remix, transform, or build upon the material,
|
||||
you must distribute your contributions under the same license as
|
||||
the original.
|
||||
|
||||
Although I cannot add anything to this legal notice, I would also like
|
||||
if you followed the following rules of thumb regarding this character:
|
||||
any religious use of this character and its vocal libraries is
|
||||
forbidden, except for folk music, and Buddhist and Bön songs. However,
|
||||
due to the current controversy, any song linked to His Holiness the
|
||||
Gyalwa Karmapa is strictly forbidden until said controversy has been
|
||||
officially resolved. This is also applicable to His Holiness the Dalai
|
||||
Lama, the Venerable Shamar Rinpoche, and Tai Situ Rinpoche. If you
|
||||
have any question or if you are unsure, please [email me](/contact).
|
||||
51
content/en/languages.md
Normal file
51
content/en/languages.md
Normal file
@@ -0,0 +1,51 @@
|
||||
---
|
||||
title: Languages & Worldbuilding
|
||||
description: Constructed languages (conlangs) and worldbuilding projects.
|
||||
---
|
||||
|
||||
# Languages and Worldbuilding
|
||||
|
||||
_Conlangs_, short for _constructed languages_, are artificial
|
||||
languages created by individuals rather than evolving naturally over
|
||||
time. They serve various purposes: international auxiliary languages
|
||||
like Esperanto, philosophical experiments like Lojban, fictional
|
||||
world-building like Tolkien's Elvish languages or Klingon, or simply
|
||||
artistic expression.
|
||||
|
||||
I have been creating constructed languages and worlds, both for fun
|
||||
and for literary projects. My conlanging work is documented on my
|
||||
[dedicated conlanging website](https://conlang.phundrak.com/).
|
||||
|
||||
## Eittlandic
|
||||
|
||||
Eittlandic is the language of **Eittland**, a fictional Nordic
|
||||
country, born from the question: _what if there was another island
|
||||
like Iceland that never got Christianised?_
|
||||
|
||||
The language derives from Old Norse and has been evolved
|
||||
naturalistically towards specific artistic goals. In Eittlandic, the
|
||||
country's name is pronounced /ɑɪʔlɑ̃d/.
|
||||
|
||||
This project has been a personal artistic endeavour since 2018,
|
||||
focusing on worldbuilding and conlanging. More details are available
|
||||
on the [Eittland wiki](https://wiki.phundrak.com/s/eittland) and the
|
||||
[Eittlandic language
|
||||
documentation](https://conlang.phundrak.com/eittlandic).
|
||||
|
||||
## Proto-Ñyqy
|
||||
|
||||
Proto-Ñyqy is the proto-language and mother language of the Ñyqy
|
||||
language family tree. The documentation is written as an in-universe
|
||||
document would be, meaning all cultural and historical references are
|
||||
entirely fictional. The writing style draws inspiration from academic
|
||||
linguistic work, particularly Benjamin W. Fortson's _Indo-European
|
||||
Language and Culture_. The [Proto-Ñyqy
|
||||
documentation](https://conlang.phundrak.com/proto-nyqy) is publicly
|
||||
available online.
|
||||
|
||||
## Zikãti
|
||||
|
||||
Zikãti is another conlanging project currently in development. This
|
||||
one draws more from a conlanging experiment than a real worldbuilding
|
||||
project. [Its documentation](https://conlang.phundrak.com/zik%C3%A3ti)
|
||||
is also publicly available.
|
||||
125
content/en/resume.json
Normal file
125
content/en/resume.json
Normal file
@@ -0,0 +1,125 @@
|
||||
{
|
||||
"experience": [
|
||||
{
|
||||
"date": "Since September 2023",
|
||||
"title": "Consultant – Aubay",
|
||||
"description": "Web development consultant working on enterprise applications. Continued focus on Angular front-end development and Java Spring Boot back-end services with PostgreSQL databases.",
|
||||
"tools": [
|
||||
{ "name": "Angular", "link": "https://angular.dev/" },
|
||||
{ "name": "TypeScript", "link": "https://www.typescriptlang.org/" },
|
||||
{ "name": "Java", "link": "https://www.java.com/" },
|
||||
{ "name": "Spring Boot", "link": "https://spring.io/projects/spring-boot" },
|
||||
{ "name": "Spring Batch", "link": "https://spring.io/projects/spring-batch" },
|
||||
{ "name": "PostgreSQL", "link": "https://www.postgresql.org/" },
|
||||
{ "name": "VS Code", "link": "https://code.visualstudio.com/" },
|
||||
{ "name": "Eclipse", "link": "https://www.eclipse.org/" },
|
||||
{ "name": "IntelliJ Idea", "link": "https://www.jetbrains.com/idea/" },
|
||||
{ "name": "Git", "link": "https://git-scm.com/" }
|
||||
],
|
||||
"icon": "mdi:laptop"
|
||||
},
|
||||
{
|
||||
"date": "February 2023 – August 2023",
|
||||
"title": "Intern – Aubay",
|
||||
"description": "Web application development internship focused on full-stack development. Worked on projects using Angular for front-end and Java Spring Boot for back-end, with PostgreSQL databases.",
|
||||
"tools": [
|
||||
{ "name": "Angular", "link": "https://angular.dev/" },
|
||||
{ "name": "TypeScript", "link": "https://www.typescriptlang.org/" },
|
||||
{ "name": "Java", "link": "https://www.java.com/" },
|
||||
{ "name": "Spring Boot", "link": "https://spring.io/projects/spring-boot" },
|
||||
{ "name": "PostgreSQL", "link": "https://www.postgresql.org/" },
|
||||
{ "name": "VS Code", "link": "https://code.visualstudio.com/" },
|
||||
{ "name": "Eclipse", "link": "https://www.eclipse.org/" },
|
||||
{ "name": "Git", "link": "https://git-scm.com/" }
|
||||
],
|
||||
"icon": "mdi:book"
|
||||
},
|
||||
{
|
||||
"date": "October 2014 – July 2018",
|
||||
"title": "CTO – Voxwave",
|
||||
"description": "Co-founded a startup specialized in creating French virtual singers using vocal synthesis. Developed singing synthesis vocal libraries, conducted linguistic research, provided user support, and trained recruits in vocal library development. Led technical development of ALYS, the first professional French singing voice library.",
|
||||
"tools": [
|
||||
{ "name": "Alter/Ego", "link": "https://www.plogue.com/products/alter-ego.html" },
|
||||
{ "name": "UTAU", "link": "http://utau2008.xrea.jp/" },
|
||||
{ "name": "FL Studio", "link": "https://www.image-line.com/" },
|
||||
{ "name": "iZotope RX", "link": "https://www.izotope.com/en/products/rx.html" },
|
||||
{ "name": "T-RackS CS", "link": "https://www.ikmultimedia.com/products/tr6/" }
|
||||
],
|
||||
"icon": "mdi:waveform"
|
||||
}
|
||||
],
|
||||
"education": [
|
||||
{
|
||||
"date": "September 2022 – September 2023",
|
||||
"title": "Master's Degree in Hypermedia Technologies – University of Paris 8",
|
||||
"description": "Obtained Master's degree in THYP (Hypermedia Technologies) on 11 September 2023. Repeated the year for health reasons without any lasting effects.",
|
||||
"icon": "mdi:network"
|
||||
},
|
||||
{
|
||||
"date": "September 2020 – September 2021",
|
||||
"title": "Master's Degree in Computer Science – University of Paris 8",
|
||||
"description": "First year of my Master’s degree.",
|
||||
"icon": "mdi:code-tags"
|
||||
},
|
||||
{
|
||||
"date": "September 2016 – July 2019",
|
||||
"title": "Bachelor's Degree in Computer Science – University of Paris 8",
|
||||
"description": "Bachelor's degree in Computer Science obtained in July 2019",
|
||||
"icon": "mdi:school-outline"
|
||||
},
|
||||
{
|
||||
"date": "September 2013 – December 2014",
|
||||
"title": "English Literature – Université Lyon 2",
|
||||
"description": "One and a half years of literary English studies in an LLCE English degree. Studies interrupted following the creation of VoxWave.",
|
||||
"icon": "mdi:book-open-page-variant"
|
||||
}
|
||||
],
|
||||
"otherTools": [
|
||||
{ "name": "Emacs", "link": "https://www.gnu.org/software/emacs/" },
|
||||
{ "name": "vim", "link": "https://www.vim.org/" },
|
||||
{ "name": "VS Code", "link": "https://code.visualstudio.com/" },
|
||||
{ "name": "Eclipse", "link": "https://www.eclipse.org/" },
|
||||
{ "name": "IntelliJ Idea", "link": "https://www.jetbrains.com/idea/" },
|
||||
{ "name": "jj", "link": "https://docs.jj-vcs.dev/latest/" },
|
||||
{ "name": "Git", "link": "https://git-scm.com/" },
|
||||
{ "name": "PostgreSQL", "link": "https://www.postgresql.org/" },
|
||||
{ "name": "SQLite", "link": "https://sqlite.org/index.html" }
|
||||
],
|
||||
"devops": [
|
||||
{ "name": "GitHub", "link": "https://github.com" },
|
||||
{ "name": "Gitlab", "link": "https://gitlab.com" },
|
||||
{ "name": "Gitea", "link": "https://about.gitea.com/" },
|
||||
{ "name": "GitHub Actions", "link": "https://docs.github.com/en/actions" },
|
||||
{ "name": "Drone.io", "link": "https://www.drone.io/" },
|
||||
{ "name": "Docker", "link": "https://www.docker.com/" },
|
||||
{ "name": "Podman", "link": "https://podman.io/" }
|
||||
],
|
||||
"os": [
|
||||
{ "name": "NixOS", "link": "https://nixos.org/" },
|
||||
{ "name": "Debian", "link": "https://www.debian.org/" },
|
||||
{ "name": "Arch Linux", "link": "https://archlinux.org/" },
|
||||
{ "name": "Void Linux", "link": "https://voidlinux.org/" },
|
||||
{ "name": "Alpine Linux", "link": "https://www.alpinelinux.org/" },
|
||||
{ "name": "Windows", "link": "https://support.microsoft.com/en-us/welcometowindows" }
|
||||
],
|
||||
"programmingLanguages": [
|
||||
{ "name": "TypeScript", "link": "https://www.typescriptlang.org/" },
|
||||
{ "name": "Java", "link": "https://www.java.com/" },
|
||||
{ "name": "Rust", "link": "https://rust-lang.org/" },
|
||||
{ "name": "C", "link": "https://www.c-language.org/" },
|
||||
{ "name": "EmacsLisp", "link": "https://www.gnu.org/software/emacs/manual/html_node/eintr/index.html" },
|
||||
{ "name": "Bash", "link": "https://www.gnu.org/software/bash/" },
|
||||
{ "name": "Zsh", "link": "https://www.zsh.org/" },
|
||||
{ "name": "C++", "link": "https://isocpp.org/" },
|
||||
{ "name": "Python", "link": "https://www.python.org/" },
|
||||
{ "name": "CommonLisp", "link": "https://lisp-lang.org/" }
|
||||
],
|
||||
"frameworks": [
|
||||
{ "name": "Angular", "link": "https://angular.dev/" },
|
||||
{ "name": "Vue", "link": "https://vuejs.org/" },
|
||||
{ "name": "Nuxt", "link": "https://nuxt.com/" },
|
||||
{ "name": "Spring Boot", "link": "https://spring.io/projects/spring-boot" },
|
||||
{ "name": "Poem (Rust)", "link": "https://github.com/poem-web/poem" },
|
||||
{ "name": "Loco.rs", "link": "https://loco.rs/" }
|
||||
]
|
||||
}
|
||||
@@ -31,5 +31,15 @@
|
||||
"link": "https://alys.phundrak.com/en/faq#are-there-any-plans-for-leora"
|
||||
}
|
||||
],
|
||||
"tools": ["Alter/Ego", "UTAU", "VOCALOID", "ChipSpeech", "FL Studio", "Audacity", "iZotope RX", "T-RackS CS", "C++"]
|
||||
"tools": [
|
||||
{ "name": "Alter/Ego", "link": "https://www.plogue.com/products/alter-ego.html" },
|
||||
{ "name": "UTAU", "link": "http://utau2008.xrea.jp/" },
|
||||
{ "name": "VOCALOID", "link": "https://www.vocaloid.com/en/" },
|
||||
{ "name": "ChipSpeech", "link": "https://plogue.com/products/chipspeech.html" },
|
||||
{ "name": "FL Studio", "link": "https://www.image-line.com/" },
|
||||
{ "name": "Audacity", "link": "https://www.audacityteam.org/" },
|
||||
{ "name": "iZotope RX", "link": "https://www.izotope.com/en/products/rx.html" },
|
||||
{ "name": "T-RackS CS", "link": "https://www.ikmultimedia.com/products/tr6/" },
|
||||
{ "name": "C++", "link": "https://isocpp.org/" }
|
||||
]
|
||||
}
|
||||
184
content/fr/keine-tashi.md
Normal file
184
content/fr/keine-tashi.md
Normal file
@@ -0,0 +1,184 @@
|
||||
---
|
||||
title: BSUP01 Keine Tashi
|
||||
---
|
||||
|
||||
# BSUP01 Keine Tashi
|
||||
|
||||
## Présentation
|
||||
|
||||
Keine Tashi est un personnage et le nom d’une collection de banques
|
||||
vocales développées pour le logiciel
|
||||
[UTAU](http://utau2008.web.fc2.com/), un logiciel de synthèse de voix
|
||||
pour le chant. J’ai développé Keine Tashi de 2012 à 2015 et publiai
|
||||
trois de ses banques vocales. Celle ayant rencontre le plus de succès
|
||||
fut sa banque vocale _JPN Extend Power_. Le 10 mars 2017, j’annonçai
|
||||
arrêter toutes activités liées à UTAU.
|
||||
|
||||
<blockquote class="twitter-tweet" data-dnt="true" data-theme="dark">
|
||||
<p lang="en" dir="ltr">
|
||||
I'd like to also announce that from now on I
|
||||
am dropping my previous UTAU projects other than covers and won't
|
||||
develop any new UTAU library
|
||||
</p>
|
||||
— P'undrak (@Phundrak) <a href="https://twitter.com/Phundrak/status/840174634377105408?ref_src=twsrc%5Etfw">March 10, 2017</a>
|
||||
</blockquote>
|
||||
<component is="script" async src="https://platform.twitter.com/widgets.js" charset="utf-8"></component>
|
||||
|
||||
## Personnage et banques vocales
|
||||
|
||||
Voici une traduction en français des informations ayant trait à Keine
|
||||
Tashi sur d’anciennes pages le présentant.
|
||||
|
||||
### Présentation
|
||||
|
||||
{class="small-img"}
|
||||
|
||||
- **Nom de code**: BSUP01 恵音བཀྲ་ཤིས་ Keine Tashi
|
||||
- **Prénom**: Tashi (བཀྲ་ཤིས་), prénom tibétain signifiant « auspicieux »
|
||||
- **Nom**: Keine (恵音), nom japonais signifiant « son bénissant ». Le
|
||||
nom se lit « keine » bien que sa lecture normale devrait être
|
||||
« megumine ».
|
||||
- **Modèle**: BSUP (Bödkay Shetang UTAU Project, _Projet UTAU de Chant
|
||||
Tibétain_)
|
||||
- **Numéro**: 01
|
||||
- **Sexe**: homme
|
||||
- **Anniversaire (personnage)**: 28 juin 1998
|
||||
- **Première publication**: 14 octobre 2012
|
||||
- **Poids**: 154 lb / 70 kg
|
||||
- **Taille**: 182 cm
|
||||
- **Couleur de cheveux**: noir
|
||||
- **Couleur des yeux**: entre le marron et le noir
|
||||
- **Apparence**: Tashi porte une version modernisée d’un habit tibétain
|
||||
traditionnel de la région de l’Amdo (Chinois : 安多 Ānduō) coloré en
|
||||
bleu. Il porte également quelques bijoux de turquoise.
|
||||
- **Nourriture préférée**: momo à la viande (raviolis tibétains)
|
||||
- **Objet signature**: un manuscrit tibétain
|
||||
- **Voix et créateur**: [Phundrak](https://phundrak.com) (moi)
|
||||
- **Aime**: méditer, la calligraphie, les vieux livres et manuscrits
|
||||
(en gros, moi à l’époque où je créai ce personnage)
|
||||
- **N’aime pas**: l’égoïsme, les mensonges, l’arrogance
|
||||
- **Personnalité**: Tashi est quelqu’un de très calme et d’agréable. Il
|
||||
adore les vieux livres et manuscrits, mais ce qu’il aime par-dessus
|
||||
tout est la méditation. Il n’a jamais faim, ce qui fait qu’il peut
|
||||
rester pendant plusieurs jours à méditer si l’envie le prend,
|
||||
jusqu’au moment où il réalise qu’il a _besoin_ de manger. Il est très
|
||||
difficile de le mettre en colère.
|
||||
|
||||
Mais quand il le devient, sa colère devient explosive. Le calmer
|
||||
devient alors une tâche extrêmement complexe. Étrangement, les
|
||||
victimes de son courroux voient peu de temps après leur qualité de
|
||||
vie grandement s’améliorer. Peut-être ces personnes avaient besoin
|
||||
d’entendre des réalités auxquelles elles refusaient de faire face ?
|
||||
|
||||
### Banques vocales
|
||||
|
||||
#### JPN VCV
|
||||
|
||||
- **Lien de téléchargement**:
|
||||
| Extension | Taille | Lien |
|
||||
|-----------|----------|-----------------------------------------------------------------------------------|
|
||||
| 7z | 25.7 MiB | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.7z) |
|
||||
| tar.xz | 32.5 MiB | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.tar.xz) |
|
||||
| zip | 38.0 MiB | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.zip) |
|
||||
- **Taille décompressée**: 47.1Mio
|
||||
- **Nombre de phonèmes**: 1264 (253 fichiers audio)
|
||||
- **Note moyenne**: G#2
|
||||
- **Plage vocale**: C2~D3
|
||||
- **Présence de fichiers FRQ**: partiel
|
||||
- **Date de publication**: 14 octobre 2012
|
||||
- **Encodage des phonèmes**: Romaji avec des alias hiragana et un
|
||||
support CV en romaji
|
||||
- **Langues supportées**: Japonais
|
||||
- **Moteurs de synthèse recommandés**: TIPS, VS4U
|
||||
|
||||
#### JPN Extend Power
|
||||
|
||||
- **Lien de téléchargement**:
|
||||
| Extension | Taille | Lien |
|
||||
|-----------|--------|--------------------------------------------------------------------------------------------|
|
||||
| 7z | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.7z) |
|
||||
| tar.xz | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.tar.xz) |
|
||||
| zip | 1.2Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.zip) |
|
||||
- **Taille décompressée**: 1.3Gio
|
||||
- **Nombre de phonèmes**: 3020 (546 fichiers audio)
|
||||
- **Note moyenne**: C3
|
||||
- **Plage vocale**: B1~D4
|
||||
- **Présence de fichiers FRQ**: partiel
|
||||
- **Date de publication**: 28 juin 2013
|
||||
- **Encodage des phonèmes**: Romaji (alias hiragana)
|
||||
- **Langues supportées**: Japonais
|
||||
- **Moteurs de synthèse recommandés**: VS4U, world4utau
|
||||
|
||||
#### JPN Extend Youth
|
||||
|
||||
- **Lien de téléchargement**:
|
||||
| Extension | Taille | Lien |
|
||||
|-----------|----------|--------------------------------------------------------------------------------------------|
|
||||
| 7z | 237.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.7z) |
|
||||
| tar.xz | 243.5Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.tar.xz) |
|
||||
| zip | 268.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.zip) |
|
||||
- **Taille décompressée**: 301.1Mio
|
||||
- **Nombre de phonèmes**: 1954 (182 fichiers audio)
|
||||
- **Note moyenne**: C4
|
||||
- **Plage vocale**: F#3~A#4
|
||||
- **Présence de fichiers FRQ**: partiel
|
||||
- **Date de publication**: 28 juin 2013
|
||||
- **Encodage des phonèmes**: Romaji (alias hiragana)
|
||||
- **Langues supportées**: Japonais
|
||||
- **Moteurs de synthèse recommandés**: fresamp, VS4U, world4utau
|
||||
|
||||
#### JPN Extend Native
|
||||
|
||||
- **Status**: abandonné
|
||||
|
||||
{class="small-img"}
|
||||
|
||||
#### TIB CVVC
|
||||
|
||||
- **Status**: abandonné
|
||||
|
||||
#### ENG
|
||||
|
||||
- **Status**: abandonné
|
||||
|
||||
# Licence d’utilisation
|
||||
|
||||
Keine Tashi est publié sous la licence [CC BY-NC-SA
|
||||
4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/). Cela
|
||||
signifie que vous êtes libres :
|
||||
|
||||
- **d’utiliser**: utiliser les banques vocales dans UTAU ou tout autre
|
||||
logiciel ;
|
||||
- **de partager**: copier, distribuer et communiquer le matériel par
|
||||
tous moyens et sous tous formats ;
|
||||
- **d’adapter**: remixer, transformer et créer à partir du matériel ;
|
||||
|
||||
Selon les conditions suivantes :
|
||||
|
||||
- **Attribution**: Vous devez me créditer lors de l’utilisation de
|
||||
Tashi, intégrer un lien vers la licence et indiquer si des
|
||||
modifications ont été effectuées. Vous devez indiquer ces
|
||||
informations par tous les moyens raisonnables, sans toutefois
|
||||
suggérer que je vous soutienne ou que je soutienne la façon dont
|
||||
vous utilisez Tashi ;
|
||||
- **Pas d’Utilisation Commerciale**: Vous n’êtes pas autorisé à faire
|
||||
un usage commercial de Tashi, tout ou partie du matériel le
|
||||
composant ;
|
||||
- **Partage dans les Mêmes Conditions**: Dans le cas où vous effectuez
|
||||
un remix, que vous transformez ou créez à partir du matériel
|
||||
composant Tashi, vous devez le diffuser modifié dans les mêmes
|
||||
conditions, c'est-à-dire avec la même licence avec laquelle Tashi
|
||||
est diffusé ici.
|
||||
|
||||
Bien que je ne puisse pas ajouter d’éléments à cette licence légale,
|
||||
je souhaiterais ajouter une requête personnelle : merci de ne pas
|
||||
créer de chansons à caractère religieux, à l’exception des chansons
|
||||
tibétaines bouddhistes ou bön. Cependant, du fait de la controverse
|
||||
actuelle concernant l’identité de Sa Sainteté le Gyalwa Karmapa, toute
|
||||
chanson liée à sa personne est également interdite jusqu’à résolution
|
||||
officielle de la situation. Cette interdiction est également
|
||||
applicable à Sa Sainteté le Dalaï Lama, au Vénérable Shamar Rinpoché
|
||||
et Tai Situ Rinpoche. Si vous avez la moindre question, n’hésitez pas
|
||||
à m’[envoyer un email](/contact).
|
||||
|
||||
#+include: other-links
|
||||
58
content/fr/languages.md
Normal file
58
content/fr/languages.md
Normal file
@@ -0,0 +1,58 @@
|
||||
---
|
||||
title: Langues et création d'univers
|
||||
description: Langues construites (conlangs) et projets de création d'univers.
|
||||
---
|
||||
|
||||
# Langues et création d'univers
|
||||
|
||||
Les _idéolangues_, ou _langues construites_, sont des langues
|
||||
artificielles créées par des individus plutôt que d'avoir évolué
|
||||
naturellement au fil du temps. Elles servent à diverses fins : langues
|
||||
auxiliaires internationales comme l'Espéranto, expériences
|
||||
philosophiques comme le Lojban, création de mondes fictifs comme les
|
||||
langues elfiques de Tolkien ou le Klingon, ou simplement expression
|
||||
artistique.
|
||||
|
||||
Je crée des langues et des mondes construits, à la fois pour le
|
||||
plaisir et pour des projets littéraires. Mon travail sur les langues
|
||||
construites est documenté sur mon [site web dédié à mes langues
|
||||
construites](https://conlang.phundrak.com/).
|
||||
|
||||
## L'Éittlandais
|
||||
|
||||
L'Éittlandais est la langue de l'**Éittlande**, un pays nordique
|
||||
fictif, né de la question suivante : _et s'il existait une autre île
|
||||
comme l'Islande qui n'ait jamais été christianisée ?_
|
||||
|
||||
La langue dérive du vieux norrois et a évolué de manière naturaliste
|
||||
vers des objectifs artistiques spécifiques. En eittlandais, le nom du
|
||||
pays se prononce /ɑɪʔlɑ̃d/.
|
||||
|
||||
Ce projet est un projet artistique personnel depuis 2018, axé sur la
|
||||
création d'univers et la construction de langues artificielles. Plus
|
||||
de détails sont disponibles sur le [wiki
|
||||
Eittland](https://wiki.phundrak.com/s/eittland) et dans la
|
||||
[documentation sur la langue
|
||||
eittlandaise](https://conlang.phundrak.com/eittlandic).
|
||||
|
||||
## Proto-Ñyqy
|
||||
|
||||
Le Proto-Ñyqy est la proto-langue et la langue mère de l'arbre
|
||||
généalogique de la famille linguistique Ñyqy. La documentation est
|
||||
rédigée comme le serait un document interne à l'univers, ce qui
|
||||
signifie que toutes les références culturelles et historiques sont
|
||||
entièrement fictives. Le style d'écriture s'inspire des travaux
|
||||
linguistiques universitaires, en particulier de l'ouvrage
|
||||
_Indo-European Language and Culture_ de Benjamin W. Fortson. La
|
||||
[documentation sur le
|
||||
Proto-Ñyqy](https://conlang.phundrak.com/proto-nyqy) est accessible en
|
||||
ligne.
|
||||
|
||||
## Zikãti
|
||||
|
||||
Le Zikãti est un autre projet de langue artificielle actuellement en
|
||||
cours de développement. Celui-ci s'inspire davantage d'une expérience
|
||||
de création de langue artificielle que d'un véritable projet de
|
||||
construction d'univers. [Sa
|
||||
documentation](https://conlang.phundrak.com/zik%C3%A3ti) est également
|
||||
accessible en ligne.
|
||||
124
content/fr/resume.json
Normal file
124
content/fr/resume.json
Normal file
@@ -0,0 +1,124 @@
|
||||
{
|
||||
"experience": [
|
||||
{
|
||||
"date": "Depuis septembre 2023",
|
||||
"title": "Consultant – Aubay",
|
||||
"description": "Consultant en développement web travaillant sur des applications d'entreprise. Je continue à me concentrer sur le développement front-end Angular et les services back-end Java Spring Boot avec des bases de données PostgreSQL.",
|
||||
"tools": [
|
||||
{ "name": "Angular", "link": "https://angular.dev/" },
|
||||
{ "name": "TypeScript", "link": "https://www.typescriptlang.org/" },
|
||||
{ "name": "Java", "link": "https://www.java.com/" },
|
||||
{ "name": "Spring Boot", "link": "https://spring.io/projects/spring-boot" },
|
||||
{ "name": "Spring Batch", "link": "https://spring.io/projects/spring-batch" },
|
||||
{ "name": "PostgreSQL", "link": "https://www.postgresql.org/" },
|
||||
{ "name": "VS Code", "link": "https://code.visualstudio.com/" },
|
||||
{ "name": "Eclipse", "link": "https://www.eclipse.org/" },
|
||||
{ "name": "IntelliJ Idea", "link": "https://www.jetbrains.com/idea/" },
|
||||
{ "name": "Git", "link": "https://git-scm.com/" }
|
||||
],
|
||||
"icon": "mdi:laptop"
|
||||
},
|
||||
{
|
||||
"date": "Février 2023 – Août 2023",
|
||||
"title": "Stagiaire – Aubay",
|
||||
"description": "Stage en développement d'applications web axé sur le développement full-stack. J'ai travaillé sur des projets utilisant Angular pour le front-end et Java Spring Boot pour le back-end, avec des bases de données PostgreSQL.",
|
||||
"tools": [
|
||||
{ "name": "Angular", "link": "https://angular.dev/" },
|
||||
{ "name": "TypeScript", "link": "https://www.typescriptlang.org/" },
|
||||
{ "name": "Java", "link": "https://www.java.com/" },
|
||||
{ "name": "Spring Boot", "link": "https://spring.io/projects/spring-boot" },
|
||||
{ "name": "PostgreSQL", "link": "https://www.postgresql.org/" },
|
||||
{ "name": "VS Code", "link": "https://code.visualstudio.com/" },
|
||||
{ "name": "Eclipse", "link": "https://www.eclipse.org/" },
|
||||
{ "name": "Git", "link": "https://git-scm.com/" }
|
||||
],
|
||||
"icon": "mdi:book"
|
||||
},
|
||||
{
|
||||
"date": "Octobre 2014 – Juillet 2018",
|
||||
"title": "Directeur technique – Voxwave",
|
||||
"description": "Co-fondateur d'une start-up spécialisée dans la création de chanteurs virtuels français à l'aide de la synthèse vocale. Développement de banques vocales de synthèse chantée, recherche linguistique, assistance aux utilisateurs et formation des recrues au développement de banques vocales. Direction du développement technique d'ALYS, la première banque vocale professionnelle de chant en français.",
|
||||
"tools": [
|
||||
{ "name": "Alter/Ego", "link": "https://www.plogue.com/products/alter-ego.html" },
|
||||
{ "name": "UTAU", "link": "http://utau2008.xrea.jp/" },
|
||||
{ "name": "FL Studio", "link": "https://www.image-line.com/" },
|
||||
{ "name": "iZotope RX", "link": "https://www.izotope.com/en/products/rx.html" },
|
||||
{ "name": "T-RackS CS", "link": "https://www.ikmultimedia.com/products/tr6/" }
|
||||
],
|
||||
"icon": "mdi:waveform"
|
||||
}
|
||||
],
|
||||
"education": [
|
||||
{
|
||||
"date": "Septembre 2022 – Septembre 2023",
|
||||
"title": "Master 2 Technologies de l’Hypermédia – Université Paris 8",
|
||||
"description": "Obtention du diplôme Master 2 THYP le 11 septembre 2023. Redoublement pour causes de santé sans séquelles.",
|
||||
"icon": "mdi:network"
|
||||
},
|
||||
{
|
||||
"date": "Septembre 2020 – Septembre 2021",
|
||||
"title": "Master 1 Informatique – Université Paris 8",
|
||||
"description": "",
|
||||
"icon": "mdi:code-tags"
|
||||
},
|
||||
{
|
||||
"date": "Septembre 2016 – Juillet 2019",
|
||||
"title": "Licence Informatique – Université Paris 8",
|
||||
"description": "Licence d’Informatique obtenue en Juillet 2019",
|
||||
"icon": "mdi:school-outline"
|
||||
},
|
||||
{
|
||||
"date": "Septembre 2013 – Décembre 2014",
|
||||
"title": "Anglais LLCE – Université Lyon 2",
|
||||
"description": "Un an et demi d’études d’anglais littéraire en licence d’anglais LLCE. Études interrompues suite à la création de VoxWave.",
|
||||
"icon": "mdi:book-open-page-variant"
|
||||
}
|
||||
],
|
||||
"otherTools": [
|
||||
{ "name": "Emacs", "link": "https://www.gnu.org/software/emacs/" },
|
||||
{ "name": "vim", "link": "https://www.vim.org/" },
|
||||
{ "name": "VS Code", "link": "https://code.visualstudio.com/" },
|
||||
{ "name": "Eclipse", "link": "https://www.eclipse.org/" },
|
||||
{ "name": "IntelliJ Idea", "link": "https://www.jetbrains.com/idea/" },
|
||||
{ "name": "jj", "link": "https://docs.jj-vcs.dev/latest/" },
|
||||
{ "name": "Git", "link": "https://git-scm.com/" },
|
||||
{ "name": "PostgreSQL", "link": "https://www.postgresql.org/" },
|
||||
{ "name": "SQLite", "link": "https://sqlite.org/index.html" }
|
||||
],
|
||||
"devops": [
|
||||
{ "name": "GitHub", "link": "https://github.com" },
|
||||
{ "name": "Gitlab", "link": "https://gitlab.com" },
|
||||
{ "name": "Gitea", "link": "https://about.gitea.com/" },
|
||||
{ "name": "GitHub Actions", "link": "https://docs.github.com/en/actions" },
|
||||
{ "name": "Drone.io", "link": "https://www.drone.io/" },
|
||||
{ "name": "Docker", "link": "https://www.docker.com/" },
|
||||
{ "name": "Podman", "link": "https://podman.io/" }
|
||||
],
|
||||
"os": [
|
||||
{ "name": "NixOS", "link": "https://nixos.org/" },
|
||||
{ "name": "Debian", "link": "https://www.debian.org/" },
|
||||
{ "name": "Arch Linux", "link": "https://archlinux.org/" },
|
||||
{ "name": "Void Linux", "link": "https://voidlinux.org/" },
|
||||
{ "name": "Alpine Linux", "link": "https://www.alpinelinux.org/" },
|
||||
{ "name": "Windows", "link": "https://support.microsoft.com/en-us/welcometowindows" }
|
||||
],
|
||||
"programmingLanguages": [
|
||||
{ "name": "TypeScript", "link": "https://www.typescriptlang.org/" },
|
||||
{ "name": "Rust", "link": "https://rust-lang.org/" },
|
||||
{ "name": "C", "link": "https://www.c-language.org/" },
|
||||
{ "name": "EmacsLisp", "link": "https://www.gnu.org/software/emacs/manual/html_node/eintr/index.html" },
|
||||
{ "name": "Bash", "link": "https://www.gnu.org/software/bash/" },
|
||||
{ "name": "Zsh", "link": "https://www.zsh.org/" },
|
||||
{ "name": "C++", "link": "https://isocpp.org/" },
|
||||
{ "name": "Python", "link": "https://www.python.org/" },
|
||||
{ "name": "CommonLisp", "link": "https://lisp-lang.org/" }
|
||||
],
|
||||
"frameworks": [
|
||||
{ "name": "Angular", "link": "https://angular.dev/" },
|
||||
{ "name": "Vue", "link": "https://vuejs.org/" },
|
||||
{ "name": "Nuxt", "link": "https://nuxt.com/" },
|
||||
{ "name": "Spring Boot", "link": "https://spring.io/projects/spring-boot" },
|
||||
{ "name": "Poem (Rust)", "link": "https://github.com/poem-web/poem" },
|
||||
{ "name": "Loco.rs", "link": "https://loco.rs/" }
|
||||
]
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user