Compare commits

..

73 Commits

Author SHA1 Message Date
9f1d4db0de feat: fill pages
All checks were successful
Publish Docker Images / build-and-publish (push) Successful in 9m29s
2025-11-13 22:13:40 +01:00
3f828a754b feat(frontend): main page 2025-11-11 19:12:03 +01:00
89c7588883 feat(backend): build backend with Nix and add CI 2025-11-06 09:27:39 +01:00
e2b975fa12 feat(backend): redact email password in logs 2025-11-06 09:27:39 +01:00
def25632d1 feat(backend): add rate limiting to the backend’s API 2025-11-06 09:27:39 +01:00
d0642d031b feat(backend): relay contact requests to SMTP server 2025-11-06 09:27:39 +01:00
007c3d1c18 feat: initialization migration to Nuxt + Backend
This commit initializes both the Nuxt frontend and the Rust backend of
the new version of phundrak.com
2025-11-06 09:27:39 +01:00
cc62d0bb95 chore: deploy to Cloudflare Pages 2025-02-09 12:11:44 +01:00
727ec58600 chore: update vuepress and plugins 2025-02-09 11:57:54 +01:00
333b7a7562 docs: update mastodon link
All checks were successful
deploy / deploy (push) Successful in 3m28s
2024-11-17 14:29:45 +01:00
baf999ea1c docs: update resume
All checks were successful
deploy / deploy (push) Successful in 5m23s
2024-10-01 23:20:02 +02:00
9a92f57986 chore: update metadata 2024-07-09 20:45:36 +02:00
c8ce7ca6da chore(package.json): remove unused packages
All checks were successful
deploy / deploy (push) Successful in 1m30s
2024-06-20 09:33:58 +02:00
d54aabd621 refactor: rework API loader and caching
All checks were successful
deploy / deploy (push) Successful in 2m24s
This commit removes dependency on rxjs.

It also implements better composables to handle data fetching from
remote APIs and caching these values more transparently.

This commit also switches from yarn to npm

It also switches to the official Umami plugin
2024-06-20 09:27:59 +02:00
24d558e0f5 test
All checks were successful
deploy / deploy (push) Successful in 2m51s
2024-02-26 07:38:38 +01:00
bc36bdec90 feat(umami): switch to dedicated Vuepress plugin
All checks were successful
deploy / deploy (push) Successful in 3m24s
2024-02-26 06:51:54 +01:00
1b54860f93 docs(find-me): Update Discord handle
Update content/en/find-me.org
Update content/find-me.org
Update content/lfn/find-me.org
2024-02-26 06:51:27 +01:00
37f9b36b2f feat,docs: add Umami to website, update privacy pages
All checks were successful
deploy / deploy (push) Successful in 2m1s
2024-01-27 18:25:44 +01:00
4b447369c2 chore: switch from Drone to Gitea Actions
All checks were successful
deploy / deploy (push) Successful in 2m8s
2024-01-27 16:55:30 +01:00
cf1147204c chore: update Vuepress, add search bar 2024-01-27 16:15:57 +01:00
1ff33bfd64 chore: update dependencies
All checks were successful
continuous-integration/drone/push Build is passing
2023-11-03 19:48:47 +01:00
2496bff82f fix: only subscribe once to observables 2023-11-03 19:48:47 +01:00
6a37029e55 docs: update index and privacy 2023-11-03 19:48:47 +01:00
2f11f5a477 docs: update index in English and French 2023-11-03 19:48:46 +01:00
b84d320908 docs: update content/lfn/index.org 2023-11-03 19:48:40 +01:00
48ec3f49a0 docs(README): update Emacs version on badge 2023-11-03 19:48:40 +01:00
0fa7aa7a97 docs(resume): update resume 2023-11-03 19:48:40 +01:00
236e28c14e docs(vocal synth): update ALYS links 2023-11-03 19:48:39 +01:00
ec625ce8a5 Revert "chore: update mastodon account"
This reverts commit 5d88c1855a.
2023-11-03 19:48:39 +01:00
4ae4912268 chore: update mastodon account 2023-11-03 19:48:39 +01:00
7d09cfa8ef docs: update webfinger 2023-11-03 19:48:39 +01:00
fc863eab30 docs: fixed incorrect link to image 2023-11-03 19:48:39 +01:00
1a5565a953 docs: update readme 2023-11-03 19:48:39 +01:00
c1f6a4b110 chore: add two static files
The webfinger is my general Mastodon alias.

I use sometimes the CSS files in my emails to stylize them a bit.
2023-11-03 19:48:39 +01:00
4f0aee4c62 feat: implement Gemini export and deployment 2023-11-03 19:48:39 +01:00
23bbcfabe3 docs: update pinned GitHub repositories 2023-11-03 19:48:39 +01:00
c19d847686 chore: add gitattributes file for orgmode detection in languages of project 2023-11-03 19:48:39 +01:00
e9d890f4dc docs: update README instructions 2023-11-03 19:48:39 +01:00
6eb11a8e09 refactor: simplify code head generation 2023-11-03 19:48:39 +01:00
c0b7ed4984 chore: update CI 2023-11-03 19:48:34 +01:00
cc0a479550 docs: switch from Markdown to orgmode
BREAKING CHANGE: Emacs required to export orgmode files to Markdown
2023-05-11 00:19:47 +02:00
e4014d78d4 docs: fix English spelling
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 18:30:12 +02:00
89d2e1b9b3 feat: display API errors
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 18:04:30 +02:00
c026ed4c6f docs: update projects page
All checks were successful
continuous-integration/drone/push Build is passing
Remove linguistics projects since it would be a duplicate of the already existing conlanging page
2023-05-08 17:39:44 +02:00
e1180f6227 docs: update find me page 2023-05-08 17:37:29 +02:00
9ee9b2e4c2 docs: update about page 2023-05-08 17:37:29 +02:00
40b88ee4fd feat: change order of pages, set about page to second to last 2023-05-08 17:37:29 +02:00
8b5ce594c5 chore: update fonts 2023-05-08 17:37:20 +02:00
937e618a7c docs: update resume
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 16:35:33 +02:00
1885072da2 feat: better repository card
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 15:57:45 +02:00
1e738b51b7 docs: better Elefen translation 2023-05-08 15:57:14 +02:00
893544a53c docs: add credits for icons
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 15:34:20 +02:00
5fbab2eefc feat: add custom icons to the website
This commit adds icons from various sources as a single font file to the website. Icons can be
inserted with the new Icon component.
2023-05-08 15:33:22 +02:00
9641f40f65 feat: fix missing privacy in sidebar
All checks were successful
continuous-integration/drone/push Build is passing
This commit not only fixes the missing privacy page in the sidebar when the website is in English or
Elefen, but it also simplifies listing all pages available on the website regardless of the
language.
2023-05-08 14:44:10 +02:00
158b61e57c docs: fix headings in project pages
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 13:22:18 +02:00
a025f39d5a feat: add link on repository cards to repos
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 13:16:58 +02:00
af8ae8aea2 docs: fix typos 2023-05-08 13:16:29 +02:00
a92580cdd3 docs: separate about and privacy pages 2023-05-08 13:16:10 +02:00
09971c0f24 docs: update projets pages 2023-05-08 12:48:56 +02:00
1e3e15ab4e feat: add possibility to list specific repositories
ListRepositories will now fetch repos with FetchRepositories only if no repositories are already
passed to the component in its default slot.
2023-05-08 12:48:56 +02:00
1678100198 feat: improve caching of individual repositories
This commit adds the possibility to provide to the caching component data already known and to be
cached immediately without the need of a callback function. This allows caching individual
repositories without having to rely on additional API calls. However, repos can also be retrieved
individually from the GitHub API based on their full name.
2023-05-08 11:19:41 +02:00
e0bcdb6dd3 refactor(cache): remove unused elements in Cache's script 2023-05-08 10:37:39 +02:00
85da82cd70 feat(cache): improve error message in console
No need to try and give the data found in localStorage, just say what went wrong and move on.
2023-05-08 10:36:19 +02:00
e02a336353 docs: update projects page to use new components
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 03:43:36 +02:00
4927ef369a fix: remove debug console.log
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 03:40:59 +02:00
620296d22f fix: also apply margin to loader
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 03:38:17 +02:00
fea30d5bea feat: nicer styling of repositories and repositories lists
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-08 03:35:28 +02:00
de2f11f8fe feat: add loader spinner to API loader 2023-05-08 03:34:48 +02:00
8cbd1dbf07 feat: remove unused PreviewImage component
All checks were successful
continuous-integration/drone/push Build is passing
This commit removes the PreviewImage component that hasn't been used for quite some time in the
codebase.

BREAKING CHANGE: deletion of PreviewImage component
2023-05-08 03:02:37 +02:00
0a1e9536cd feat: handle API calls and caching through Vue components
BREAKING CHANGE: API calls and cache no longer made in their respective composable
2023-05-08 03:01:17 +02:00
08825d870b feat: properly display GitHub repositories 2023-05-05 00:42:11 +02:00
aa82e265c8 fix: correctly update displayed repos after fetch
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-05 00:38:37 +02:00
28223c44d4 fix: fix website compilation
All checks were successful
continuous-integration/drone/push Build is passing
2023-05-05 00:26:34 +02:00
156 changed files with 21398 additions and 4995 deletions

1
.devenv-root Normal file
View File

@@ -0,0 +1 @@
/home/phundrak/code/web/phundrak.com

View File

@@ -1,121 +0,0 @@
---
kind: pipeline
type: docker
name: CD
steps:
- name: restore cache
image: drillster/drone-volume-cache
volumes:
- name: cache
path: /cache
settings:
restore: true
mount:
- ./node_modules
- name: generate
image: node:19-alpine
commands:
- yarn install
- yarn build
depends_on:
- "restore cache"
- name: rebuild cache
image: drillster/drone-volume-cache
volumes:
- name: cache
path: /cache
settings:
rebuild: true
mount:
- ./node_modules
depends_on:
- generate
- name: deploy stable
image: appleboy/drone-scp
settings:
host:
from_secret: ssh_host
target:
from_secret: ssh_target
source: content/.vuepress/dist/*
strip_components: 3
username:
from_secret: ssh_username
password:
from_secret: ssh_password
port:
from_secret: ssh_port
depends_on:
- generate
when:
branch:
- main
event:
exclude:
- pull_request
- name: purge cache stable
image: jetrails/drone-cloudflare-caching
settings:
api_token:
from_secret: cloudflare_cache_api
zone_identifier:
from_secret: phundrak_com_zone_id
action: purge_files
list:
- https://beta.phundrak.com
depends_on:
- "deploy stable"
when:
branch:
- main
event:
exclude:
- pull_request
- name: deploy devel
image: appleboy/drone-scp
settings:
host:
from_secret: ssh_host
target:
from_secret: ssh_target_devel
source: content/.vuepress/dist/*
strip_components: 3
username:
from_secret: ssh_username
password:
from_secret: ssh_password
port:
from_secret: ssh_port
depends_on:
- generate
when:
branch:
- devel
event:
exclude:
- pull_request
- name: purge cache devel
image: jetrails/drone-cloudflare-caching
settings:
api_token:
from_secret: cloudflare_cache_api
zone_identifier:
from_secret: phundrak_com_zone_id
action: purge_files
list:
- https://alpha.phundrak.com
depends_on:
- "deploy devel"
when:
branch:
- devel
event:
exclude:
- pull_request

View File

@@ -7,6 +7,10 @@ insert_final_newline = true
charset = utf-8 charset = utf-8
trim_trailing_whitespace = true trim_trailing_whitespace = true
[*.{rs, toml}]
indent_style = space
indent_size = 4
[*.{json,ts,css}] [*.{json,ts,css}]
indent_style = space indent_style = space
indent_size = 2 indent_size = 2

12
.env.example Normal file
View File

@@ -0,0 +1,12 @@
APP_ENVIRONMENT=dev
APP__EMAIL__HOST=mail.example.com
APP__EMAIL__PORT=465
APP__EMAIL__TLS=true
APP__EMAIL__STARTTLS=no
APP__EMAIL__USER="username"
APP__EMAIL__PASSWORD="changeme"
APP__EMAIL__RECIPIENT="Recipient <user@example.com>"
APP__EMAIL__FROM="Contact Form <noreply@example.com>"
NUXT_PUBLIC_BACKEND_URL=http://localhost:3100
NUXT_PUBLIC_TURNSTILE_SITE_KEY="changeme"
NUXT_TURNSTILE_SECRET_KEY="changeme"

54
.envrc Normal file
View File

@@ -0,0 +1,54 @@
#!/usr/bin/env bash
if ! has nix_direnv_version || ! nix_direnv_version 3.1.0; then
source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/3.1.0/direnvrc" "sha256-yMJ2OVMzrFaDPn7q8nCBZFRYpL/f0RcHzhmw/i6btJM="
fi
export DEVENV_IN_DIRENV_SHELL=true
# Load .env file if present
dotenv_if_exists
watch_file flake.nix
watch_file flake.lock
watch_file .envrc.local
watch_file backend/shell.nix
watch_file frontend/shell.nix
# Check if .envrc.local exists and contains a shell preference
if [[ -f .envrc.local ]]; then
source .envrc.local
fi
# If no shell is specified, prompt the user interactively
if [[ -z "$NIX_SHELL_NAME" ]]; then
echo ""
echo "🔧 Available development shells:"
echo " 1) frontend - Nuxt.js/Vue development environment"
echo " 2) backend - Rust backend development environment"
echo ""
echo "💡 Tip: Create a .envrc.local file with 'export NIX_SHELL_NAME=frontend' to skip this prompt"
echo ""
# Read user input
read -p "Select shell (1 or 2): " choice
case $choice in
1|frontend)
NIX_SHELL_NAME=frontend
;;
2|backend)
NIX_SHELL_NAME=backend
;;
*)
echo "❌ Invalid choice. Please select 1 or 2."
return 1
;;
esac
echo "✅ Loading ${NIX_SHELL_NAME} environment..."
fi
if ! use flake ".#${NIX_SHELL_NAME}" --no-pure-eval; then
echo "❌ devenv could not be built. The devenv environment was not loaded. Make the necessary changes to flake.nix and hit enter to try again." >&2
fi

1
.gitattributes vendored Normal file
View File

@@ -0,0 +1 @@
*.org linguist-detectable=true

217
.github/workflows/README.md vendored Normal file
View File

@@ -0,0 +1,217 @@
# GitHub Actions Workflows
## Docker Image Publishing
The `publish-docker.yml` workflow automatically builds and publishes Docker images for the backend service using Nix.
### Triggers and Tagging Strategy
| Event | Condition | Published Tags | Example |
|--------------+-----------------------------+------------------------+-------------------|
| Tag push | Tag pushed to `main` branch | `latest` + version tag | `latest`, `1.0.0` |
| Branch push | Push to `develop` branch | `develop` | `develop` |
| Pull request | PR opened or updated | `pr<number>` | `pr12` |
| Branch push | Push to `main` (no tag) | `latest` | `latest` |
### Required Secrets
Configure these secrets in your repository settings (`Settings` → `Secrets and variables` → `Actions`):
| Secret Name | Description | Example Value |
|---------------------+---------------------------------------------+-----------------------------------------|
| `DOCKER_USERNAME` | Username for Docker registry authentication | `phundrak` |
| `DOCKER_PASSWORD` | Password or token for Docker registry | Personal Access Token (PAT) or password |
| `CACHIX_AUTH_TOKEN` | (Optional) Token for Cachix caching | Your Cachix auth token |
#### For GitHub Container Registry (ghcr.io)
1. Create a Personal Access Token (PAT):
- Go to GitHub Settings → Developer settings → Personal access tokens → Tokens (classic)
- Click "Generate new token (classic)"
- Select scopes: `write:packages`, `read:packages`, `delete:packages`
- Copy the generated token
2. Add secrets:
- `DOCKER_USERNAME`: Your GitHub username
- `DOCKER_PASSWORD`: The PAT you just created
#### For Docker Hub
1. Create an access token:
- Go to Docker Hub → Account Settings → Security → Access Tokens
- Click "New Access Token"
- Set permissions to "Read, Write, Delete"
- Copy the generated token
2. Add secrets:
- `DOCKER_USERNAME`: Your Docker Hub username
- `DOCKER_PASSWORD`: The access token you just created
#### For Gitea Registry (e.g., labs.phundrak.com)
1. Create an access token in Gitea:
- Log in to your Gitea instance
- Go to Settings (click your avatar → Settings)
- Navigate to Applications → Manage Access Tokens
- Click "Generate New Token"
- Give it a descriptive name (e.g., "Phundrak Labs Docker Registry")
- Select the required permissions:
- `write:package` - Required to publish packages
- `read:package` - Required to pull packages
- Click "Generate Token"
- Copy the generated token immediately (it won't be shown again)
2. Add secrets:
- `DOCKER_USERNAME`: Your Gitea username
- `DOCKER_PASSWORD`: The access token you just created
Note: Gitea's container registry is accessed at `https://your-gitea-instance/username/-/packages`
#### For Other Custom Registries
1. Obtain credentials from your registry administrator
2. Add secrets:
- `DOCKER_USERNAME`: Your registry username
- `DOCKER_PASSWORD`: Your registry password or token
### Configuring Cachix (Build Caching)
Cachix is a Nix binary cache that dramatically speeds up builds by caching build artifacts. The workflow supports configurable Cachix settings.
#### Environment Variables
Configure these in the workflow's `env` section or as repository variables:
| Variable | Description | Default Value | Example |
|--------------------+------------------------------------------------+---------------+--------------------|
| `CACHIX_NAME` | Name of the Cachix cache to use | `devenv` | `phundrak-dot-com` |
| `CACHIX_SKIP_PUSH` | Whether to skip pushing artifacts to the cache | `true` | `false` |
#### Option 1: Pull from Public Cache Only
If you only want to pull from a public cache (no pushing):
1. Set environment variables in the workflow:
```yaml
env:
CACHIX_NAME: devenv # or any public cache name
CACHIX_SKIP_PUSH: true
```
2. No `CACHIX_AUTH_TOKEN` secret is needed
This is useful when using public caches like `devenv` or `nix-community`.
#### Option 2: Use Your Own Cache (Recommended for Faster Builds)
To cache your own build artifacts for faster subsequent builds:
1. Create a Cachix cache:
- Go to https://app.cachix.org
- Sign up and create a new cache (e.g., `your-project-name`)
- Free for public/open-source projects
2. Get your auth token:
- In Cachix, go to your cache settings
- Find your auth token under "Auth tokens"
- Copy the token
3. Add your cache configuration to `flake.nix`:
```nix
nixConfig = {
extra-trusted-public-keys = [
"devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw="
"your-cache-name.cachix.org-1:YOUR_PUBLIC_KEY_HERE"
];
extra-substituters = [
"https://devenv.cachix.org"
"https://your-cache-name.cachix.org"
];
};
```
4. Configure the workflow:
- Edit `.github/workflows/publish-docker.yml`:
```yaml
env:
CACHIX_NAME: your-cache-name
CACHIX_SKIP_PUSH: false
```
- Or set as repository variables in GitHub/Gitea
5. Add your auth token as a secret:
- Go to repository `Settings` → `Secrets and variables` → `Actions`
- Add secret `CACHIX_AUTH_TOKEN` with your token
#### Benefits of Using Your Own Cache
- **Faster builds**: Subsequent builds reuse cached artifacts (Rust dependencies, compiled binaries)
- **Reduced CI time**: Can reduce build time from 10+ minutes to under 1 minute
- **Cost savings**: Less compute time means lower CI costs
- **Shared across branches**: All branches benefit from the same cache
### Configuring the Docker Registry
The target registry is set via the `DOCKER_REGISTRY` environment variable in the workflow file. To change it:
1. Edit `.github/workflows/publish-docker.yml`
2. Modify the `env` section:
```yaml
env:
DOCKER_REGISTRY: ghcr.io # Change to your registry (e.g., docker.io, labs.phundrak.com)
IMAGE_NAME: phundrak/phundrak-dot-com-backend
```
Or set it as a repository variable:
- Go to `Settings` → `Secrets and variables` → `Actions` → `Variables` tab
- Add `DOCKER_REGISTRY` with your desired registry URL
### Image Naming
Images are published with the name: `${DOCKER_REGISTRY}/${IMAGE_NAME}:${TAG}`
For example:
- `labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest`
- `labs.phundrak.com/phundrak/phundrak-dot-com-backend:1.0.0`
- `labs.phundrak.com/phundrak/phundrak-dot-com-backend:develop`
- `labs.phundrak.com/phundrak/phundrak-dot-com-backend:pr12`
### Local Testing
To test the Docker image build locally:
```bash
# Build the image with Nix
nix build .#backendDockerLatest
# Load it into Docker
docker load < result
# Run the container (image name comes from Cargo.toml package.name)
docker run -p 3100:3100 phundrak/phundrak-dot-com-backend:latest
```
### Troubleshooting
#### Authentication Failures
If you see authentication errors:
1. Verify your `DOCKER_USERNAME` and `DOCKER_PASSWORD` secrets are correct
2. For ghcr.io, ensure your PAT has the correct permissions
3. Check that the `DOCKER_REGISTRY` matches your credentials
#### Build Failures
If the Nix build fails:
1. Test the build locally first: `nix build .#backendDockerLatest`
2. Check the GitHub Actions logs for specific error messages
3. Ensure all dependencies in `flake.nix` are correctly specified
#### Image Not Appearing in Registry
1. Verify the workflow completed successfully in the Actions tab
2. Check that the registry URL is correct
3. For ghcr.io, images appear at: `https://github.com/users/USERNAME/packages/container/IMAGE_NAME`
4. Ensure your token has write permissions

123
.github/workflows/publish-docker.yml vendored Normal file
View File

@@ -0,0 +1,123 @@
name: Publish Docker Images
on:
push:
branches:
- main
- develop
tags:
- 'v*.*.*'
pull_request:
types: [opened, synchronize, reopened]
env:
CACHIX_NAME: devenv
CACHIX_SKIP_PUSH: true
DOCKER_REGISTRY: labs.phundrak.com # Override in repository settings if needed
IMAGE_NAME: phundrak/phundrak-dot-com-backend
jobs:
build-and-publish:
runs-on: ubuntu-latest
permissions:
contents: read
packages: write # Required for pushing to Phundrak Labs registry
pull-requests: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Nix
uses: cachix/install-nix-action@v27
with:
nix_path: nixpkgs=channel:nixos-unstable
- name: Setup Cachix
uses: cachix/cachix-action@v15
with:
name: '${{ env.CACHIX_NAME }}'
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
skipPush: ${{ env.CACHIX_SKIP_PUSH }}
- name: Build Docker image with Nix
run: |
echo "Building Docker image..."
nix build .#backendDockerLatest --accept-flake-config
- name: Load Docker image
run: |
echo "Loading Docker image into Docker daemon..."
docker load < result
- name: Log in to Docker Registry
run: |
echo "${{ secrets.DOCKER_PASSWORD }}" | docker login ${{ env.DOCKER_REGISTRY }} -u ${{ secrets.DOCKER_USERNAME }} --password-stdin
- name: Determine tags and push images
run: |
set -euo pipefail
REGISTRY="${{ env.DOCKER_REGISTRY }}"
IMAGE_NAME="${{ env.IMAGE_NAME }}"
# The locally built image from Nix (name comes from Cargo.toml package.name)
LOCAL_IMAGE="phundrak/phundrak-dot-com-backend:latest"
echo "Event: ${{ github.event_name }}"
echo "Ref: ${{ github.ref }}"
echo "Ref type: ${{ github.ref_type }}"
# Determine which tags to push based on the event
if [[ "${{ github.event_name }}" == "push" && "${{ github.ref_type }}" == "tag" ]]; then
# Tag push on main branch → publish 'latest' and versioned tag
echo "Tag push detected"
TAG_VERSION="${{ github.ref_name }}"
# Remove 'v' prefix if present (v1.0.0 → 1.0.0)
TAG_VERSION="${TAG_VERSION#v}"
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:latest"
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:latest"
docker push "${REGISTRY}/${IMAGE_NAME}:latest"
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:${TAG_VERSION}"
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:${TAG_VERSION}"
docker push "${REGISTRY}/${IMAGE_NAME}:${TAG_VERSION}"
elif [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" == "refs/heads/develop" ]]; then
# Push on develop branch → publish 'develop' tag
echo "Push to develop branch detected"
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:develop"
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:develop"
docker push "${REGISTRY}/${IMAGE_NAME}:develop"
elif [[ "${{ github.event_name }}" == "pull_request" ]]; then
# Pull request → publish 'pr<number>' tag
echo "Pull request detected"
PR_NUMBER="${{ github.event.pull_request.number }}"
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:pr${PR_NUMBER}"
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:pr${PR_NUMBER}"
docker push "${REGISTRY}/${IMAGE_NAME}:pr${PR_NUMBER}"
elif [[ "${{ github.event_name }}" == "push" && "${{ github.ref }}" == "refs/heads/main" ]]; then
# Push to main branch (not a tag) → publish 'latest'
echo "Push to main branch detected"
echo "Tagging and pushing: ${REGISTRY}/${IMAGE_NAME}:latest"
docker tag "${LOCAL_IMAGE}" "${REGISTRY}/${IMAGE_NAME}:latest"
docker push "${REGISTRY}/${IMAGE_NAME}:latest"
else
echo "Unknown event or ref, skipping push"
exit 1
fi
- name: Log out from Docker Registry
if: always()
run: docker logout ${{ env.DOCKER_REGISTRY }}
- name: Image published successfully
run: |
echo "✅ Docker image(s) published successfully to ${{ env.DOCKER_REGISTRY }}/${{ env.IMAGE_NAME }}"

37
.gitignore vendored
View File

@@ -1,4 +1,37 @@
node_modules
.temp .temp
.cache .cache
/content/.vuepress/dist/* .devenv
# Logs
logs
*.log
# Misc
.DS_Store
.fleet
.idea
# Local env files
.env
.env.*
!.env.example
# Backend
target/
coverage/
# Frontend
## Nuxt dev/build outputs
.output
.data
.nuxt
.nitro
.cache
dist
## Node dependencies
node_modules
# Nix
result
.data/

View File

@@ -0,0 +1,9 @@
{
"vueCompilerOptions": {
"target": 3.5,
"extensions": [".vue"]
},
"typescript": {
"tsdk": "frontend/node_modules/typescript/lib"
}
}

View File

@@ -1,44 +1,76 @@
#+title: phundrak.com #+title: phundrak.com
#+html: <a href="https://www.gnu.org/software/emacs/"><img src="https://img.shields.io/badge/Emacs-30.0.50-blueviolet.svg?style=flat-square&logo=GNU%20Emacs&logoColor=white" /></a> #+html: <a href="https://www.rust-lang.org/"><img src="https://img.shields.io/badge/Rust-Backend-orange.svg?style=flat-square&logo=Rust&logoColor=white" /></a>
#+html: <a href="https://orgmode.org/"><img src="https://img.shields.io/badge/Written%20with-Org%20mode-success?logo=Org&logoColor=white&style=flat-square"/></a> #+html: <a href="https://nuxt.com/"><img src="https://img.shields.io/badge/Frontend-Nuxt%204-00DC82?logo=Nuxt.js&logoColor=white&style=flat-square"/></a>
#+html: <a href="https://v2.vuepress.vuejs.org/"><img src="https://img.shields.io/badge/Framework-Vuepress-42D392?logo=Vue.js&logoColor=white&style=flat-square"/></a> #+html: <a href="https://vuejs.org/"><img src="https://img.shields.io/badge/Vue-3-42B883?logo=Vue.js&logoColor=white&style=flat-square"/></a>
#+html: <a href="https://beta.phundrak.com"><img src="https://img.shields.io/badge/dynamic/json?label=Website&query=%24%5B%3A1%5D.status&url=https%3A%2F%2Fdrone.phundrak.com%2Fapi%2Frepos%2Fphundrak%2Fphundrak.com%2Fbuilds&style=flat-square&logo=buffer" /></a> #+html: <a href="https://phundrak.com"><img src="https://img.shields.io/badge/Website-phundrak.com-blue?style=flat-square&logo=buffer" /></a>
* Introduction * Introduction
This is the repository for my website [[https://phundrak.com][phundrak.com]]. While it is not This is the repository for my website [[https://phundrak.com][phundrak.com]] which contains the
yet live on this address, development versions can be found at code available on the =main= branch. Code available on the =develop=
[[https://alpha.phundrak.com][alpha.phundrak.com]] and [[https://beta.phundrak.com][beta.phundrak.com]] (the former follows the branch is available at [[https://beta.phundrak.com][beta.phundrak.com]].
=develop= branch while the latter follows the =master= branch).
* Structure of the project * Architecture
This website is made with [[https://v2.vuepress.vuejs.org/][VuePress]], a Vue-powered static site The website follows a modern full-stack architecture:
generator. You can find its Node.JS configuration in the [[file:package.json][package.json]]
file as well as its content and general configuration in the directory
[[file:content/][content]].
** Installing and running - *Backend*: Rust using the [[https://github.com/poem-web/poem][Poem]] web framework (located in [[file:backend/][backend/]])
To install the NPM dependencies for the project, run one of the - *Frontend*: Nuxt 4 + Vue 3 + TypeScript (located in [[file:frontend/][frontend/]])
following commands:
** Backend
The backend is written in Rust and provides a RESTful API using the
Poem framework with OpenAPI support.
*** Running the Backend
To run the backend in development mode:
#+begin_src shell #+begin_src shell
yarn cd backend
# or cargo run
npm install # delete the yarn.lock file before
#+end_src #+end_src
To run the project, run one of the following commands using the same To run tests:
package manager as above:
#+begin_src shell #+begin_src shell
yarn dev cd backend
# or cargo test
npm run dev
#+end_src #+end_src
You can compile the website to a static website by running For continuous testing and linting during development, use [[https://dystroy.org/bacon/][bacon]]:
#+begin_src shell #+begin_src shell
yarn build cd backend
# or bacon
npm run build
#+end_src #+end_src
The compiled version of the website can then be found in =content/.vuepress/dist=. *** Building the Backend
To build the backend for production:
#+begin_src shell
cd backend
cargo build --release
#+end_src
The compiled binary will be available at =backend/target/release/backend=.
** Frontend
The frontend is built with Nuxt 4, Vue 3, and TypeScript, providing a
modern single-page application experience.
*** Installing Dependencies
First, install the required dependencies using =pnpm=:
#+begin_src shell
cd frontend
pnpm install
#+end_src
*** Running the Frontend
To run the frontend in development mode:
#+begin_src shell
cd frontend
pnpm dev
#+end_src
*** Building the Frontend
To build the frontend for production:
#+begin_src shell
cd frontend
pnpm build
#+end_src
The compiled version of the website can then be found in =frontend/.output=.

View File

@@ -0,0 +1,6 @@
[all]
out = ["Xml"]
target-dir = "coverage"
output-dir = "coverage"
fail-under = 60
exclude-files = ["target/*"]

View File

@@ -0,0 +1,7 @@
[all]
out = ["Html", "Lcov"]
skip-clean = true
target-dir = "coverage"
output-dir = "coverage"
fail-under = 60
exclude-files = ["target/*"]

3249
backend/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

33
backend/Cargo.toml Normal file
View File

@@ -0,0 +1,33 @@
[package]
name = "phundrak-dot-com-backend"
version = "0.1.0"
edition = "2024"
publish = false
authors = ["Lucien Cartier-Tilet <lucien@phundrak.com>"]
license = "AGPL-3.0-only"
[lib]
path = "src/lib.rs"
[[bin]]
path = "src/main.rs"
name = "phundrak-dot-com-backend"
[dependencies]
chrono = { version = "0.4.42", features = ["serde"] }
config = { version = "0.15.18", features = ["yaml"] }
dotenvy = "0.15.7"
governor = "0.8.0"
lettre = { version = "0.11.19", default-features = false, features = ["builder", "hostname", "pool", "rustls-tls", "tokio1", "tokio1-rustls-tls", "smtp-transport"] }
poem = { version = "3.1.12", default-features = false, features = ["csrf", "rustls", "test"] }
poem-openapi = { version = "5.1.16", features = ["chrono", "swagger-ui"] }
serde = "1.0.228"
serde_json = "1.0.145"
thiserror = "2.0.17"
tokio = { version = "1.48.0", features = ["macros", "rt-multi-thread"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.20", features = ["fmt", "std", "env-filter", "registry", "json", "tracing-log"] }
validator = { version = "0.20.0", features = ["derive"] }
[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(tarpaulin_include)'] }

424
backend/README.md Normal file
View File

@@ -0,0 +1,424 @@
# phundrak.com Backend
The backend for [phundrak.com](https://phundrak.com), built with Rust and the [Poem](https://github.com/poem-web/poem) web framework.
## Features
- **RESTful API** with automatic OpenAPI/Swagger documentation
- **Rate limiting** with configurable per-second limits using the
Generic Cell Rate Algorithm (thanks to
[`governor`](https://github.com/boinkor-net/governor))
- **Contact form** with SMTP email relay (supports TLS, STARTTLS, and
unencrypted)
- **Type-safe routing** using Poem's declarative API
- **Hierarchical configuration** with YAML files and environment
variable overrides
- **Structured logging** with `tracing` and `tracing-subscriber`
- **Strict linting** for code quality and safety
- **Comprehensive testing** with integration test support
## API Endpoints
The application provides the following endpoints:
- **Swagger UI**: `/` - Interactive API documentation
- **OpenAPI Spec**: `/specs` - OpenAPI specification in YAML format
- **Health Check**: `GET /api/health` - Returns server health status
- **Application Metadata**: `GET /api/meta` - Returns version and build info
- **Contact Form**: `POST /api/contact` - Submit contact form (relays to SMTP)
## Configuration
Configuration is loaded from multiple sources in order of precedence:
1. `settings/base.yaml` - Base configuration
2. `settings/{environment}.yaml` - Environment-specific (development/production)
3. Environment variables prefixed with `APP__` (e.g., `APP__APPLICATION__PORT=8080`)
The environment is determined by the `APP_ENVIRONMENT` variable (defaults to "development").
### Configuration Example
```yaml
application:
port: 3100
version: "0.1.0"
email:
host: smtp.example.com
port: 587
user: user@example.com
from: Contact Form <noreply@example.com>
password: your_password
recipient: Admin <admin@example.com>
starttls: true # Use STARTTLS (typically port 587)
tls: false # Use implicit TLS (typically port 465)
rate_limit:
enabled: true # Enable/disable rate limiting
burst_size: 10 # Maximum requests allowed in time window
per_seconds: 60 # Time window in seconds (10 req/60s with the burst_size above)
```
You can also use a `.env` file for local development settings.
### Rate Limiting
The application includes built-in rate limiting to protect against abuse:
- Uses the **Generic Cell Rate Algorithm (GCRA)** via the `governor` crate
- **In-memory rate limiting** - no external dependencies like Redis required
- **Configurable limits** via YAML configuration or environment variables
- **Per-second rate limiting** with burst support
- Returns `429 Too Many Requests` when limits are exceeded
Default configuration: 10 requests per 60 seconds (see `settings/base.yaml`), with bursts of up to `burst_size` requests.
To disable rate limiting, set `rate_limit.enabled: false` in your configuration.
## Development
### Prerequisites
**Option 1: Native Development**
- Rust (latest stable version recommended)
- Cargo (comes with Rust)
**Option 2: Nix Development (Recommended)**
- [Nix](https://nixos.org/download) with flakes enabled
- All dependencies managed automatically
### Running the Server
**With Cargo:**
```bash
cargo run
```
**With Nix development shell:**
```bash
nix develop .#backend
cargo run
```
The server will start on the configured port (default: 3100).
### Building
**With Cargo:**
For development builds:
```bash
cargo build
```
For optimized production builds:
```bash
cargo build --release
```
The compiled binary will be at `target/release/phundrak-dot-com-backend` (the `[[bin]]` name from `Cargo.toml`).
**With Nix:**
Build the backend binary:
```bash
nix build .#backend
# Binary available at: ./result/bin/phundrak-dot-com-backend
```
Build Docker images:
```bash
# Build versioned Docker image (e.g., 0.1.0)
nix build .#backendDocker
# Build latest Docker image
nix build .#backendDockerLatest
# Load into Docker
docker load < result
# Image will be available as: phundrak/phundrak-dot-com-backend:latest (name set in backend/nix/package.nix)
```
The Nix build ensures reproducible builds with all dependencies pinned.
## Testing
Run all tests:
```bash
cargo test
# or
just test
```
Run a specific test:
```bash
cargo test <test_name>
```
Run tests with output:
```bash
cargo test -- --nocapture
```
Run tests with coverage:
```bash
cargo tarpaulin --config .tarpaulin.local.toml
# or
just coverage
```
### Testing Notes
- Integration tests use random TCP ports to avoid conflicts
- Tests use `get_test_app()` helper for consistent test setup
- Telemetry is automatically disabled during tests
- Tests are organized in `#[cfg(test)]` modules within each file
## Code Quality
### Linting
This project uses extremely strict Clippy linting rules:
- `#![deny(clippy::all)]`
- `#![deny(clippy::pedantic)]`
- `#![deny(clippy::nursery)]`
- `#![warn(missing_docs)]`
Run Clippy to check for issues:
```bash
cargo clippy --all-targets
# or
just lint
```
All code must pass these checks before committing.
### Continuous Checking with Bacon
For continuous testing and linting during development, use [bacon](https://dystroy.org/bacon/):
```bash
bacon # Runs clippy-all by default
bacon test # Runs tests continuously
bacon clippy # Runs clippy on default target only
```
Press 'c' in bacon to run clippy-all.
## Code Style
### Error Handling
- Use `thiserror` for custom error types
- Always return `Result` types for fallible operations
- Use descriptive error messages
### Logging
Always use `tracing::event!` with proper target and level:
```rust
tracing::event!(
target: "backend", // or "backend::module_name"
tracing::Level::INFO,
"Message here"
);
```
### Imports
Organize imports in three groups:
1. Standard library (`std::*`)
2. External crates (poem, serde, etc.)
3. Local modules (`crate::*`)
### Testing Conventions
- Use `#[tokio::test]` for async tests
- Use descriptive test names that explain what is being tested
- Test both success and error cases
- For endpoint tests, verify both status codes and response bodies
## Project Structure
```
backend/
├── src/
│ ├── main.rs # Application entry point
│ ├── lib.rs # Library root with run() and prepare()
│ ├── startup.rs # Application builder, server setup
│ ├── settings.rs # Configuration management
│ ├── telemetry.rs # Logging and tracing setup
│ ├── middleware/ # Custom middleware
│ │ ├── mod.rs # Middleware module
│ │ └── rate_limit.rs # Rate limiting middleware
│ └── route/ # API route handlers
│ ├── mod.rs # Route organization
│ ├── contact.rs # Contact form endpoint
│ ├── health.rs # Health check endpoint
│ └── meta.rs # Metadata endpoint
├── settings/ # Configuration files
│ ├── base.yaml # Base configuration
│ ├── development.yaml # Development overrides
│ └── production.yaml # Production overrides
├── Cargo.toml # Dependencies and metadata
└── README.md # This file
```
## Architecture
### Application Initialization Flow
1. `main.rs` calls `run()` from `lib.rs`
2. `run()` calls `prepare()` which:
- Loads environment variables from `.env` file
- Initializes `Settings` from YAML files and environment variables
- Sets up telemetry/logging (unless in test mode)
- Builds the `Application` with optional TCP listener
3. `Application::build()`:
- Sets up OpenAPI service with all API endpoints
- Configures Swagger UI at the root path (`/`)
- Configures API routes under `/api` prefix
- Creates server with TCP listener
4. Application runs with CORS middleware and settings injected as data
### Email Handling
The contact form supports multiple SMTP configurations:
- **Implicit TLS (SMTPS)** - typically port 465
- **STARTTLS (Always/Opportunistic)** - typically port 587
- **Unencrypted** (for local dev) - with or without authentication
The `SmtpTransport` is built dynamically from `EmailSettings` based on
TLS/STARTTLS configuration.
## Docker Deployment
### Using Pre-built Images
Docker images are automatically built and published via GitHub Actions to the configured container registry.
Pull and run the latest image:
```bash
# Pull from Phundrak Labs (labs.phundrak.com)
docker pull labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest
# Run the container
docker run -d \
--name phundrak-backend \
-p 3100:3100 \
-e APP__APPLICATION__PORT=3100 \
-e APP__EMAIL__HOST=smtp.example.com \
-e APP__EMAIL__PORT=587 \
-e APP__EMAIL__USER=user@example.com \
-e APP__EMAIL__PASSWORD=your_password \
-e APP__EMAIL__FROM="Contact Form <noreply@example.com>" \
-e APP__EMAIL__RECIPIENT="Admin <admin@example.com>" \
labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest
```
### Available Image Tags
The following tags are automatically published:
- `latest` - Latest stable release (from tagged commits on `main`)
- `<version>` - Specific version (e.g., `1.0.0`, from tagged commits like `v1.0.0`)
- `develop` - Latest development build (from `develop` branch)
- `pr<number>` - Pull request preview builds (e.g., `pr12`)
### Building Images Locally
Build with Nix (recommended for reproducibility):
```bash
nix build .#backendDockerLatest
docker load < result
docker run -p 3100:3100 phundrak/phundrak-dot-com-backend:latest
```
Build with Docker directly:
```bash
# Note: This requires a Dockerfile (not included in this project)
# Use Nix builds for containerization
```
### Docker Compose Example
```yaml
version: '3.8'
services:
backend:
image: labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest
ports:
- "3100:3100"
environment:
APP__APPLICATION__PORT: 3100
APP__EMAIL__HOST: smtp.example.com
APP__EMAIL__PORT: 587
APP__EMAIL__USER: ${SMTP_USER}
APP__EMAIL__PASSWORD: ${SMTP_PASSWORD}
APP__EMAIL__FROM: "Contact Form <noreply@example.com>"
APP__EMAIL__RECIPIENT: "Admin <admin@example.com>"
APP__EMAIL__STARTTLS: true
APP__RATE_LIMIT__ENABLED: true
APP__RATE_LIMIT__BURST_SIZE: 10
APP__RATE_LIMIT__PER_SECONDS: 60
restart: unless-stopped
```
## CI/CD Pipeline
### Automated Docker Publishing
GitHub Actions automatically builds and publishes Docker images based on repository events:
| Event Type | Trigger | Published Tags |
|-----------------|------------------------------|-------------------------------|
| Tag push | `v*.*.*` tag on `main` | `latest`, `<version>` |
| Branch push | Push to `develop` | `develop` |
| Pull request | PR opened/updated | `pr<number>` |
| Branch push | Push to `main` (no tag) | `latest` |
### Workflow Details
The CI/CD pipeline (`.github/workflows/publish-docker.yml`):
1. **Checks out the repository**
2. **Installs Nix** with flakes enabled
3. **Builds the Docker image** using Nix for reproducibility
4. **Authenticates** with the configured Docker registry
5. **Tags and pushes** images based on the event type
### Registry Configuration
Images are published to the registry specified by the `DOCKER_REGISTRY` environment variable in the workflow (default: `labs.phundrak.com`).
To use the published images, authenticate with the registry:
```bash
# For Phundrak Labs (labs.phundrak.com)
echo $GITHUB_TOKEN | docker login labs.phundrak.com -u USERNAME --password-stdin
# Pull the image
docker pull labs.phundrak.com/phundrak/phundrak-dot-com-backend:latest
```
### Required Secrets
The workflow requires these GitHub secrets:
- `DOCKER_USERNAME` - Registry username
- `DOCKER_PASSWORD` - Registry password or token
- `CACHIX_AUTH_TOKEN` - (Optional) For Nix build caching
See [.github/workflows/README.md](../.github/workflows/README.md) for detailed setup instructions.
## License
AGPL-3.0-only - See the root repository for full license information.

84
backend/bacon.toml Normal file
View File

@@ -0,0 +1,84 @@
# This is a configuration file for the bacon tool
#
# Bacon repository: https://github.com/Canop/bacon
# Complete help on configuration: https://dystroy.org/bacon/config/
# You can also check bacon's own bacon.toml file
# as an example: https://github.com/Canop/bacon/blob/main/bacon.toml

# Match the project's strict lint policy: lint everything by default.
default_job = "clippy-all"

# Type-check the default target only.
[jobs.check]
command = ["cargo", "check", "--color", "always"]
need_stdout = false

# Type-check every target (tests, examples, benches).
[jobs.check-all]
command = ["cargo", "check", "--all-targets", "--color", "always"]
need_stdout = false

# Run clippy on the default target
[jobs.clippy]
command = [
    "cargo", "clippy",
    "--color", "always",
]
need_stdout = false

# Run clippy on all targets, mirroring `just lint`.
[jobs.clippy-all]
command = [
    "cargo", "clippy",
    "--all-targets",
    "--color", "always",
]
need_stdout = false

# Continuous test run; stdout is needed to show test output.
[jobs.test]
command = [
    "cargo", "test", "--color", "always",
    "--", "--color", "always", # see https://github.com/Canop/bacon/issues/124
]
need_stdout = true

[jobs.doc]
command = ["cargo", "doc", "--color", "always", "--no-deps"]
need_stdout = false

# If the doc compiles, then it opens in your browser and bacon switches
# to the previous job
[jobs.doc-open]
command = ["cargo", "doc", "--color", "always", "--no-deps", "--open"]
need_stdout = false
on_success = "back" # so that we don't open the browser at each change

# You can run your application and have the result displayed in bacon,
# *if* it makes sense for this crate.
# Don't forget the `--color always` part or the errors won't be
# properly parsed.
# If your program never stops (eg a server), you may set `background`
# to false to have the cargo run output immediately displayed instead
# of waiting for program's end.
[jobs.run]
command = [
    "cargo", "run",
    "--color", "always",
    # put launch parameters for your program behind a `--` separator
]
need_stdout = true
allow_warnings = true
background = true

# This parameterized job runs the example of your choice, as soon
# as the code compiles.
# Call it as
#    bacon ex -- my-example
[jobs.ex]
command = ["cargo", "run", "--color", "always", "--example"]
need_stdout = true
allow_warnings = true

# You may define here keybindings that would be specific to
# a project, for example a shortcut to launch a specific job.
# Shortcuts to internal functions (scrolling, toggling, etc.)
# should go in your personal global prefs.toml file instead.
[keybindings]
# alt-m = "job:my-job"
c = "job:clippy-all" # comment this to have 'c' run clippy on only the default target

51
backend/deny.toml Normal file
View File

@@ -0,0 +1,51 @@
# cargo-deny configuration: license, advisory, duplicate-version and
# dependency-source checks (run via `just audit`).
[output]
# Depth of the feature tree shown when reporting an offending crate.
feature-depth = 1

[advisories]
# RUSTSEC advisory IDs to ignore (none currently).
ignore = []

[licenses]
# List of explicitly allowed licenses
# See https://spdx.org/licenses/ for list of possible licenses
allow = [
    "0BSD",
    "AGPL-3.0-only",
    "Apache-2.0 WITH LLVM-exception",
    "Apache-2.0",
    "BSD-3-Clause",
    "CDLA-Permissive-2.0",
    "ISC",
    "MIT",
    "MPL-2.0",
    "OpenSSL",
    "Unicode-3.0",
    "Zlib",
]
# Minimum confidence required when detecting a license from its text.
confidence-threshold = 0.8
exceptions = []

[licenses.private]
# Private (unpublished) crates are still subject to the license check.
ignore = false
registries = []

[bans]
# Duplicate versions and wildcard requirements are tolerated, only highlighted.
multiple-versions = "allow"
wildcards = "allow"
highlight = "all"
workspace-default-features = "allow"
external-default-features = "allow"
allow = []
deny = []
skip = []
skip-tree = []

[sources]
# Only crates.io is trusted; any unknown registry or git source is an error.
unknown-registry = "deny"
unknown-git = "deny"
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
allow-git = []

[sources.allow-org]
github = []
gitlab = []
bitbucket = []

48
backend/justfile Normal file
View File

@@ -0,0 +1,48 @@
# Task runner recipes for the backend (https://github.com/casey/just).

# Running `just` with no arguments runs the server in debug mode.
default: run

run:
    cargo run

run-release:
    cargo run --release

format:
    cargo fmt --all

format-check:
    cargo fmt --check --all

# Audit dependencies (licenses, advisories, sources) with cargo-deny.
audit:
    cargo deny

build:
    cargo build

build-release:
    cargo build --release

lint:
    cargo clippy --all-targets

# Aliases for the verb-last spellings; they delegate to the canonical
# recipes instead of duplicating their command lines.
release-build: build-release

release-run: run-release

test:
    cargo test

coverage:
    mkdir -p coverage
    cargo tarpaulin --config .tarpaulin.local.toml

coverage-ci:
    mkdir -p coverage
    cargo tarpaulin --config .tarpaulin.ci.toml

# Everything the quality gate requires: formatting, lints, coverage, audit.
check-all: format-check lint coverage audit

## Local Variables:
## mode: makefile
## End:

60
backend/nix/package.nix Normal file
View File

@@ -0,0 +1,60 @@
# Nix build of the backend crate plus layered Docker images.
#
# Exposes three attributes: `backend` (the compiled binary),
# `backendDocker` (image tagged with the crate version) and
# `backendDockerLatest` (image tagged `latest`).
{
  rust-overlay,
  inputs,
  system,
  ...
}: let
  # Pinned Rust toolchain, shared with the dev shell (see rust-version.nix).
  rust = import ./rust-version.nix { inherit rust-overlay inputs system; };
  pkgs = rust.pkgs;
  rustPlatform = pkgs.makeRustPlatform {
    cargo = rust.version;
    rustc = rust.version;
  };
  # Name and version are read from Cargo.toml so they never drift.
  cargoToml = builtins.fromTOML (builtins.readFile ../Cargo.toml);
  name = cargoToml.package.name;
  version = cargoToml.package.version;
  rustBuild = rustPlatform.buildRustPackage {
    pname = name;
    inherit version;
    src = ../.;
    cargoLock.lockFile = ../Cargo.lock;
  };
  # Ships settings/*.yaml into the image; with WorkingDir = "/" the server
  # presumably resolves its settings/ directory relative to / -- confirm
  # against the Settings loader.
  settingsDir = pkgs.runCommand "settings" {} ''
    mkdir -p $out/settings
    cp ${../settings}/*.yaml $out/settings/
  '';
  # Builds a minimal layered image containing the binary, CA certificates
  # (SSL_CERT_FILE, needed for TLS connections) and the settings directory.
  makeDockerImage = tag:
    pkgs.dockerTools.buildLayeredImage {
      name = "phundrak/${name}";
      inherit tag;
      created = "now";
      config = {
        Entrypoint = ["${rustBuild}/bin/${name}"];
        WorkingDir = "/";
        Env = [
          "SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt"
        ];
        ExposedPorts = {
          "3100/tcp" = {};
        };
        Labels = {
          "org.opencontainers.image.title" = name;
          "org.opencontainers.image.version" = version;
          "org.opencontainers.image.description" = "REST API backend for phundrak.com";
          "org.opencontainers.image.authors" = "Lucien Cartier-Tilet <lucien@phundrak.com>";
          "org.opencontainers.image.licenses" = "AGPL-3.0-only";
          "org.opencontainers.image.source" = "https://labs.phundrak.com/phundrak/phundrak.com";
          "org.opencontainers.image.url" = "https://labs.phundrak.com/phundrak/phundrak.com";
          "org.opencontainers.image.documentation" = "https://labs.phundrak.com/phundrak/phundrak.com";
          "org.opencontainers.image.vendor" = "Phundrak";
        };
      };
      contents = [rustBuild pkgs.cacert settingsDir];
    };
  dockerImageLatest = makeDockerImage "latest";
  dockerImageVersioned = makeDockerImage version;
in {
  backend = rustBuild;
  backendDocker = dockerImageVersioned;
  backendDockerLatest = dockerImageLatest;
}

View File

@@ -0,0 +1,6 @@
# Pins the Rust toolchain: nixpkgs instantiated with the rust-overlay, and
# the latest stable rust-bin as the toolchain version.
{rust-overlay, inputs, system, ...}: let
  overlays = [(import rust-overlay)];
  pkgs = import inputs.nixpkgs {inherit system overlays;};
in {
  inherit pkgs;
  version = pkgs.rust-bin.stable.latest.default;
}

75
backend/nix/shell.nix Normal file
View File

@@ -0,0 +1,75 @@
# Development shell for the backend, assembled with devenv.
#
# Provides the pinned Rust toolchain plus IDE tooling, auxiliary cargo
# tools, and a local Mailpit SMTP service for testing the contact form.
{
  inputs,
  pkgs,
  system,
  self,
  rust-overlay,
  ...
}: let
  # Same pinned toolchain as the package build (see rust-version.nix).
  rustPlatform = import ./rust-version.nix { inherit rust-overlay inputs system; };
in
  inputs.devenv.lib.mkShell {
    inherit inputs pkgs;
    modules = [
      {
        # devenv needs the project root; it is persisted in .devenv-root.
        # mkIf guards against an empty file on first use.
        devenv.root = let
          devenvRootFileContent = builtins.readFile "${self}/.devenv-root";
        in
          pkgs.lib.mkIf (devenvRootFileContent != "") devenvRootFileContent;
      }
      {
        packages = with rustPlatform.pkgs; [
          # Toolchain with the components editors and CI expect.
          (rustPlatform.version.override {
            extensions = [
              "clippy"
              "rust-src"
              "rust-analyzer"
              "rustfmt"
            ];
          })
          bacon
          cargo-deny
          cargo-shuttle
          cargo-tarpaulin
          cargo-watch
          flyctl
          just
          marksman
          tombi # TOML lsp server
        ];
        # Local SMTP sink so contact-form emails can be inspected in a browser
        # (matches the email settings in settings/development.yaml).
        services.mailpit = {
          enable = true;
          # HTTP interface for viewing emails
          uiListenAddress = "127.0.0.1:8025";
          # SMTP server for receiving emails
          smtpListenAddress = "127.0.0.1:1025";
        };
        # Auto-reloading backend process started by `devenv up`.
        processes.run.exec = "cargo watch -x run";
        enterShell = ''
          echo "🦀 Rust backend development environment loaded!"
          echo "📦 Rust version: $(rustc --version)"
          echo "📦 Cargo version: $(cargo --version)"
          echo ""
          echo "Available tools:"
          echo "  - rust-analyzer (LSP)"
          echo "  - clippy (linter)"
          echo "  - rustfmt (formatter)"
          echo "  - bacon (continuous testing/linting)"
          echo "  - cargo-deny (dependency checker)"
          echo "  - cargo-tarpaulin (code coverage)"
          echo ""
          echo "📧 Mailpit service:"
          echo "  - SMTP server: 127.0.0.1:1025"
          echo "  - Web UI: http://127.0.0.1:8025"
          echo ""
          echo "🚀 Quick start:"
          echo "  Run 'devenv up' to launch:"
          echo "  - Mailpit service (email testing)"
          echo "  - Backend with 'cargo watch -x run' (auto-reload)"
        '';
      }
    ];
  }

View File

@@ -0,0 +1,8 @@
# Base configuration shared by every environment. Values are overridden by
# settings/{environment}.yaml and then by APP__* environment variables.
application:
  # TCP port the HTTP server listens on.
  port: 3100
  # Application version reported by the API.
  version: "0.1.0"
rate_limit:
  # Enable/disable the in-memory rate limiter.
  enabled: true
  # Maximum requests allowed in the time window.
  burst_size: 10
  # Length of the time window, in seconds.
  per_seconds: 60

View File

@@ -0,0 +1,18 @@
# Development overrides (APP_ENVIRONMENT=development, the default).
frontend_url: http://localhost:3000
debug: true
application:
  protocol: http
  host: 127.0.0.1
  base_url: http://127.0.0.1:3100
  name: "com.phundrak.backend.dev"
# Points at the local Mailpit SMTP sink (see nix/shell.nix): no auth, no TLS.
email:
  host: localhost
  port: 1025
  user: ""
  password: ""
  from: Contact Form <noreply@example.com>
  recipient: Admin <user@example.com>
  tls: false
  starttls: false

View File

@@ -0,0 +1,18 @@
# Production overrides (APP_ENVIRONMENT=production).
# Empty values are placeholders that must be supplied at deploy time through
# APP__* environment variables (e.g. APP__EMAIL__HOST).
debug: false
frontend_url: ""
application:
  name: "com.phundrak.backend.prod"
  protocol: https
  # Bind on all interfaces inside the container.
  host: 0.0.0.0
  base_url: ""
email:
  host: ""
  port: 0
  user: ""
  password: ""
  from: ""
  recipient: ""
  tls: false
  starttls: false

82
backend/src/lib.rs Normal file
View File

@@ -0,0 +1,82 @@
//! Backend API server for phundrak.com
//!
//! This is a REST API built with the Poem framework that provides:
//! - Health check endpoints
//! - Application metadata endpoints
//! - Contact form submission with email integration

#![deny(clippy::all)]
#![deny(clippy::pedantic)]
#![deny(clippy::nursery)]
#![warn(missing_docs)]
// Endpoint signatures may be `async` without awaiting anything.
#![allow(clippy::unused_async)]

/// Custom middleware implementations
pub mod middleware;
/// API route handlers and endpoints
pub mod route;
/// Application configuration settings
pub mod settings;
/// Application startup and server configuration
pub mod telemetry_placeholder_never_used;
/// Loads configuration and assembles the [`startup::Application`].
///
/// Reads an optional `.env` file, builds [`settings::Settings`] from the
/// YAML files and environment variables, initialises telemetry (skipped
/// under `cfg(test)` so test output stays quiet), then builds the
/// application on the given listener (or from configuration when `None`).
///
/// # Panics
///
/// Panics if the settings cannot be read.
fn prepare(listener: MaybeListener) -> startup::Application {
    // A missing .env file is fine; variables may come from the real env.
    dotenvy::dotenv().ok();
    let settings = settings::Settings::new().expect("Failed to read settings");
    if !cfg!(test) {
        let subscriber = telemetry::get_subscriber(settings.debug);
        telemetry::init_subscriber(subscriber);
    }
    tracing::event!(
        target: "backend",
        tracing::Level::DEBUG,
        "Using these settings: {:?}",
        settings
    );
    let application = startup::Application::build(settings, listener);
    tracing::event!(
        target: "backend",
        tracing::Level::INFO,
        "Listening on http://{}:{}/",
        application.host(),
        application.port()
    );
    // Same URL on purpose: the Swagger UI is served at the root path.
    tracing::event!(
        target: "backend",
        tracing::Level::INFO,
        "Documentation available at http://{}:{}/",
        application.host(),
        application.port()
    );
    application
}
/// Runs the application with the specified TCP listener.
///
/// Prepares the application (settings, telemetry, routes) and then serves
/// it until the server stops.
///
/// # Errors
///
/// Returns a `std::io::Error` if the server fails to start or encounters
/// an I/O error during runtime (e.g., port already in use, network issues).
#[cfg(not(tarpaulin_include))]
pub async fn run(listener: MaybeListener) -> Result<(), std::io::Error> {
    prepare(listener).make_app().run().await
}
/// Builds a poem TCP listener on an OS-assigned free port of localhost.
///
/// A throwaway `std` listener asks the OS for a free port, whose number is
/// then reused for the poem listener.
/// NOTE(review): the std listener stays bound until this function returns,
/// so this presumably relies on poem binding lazily when the server starts;
/// there is also a small window in which another process could grab the
/// port -- acceptable for tests, but confirm.
#[cfg(test)]
fn make_random_tcp_listener() -> poem::listener::TcpListener<String> {
    let tcp_listener =
        std::net::TcpListener::bind("127.0.0.1:0").expect("Failed to bind a random TCP listener");
    let port = tcp_listener.local_addr().unwrap().port();
    poem::listener::TcpListener::bind(format!("127.0.0.1:{port}"))
}
/// Builds a fully-configured test application bound to a random local port,
/// for use by integration tests.
#[cfg(test)]
fn get_test_app() -> startup::App {
    let tcp_listener = make_random_tcp_listener();
    prepare(Some(tcp_listener)).make_app().into()
}

7
backend/src/main.rs Normal file
View File

@@ -0,0 +1,7 @@
//! Backend server entry point.

// Excluded from coverage: main only delegates to the library crate.
#[cfg(not(tarpaulin_include))]
#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    // `None`: let the library bind a listener from its configuration.
    phundrak_dot_com_backend::run(None).await
}

View File

@@ -0,0 +1,5 @@
//! Custom middleware for the application.
//!
//! This module contains custom middleware implementations including rate limiting.
pub mod rate_limit;

View File

@@ -0,0 +1,211 @@
//! Rate limiting middleware using the governor crate.
//!
//! This middleware implements a process-wide rate limit using the Generic
//! Cell Rate Algorithm (GCRA) via the governor crate. The limiter is
//! unkeyed -- a single budget shared by all clients; the client IP is only
//! used for logging. State lives in memory, without requiring external
//! dependencies like Redis.
use std::{
net::IpAddr,
num::NonZeroU32,
sync::Arc,
time::Duration,
};
use governor::{
clock::DefaultClock,
state::{InMemoryState, NotKeyed},
Quota, RateLimiter,
};
use poem::{
Endpoint, Error, IntoResponse, Middleware, Request, Response, Result,
};
/// Rate limiting configuration.
#[derive(Debug, Clone)]
pub struct RateLimitConfig {
    /// Maximum number of requests allowed in the time window (burst size).
    pub burst_size: u32,
    /// Time window in seconds for rate limiting.
    pub per_seconds: u64,
}

impl RateLimitConfig {
    /// Creates a new rate limit configuration.
    ///
    /// # Arguments
    ///
    /// * `burst_size` - Maximum number of requests allowed in the time window
    /// * `per_seconds` - Time window in seconds
    #[must_use]
    pub const fn new(burst_size: u32, per_seconds: u64) -> Self {
        Self {
            burst_size,
            per_seconds,
        }
    }

    /// Creates a rate limiter from this configuration.
    ///
    /// The quota allows `burst_size` requests per `per_seconds` window: one
    /// permit is replenished every `per_seconds / burst_size`, with a burst
    /// capacity of `burst_size`.
    ///
    /// # Panics
    ///
    /// Panics if `burst_size` or `per_seconds` is zero.
    #[must_use]
    pub fn create_limiter(&self) -> RateLimiter<NotKeyed, InMemoryState, DefaultClock> {
        // `Quota::with_period(p)` replenishes ONE permit every `p`. Using the
        // whole window as the period would throttle sustained traffic to a
        // single request per window (e.g. 1 req/60s), contradicting the
        // documented "burst_size requests per per_seconds" semantics, so the
        // replenish period is the window divided by the burst size.
        let replenish_period = Duration::from_secs(self.per_seconds) / self.burst_size;
        let quota = Quota::with_period(replenish_period)
            .expect("Failed to create quota")
            .allow_burst(NonZeroU32::new(self.burst_size).expect("Burst size must be non-zero"));
        RateLimiter::direct(quota)
    }
}

impl Default for RateLimitConfig {
    fn default() -> Self {
        // Default: 20 requests per second, with a burst capacity of 20.
        Self::new(20, 1)
    }
}
/// Middleware applying a single, process-wide rate limit.
///
/// The limiter is unkeyed (shared by all clients); the client IP address is
/// only extracted for logging when a request is rejected.
pub struct RateLimit {
    // Shared with every endpoint wrapper produced by `transform`.
    limiter: Arc<RateLimiter<NotKeyed, InMemoryState, DefaultClock>>,
}

impl RateLimit {
    /// Creates a new rate limiting middleware with the given configuration.
    #[must_use]
    pub fn new(config: &RateLimitConfig) -> Self {
        Self {
            limiter: Arc::new(config.create_limiter()),
        }
    }
}

impl<E: Endpoint> Middleware<E> for RateLimit {
    type Output = RateLimitEndpoint<E>;

    // Wraps the endpoint; all wrappers share the same limiter via `Arc`.
    fn transform(&self, ep: E) -> Self::Output {
        RateLimitEndpoint {
            endpoint: ep,
            limiter: self.limiter.clone(),
        }
    }
}
/// The endpoint wrapper that performs rate limiting checks.
pub struct RateLimitEndpoint<E> {
    // The wrapped downstream endpoint.
    endpoint: E,
    // Limiter shared with the `RateLimit` middleware that built this wrapper.
    limiter: Arc<RateLimiter<NotKeyed, InMemoryState, DefaultClock>>,
}

impl<E: Endpoint> Endpoint for RateLimitEndpoint<E> {
    type Output = Response;

    /// Rejects the request with `429 Too Many Requests` when the shared
    /// quota is exhausted, otherwise forwards it to the wrapped endpoint.
    async fn call(&self, req: Request) -> Result<Self::Output> {
        // Check rate limit: an unkeyed check, i.e. one global budget shared
        // by all clients; the IP is resolved only for the warning log.
        if self.limiter.check().is_err() {
            let client_ip = Self::get_client_ip(&req)
                .map_or_else(|| "unknown".to_string(), |ip| ip.to_string());
            tracing::event!(
                target: "backend::middleware::rate_limit",
                tracing::Level::WARN,
                client_ip = %client_ip,
                "Rate limit exceeded"
            );
            return Err(Error::from_status(poem::http::StatusCode::TOO_MANY_REQUESTS));
        }
        // Process the request
        let response = self.endpoint.call(req).await;
        response.map(IntoResponse::into_response)
    }
}

impl<E> RateLimitEndpoint<E> {
    /// Extracts the client IP address from the request.
    ///
    /// Returns `None` when the remote address is not a TCP socket address.
    fn get_client_ip(req: &Request) -> Option<IpAddr> {
        req.remote_addr().as_socket_addr().map(std::net::SocketAddr::ip)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Configuration constructor stores both fields verbatim.
    #[test]
    fn rate_limit_config_new() {
        let config = RateLimitConfig::new(10, 60);
        assert_eq!(config.burst_size, 10);
        assert_eq!(config.per_seconds, 60);
    }

    // Default stays in sync with the values in `Default for RateLimitConfig`.
    #[test]
    fn rate_limit_config_default() {
        let config = RateLimitConfig::default();
        assert_eq!(config.burst_size, 20);
        assert_eq!(config.per_seconds, 1);
    }

    // The limiter enforces the configured burst size immediately.
    #[test]
    fn rate_limit_config_creates_limiter() {
        let config = RateLimitConfig::new(5, 1);
        let limiter = config.create_limiter();
        // First 5 requests should succeed
        for _ in 0..5 {
            assert!(limiter.check().is_ok());
        }
        // 6th request should fail
        assert!(limiter.check().is_err());
    }

    #[tokio::test]
    async fn rate_limit_middleware_allows_within_limit() {
        use poem::{handler, test::TestClient, EndpointExt, Route};

        #[handler]
        async fn index() -> String {
            "Hello".to_string()
        }

        let config = RateLimitConfig::new(5, 60);
        let app = Route::new()
            .at("/", poem::get(index))
            .with(RateLimit::new(&config));
        let cli = TestClient::new(app);
        // First 5 requests should succeed
        for _ in 0..5 {
            let response = cli.get("/").send().await;
            response.assert_status_is_ok();
        }
    }

    #[tokio::test]
    async fn rate_limit_middleware_blocks_over_limit() {
        use poem::{handler, test::TestClient, EndpointExt, Route};

        #[handler]
        async fn index() -> String {
            "Hello".to_string()
        }

        let config = RateLimitConfig::new(3, 60);
        let app = Route::new()
            .at("/", poem::get(index))
            .with(RateLimit::new(&config));
        let cli = TestClient::new(app);
        // First 3 requests should succeed
        for _ in 0..3 {
            let response = cli.get("/").send().await;
            response.assert_status_is_ok();
        }
        // 4th request should be rate limited
        let response = cli.get("/").send().await;
        response.assert_status(poem::http::StatusCode::TOO_MANY_REQUESTS);
    }
}

View File

@@ -0,0 +1,514 @@
//! Contact form endpoint for handling user submissions and sending emails.
//!
//! This module provides functionality to:
//! - Validate contact form submissions
//! - Detect spam using honeypot fields
//! - Send emails via SMTP with various TLS configurations
use lettre::{
Message, SmtpTransport, Transport, message::header::ContentType,
transport::smtp::authentication::Credentials,
};
use poem_openapi::{ApiResponse, Object, OpenApi, payload::Json};
use validator::Validate;
use super::ApiCategory;
use crate::settings::{EmailSettings, Starttls};
// Builds an SMTP transport from the configured email settings, selecting
// implicit TLS, STARTTLS, or an unencrypted connection. Credentials are
// attached except on unencrypted connections with an empty user.
impl TryFrom<&EmailSettings> for SmtpTransport {
    type Error = lettre::transport::smtp::Error;

    /// # Errors
    ///
    /// Returns a `lettre` SMTP error when the relay cannot be constructed
    /// from the configured host.
    fn try_from(settings: &EmailSettings) -> Result<Self, Self::Error> {
        if settings.tls {
            // Implicit TLS (SMTPS) - typically port 465
            tracing::event!(target: "backend::contact", tracing::Level::DEBUG, "Using implicit TLS (SMTPS)");
            let creds = Credentials::new(settings.user.clone(), settings.password.clone());
            Ok(Self::relay(&settings.host)?
                .port(settings.port)
                .credentials(creds)
                .build())
        } else {
            // STARTTLS or no encryption
            match settings.starttls {
                Starttls::Never => {
                    // For local development without TLS
                    tracing::event!(target: "backend::contact", tracing::Level::DEBUG, "Using unencrypted connection");
                    let builder = Self::builder_dangerous(&settings.host).port(settings.port);
                    // An empty user means no authentication (e.g. Mailpit).
                    if settings.user.is_empty() {
                        Ok(builder.build())
                    } else {
                        let creds =
                            Credentials::new(settings.user.clone(), settings.password.clone());
                        Ok(builder.credentials(creds).build())
                    }
                }
                Starttls::Opportunistic | Starttls::Always => {
                    // STARTTLS - typically port 587
                    tracing::event!(target: "backend::contact", tracing::Level::DEBUG, "Using STARTTLS");
                    let creds = Credentials::new(settings.user.clone(), settings.password.clone());
                    Ok(Self::starttls_relay(&settings.host)?
                        .port(settings.port)
                        .credentials(creds)
                        .build())
                }
            }
        }
    }
}
// Incoming contact-form payload. Length/email constraints are enforced by
// the handler through `validate()` before any email is sent.
#[derive(Debug, Object, Validate)]
struct ContactRequest {
    // Sender's display name, 1-100 characters.
    // NOTE: `max` was previously the string literal "100"; it is now a
    // numeric bound, consistent with every other length constraint here.
    #[validate(length(
        min = 1,
        max = 100,
        message = "Name must be between 1 and 100 characters"
    ))]
    name: String,
    // Sender's address; also used as the Reply-To of the relayed email.
    #[validate(email(message = "Invalid email address"))]
    email: String,
    // Message body, 10-5000 characters.
    #[validate(length(
        min = 10,
        max = 5000,
        message = "Message must be between 10 and 5000 characters"
    ))]
    message: String,
    /// Honeypot field - should always be empty
    #[oai(rename = "website")]
    honeypot: Option<String>,
}
// Response payload shared by every outcome of the contact endpoint.
#[derive(Debug, Object, serde::Deserialize)]
struct ContactResponse {
    // Whether the submission was accepted.
    success: bool,
    // Human-readable outcome description.
    message: String,
}

// Convenience conversion so handlers can build responses with `.into()`.
impl From<ContactResponse> for Json<ContactResponse> {
    fn from(value: ContactResponse) -> Self {
        Self(value)
    }
}

// HTTP outcomes of the contact endpoint, mapped to status codes.
#[derive(ApiResponse)]
enum ContactApiResponse {
    /// Success
    #[oai(status = 200)]
    Ok(Json<ContactResponse>),
    /// Bad Request - validation failed
    #[oai(status = 400)]
    BadRequest(Json<ContactResponse>),
    /// Too Many Requests - rate limit exceeded
    #[oai(status = 429)]
    // Never constructed here (the rate-limit middleware answers 429); the
    // variant exists so the status appears in the OpenAPI documentation.
    #[allow(dead_code)]
    TooManyRequests,
    /// Internal Server Error
    #[oai(status = 500)]
    InternalServerError(Json<ContactResponse>),
}
/// API for handling contact form submissions and sending emails.
#[derive(Clone)]
pub struct ContactApi {
    // SMTP configuration used to build the transport for each submission.
    settings: EmailSettings,
}

impl From<EmailSettings> for ContactApi {
    fn from(settings: EmailSettings) -> Self {
        Self { settings }
    }
}
#[OpenApi(tag = "ApiCategory::Contact")]
impl ContactApi {
    /// Submit a contact form
    ///
    /// Send a message through the contact form. Rate limited to prevent spam.
    #[oai(path = "/contact", method = "post")]
    async fn submit_contact(
        &self,
        body: Json<ContactRequest>,
        remote_addr: Option<poem::web::Data<&poem::web::RemoteAddr>>,
    ) -> ContactApiResponse {
        let body = body.0;
        // Spam check: the hidden "website" field is never filled by humans,
        // so any non-empty value means a bot. Reply with a fake success so
        // the bot cannot tell it was detected. An empty string is tolerated
        // because a legitimate client may submit the hidden field unfilled.
        if body.honeypot.as_ref().is_some_and(|h| !h.is_empty()) {
            tracing::event!(target: "backend::contact", tracing::Level::INFO, "Honeypot triggered, rejecting request silently. IP: {}", remote_addr.map_or_else(|| "No remote address found".to_owned(), |ip| ip.0.to_string()));
            return ContactApiResponse::Ok(
                ContactResponse {
                    success: true,
                    message: "Message sent successfully, but not really, you bot".to_owned(),
                }
                .into(),
            );
        }
        // Enforce the declared length/email constraints before doing any work.
        if let Err(e) = body.validate() {
            return ContactApiResponse::BadRequest(
                ContactResponse {
                    success: false,
                    message: format!("Validation error: {e}"),
                }
                .into(),
            );
        }
        match self.send_email(&body).await {
            Ok(()) => {
                tracing::event!(target: "backend::contact", tracing::Level::INFO, "Message sent successfully from: {}", body.email);
                ContactApiResponse::Ok(
                    ContactResponse {
                        success: true,
                        message: "Message sent successfully".to_owned(),
                    }
                    .into(),
                )
            }
            Err(e) => {
                // Do not leak SMTP details to the caller; log them instead.
                tracing::event!(target: "backend::contact", tracing::Level::ERROR, "Failed to send email: {}", e);
                ContactApiResponse::InternalServerError(
                    ContactResponse {
                        success: false,
                        message: "Failed to send message. Please try again later.".to_owned(),
                    }
                    .into(),
                )
            }
        }
    }

    /// Formats the submission as plain text and relays it via the
    /// configured SMTP transport.
    ///
    /// # Errors
    ///
    /// Returns an error when an address fails to parse, the message cannot
    /// be built, or the SMTP transport fails to connect or send.
    async fn send_email(&self, request: &ContactRequest) -> Result<(), Box<dyn std::error::Error>> {
        let email_body = format!(
            r"New contact form submission:
Name: {}
Email: {}
Message:
{}",
            request.name, request.email, request.message
        );
        tracing::event!(target: "email", tracing::Level::DEBUG, "Sending email content: {}", email_body);
        let email = Message::builder()
            .from(self.settings.from.parse()?)
            // Reply-To points at the submitter so replying just works.
            .reply_to(format!("{} <{}>", request.name, request.email).parse()?)
            .to(self.settings.recipient.parse()?)
            .subject(format!("Contact Form: {}", request.name))
            .header(ContentType::TEXT_PLAIN)
            .body(email_body)?;
        tracing::event!(target: "email", tracing::Level::DEBUG, "Email to be sent: {}", format!("{email:?}"));
        let mailer = SmtpTransport::try_from(&self.settings)?;
        mailer.send(&email)?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Covers `ContactRequest` validation bounds, the `SmtpTransport`
    // configuration matrix, and `/api/contact` integration error paths.
    // Tests for ContactRequest validation
    #[test]
    fn contact_request_valid() {
        let request = ContactRequest {
            name: "John Doe".to_string(),
            email: "john@example.com".to_string(),
            message: "This is a test message that is long enough.".to_string(),
            honeypot: None,
        };
        assert!(request.validate().is_ok());
    }
    #[test]
    fn contact_request_name_too_short() {
        let request = ContactRequest {
            name: String::new(),
            email: "john@example.com".to_string(),
            message: "This is a test message that is long enough.".to_string(),
            honeypot: None,
        };
        assert!(request.validate().is_err());
    }
    #[test]
    fn contact_request_name_too_long() {
        // 101 characters: one past the accepted maximum (see the
        // `name_at_max_length` test below).
        let request = ContactRequest {
            name: "a".repeat(101),
            email: "john@example.com".to_string(),
            message: "This is a test message that is long enough.".to_string(),
            honeypot: None,
        };
        assert!(request.validate().is_err());
    }
    #[test]
    fn contact_request_name_at_max_length() {
        let request = ContactRequest {
            name: "a".repeat(100),
            email: "john@example.com".to_string(),
            message: "This is a test message that is long enough.".to_string(),
            honeypot: None,
        };
        assert!(request.validate().is_ok());
    }
    #[test]
    fn contact_request_invalid_email() {
        let request = ContactRequest {
            name: "John Doe".to_string(),
            email: "not-an-email".to_string(),
            message: "This is a test message that is long enough.".to_string(),
            honeypot: None,
        };
        assert!(request.validate().is_err());
    }
    #[test]
    fn contact_request_message_too_short() {
        let request = ContactRequest {
            name: "John Doe".to_string(),
            email: "john@example.com".to_string(),
            message: "Short".to_string(),
            honeypot: None,
        };
        assert!(request.validate().is_err());
    }
    #[test]
    fn contact_request_message_too_long() {
        // 5001 characters: one past the accepted maximum (see the
        // `message_at_max_length` test below).
        let request = ContactRequest {
            name: "John Doe".to_string(),
            email: "john@example.com".to_string(),
            message: "a".repeat(5001),
            honeypot: None,
        };
        assert!(request.validate().is_err());
    }
    #[test]
    fn contact_request_message_at_min_length() {
        let request = ContactRequest {
            name: "John Doe".to_string(),
            email: "john@example.com".to_string(),
            message: "a".repeat(10),
            honeypot: None,
        };
        assert!(request.validate().is_ok());
    }
    #[test]
    fn contact_request_message_at_max_length() {
        let request = ContactRequest {
            name: "John Doe".to_string(),
            email: "john@example.com".to_string(),
            message: "a".repeat(5000),
            honeypot: None,
        };
        assert!(request.validate().is_ok());
    }
    // Tests for SmtpTransport TryFrom implementation
    #[test]
    fn smtp_transport_implicit_tls() {
        let settings = EmailSettings {
            host: "smtp.example.com".to_string(),
            port: 465,
            user: "user@example.com".to_string(),
            password: "password".to_string(),
            from: "from@example.com".to_string(),
            recipient: "to@example.com".to_string(),
            tls: true,
            starttls: Starttls::Never,
        };
        let result = SmtpTransport::try_from(&settings);
        assert!(result.is_ok());
    }
    #[test]
    fn smtp_transport_starttls_always() {
        let settings = EmailSettings {
            host: "smtp.example.com".to_string(),
            port: 587,
            user: "user@example.com".to_string(),
            password: "password".to_string(),
            from: "from@example.com".to_string(),
            recipient: "to@example.com".to_string(),
            tls: false,
            starttls: Starttls::Always,
        };
        let result = SmtpTransport::try_from(&settings);
        assert!(result.is_ok());
    }
    #[test]
    fn smtp_transport_starttls_opportunistic() {
        let settings = EmailSettings {
            host: "smtp.example.com".to_string(),
            port: 587,
            user: "user@example.com".to_string(),
            password: "password".to_string(),
            from: "from@example.com".to_string(),
            recipient: "to@example.com".to_string(),
            tls: false,
            starttls: Starttls::Opportunistic,
        };
        let result = SmtpTransport::try_from(&settings);
        assert!(result.is_ok());
    }
    #[test]
    fn smtp_transport_no_encryption_with_credentials() {
        let settings = EmailSettings {
            host: "localhost".to_string(),
            port: 1025,
            user: "user@example.com".to_string(),
            password: "password".to_string(),
            from: "from@example.com".to_string(),
            recipient: "to@example.com".to_string(),
            tls: false,
            starttls: Starttls::Never,
        };
        let result = SmtpTransport::try_from(&settings);
        assert!(result.is_ok());
    }
    #[test]
    fn smtp_transport_no_encryption_no_credentials() {
        let settings = EmailSettings {
            host: "localhost".to_string(),
            port: 1025,
            user: String::new(),
            password: String::new(),
            from: "from@example.com".to_string(),
            recipient: "to@example.com".to_string(),
            tls: false,
            starttls: Starttls::Never,
        };
        let result = SmtpTransport::try_from(&settings);
        assert!(result.is_ok());
    }
    // Integration tests for contact API endpoint
    #[tokio::test]
    async fn contact_endpoint_honeypot_triggered() {
        let app = crate::get_test_app();
        let cli = poem::test::TestClient::new(app);
        // NOTE(review): "website" appears to be the wire name of the honeypot
        // field — confirm against `ContactRequest`'s serde attributes.
        let body = serde_json::json!({
            "name": "Bot Name",
            "email": "bot@example.com",
            "message": "This is a spam message from a bot.",
            "website": "http://spam.com"
        });
        let resp = cli.post("/api/contact").body_json(&body).send().await;
        resp.assert_status_is_ok();
        let json_text = resp.0.into_body().into_string().await.unwrap();
        let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
        assert!(json.success);
        assert!(json.message.contains("not really"));
    }
    #[tokio::test]
    async fn contact_endpoint_validation_error_empty_name() {
        let app = crate::get_test_app();
        let cli = poem::test::TestClient::new(app);
        let body = serde_json::json!({
            "name": "",
            "email": "test@example.com",
            "message": "This is a valid message that is long enough."
        });
        let resp = cli.post("/api/contact").body_json(&body).send().await;
        resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
        let json_text = resp.0.into_body().into_string().await.unwrap();
        let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
        assert!(!json.success);
        assert!(json.message.contains("Validation error"));
    }
    #[tokio::test]
    async fn contact_endpoint_validation_error_invalid_email() {
        let app = crate::get_test_app();
        let cli = poem::test::TestClient::new(app);
        let body = serde_json::json!({
            "name": "Test User",
            "email": "not-an-email",
            "message": "This is a valid message that is long enough."
        });
        let resp = cli.post("/api/contact").body_json(&body).send().await;
        resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
        let json_text = resp.0.into_body().into_string().await.unwrap();
        let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
        assert!(!json.success);
        assert!(json.message.contains("Validation error"));
    }
    #[tokio::test]
    async fn contact_endpoint_validation_error_message_too_short() {
        let app = crate::get_test_app();
        let cli = poem::test::TestClient::new(app);
        let body = serde_json::json!({
            "name": "Test User",
            "email": "test@example.com",
            "message": "Short"
        });
        let resp = cli.post("/api/contact").body_json(&body).send().await;
        resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
        let json_text = resp.0.into_body().into_string().await.unwrap();
        let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
        assert!(!json.success);
        assert!(json.message.contains("Validation error"));
    }
    #[tokio::test]
    async fn contact_endpoint_validation_error_name_too_long() {
        let app = crate::get_test_app();
        let cli = poem::test::TestClient::new(app);
        let body = serde_json::json!({
            "name": "a".repeat(101),
            "email": "test@example.com",
            "message": "This is a valid message that is long enough."
        });
        let resp = cli.post("/api/contact").body_json(&body).send().await;
        resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
        let json_text = resp.0.into_body().into_string().await.unwrap();
        let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
        assert!(!json.success);
        assert!(json.message.contains("Validation error"));
    }
    #[tokio::test]
    async fn contact_endpoint_validation_error_message_too_long() {
        let app = crate::get_test_app();
        let cli = poem::test::TestClient::new(app);
        let body = serde_json::json!({
            "name": "Test User",
            "email": "test@example.com",
            "message": "a".repeat(5001)
        });
        let resp = cli.post("/api/contact").body_json(&body).send().await;
        resp.assert_status(poem::http::StatusCode::BAD_REQUEST);
        let json_text = resp.0.into_body().into_string().await.unwrap();
        let json: ContactResponse = serde_json::from_str(&json_text).unwrap();
        assert!(!json.success);
        assert!(json.message.contains("Validation error"));
    }
}

View File

@@ -0,0 +1,38 @@
//! Health check endpoint for monitoring service availability.
use poem_openapi::{ApiResponse, OpenApi};
use super::ApiCategory;
// Possible responses of the health-check endpoint. (Plain `//` comments here
// on purpose: `ApiResponse` turns `///` variant docs into spec descriptions.)
#[derive(ApiResponse)]
enum HealthResponse {
    /// Success
    #[oai(status = 200)]
    Ok,
    /// Too Many Requests - rate limit exceeded
    #[oai(status = 429)]
    // Never constructed by the handler below; presumably emitted by the
    // rate-limiting middleware and listed here only for the OpenAPI spec —
    // confirm against the middleware.
    #[allow(dead_code)]
    TooManyRequests,
}
/// Health check API for monitoring service availability.
#[derive(Default, Clone)]
pub struct HealthApi;
#[OpenApi(tag = "ApiCategory::Health")]
impl HealthApi {
    // Liveness probe: logs the access at DEBUG and always replies 200 with an
    // empty body. A 429 is additionally documented in `HealthResponse` for
    // rate-limited requests.
    #[oai(path = "/health", method = "get")]
    async fn ping(&self) -> HealthResponse {
        tracing::event!(target: "backend::health", tracing::Level::DEBUG, "Accessing health-check endpoint");
        HealthResponse::Ok
    }
}
// Wrapped in a `#[cfg(test)] mod tests` for consistency with the other route
// modules (contact.rs, meta.rs), keeping test-only code out of non-test builds.
#[cfg(test)]
mod tests {
    // The endpoint should reply 200 with an empty body.
    #[tokio::test]
    async fn health_check_works() {
        let app = crate::get_test_app();
        let cli = poem::test::TestClient::new(app);
        let resp = cli.get("/api/health").send().await;
        resp.assert_status_is_ok();
        resp.assert_text("").await;
    }
}

86
backend/src/route/meta.rs Normal file
View File

@@ -0,0 +1,86 @@
//! Application metadata endpoint for retrieving version and name information.
use poem::Result;
use poem_openapi::{ApiResponse, Object, OpenApi, payload::Json};
use super::ApiCategory;
use crate::settings::ApplicationSettings;
// Serializable payload returned by the `/meta` endpoint. Plain `//` comments
// on purpose: the `Object` derive would turn `///` docs into schema text.
#[derive(Object, Debug, Clone, serde::Serialize, serde::Deserialize)]
struct Meta {
    // Application version, copied from `MetaApi`.
    version: String,
    // Application name, copied from `MetaApi`.
    name: String,
}
impl From<&MetaApi> for Meta {
    // Copies the API's name and version into the serializable payload.
    fn from(value: &MetaApi) -> Self {
        Self {
            version: value.version.clone(),
            name: value.name.clone(),
        }
    }
}
// Possible responses of the metadata endpoint. Plain `//` comments on
// purpose: `ApiResponse` turns `///` variant docs into spec descriptions.
#[derive(ApiResponse)]
enum MetaResponse {
    /// Success
    #[oai(status = 200)]
    Meta(Json<Meta>),
    /// Too Many Requests - rate limit exceeded
    #[oai(status = 429)]
    // Never constructed by the handler; presumably emitted by the
    // rate-limiting middleware and listed here for the OpenAPI spec —
    // confirm against the middleware.
    #[allow(dead_code)]
    TooManyRequests,
}
/// API for retrieving application metadata (name and version).
#[derive(Clone)]
pub struct MetaApi {
    /// Application name, copied from `ApplicationSettings` at startup.
    name: String,
    /// Application version, copied from `ApplicationSettings` at startup.
    version: String,
}
impl From<&ApplicationSettings> for MetaApi {
    // Extracts the name and version that the meta endpoint advertises.
    fn from(value: &ApplicationSettings) -> Self {
        Self {
            name: value.name.clone(),
            version: value.version.clone(),
        }
    }
}
#[OpenApi(tag = "ApiCategory::Meta")]
impl MetaApi {
    // Returns the application's name and version as JSON (`Meta` payload).
    // Plain `//` comment on purpose: a `///` doc would become the OpenAPI
    // operation description.
    #[oai(path = "/meta", method = "get")]
    async fn meta(&self) -> Result<MetaResponse> {
        tracing::event!(target: "backend::meta", tracing::Level::DEBUG, "Accessing meta endpoint");
        Ok(MetaResponse::Meta(Json(self.into())))
    }
}
#[cfg(test)]
mod tests {
    // Integration tests exercising the `/api/meta` endpoint end to end.
    #[tokio::test]
    async fn meta_endpoint_returns_correct_data() {
        let app = crate::get_test_app();
        let cli = poem::test::TestClient::new(app);
        let resp = cli.get("/api/meta").send().await;
        resp.assert_status_is_ok();
        // Only presence of the fields is asserted, not their values, since
        // the test app's name/version come from its settings.
        let json_value: serde_json::Value = resp.json().await.value().deserialize();
        assert!(
            json_value.get("version").is_some(),
            "Response should have version field"
        );
        assert!(
            json_value.get("name").is_some(),
            "Response should have name field"
        );
    }
    #[tokio::test]
    async fn meta_endpoint_returns_200_status() {
        let app = crate::get_test_app();
        let cli = poem::test::TestClient::new(app);
        let resp = cli.get("/api/meta").send().await;
        resp.assert_status_is_ok();
    }
}

46
backend/src/route/mod.rs Normal file
View File

@@ -0,0 +1,46 @@
//! API route handlers for the backend server.
//!
//! This module contains all the HTTP endpoint handlers organized by functionality:
//! - Contact form handling
//! - Health checks
//! - Application metadata
use poem_openapi::Tags;
mod contact;
mod health;
mod meta;
use crate::settings::Settings;
// OpenAPI tag categories used to group endpoints in the generated spec.
// Plain `//` comments on purpose: the `Tags` derive may surface `///` docs
// in the spec output.
#[derive(Tags)]
enum ApiCategory {
    Contact, // contact-form endpoints
    Health,  // health-check endpoints
    Meta,    // application-metadata endpoints
}
/// Aggregates every sub-API exposed by the backend.
pub(crate) struct Api {
    /// Contact-form submission endpoints.
    contact: contact::ContactApi,
    /// Health-check endpoints.
    health: health::HealthApi,
    /// Application-metadata endpoints.
    meta: meta::MetaApi,
}
impl From<&Settings> for Api {
    /// Builds every sub-API from the application settings.
    fn from(value: &Settings) -> Self {
        // Clone only the email section instead of cloning the entire
        // `Settings` just to move one field out of it.
        let contact = contact::ContactApi::from(value.email.clone());
        let health = health::HealthApi;
        let meta = meta::MetaApi::from(&value.application);
        Self {
            contact,
            health,
            meta,
        }
    }
}
impl Api {
    /// Splits the aggregate into the tuple of sub-APIs consumed by
    /// `OpenApiService::new` at startup.
    pub fn apis(self) -> (contact::ContactApi, health::HealthApi, meta::MetaApi) {
        (self.contact, self.health, self.meta)
    }
}

619
backend/src/settings.rs Normal file
View File

@@ -0,0 +1,619 @@
//! Application configuration settings.
//!
//! This module provides configuration structures that can be loaded from:
//! - YAML configuration files (base.yaml and environment-specific files)
//! - Environment variables (prefixed with APP__)
//!
//! Settings include application details, email server configuration, and environment settings.
/// Application configuration settings.
///
/// Loads configuration from YAML files and environment variables.
#[derive(Debug, serde::Deserialize, Clone, Default)]
pub struct Settings {
    /// Application-specific settings (name, version, host, port, etc.)
    pub application: ApplicationSettings,
    /// Debug mode flag
    pub debug: bool,
    /// Email server configuration for contact form
    pub email: EmailSettings,
    /// Frontend URL for CORS configuration
    pub frontend_url: String,
    /// Rate limiting configuration
    ///
    /// Optional in the configuration sources; falls back to
    /// `RateLimitSettings::default()` when absent.
    #[serde(default)]
    pub rate_limit: RateLimitSettings,
}
impl Settings {
    /// Creates a new `Settings` instance by loading configuration from files and environment variables.
    ///
    /// Sources are layered in this order, with later sources overriding
    /// earlier ones: `settings/base.yaml`, then the environment-specific file
    /// (`development.yaml` or `production.yaml`, chosen via `APP_ENVIRONMENT`),
    /// then environment variables prefixed with `APP__` (double underscore
    /// also separates nesting levels, e.g. `APP__APPLICATION__PORT`).
    ///
    /// # Errors
    ///
    /// Returns a `config::ConfigError` if:
    /// - Configuration files cannot be read or parsed
    /// - Required configuration values are missing
    /// - Configuration values cannot be deserialized into the expected types
    ///
    /// # Panics
    ///
    /// Panics if:
    /// - The current directory cannot be determined
    /// - The `APP_ENVIRONMENT` variable contains an invalid value (not "dev", "development", "prod", or "production")
    pub fn new() -> Result<Self, config::ConfigError> {
        let base_path = std::env::current_dir().expect("Failed to determine the current directory");
        let settings_directory = base_path.join("settings");
        // Defaults to the development environment when APP_ENVIRONMENT is unset.
        let environment: Environment = std::env::var("APP_ENVIRONMENT")
            .unwrap_or_else(|_| "dev".into())
            .try_into()
            .expect("Failed to parse APP_ENVIRONMENT");
        // `Environment`'s Display yields "development"/"production", which
        // names the per-environment settings file.
        let environment_filename = format!("{environment}.yaml");
        // Lower = takes precedence
        let settings = config::Config::builder()
            .add_source(config::File::from(settings_directory.join("base.yaml")))
            .add_source(config::File::from(
                settings_directory.join(environment_filename),
            ))
            .add_source(
                config::Environment::with_prefix("APP")
                    .prefix_separator("__")
                    .separator("__"),
            )
            .build()?;
        settings.try_deserialize()
    }
}
/// Application-specific configuration settings.
///
/// `name` and `version` are exposed through the `/api/meta` endpoint, and
/// `host`/`port` determine where the HTTP server binds.
#[derive(Debug, serde::Deserialize, Clone, Default)]
pub struct ApplicationSettings {
    /// Application name
    pub name: String,
    /// Application version
    pub version: String,
    /// Port to bind to
    pub port: u16,
    /// Host address to bind to
    pub host: String,
    /// Base URL of the application
    pub base_url: String,
    /// Protocol (http or https)
    pub protocol: String,
}
/// Application environment.
///
/// Parsed from the `APP_ENVIRONMENT` variable; the `TryFrom` impls also
/// accept the `dev`/`prod` shorthands, case-insensitively.
#[derive(Debug, PartialEq, Eq, Default)]
pub enum Environment {
    /// Development environment
    #[default]
    Development,
    /// Production environment
    Production,
}
impl std::fmt::Display for Environment {
    // Renders the environment as the lowercase name used for the
    // per-environment settings file.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Self::Development => "development",
            Self::Production => "production",
        })
    }
}
impl TryFrom<String> for Environment {
    type Error = String;
    // Delegates to the `&str` implementation.
    fn try_from(value: String) -> Result<Self, Self::Error> {
        value.as_str().try_into()
    }
}
impl TryFrom<&str> for Environment {
    type Error = String;
    // Parses an environment name case-insensitively, accepting the `dev` and
    // `prod` shorthands alongside the full names.
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        let lowered = value.to_lowercase();
        if matches!(lowered.as_str(), "development" | "dev") {
            Ok(Self::Development)
        } else if matches!(lowered.as_str(), "production" | "prod") {
            Ok(Self::Production)
        } else {
            Err(format!(
                "{lowered} is not a supported environment. Use either `development` or `production`"
            ))
        }
    }
}
/// Email server configuration for the contact form.
///
/// The hand-written `Debug` impl redacts `password`, so these settings can be
/// logged without leaking the SMTP credential.
#[derive(serde::Deserialize, Clone, Default)]
pub struct EmailSettings {
    /// SMTP server hostname
    pub host: String,
    /// SMTP server port
    pub port: u16,
    /// SMTP authentication username
    pub user: String,
    /// Email address to send from
    pub from: String,
    /// SMTP authentication password
    pub password: String,
    /// Email address to send contact form submissions to
    pub recipient: String,
    /// STARTTLS configuration
    pub starttls: Starttls,
    /// Whether to use implicit TLS (SMTPS)
    pub tls: bool,
}
impl std::fmt::Debug for EmailSettings {
    // Manual impl instead of `#[derive(Debug)]` so the SMTP password never
    // reaches logs or error output.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("EmailSettings")
            .field("host", &self.host)
            .field("port", &self.port)
            .field("user", &self.user)
            .field("from", &self.from)
            // The real password is replaced with a fixed placeholder.
            .field("password", &"[REDACTED]")
            .field("recipient", &self.recipient)
            .field("starttls", &self.starttls)
            .field("tls", &self.tls)
            .finish()
    }
}
/// STARTTLS configuration for SMTP connections.
///
/// Deserializes from either a string alias (`"never"`/`"no"`/`"off"`,
/// `"opportunistic"`, `"always"`/`"yes"`) or a boolean — see the
/// `Deserialize` impl below.
#[derive(Debug, PartialEq, Eq, Default, Clone)]
pub enum Starttls {
    /// Never use STARTTLS (unencrypted connection)
    #[default]
    Never,
    /// Use STARTTLS if available (opportunistic encryption)
    Opportunistic,
    /// Always use STARTTLS (required encryption)
    Always,
}
impl TryFrom<&str> for Starttls {
    type Error = String;
    // Parses a STARTTLS policy case-insensitively, accepting several aliases
    // per policy (`off`/`no`/`never`, `opportunistic`, `yes`/`always`).
    fn try_from(value: &str) -> Result<Self, Self::Error> {
        let lowered = value.to_lowercase();
        if matches!(lowered.as_str(), "off" | "no" | "never") {
            Ok(Self::Never)
        } else if lowered == "opportunistic" {
            Ok(Self::Opportunistic)
        } else if matches!(lowered.as_str(), "yes" | "always") {
            Ok(Self::Always)
        } else {
            Err(format!(
                "{lowered} is not a supported option. Use either `yes`, `no`, or `opportunistic`"
            ))
        }
    }
}
impl TryFrom<String> for Starttls {
    type Error = String;
    // Delegates to the `&str` implementation.
    fn try_from(value: String) -> Result<Self, Self::Error> {
        Self::try_from(value.as_str())
    }
}
impl From<bool> for Starttls {
    // Maps `true` to `Always` and `false` to `Never`.
    fn from(value: bool) -> Self {
        match value {
            true => Self::Always,
            false => Self::Never,
        }
    }
}
impl std::fmt::Display for Starttls {
    // Renders the policy as its lowercase configuration name.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Self::Never => "never",
            Self::Opportunistic => "opportunistic",
            Self::Always => "always",
        })
    }
}
impl<'de> serde::Deserialize<'de> for Starttls {
    // Accepts either a string alias (`"yes"`, `"never"`, `"opportunistic"`, …)
    // or a boolean (`true` → Always, `false` → Never), bridging onto the
    // existing `TryFrom`/`From` conversions.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // Note: fixes the `StartlsVisitor` typo in the old local name.
        struct StarttlsVisitor;
        impl serde::de::Visitor<'_> for StarttlsVisitor {
            type Value = Starttls;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("a string or boolean representing STARTTLS setting (e.g., 'yes', 'no', 'opportunistic', true, false)")
            }
            fn visit_bool<E>(self, value: bool) -> Result<Starttls, E>
            where
                E: serde::de::Error,
            {
                Ok(value.into())
            }
            fn visit_str<E>(self, value: &str) -> Result<Starttls, E>
            where
                E: serde::de::Error,
            {
                value.try_into().map_err(E::custom)
            }
            fn visit_string<E>(self, value: String) -> Result<Starttls, E>
            where
                E: serde::de::Error,
            {
                self.visit_str(value.as_str())
            }
        }
        deserializer.deserialize_any(StarttlsVisitor)
    }
}
/// Rate limiting configuration.
///
/// Per-field defaults (enabled, 100 requests per 60-second window) apply both
/// via serde and via the `Default` impl below.
#[derive(Debug, serde::Deserialize, Clone)]
pub struct RateLimitSettings {
    /// Whether rate limiting is enabled
    #[serde(default = "default_rate_limit_enabled")]
    pub enabled: bool,
    /// Maximum number of requests allowed in the time window (burst size)
    #[serde(default = "default_burst_size")]
    pub burst_size: u32,
    /// Time window in seconds for rate limiting
    #[serde(default = "default_per_seconds")]
    pub per_seconds: u64,
}
impl Default for RateLimitSettings {
    // Mirrors the per-field serde defaults so an entirely missing
    // `rate_limit` section behaves exactly like an empty one.
    fn default() -> Self {
        Self {
            enabled: default_rate_limit_enabled(),
            burst_size: default_burst_size(),
            per_seconds: default_per_seconds(),
        }
    }
}
/// Default for `RateLimitSettings::enabled`: rate limiting is on.
const fn default_rate_limit_enabled() -> bool {
    true
}
/// Default for `RateLimitSettings::burst_size`: 100 requests per window.
const fn default_burst_size() -> u32 {
    100
}
/// Default for `RateLimitSettings::per_seconds`: a 60-second window.
const fn default_per_seconds() -> u64 {
    60
}
#[cfg(test)]
mod tests {
    use super::*;
    // Covers `Environment` parsing/display, `Starttls` parsing/display and
    // serde deserialization, `RateLimitSettings` defaults, and the redacting
    // `Debug` impl of `EmailSettings`.
    #[test]
    fn environment_display_development() {
        let env = Environment::Development;
        assert_eq!(env.to_string(), "development");
    }
    #[test]
    fn environment_display_production() {
        let env = Environment::Production;
        assert_eq!(env.to_string(), "production");
    }
    #[test]
    fn environment_from_str_development() {
        assert_eq!(
            Environment::try_from("development").unwrap(),
            Environment::Development
        );
        assert_eq!(
            Environment::try_from("dev").unwrap(),
            Environment::Development
        );
        assert_eq!(
            Environment::try_from("Development").unwrap(),
            Environment::Development
        );
        assert_eq!(
            Environment::try_from("DEV").unwrap(),
            Environment::Development
        );
    }
    #[test]
    fn environment_from_str_production() {
        assert_eq!(
            Environment::try_from("production").unwrap(),
            Environment::Production
        );
        assert_eq!(
            Environment::try_from("prod").unwrap(),
            Environment::Production
        );
        assert_eq!(
            Environment::try_from("Production").unwrap(),
            Environment::Production
        );
        assert_eq!(
            Environment::try_from("PROD").unwrap(),
            Environment::Production
        );
    }
    #[test]
    fn environment_from_str_invalid() {
        let result = Environment::try_from("invalid");
        assert!(result.is_err());
        assert!(result.unwrap_err().contains("not a supported environment"));
    }
    #[test]
    fn environment_from_string_development() {
        assert_eq!(
            Environment::try_from("development".to_string()).unwrap(),
            Environment::Development
        );
    }
    #[test]
    fn environment_from_string_production() {
        assert_eq!(
            Environment::try_from("production".to_string()).unwrap(),
            Environment::Production
        );
    }
    #[test]
    fn environment_from_string_invalid() {
        let result = Environment::try_from("invalid".to_string());
        assert!(result.is_err());
    }
    #[test]
    fn environment_default_is_development() {
        let env = Environment::default();
        assert_eq!(env, Environment::Development);
    }
    #[test]
    fn startls_deserialize_from_string_never() {
        let json = r#""never""#;
        let result: Starttls = serde_json::from_str(json).unwrap();
        assert_eq!(result, Starttls::Never);
        let json = r#""no""#;
        let result: Starttls = serde_json::from_str(json).unwrap();
        assert_eq!(result, Starttls::Never);
        let json = r#""off""#;
        let result: Starttls = serde_json::from_str(json).unwrap();
        assert_eq!(result, Starttls::Never);
    }
    #[test]
    fn startls_deserialize_from_string_always() {
        let json = r#""always""#;
        let result: Starttls = serde_json::from_str(json).unwrap();
        assert_eq!(result, Starttls::Always);
        let json = r#""yes""#;
        let result: Starttls = serde_json::from_str(json).unwrap();
        assert_eq!(result, Starttls::Always);
    }
    #[test]
    fn startls_deserialize_from_string_opportunistic() {
        let json = r#""opportunistic""#;
        let result: Starttls = serde_json::from_str(json).unwrap();
        assert_eq!(result, Starttls::Opportunistic);
    }
    #[test]
    fn startls_deserialize_from_bool() {
        let json = "true";
        let result: Starttls = serde_json::from_str(json).unwrap();
        assert_eq!(result, Starttls::Always);
        let json = "false";
        let result: Starttls = serde_json::from_str(json).unwrap();
        assert_eq!(result, Starttls::Never);
    }
    #[test]
    fn startls_deserialize_from_string_invalid() {
        let json = r#""invalid""#;
        let result: Result<Starttls, _> = serde_json::from_str(json);
        assert!(result.is_err());
    }
    #[test]
    fn startls_default_is_never() {
        let startls = Starttls::default();
        assert_eq!(startls, Starttls::Never);
    }
    #[test]
    fn startls_try_from_str_never() {
        assert_eq!(Starttls::try_from("never").unwrap(), Starttls::Never);
        assert_eq!(Starttls::try_from("no").unwrap(), Starttls::Never);
        assert_eq!(Starttls::try_from("off").unwrap(), Starttls::Never);
        assert_eq!(Starttls::try_from("NEVER").unwrap(), Starttls::Never);
        assert_eq!(Starttls::try_from("No").unwrap(), Starttls::Never);
    }
    #[test]
    fn startls_try_from_str_always() {
        assert_eq!(Starttls::try_from("always").unwrap(), Starttls::Always);
        assert_eq!(Starttls::try_from("yes").unwrap(), Starttls::Always);
        assert_eq!(Starttls::try_from("ALWAYS").unwrap(), Starttls::Always);
        assert_eq!(Starttls::try_from("Yes").unwrap(), Starttls::Always);
    }
    #[test]
    fn startls_try_from_str_opportunistic() {
        assert_eq!(
            Starttls::try_from("opportunistic").unwrap(),
            Starttls::Opportunistic
        );
        assert_eq!(
            Starttls::try_from("OPPORTUNISTIC").unwrap(),
            Starttls::Opportunistic
        );
    }
    #[test]
    fn startls_try_from_str_invalid() {
        let result = Starttls::try_from("invalid");
        assert!(result.is_err());
        assert!(result
            .unwrap_err()
            .contains("not a supported option"));
    }
    #[test]
    fn startls_try_from_string_never() {
        assert_eq!(
            Starttls::try_from("never".to_string()).unwrap(),
            Starttls::Never
        );
    }
    #[test]
    fn startls_try_from_string_always() {
        assert_eq!(
            Starttls::try_from("yes".to_string()).unwrap(),
            Starttls::Always
        );
    }
    #[test]
    fn startls_try_from_string_opportunistic() {
        assert_eq!(
            Starttls::try_from("opportunistic".to_string()).unwrap(),
            Starttls::Opportunistic
        );
    }
    #[test]
    fn startls_try_from_string_invalid() {
        let result = Starttls::try_from("invalid".to_string());
        assert!(result.is_err());
    }
    #[test]
    fn startls_from_bool_true() {
        assert_eq!(Starttls::from(true), Starttls::Always);
    }
    #[test]
    fn startls_from_bool_false() {
        assert_eq!(Starttls::from(false), Starttls::Never);
    }
    #[test]
    fn startls_display_never() {
        let startls = Starttls::Never;
        assert_eq!(startls.to_string(), "never");
    }
    #[test]
    fn startls_display_always() {
        let startls = Starttls::Always;
        assert_eq!(startls.to_string(), "always");
    }
    #[test]
    fn startls_display_opportunistic() {
        let startls = Starttls::Opportunistic;
        assert_eq!(startls.to_string(), "opportunistic");
    }
    #[test]
    fn rate_limit_settings_default() {
        let settings = RateLimitSettings::default();
        assert!(settings.enabled);
        assert_eq!(settings.burst_size, 100);
        assert_eq!(settings.per_seconds, 60);
    }
    #[test]
    fn rate_limit_settings_deserialize_full() {
        let json = r#"{"enabled": true, "burst_size": 50, "per_seconds": 30}"#;
        let settings: RateLimitSettings = serde_json::from_str(json).unwrap();
        assert!(settings.enabled);
        assert_eq!(settings.burst_size, 50);
        assert_eq!(settings.per_seconds, 30);
    }
    #[test]
    fn rate_limit_settings_deserialize_partial() {
        let json = r#"{"enabled": false}"#;
        let settings: RateLimitSettings = serde_json::from_str(json).unwrap();
        assert!(!settings.enabled);
        assert_eq!(settings.burst_size, 100); // default
        assert_eq!(settings.per_seconds, 60); // default
    }
    #[test]
    fn rate_limit_settings_deserialize_empty() {
        let json = "{}";
        let settings: RateLimitSettings = serde_json::from_str(json).unwrap();
        assert!(settings.enabled); // default
        assert_eq!(settings.burst_size, 100); // default
        assert_eq!(settings.per_seconds, 60); // default
    }
    #[test]
    fn startls_deserialize_from_incompatible_type() {
        // Test that deserialization from an array fails with expected error message
        let json = "[1, 2, 3]";
        let result: Result<Starttls, _> = serde_json::from_str(json);
        assert!(result.is_err());
        let error = result.unwrap_err().to_string();
        // The error should mention what was expected
        assert!(
            error.contains("STARTTLS") || error.contains("string") || error.contains("boolean")
        );
    }
    #[test]
    fn startls_deserialize_from_number() {
        // Test that deserialization from a number fails
        let json = "42";
        let result: Result<Starttls, _> = serde_json::from_str(json);
        assert!(result.is_err());
    }
    #[test]
    fn startls_deserialize_from_object() {
        // Test that deserialization from an object fails
        let json = r#"{"foo": "bar"}"#;
        let result: Result<Starttls, _> = serde_json::from_str(json);
        assert!(result.is_err());
    }
    #[test]
    fn email_settings_debug_redacts_password() {
        let settings = EmailSettings {
            host: "smtp.example.com".to_string(),
            port: 587,
            user: "user@example.com".to_string(),
            from: "noreply@example.com".to_string(),
            password: "super_secret_password".to_string(),
            recipient: "admin@example.com".to_string(),
            starttls: Starttls::Always,
            tls: false,
        };
        let debug_output = format!("{settings:?}");
        // Password should be redacted
        assert!(debug_output.contains("[REDACTED]"));
        // Password should not appear in output
        assert!(!debug_output.contains("super_secret_password"));
        // Other fields should still be present
        assert!(debug_output.contains("smtp.example.com"));
        assert!(debug_output.contains("user@example.com"));
    }
}

228
backend/src/startup.rs Normal file
View File

@@ -0,0 +1,228 @@
//! Application startup and server configuration.
//!
//! This module handles:
//! - Building the application with routes and middleware
//! - Setting up the OpenAPI service and Swagger UI
//! - Configuring CORS
//! - Starting the HTTP server
use poem::middleware::{AddDataEndpoint, Cors, CorsEndpoint};
use poem::{EndpointExt, Route};
use poem_openapi::OpenApiService;
use crate::{
middleware::rate_limit::{RateLimit, RateLimitConfig},
route::Api,
settings::Settings,
};
use crate::middleware::rate_limit::RateLimitEndpoint;
// Concrete server type: a TCP listener bound from a `String` address, no TLS
// acceptor (hence the `Infallible` parameter).
type Server = poem::Server<poem::listener::TcpListener<String>, std::convert::Infallible>;
/// The configured application with rate limiting, CORS, and settings data.
///
/// The nesting mirrors the middleware order applied in
/// `From<Application> for RunnableApplication`: routes → rate limit → CORS →
/// settings data.
pub type App = AddDataEndpoint<CorsEndpoint<RateLimitEndpoint<Route>>, Settings>;
/// Application builder that holds the server configuration before running.
pub struct Application {
    /// HTTP server, already associated with a listener.
    server: Server,
    /// Bare route tree; middleware is applied when converting to
    /// `RunnableApplication`.
    app: poem::Route,
    /// Host the server was configured with (kept for introspection).
    host: String,
    /// Port the server was configured with (kept for introspection).
    port: u16,
    /// Full settings, consumed when building the runnable application.
    settings: Settings,
}
/// A fully configured application ready to run.
pub struct RunnableApplication {
    /// HTTP server bound to its listener.
    server: Server,
    /// Endpoint with middleware and settings data already applied.
    app: App,
}
impl RunnableApplication {
    /// Runs the application server.
    ///
    /// Awaits the underlying `poem` server; this only resolves once the
    /// server stops serving.
    ///
    /// # Errors
    ///
    /// Returns a `std::io::Error` if the server fails to start or encounters
    /// an I/O error during runtime (e.g., port already in use, network issues).
    pub async fn run(self) -> Result<(), std::io::Error> {
        self.server.run(self.app).await
    }
}
impl From<RunnableApplication> for App {
fn from(value: RunnableApplication) -> Self {
value.app
}
}
impl From<Application> for RunnableApplication {
    // Applies the middleware stack (rate limiting, then CORS, then settings
    // data) to the bare route tree; the wrapping order matches the `App`
    // type alias.
    fn from(value: Application) -> Self {
        // Configure rate limiting based on settings
        let rate_limit_config = if value.settings.rate_limit.enabled {
            tracing::event!(
                target: "backend::startup",
                tracing::Level::INFO,
                burst_size = value.settings.rate_limit.burst_size,
                per_seconds = value.settings.rate_limit.per_seconds,
                "Rate limiting enabled"
            );
            RateLimitConfig::new(
                value.settings.rate_limit.burst_size,
                value.settings.rate_limit.per_seconds,
            )
        } else {
            tracing::event!(
                target: "backend::startup",
                tracing::Level::INFO,
                "Rate limiting disabled (using very high limits)"
            );
            // Use very high limits to effectively disable rate limiting
            RateLimitConfig::new(u32::MAX, 1)
        };
        let app = value
            .app
            .with(RateLimit::new(&rate_limit_config))
            .with(Cors::new())
            .data(value.settings);
        let server = value.server;
        Self { server, app }
    }
}
impl Application {
    /// Assembles the OpenAPI service, Swagger UI, and YAML spec endpoint into
    /// a single `poem` route tree.
    fn setup_app(settings: &Settings) -> poem::Route {
        let api_service = OpenApiService::new(
            Api::from(settings).apis(),
            // Clone only the strings needed instead of cloning the whole
            // `ApplicationSettings` struct per field.
            settings.application.name.clone(),
            settings.application.version.clone(),
        )
        .url_prefix("/api");
        let ui = api_service.swagger_ui();
        poem::Route::new()
            .nest("/api", api_service.clone())
            .nest("/specs", api_service.spec_endpoint_yaml())
            .nest("/", ui)
    }
    /// Creates the HTTP server, binding to `host:port` from the settings
    /// unless a pre-built listener is supplied (useful for tests that bind
    /// to a random free port).
    fn setup_server(
        settings: &Settings,
        tcp_listener: Option<poem::listener::TcpListener<String>>,
    ) -> Server {
        let tcp_listener = tcp_listener.unwrap_or_else(|| {
            let address = format!(
                "{}:{}",
                settings.application.host, settings.application.port
            );
            poem::listener::TcpListener::bind(address)
        });
        poem::Server::new(tcp_listener)
    }
    /// Builds a new application with the given settings and optional TCP listener.
    ///
    /// If no listener is provided, one will be created based on the settings.
    #[must_use]
    pub fn build(
        settings: Settings,
        tcp_listener: Option<poem::listener::TcpListener<String>>,
    ) -> Self {
        let port = settings.application.port;
        // Avoid cloning the whole ApplicationSettings just to take the host.
        let host = settings.application.host.clone();
        let app = Self::setup_app(&settings);
        let server = Self::setup_server(&settings, tcp_listener);
        Self {
            server,
            app,
            host,
            port,
            settings,
        }
    }
    /// Converts the application into a runnable application.
    #[must_use]
    pub fn make_app(self) -> RunnableApplication {
        self.into()
    }
    /// Returns the host address the application is configured to bind to.
    #[must_use]
    pub fn host(&self) -> String {
        self.host.clone()
    }
    /// Returns the port the application is configured to bind to.
    #[must_use]
    pub const fn port(&self) -> u16 {
        self.port
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Minimal settings fixture shared by every test below.
    fn create_test_settings() -> Settings {
        Settings {
            application: crate::settings::ApplicationSettings {
                name: "test-app".to_string(),
                version: "1.0.0".to_string(),
                port: 8080,
                host: "127.0.0.1".to_string(),
                base_url: "http://localhost:8080".to_string(),
                protocol: "http".to_string(),
            },
            debug: false,
            email: crate::settings::EmailSettings::default(),
            frontend_url: "http://localhost:3000".to_string(),
            rate_limit: crate::settings::RateLimitSettings {
                enabled: false,
                burst_size: 100,
                per_seconds: 60,
            },
        }
    }
    #[test]
    fn application_build_and_host() {
        let settings = create_test_settings();
        let expected_host = settings.application.host.clone();
        let application = Application::build(settings, None);
        assert_eq!(application.host(), expected_host);
    }
    #[test]
    fn application_build_and_port() {
        let application = Application::build(create_test_settings(), None);
        assert_eq!(application.port(), 8080);
    }
    #[test]
    fn application_host_returns_correct_value() {
        let application = Application::build(create_test_settings(), None);
        assert_eq!(application.host(), "127.0.0.1");
    }
    #[test]
    fn application_port_returns_correct_value() {
        let application = Application::build(create_test_settings(), None);
        assert_eq!(application.port(), 8080);
    }
    #[test]
    fn application_with_custom_listener() {
        // Reserve a free port via a std listener, then hand the application a
        // poem listener for that port. `port()` still reports the value from
        // the settings (8080), not the custom listener's port.
        let reserved =
            std::net::TcpListener::bind("127.0.0.1:0").expect("Failed to bind random port");
        let free_port = reserved.local_addr().unwrap().port();
        let listener = poem::listener::TcpListener::bind(format!("127.0.0.1:{free_port}"));
        let application = Application::build(create_test_settings(), Some(listener));
        assert_eq!(application.host(), "127.0.0.1");
        assert_eq!(application.port(), 8080);
    }
}

69
backend/src/telemetry.rs Normal file
View File

@@ -0,0 +1,69 @@
//! Logging and tracing configuration.
//!
//! This module provides utilities for setting up structured logging using the tracing crate.
//! Supports both pretty-printed logs for development and JSON logs for production.
use tracing_subscriber::layer::SubscriberExt;
/// Creates a tracing subscriber configured for the given debug mode.
///
/// In debug mode, logs are pretty-printed to stdout.
/// In production mode, logs are output as JSON only.
#[must_use]
pub fn get_subscriber(debug: bool) -> impl tracing::Subscriber + Send + Sync {
    // Default verbosity: `debug` in debug mode, `info` otherwise. An explicit
    // `RUST_LOG`-style environment filter always takes precedence.
    let default_filter = if debug { "debug" } else { "info" };
    let env_filter = tracing_subscriber::EnvFilter::try_from_default_env()
        .unwrap_or_else(|_| tracing_subscriber::EnvFilter::new(default_filter));
    // Exactly one formatting layer is active at a time. Previously the pretty
    // layer was installed unconditionally and the JSON layer added on top in
    // production, so every production event was emitted twice (once pretty,
    // once JSON), contradicting the documented behavior.
    let pretty_log = if debug {
        Some(tracing_subscriber::fmt::layer().pretty())
    } else {
        None
    };
    let json_log = if debug {
        None
    } else {
        Some(tracing_subscriber::fmt::layer().json())
    };
    tracing_subscriber::Registry::default()
        .with(env_filter)
        .with(pretty_log)
        .with(json_log)
}
/// Initializes the global tracing subscriber.
///
/// # Panics
///
/// Panics if:
/// - A global subscriber has already been set
/// - The subscriber cannot be set as the global default
pub fn init_subscriber(subscriber: impl tracing::Subscriber + Send + Sync) {
    // `set_global_default` can only succeed once per process; any failure is
    // surfaced as a panic since logging is essential at startup.
    let result = tracing::subscriber::set_global_default(subscriber);
    result.expect("Failed to set subscriber");
}
#[cfg(test)]
mod tests {
    use super::*;
    // These are smoke tests: the subscriber types are opaque, so the best we
    // can assert is that construction completes without panicking.
    #[test]
    fn get_subscriber_debug_mode() {
        drop(get_subscriber(true));
    }
    #[test]
    fn get_subscriber_production_mode() {
        drop(get_subscriber(false));
    }
    #[test]
    fn get_subscriber_creates_valid_subscriber() {
        // Both modes must produce a valid subscriber.
        drop(get_subscriber(true));
        drop(get_subscriber(false));
    }
}

View File

@@ -1,4 +0,0 @@
;;; Directory Local Variables -*- no-byte-compile: t -*-
;;; For more information see (info "(emacs) Directory Variables")
((typescript-mode . ((typescript-indent-level . 2))))

View File

@@ -1,15 +0,0 @@
import { defineClientConfig } from '@vuepress/client';
import PreviewImage from './components/PreviewImage.vue';
import ResponsiveImage from './components/ResponsiveImage.vue';
import LatestRepositories from './components/LatestRepositories.vue';
export default defineClientConfig({
enhance({ app, router, siteData }) {
app.component('PreviewImage', PreviewImage);
app.component('ResponsiveImage', ResponsiveImage);
app.component('LatestRepositories', LatestRepositories);
},
setup() {},
layouts: {},
rootComponents: [],
});

View File

@@ -1,36 +0,0 @@
<template>
  <div v-if="error">
    {{ error }}
  </div>
  <div v-else v-for="repo in githubRepos" :key="repo.id">
    <p>{{ repo.name }} updated at {{ repo.updated_at }}</p>
  </div>
</template>
<script setup lang="ts">
import { ref } from 'vue';
import { readFromCache } from '../composables/cache';
import {
  GithubError,
  GithubRepo,
  getLatestRepositories,
} from '../composables/github';
// Reactive state. The previous plain `let` bindings were not tracked by Vue,
// so the template never re-rendered once the asynchronous subscription below
// delivered data; `ref` makes the updates visible.
const githubRepos = ref<GithubRepo[] | null>(null);
const error = ref<GithubError | null>(null);
const getRepositories = () => {
  return getLatestRepositories('phundrak', 5);
};
// Load the five most recently updated repositories, going through the
// localStorage-backed cache first.
readFromCache<GithubRepo[]>('latestRepos', getRepositories).subscribe({
  next: (repos: GithubRepo[]) => {
    githubRepos.value = repos;
    error.value = null;
  },
  error: (errorResponse: GithubError) => {
    githubRepos.value = null;
    error.value = errorResponse;
  },
});
</script>
<style lang="less"></style>

View File

@@ -1,51 +0,0 @@
<template>
  <a class="no-decoration" :href="src">
    <figure class="img-prev" :style="style">
      <!--
        Prop names must match ResponsiveImage's declared props (`src`,
        `width`, `preview`, `previewWidth`, `previewThreshold`). The previous
        bindings (`source`, `size`, `previewTheshold`) did not match any
        declared prop, so those values were silently dropped.
      -->
      <ResponsiveImage
        :src="props.src"
        :width="props.width"
        :preview="props.preview"
        :previewWidth="props.previewWidth"
        :previewThreshold="props.maxwidth"
      />
      <figcaption>
        <slot />
      </figcaption>
    </figure>
  </a>
</template>
<script setup lang="ts">
// Props: the main image (`src`/`width`), a low-resolution preview
// (`preview`/`previewWidth`), and an optional `maxwidth` that both constrains
// the figure and serves as the responsive-image threshold.
const props = defineProps<{
  src: string;
  width: number;
  preview: string;
  previewWidth: number;
  maxwidth?: number;
}>();
// Inline style clamping the figure's width when `maxwidth` is provided.
const style = props.maxwidth ? `max-width: ${props.maxwidth}px` : '';
</script>
<style scoped lang="scss">
img {
  height: auto;
  width: 100%;
}
figure {
  float: left;
  margin: 0.5rem 1rem;
  display: flex;
  justify-content: center;
  align-items: center;
  flex-direction: column;
  max-width: 400px;
}
@media only screen and (max-width: 800px) {
  figure {
    float: none;
  }
}
</style>

View File

@@ -1,25 +0,0 @@
<template>
<img :srcset="srcset" :sizes="sizes" :alt="props.alt" :src="props.src" />
</template>
<script setup lang="ts">
const props = defineProps<{
src: string;
width: number;
preview: string;
previewWidth: number;
previewThreshold?: number;
alt?: string;
}>();
const srcset = [
`${props.preview} ${props.previewWidth}w`,
`${props.src} ${props.width}w`,
].join(', ');
const sizes = [
`(max-width: ${props.previewThreshold || props.previewWidth}px) ${
props.previewWidth
}px`,
`${props.width}px`,
].join(', ');
</script>

View File

@@ -1,46 +0,0 @@
import { Observable, of } from 'rxjs';
const cacheAgeLimitInMilliseconds = 1000 * 60 * 60;
// Reports whether the cache entry `name` is older than the age limit.
// `getItem` returns null for a missing key and `Number(null)` is 0, so data
// that was never cached is always reported as outdated.
export function isDataOutdated(name: string): boolean {
  const lastUpdated = Number(localStorage.getItem(`${name}-timestamp`));
  const elapsedTime = Date.now() - lastUpdated;
  return elapsedTime > cacheAgeLimitInMilliseconds;
}
// Persists each emission of `data` into localStorage under `name`, alongside a
// `<name>-timestamp` entry recording when it was stored, then returns the
// observable unchanged.
//
// NOTE(review): subscribing here already triggers the source observable; the
// caller subscribes to the returned observable again. If the source is cold
// (e.g. a fresh fetch), the underlying work runs twice — confirm the source
// is shared/replayed, or that double execution is acceptable.
export function storeInCache<T>(
  data: Observable<T>,
  name: string
): Observable<T> {
  data.subscribe({
    next: (response: T) => {
      // Store the payload and a millisecond timestamp for staleness checks.
      localStorage.setItem(name, JSON.stringify(response));
      localStorage.setItem(name + '-timestamp', `${Date.now()}`);
    },
  });
  return data;
}
// Returns the cached value for `name` when it is still fresh; otherwise (or
// when the cached JSON cannot be parsed) fetches new data via `callback` and
// stores the result in the cache.
export function readFromCache<T>(
  name: string,
  callback: () => Observable<T>
): Observable<T> {
  let data: Observable<T>;
  if (isDataOutdated(name)) {
    // Cache miss or stale entry: fetch fresh data and persist it.
    data = storeInCache<T>(callback(), name);
  } else {
    let dataFromCache = localStorage.getItem(name);
    try {
      // NOTE(review): `dataFromCache` can be null here (timestamp present but
      // value missing); `JSON.parse(null)` coerces to parsing "null" and
      // emits null as the cached value — confirm callers handle that.
      data = of(JSON.parse(dataFromCache));
    } catch (err) {
      // Corrupted cache entry: log it and fall back to a fresh fetch.
      console.error(
        `Could not parse ${JSON.stringify(
          dataFromCache
        )}: ${err}. Fetching again data from callback function.`
      );
      data = storeInCache<T>(callback(), name);
    }
  }
  return data;
}

View File

@@ -1,139 +0,0 @@
import { Observable, switchMap, map } from 'rxjs';
import { fromFetch } from 'rxjs/fetch';
export interface GithubRepo {
id: number;
node_id: string;
name: string;
full_name: string;
private: boolean;
owner: Owner;
html_url: string;
description: string;
fork: boolean;
url: string;
forks_url: string;
keys_url: string;
collaborators_url: string;
teams_url: string;
hooks_url: string;
issue_events_url: string;
events_url: string;
assignees_url: string;
branches_url: string;
tags_url: string;
blobs_url: string;
git_tags_url: string;
git_refs_url: string;
trees_url: string;
statuses_url: string;
languages_url: string;
stargazers_url: string;
contributors_url: string;
subscribers_url: string;
subscription_url: string;
commits_url: string;
git_commits_url: string;
comments_url: string;
issue_comment_url: string;
contents_url: string;
compare_url: string;
merges_url: string;
archive_url: string;
downloads_url: string;
issues_url: string;
pulls_url: string;
milestones_url: string;
notifications_url: string;
labels_url: string;
releases_url: string;
deployments_url: string;
created_at: Date;
updated_at: Date;
pushed_at: Date;
git_url: string;
ssh_url: string;
clone_url: string;
svn_url: string;
homepage: string;
size: number;
stargazers_count: number;
watchers_count: number;
language: string;
has_issues: boolean;
has_projects: boolean;
has_downloads: boolean;
has_wiki: boolean;
has_pages: boolean;
forks_count: number;
mirror_url: null;
archived: boolean;
disabled: boolean;
open_issues_count: number;
license: null;
allow_forking: boolean;
is_template: boolean;
web_commit_signoff_required: boolean;
topics: any[];
visibility: string;
forks: number;
open_issues: number;
watchers: number;
default_branch: string;
}
export interface Owner {
login: string;
id: number;
node_id: string;
avatar_url: string;
gravatar_id: string;
url: string;
html_url: string;
followers_url: string;
following_url: string;
gists_url: string;
starred_url: string;
subscriptions_url: string;
organizations_url: string;
repos_url: string;
events_url: string;
received_events_url: string;
type: string;
site_admin: boolean;
}
export interface GithubError {
message: string;
documentation_url: string;
}
// Returns the `amount` most recently updated repositories of `user`, sorted
// newest first.
export function getLatestRepositories(
  user: string,
  amount: number
): Observable<GithubRepo[]> {
  return getRepositoriesOfUser(user).pipe(
    map((repositories: GithubRepo[]) => {
      return repositories
        .sort(
          (a: GithubRepo, b: GithubRepo) =>
            // The GitHub JSON API delivers `updated_at` as an ISO-8601 string
            // (despite the `Date` annotation on the interface). The previous
            // `+b.updated_at - +a.updated_at` evaluated to NaN on strings,
            // which made the sort a no-op; parse through `Date` instead.
            new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime()
        )
        .slice(0, amount);
    })
  );
}
// Fetches the public repositories of `user` from the GitHub REST API.
//
// NOTE(review): on a non-OK response this returns `[]` from within
// `switchMap`; an empty array used as an ObservableInput emits nothing and
// completes, so subscribers never receive a value (not even an empty list) —
// confirm this silent-completion error behavior is intended.
// NOTE(review): `JSON.stringify(response)` on a fetch `Response` serializes
// to "{}", so the logged message carries little diagnostic information.
export function getRepositoriesOfUser(user: string): Observable<GithubRepo[]> {
  const fetchUrl = `https://api.github.com/users/${user}/repos`;
  return fromFetch(fetchUrl).pipe(
    switchMap((response: Response) => {
      if (response.ok) {
        return response.json();
      } else {
        console.error(`Error ${response.status}: ${JSON.stringify(response)}`);
        return [];
      }
    }),
  );
}

View File

@@ -1,24 +0,0 @@
import { defineUserConfig, defaultTheme } from 'vuepress';
import { removeHtmlExtensionPlugin } from 'vuepress-plugin-remove-html-extension';
import head from './head';
import locales from './locales';
import themeLocales from './themeLocales';
export default defineUserConfig({
lang: 'fr-FR',
title: 'Lucien Cartier-Tilet',
description: 'Site web personnel de Lucien Cartier-Tilet',
head: head,
markdown: {
html: true,
linkify: true,
typographer: true,
},
plugins: [removeHtmlExtensionPlugin()],
locales: locales,
theme: defaultTheme({
contributors: false,
locales: themeLocales,
repo: 'https://labs.phundrak.com/phundrak/phundrak.com',
}),
});

View File

@@ -1,128 +0,0 @@
interface SimplifiedHeader {
tag: string;
content: [any];
}
// Head tags in a compact, grouped form: one entry per tag name, holding the
// attribute objects to emit for that tag. Flattened further below into the
// `[tagName, attributes]` pairs VuePress expects.
const simplifiedHead = [
  {
    tag: 'meta',
    content: [
      { name: 'author', content: 'Lucien Cartier-Tilet' },
      {
        property: 'og:image',
        content: 'https://cdn.phundrak.com/img/rich_preview.png',
      },
      // Fixed: was `org:title`, which Open Graph consumers ignore.
      { property: 'og:title', content: 'Lucien Cartier-Tilet' },
      {
        property: 'og:description',
        content: 'Site web personnel de Lucien Cartier-Tilet',
      },
      { name: 'twitter:card', content: 'summary' },
      { name: 'twitter:site', content: '@phundrak' },
      { name: 'twitter:creator', content: '@phundrak' },
      { name: 'msapplication-TileColor', content: '#3b4252' },
      { name: 'msapplication-TileImage', content: '/ms-icon-144x144.png' },
      { name: 'theme-color', content: '#3b4252' },
    ],
  },
  {
    tag: 'link',
    content: [
      { rel: 'apple-touch-icon', sizes: '57x57', href: '/apple-icon-57x57.png' },
      { rel: 'apple-touch-icon', sizes: '60x60', href: '/apple-icon-60x60.png' },
      { rel: 'apple-touch-icon', sizes: '72x72', href: '/apple-icon-72x72.png' },
      { rel: 'apple-touch-icon', sizes: '76x76', href: '/apple-icon-76x76.png' },
      {
        rel: 'apple-touch-icon',
        sizes: '114x114',
        href: '/apple-icon-114x114.png',
      },
      {
        rel: 'apple-touch-icon',
        sizes: '120x120',
        href: '/apple-icon-120x120.png',
      },
      {
        rel: 'apple-touch-icon',
        sizes: '144x144',
        href: '/apple-icon-144x144.png',
      },
      {
        rel: 'apple-touch-icon',
        sizes: '152x152',
        href: '/apple-icon-152x152.png',
      },
      {
        rel: 'apple-touch-icon',
        sizes: '180x180',
        href: '/apple-icon-180x180.png',
      },
      {
        rel: 'icon',
        type: 'image/png',
        sizes: '192x192',
        href: '/android-icon-192x192.png',
      },
      {
        rel: 'icon',
        type: 'image/png',
        sizes: '32x32',
        href: '/favicon-32x32.png',
      },
      {
        rel: 'icon',
        type: 'image/png',
        sizes: '96x96',
        href: '/favicon-96x96.png',
      },
      {
        rel: 'icon',
        type: 'image/png',
        sizes: '16x16',
        href: '/favicon-16x16.png',
      },
      { rel: 'manifest', href: '/manifest.json' },
    ],
  },
];
// Flatten the grouped head description into VuePress's `[tag, attrs]` pairs.
// (`flatMap` replaces a `.map` that was previously used purely for its side
// effects while mutating an outer array.)
const head = simplifiedHead.flatMap((tag: SimplifiedHeader) =>
  tag.content.map((element) => [tag.tag, element])
);
export default head;

View File

@@ -1,19 +0,0 @@
const locales = {
'/': {
lang: 'fr-FR',
title: 'Lucien Cartier-Tilet',
description: 'Site web personnel de Lucien Cartier-Tilet',
},
'/en/': {
lang: 'en-US',
title: 'Lucien Cartier-Tilet',
description: 'Personal website of Lucien Cartier-Tilet',
},
'/lfn/': {
lang: 'lfn',
title: 'Lucien Cartier-Tilet',
description: 'loca ueb de Lucien Cartier-Tilet',
},
};
export default locales;

Binary file not shown.

Before

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 45 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 31 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 41 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.0 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 46 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 46 KiB

View File

@@ -1,2 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<browserconfig><msapplication><tile><square70x70logo src="/ms-icon-70x70.png"/><square150x150logo src="/ms-icon-150x150.png"/><square310x310logo src="/ms-icon-310x310.png"/><TileColor>#eceff4</TileColor></tile></msapplication></browserconfig>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.4 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 94 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.4 KiB

View File

@@ -1,174 +0,0 @@
/*
* Nord Theme:
* - Copyright (c) 2016-present Arctic Ice Studio <development@arcticicestudio.com>
* - Copyright (c) 2016-present Sven Greb <development@svengreb.de>
*/
:root {
--nord0: #2e3440;
--nord1: #3b4252;
--nord2: #434c5e;
--nord3: #4c566a;
--nord4: #d8dee9;
--nord5: #e5e9f0;
--nord6: #eceff4;
--nord7: #8fbcbb;
--nord8: #88c0d0;
--nord9: #81a1c1;
--nord10: #5e81ac;
--nord11: #bf616a;
--nord12: #d08770;
--nord13: #ebcb8b;
--nord14: #a3be8c;
--nord15: #b48ead;
scroll-behavior: smooth;
// brand colors
--c-brand: var(--nord10);
--c-brand-light: var(--nord9);
// background colors
--c-bg: var(--nord6);
--c-bg-light: var(--nord6);
--c-bg-lighter: var(--nord5);
--c-bg-dark: var(--nord5);
--c-bg-darker: var(--nord4);
--c-bg-navbar: var(--c-bg);
--c-bg-sidebar: var(--c-bg);
--c-bg-arrow: var(--nord4);
// text colors
--c-text: var(--nord1);
--c-text-accent: var(--c-brand);
--c-text-light: var(--nord2);
--c-text-lighter: var(--nord3);
--c-text-lightest: var(--nord4);
--c-text-quote: var(--nord2);
// border colors
--c-border: var(--nord4);
--c-border-dark: var(--nord4);
// custom container colors
--c-tip: var(--nord14);
--c-tip-bg: var(--c-bg);
--c-tip-title: var(--c-text);
--c-tip-text: var(--c-text);
--c-tip-text-accent: var(--c-text-accent);
--c-warning: var(--nord13);
--c-warning-bg: var(--c-bg);
--c-warning-bg-light: var(--c-bg-light);
--c-warning-bg-lighter: var(--c-bg-lighter);
--c-warning-border-dark: var(--nord3);
--c-warning-details-bg: var(--c-bg);
--c-warning-title: var(--nord12);
--c-warning-text: var(--nord12);
--c-warning-text-accent: var(--nord12);
--c-warning-text-light: var(--nord12);
--c-warning-text-quote: var(--nord12);
--c-danger: var(--nord11);
--c-danger-bg: var(--c-bg);
--c-danger-bg-light: var(--c-bg-light);
--c-danger-bg-lighter: var(--c-bg-light);
--c-danger-border-dark: var(--nord11);
--c-danger-details-bg: var(--nord2);
--c-danger-title: var(--nord11);
--c-danger-text: var(--nord11);
--c-danger-text-accent: var(--nord11);
--c-danger-text-light: var(--nord11);
--c-danger-text-quote: var(--nord11);
--c-details-bg: var(--c-bg-lighter);
// badge component colors
--c-badge-tip: var(--c-tip);
--c-badge-warning: var(--c-warning);
--c-badge-warning-text: var(--c-bg);
--c-badge-danger: var(--c-danger);
--c-badge-danger-text: var(--c-bg);
// transition vars
--t-color: 0.3s ease;
--t-transform: 0.3s ease;
// code blocks vars
--code-bg-color: var(--nord0);
--code-hl-bg-color: var(--nord1);
--code-ln-color: #9e9e9e;
--code-ln-wrapper-width: 3.5rem;
// font vars
--font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen,
Ubuntu, Cantarell, "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif;
--font-family-code: Consolas, Monaco, "Andale Mono", "Ubuntu Mono", monospace;
// layout vars
--navbar-height: 3.6rem;
--navbar-padding-v: 0.7rem;
--navbar-padding-h: 1.5rem;
--sidebar-width: 20rem;
--sidebar-width-mobile: calc(var(--sidebar-width) * 0.82);
--content-width: 740px;
--homepage-width: 960px;
}
html.dark {
// brand colors
--c-brand: var(--nord14);
--c-brand-light: var(--nord14);
// background colors
--c-bg: var(--nord1);
--c-bg-light: var(--nord2);
--c-bg-lighter: var(--nord2);
--c-bg-dark: var(--nord3);
--c-bg-darker: var(--nord3);
// text colors
--c-text: var(--nord4);
--c-text-light: var(--nord5);
--c-text-lighter: var(--nord5);
--c-text-lightest: var(--nord6);
--c-text-quote: var(--c-text);
// border colors
--c-border: var(--nord3);
--c-border-dark: var(--nord3);
// custom container colors
--c-tip: var(--nord14);
--c-warning: var(--nord13);
--c-warning-bg: var(--c-bg);
--c-warning-bg-light: var(--c-bg-light);
--c-warning-bg-lighter: var(--c-bg-lighter);
--c-warning-border-dark: var(--nord3);
--c-warning-details-bg: var(--c-bg);
--c-warning-title: var(--nord13);
--c-warning-text: var(--nord13);
--c-warning-text-accent: var(--nord13);
--c-warning-text-light: var(--nord13);
--c-warning-text-quote: var(--nord13);
--c-danger: var(--nord11);
--c-danger-bg: var(--c-bg);
--c-danger-bg-light: var(--c-bg-light);
--c-danger-bg-lighter: var(--c-bg-light);
--c-danger-border-dark: var(--nord11);
--c-danger-details-bg: var(--nord2);
--c-danger-title: var(--nord11);
--c-danger-text: var(--nord11);
--c-danger-text-accent: var(--nord11);
--c-danger-text-light: var(--nord11);
--c-danger-text-quote: var(--nord11);
--c-details-bg: var(--c-bg-light);
// badge component colors
--c-badge-warning-text: var(--nord0);
--c-badge-danger-text: var(--nord0);
// code blocks vars
--code-hl-bg-color: var(--nord2);
}

View File

@@ -1,67 +0,0 @@
const themeLocales = {
'/': {
selectLanguageName: 'Français',
tip: 'nota bene',
warning: 'attention',
sidebar: [
'/README.md',
'/about.md',
'/find-me.md',
'/resume.md',
'/projects.md',
'/conlanging.md',
'/vocal-synthesis.md',
],
notFound: [
'Cest bien vide ici',
'Pourquoi sommes-nous ici?',
'Erreur 404',
'Le lien ne semble pas être correct',
],
backToHome: 'Retour accueil',
openInNewWindow: 'Ouvrir dans une nouvelle fenêtre',
toggleColorMode: 'Changer de thème',
toggleSidebar: 'Barre latérale',
lastUpdatedText: 'Dernière mise à jour',
},
'/lfn/': {
selectLanguageName: 'Elefen',
tip: 'avisa',
warning: 'averti',
danger: 'peril',
sidebar: [
'/lfn/index.md',
'/lfn/about.md',
'/lfn/find-me.md',
'/lfn/resume.md',
'/lfn/projects.md',
'/lfn/conlanging.md',
'/lfn/vocal-synthesis.md',
],
notFound: [
'Ce? Se no ave no cosa asi',
'A do vade tu?',
'Era 404',
'La lia no es coreta',
],
backToHome: 'reversa a la paja prima',
openInNewWindow: 'abri en un nova fenetra',
toggleColorMode: 'cambia la colores',
toggleSidebar: 'bara ladal',
lastUpdatedText: 'Ultima refresci',
},
'/en/': {
selectLanguageName: 'English',
sidebar: [
'/en/index.md',
'/en/about.md',
'/en/find-me.md',
'/en/resume.md',
'/en/projects.md',
'/en/conlanging.md',
'/en/vocal-synthesis.md',
],
},
};
export default themeLocales;

View File

@@ -1,21 +0,0 @@
# Accueil
Bonjour, je suis Lucien Cartier-Tilet, un étudiant en Master 2 THYP
(*Technologies de lHypermédia*) à lUniversité Vincennes Saint-Denis
(Paris 8).
Jai travaillé chez VoxWave de 2012 à 2018 en tant que co-fondateur et
directeur technique de lentreprise. Jy ai notamment développé les
chanteuses virtuelles francophones nommées ALYS et LEORA.
Je suis un enthousiaste du logiciel libre, utilisant Linux depuis 2008
et Emacs depuis 2016.
Mes passe-temps principaux sont la programmation, aussi bien de la
programmation système que de la programmation web, et la construction
de langues et univers fictifs. Jaime aussi faire de lescalade et
quand lopportunité se présente, de la randonnée.
Ma langue maternelle est le Français, mais je parle également
couramment en Anglais. Jai également des bases en Japonais, [Lingua
Franca Nova](https://elefen.org), et en Norvégien Bokmål.

View File

@@ -1,76 +0,0 @@
# À Propos
## Introduction
Ceci est le site web personnel de Lucien Cartier-Tilet, aussi connu
sous le nom de « Pundrak » ou « Phundrak ».
Il est écrit grâce à Vuepress et est entièrement open-source. Vous pouvez
trouver son code source sur [mon instance personnelle
Gitea](https://labs.phundrak.com/phundrak/phundrak.com).
## Où est hébergé le site?
Ce site est hébergé sur mon serveur personnel, situé dans la ville de
Bron en France, comme la majorité de mes sites. Deux autres sites,
`labs.phundrak.com` et `mail.phundrak.com`, sont hébergés sur d'autres
serveurs loués à Scaleway et à OVH France respectivement, et les
serveurs se situent également en France.
## Cookies
### Que sont les cookies?
Les cookies sont des petits fichiers sauvegardés par un site web sur
votre ordinateur ou votre téléphone portable lorsque vous visitez un
site. Bien que tous les sites nen fassent pas forcément usage, ils
sont néanmoins extrêmement répandus afin de permettre aux sites de
fonctionner correctement ou plus efficacement.
Ce site utilise quelques cookies fonctionnels dans le but de se
remémorer vos préférences, comme la langue du site ou bien son thème.
Ces cookies ne sont pas et ne peuvent pas être utilisés pour vous
traquer.
Cependant, ce site étant protégé par Cloudflare, ce dernier pourra
également héberger quelques cookies afin par exemple de se souvenir
que votre navigateur ne présente pas de risque ou bien pour
enregistrer le trafic sur le site.
### Comment puis-je contrôler les cookies sur mon ordinateur?
Si vous ne souhaitez pas que Cloudflare enregistre ces dernières
activités, un bon anti-pubs devrait faire laffaire. Je recommande
personnellement [uBlock Origin](https://ublockorigin.com/), lun des
bloqueurs de pub les plus efficaces que je connaisse.
Vous pouvez également supprimer manuellement les cookies de votre
navigateur, mais étant donné le nombre de navigateurs existants, il
sera sans doute plus rapide pour vous de chercher sur DuckDuckGo,
Qwant ou Startpage comment faire pour votre navigateur actuel (si vous
vous inquiétez de lutilisation des cookies, je suppose que vous
voudrez éviter Google).
### Quid des autres méthodes de tracking?
Il existe dautres méthodes plus subtiles qui permettent de traquer
quelquun sur internet, ou même via des mails ou tout contenu web
rendu à lécran, comme pixels espions (des images extrêmement
petites). Il est également possible de stocker des cookies Flash ou
des objets locaux partagés.
Ce site nen utilise absolument pas.
## Est-ce quil y a de la pub ciblée sur ce site?
Il ny a tout simplement aucune pub sur ce site. Si vous en voyez,
vous avez sans doute un virus installé sur votre ordinateur.
## Est-ce que cette page est souvent mise à jour?
Je peux la mettre à jour de temps en temps afin de refléter des
changements de fonctionnement du site, ou si je remarque une erreur
sur la page. Il se peut aussi que jajoute un jour un tracking des
utilisateurs sur mon site via Matomo, un service de tracking
respectant la vie privée des utilisateurs et qui est tout à fait
bloquable. La date de la dernière mise à jour de cette page peut être
trouvée à son tout début.
## Jai dautres questions
Et je serai heureux dy répondre par mail. Vous pouvez me contacter
via ladresse mail [lucien@phundrak.com](mailto:lucien@phundrak.com).

View File

@@ -1,39 +0,0 @@
# Création de langues
Les *idéolangues*, ou *langues construites* (en anglais *conlang*),
sont des langues construites et artificielles, nées de lesprit dune
ou parfois quelques personnes. Elles se distinguent ainsi des *langues
naturelles* qui sont des langues ayant naturellement évolué depuis
dautres langues plus anciennes, comme le Français, lAnglais, le
Mandarin, le Japonais, le Bahasa ou le !Xhosa (oui, le point
dexclamation fait partie de lorthographe du nom de la langue).
Les idéolangues peuvent avoir différents buts lors de leur création,
comme par exemple :
- être parlées comme des langues naturelles par des individus afin de
servir de *lingua franca* entre plusieurs communautés
linguistiques, comme le célèbre
[Esperanto](https://en.wikipedia.org/wiki/Esperanto) ou bien la
[Lingua Franca Nova](https://elefen.org)
- être une langue secrète que seules quelques personnes connaissent
afin de communiquer entre eux sans que dautres personnes puissent
comprendre, un peu comme un argot mais plus poussé encore
- être une expérience concrète de linguistique, comme le
[Lojban](https://en.wikipedia.org/wiki/Lojban) qui essaie dêtre la
langue la plus logique qui soit
- complémenter un univers littéraire, comme les langues elfiques de
Tolkien ou le Klingon de Star Trek
- juste être une forme dart, comme la peinture ou la poésie
Dans mon cas, les deux dernières justifications sont celles qui me
poussent à créer de nouvelles langues. Mes deux projets principaux
actuellement sont le
[Proto-Ñyqy](https://conlang.phundrak.com/proto-nyqy) et
l[Éittlandais](https://conlang.phundrak.com/eittlandic). La première
est une langue racine qui me permettra de développer toute une famille
de langues dans mon univers littéraire, tandis que la seconde
sinscrit dans un exercice créatif de création dun pays fictif
présent dans notre monde.
Plus dinformations peuvent être trouvées sur [mon site
didéolinguistique](https://conlang.phundrak.com/) (en anglais)

View File

@@ -1,80 +0,0 @@
# About
## Introduction
This is the personal website of Lucien “Phundrak” Cartier-Tilet.
This website is made with Vuepress and is entirely free and
open-source. You can find its source code on my Gitea instance
[here](https://labs.phundrak.com/phundrak/phundrak.com).
## Where is the website hosted?
This website is hosted on my private physical server, located in the
town of Bron in France, near Lyon. All of my websites are also hosted
on this server, except for
[`labs.phundrak.com`](https://labs.phundrak.com) and
`mail.phundrak.com` which are hosted on servers rented to Scaleway and
OVH France respectively. These servers are also located in France.
## Cookies
### What are cookies?
Cookies are small files a website saves on your computer or mobile
phone when you visit it. Although not all sites make use of them, they
are nevertheless extremely common, as they allow websites to function
properly or more efficiently.
This website uses some functional cookies in order to remember your
preferences, such as your preferred language or its color theme. These
cookies are not and cannot be used to track you.
However, as this site is protected by Cloudflare, they may also host
some cookies to remember, for example, that your browser is safe or to
record traffic to the site.
### How can I control cookies on my computer?
If you don't want Cloudflare to record your browsing activity on my
website, a good adblocker should do the trick. I personally recommend
[uBlock Origin](https://ublockorigin.com/), one of the most effective
adblockers I know of if not the most effective one.
You can also manually delete cookies from your browser, but given the
number of browsers out there, it might be quicker for you to look up
DuckDuckGo, Qwant or Startpage to do this for your current browser (if
you're worried about cookie usage, I guess you'll want to avoid
Google).
### What about other methods of tracking users?
There are other more subtle methods of tracking someone on the
internet, or even via emails or any web content rendered on the
screen, such as web beacons (extremely small images). It is also
possible to store Flash cookies or local shared objects.
This site does not use them at all.
## Is there targeted advertisement on this website?
Theres no advertisement to begin with. If you see any, check your
computer and browser for virus, that is not normal.
## How often is this page updated?
It is updated from time to time to reflect any changes in how my
website behaves, or if I notice errors on this page (such as typos). I
might add some user tracking, however dont worry, Matomo (the service
I would use) would only track you on this website and this website
only. Matomo respects the privacy of a websites users.
The date of the last update of this web page can be found at its very
beginning.
## I have other questions
And I have the answers! I'll be more than happy to chat with you by
email, feel free to send me one at
[lucien@phundrak.com](mailto:lucien@phundrak.com).

View File

@@ -1,28 +0,0 @@
# Conlanging
*Conlangs*, short for *constructed languages*, are artificial
languages born out of the mind of a single individual (sometimes a
couple of them), unlike natural languages born through countless
iterations by their native speakers, slowly evolving over time like
English, French, Mandarin, Japanese, Bahasa, or !Xhosa did.
They can serve various goals from their creators:
- be spoken by as many people as possible as a neutral language, like
[Esperanto](https://en.wikipedia.org/wiki/Esperanto) and [Lingua
Franca Nova](https://elefen.org)
- be a secret language between a couple of people
- as a thought experiment, like [Lojban](https://en.wikipedia.org/wiki/Lojban)
- fill a literary universe, like Tolkien's elvish languages or Star
Trek's Klingon
- for the sake of art itself
In my case, the last two reasons are the main ones driving me to
create languages. My two main projects at the time of writing this
page are [Proto-Ñyqy](https://conlang.phundrak.com/proto-nyqy) and
[Eittlandic](https://conlang.phundrak.com/eittlandic). Both are
accompanied by their own worldbuilding project, although Proto-Ñyqy's
worldbuilding is still largely secret while Eittland's worldbuilding
is mostly public.
More information can be found on my [conlanging
website](https://conlang.phundrak.com/).

View File

@@ -1,26 +0,0 @@
# Where to find me?
I am on various websites and some social networks where you can follow
me.
## Social Networks
- **Mastodon** : [@phundrak@phundrak.com](https://emacs.ch/@phundrak)
should work, otherwise head over to
[@phundrak@emacs.ch](https://emacs.ch/@phundrak)
- **Twitter** : [@phundrak](https://twitter.com/phundrak), though I
hardly use it anymore and mostly reshare my Mastodon messages and
sometimes they get truncated
- **Writefreely** :
- [**@phundrak@write.phundrak.com**](https://write.phundrak.com/phundrak) :
blog alternative
- [**@phundraks-short-stories@write.phundrak.com**](https://write.phundrak.com/phundraks-short-stories) :
short stories, mainly in French for now
- **Discord** : `Phundrak#0001` (tell me you come from here, otherwise
there's a chance I'll consider your message as spam)
## Other Websites
- **Email** : [lucien@phundrak.com](mailto:lucien@phundrak.com)
- **Blog** : [blog.phundrak.com](https://blog.phundrak.com)
- **Gitea** : [@phundrak@labs.phundrak.com](https://labs.phundrak.com/phundrak)
- **GitHub** : [Phundrak](https://github.com/Phundrak)
- **YouTube** : [@phundrak](https://www.youtube.com/@phundrak)

View File

@@ -1,21 +0,0 @@
# Home
Hi, I'm Lucien Cartier-Tilet, a CS student studying for my Master 2
degree in THYP (in French: *Technologies de l'Hypermédia*, in English:
*Hypermedia Technologies*) at the Université Vincennes Saint-Denis
(Paris 8).
I worked at VoxWave from 2012 to 2018 as its co-founder and CTO. During
that time, I developed French singing vocal libraries for vocal
synthesizers, known as ALYS and LEORA.
I'm a free software enthusiast, using GNU/Linux since 2008 and Emacs
since 2016.
I spend my free time on my personal programming projects as well as on
my constructed worlds and languages. I also like to go climbing, and
hiking whenever I have the opportunity to.
I speak French natively, and English at a native level. I also speak
some Japanese, [Lingua Franca Nova](https://elefen.org), and Norwegian
Bokmål.

View File

@@ -1,177 +0,0 @@
# Introduction
KEINE Tashi is a character and set of vocal libraries developed for
the shareware [UTAU](http://utau2008.web.fc2.com/), a singing voice
synthesizer. I developed KEINE Tashi over the course of several years,
from 2012 to 2015. Three vocal libraries have been released to the
public, the most used one being his *JPN Power Extend* one. On March
10th, 2017, I announced I would cease any kind of activity related to
UTAU.
<blockquote class="twitter-tweet" data-dnt="true" data-theme="dark"><p
lang="en" dir="ltr">I'd like to also announce that from now on I am
dropping my previous UTAU projects other than covers and won't develop
any new UTAU library</p>— Pundrak (@Phundrak) <a
href="https://twitter.com/Phundrak/status/840174634377105408?ref_src=twsrc%5Etfw">March
10, 2017</a></blockquote> <component is="script" async
src="https://platform.twitter.com/widgets.js"
charset="utf-8"></component>
# Character and vocal libraries
Here's a copy and paste of some old pages describing KEINE Tashi:
## Presentation
<ResponsiveImage
src="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_1024.webp"
width="1024"
preview="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_512.webp"
previewWidth="512">
Illustration of KEINE Tashi by Umi
</ResponsiveImage>
- **Codename:** BSUP01 恵音བཀྲ་ཤིས་ KEINE Tashi
- **First name:** Tashi (བཀྲ་ཤིས་), Tibetan name meaning “auspicious”
- **Last name:** Keine (恵音), Japanese name meaning “Blessing
sound”. It reads as “keine”, although its regular reading should
be “megumine”.
- **Model:** BSUP (Bödkay Shetang UTAU Project)
- **Number:** 01
- **Gender:** male
- **Birthday (lore):** June 28th, 1991
- **Birthday (first release):** October 14th, 2012
- **Weight:** 154lb / 70kg
- **Height:** 6′0″ / 182cm (very tall for a Tibetan)
- **Hair color:** black
- **Eyes color:** brown~black
- **Appearance:** Tashi wears a modernized Tibetan suit from the
Amdo Region (Chinese: 安多 Ānduō), colored in blue. He also wears
some turquoise jeweleries.
- **Favorite food:** meat momo (Tibetan ravioli)
- **Character item:** a Tibetan manuscript
- **Voice and creator:** [Phundrak](https://phundrak.com) (me)
- **Likes :** to meditate, calligraphy, old books, manuscripts (is
that a self-insert?)
- **Dislikes:** selfishness, lies, arrogance
- **Personality:** Tashi is somebody very calm and sweet. He really
enjoys old books and manuscripts, and he LOVES meditating! He's never
hungry, so he can stay meditating for two or three days straight, just
like that, until he realizes that he should eat something. And he
always keeps quiet; it's really hard to make him angry.
But when he is, his anger becomes wrath. Anyone who experienced it
can attest how complex and difficult it is to calm him down.
Strangely enough, shortly after being confronted by Tashi, the
victims of this wrath see their quality of life greatly improve.
Maybe these people needed to hear some truths they refused to face
before?
## Vocal libraries
### JPN VCV
- **Download link:**
| Extension | Size | Link |
|-----------|---------|-----------------------------------------------------------------------------------|
| 7z | 25.7MiB | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.7z) |
| tar.xz | 32.5MiB | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.tar.xz) |
| zip | 38.0MiB | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.zip) |
- **File size:** 60.7MB
- **Total uncompressed size:** 94.4MB
- **Number of voice phonemes:** 1264 (253 audio files)
- **Average frequency:** G#2
- **Vocal range:** C2~D3
- **FRQ file presence:** partial
- **Release date:** October, 14th 2012
- **Phoneme encoding:** Romaji with hiragana and CV romaji aliases
- **Supported languages:** Japanese
- **oto.ini:** Tuned myself
- **Recommended engines:** TIPS, VS4U
### JPN Extend Power
- **Download link:**
| Extension | Size | Link |
|-----------|--------|--------------------------------------------------------------------------------------------|
| 7z | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.7z) |
| tar.xz | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.tar.xz) |
| zip | 1.2Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.zip) |
- **File size:** 114MB
- **Total uncompressed size:** 155MB
- **Number of voice phonemes:** 3020 (546 audio files)
- **Average frequency:** C3
- **Vocal range:** B1~D4
- **FRQ file presence:** partial
- **Release date:** June 28th, 2013
- **Phoneme encoding:** Romaji (hiragana aliases)
- **Supported languages:** Japanese
- **oto.ini:** Tuned myself
- **Recommended engines:** VS4U, world4utau
### JPN Extend Youth
- **Download link:**
| Extension | Size | Link |
|-----------|----------|--------------------------------------------------------------------------------------------|
| 7z | 237.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.7z) |
| tar.xz | 243.5Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.tar.xz) |
| zip | 268.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.zip) |
- **File size:** 36.9MB
- **Total uncompressed size:** 42.0MB
- **Number of voice phonemes:** 1954 (182 audio files)
- **Average frequency:** C4
- **Vocal range:** F#3~A#4
- **FRQ file presence:** partial
- **Release date:** June 28th, 2013
- **Phoneme encoding:** Romaji (hiragana aliases, romaji added with
the oto.ini update)
- **Supported languages:** Japanese
- **oto.ini:** Tuned myself
- **Recommended engines:** fresamp, VS4U, world4utau
### JPN Extend Native
- **Status:** abandoned
### TIB CVVC
- **Status:** abandoned
### ENG
<ResponsiveImage
src="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_EN_673.webp"
width="673"
preview="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_EN_246.webp"
previewWidth="300">
Illustration of KEINE Tashi EN
</ResponsiveImage>
- **Status:** abandoned
# Usage clause and license
KEINE Tashi is released under the [CC BY-SA-NC 4.0
license](https://creativecommons.org/licenses/by-nc-sa/4.0/), meaning
you are free to:
- **use:** make use of the vocal libraries in UTAU or any other
singing vocal synthesizer software.
- **adapt:** remix, transform, and build upon the material
- **share:** copy and redistribute the material in any medium or
format
my work, on the condition of:
- **Attribution:** You must give appropriate credit, provide a link
to the license, and indicate if changes were made. You may do so
in any reasonable manner, but not in any way that suggests the
licensor endorses you or your use.
- **NonCommercial:** You may not use the material for commercial
purposes.
- **ShareAlike:** If you remix, transform, or build upon the
material, you must distribute your contributions under the same
license as the original.
Although I cannot add anything to this legal notice, I would also like
if you followed the following rules of thumb regarding this character:
any religious use of this character and its vocal libraries is
forbidden, with the exception of folk music, and Buddhist and Bön
songs. However, due to the current controversy, any song linked to His
Holiness the Gyalwa Karmapa is strictly forbidden until said
controversy has been officially resolved. This is also applicable to
His Holiness the Dalai Lama, the Venerable Shamar Rinpoche, and Tai
Situ Rinpoche. If you have any question or if you are unsure, please
send me an email.

View File

@@ -1,7 +0,0 @@
---
title: Projects
---
# Programming
## Most Starred Projects on GitHub
## Latest Active Repositories on GitHub
# Linguistics

View File

@@ -1,75 +0,0 @@
# Resume
## Professional Experiences
### Aubay (2023 - )
### VoxWave (2014 - 2018)
## Education
### 2nd Year Master's Degree (University of Paris 8)
### 1st Year Master's Degree (University of Paris 8)
### Computer Science Bachelor's Degree (University of Paris 8)
### English Literature (University of Lyon 2)
### Baccalaureate
## Web Programming
### Front-end
- Good knowledge in HTML5, CSS3 (including SASS, SCSS, and LESS), and
Javascript
- I know my way around in Python, Dart, and TypeScript
- Currently building experience with Vue, Nuxt.js, and Node.js
- Learning React and Next.js
### Back-end
- Some experience in back-end development with Django (Python) as well
as Rust with Rocket
- Some experience communicating with a database with Django's and
[Diesel](https://diesel.rs)'s ORM. Know my way around EmacSQL.
- Used MySQL and PostgreSQL
## System Programming
- Experienced in Rust, C, and EmacsLisp
- I know my way around C++, Python, and UNIX shells (bash, fish,
Eshell)
- Limited knowledge in Prolog and Scheme
## Development Tools
### IDEs and Text Editors
- Advanced user of Emacs, including its LSP and Git integrations
- Good knowledge of Git (including Magit under Emacs)
- Basic knowledge of Vim, CLion, Pycharm, and WebStorm
### CI/CD and Deploying to the Web
- Experienced with web servers such as Nginx and Caddyserver
- Good knowledge of virtualization and deployment with Docker and
Docker Compose for virtualization, Drone.io, and GitHub Actions for
deployment.
## Operating Systems
- Usage and administration of Linux (Arch Linux, Void Linux, Debian,
Ubuntu, Alpine Linux)
- Administration of web servers and storage servers (Arch Linux,
Debian, Raspbian, Alpine Linux)
- Basic knowledge with Guix System and NixOS, and Windows XP through
10 (except Vista)
## Office Applications
- Good knowledge with org-mode (main tool), LaTeX
- I know my way around LibreOffice, Microsoft Office, OnlyOffice, and
WPS Office
## Audio
### Singing Vocal Synthesis
- Development and creation of vocal libraries for VOCALOID3,
Alter/Ego, Chipspeech, and UTAU
- Usage of VOCALOID 2 through 4, Alter/Ego, Chipspeech, UTAU, CeVIO
Creative Studio
### Audio Engineering
- Music writing and mix software: FL Studio
- Audio repair and cleaning: iZotope RX
- Mastering: T-RackS CS

View File

@@ -1,61 +0,0 @@
---
title: Vocal Synthesis
---
# My works in vocal synthesis
From 2011 to 2018, I worked as an amateur and professional in singing
vocal synthesis. More precisely, I was creating vocal libraries used
by various libraries, mainly UTAU and Alter/Ego.
## UTAU
I began working with UTAU first by the end of 2011 on an unnamed and
deleted Japanese vocal library. While I didn't maintain it for long,
mainly due to its bad recording quality (I recorded it with a low-end
desktop microphone) and configuration, it did teach me the basics of
creating vocal libraries and working with audio files.
On October 14th, 2012, I released my second vocal library, named
*BSUP01 KEINE Tashi JPN VCV* which was of higher quality both due to
the recording equipment, manner of recording, and configuration,
though still relatively average for the time. My best work with this
series of vocal libraries was *BSUP01 KEINE Tashi JPN Extend Power*, a
high-energy voice made in similar circumstances but with yet again
better know-how.
This series of vocal libraries also featured *BSUP01 KEINE Tashi TIB
CVVC* and *BSUP02 Drolma TIB*, the two first Tibetan vocal libraries
for singing vocal synthesis worldwide.
I later created in UTAU *ALYS 001 JPN*, *ALYS 001 FRA*, and *ALYS 002
FRA* as prototypes, known as *ALYS4UTAU*, for our upcoming product
while working at VoxWave.
While all these vocal libraries have been discontinued, vocal
libraries for *BSUP01 KEINE Tashi* and *ALYS* are available for
download. Please refer to the following pages:
- **BSUP01 KEINE Tashi**: [BSUP01 KEINE Tashi](keine-tashi.md)
- **ALYS for UTAU**: [Open-Sourcing
ALYS](https://blog.phundrak.com/open-sourcing-alys/)
## Alter/Ego
[Alter/Ego](https://www.plogue.com/products/alter-ego.html) is a
singing vocal synthesis engine made by [Plogue
Inc.](https://www.plogue.com/). ALYS was its first commercial vocal
library as well as the first professional singing vocal library
available in French.
Due to the architecture and behaviour of Alter/Ego, important changes
had to be done to the recording script for ALYS (later re-used for
LEORA). Including the development of the new recording scripts, the
initial development period for ALYS spanned well over a year, with
some additional eight to nine months for its first major update.
ALYS for Alter/Ego, also known as *ALYS4AE*, is available free of
charge as a module for Alter/Ego, and its source files are publicly
available since December 15th, 2021. However, in accordance with
Plogue, no recipe for building ALYS for Alter/Ego has been made
available.
More information on open-sourcing ALYS
[here](https://blog.phundrak.com/open-sourcing-alys/).

View File

@@ -1,26 +0,0 @@
# Où me trouver ?
Je suis présent sur différentes plateformes et quelques réseaux
sociaux où vous pouvez me suivre.
## Réseaux sociaux
- **Mastodon** : [@phundrak@phundrak.com](https://emacs.ch/@phundrak)
devrait fonctionner, sinon direction
[@phundrak@emacs.ch](https://emacs.ch/@phundrak)
- **Twitter** : [@phundrak](https://twitter.com/phundrak), cependant
je n'y suis plus très actif et j'y repartage principalement mes
messages Mastodon qui parfois se font tronquer
- **Writefreely** :
- [**@phundrak@write.phundrak.com**](https://write.phundrak.com/phundrak) :
billets personnels
- [**@phundraks-short-stories@write.phundrak.com**](https://write.phundrak.com/phundraks-short-stories) :
histoires courtes
- **Discord** : `Phundrak#0001` (dites-moi que vous venez d'ici,
autrement il est possible que je considère le message comme du spam)
## Autres plateformes
- **Courriel** : [lucien@phundrak.com](mailto:lucien@phundrak.com)
- **Blog** : [blog.phundrak.com](https://blog.phundrak.com)
- **Gitea** : [@phundrak@labs.phundrak.com](https://labs.phundrak.com/phundrak)
- **GitHub** : [Phundrak](https://github.com/Phundrak)
- **YouTube** : [@phundrak](https://www.youtube.com/@phundrak)

View File

@@ -1,183 +0,0 @@
---
title: BSUP01 Keine Tashi
---
# Présentation
KEINE Tashi est un personnage et le nom d'une collection de banques
vocales développées pour le logiciel
[UTAU](http://utau2008.web.fc2.com/), un logiciel de synthèse de voix
pour le chant. J'ai développé KEINE Tashi de 2012 à 2015 et publiai
trois de ses banques vocales. Celle ayant rencontré le plus de succès
fut sa banque vocale *JPN Extend Power*. Le 10 mars 2017, j'annonçai
arrêter toutes activités liées à UTAU.
<blockquote class="twitter-tweet" data-dnt="true" data-theme="dark"><p
lang="en" dir="ltr">I&#39;d like to also announce that from now on I
am dropping my previous UTAU projects other than covers and won&#39;t
develop any new UTAU library</p>&mdash; P&#39;undrak (@Phundrak) <a
href="https://twitter.com/Phundrak/status/840174634377105408?ref_src=twsrc%5Etfw">March
10, 2017</a></blockquote> <component is="script" async
src="https://platform.twitter.com/widgets.js"
charset="utf-8"></component>
# Personnage et banques vocales
Voici une traduction en français des informations ayant trait à KEINE
Tashi sur d'anciennes pages le présentant.
## Présentation
<ResponsiveImage
src="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_1024.webp"
width="1024"
preview="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_512.webp"
previewWidth="512">
Illustration de KEINE Tashi par Umi
</ResponsiveImage>
- **Nom de code :** BSUP01 恵音བཀྲ་ཤིས་ KEINE Tashi
- **Prénom :** Tashi (བཀྲ་ཤིས་), prénom tibétain signifiant « auspicieux »
- **Nom :** Keine (恵音), nom japonais signifiant « son bénissant ».
Le nom se lit « keine » bien que sa lecture normale devrait être
« megumine ».
- **Modèle :** BSUP (Bödkay Shetang UTAU Project, *Projet UTAU de Chant Tibétain*)
- **Numéro :** 01
- **Sexe :** homme
- **Anniversaire (personnage) :** 28 Juin 1998
- **Première publication :** 14 Octobre 2012
- **Poids :** 154lb / 70kg
- **Taille :** 182cm
- **Couleur de cheveux :** noir
- **Couleur des yeux :** entre le marron et le noir
- **Apparence :** Tashi porte une version modernisée d'un habit
tibétain traditionnel de la région de l'Amdo (Chinois : 安多 Ānduō)
coloré en bleu. Il porte également quelques bijoux de turquoise.
- **Nourriture préférée :** momo à la viande (raviolis tibétains)
- **Objet signature :** un manuscrit tibétain
- **Voix et créateur :** [Phundrak](https://phundrak.com) (moi)
- **Aime :** méditer, la calligraphie, les vieux livres et
manuscrits (en gros, moi à l'époque où je créais ce personnage)
- **N'aime pas :** l'égoïsme, les mensonges, l'arrogance
- **Personnalité :** Tashi est quelqu'un de très calme et d'agréable.
Il adore les vieux livres et manuscrits, mais ce qu'il aime par-dessus
tout est méditer. Il n'a jamais faim, ce qui fait qu'il
peut rester pendant plusieurs jours à méditer si l'envie le prend,
jusqu'au moment où il réalise qu'il a *besoin* de manger. Il est
très difficile de le mettre en colère.
Mais quand il le devient, sa colère devient explosive. Le calmer
devient alors une tâche extrêmement complexe. Étrangement, les
victimes de son courroux voient peu de temps après leur qualité de
vie grandement s'améliorer. Peut-être ces personnes avaient-elles
besoin d'entendre des réalités auxquelles elles refusaient de faire
face ?
## Banques vocales
### JPN VCV
- **Lien de téléchargement :**
| Extension | Taille | Lien |
|-----------|---------|-----------------------------------------------------------------------------------|
| 7z | 25.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.7z) |
| tar.xz | 32.5Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.tar.xz) |
| zip | 38.0Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.zip) |
- **Taille décompressée :** 47.1Mio
- **Nombre de phonèmes :** 1264 (253 fichiers audio)
- **Note moyenne :** G#2
- **Plage vocale :** C2~D3
- **Présence de fichiers FRQ :** partiel
- **Date de publication :** 14 Octobre 2012
- **Encodage des phonèmes :** Romaji avec des alias hiragana et un
support CV en romaji
- **Langues supportées :** Japonais
- **Moteurs de synthèse recommandés:** TIPS, VS4U
### JPN Extend Power
- **Lien de téléchargement :**
| Extension | Taille | Lien |
|-----------|--------|--------------------------------------------------------------------------------------------|
| 7z | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.7z) |
| tar.xz | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.tar.xz) |
| zip | 1.2Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.zip) |
- **Taille décompressée :** 1.3Gio
- **Nombre de phonèmes :** 3020 (546 fichiers audio)
- **Note moyenne :** C3
- **Plage vocale :** B1~D4
- **Présence de fichiers FRQ :** partiel
- **Date de publication :** 28 Juin 2013
- **Encodage des phonèmes :** Romaji (alias hiragana)
- **Langues supportées :** Japonais
- **Moteurs de synthèse recommandés:** VS4U, world4utau
### JPN Extend Youth
- **Lien de téléchargement :**
| Extension | Taille | Lien |
|-----------|----------|--------------------------------------------------------------------------------------------|
| 7z | 237.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.7z) |
| tar.xz | 243.5Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.tar.xz) |
| zip | 268.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.zip) |
- **Taille décompressée :** 301.1Mio
- **Nombre de phonèmes :** 1954 (182 fichiers audio)
- **Note moyenne :** C4
- **Plage vocale :** F#3~A#4
- **Présence de fichiers FRQ :** partiel
- **Date de publication :** 28 Juin 2013
- **Encodage des phonèmes :** Romaji (alias hiragana)
- **Langues supportées :** Japonais
- **Moteurs de synthèse recommandés:** fresamp, VS4U, world4utau
### JPN Extend Native
- **Statut :** abandonné
### TIB CVVC
- **Statut :** abandonné
### ENG
<ResponsiveImage
src="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_EN_673.webp"
width="673"
preview="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_EN_246.webp"
previewWidth="300">
Illustration de KEINE Tashi EN
</ResponsiveImage>
- **Statut :** abandonné
# Licence d'utilisation
KEINE Tashi est publié sous la licence [CC BY-SA-NC
4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/). Cela
signifie que vous êtes libres :
- **d'utiliser :** utiliser les banques vocales dans UTAU ou tout
autre logiciel ;
- **de partager :** copier, distribuer et communiquer le matériel
par tous moyens et sous tous formats ;
- **d'adapter :** remixer, transformer et créer à partir du
matériel ;
Selon les conditions suivantes :
- **Attribution :** Vous devez me créditer lors de l'utilisation de
Tashi, intégrer un lien vers la licence et indiquer si des
modifications ont été effectuées. Vous devez indiquer ces
informations par tous les moyens raisonnables, sans toutefois
suggérer que je vous soutienne ou que je soutienne la façon dont
vous utilisez Tashi ;
- **Pas d'Utilisation Commerciale :** Vous n'êtes pas autorisé à
faire un usage commercial de Tashi, tout ou partie du matériel le
composant ;
- **Partage dans les Mêmes Conditions :** Dans le cas où vous
effectuez un remix, que vous transformez, ou créez à partir du
matériel composant Tashi, vous devez le diffuser modifié dans les
mêmes conditions, c'est-à-dire avec la même licence avec laquelle
Tashi est diffusé ici.
Bien que je ne puisse pas ajouter d'éléments à cette licence légale,
je souhaiterais ajouter une requête personnelle : merci de ne pas
créer de chansons à caractère religieux, à l'exception des chansons
tibétaines bouddhistes ou bön. Cependant, du fait de la controverse
actuelle concernant l'identité de Sa Sainteté le Gyalwa Karmapa, toute
chanson liée à sa personne est également interdite jusqu'à résolution
officielle de la situation. Cette interdiction est également
applicable à Sa Sainteté le Dalaï Lama, au Vénérable Shamar Rinpoché
et Tai Situ Rinpoche. Si vous avez la moindre question, n'hésitez pas
à m'[envoyer un email](mailto:lucien@phundrak.com).

View File

@@ -1,63 +0,0 @@
# A tema de esta loca ueb
## Introdui
Asi es la loca ueb personal de Lucien Cartier-Tilet, ance conoseda
commo “Pundrak” o “Phundrak”.
Lo es scriveda con Vuepress e es completa de fonte abrida. On pote
trova la testo de fonte sur [mea loca ueb Gitea
personal](https://labs.phundrak.com/phundrak/phundrak.com).
# Do es ospitada esa loca ueb?
Esa loca ueb es ospitada sur mea servador ueb personal, en la site de
Bron en Frans, como la majoria de mea locos ueb.
[`labs.phundrak.com`](https://labs.phundrak.com) e `mail.phundrak.com`
ambos es ospitada on servadores ueb ce me lua a Scaleway e OVH Frans
en ordina, e esa servadores simil es en Frans.
# Cucis
## Ce es cucis
Cucis es peti fixes registrada par un loca ueb sur tua computa o
telefoneta cuando te lo visita. An si no tota la locas ueb usa lo, los
es an con tota estrema comun. Los capasi esa locas ueb a funsiona
coreta o plu eficas.
Esa loca ueb usa cucis con la ojeto de recorda tua prefere, como ance
la lingua o la motif ce te ia eleje. Lo ance usa cucis de mi personal
Matomo par sabe lo ce lo usores de esa loca ueb fa asi, ma Matomo
trasa lo sola sur mea locas ueb.
An tal, esa loca ueb es protejeda par Cloudflare, esa compania ance
pote ospita alga cucis afin lo recorda si tu surfador es un risca par
me loca ueb o no.
## Como me pote controla la cucis sur mea computa?
Si te no vole ce Cloudflare o Matomo recorda cucis, un bon
anti-comersial como [uBlock Origin](https://ublockorigin.com/) ta pote
te proteje (es la plu eficas ce me conose). Matomo ance respecta la
demanda “no trasa me” de surfadores.
Te pote ance supresa con mano la cucis de tua surfador, ma me no pote
te dise como, lo ave tro ce esiste. Ma te pote xerca sur DuckDuckGo,
Qwant o Startpage como supresa lo (me suposa ce si tu vole supresa lo,
te vole evita Google).
## E la otra metodos afin de trasa?
Lo esista otra metodos plu sutil afin de trasa usores sur la interede,
o an con epostas o cada contenida ueb, como pixeles spia (imajes
estrema peti), cucis Flash o ojetos local compartida.
Ma esa loca ueb no usa lo.
## Esa loca ueb usa comersiales intendeda?
Lo no ave no comersiales. Si te lo vide asi, te ave un virus sur tua
computa o surfador.
## Ave esa pajina frecuente refrescis?
Me dona esa pajina un refresci aora e alora cuando lo debe a mostra
cambias de funsiona de mea loca ueb, o si me trova eras. Te pote trove
la ultima refresci de esa pajina a supra.
## Me ave otra demandas
Te pote scrive me un eposta a la adirije de eposta
[lucien@phundrak.com](mailto:lucien@phundrak.com).

View File

@@ -1,30 +0,0 @@
# Crea de linguas
La *linguas construida* (en engles *conlang*) es linguas artifis
naseda de la spirito de un, o a veses alga persones. Les es diferente
de linguas natural ce apare con la evolui de un lingua presedente
parlada par un popla completa dura multiple sentenios, como franses,
engles, putong, nion, bahasa o cosa (engles: *!Xhosa*).
Linguas construida pote ave difere intende tra sua crea, pe:
- es parlada como otra linguas natural afin de es un lingua franca
entre multiple comunias, como elefen o esperanto
- es un lingua secreta ce sola algun persones conose afin de comunica
sin ce on comprende los
- esperia lo ce es posible linguisticamente, como
[Lojban](https://en.wikipedia.org/wiki/Lojban) ce atenta a es la
lingua la plu lojica
- completa un universo fantasial, como la linguas elfin de Tolkien o
klingon de Star Trek
- sola es arte, como la pinta o la poesia.
En mea caso, la du ultima espicas es los ce me impulsa a crea nova
linguas. Mea du projetas xef es
[proto-ñyqy](https://conlang.phundrak.com/proto-nyqy) e
[eittlansce](https://conlang.phundrak.com/eittlandic). La prima es un
lingua prima ce se fundi sur no otra lingua, ma ce me va usa a crea un
familia linguistica completa, en ce eittlansce es desende de la lingua
de la vicinges, vea nordica, e ce se parla en la pais fantasial de
Eittland.
On pote trove plu de informa sur [mea loca ueb de linguas construida](https://conlang.phundrak.com/).

View File

@@ -1,23 +0,0 @@
# Do on pote me trova?
On pote me trova sur multe loca ueb e redes sosial do te pote me segue.
## Redes sosial
- **Mastodon** : [@phundrak@phundrak.com](https://emacs.ch/@phundrak) ta debe funsiona, si no visita [@phundrak@emacs.ch](https://emacs.ch/@phundrak)
- **Twitter** : [@phundrak](https://twitter.com/phundrak), ma me lo
usa a poca veses, la plu de mea tuitas es mea mesajes mastodon ce es
a vesas truncada
- **Writefreely** :
- [**@phundrak@write.phundrak.com**](https://write.phundrak.com/phundrak) :
revistas personal
- [**@phundraks-short-stories@write.phundrak.com**](https://write.phundrak.com/phundraks-short-stories) :
istorias corta (a multe veses en Frans)
- **Discord** : `Phundrak#0001` (dise me ce tu veni de asi, si no me
pote pensa ce tua mesaje es spam)
## Otra locas ueb
- **Eposta** : [lucien@phundrak.com](mailto:lucien@phundrak.com)
- **Blog** : [blog.phundrak.com](https://blog.phundrak.com)
- **Gitea** : [@phundrak@labs.phundrak.com](https://labs.phundrak.com/phundrak)
- **GitHub** : [Phundrak](https://github.com/Phundrak)
- **YouTube** : [@phundrak](https://www.youtube.com/@phundrak)

View File

@@ -1,20 +0,0 @@
# Paje Prima
Saluta, me es Lucien Cartier-Tilet, un studiante de informatica en la
Master 2 THYP (franses: *Technologies de l'Hypermédia*, elefen:
*Tecnolojia de la Ipermedia*) en la Universia Vincennes Saint-Denis
(Paris 8).
Me ia labora a VoxWave de 2012 a 2018 como un de sua co-fundores e sua
dirijor tecnical. Dura esta tempo, me ia crea la cantores virtual
franses ALYS e LEORA.
Me es un zelo de la programes libre, usante GNU/Linux de 2008 e Emacs
de 2016.
Me amatos es la crea de programes e construi mundos e linguas
fantasial. Me ama ance asende e, cuando me lo pote, pasea en la
montania.
Mea lingua prima es franses, ma me pote parla fasil engles. Me ance
pote parla alga nion, norsce (bokmål) e elefen.

View File

@@ -1,167 +0,0 @@
---
title: BSUP01 KEINE Tashi
---
# Presenta
KEINE Tashi es un carater e la nom de un colie de bancos de voses
developa per la program [UTAU](http://utau2008.web.fc2.com/), un
program de sintese de vose per canta. Me ia developa KEINE Tashi de
2012 a 2015 e me ia publici tre de sua bancos de vose. La ce ia ave la
plu de susede ia sua banco de vose *JPN Extend Power*. La 10 marto
2017, me ia anunsia ce me para tota mea ativias liada a UTAU.
<blockquote class="twitter-tweet" data-dnt="true" data-theme="dark"><p
lang="en" dir="ltr">I&#39;d like to also announce that from now on I
am dropping my previous UTAU projects other than covers and won&#39;t
develop any new UTAU library</p>&mdash; P&#39;undrak (@Phundrak) <a
href="https://twitter.com/Phundrak/status/840174634377105408?ref_src=twsrc%5Etfw">March
10, 2017</a></blockquote> <component is="script" async
src="https://platform.twitter.com/widgets.js"
charset="utf-8"></component>
# Carater e bancos de vose
On ave asi en elefen la informas liada a KEINE Tashi sur vea pajinas
ce le ia presenta.
## Presenta
<ResponsiveImage
src="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_1024.webp"
width="1024"
preview="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_512.webp"
previewWidth="512">
Desinia de KEINE Tashi par Umi
</ResponsiveImage>
- **Nom de codigo:** BSUP01 恵音བཀྲ་ཤིས་ KEINE Tashi
- **Nom individua:** Tashi (བཀྲ་ཤིས་), nom individua bod ce sinifia "augurida"
- **Nom familial:** Keine (恵音), nom familial nion ce sinifia "sona
bondisada". On debe leje la nom como "keine", an si la leje ta es
"megumine".
- **Model:** BSUP (Bödkay Shetang UTAU Project, franses: *Projet UTAU
de Chant Tibétain*, elefen: *Projeta UTAU de Canta Bod*)
- **Numero:** 01
- **Seso:** om
- **Aniversario (carater):** 28 junio 1998
- **Primera Publici:** 14 otobre 2012
- **Pesa:** 70 kg / 154 lb
- **Taie:** 182 cm / 5 ft 11 in
- **Color de capeles:** negra
- **Color de oios:** entre la brun e la negra
- **Aspeta:** Tashi apone un varia moderna de un veste bod tradisional
de la rejion de Amdo (Jonguo: 安多 Ānduō) de color azul o blu. El
apone egal joalas de turcesa.
- **Comable prefereda:** momo a la carne (raviolis bod)
- **Ojeto suscrive:** un manoscrito bod
- **Vose e creor:** [Phundrak](https://phundrak.com) (me)
- **Ama:** medita, la caligrafia, vea libros e manuscritos (en
jeneral, mea cuando me ia crea esa carater)
- **No ama:** la egosia, mentis, vania
- **Personalia:** Tashi es algun ce es multe calma e gustable. El
gusta vea libros e manuscritos, ma lo ce el gusta plu ce tota es
medita. El fami an nunca, par acel el pote mudi e medita durante
multe dias si el vole lo, asta el momento cuando el **nesesa** come.
El coleri multe difisil.
Ma cuando coleri, sua coler deveni esplodente. Calmi el deveni alora
un taxe multa compleso. Ma es bizara ce la vitimes de sua furia ave
la cualia de sua vives deveni multe plu bon e se boni enorme. Se
pote ce esa personas ia nesesa oia realias ce los ia refusa oia?
## Bancos de voses
### JPN VCV
- **Descarga:**
| Sufisa | Taie | Iperlia |
|--------|---------|-----------------------------------------------------------------------------------|
| 7z | 25.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.7z) |
| tar.xz | 32.5Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.tar.xz) |
| zip | 38.0Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_VCV.zip) |
- **Taie descompresada:** 47.1Mio
- **Numero de fonemes:** 1264 (253 fixes audio)
- **Tono media:** G#2
- **Estende vosal:** C2~D3
- **Presente de fixes FRQ:** partal
- **Data de publici:** 14 otobre 2012
- **Codigo de la fonemes:** Romaji con aliases hiragana e CV en
romaji
- **Linguas suportada:** nion
- **Motores de sintese recomenda:** TIPS, VS4U
### JPN Extend Power
- **Descarga:**
| Sufisa | Taie | Iperlia |
|--------|--------|--------------------------------------------------------------------------------------------|
| 7z | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.7z) |
| tar.xz | 1.1Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.tar.xz) |
| zip | 1.2Gio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Power.zip) |
- **Taie descompresada:** 1.3Gio
- **Numero de fonemes:** 3020 (546 fixes audio)
- **Tono media:** C3
- **Estende vosal:** B1~D4
- **Presente de fixes FRQ:** partal
- **Data de publici:** 28 junio 2013
- **Codigo de la fonemes:** Romaji (aliases hiragana)
- **Linguas suportada:** nion
- **Motores de sintese recomenda:** VS4U, world4utau
### JPN Extend Youth
- **Descarga:**
| Sufisa | Taie | Iperlia |
|--------|----------|--------------------------------------------------------------------------------------------|
| 7z | 237.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.7z) |
| tar.xz | 243.5Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.tar.xz) |
| zip | 268.7Mio | [DL](https://cdn.phundrak.com/files/KeineTashi/BSUP01_KEINE_Tashi_JPN_Extend_Youth.zip) |
- **Taie descompresada:** 301.1Mio
- **Numero de fonemes:** 1954 (182 fixes audio)
- **Tono media:** C4
- **Estende vosal:** F#3~A#4
- **Presente de fixes FRQ:** partal
- **Data de publici:** 28 junio 2013
- **Codigo de la fonemes:** Romaji (aliases hiragana)
- **Linguas suportada:** nion
- **Motores de sintese recomenda:** fresamp, VS4U, world4utau
### JPN Extend Native
- **State:** abandonada
### TIB CVVC
- **State:** abandonada
### ENG
<ResponsiveImage
src="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_EN_673.webp"
width="673"
preview="https://cdn.phundrak.com/img/UTAU/KEINE_Tashi_EN_246.webp"
previewWidth="300">
Desinia de KEINE Tashi EN
</ResponsiveImage>
- **State:** abandonada
# Lisensa de usa
KEINE Tashi es publica con la lisensa [CC BY-SA-NC
4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/). Esa sinifia
ce tu pote:
- **usa**: usa la bancos de voses en UTAU o tota otra program
- **comparti**: copia e distribui la materia en cualce media o formato
- **adata**: remisca, muta e crea partinte de la materia
Si te respeta esa condisionas:
- **Atribui**: te debe de atribui mea cuando te usa Tashi, inclui un
iperlia a la lisensa e indica si te ia muta alga cosa. Te debe
incida esa informas con tota media razonante, ma no de forma ce on
pote crede ce me promove o suporta tua usa de Tashi
- **NoComersial**: te pote a no grado ave un usa comersial Tashi
- **CompartiEgal**: si te fe un remisca, un modifia o si te crea
partinte de la materia de Tashi, te debe comparti lo con la mesma
state, per dise con la mesma lisensa ce me comparti Tashi asi.
An con ce me no pote ajunta elementos a esa lisensa legal, me vole
ajunta un solisita personal: me prefere si te no ta crea cantas
relijial estra de cantas bod budiste o bon. An con tota, par causa de
la controversa consernante la identia de Sua Santia La Gialua Karmapa,
tota canta lia a sua person es nonpermeteda asta esa situa es solveda.
Esa despermete es simil aplicada a Sia Santia la Dalai Lama, la
Onorable Xamar Rinpotxe e Tai Situ Rinpotxe. Si te ave un demanda o si
te no es serta, envia me [un eposta](mailto:lucien@phundrak.com).

View File

@@ -1,7 +0,0 @@
---
title: Projetas
---
# Programi
## Mea projetas GitHub lo plu stelada
## Ultima retenerias de codigo sur GitHub
# Linguistica

View File

@@ -1,66 +0,0 @@
# Resoma de carera
## Informas
Lucien Cartier-Tilet
## Esperia Profesal
### Aubay (2023 - )
### VoxWave (2014 - 2018)
## Educa
### Mestral 2 de Tecnolojia de la Ipermedia (Universia de Paris 8)
### Mestral 1 de Informatica (Universia de Paris 8)
### Mestral 1 de Informatica (Universia de Paris 8)
### Lisensa de Informatica (Universia de Paris 8)
### Engles Leteratural (Universia de Lyon 2)
### Laural
## Programi ueb
### Front-end
- Bon conose de HTML5, CSS3 (incluinte SASS, SCSS e LESS) e Javascript
- Conoses de Python, Dart e Typescript
- Usa de Vue, Nuxt, Vuepress e Node.js
- Aprende React e Next.js
### Backend
- Esperia en programi de backend con Rocket (Rust) e alga con Django (Python)
- Esperia en comunica con banco de datos con
[Diesel](https://diesel.rs) (Rust) e Django, conose alga EmacSQL
- Usa de PostgreSQL e MySQL
## Programi sistem
- Esperia con Rust, C e EmacsLisp
- Conoses de C++, Python, CommonLisp e shelles UNIX (bash, fish, eshell)
- Conoses limitada de Prolog e Scheme
## Utiles de developa
### IDEs e editadores
- Conoses esperta de Emacs, inclui con la incluis de LSP e Git
- Bon conoses de Git (inclui sua interfas Magit per Emacs)
- Conoses fundal de Vim, CLion, PyCharm e WebStorm
## CI/CD e desplia ueb
- Esperia con la servadores ueb Nginx e Caddyserver
- Bon conoses de Docker, Drone.io e GitHub Actions per desplia ueb
## Sistemes de opera
- Usa e manejablia de Linux (ArchLinux, VoidLinux, Debian, Ubuntu,
AlpineLinux)
- Manejablia de servidores ueb e servidores de conserva (ArchLinux,
Debian, Ubuntu, AlpineLinux)
- Conosas fundal de Guix System, NixOS e Windows (de XP a 10, con
eseta de Vista)
## Program de ofisia
- Bon conosas con org-mode (Emacs) e LaTeX
- Conosas con LibreOffice, Microsoft Office, WPS Office e OnlyOffice
## Audio
### Sintese vocal cantada
- Developa e crea de bancos de voses per sintese vocal cantada per
VOCALOID3, Alter/Ego, Chipspeech e UTAU
- Usa de VOCALOID2 a VOCALOID4, Alter/Ego, Chipspeech, UTAU, CeVIO
Creative Studio
## Injenia audio
- program de musica: FL Studio
- repara e puri audio: iZotope RX
- mastering: T-RackS CS

View File

@@ -1,53 +0,0 @@
---
title: Sintese vocal
---
# Labora en sintese vocal
De 2011 a 2018, me ia labora esente un amator alora esente un profesal
en la domina de la sintese vocal. Plu eseta, me ia crea e usa bancos
de voses par la program UTAU e alora Alter/Ego xef.
## UTAU
Me ia comensa a usa UTAU a la fin de 2011 con un banco de vose nion
findida sur mea vose, sin nom e perdeda de tempo longa. An si no me ia
conserva lo longua, xef par causa de la mal cualia de sua ajusta et
sua samples audio (me ia sample con un microfon de mal cualia), acel
ia educa me la fundas de la crea de bancos de voses per UTAU e labora
con fixes audio.
La 14 otobre 2012, me ia publici mea du banco de vose, *BSUP01 KEINE
Tashi JPN VCV*, un banco de vose egal findida sur mea vose e con un
cualia de sua samples e de sua ajusta multe plu bon. Ma mea plu bon
banco vocal per UTAU ia es *BSUP01 KEINE Tashi Extend Power* ce ia ave
un vose plu forte cuando ia ave plu de esperia.
Esa serie de bancos de voses simil inclui *BSUP01 KEINE Tashi TIB
CVVC* e *BSUP02 Djetsün*, la prima bancos de voses de la mundo ce ia
pote canta en bod (ma me ia publici an nunca los).
Me ia crea en 2014 la banco de vose *ALYS 001 JPN*, *ALYS 001 FRA* e
*ALYS 002 FRA* esente prototipos de ALYS con UTAU. Esa bancos de voses
es conoseda como *ALYS4UTAU*.
En ce cada esa bancos oji no plu es developa e sua suporta tecnica no
plu es garantiada, on pote encora descarga *BSUP01 KEINE Tashi* e
*ALYS*:
- **BSUP01 KEINE Tashi**: [BSUP01 KEINE Tashi](keine-tashi.md)
- **ALYS**: [Open-Sourcing
ALYS](https://blog.phundrak.com/open-sourcing-alys/) (en engles)
## Alter/Ego
[Alter/Ego](https://www.plogue.com/products/alter-ego.html) es un
motor de sintese vocal crea par [Plogue Inc.](https://www.plogue.com).
ALYS ia es la prima banco de vose comersial, como la prima vose de
sintese profesal en franses crea per canta.
Par causa de la arciteta e de la condui de Alter/Ego, cambias par la
senario de rejistra par ALYS (e plu tarda par LEORA) ia es nesesada.
Cuando on inclui la reali de la senario, la developa de ALYS per
Alter/Ego (simil conoseda como *ALYS4AE*) ia dura plu de un anio. La
developa de la prima refresci de ALYS ia dura nove menses completinte.
On pote aora descarga *ALYS4AE* sin custa con un modulo de Alter/Ego
de 15 desembre 2021. An con tota, me no pote publici informas e la
metodo per compila la banco de vose de ALYS per Alter/Ego. Plu
informas [asi](https://blog.phundrak.com/open-sourcing-alys/).

View File

@@ -1,9 +0,0 @@
---
title: Projets
---
# Programmation
## Projets GitHub les plus étoilés
<LatestRepositories />
## Derniers dépôts de code actifs sur GitHub
# Linguistique

View File

@@ -1,77 +0,0 @@
---
title: CV
---
# Curriculum Vitae
## Expériences professionnelles
### Aubay (2023 - )
### VoxWave (2014 - 2018)
## Éducation
### Master 2 Technologies de l'Hypermédia (Université Paris 8)
### Master 1 Informatique (Université Paris 8)
### Licence Informatique (Université Paris 8)
### Anglais LLCE (Université Lyon 2)
### Baccalauréat
## Programmation Web
### Front-end
- Bonnes connaissances en HTML5, CSS3 (y compris SASS, SCSS et LESS)
et Javascript
- Connaissances en Python, Dart et TypeScript
- Utilisation en cours de Vue, Nuxt.js et Node.js
- Apprentissage de React et Next.js
### Back-end
- De l'expérience en développement backend avec Django (Python) et Rocket (Rust)
- De l'expérience en communication avec des bases de données via
Django et [Diesel](https://diesel.rs). Connaissances de base avec EmacSQL.
- Utilisation de MySQL et PostgreSQL.
## Programmation Système
- De l'expérience avec Rust, C et EmacsLisp
- Connaissances en C++, Python, CommonLisp et les shells UNIX
(bash, fish, Eshell)
- Connaissances limitées en Prolog et Scheme
## Outils de développement
### IDEs et éditeurs de texte
- Utilisateur avancé d'Emacs, y compris avec ses intégrations pour LSP
et Git
- Bonnes connaissances de Git (y compris avec son interface Magit pour
Emacs)
- Connaissances basiques de Vim, CLion, PyCharm et WebStorm
### CI/CD et déploiement sur le web
- De l'expérience avec les serveurs web Nginx et Caddyserver
- Bonnes connaissances de Docker, Drone.io et GitHub Actions pour du
déploiement
## Systèmes d'exploitation
- Utilisation et administration de Linux (ArchLinux, VoidLinux,
Debian, Ubuntu, AlpineLinux)
- Administration de serveurs web et serveurs de stockage (ArchLinux,
Debian, Ubuntu, AlpineLinux)
- Connaissances élémentaires de Guix System, NixOS et Windows de XP à
10 (excepté Vista)
## Bureautique
- Bonnes connaissances avec org-mode et LaTeX
- Connaissances avec Libre Office, Microsoft Office, WPS Office et OnlyOffice
## Audio
### Synthèse de voix chantée
- Développement et création de banques vocales de synthèse vocale
chantée pour VOCALOID3, Alter/Ego, Chipspeech et UTAU
- Utilisation de VOCALOID2 à 4, Alter/Ego, Chipspeech, UTAU, CeVIO
Creative Studio
### Ingénierie audio
- Logiciel de musique : FL Studio
- Réparation et nettoyage audio : iZotope RX
- Mastering : T-RackS CS

View File

@@ -1,67 +0,0 @@
---
title: Synthèse vocale
---
# Travaux en synthèse vocale
De 2011 à 2018, j'ai travaillé autant en tant qu'amateur puis en tant
que professionnel dans le domaine de la synthèse vocale chantée. Plus
précisément, je créais et utilisais des banques vocales pour le
logiciel UTAU puis Alter/Ego principalement.
## UTAU
J'ai commencé à travailler avec UTAU durant la fin de 2011 avec une
banque vocale japonaise basée sur ma voix, anonyme et perdue depuis.
Bien que je ne la maintins pas longtemps, principalement dû à la
mauvaise qualité de sa configuration et de ses échantillons audio
source (je l'enregistrai avec un micro de bureau de mauvaise qualité),
cela m'enseigna les bases de la création de banques vocales pour UTAU
et du travail avec des fichiers audio.
Le 14 octobre 2012, je publiai ma seconde banque vocale, *BSUP01 KEINE
Tashi JPN VCV*, une banque vocale également basée sur ma voix et d'une
qualité bien supérieure du fait du matériel d'enregistrement
professionnel et de la méthode d'enregistrement très différente de
celle utilisée à l'origine. Bien que sa configuration n'était rien
d'extraordinaire pour l'époque, il s'agissait tout de même d'un gain
de qualité net. Ma meilleure banque vocale fut *BSUP01 KEINE Tashi JPN
Extend Power*, une voix puissante créée dans des circonstances
similaires mais avec à nouveau un meilleur savoir-faire.
Cette série de banques vocales basées sur ma voix inclut également
*BSUP01 KEINE Tashi TIB CVVC* ainsi qu'une autre banque vocale basée
sur une autre voix, celle de *BSUP02 Drolma TIB*, les deux premières
banques vocales tibétaines optimisées pour la synthèse de chant au
monde.
Je créai plus tard *ALYS 001 JPN*, *ALYS 001 FRA* et *ALYS 002 FRA* en
tant que prototypes d'ALYS sous UTAU. Ces banques vocales furent
connues plus tard sous le nom d'*ALYS4UTAU*.
Tandis que ces banques vocales ne sont plus en développement et que
leur support technique n'est plus assuré, *BSUP01 KEINE Tashi* et
*ALYS* sont toujours disponibles au téléchargement.
- **BSUP01 KEINE Tashi**: [BSUP01 KEINE Tashi](keine-tashi.md)
- **ALYS for UTAU**: [Open-Sourcing
ALYS](https://blog.phundrak.com/open-sourcing-alys/) (en anglais)
## Alter/Ego
[Alter/Ego](https://www.plogue.com/products/alter-ego.html) est un
moteur de synthèse vocale créé par [Plogue
Inc.](https://www.plogue.com/). ALYS fut la première voix de synthèse
commerciale créée pour ce moteur, ainsi que la première voix de
synthèse professionnelle francophone créée pour le chant.
Du fait de l'architecture et du comportement d'Alter/Ego, des
changements importants ont dû être apportés aux scripts
d'enregistrement d'ALYS (plus tard ré-utilisés pour LEORA). En
incluant la réalisation du script d'enregistrement, le développement
initial d'ALYS prit plus d'un an. Le développement de la première mise
à jour majeure d'ALYS prit neuf mois supplémentaires.
*ALYS for Alter/Ego* est désormais disponible gratuitement en tant que
module pour Alter/Ego depuis le 15 décembre 2021. Cependant, les
informations et la méthode nécessaires pour compiler sa banque vocale
pour Alter/Ego ne peuvent pas être rendues publiques. Plus
d'informations [ici](https://blog.phundrak.com/open-sourcing-alys/).

327
flake.lock generated Normal file
View File

@@ -0,0 +1,327 @@
{
"nodes": {
"alejandra": {
"inputs": {
"fenix": "fenix",
"flakeCompat": "flakeCompat",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1744324181,
"narHash": "sha256-Oi1n2ncF4/AWeY6X55o2FddIRICokbciqFYK64XorYk=",
"owner": "kamadorueda",
"repo": "alejandra",
"rev": "3e2a85506627062313e131bf8a85315f3387c8e0",
"type": "github"
},
"original": {
"owner": "kamadorueda",
"ref": "4.0.0",
"repo": "alejandra",
"type": "github"
}
},
"cachix": {
"inputs": {
"devenv": [
"devenv"
],
"flake-compat": [
"devenv",
"flake-compat"
],
"git-hooks": [
"devenv",
"git-hooks"
],
"nixpkgs": [
"devenv",
"nixpkgs"
]
},
"locked": {
"lastModified": 1760971495,
"narHash": "sha256-IwnNtbNVrlZIHh7h4Wz6VP0Furxg9Hh0ycighvL5cZc=",
"owner": "cachix",
"repo": "cachix",
"rev": "c5bfd933d1033672f51a863c47303fc0e093c2d2",
"type": "github"
},
"original": {
"owner": "cachix",
"ref": "latest",
"repo": "cachix",
"type": "github"
}
},
"devenv": {
"inputs": {
"cachix": "cachix",
"flake-compat": "flake-compat",
"flake-parts": "flake-parts",
"git-hooks": "git-hooks",
"nix": "nix",
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1761922975,
"narHash": "sha256-j4EB5ku/gDm7h7W7A+k70RYj5nUiW/l9wQtXMJUD2hg=",
"owner": "cachix",
"repo": "devenv",
"rev": "c9f0b47815a4895fadac87812de8a4de27e0ace1",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "devenv",
"type": "github"
}
},
"fenix": {
"inputs": {
"nixpkgs": [
"alejandra",
"nixpkgs"
],
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1730615655,
"narHash": "sha256-2HBR3zLn57LXKNRtxBb+O+uDqHM4n0pz51rPayMl4cg=",
"owner": "nix-community",
"repo": "fenix",
"rev": "efeb50e2535b17ffd4a135e6e3e5fd60a525180c",
"type": "github"
},
"original": {
"owner": "nix-community",
"repo": "fenix",
"type": "github"
}
},
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1761588595,
"narHash": "sha256-XKUZz9zewJNUj46b4AJdiRZJAvSZ0Dqj2BNfXvFlJC4=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"flake-parts": {
"inputs": {
"nixpkgs-lib": [
"devenv",
"nixpkgs"
]
},
"locked": {
"lastModified": 1760948891,
"narHash": "sha256-TmWcdiUUaWk8J4lpjzu4gCGxWY6/Ok7mOK4fIFfBuU4=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "864599284fc7c0ba6357ed89ed5e2cd5040f0c04",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "flake-parts",
"type": "github"
}
},
"flakeCompat": {
"flake": false,
"locked": {
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
"owner": "edolstra",
"repo": "flake-compat",
"type": "github"
}
},
"git-hooks": {
"inputs": {
"flake-compat": [
"devenv",
"flake-compat"
],
"gitignore": "gitignore",
"nixpkgs": [
"devenv",
"nixpkgs"
]
},
"locked": {
"lastModified": 1760663237,
"narHash": "sha256-BflA6U4AM1bzuRMR8QqzPXqh8sWVCNDzOdsxXEguJIc=",
"owner": "cachix",
"repo": "git-hooks.nix",
"rev": "ca5b894d3e3e151ffc1db040b6ce4dcc75d31c37",
"type": "github"
},
"original": {
"owner": "cachix",
"repo": "git-hooks.nix",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
"devenv",
"git-hooks",
"nixpkgs"
]
},
"locked": {
"lastModified": 1709087332,
"narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=",
"owner": "hercules-ci",
"repo": "gitignore.nix",
"rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
"type": "github"
},
"original": {
"owner": "hercules-ci",
"repo": "gitignore.nix",
"type": "github"
}
},
"nix": {
"inputs": {
"flake-compat": [
"devenv",
"flake-compat"
],
"flake-parts": [
"devenv",
"flake-parts"
],
"git-hooks-nix": [
"devenv",
"git-hooks"
],
"nixpkgs": [
"devenv",
"nixpkgs"
],
"nixpkgs-23-11": [
"devenv"
],
"nixpkgs-regression": [
"devenv"
]
},
"locked": {
"lastModified": 1761648602,
"narHash": "sha256-H97KSB/luq/aGobKRuHahOvT1r7C03BgB6D5HBZsbN8=",
"owner": "cachix",
"repo": "nix",
"rev": "3e5644da6830ef65f0a2f7ec22830c46285bfff6",
"type": "github"
},
"original": {
"owner": "cachix",
"ref": "devenv-2.30.6",
"repo": "nix",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1761313199,
"narHash": "sha256-wCIACXbNtXAlwvQUo1Ed++loFALPjYUA3dpcUJiXO44=",
"owner": "cachix",
"repo": "devenv-nixpkgs",
"rev": "d1c30452ebecfc55185ae6d1c983c09da0c274ff",
"type": "github"
},
"original": {
"owner": "cachix",
"ref": "rolling",
"repo": "devenv-nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"alejandra": "alejandra",
"devenv": "devenv",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay",
"systems": "systems"
}
},
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1730555913,
"narHash": "sha256-KNHZUlqsEibg3YtfUyOFQSofP8hp1HKoY+laoesBxRM=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "f17a5bbfd0969ba2e63a74505a80e55ecb174ed9",
"type": "github"
},
"original": {
"owner": "rust-lang",
"ref": "nightly",
"repo": "rust-analyzer",
"type": "github"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1762223900,
"narHash": "sha256-caxpESVH71mdrdihYvQZ9rTZPZqW0GyEG9un7MgpyRM=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "cfe1598d69a42a5edb204770e71b8df77efef2c3",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

56
flake.nix Normal file
View File

@@ -0,0 +1,56 @@
# Flake for the phundrak.com monorepo: pins inputs, exposes a formatter,
# the backend packages, and per-project development shells.
{
  # Pinned flake inputs; all of them reuse this flake's nixpkgs where possible.
  inputs = {
    nixpkgs.url = "github:cachix/devenv-nixpkgs/rolling";
    # List of default target systems (used by forEachSystem below).
    systems.url = "github:nix-systems/default";
    # Nix code formatter, pinned to release 4.0.0.
    alejandra = {
      url = "github:kamadorueda/alejandra/4.0.0";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    devenv = {
      url = "github:cachix/devenv";
      inputs.nixpkgs.follows = "nixpkgs";
    };
    # Provides Rust toolchains; consumed by the backend package/shell files.
    rust-overlay = {
      url = "github:oxalica/rust-overlay";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };
  # Extra binary caches so consumers avoid rebuilding devenv and this
  # project's packages from source.
  nixConfig = {
    extra-trusted-public-keys = [
      "devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw="
      "phundrak-dot-com.cachix.org-1:c02/xlCknJIDoaQPUzEWSJHPoXcmIXYzCa+hVRhbDgE="
    ];
    extra-substituters = [
      "https://devenv.cachix.org"
      "https://phundrak-dot-com.cachix.org"
    ];
  };
  outputs = {
    self,
    nixpkgs,
    devenv,
    systems,
    rust-overlay,
    alejandra,
    ...
  } @ inputs: let
    # Apply a per-system function to every system named by the `systems` input.
    forEachSystem = nixpkgs.lib.genAttrs (import systems);
  in {
    # `nix fmt` formats the tree with alejandra.
    formatter = forEachSystem (system: alejandra.defaultPackage.${system});
    # Package set is defined in ./backend/nix/package.nix.
    packages = forEachSystem (system: import ./backend/nix/package.nix { inherit rust-overlay inputs system; });
    # Separate dev shells for the backend and the frontend; each shell file
    # receives the inputs it needs explicitly.
    devShells = forEachSystem (
      system: let
        pkgs = nixpkgs.legacyPackages.${system};
      in {
        backend = import ./backend/nix/shell.nix {
          inherit inputs pkgs system self rust-overlay;
        };
        frontend = import ./frontend/shell.nix {
          inherit inputs pkgs self;
        };
      }
    );
  };
}

Some files were not shown because too many files have changed in this diff Show More