Compare commits
1 commit: feat/38-fs...7ac8215382
@@ -1,3 +1,2 @@
 SESAM_FSV_VERSION=1.40.13
 SESAM_INI_PATH=/etc/opt/santesocial/fsv/${SESAM_FSV_VERSION}/conf/sesam.ini
-DATABASE_URL=sqlite://p4pillon.sqlite?mode=rwc

@@ -1,3 +1,2 @@
 SESAM_FSV_VERSION=1.40.13
 SESAM_INI_PATH=${ALLUSERSPROFILE}\\santesocial\\fsv\\${SESAM_FSV_VERSION}\\conf\\sesam.ini
-DATABASE_URL=sqlite://p4pillon.sqlite?mode=rwc
.gitattributes (vendored, 1 line changed)
@@ -1 +0,0 @@
-Cargo.lock -merge linguist-generated=false
.gitignore (vendored, 3 lines changed)
@@ -23,6 +23,3 @@ target/

 # Ignore .env files
 .env
-
-# Development Database
-*.sqlite
Cargo.lock (generated, 2370 lines changed) — file diff suppressed because it is too large
Cargo.toml (12 lines changed)
@@ -4,18 +4,6 @@ members = [
     "crates/backend",
     "crates/desktop",
     "crates/sesam-vitale",
-    "crates/fsv",
     "crates/fsv-sys",
     "crates/utils",
-    "migration",
-    "entity",
-    ".",
 ]
-
-[workspace.dependencies]
-anyhow = "1.0"
-dotenv = "0.15"
-sea-orm-cli = "1.0.1"
-sea-orm = "1.0.1"
-serde = { version = "1.0.210", features = ["derive"] }
-thiserror = "1.0"
README.md (48 lines changed)
@@ -44,24 +44,6 @@ La CLI Tauri est nécessaire au lancement du client `desktop`. Elle peut être i
 cargo install tauri-cli --version "^2.0.0-rc"
 ```

-#### SeaORM CLI
-
-SeaORM est notre ORM. Le CLI SeaORM est nécessaire pour la génération des modèles de la base de données et des migrations associées. Elle peut être installée via Cargo :
-
-```bash
-cargo install sea-orm-cli
-```
-
-L'applicatif va chercher les informations de connexion à la base de données dans la variable `DATABASE_URL` importée depuis les [fichiers de configuration](#fichiers-de-configuration).
-
-```.env
-DATABASE_URL=sqlite://p4pillon.sqlite?mode=rwc
-```
-
-Toutefois, l'usage de la CLI de SeaORM nécessite de renseigner les informations de connexion à la base de données dans un fichier `.env` situé à la racine du projet.
-
-> Astuce : utilisé un lien symbolique pour éviter de dupliquer le fichier `.env`.
-
 #### FSV-sys

 La crate `fsv-sys` nécessite la présence des librairies fournies par le package FSV et la CryptolibCPS. Les instructions d'installation sont disponibles dans le [README](crates/sesam-vitale/README.md) de la crate `fsv-sys`.
@@ -76,7 +58,7 @@ Pour lancer l'application en mode développement, il est nécessaire d'exécuter

 ```bash
 # Lancement du serveur backend
-systemfd --no-pid -s http::8080 -- cargo watch -w crates/backend -x 'run --bin backend'
+systemfd --no-pid -s http::3030 -- cargo watch -x 'run --bin backend'
 ```

 ```bash
@@ -84,13 +66,9 @@ systemfd --no-pid -s http::8080 -- cargo watch -w crates/backend -x 'run --bin b
 # - frontend (serveur web, accessible via navigateur)
 bun run --cwd frontend/ dev
 # - desktop (client desktop, basé sur Tauri)
-cargo tauri dev --no-watch
+cargo tauri dev
 ```

-> Pour circonscrire les hot-reloads intempestifs mais peu utiles :
-> - le `backend` n'est rechargé que si des modifications sont détectées dans le dossier précisé par `-w crates/backend`
-> - le rechargement du `desktop` est désactivé par l'option `--no-watch` ; en effet, le rechargement du `frontend` est déjà pris en charge par `bun` et ne nécessite pas de rechargement du `desktop`
-
 ## Build

 Pour lancer le client `desktop`, il est nécessaire de faire appel à la CLI Tauri, qui se charge de gérer le build du `frontend` et son intégration au bundle :
@@ -98,25 +76,3 @@ Pour packager le client `desktop`, il est nécessaire de faire appel à la CLI T
 ```bash
 cargo tauri build
 ```
-
-## Gestion de la base de données
-
-### Création d'une migration
-
-```bash
-sea-orm-cli migrate generate <nom_de_la_migration>
-```
-
-Cette commande génère un fichier de migration à adapter dans le dossier `migration/src`.
-
-### Appliquer les migrations
-
-```bash
-sea-orm-cli migrate up
-```
-
-### Génération des entitées
-
-```bash
-sea-orm-cli generate entity -o entity/src/entities --with-serde both
-```
crates/app/.gitignore (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
+/target
+
+# Tailwind CSS CLI
+tailwindcss
crates/app/Cargo.toml (new file, 21 lines)
@@ -0,0 +1,21 @@
+[package]
+name = "app"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+askama = "0.12.1"
+askama_axum = "0.4.0"
+axum = "0.7.5"
+axum-htmx = { version = "0.6", features = ["auto-vary"] }
+listenfd = "1.0.1"
+notify = "6.1.1"
+serde = { version = "1.0.204", features = ["derive"] }
+thiserror = "1.0.63"
+tokio = { version = "1.39.1", features = ["macros", "rt-multi-thread"] }
+tower-http = { version = "0.5.2", features = ["fs"] }
+tower-livereload = "0.9.3"
+
+[dev-dependencies]
+cargo-watch = "8.5.1"
+systemfd = "0.4.0"
crates/app/README.md (new file, 35 lines)
@@ -0,0 +1,35 @@
+## Pré-requis
+
+- Récupérer le binaire TailwindCSS : https://tailwindcss.com/blog/standalone-cli
+
+## Exécution
+
+- Lancer tailwindcss en mode watch dans un terminal :
+```bash
+./tailwindcss -i css/input.css -o assets/css/style.css --watch
+```
+
+- Lancer le serveur web dans un autre terminal :
+```bash
+cargo run --bin app
+```
+
+## Rechargement automatique (_auto-reload_)
+
+Pour le projet `app`, nous utilisons en plus de `cargo-watch` ses librairies :
+- [`systemfd`](https://github.com/mitsuhiko/systemfd) permet de redémarrer un serveur sans interrompre les connexions en cours, il transmet le descripteur de fichier du socket à une nouvelle instance du serveur (exemple: `cargo watch -x run` --> `systemfd --no-pid -s http::3000 -- cargo watch -x run`). Si le port est déjà pris il en prendra un autre.
+- [`listenfd`](https://github.com/mitsuhiko/listenfd) permet, côté _Rust_, de démarrer un serveur en utilisant des connexions déjà ouvertes.
+
+Pour notre application voici la commande à lancer :
+
+```bash
+systemfd --no-pid -s http::3000 -- cargo watch -x 'run --bin app'
+```
+
+## Chargement à chaud (_livereload_)
+
+Pour que notre navigateur rafraîchisse automatique notre page lorsque le serveur a été recompilé, nous utilisons la librairie [`tower-livereload`](https://github.com/leotaku/tower-livereload).
+
+A chaque changement, que ça soit sur du code en _Rust_, _HTML_, _CSS_ ou _JS_ alors le navigateur va recharger entièrement la page.
+
+En Rust, il n'existe pas encore d'outil de _Hot Reload_ complet et intégré comme on en trouve dans d'autres environnements de développement web, comme pour _Node.js_.
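Below is a minimal sketch of how the pieces this README describes fit together — `listenfd` reusing the socket handed over by `systemfd`, and `tower-livereload` injecting the reload script. The full implementation is `crates/app/src/main.rs` later in this diff; the route and port here are illustrative only.

```rust
use axum::{routing::get, Router};
use listenfd::ListenFd;
use tokio::net::TcpListener;
use tower_livereload::LiveReloadLayer;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // LiveReloadLayer injects the script that reloads the page after a rebuild.
    let app = Router::new()
        .route("/", get(|| async { "Hello from app" }))
        .layer(LiveReloadLayer::new());

    // Reuse the socket passed by `systemfd`, if any; otherwise bind locally.
    let mut listenfd = ListenFd::from_env();
    let listener = match listenfd.take_tcp_listener(0)? {
        Some(std_listener) => {
            std_listener.set_nonblocking(true)?;
            TcpListener::from_std(std_listener)?
        }
        None => TcpListener::bind("localhost:3000").await?,
    };

    axum::serve(listener, app).await?;
    Ok(())
}
```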
crates/app/askama.toml (new file, 6 lines)
@@ -0,0 +1,6 @@
+[general]
+# Directories to search for templates, relative to the crate root.
+dirs = [
+    "src/pages",
+    "src/components",
+]
crates/app/assets/css/style.css (new file, 1203 lines) — file diff suppressed because it is too large
crates/app/assets/js/alpinejs@3.14.1.min.js (new file, vendored, 5 lines) — file diff suppressed because one or more lines are too long
crates/app/assets/js/flowbite@2.5.1.min.js (new file, vendored, 2 lines) — file diff suppressed because one or more lines are too long
crates/app/assets/js/htmx@2.0.1.min.js (new file, vendored, 1 line) — file diff suppressed because one or more lines are too long
crates/app/css/input.css (new file, 3 lines)
@@ -0,0 +1,3 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
crates/app/src/components/base.html (new file, 23 lines)
@@ -0,0 +1,23 @@
+{% if hx_request %}
+<title>{% block title %}{{ title }}{% endblock %}</title>
+{% block body %}{% endblock %}
+{% else %}
+<!doctype html>
+<html lang="fr" class="h-full">
+  <head>
+    <title>{% block title %}{{ title }}{% endblock %}</title>
+
+    <script src="/assets/js/htmx@2.0.1.min.js"></script>
+    <script src="/assets/js/alpinejs@3.14.1.min.js" defer></script>
+    <script src="/assets/js/flowbite@2.5.1.min.js"></script>
+    <link href="/assets/css/style.css" rel="stylesheet">
+
+    {% block head %}{% endblock %}
+  </head>
+  <body class="h-full">
+    <div class="min-h-full">
+      {% block body %}{% endblock %}
+    </div>
+  </body>
+</html>
+{% endif %}
crates/app/src/components/navbar/menu-item.html (new file, 18 lines)
@@ -0,0 +1,18 @@
+{% set selected = item.id == current %}
+<li>
+  <a
+    href="{{ item.href }}"
+    {% if selected -%}
+    class="block py-2 px-3 text-white bg-blue-700 rounded md:bg-transparent md:text-blue-700 md:p-0 md:dark:text-blue-500"
+    aria-current="page"
+    {% else -%}
+    class="block py-2 px-3 text-gray-900 rounded hover:bg-gray-100 md:hover:bg-transparent md:hover:text-blue-700 md:p-0 dark:text-white md:dark:hover:text-blue-500 dark:hover:bg-gray-700 dark:hover:text-white md:dark:hover:bg-transparent dark:border-gray-700"
+    {% endif -%}
+    hx-get="{{ item.href }}"
+    hx-push-url="true"
+    hx-swap="outerHTML"
+    hx-select-oob="#menu-items,#page-header,#page-main"
+  >
+    {{ item.label }}
+  </a>
+</li>
crates/app/src/components/navbar/navbar.html (new file, 50 lines)
@@ -0,0 +1,50 @@
+{% macro navbar(current) %}
+
+{% let items=crate::menu::get_menu_items() %}
+
+<nav class="bg-white border-gray-200 dark:bg-gray-900">
+  <div class="max-w-screen-xl flex flex-wrap items-center justify-between mx-auto p-4">
+    <a href="/" class="flex items-center space-x-3 rtl:space-x-reverse">
+      <img src="https://flowbite.com/docs/images/logo.svg" class="h-8" alt="Flowbite Logo" />
+      <span class="self-center text-2xl font-semibold whitespace-nowrap dark:text-white">Krys4lide</span>
+    </a>
+    <div class="flex items-center md:order-2 space-x-3 md:space-x-0 rtl:space-x-reverse">
+      <button type="button" class="flex text-sm bg-gray-800 rounded-full md:me-0 focus:ring-4 focus:ring-gray-300 dark:focus:ring-gray-600" id="user-menu-button" aria-expanded="false" data-dropdown-toggle="user-dropdown" data-dropdown-placement="bottom">
+        <span class="sr-only">Ouvrir le menu de profil</span>
+        <img class="w-8 h-8 rounded-full" src="https://flowbite.com/docs/images/people/profile-picture-3.jpg" alt="user photo">
+      </button>
+      <!-- Dropdown menu -->
+      <div class="z-50 hidden my-4 text-base list-none bg-white divide-y divide-gray-100 rounded-lg shadow dark:bg-gray-700 dark:divide-gray-600" id="user-dropdown">
+        <div class="px-4 py-3">
+          <span class="block text-sm text-gray-900 dark:text-white">Bonnie Green</span>
+          <span class="block text-sm text-gray-500 truncate dark:text-gray-400">name@flowbite.com</span>
+        </div>
+        <ul class="py-2" aria-labelledby="user-menu-button">
+          <li>
+            <a href="#" class="block px-4 py-2 text-sm text-gray-700 hover:bg-gray-100 dark:hover:bg-gray-600 dark:text-gray-200 dark:hover:text-white">Profile</a>
+          </li>
+          <li>
+            <a href="#" class="block px-4 py-2 text-sm text-gray-700 hover:bg-gray-100 dark:hover:bg-gray-600 dark:text-gray-200 dark:hover:text-white">Settings</a>
+          </li>
+          <li>
+            <a href="#" class="block px-4 py-2 text-sm text-gray-700 hover:bg-gray-100 dark:hover:bg-gray-600 dark:text-gray-200 dark:hover:text-white">Sign out</a>
+          </li>
+        </ul>
+      </div>
+      <button data-collapse-toggle="navbar-user" type="button" class="inline-flex items-center p-2 w-10 h-10 justify-center text-sm text-gray-500 rounded-lg md:hidden hover:bg-gray-100 focus:outline-none focus:ring-2 focus:ring-gray-200 dark:text-gray-400 dark:hover:bg-gray-700 dark:focus:ring-gray-600" aria-controls="navbar-user" aria-expanded="false">
+        <span class="sr-only">Ouvrir le menu de navigation</span>
+        <svg class="w-5 h-5" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 17 14">
+          <path stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M1 1h15M1 7h15M1 13h15"/>
+        </svg>
+      </button>
+    </div>
+    <div class="items-center justify-between hidden w-full md:flex md:w-auto md:order-1" id="navbar-user">
+      <ul id="menu-items" class="flex flex-col font-medium p-4 md:p-0 mt-4 border border-gray-100 rounded-lg bg-gray-50 md:space-x-8 rtl:space-x-reverse md:flex-row md:mt-0 md:border-0 md:bg-white dark:bg-gray-800 md:dark:bg-gray-900 dark:border-gray-700">
+        {% for item in items %}
+        {% include "navbar/menu-item.html" %}
+        {% endfor %}
+      </ul>
+    </div>
+  </div>
+</nav>
+{% endmacro %}
crates/app/src/components/skeletons/card.html (new file, 22 lines)
@@ -0,0 +1,22 @@
+<div role="status" class="animate-pulse max-w-sm p-4 border border-gray-200 rounded shadow md:p-6 dark:border-gray-700">
+  <div class="flex items-center justify-center h-48 mb-4 bg-gray-300 rounded dark:bg-gray-700">
+    <svg class="w-10 h-10 text-gray-200 dark:text-gray-600" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="currentColor" viewBox="0 0 16 20">
+      <path d="M14.066 0H7v5a2 2 0 0 1-2 2H0v11a1.97 1.97 0 0 0 1.934 2h12.132A1.97 1.97 0 0 0 16 18V2a1.97 1.97 0 0 0-1.934-2ZM10.5 6a1.5 1.5 0 1 1 0 2.999A1.5 1.5 0 0 1 10.5 6Zm2.221 10.515a1 1 0 0 1-.858.485h-8a1 1 0 0 1-.9-1.43L5.6 10.039a.978.978 0 0 1 .936-.57 1 1 0 0 1 .9.632l1.181 2.981.541-1a.945.945 0 0 1 .883-.522 1 1 0 0 1 .879.529l1.832 3.438a1 1 0 0 1-.031.988Z"/>
+      <path d="M5 5V.13a2.96 2.96 0 0 0-1.293.749L.879 3.707A2.98 2.98 0 0 0 .13 5H5Z"/>
+    </svg>
+  </div>
+  <div class="h-2.5 bg-gray-200 rounded-full dark:bg-gray-700 w-48 mb-4"></div>
+  <div class="h-2 bg-gray-200 rounded-full dark:bg-gray-700 mb-2.5"></div>
+  <div class="h-2 bg-gray-200 rounded-full dark:bg-gray-700 mb-2.5"></div>
+  <div class="h-2 bg-gray-200 rounded-full dark:bg-gray-700"></div>
+  <div class="flex items-center mt-4">
+    <svg class="w-10 h-10 me-3 text-gray-200 dark:text-gray-700" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="currentColor" viewBox="0 0 20 20">
+      <path d="M10 0a10 10 0 1 0 10 10A10.011 10.011 0 0 0 10 0Zm0 5a3 3 0 1 1 0 6 3 3 0 0 1 0-6Zm0 13a8.949 8.949 0 0 1-4.951-1.488A3.987 3.987 0 0 1 9 13h2a3.987 3.987 0 0 1 3.951 3.512A8.949 8.949 0 0 1 10 18Z"/>
+    </svg>
+    <div>
+      <div class="h-2.5 bg-gray-200 rounded-full dark:bg-gray-700 w-32 mb-2"></div>
+      <div class="w-48 h-2 bg-gray-200 rounded-full dark:bg-gray-700"></div>
+    </div>
+  </div>
+  <span class="sr-only">Loading...</span>
+</div>
crates/app/src/components/skeletons/menu-items.html (new file, 4 lines)
@@ -0,0 +1,4 @@
+<div role="status" class="animate-pulse flex items-center justify-center h-full">
+  <div class="w-32 h-4 bg-gray-200 rounded-full dark:bg-gray-700 me-3"></div>
+  <div class="w-32 h-4 bg-gray-200 rounded-full dark:bg-gray-700"></div>
+</div>
crates/app/src/components/skeletons/page-title.html (new file, 1 line)
@@ -0,0 +1 @@
+<div role="status" class="animate-pulse h-7 bg-gray-200 rounded-full dark:bg-gray-700 w-48 mt-3"></div>
crates/app/src/lib.rs (new file, 21 lines)
@@ -0,0 +1,21 @@
+use std::path::PathBuf;
+
+use axum::http::{StatusCode, Uri};
+use axum_htmx::AutoVaryLayer;
+use tower_http::services::ServeDir;
+
+mod menu;
+mod pages;
+
+async fn fallback(uri: Uri) -> (StatusCode, String) {
+    (StatusCode::NOT_FOUND, format!("No route for {uri}"))
+}
+
+pub async fn get_router(assets_path: PathBuf) -> axum::Router<()> {
+    axum::Router::new()
+        .nest_service("/assets", ServeDir::new(assets_path))
+        .merge(pages::get_routes())
+        .fallback(fallback)
+        // The AutoVaryLayer is used to avoid cache issues with htmx (cf: https://github.com/robertwayne/axum-htmx?tab=readme-ov-file#auto-caching-management)
+        .layer(AutoVaryLayer)
+}
crates/app/src/main.rs (new file, 84 lines)
@@ -0,0 +1,84 @@
+use std::path::{Path, PathBuf};
+use std::{env, io};
+
+use axum::body::Body;
+use axum::http::Request;
+use listenfd::ListenFd;
+use notify::Watcher;
+use thiserror::Error;
+use tokio::net::TcpListener;
+use tower_livereload::predicate::Predicate;
+use tower_livereload::LiveReloadLayer;
+
+use ::app::get_router;
+
+#[derive(Error, Debug)]
+pub enum AppError {
+    #[error("Unable to bind to TCP listener")]
+    TCPListener(#[from] std::io::Error),
+    #[error("Error with the notify watcher")]
+    NotifyWatcher(#[from] notify::Error),
+    #[error("Missing environment variable {var}")]
+    MissingEnvVar { var: &'static str },
+}
+
+/// Nous filtrons les requêtes de `htmx` pour ne pas inclure le script _JS_ qui gère le rechargement
+/// Voir https://github.com/leotaku/tower-livereload/pull/3
+#[derive(Copy, Clone)]
+struct NotHtmxPredicate;
+impl<T> Predicate<Request<T>> for NotHtmxPredicate {
+    fn check(&mut self, req: &Request<T>) -> bool {
+        !(req.headers().contains_key("hx-request"))
+    }
+}
+
+const DEFAULT_LISTENER: &str = "localhost:3000";
+async fn get_tcp_listener() -> Result<TcpListener, io::Error> {
+    let mut listenfd = ListenFd::from_env();
+
+    match listenfd.take_tcp_listener(0)? {
+        // if we are given a tcp listener on listen fd 0, we use that one
+        Some(listener) => {
+            listener.set_nonblocking(true)?;
+            Ok(TcpListener::from_std(listener)?)
+        }
+        // otherwise fall back to local listening
+        None => Ok(TcpListener::bind(DEFAULT_LISTENER).await?),
+    }
+}
+
+fn get_livereload_layer(
+    templates_paths: Vec<PathBuf>,
+) -> Result<LiveReloadLayer<NotHtmxPredicate>, notify::Error> {
+    let livereload = LiveReloadLayer::new();
+    let reloader = livereload.reloader();
+    let mut watcher = notify::recommended_watcher(move |_| reloader.reload())?;
+    for templates_path in templates_paths {
+        watcher.watch(templates_path.as_path(), notify::RecursiveMode::Recursive)?;
+    }
+    Ok(livereload.request_predicate::<Body, NotHtmxPredicate>(NotHtmxPredicate))
+}
+
+#[tokio::main]
+async fn main() -> Result<(), AppError> {
+    let manifest_dir = env::var("CARGO_MANIFEST_DIR").map_err(|_| AppError::MissingEnvVar {
+        var: "CARGO_MANIFEST_DIR",
+    })?;
+    let assets_path = Path::new(&manifest_dir).join("assets");
+    let templates_paths = vec![
+        Path::new(&manifest_dir).join("src/pages"),
+        Path::new(&manifest_dir).join("src/components"),
+    ];
+
+    let livereload_layer =
+        get_livereload_layer(templates_paths).map_err(AppError::NotifyWatcher)?;
+    let router = get_router(assets_path).await.layer(livereload_layer);
+
+    let listener: TcpListener = get_tcp_listener().await.map_err(AppError::TCPListener)?;
+    let local_addr = listener.local_addr().map_err(AppError::TCPListener)?;
+    println!("Listening on: http://{}", local_addr);
+
+    // Run the server with the router
+    axum::serve(listener, router.into_make_service()).await?;
+    Ok(())
+}
crates/app/src/menu.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
+pub struct MenuItem {
+    pub id: String,
+    pub label: String,
+    pub href: String,
+}
+
+/// Get the menu items
+/// This function is the central place to define the menu items
+/// It can be used directly in templates, for example in the `navbar` component to render the menu
+pub fn get_menu_items() -> Vec<MenuItem> {
+    vec![
+        MenuItem {
+            id: "home".to_string(),
+            label: "Accueil".to_string(),
+            href: "/".to_string(),
+        },
+        MenuItem {
+            id: "cps".to_string(),
+            label: "CPS".to_string(),
+            href: "/cps".to_string(),
+        },
+    ]
+}
crates/app/src/pages/cps.html (new file, 43 lines)
@@ -0,0 +1,43 @@
+{% extends "base.html" %}
+{% import "navbar/navbar.html" as navbar -%}
+
+{% block title %}Pharma Libre - CPS{% endblock %}
+
+{% block body %}
+{% call navbar::navbar(current="cps") %}
+<div class="py-10">
+  <header id="page-header">
+    <div class="mx-auto max-w-7xl px-4 sm:px-6 lg:px-8">
+      <h1
+        id="page-title"
+        class="text-3xl font-bold leading-tight tracking-tight text-gray-900"
+      >
+        CPS
+      </h1>
+    </div>
+  </header>
+  <main id="page-main">
+    <div
+      class="mx-auto max-w-7xl px-4 py-8 sm:px-6 lg:px-8"
+    >
+      <div
+        class="border-2 border-dashed rounded-lg border-gray-300 dark:border-gray-600 h-96 mb-4"
+      >A</div>
+      <div class="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-4 mb-4">
+        <div
+          class="border-2 border-dashed border-gray-300 rounded-lg dark:border-gray-600 h-32 md:h-64"
+        >B</div>
+        <div
+          class="border-2 border-dashed rounded-lg border-gray-300 dark:border-gray-600 h-32 md:h-64"
+        >C</div>
+        <div
+          class="border-2 border-dashed rounded-lg border-gray-300 dark:border-gray-600 h-32 md:h-64"
+        >D</div>
+        <div
+          class="border-2 border-dashed rounded-lg border-gray-300 dark:border-gray-600 h-32 md:h-64"
+        >E</div>
+      </div>
+    </div>
+  </main>
+</div>
+{% endblock %}
crates/app/src/pages/cps.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
+use askama_axum::Template;
+use axum_htmx::HxRequest;
+
+#[derive(Template)]
+#[template(path = "cps.html")]
+pub struct CpsTemplate {
+    hx_request: bool,
+}
+
+pub async fn cps(HxRequest(hx_request): HxRequest) -> CpsTemplate {
+    CpsTemplate { hx_request }
+}
crates/app/src/pages/home.html (new file, 43 lines)
@@ -0,0 +1,43 @@
+{% extends "base.html" %}
+{% import "navbar/navbar.html" as navbar -%}
+
+{% block title %}Pharma Libre - Accueil{% endblock %}
+
+{% block body %}
+{% call navbar::navbar(current="home") %}
+<div class="py-10">
+  <header id="page-header">
+    <div class="mx-auto max-w-7xl px-4 sm:px-6 lg:px-8">
+      <h1
+        id="page-title"
+        class="text-3xl font-bold leading-tight tracking-tight text-gray-900"
+      >
+        Accueil
+      </h1>
+    </div>
+  </header>
+  <main id="page-main">
+    <div
+      class="mx-auto max-w-7xl px-4 py-8 sm:px-6 lg:px-8"
+    >
+      <div class="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-4 gap-4 mb-4">
+        <div
+          class="border-2 border-dashed border-gray-300 rounded-lg dark:border-gray-600 h-32 md:h-64"
+        >A</div>
+        <div
+          class="border-2 border-dashed rounded-lg border-gray-300 dark:border-gray-600 h-32 md:h-64"
+        >B</div>
+        <div
+          class="border-2 border-dashed rounded-lg border-gray-300 dark:border-gray-600 h-32 md:h-64"
+        >C</div>
+        <div
+          class="border-2 border-dashed rounded-lg border-gray-300 dark:border-gray-600 h-32 md:h-64"
+        >D</div>
+      </div>
+      <div
+        class="border-2 border-dashed rounded-lg border-gray-300 dark:border-gray-600 h-96 mb-4"
+      >E</div>
+    </div>
+  </main>
+</div>
+{% endblock %}
crates/app/src/pages/home.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
+use askama_axum::Template;
+use axum_htmx::HxRequest;
+
+#[derive(Template)]
+#[template(path = "home.html")]
+pub struct GetHomeTemplate {
+    hx_request: bool,
+}
+
+pub async fn home(HxRequest(hx_request): HxRequest) -> GetHomeTemplate {
+    GetHomeTemplate { hx_request }
+}
crates/app/src/pages/mod.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
+use axum::{routing, Router};
+
+mod cps;
+mod home;
+
+pub fn get_routes() -> Router {
+    Router::new()
+        .route("/", routing::get(home::home))
+        .route("/cps", routing::get(cps::cps))
+}
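A minimal sketch of how these routes and the `hx_request` flag could be exercised end to end. It assumes a `tower` dev-dependency (for `ServiceExt::oneshot`) that the crates/app/Cargo.toml above does not declare, so treat it as an illustration rather than part of this change.

```rust
use axum::body::Body;
use axum::http::{Request, StatusCode};
use tower::ServiceExt; // `oneshot`, from the assumed `tower` dev-dependency

#[tokio::test]
async fn cps_full_page_and_htmx_partial() {
    // Build the real router; the assets directory is only needed for /assets requests.
    let router = app::get_router(std::path::PathBuf::from("assets")).await;

    // Plain request: base.html renders the full document ({% else %} branch).
    let full = router
        .clone()
        .oneshot(Request::builder().uri("/cps").body(Body::empty()).unwrap())
        .await
        .unwrap();
    assert_eq!(full.status(), StatusCode::OK);

    // htmx request: only the title and body blocks are rendered.
    let partial = router
        .oneshot(
            Request::builder()
                .uri("/cps")
                .header("HX-Request", "true")
                .body(Body::empty())
                .unwrap(),
        )
        .await
        .unwrap();
    assert_eq!(partial.status(), StatusCode::OK);
}
```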
crates/app/tailwind.config.js (new file, 12 lines)
@@ -0,0 +1,12 @@
+/** @type {import('tailwindcss').Config} */
+module.exports = {
+  content: [
+    './src/**/*.html',
+    './css/**/*.css',
+  ],
+  theme: {
+    extend: {},
+  },
+  plugins: [],
+}
+
@@ -5,25 +5,10 @@ edition = "2021"

 [dependencies]
 anyhow = "1.0.89"
-axum = { version = "0.7.6", features = ["macros"] }
+axum = "0.7.6"
 listenfd = "1.0.1"
 tokio = { version = "1.40.0", features = ["macros", "rt-multi-thread"] }
-tower-http = { version = "0.6.1", features = ["cors"] }
-
-sea-orm = { workspace = true, features = [
-    # Same `ASYNC_RUNTIME` and `DATABASE_DRIVER` as in the migration crate
-    "sqlx-sqlite",
-    "runtime-tokio-rustls",
-    "macros",
-] }
-serde.workspace = true
-thiserror.workspace = true
-
-entity = { path = "../../entity" }
-migration = { path = "../../migration" }
-utils = { path = "../utils" }

 [dev-dependencies]
 cargo-watch = "8.5.2"
-sea-orm-cli.workspace = true
 systemfd = "0.4.3"
@@ -10,19 +10,10 @@ En développement, le mécanisme de hot-reload nécessite de disposer de `cargo-
 cargo install cargo-watch systemfd
 ```

-## Configuration
-
-> Astuce : lorsqu'on exécute directement la crate `backend` à des fins de développement, le système de configuration n'utilisera pas l'éventuel fichier `.env` situé à la racine du workspace Rust. Pour éviter de dupliquer le fichier `.env`, il est possible de créer un lien symbolique vers le fichier `.env` de la crate `backend` :
-
-```bash
-cd crates/backend
-ln -s ../../.env .env
-```
-
 ## Développement

 Pour lancer le serveur en mode développement, exécutez la commande suivante :

 ```bash
-systemfd --no-pid -s http::8080 -- cargo watch -w crates/backend -x 'run --bin backend'
+systemfd --no-pid -s http::3030 -- cargo watch -x 'run --bin backend'
 ```
@@ -1,48 +0,0 @@
-use axum::{extract::State, routing, Json};
-use sea_orm::*;
-use serde::Serialize;
-
-use ::entity::{debug, debug::Entity as DebugEntity};
-
-use crate::{AppError, AppState};
-
-// DATABASE DEBUG CONTROLLERS
-
-async fn get_debug_entries(db: &DatabaseConnection) -> Result<Vec<debug::Model>, DbErr> {
-    DebugEntity::find().all(db).await
-}
-
-async fn add_random_debug_entry(State(AppState { db_connection }): State<AppState>) {
-    let random_entry = debug::ActiveModel {
-        title: Set("Random title".to_string()),
-        text: Set("Random text".to_string()),
-        ..Default::default()
-    };
-    random_entry.insert(&db_connection).await.unwrap();
-}
-
-// API HANDLER
-
-#[derive(Serialize, Debug)]
-struct DebugResponse {
-    db_ping_status: bool,
-    entries: Vec<debug::Model>,
-}
-
-#[axum::debug_handler]
-async fn debug(
-    State(AppState { db_connection }): State<AppState>,
-) -> Result<Json<DebugResponse>, AppError> {
-    let db_ping_status = db_connection.ping().await.is_ok();
-    let debug_entries = get_debug_entries(&db_connection).await?;
-    Ok(Json(DebugResponse {
-        db_ping_status,
-        entries: debug_entries,
-    }))
-}
-
-pub fn get_routes() -> axum::Router<crate::AppState> {
-    axum::Router::new()
-        .route("/", routing::get(debug))
-        .route("/add_random", routing::post(add_random_debug_entry))
-}
@@ -1,9 +0,0 @@
-use axum::Router;
-
-use crate::AppState;
-
-mod debug;
-
-pub fn get_routes() -> Router<AppState> {
-    Router::new().nest("/debug", debug::get_routes())
-}
@@ -1,11 +0,0 @@
-use migration::{Migrator, MigratorTrait};
-use sea_orm::{Database, DatabaseConnection, DbErr};
-use std::env;
-
-pub async fn get_connection() -> Result<DatabaseConnection, DbErr> {
-    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
-
-    let db_connection = Database::connect(database_url).await?;
-    Migrator::up(&db_connection, None).await?;
-    Ok(db_connection)
-}
@@ -1,47 +1,12 @@
 use anyhow::Error as AnyError;
-use axum::http::{header, StatusCode, Uri};
+use axum::http::{StatusCode, Uri};
 use axum::response::{IntoResponse, Response};
 use axum::{routing::get, Router};
-use sea_orm::{DatabaseConnection, DbErr};
-use thiserror::Error;
-use tower_http::cors::{Any, CorsLayer};
-
-use ::utils::config::{load_config, ConfigError};
-
-mod api;
-mod db;
-
-#[derive(Error, Debug)]
-pub enum InitError {
-    #[error(transparent)]
-    ConfigError(#[from] ConfigError),
-}
-
-pub fn init() -> Result<(), InitError> {
-    load_config(None)?;
-    Ok(())
-}
-
-#[derive(Clone)]
-pub struct AppState {
-    db_connection: DatabaseConnection,
-}
-
-pub async fn get_router() -> Result<Router, DbErr> {
-    let db_connection = db::get_connection().await?;
-    let state: AppState = AppState { db_connection };
-
-    let cors = CorsLayer::new()
-        .allow_methods(Any)
-        .allow_origin(Any)
-        .allow_headers([header::CONTENT_TYPE]);
-
-    Ok(Router::new()
+pub fn get_router() -> Router {
+    Router::new()
         .route("/", get(|| async { "Hello, world!" }))
-        .merge(api::get_routes())
         .fallback(fallback)
-        .with_state(state)
-        .layer(cors))
 }

 async fn fallback(uri: Uri) -> (StatusCode, String) {
@@ -1,39 +1,24 @@
 use listenfd::ListenFd;
-use thiserror::Error;
 use tokio::net::TcpListener;

-use backend::{get_router, init, InitError};
+use backend::get_router;

-#[derive(Error, Debug)]
-pub enum BackendError {
-    #[error("Error while setting up or serving the TCP listener")]
-    ServeTCPListener(#[from] std::io::Error),
-    #[error("Error while initialising the backend")]
-    InitError(#[from] InitError),
-    #[error("Error with the database connection")]
-    DatabaseConnection(#[from] sea_orm::DbErr),
-}
-
 #[tokio::main]
-async fn main() -> Result<(), BackendError> {
-    init()?;
+async fn main() {
+    let app = get_router();

-    let app = get_router().await?;
-
     let mut listenfd = ListenFd::from_env();
-    let listener = match listenfd.take_tcp_listener(0)? {
+    let listener = match listenfd.take_tcp_listener(0).unwrap() {
         // if we are given a tcp listener on listen fd 0, we use that one
         Some(listener) => {
-            listener.set_nonblocking(true)?;
-            TcpListener::from_std(listener)?
+            listener.set_nonblocking(true).unwrap();
+            TcpListener::from_std(listener).unwrap()
         }
         // otherwise fall back to local listening
-        None => TcpListener::bind("0.0.0.0:8080").await?,
+        None => TcpListener::bind("0.0.0.0:8080").await.unwrap(),
     };

-    let local_addr = listener.local_addr()?;
-    println!("Listening on http://{}", local_addr);
-    axum::serve(listener, app).await?;
+    println!("Listening on {}", listener.local_addr().unwrap());
+    axum::serve(listener, app).await.unwrap();

-    Ok(())
 }
@@ -13,12 +13,8 @@ crate-type = ["lib", "cdylib", "staticlib"]
 tauri-build = { version = "2.0.0-rc", features = [] }

 [dependencies]
-bytes = "1.6.1"
-http = "1.1.0"
-serde = { version = "1", features = ["derive"] }
-serde_json = "1"
 tauri = { version = "2.0.0-rc", features = [] }
 tauri-plugin-shell = "2.0.0-rc"
-tower = "0.4.13"
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"

-thiserror.workspace = true
@@ -11,8 +11,7 @@ links = "ssvlux64,ssvosx,Ssvw64"
 [dependencies]
 libc = "0.2.159"
 libloading = "0.8.5"
-thiserror.workspace = true
+thiserror = "1.0.64"

 [build-dependencies]
 bindgen = "0.70.1"
@@ -1,32 +0,0 @@
-# État d'avancement de l'implémentation des bindings FSV
-
-| Module | Progression |
-|-------------|------------------------------------|
-| [SSV](#ssv) |  |
-| [SGD](#sgd) |  |
-| [SRT](#srt) |  |
-| [STS](#sts) |  |
-
-## SSV
-
-| Fonctions implémentées |
-|------------------------|
-| SSV_InitLIB2 |
-| SSV_LireConfig |
-| SSV_LireCartePS |
-
-## SGD
-
-| Fonctions implémentées |
-|------------------------|
-
-
-## SRT
-
-| Fonctions implémentées |
-|------------------------|
-
-## STS
-
-| Fonctions implémentées |
-|------------------------|
@@ -1,18 +1,5 @@
 # FSV-sys, bindings Rust pour le package FSV SESAM-Vitale

-## Librairies FSV
-
-### Versions supportées
-
-| Version FSV |
-|-------------|
-| 1.40.14 |
-| 1.40.13 |
-
-### État d'avancement de l'implémentation des bindings FSV
-
-Les détails de l'avancement de l'implémentation des bindings FSV sont donnés dans le fichier [PROGRESS.md](PROGRESS.md)
-
 ## Utilisation

 ### Pré-requis
@@ -32,5 +19,5 @@ Les détails de l'avancement de l'implémentation des bindings FSV sont donnés

 ### Pré-requis

-- Pour la génération des bindings lors de la phase de `build` à l'aide de `bindgen`, il est nécessaire d'avoir installé `clang` ([documentation](https://rust-lang.github.io/rust-bindgen/requirements.html)).
+- Pour la génération des bindings lors de la pahse de `build` à l'aide de `bindgen`, il est nécessaire d'avoir installé `clang` ([documentation](https://rust-lang.github.io/rust-bindgen/requirements.html)).

@@ -1,7 +0,0 @@
-#ifndef WRAPPER_LINUX_H
-#define WRAPPER_LINUX_H
-
-#include "../../vendor/fsv/1.40.14.13/includes/SYS_DEF/linux/mc_sys_def.h"
-#include "../../vendor/fsv/1.40.14.13/includes/SSV/pourFSV1.40.13/ssv.h"
-
-#endif // WRAPPER_LINUX_H

@@ -1,7 +0,0 @@
-#ifndef WRAPPER_MACOSX_H
-#define WRAPPER_MACOSX_H
-
-#include "../../vendor/fsv/1.40.14.13/includes/SYS_DEF/macosx/mc_sys_def.h"
-#include "../../vendor/fsv/1.40.14.13/includes/SSV/pourFSV1.40.13/ssv.h"
-
-#endif // WRAPPER_MACOSX_H

@@ -1,7 +0,0 @@
-#ifndef WRAPPER_WIN_H
-#define WRAPPER_WIN_H
-
-#include "../../vendor/fsv/1.40.14.13/includes/SYS_DEF/win/mc_sys_def.h"
-#include "../../vendor/fsv/1.40.14.13/includes/SSV/pourFSV1.40.13/ssv.h"
-
-#endif // WRAPPER_WIN_H

@@ -1,7 +0,0 @@
-#ifndef WRAPPER_LINUX_H
-#define WRAPPER_LINUX_H
-
-#include "../../vendor/fsv/1.40.14.13/includes/SYS_DEF/linux/mc_sys_def.h"
-#include "../../vendor/fsv/1.40.14.13/includes/SSV/pourFSV1.40.14/ssv.h"
-
-#endif // WRAPPER_LINUX_H

@@ -1,7 +0,0 @@
-#ifndef WRAPPER_MACOSX_H
-#define WRAPPER_MACOSX_H
-
-#include "../../vendor/fsv/1.40.14.13/includes/SYS_DEF/macosx/mc_sys_def.h"
-#include "../../vendor/fsv/1.40.14.13/includes/SSV/pourFSV1.40.14/ssv.h"
-
-#endif // WRAPPER_MACOSX_H

@@ -1,7 +0,0 @@
-#ifndef WRAPPER_WIN_H
-#define WRAPPER_WIN_H
-
-#include "../../vendor/fsv/1.40.14.13/includes/SYS_DEF/win/mc_sys_def.h"
-#include "../../vendor/fsv/1.40.14.13/includes/SSV/pourFSV1.40.14/ssv.h"
-
-#endif // WRAPPER_WIN_H
@@ -1,11 +1,43 @@
 use std::{env, path::PathBuf};

-// Supported versions of FSV
-static SUPPORTED_FSV_VERSIONS: [&str; 2] = ["1.40.14", "1.40.13"];
+fn main() {
+    // // TODO: make the path dynamic
+    // println!("cargo:rustc-link-search=native=/home/florianbriand/TMP/SESAM-VITALE/FSV_1.40.1317_Linux/x86_64/Release/installeur/opt/santesocial/fsv/1.40.13/lib");

-fn build_bindings(version: &str, target_code: &str) -> PathBuf {
+    // Configure for various targets
+    let target_code;
+
+    // Use CARGO configuration env Variable, because !cfg(target_os) is not available in build.rs
+    // Source: https://kazlauskas.me/entries/writing-proper-buildrs-scripts
     let target = env::var("TARGET").expect("TARGET not set");
-    let wrapper_path = format!("bindgen-wrappers/{}/wrapper.{}.h", version, target_code);
+    let target_os = env::var("CARGO_CFG_TARGET_OS");
+
+    println!("Target: {:?}", target);
+
+    match target_os.as_ref().map(|x| &**x) {
+        Ok("linux") => {
+            println!("Building for Linux");
+            // lib_name = "ssvlux64";
+            target_code = "linux";
+        },
+        Ok("windows") => {
+            println!("Building for Windows");
+            // lib_name = "Ssvw64";
+            target_code = "win";
+        },
+        Ok("macos") => {
+            println!("Building for MacOS");
+            // lib_name = "ssvosx";
+            target_code = "macosx";
+        },
+        tos => panic!("Unsupported target_os {:?}", tos),
+    }
+
+    // Link the library
+    // println!("cargo:rustc-link-lib={}", lib_name);
+
+    // Build the bindings
+    let wrapper_path = format!("vendor/fsv/1.40.14.13/includes/wrapper.{}.h", target_code);
     let bindings = bindgen::Builder::default()
         // The input header we would like to generate
         // bindings for.
@@ -23,35 +55,8 @@ fn build_bindings(version: &str, target_code: &str) -> PathBuf {
         .expect("Unable to generate bindings");

     // Write the bindings to the $OUT_DIR/bindings.rs file.
-    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
-    let out_file = format!("bindings_{}.rs", version);
-    let out_path = out_dir.join(out_file);
+    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
     bindings
-        .write_to_file(&out_path)
-        .expect("Couldn't write bindings! ");
-    out_path
-}
-
-fn get_target_code() -> String {
-    // Use CARGO configuration env Variable, because !cfg(target_os) is not available in build.rs
-    // Source: https://kazlauskas.me/entries/writing-proper-buildrs-scripts
-    let target_os = env::var("CARGO_CFG_TARGET_OS");
-
-    match target_os.as_ref().map(|x| &**x) {
-        Ok("linux") => "linux", // lib_name = "ssvlux64";
-        Ok("windows") => "win", // lib_name = "Ssvw64";
-        Ok("macos") => "macosx", // lib_name = "ssvosx";
-        tos => panic!("Unsupported target_os {:?}", tos),
-    }
-    .to_string()
-}
-
-fn main() {
-    let target_code = get_target_code();
-    // Build the bindings for each supported version of FSV
-    let bindings_paths: Vec<PathBuf> = SUPPORTED_FSV_VERSIONS
-        .iter()
-        .map(|version| build_bindings(version, &target_code))
-        .collect();
-    println!("FSV bindings generated: {:#?}", bindings_paths);
+        .write_to_file(out_path.join("bindings.rs"))
+        .expect("Couldn't write bindings!");
 }
@@ -1,12 +0,0 @@
-#![allow(non_upper_case_globals)]
-#![allow(non_camel_case_types)]
-#![allow(non_snake_case)]
-#![allow(unused)]
-
-pub mod BINDINGS_V1_40_14 {
-    include!(concat!(env!("OUT_DIR"), "/bindings_1.40.14.rs"));
-}
-
-pub mod BINDINGS_V1_40_13 {
-    include!(concat!(env!("OUT_DIR"), "/bindings_1.40.13.rs"));
-}
@ -1,23 +1,31 @@
|
|||||||
|
#![allow(non_upper_case_globals)]
|
||||||
|
#![allow(non_camel_case_types)]
|
||||||
#![allow(non_snake_case)]
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
use std::marker::PhantomData;
|
pub mod BINDINGS {
|
||||||
|
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
|
||||||
mod bindings;
|
|
||||||
use bindings::*;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
|
||||||
pub enum SupportedFsvVersion {
|
|
||||||
V1_40_14, // 1.40.14
|
|
||||||
V1_40_13, // 1.40.13
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SupportedFsvVersion {
|
// We need to keep the this use statement to get `ssv_function` macro working well
|
||||||
fn as_str(&self) -> &'static str {
|
use BINDINGS::*;
|
||||||
match self {
|
|
||||||
Self::V1_40_14 => "1.40.14",
|
/// Macro to generate a function that implements a call to an external function in BINDINGS
|
||||||
Self::V1_40_13 => "1.40.13",
|
macro_rules! ssv_function {
|
||||||
|
($binding:ty, $func_name:ident, {$($arg_name:ident: $arg_type:ty),*}) => {
|
||||||
|
/// # Safety
|
||||||
|
/// This function is unsafe because it calls an external function through FFI.
|
||||||
|
/// The caller must ensure that the provided arguments are valid and that the
|
||||||
|
/// external function is safe to call.
|
||||||
|
pub unsafe fn $func_name(&self, $($arg_name: $arg_type),*) -> Result<u16, Error> {
|
||||||
|
let func_struct: libloading::Symbol<'_, $binding> =
|
||||||
|
unsafe { self.library.get(stringify!($binding).as_bytes())? };
|
||||||
|
let func = match *func_struct {
|
||||||
|
Some(func) => func,
|
||||||
|
None => return Err(Error::SymbolMissing(stringify!($binding))),
|
||||||
|
};
|
||||||
|
Ok(func($($arg_name),*))
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(thiserror::Error, Debug)]
|
#[derive(thiserror::Error, Debug)]
|
||||||
@ -28,81 +36,39 @@ pub enum Error {
|
|||||||
SymbolMissing(&'static str),
|
SymbolMissing(&'static str),
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Macro to generate a function that implements a call to an external function in BINDINGS
|
|
||||||
macro_rules! ssv_function {
|
|
||||||
($binding:ty, $func_name:ident, {$($arg_name:ident: $arg_type:ty),*}) => {
|
|
||||||
/// # Safety
|
|
||||||
/// This function is unsafe because it calls an external function through FFI.
|
|
||||||
/// The caller must ensure that the provided arguments are valid and that the
|
|
||||||
/// external function is safe to call.
|
|
||||||
pub unsafe fn $func_name(&self, $($arg_name: $arg_type),*) -> Result<u16, Error> {
|
|
||||||
let symbol_name = match stringify!($binding)
|
|
||||||
.split(&[' ', ':'])
|
|
||||||
.last() {
|
|
||||||
Some(name) => name,
|
|
||||||
None => return Err(Error::SymbolMissing(stringify!($binding))),
|
|
||||||
};
|
|
||||||
let func_struct: libloading::Symbol<'_, $binding> =
|
|
||||||
unsafe { self.library.get(symbol_name.as_bytes())? };
|
|
||||||
let func = match *func_struct {
|
|
||||||
Some(func) => func,
|
|
||||||
None => return Err(Error::SymbolMissing(stringify!($binding))),
|
|
||||||
};
|
|
||||||
Ok(func($($arg_name),*))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/// `sealed::Sealed` trait is used to prevent external crates from implementing the LibVersion trait.
|
|
||||||
mod sealed { pub trait Sealed {}}
|
|
||||||
|
|
||||||
/// Wrapper around the SESAM-VITALE library
|
/// Wrapper around the SESAM-VITALE library
|
||||||
/// This struct is responsible for loading the library and providing an interface to call its functions.
|
/// This struct is responsible for loading the library and providing an interface to call its functions.
|
||||||
/// The library is loaded at creation and kept in memory until the struct is dropped.
|
/// The library is loaded at creation and kept in memory until the struct is dropped.
|
||||||
pub trait SSVLibraryCommon {
|
#[derive(Debug)]
|
||||||
fn new(path: &str) -> Result<Self, Error> where Self: Sized;
|
pub struct SSVLibrary {
|
||||||
}
|
|
||||||
|
|
||||||
pub trait SSVLibraryVersion: sealed::Sealed {}
|
|
||||||
|
|
||||||
pub struct V1_40_13 {}
|
|
||||||
impl sealed::Sealed for V1_40_13 {}
|
|
||||||
impl SSVLibraryVersion for V1_40_13 {}
|
|
||||||
|
|
||||||
pub struct V1_40_14 {}
|
|
||||||
impl sealed::Sealed for V1_40_14 {}
|
|
||||||
impl SSVLibraryVersion for V1_40_14 {}
|
|
||||||
|
|
||||||
pub struct SSVLibrary<Version: SSVLibraryVersion> {
|
|
||||||
_version: PhantomData<Version>,
|
|
||||||
library: libloading::Library,
|
library: libloading::Library,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<Version: SSVLibraryVersion> SSVLibraryCommon for SSVLibrary<Version> {
|
pub fn get_library_path() -> String {
|
||||||
fn new(path: &str) -> Result<Self, Error> {
|
// TODO : Use libloading::library_filename to get platform-specific filename ?
|
||||||
let library = unsafe { libloading::Library::new(path)?};
|
"/opt/santesocial/fsv/1.40.13/lib/libssvlux64.so".to_string()
|
||||||
Ok(Self {
|
|
||||||
_version: PhantomData,
|
|
||||||
library
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SSVLibrary<V1_40_14> {
|
impl SSVLibrary {
|
||||||
|
+    pub fn new(library_path: &str) -> Result<Self, Error> {
+        let library = unsafe { libloading::Library::new(library_path)? };
+        Ok(SSVLibrary { library })
+    }

    pub fn library(&self) -> &libloading::Library {
        &self.library
    }

-    ssv_function!(BINDINGS_V1_40_14::SSV_InitLIB2, ssv_init_lib2, {
+    ssv_function!(SSV_InitLIB2, ssv_init_lib2, {
        pcFichierSesam: *const i8
    });

-    ssv_function!(BINDINGS_V1_40_14::SSV_LireConfig, ssv_lire_config, {
+    ssv_function!(SSV_LireConfig, ssv_lire_config, {
        pZDataOut: *mut *mut libc::c_void,
        psTailleDataOut: *mut usize
    });

-    ssv_function!(BINDINGS_V1_40_14::SSV_LireCartePS, ssv_lire_carte_ps, {
+    ssv_function!(SSV_LireCartePS, ssv_lire_carte_ps, {
        NomRessourcePS: *const i8,
        NomRessourceLecteur: *const i8,
        CodePorteurPS: *const i8,
@ -111,51 +77,6 @@ impl SSVLibrary<V1_40_14> {
    });
}

-impl SSVLibrary<V1_40_13> {
-    ssv_function!(BINDINGS_V1_40_13::SSV_InitLIB2, ssv_init_lib2, {
-        pcFichierSesam: *const i8
-    });
-
-    ssv_function!(BINDINGS_V1_40_13::SSV_LireConfig, ssv_lire_config, {
-        pZDataOut: *mut *mut libc::c_void,
-        psTailleDataOut: *mut usize
-    });
-
-    ssv_function!(BINDINGS_V1_40_13::SSV_LireCartePS, ssv_lire_carte_ps, {
-        NomRessourcePS: *const i8,
-        NomRessourceLecteur: *const i8,
-        CodePorteurPS: *const i8,
-        pZDataOut: *mut *mut libc::c_void,
-        pTailleZone: *mut usize
-    });
-}
-
-pub fn get_library_path(version: &SupportedFsvVersion) -> String {
-    let root_path = get_library_root_path();
-    let library_name = get_library_name();
-    let version = version.as_str();
-    format!("{root_path}/{version}/lib/{library_name}")
-}
-
-pub fn sesam_ini_path(version: &SupportedFsvVersion) -> String {
-    let root_path = get_sesam_ini_root_path();
-    let version = version.as_str();
-    format!("{root_path}/{version}/conf/sesam.ini")
-}
-
-fn get_library_name() -> &'static str {
-    // TODO : Use libloading::library_filename to get platform-specific filename ?
-    "libssvlux64.so"
-}
-
-fn get_library_root_path() -> &'static str {
-    "/opt/santesocial/fsv"
-}
-
-fn get_sesam_ini_root_path() -> &'static str {
-    "/etc/opt/santesocial/fsv"
-}
-
#[cfg(test)]
mod test {
    use std::{ffi::CString, ptr};
@ -164,22 +85,22 @@ mod test {

    #[test]
    fn test_initlib2() {
-        let lib_path = &get_library_path(&SupportedFsvVersion::V1_40_13);
-        let ssv_library = SSVLibrary::<V1_40_13>::new(lib_path).expect("SSVLibrary::new failed");
+        let library_path = get_library_path();
+        let ssv_library = SSVLibrary::new(&library_path).expect("SSVLibrary::new failed");

-        let sesam_ini_str =
-            CString::new(sesam_ini_path(&SupportedFsvVersion::V1_40_13)).expect("CString::new failed");
+        let sesam_ini_str = CString::new("/etc/opt/santesocial/fsv/1.40.13/conf/sesam.ini")
+            .expect("CString::new failed");
        let result = unsafe { ssv_library.ssv_init_lib2(sesam_ini_str.as_ptr()) }.unwrap();
        assert_eq!(result, 0);
    }

    #[test]
    fn test_lire_config_and_carte_ps() {
-        let lib_path = &get_library_path(&SupportedFsvVersion::V1_40_13);
-        let ssv_library = SSVLibrary::<V1_40_13>::new(lib_path).expect("SSVLibrary::new failed");
+        let library_path = get_library_path();
+        let ssv_library = SSVLibrary::new(&library_path).expect("SSVLibrary::new failed");

-        let sesam_ini_str =
-            CString::new(sesam_ini_path(&SupportedFsvVersion::V1_40_13)).expect("CString::new failed");
+        let sesam_ini_str = CString::new("/etc/opt/santesocial/fsv/1.40.13/conf/sesam.ini")
+            .expect("CString::new failed");
        let result = unsafe { ssv_library.ssv_init_lib2(sesam_ini_str.as_ptr()) }.unwrap();
        assert_eq!(result, 0);

@ -187,12 +108,11 @@ mod test {
        let mut size: libc::size_t = 0;
        let result = unsafe { ssv_library.ssv_lire_config(&mut buffer_ptr, &mut size) }.unwrap();
        assert_eq!(result, 0);
-        unsafe { libc::free(buffer_ptr) };

-        let nom_ressource_ps =
-            CString::new("Gemalto PC Twin Reader (645D94C3) 00 00").expect("CString::new failed");
-        let nom_ressource_lecteur =
-            CString::new("Gemalto PC Twin Reader (645D94C3) 00 00").expect("CString::new failed");
+        let nom_ressource_ps = CString::new("Gemalto PC Twin Reader (645D94C3) 00 00")
+            .expect("CString::new failed");
+        let nom_ressource_lecteur = CString::new("Gemalto PC Twin Reader (645D94C3) 00 00")
+            .expect("CString::new failed");
        let code_porteur_ps = CString::new("1234").expect("CString::new failed");
        let mut buffer_ptr: *mut libc::c_void = ptr::null_mut();
        let mut size: libc::size_t = 0;
@ -204,9 +124,7 @@ mod test {
                &mut buffer_ptr,
                &mut size,
            )
-        }
-        .unwrap();
+        }.unwrap();
        assert_eq!(result, 0);
-        unsafe { libc::free(buffer_ptr) };
    }
}
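For readers skimming this diff, here is a minimal sketch of how the low-level `fsv-sys` wrappers above are meant to be driven end to end. It only reuses names visible in the diff (`get_library_path`, `sesam_ini_path`, `SSVLibrary::<V1_40_13>`, the generated `ssv_*` wrappers); the `main` scaffolding and boxed error handling are assumptions, not code from either branch.

```rust
use std::{ffi::CString, ptr};

use fsv_sys::{get_library_path, sesam_ini_path, SSVLibrary, SupportedFsvVersion, V1_40_13};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Load the version-specific shared library (versioned API from the feature branch).
    let lib_path = get_library_path(&SupportedFsvVersion::V1_40_13);
    let ssv = SSVLibrary::<V1_40_13>::new(&lib_path)?;

    // SSV_InitLIB2 must be called once, with the path to sesam.ini.
    let ini = CString::new(sesam_ini_path(&SupportedFsvVersion::V1_40_13))?;
    let rc = unsafe { ssv.ssv_init_lib2(ini.as_ptr()) }?;
    assert_eq!(rc, 0);

    // SSV_LireConfig fills an output buffer that the caller must free.
    let mut buffer: *mut libc::c_void = ptr::null_mut();
    let mut size: libc::size_t = 0;
    let rc = unsafe { ssv.ssv_lire_config(&mut buffer, &mut size) }?;
    assert_eq!(rc, 0);
    unsafe { libc::free(buffer) };
    Ok(())
}
```

The pattern mirrors the tests above: initialise once with `sesam.ini`, then every `SSV_*` call returns a status code plus a C-allocated buffer that the caller is responsible for releasing.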
7 crates/fsv-sys/vendor/fsv/1.40.14.13/includes/wrapper.linux.h vendored Normal file
@ -0,0 +1,7 @@
#ifndef WRAPPER_LINUX_H
#define WRAPPER_LINUX_H

#include "SYS_DEF/linux/mc_sys_def.h"
#include "SSV/pourFSV1.40.14/ssv.h"

#endif // WRAPPER_LINUX_H

7 crates/fsv-sys/vendor/fsv/1.40.14.13/includes/wrapper.macosx.h vendored Normal file
@ -0,0 +1,7 @@
#ifndef WRAPPER_MACOSX_H
#define WRAPPER_MACOSX_H

#include "SYS_DEF/macosx/mc_sys_def.h"
#include "SSV/pourFSV1.40.14/ssv.h"

#endif // WRAPPER_MACOSX_H

7 crates/fsv-sys/vendor/fsv/1.40.14.13/includes/wrapper.win.h vendored Normal file
@ -0,0 +1,7 @@
#ifndef WRAPPER_WIN_H
#define WRAPPER_WIN_H

#include "SYS_DEF/win/mc_sys_def.h"
#include "SSV/pourFSV1.40.14/ssv.h"

#endif // WRAPPER_WIN_H
@ -1,20 +0,0 @@
[package]
name = "fsv"
version = "0.1.0"
edition = "2021"

[dependencies]
anyhow = "1.0.89"
libc = "0.2.159"
num_enum = { version = "0.7.3", features = ["complex-expressions"] }
deku = { version = "0.18.1", features = ["logging"] }

thiserror.workspace = true
serde.workspace = true

fsv-sys = { path = "../fsv-sys" }
utils = { path = "../utils" }

#[dev-dependencies]
log = "0.4.22"
env_logger = "0.11.5"
@ -1,223 +0,0 @@
use deku::deku_derive;

use super::{ groups, size_read };

#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
/// # Data: FSV data structure
/// This structure is the core structure to read FSV raw data
/// It handles directly the raw data returned by the FSV library
/// A `Data` structure is composed of multiple `DataBlock` structures
pub struct Data {
    #[deku(read_all)]
    pub blocks: Vec<DataBlock>,
}

#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
/// # Data block structure
/// The `DataBlock` are the main structures inside a `Data` struct
pub struct DataBlock {
    pub header: BlockHeader,
    #[deku(ctx = "header.group_id.0")]
    pub content: DataGroup,
}

#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
/// # Block header structure
/// The `BlockHeader` structure is the header of a `DataBlock`
/// It contains the group ID and the size of the `DataBlock` contained data (`inner` field)
pub struct BlockHeader {

    pub group_id: GroupId,
    #[deku(reader = "size_read(deku::reader)")]
    pub data_size: u64, // This field is not really used, but we have to parse it to move the reader cursor
}

#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
/// # Group ID
/// Allow to identify the type of data contained in a `DataBlock`
/// It is use as matching ID in the `DataGroup` enum. All the
/// IDs are documented on the SSV documentation, pages 23-28
pub struct GroupId(
    #[deku(endian="big", bytes= 2)]
    pub u16,
);

/// # Data group enum
/// This enum is used to match a `DataBlock` content with the
/// correct data structure, able to parse the data contained in
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
#[deku(ctx = "group_id: u16", id = "group_id")]
#[allow(non_camel_case_types)]
pub enum DataGroup {
    #[deku(id = 60)]
    LireConfig_Group60_ConfigHeader(groups::ssv_lire_config::group_60_header_config::ConfigHeader),
    #[deku(id = 61)]
    LireConfig_Group61_ReaderConfig(groups::ssv_lire_config::group_61_reader_config::ReaderConfig),
    #[deku(id = 64)]
    LireConfig_Group64_SVComponentsConfig(groups::ssv_lire_config::group_64_sv_config::SVComponentsConfig),
    #[deku(id = 67)]
    LireConfig_Group67_PCSCReaderConfig(groups::ssv_lire_config::group_67_pcsc_config::PCSCReaderConfig),
}

#[cfg(test)]
mod tests {
    use deku::DekuContainerRead as _;

    use super::*;

    mod deku_testing {
        use super::*;

        #[derive(Debug, PartialEq)]
        #[deku_derive(DekuRead)]
        #[deku(endian = "big")]
        pub struct DekuTest {
            #[deku(bits = 4)]
            pub a: u8,
            #[deku(bits = 4)]
            pub b: u8,
            pub c: u16,
        }

        #[derive(Debug, PartialEq)]
        #[deku_derive(DekuRead)]
        #[deku(endian = "big")]
        pub struct DekuTestWithSizeReader {
            #[deku(bytes = 2)]
            pub id: u16,
            #[deku(reader = "size_read(deku::reader)")]
            pub size: u64,
        }

        #[derive(Debug, PartialEq)]
        #[deku_derive(DekuRead)]
        pub struct DekuTestWithGroupId {
            pub group_id: GroupId,
        }
    }

    #[test]
    fn test_deserialize_deku_test() {
        let buffer: &[u8] = &[0b0110_1001, 0xBE, 0xEF];
        let offset: usize = 0;
        let ((rest, offset), val) = deku_testing::DekuTest::from_bytes((buffer, offset)).unwrap();

        assert_eq!(val.a, 0b0110);
        assert_eq!(val.b, 0b1001);
        assert_eq!(val.c, 0xBEEF);

        assert_eq!(offset, 0);
        assert_eq!(rest, &[]);
    }

    #[test]
    fn test_deserialize_deku_test_with_offset() {
        let buffer: &[u8] = &[0b0000_1111, 0b0110_1001, 0xBE, 0xEF];
        let offset: usize = 8;
        let ((rest, offset), val) = deku_testing::DekuTest::from_bytes((buffer, offset)).unwrap();

        assert_eq!(val.a, 0b0110);
        assert_eq!(val.b, 0b1001);
        assert_eq!(val.c, 0xBEEF);

        assert_eq!(offset, 0);
        assert_eq!(rest, &[]);
    }

    #[test]
    fn test_serialize_deku_test_with_rest() {
        let buffer: &[u8] = &[0b0110_1001, 0xBE, 0xEF, 0x1F, 0x2F];
        let offset: usize = 0;
        let ((rest, offset), val) = deku_testing::DekuTest::from_bytes((buffer, offset)).unwrap();

        assert_eq!(val.a, 0b0110);
        assert_eq!(val.b, 0b1001);
        assert_eq!(val.c, 0xBEEF);

        assert_eq!(offset, 0);
        assert_eq!(rest, &[0x1F, 0x2F]);
    }

    #[test]
    fn test_size_read() {
        let buffer: &[u8] = &[
            0, 60, // ID (60)
            0b0100_0000, // Size type bit (0) + Size (64)
            3, 4, 5, 6, 7, 8, 9, 10, 11, 12 // Extra data (10 bytes ; should be 64)
        ];
        let ((rest, _offset), val) = deku_testing::DekuTestWithSizeReader::from_bytes((buffer, 0)).unwrap();
        assert_eq!(val.id, 60, "EX1: ID");
        assert_eq!(val.size, 64, "EX1: Size");
        assert_eq!(rest.len(), 10, "EX1: Rest");

        let buffer: &[u8] = &[
            0, 60, // ID (60)
            0b1000_0010, // Size type bit (1) + Size block length (2)
            0b0000_0001, 0b0100_0000, // Size (320)
            3, 4, 5, 6, 7, 8, 9, 10, 11, 12 // Extra data (10 bytes ; should be 320)
        ];
        let ((rest, _offset), val) = deku_testing::DekuTestWithSizeReader::from_bytes((buffer, 0)).unwrap();
        assert_eq!(val.id, 60, "EX2: ID");
        assert_eq!(val.size, 320, "EX2: Size");
        println!("{:?}", rest);
        // assert_eq!(val.size, 320, "EX2: Size");
    }

    #[test]
    fn test_endianness() {
        #[derive(Debug, PartialEq)]
        #[deku_derive(DekuRead)]
        struct DekuTest {
            #[deku(endian = "big")]
            field_be: u16,
            #[deku(endian = "little")]
            field_le: u16,
            field_default: u16,
        }

        let buffer: &[u8] = &[
            0xAB, 0xCD,
            0xAB, 0xCD,
            0xAB, 0xCD,
        ];
        let (_rest, result) = DekuTest::from_bytes((buffer, 0)).unwrap();
        assert_eq!(result.field_be, 0xABCD, "0xAB,0xCD - Big Endian");
        assert_eq!(result.field_le, 0xCDAB, "0xAB,0xCD - Little Endian");
        assert_eq!(deku::ctx::Endian::default(), deku::ctx::Endian::Little, "Default Endian");
        assert_eq!(result.field_default, 0xCDAB, "0xAB,0xCD - Default Endian");

        let buffer: &[u8] = &[
            0, 64,
            0, 64,
            0, 64,
        ];
        let (_rest, result) = DekuTest::from_bytes((buffer, 0)).unwrap();
        assert_eq!(result.field_be, 64, "0,64 - Big Endian");
        assert_eq!(result.field_le, 16384, "0,64 - Little Endian");
        assert_eq!(deku::ctx::Endian::default(), deku::ctx::Endian::Little);
        assert_eq!(result.field_default, 16384, "0,64 - Default Endian");
    }

    #[test]
    fn test_group_id() {
        // env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
        let buffer: &[u8] = &[
            0, 60, // ID (60)
        ];
        let (_rest, val) = deku_testing::DekuTestWithGroupId::from_bytes((buffer, 0)).unwrap();
        assert_eq!(val.group_id.0, 60, "EX1: ID");

        let buffer: &[u8] = &[
            7, 118, // ID (1910)
        ];
        let (_rest, val) = deku_testing::DekuTestWithGroupId::from_bytes((buffer, 0)).unwrap();
        assert_eq!(val.group_id.0, 1910, "EX2: ID");
    }
}
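Since `Data`, `DataBlock` and `DataGroup` are the entry points of this parsing layer, here is a short, illustrative sketch of how a caller outside the crate might consume them. The `fsv::` crate path comes from the Cargo.toml above; the `print_readers` helper itself is hypothetical.

```rust
use deku::DekuContainerRead as _;

use fsv::fsv_parsing::blocks::{Data, DataGroup};

fn print_readers(raw: &[u8]) {
    // `Data` reads consecutive `DataBlock`s until the buffer is exhausted (#[deku(read_all)]).
    let ((_rest, _offset), data) = Data::from_bytes((raw, 0)).expect("unparseable FSV buffer");
    for block in data.blocks {
        // The group ID parsed in the block header selects the concrete group structure.
        if let DataGroup::LireConfig_Group67_PCSCReaderConfig(reader) = block.content {
            println!("PC/SC reader: {:?}", reader.name);
        }
    }
}
```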
@ -1,102 +0,0 @@
use std::{fmt, str::FromStr};

use deku::{deku_derive, DekuError};

use super::{ size_read, map_bytes_to_lossy_string };

pub mod ssv_lire_config;

/// # Convert a DataField to a specific type
/// Using this as deku map function to fill a field value from
/// a DataField
fn map_from_data_field<T>(data_field: DataField) -> Result<T, DekuError>
where
    T: FromStr,
    T::Err: std::fmt::Display,
{
    let text = String::from_utf8(data_field.data)
        .map_err(|e| DekuError::Parse(e.to_string().into()))?;
    T::from_str(&text)
        .map_err(|e| DekuError::Parse(e.to_string().into()))
}

// ------------------- DATA FIELD TYPES -------------------

/// # Data field structure
/// This structure is the core structure to read data fields
/// It is usually used by other structures implementing the
/// `#[deku(map = "map_from_data_field")]` attribute
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
struct DataField {
    #[deku(temp, reader = "size_read(deku::reader)")]
    pub data_size: u64,
    #[deku(bytes_read = "data_size")]
    pub data: Vec<u8>,
}

#[deku_derive(DekuRead)]
#[derive(Debug, Clone, PartialEq)]
/// # Numeric string
/// TODO: check if all the characters are numeric
pub struct NumericString(
    #[deku(map = "map_from_data_field")]
    String
);

#[deku_derive(DekuRead)]
#[derive(Debug, Clone, PartialEq)]
pub struct AlphaNumericString(
    #[deku(map = "map_from_data_field")]
    String
);
impl From<&str> for AlphaNumericString {
    fn from(s: &str) -> Self {
        AlphaNumericString(s.to_string())
    }
}

#[deku_derive(DekuRead)]
#[derive(Debug, Clone, PartialEq)]
#[deku(endian = "big")]
/// # Software version
/// An almost standard software version structure in FSV
/// It is composed of a version and a revision, encoded on 2 bytes each
pub struct SoftwareVersion {
    #[deku(temp, reader = "size_read(deku::reader)", assert_eq = "4")]
    data_size: u64,
    #[deku(bytes= 2, map = "|x: [u8; 2]| map_bytes_to_lossy_string(&x)")]
    pub version: String,
    #[deku(bytes= 2, map = "|x: [u8; 2]| map_bytes_to_lossy_string(&x)")]
    pub revision: String,
}
impl fmt::Display for SoftwareVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}.{}", self.version, self.revision)
    }
}

#[cfg(test)]
mod test {
    use deku::DekuContainerRead as _;

    use super::*;

    #[test]
    fn test() {
        let version_bytes: [u8; 2] = [48, 55];
        let version = map_bytes_to_lossy_string(&version_bytes).unwrap();
        assert_eq!(version, "07");
    }

    #[test]
    fn test_software_version() {
        // env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
        let data: [u8; 5] = [4, 48, 55, 50, 48];

        let (_rest, software_version) = SoftwareVersion::from_bytes((&data, 0)).unwrap();
        // assert_eq!(software_version.data_size, 4);
        assert_eq!(software_version.version, "07");
        assert_eq!(software_version.revision, "20");
    }
}
@ -1,295 +0,0 @@
//! # Structures de parsing des données de la fonction SSV_LireConfig
//!
//! Le groupe `ReaderConfig61` décrit ci-dessous est renseigné en cas d’utilisation d’un
//! lecteur homologué sesam-vitale uniquement et non en cas
//! d’utilisation de lecteur(s) PC/SC. dans le cas d’un TL ou TLA
//! configuré en mode PC/SC, un groupe `ReaderConfig61` est restitué pour chaque
//! lecteur exposé par le gestionnaire de ressources PC/SC. les
//! informations sont alors dupliquées dans chacun des groupes `ReaderConfig61`.
//! les informations sur les lecteurs PC/SC sont disponibles
//! dans les groupes `PCSCReaderConfig67`.

use deku::deku_derive;

use super::{AlphaNumericString, NumericString, SoftwareVersion};

/// # En-tête de configuration
/// 1 occurence
pub mod group_60_header_config {
    use super::*;

    /// Groupe 60 - En-tête de configuration
    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct ConfigHeader {
        pub ssv_version: SSVVersionNumber,
        pub galss_version: GALSSVersionNumber,
        pub pss_version: PSSVersionNumber,
    }

    // Fields
    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct SSVVersionNumber(pub SoftwareVersion);

    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct GALSSVersionNumber(pub SoftwareVersion);

    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct PSSVersionNumber(pub SoftwareVersion);
}

/// # Configuration du lecteur
/// 0 à 15 occurences
pub mod group_61_reader_config {
    use super::*;

    /// Groupe 61 - Configuration du lecteur
    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct ReaderConfig {
        pub manufacturer_name: AlphaNumericString, // 15 CA
        pub reader_type: AlphaNumericString, // 30 CA
        pub serial_number: AlphaNumericString, // 20 CA
        pub os: NumericString, // 2 CN
        pub software_count: NumericString, // 2 CN
        pub software_name: AlphaNumericString, // 30 CA
        pub software_version: ReaderSoftwareVersion, // 4 CA
        pub reader_datetime: ReaderSoftwareDate, // 12 CN
        pub software_checksum: AlphaNumericString, // 4 CA
    }

    // Fields
    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct ReaderSoftwareVersion(pub SoftwareVersion);

    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    /// Format « AAAAMMJJhhmm »
    /// TODO: Build a generic date-time structure
    /// TODO: Implement a date parsing, like chrono crate
    pub struct ReaderSoftwareDate(pub AlphaNumericString);
}

/// # Configuration SESAM-Vitale
/// N occurences
pub mod group_64_sv_config {
    use super::*;

    /// Groupe 64 - Configuration SESAM-Vitale
    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct SVComponentsConfig {
        pub id: ComponentID,
        pub description: ComponentDescription,
        pub version: ComponentVersion,
    }

    // Fields
    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct ComponentID(pub NumericString);

    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct ComponentDescription(pub AlphaNumericString);

    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct ComponentVersion(pub AlphaNumericString);
}

/// # Configuration du lecteur PC/SC
/// N occurences
pub mod group_67_pcsc_config {
    use super::*;

    /// Groupe 67 - Configuration du lecteur PC/SC
    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct PCSCReaderConfig {
        pub name: ReaderName,
        pub card_type: CardType,
    }

    // Fields
    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct ReaderName(pub AlphaNumericString);

    #[deku_derive(DekuRead)]
    #[derive(Debug, PartialEq)]
    pub struct CardType(pub NumericString);
}

#[cfg(test)]
mod tests {
    use deku::DekuContainerRead as _;

    use crate::fsv_parsing::blocks::{BlockHeader, Data, DataBlock, DataGroup};

    mod data {
        pub const BUFFER: &[u8] = &[
            0, 60, // Block ID
            15, // Block Size
            4, // SSV Version
            48, 55, 50, 48, // 0720
            4, // GALSS Version
            48, 48, 48, 48, // 0000
            4, // PSS Version
            48, 48, 48, 48, // 0000
            0, 67, // Block ID
            42, // Block Size
            39, // PCSC Reader Name
            71, 101, 109, 97, 108, 116, 111, 32, 80, 67,
            32, 84, 119, 105, 110, 32, 82, 101, 97, 100,
            101, 114, 32, 40, 54, 52, 53, 68, 57, 52,
            67, 51, 41, 32, 48, 48, 32, 48, 48,
            1, // Card type
            50,
            0, 64, // Block ID
            44, // Block Size
            2, // Component ID
            49, 49,
            35, // Component label
            86, 69, 82, 83, 73, 79, 78, 32, 68, 69,
            32, 76, 65, 32, 66, 73, 66, 76, 73, 79,
            84, 72, 69, 81, 85, 69, 32, 68, 85, 32,
            71, 65, 76, 83, 83,
            4, // Component version
            48, 48, 48, 48,
            0, 64, // Block ID
            69, // Block Size
            3, // Component ID
            49, 53, 49,
            27, // Component label
            73, 68, 69, 78, 84, 73, 70, 73, 65, 78,
            84, 32, 85, 78, 73, 81, 85, 69, 32, 68,
            85, 32, 80, 79, 83, 84, 69,
            36, // Component version
            50, 54, 57, 102, 99, 55, 101, 98, 45, 49,
            100, 56, 53, 45, 52, 55, 57, 51, 45, 98,
            55, 48, 101, 45, 51, 55, 49, 99, 51, 56,
            102, 57, 49, 54, 51, 52,
            0, 61, // Block ID
            62, // Block Size
            17, // Manufacturer Name
            84, 69, 83, 84, 32, 77, 65, 78, 85, 70,
            65, 67, 84, 85, 82, 69, 82,
            4, // Reader Type
            84, 69, 83, 84,
            4, // Serial Number
            84, 69, 83, 84,
            2, // OS
            79, 83,
            2, // Software Count
            48, 49,
            4, // Software Name
            84, 69, 83, 84,
            4, // Software Version
            48, 49, 53, 53,
            12, // Reader Datetime
            50, 48, 50, 52, // 2024
            48, 54, 50, 53, // 06-25
            49, 50, 52, 53, // 12:45
            4, // Software Checksum
            49, 50, 51, 52,
        ];
    }

    #[test]
    fn test_lire_config_first_header() {
        // env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
        let buffer = data::BUFFER;
        let offset: usize = 0;

        let ((_rest, _offset), block_header) = BlockHeader::from_bytes((buffer, offset)).unwrap();
        assert_eq!(block_header.group_id.0, 60, "Header ID");
        // assert_eq!(block_header.data_size, 15, "Header Size");
    }

    #[test]
    fn test_lire_config_first_block() {
        // env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
        let buffer = data::BUFFER;
        let offset: usize = 0;

        let ((_rest, _offset), block) = DataBlock::from_bytes((buffer, offset)).unwrap();
        let header = block.header;
        let content = match block.content {
            DataGroup::LireConfig_Group60_ConfigHeader(content) => content,
            _ => panic!("Unexpected data block type"),
        };
        assert_eq!(header.group_id.0, 60, "Header ID");
        assert_eq!(header.data_size, 15, "Header Size");
        assert_eq!(content.ssv_version.0.version, "07", "SSV Version");
        assert_eq!(content.ssv_version.0.revision, "20", "SSV Revision");
        assert_eq!(content.galss_version.0.to_string(), "00.00", "GALSS Version");
        assert_eq!(content.pss_version.0.to_string(), "00.00", "PSS Version");
    }

    #[test]
    fn test_lire_config_all() {
        // env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
        let buffer = data::BUFFER;
        let offset: usize = 0;

        let ((_rest, _offset), data) = Data::from_bytes((buffer, offset)).unwrap();
        let blocks = data.blocks;
        assert_eq!(blocks.len(), 5, "Number of blocks");
        for block in blocks {
            match block.content {
                DataGroup::LireConfig_Group60_ConfigHeader(content) => {
                    assert_eq!(block.header.group_id.0, 60, "Header ID");
                    assert_eq!(block.header.data_size, 15, "Header Size");
                    assert_eq!(content.ssv_version.0.version, "07", "SSV Version");
                    assert_eq!(content.ssv_version.0.revision, "20", "SSV Revision");
                    assert_eq!(content.galss_version.0.to_string(), "00.00", "GALSS Version");
                    assert_eq!(content.pss_version.0.to_string(), "00.00", "PSS Version");
                },
                DataGroup::LireConfig_Group61_ReaderConfig(content) => {
                    assert_eq!(block.header.group_id.0, 61, "Header ID");
                    assert_eq!(block.header.data_size, 62, "Header Size");
                    assert_eq!(content.manufacturer_name.0, "TEST MANUFACTURER", "Manufacturer Name");
                    assert_eq!(content.reader_type.0, "TEST", "Reader Type");
                    assert_eq!(content.serial_number.0, "TEST", "Serial Number");
                    assert_eq!(content.os.0, "OS", "OS");
                    assert_eq!(content.software_count.0, "01", "Software Count");
                    assert_eq!(content.software_name.0, "TEST", "Software Name");
                    assert_eq!(content.software_version.0.version, "01", "Software Version");
                    assert_eq!(content.software_version.0.revision, "55", "Software Revision");
                    assert_eq!(content.reader_datetime.0.0, "202406251245", "Reader Datetime");
                    assert_eq!(content.software_checksum.0, "1234", "Software Checksum");
                },
                DataGroup::LireConfig_Group64_SVComponentsConfig(content) => {
                    assert_eq!(block.header.group_id.0, 64, "Header ID");
                    match content.id.0.0.as_str() {
                        "11" => {
                            assert_eq!(block.header.data_size, 44, "Header Size");
                            assert_eq!(content.id.0.0, "11", "G64 - 11 : Component ID");
                            assert_eq!(content.description.0.0, "VERSION DE LA BIBLIOTHEQUE DU GALSS", "G64 - 11 : Component Description");
                            assert_eq!(content.version.0.0, "0000", "G64 - 11 : Component Version");
                        },
                        "151" => {
                            assert_eq!(block.header.data_size, 69, "Header Size");
                            assert_eq!(content.id.0.0, "151", "G64 - 151 : Component ID");
                            assert_eq!(content.description.0.0, "IDENTIFIANT UNIQUE DU POSTE", "G64 - 151 : Component Description");
                            assert_eq!(content.version.0.0, "269fc7eb-1d85-4793-b70e-371c38f91634", "G64 - 151 : Component Version");
                        },
                        _ => panic!("Unexpected Component ID"),
                    }
                },
                DataGroup::LireConfig_Group67_PCSCReaderConfig(content) => {
                    assert_eq!(block.header.group_id.0, 67, "Header ID");
                    assert_eq!(block.header.data_size, 42, "Header Size");
                    assert_eq!(content.name.0.0, "Gemalto PC Twin Reader (645D94C3) 00 00", "Reader Name");
                    assert_eq!(content.card_type.0.0, "2", "Card Type");
                },
            }
        }
    }
}
@ -1,45 +0,0 @@
use deku::ctx::BitSize;
use deku::prelude::*;
use deku::reader::ReaderRet;
use deku::{reader::Reader, DekuError};

pub mod blocks;
pub mod groups;
pub mod prelude;

pub use blocks::Data;

/// # Read the size of a FSV block / field
/// Documentation: SSV Documentation, page 29
fn size_read<R: std::io::Read + std::io::Seek>(reader: &mut Reader<R>) -> Result<u64, DekuError> {
    let size_bytes = u8::from_reader_with_ctx(reader, BitSize(8))?;
    let size: u64 = if size_bytes & 0b1000_0000 == 0 {
        // If the Most Significant Bit is 0, the size is encoded on 7 bits
        size_bytes.into()
    } else {
        // Else, the 7 following bits indicate the number of bytes of the block containing the size
        let size_block_len: usize = (size_bytes & 0b0111_1111).into();
        if size_block_len > 4 {
            return Err(DekuError::Parse(format!("Unexpected size block length: {}", size_block_len).into()));
        };
        // The block containing the size is encoded on 1 to 4 bytes
        let buffer: &mut [u8; 4] = &mut [0; 4];
        let write_offset = 4 - size_block_len;
        match reader.read_bytes(size_block_len, &mut buffer[write_offset..])? {
            ReaderRet::Bits(_bit_vec) => return Err(DekuError::Parse("Unexpected result reading size bytes: got bits".into())),
            ReaderRet::Bytes => u32::from_be_bytes(*buffer).into(),
        }
    };
    Ok(size)
}


/// # Map bytes to a lossy string
/// This function is used to map bytes to a string, ignoring invalid UTF-8 characters
/// Example: [0x41, 0x42] -> "AB"
/// Example: [48, 49, 50, 51] -> "0123"
fn map_bytes_to_lossy_string(data: &[u8]) -> Result<String, DekuError> {
    // let data = data.to_vec();
    let version: String = String::from_utf8_lossy(data).to_string();
    Ok(version)
}
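As a worked restatement of the length encoding implemented by `size_read` above (SSV documentation, page 29): a first byte whose MSB is 0 carries the size directly on its low 7 bits; otherwise the low 7 bits give how many big-endian bytes follow to encode the size (1 to 4). The sketch below is illustrative only, independent of deku, and does not check buffer bounds.

```rust
// Hypothetical standalone decoder, equivalent to the rule used by `size_read`.
fn decode_size(bytes: &[u8]) -> u64 {
    let first = bytes[0];
    if first & 0b1000_0000 == 0 {
        // Short form: the size itself, on 7 bits.
        u64::from(first)
    } else {
        // Long form: the next `len` bytes hold the size, big-endian.
        let len = usize::from(first & 0b0111_1111);
        bytes[1..=len].iter().fold(0u64, |acc, b| (acc << 8) | u64::from(*b))
    }
}

fn main() {
    assert_eq!(decode_size(&[0b0100_0000]), 64);              // short form, matches EX1 above
    assert_eq!(decode_size(&[0b1000_0010, 0x01, 0x40]), 320); // long form, matches EX2 above
}
```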
@ -1,6 +0,0 @@
/*! Crate prelude

[What is a prelude?](std::prelude)
*/
pub use deku::DekuContainerRead as _;
pub use super::Data;
@ -1,2 +0,0 @@
pub mod fsv_parsing;
pub mod ssv;
@ -1,183 +0,0 @@
use num_enum::FromPrimitive;
use thiserror::Error;

#[derive(Error, Debug, Eq, PartialEq, FromPrimitive)]
#[repr(u16)]
/// Liste des codes d'erreur retournés par la librairie C SSV
/// Documentation: Manuel de programmation SSV - Annexe A (p. 215)
pub enum SSVErrorCodes {
    #[error("La Carte du Professionnel de Santé est absente du lecteur.")]
    CPSMissing = 0xF001,
    #[error("La Carte du Professionnel de Santé bloquée après trois codes porteur erronés.")]
    CPSBlocked = 0xF002,
    #[error("Le code porteur présenté est erroné.")]
    CPSPinWrong = 0xF003,
    #[error("Carte du Professionnel de Santé non valide ou inexploitable par le Logiciel Lecteur. Vérifier la présence d'un Domaine d'Assurance Maladie (DAM).")]
    CPSInvalid = 0xF004,
    #[error("La Carte du Professionnel de Santé est retirée du lecteur.")]
    CPSRemoved = 0xF005,
    /// - Sécurisation d'une série de lots en cours.
    /// - Pour les fonctions TLA (sauf Identifier TLA) : Cette erreur survient lorsque le simulateur TLA est en mode 1.50.
    /// - Lire Date Lecteur, Mettre à jour Date Lecteur, Lire Droits Vitale : Cette erreur peut survenir lorsque le Logiciel Lecteur ne connaît pas la fonction sollicitée, c'est-à-dire si la version du Logiciel Lecteur est antérieure à 2.00.
    /// - Décharger Données Bénéficiaires : cette erreur peut survenir pour signaler que le format des données issues du lecteur est incompatible avec cette version de SSV.
    #[error("F022: Erreur commune à plusieurs fonctions.")]
    F022 = 0xF022,
    #[error("Message du lecteur incohérent. Débrancher et rebrancher le lecteur.")]
    PCSCInconsistentMessage = 0xF0FF,
    #[error("Le nom de lecteur fourni ne correspond à aucun lecteur reconnu.")]
    PCSCReaderNotFound = 0xF101,
    #[error("La fonction InitLIB2 n'est pas encore appelée ou la fonction TermLIB a déjà été appelée.")]
    FunctionInitLib2NotCalled = 0xF600,
    #[error("La bibliothèque SSV n’est pas chargée en mémoire. Vérifier que la fonction InitLIB2 a bien été appelée.")]
    LibraryNotLoaded = 0xF690, // Warning
    #[error("Carte vitale en opposition.")]
    VitaleOpposition = 0xF6A1,
    #[error("Zone de mémoire non allouée en sortie.")]
    MemoryNotAllocated = 0xF800,
    #[error("Erreur d'allocation de la zone de mémoire en sortie.")]
    MemoryAllocationError = 0xF801,
    #[error("Un des paramètres obligatoires d'entrée est non alloué ou invalide.")]
    InputParameterNotAllocatedOrInvalid = 0xF802,
    #[error("Zone de mémoire spécifiée en entrée non valide. Vérifier que la zone allouée ne dépasse pas la taille maximale autorisée (MAXBLOC).")]
    InputMemoryInvalid = 0xF803,
    #[error("Le format de la zone de mémoire d'entrée ou le nombre de zones mémoire est incorrect.")]
    InputMemoryFormatIncorrect = 0xF810,
    #[error("Problème lors de l’initialisation du protocole. Erreur du Ressource Manager PC/SC. Vérifiez le lecteur.")]
    PCSCProtocolInitError = 0xFF01,
    #[error("Time-out au niveau protocolaire ou transmission déjà en cours avec le lecteur. Vérifiez le lecteur et l'insertion de la carte.")]
    PCSCProtocolTimeout = 0xFF02,
    #[error("Taille insuffisante allouée en entrée d’une fonction du Resource Manager.")]
    PCSCProtocolInputMemoryTooSmall = 0xFF03,
    #[error("Erreur de transmission du protocole. Vérifiez le lecteur et l'insertion de la carte.")]
    PCSCProtocolTransmissionError = 0xFF04,
    #[error("Lecteur absent ou indisponible.")]
    PCSCReaderMissingOrUnavailable = 0xFF05,
    #[error("Le nom du lecteur transmis est inconnu du Resource Manager PC/SC.")]
    PCSCReaderUnknown = 0xFF06,
    #[error("Erreur inconnue remontée par le Resource Manager PC/SC.")]
    PCSCUnknownError = 0xFF07,
    #[error("Erreur interne Resource Manager PC/SC.")]
    PCSCInternalError = 0xFF08,
    #[error("Ressource PC/SC déjà prise en exclusivité. Vérifiez qu'une autre application n'utilise pas le lecteur.")]
    PCSCResourceAlreadyExclusive = 0xFF09,
    #[error("Protocole incompatible avec la carte à puce. Vérifiez l'insertion de la carte et son état.")]
    PCSCProtocolIncompatible = 0xFF0A,
    #[error("Paramètre incorrect. Erreur interne à la librairie SSV.")]
    PCSCIncorrectParameter = 0xFF0B,
    #[error("Carte absente. Insérez une carte dans le lecteur.")]
    PCSCCardMissing = 0xFF0C,
    #[error("L'état de la carte a été modifié (RAZ ou mise hors tension). Vérifiez si la carte n'a pas été retirée ou si une autre application n'utilise pas la carte.")]
    PCSCCardStateChanged = 0xFF0D,
    #[error("Carte muette ou non supportée. Vérifiez l'insertion de la carte.")]
    PCSCCardUnsupported = 0xFF0E,
    #[error("Code porteur CPS non renseigné.")]
    CPSPinMissing = 0xFF21,
    #[error("Ressource PC/SC déjà prise en exclusivité. Vérifiez que le processus en cours n'utilise pas déjà le lecteur.")]
    PCSCReaderAlreadyExclusiveForCurrentProcess = 0xFF24,
    #[error("Plusieurs lecteurs ou cartes de même type identifiés lors de la détection automatique.")]
    PCSCDuplicatedReadersOrCardsDetected = 0xFF29,
    #[error("Problème de chargement de la librairie cryptographique ou erreur retournée par la librairie cryptographique.")]
    CryptoLibraryError = 0xFF30,
    #[error("Erreurs internes aux Services SESAM-Vitale. Vérifiez les traces.")]
    #[num_enum(alternatives = [0xFFF1..=0xFFFF])]
    SSVInternalError = 0xFFF0,
    #[error("Le fichier `tablebin.smc` est inaccessible en lecture (inexistant ou pas de droits d'accès).")]
    FileMissingTablebinMsc = 0xF610, // tablebin.smc
    #[error("Le fichier `scripts.sms` est inaccessible en lecture (inexistant ou pas de droits d'accès).")]
    FileMissingScriptsSms = 0xF611, // scripts.sms
    #[error("Le fichier `tablebin.ssv` est inaccessible en lecture (inexistant ou pas de droits d'accès).")]
    FileMissingTablebinSsv = 0xF612, // tablebin.ssv
    #[error("Le fichier `script.ssv` est inaccessible en lecture (inexistant ou pas de droits d'accès).")]
    FileMissingScriptSsv = 0xF613, // script.ssv
    #[error("La version du fichier `tablebin.smc` est incompatible avec la bibliothèque des SSV.")]
    FileVersionIncompatibleTablebinMsc = 0xF620, // tablebin.smc
    #[error("La version du fichier `scripts.sms` est incompatible avec la bibliothèque des SSV.")]
    FileVersionIncompatibleScriptsSms = 0xF621, // scripts.sms
    #[error("La version du fichier `tablebin.ssv` est incompatible avec la bibliothèque des SSV.")]
    FileVersionIncompatibleTablebinSsv = 0xF622, // tablebin.ssv
    #[error("La version du fichier `script.ssv` est incompatible avec la bibliothèque des SSV.")]
    FileVersionIncompatibleScriptSsv = 0xF623, // script.ssv
    #[error("L'intégrité du fichier `tablebin.smc` est incorrecte.")]
    FileIntegrityIncorrectTablebinMsc = 0xF630, // tablebin.smc
    #[error("L'intégrité du fichier `scripts.sms` est incorrecte.")]
    FileIntegrityIncorrectScriptsSms = 0xF631, // scripts.sms
    #[error("L'intégrité du fichier `tablebin.ssv` est incorrecte.")]
    FileIntegrityIncorrectTablebinSsv = 0xF632, // tablebin.ssv
    #[error("L'intégrité du fichier `script.ssv` est incorrecte.")]
    FileIntegrityIncorrectScriptSsv = 0xF633, // script.ssv
    #[error("La structure interne du fichier `tablebin.smc` est invalide.")]
    FileStructureInvalidTablebinMsc = 0xF640, // tablebin.smc
    #[error("La structure interne du fichier `scripts.sms` est invalide.")]
    FileStructureInvalidScriptsSms = 0xF641, // scripts.sms
    #[error("La structure interne du fichier `tablebin.ssv` est invalide.")]
    FileStructureInvalidTablebinSsv = 0xF642, // tablebin.ssv
    #[error("La structure interne du fichier `script.ssv` est invalide.")]
    FileStructureInvalidScriptSsv = 0xF643, // script.ssv
    #[error("Le fichier `tablebin.smc` n'a pas pu être chargé en mémoire. Essayez de libérer de la mémoire.")]
    FileLoadFailedTablebinMsc = 0xF650, // tablebin.smc
    #[error("Le fichier `scripts.sms` n'a pas pu être chargé en mémoire. Essayez de libérer de la mémoire.")]
    FileLoadFailedScriptsSms = 0xF651, // scripts.sms
    #[error("Le fichier `tablebin.ssv` n'a pas pu être chargé en mémoire. Essayez de libérer de la mémoire.")]
    FileLoadFailedTablebinSsv = 0xF652, // tablebin.ssv
    #[error("Le fichier `script.ssv` n'a pas pu être chargé en mémoire. Essayez de libérer de la mémoire.")]
    FileLoadFailedScriptSsv = 0xF653, // script.ssv
    #[error("Le nom du fichier `tablebin.smc` est invalide.")]
    FileNameInvalidTablebinMsc = 0xF660, // tablebin.smc
    #[error("Le nom du fichier `scripts.sms` est invalide.")]
    FileNameInvalidScriptsSms = 0xF661, // scripts.sms
    #[error("Le nom du fichier `tablebin.ssv` est invalide.")]
    FileNameInvalidTablebinSsv = 0xF662, // tablebin.ssv
    #[error("Le nom du fichier `script.ssv` est invalide.")]
    FileNameInvalidScriptSsv = 0xF663, // script.ssv
    #[error("La fonction Initialiser Librairie est déjà appelée.")]
    FunctionInitLib2AlreadyCalled = 0xF670, // Warning
    #[error("Le fichier SESAM.INI est inaccessible en lecture (fichier ou droit d’accès manquant) ou ne contient pas le chemin des tables binaires des SSV.")]
    SesamIniMissingFileOrTablebinPath = 0xF680,
    #[error("Le chemin du répertoire de travail est absent du fichier SESAM.INI.")]
    SesamIniMissingWorkDir = 0xF6F1,
    #[error("Les fichiers d’extension adm ne sont pas accessibles en écriture.")]
    AdmFilesNotWritable = 0xF6F2, // Warning
    #[error("Aucune version de FSV du socle technique trouvé. Vérifier que la version du fichier script.sms est bonne.")]
    NoFsvVersionFound = 0xF6F4,
    #[error("Librairie SGD absente ou incomplète.")]
    LibraryMissingOrIncompleteSGD = 0xF6F5,
    #[error("Librairie SMC absente ou incomplète.")]
    LibraryMissingOrIncompleteSMC = 0xF6F6,
    #[error("Librairie SJS absente ou incomplète.")]
    LibraryMissingOrIncompleteSJS = 0xF6F7,
    #[error("Librairie SMS absente ou incomplète.")]
    LibraryMissingOrIncompleteSMS = 0xF6F8,
    #[error("Section MGC absente / clé RepertoireConfigTrace absente / fichier log4crc.xml non trouvé à l’emplacement indiqué par la clé RepertoireConfigTrace du fichier SESAM.INI.")]
    SesamIniTracingConfigMissing = 0xFF22, // Warning
    #[error("Interface Full PC/SC : problème de chargement de la librairie cryptographique ou erreur retournée par la librairie cryptographique.")]
    PCSCInterfaceCryptoLibraryError = 0xFF25,
    #[error("Valorisation incorrecte des paramètres de gestion de l'accès aux ressources dans le SESAM.INI. Vérifier les valeurs des clés tempoexclusivite, repetitionexclusivite, tempoexclusivitePCSC, repetitionexclusivitePCSC")]
    SesamIniResourceAccessParamsIncorrect = 0xFF2A,
    #[num_enum(catch_all)]
    #[error("Erreur inattendue de la librairie SSV (code d'erreur: {0}).")]
    Unexpected(u16),
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_code_ranges() {
        let error_code = 0xFFF1;
        let error = SSVErrorCodes::from(error_code);
        assert_eq!(error, SSVErrorCodes::SSVInternalError);

        let error_code = 0xFFF8;
        let error = SSVErrorCodes::from(error_code);
        assert_eq!(error, SSVErrorCodes::SSVInternalError);
    }

    #[test]
    fn test_catch_all() {
        let error_code = 0xFBFF; // Not a valid error code
        let error = SSVErrorCodes::from(error_code);
        assert_eq!(error, SSVErrorCodes::Unexpected(0xFBFF));
    }
}
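To make the intended use of this enum concrete: thanks to `FromPrimitive` and the `catch_all`/`alternatives` attributes, every raw `u16` returned by the C library maps to exactly one variant. Below is a small hedged sketch, written as if it sat next to the enum inside `errors_ssv.rs`; the `check` helper is hypothetical and not part of the diffed code.

```rust
// Hypothetical helper, assumed to live alongside `SSVErrorCodes`.
fn check(result: u16) -> Result<(), SSVErrorCodes> {
    if result == 0 {
        Ok(())
    } else {
        Err(SSVErrorCodes::from(result))
    }
}

#[test]
fn check_maps_raw_codes() {
    assert!(check(0).is_ok());
    assert_eq!(check(0xF003).unwrap_err(), SSVErrorCodes::CPSPinWrong);
    // Any code in 0xFFF1..=0xFFFF folds into SSVInternalError via #[num_enum(alternatives)].
    assert_eq!(check(0xFFF4).unwrap_err(), SSVErrorCodes::SSVInternalError);
}
```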
@ -1,227 +0,0 @@
use std::{ffi::CString, ptr};

use thiserror::Error;

use fsv_sys::{
    get_library_path,
    Error as FsvError,
    SSVLibrary,
    SSVLibraryCommon,
    SupportedFsvVersion,
    V1_40_13,
    V1_40_14
};

mod errors_ssv;

use errors_ssv::SSVErrorCodes;
use crate::fsv_parsing::prelude::*;

#[derive(Error, Debug)]
pub enum Error {
    #[error(transparent)]
    FSVSysLibrary(#[from] FsvError),
    #[error(transparent)]
    SSVError(#[from] SSVErrorCodes),
}

/// Enum to hold the different versions of the SSV library
pub enum SsvLibraryVersion {
    V1_40_13(SSVLibrary<V1_40_13>),
    V1_40_14(SSVLibrary<V1_40_14>),
}

/// Struct to hold the SSV library and access its functions
pub struct SSV {
    library: SsvLibraryVersion,
}

impl SSV {
    pub fn new(version: SupportedFsvVersion) -> Result<Self, Error> {
        let library = match version {
            SupportedFsvVersion::V1_40_13 => {
                let lib_path = get_library_path(&version);
                let library = SSVLibrary::<V1_40_13>::new(&lib_path)?;
                SsvLibraryVersion::V1_40_13(library)
            },
            SupportedFsvVersion::V1_40_14 => {
                let lib_path = get_library_path(&version);
                let library = SSVLibrary::<V1_40_14>::new(&lib_path)?;
                SsvLibraryVersion::V1_40_14(library)
            },
        };
        Ok(Self {
            library,
        })
    }

    /// # Initialize the SSV library
    /// Implement: SSV_InitLIB2
    pub fn init_library(&self, sesam_ini_path: &str) -> Result<(), Error> {
        let sesam_ini_path = CString::new(sesam_ini_path).expect("CString::new failed");
        let result = match &self.library {
            SsvLibraryVersion::V1_40_13(library) => {
                unsafe { library.ssv_init_lib2(sesam_ini_path.as_ptr()) }?
            },
            SsvLibraryVersion::V1_40_14(library) => {
                unsafe { library.ssv_init_lib2(sesam_ini_path.as_ptr()) }?
            },
        };
        if result != 0 {
            let error = SSVErrorCodes::from(result);
            return Err(Error::SSVError(error));
        }
        Ok(())
    }

    /// # Read the CPS card
    /// Implement: SSV_LireCartePS
    pub fn read_professional_card(&self, pin_code: &str) -> Result<(), Error> {
        let pcsc_reader_name = "Gemalto PC Twin Reader (645D94C3) 00 00";

        let pin_code = CString::new(pin_code).expect("CString::new failed");
        let pcsc_reader_name = CString::new(pcsc_reader_name).expect("CString::new failed");
        let mut out_buffer_ptr: *mut libc::c_void = ptr::null_mut();
        let mut out_buffer_size: libc::size_t = 0;

        let result = match &self.library {
            SsvLibraryVersion::V1_40_13(library) => {
                unsafe { library.ssv_lire_carte_ps(
                    pcsc_reader_name.as_ptr(),
                    pcsc_reader_name.as_ptr(),
                    pin_code.as_ptr(),
                    &mut out_buffer_ptr,
                    &mut out_buffer_size)
                }?
            },
            SsvLibraryVersion::V1_40_14(library) => {
                unsafe { library.ssv_lire_carte_ps(
                    pcsc_reader_name.as_ptr(),
                    pcsc_reader_name.as_ptr(),
                    pin_code.as_ptr(),
                    &mut out_buffer_ptr,
                    &mut out_buffer_size)
                }?
            },
        };

        if result != 0 {
            // Free memory
            unsafe { libc::free(out_buffer_ptr) };
            let error = SSVErrorCodes::from(result);
            return Err(Error::SSVError(error));
        }
        // Print 10 bytes of the buffer
        let buffer = unsafe { std::slice::from_raw_parts(out_buffer_ptr as *const u8, 10) };
        println!("{:?}", buffer);
        // Free memory
        unsafe { libc::free(out_buffer_ptr) };
        Ok(())
    }

    /// # Get the configuration of the SSV library
    /// Implement: SSV_LireConfig
    pub fn get_config(&self) -> Result<Data, Error> {
        let mut out_buffer_ptr: *mut libc::c_void = ptr::null_mut();
        let mut out_buffer_size: libc::size_t = 0;

        let result = match &self.library {
            SsvLibraryVersion::V1_40_13(library) => {
                unsafe { library.ssv_lire_config(&mut out_buffer_ptr, &mut out_buffer_size) }?
            },
            SsvLibraryVersion::V1_40_14(library) => {
                unsafe { library.ssv_lire_config(&mut out_buffer_ptr, &mut out_buffer_size) }?
            },
        };

        if result != 0 {
            // Free memory
            unsafe { libc::free(out_buffer_ptr) };
            let error = SSVErrorCodes::from(result);
            return Err(Error::SSVError(error));
        }
        // Parse the buffer into a Data struct
        let buffer = unsafe { std::slice::from_raw_parts(out_buffer_ptr as *const u8, out_buffer_size) };
        let (_rest, config_blocks) = Data::from_bytes((buffer, 0)).unwrap();

        // Free memory
        unsafe { libc::free(out_buffer_ptr) };
        Ok(config_blocks)
    }
}

#[cfg(test)]
mod tests {
    use std::env;

    use utils::config::load_config;
    use anyhow::{bail, Result};

    use crate::fsv_parsing::blocks::DataGroup;

    use super::*;

    mod setup {
        use super::*;

        pub fn init() -> Result<SSV> {
            load_config(None)?;
            let sesam_ini_path = env::var("SESAM_INI_PATH").expect("SESAM_INI_PATH must be set");
            let lib = SSV::new(SupportedFsvVersion::V1_40_13)?;
            lib.init_library(&sesam_ini_path)?;
            Ok(lib)
        }
    }

    #[test]
    #[ignore="Not working with other tests using SSV library in parallel - Need to fix"]
    // We should implement a way to initialize the library only once
    // Or implement them sequentially with [serial_test crate](https://docs.rs/serial_test/latest/serial_test)
    fn test_init_library() -> Result<()> {
        setup::init()?;
        Ok(())
    }

    #[test]
    #[ignore="WARNING: Read the card with PIN 1234 - Risk of blocking the card"]
    fn test_read_professional_card_good_pin() -> Result<()> {
        let lib = setup::init()?;
        let pin_code = "1234";
        lib.read_professional_card(pin_code)?;
        Ok(())
    }

    #[test]
    #[ignore="WARNING: Read the card with PIN 0000 - Risk of blocking the card"]
    fn test_read_professional_card_bad_pin() -> Result<()> {
        let lib = setup::init()?;
        let pin_code = "0000";
        // Should return an error
        let err = lib.read_professional_card(pin_code).unwrap_err();
        assert_eq!(err.to_string(), "Le code porteur présenté est erroné.");
        match err {
            Error::SSVError(err) => {
                assert_eq!(err as SSVErrorCodes, SSVErrorCodes::CPSPinWrong);
            },
            _ => bail!("Error type is not SSVError"),
        }
        Ok(())
    }

    #[test]
    // #[ignore="Needs a valid FSV installation"]
    fn test_get_config() -> Result<()> {
        let lib = setup::init()?;
        let data = lib.get_config()?;
        // I don't know what to assert here ...
        let header_group = data.blocks.first().unwrap();
        assert_eq!(header_group.header.group_id.0, 60);
        let header_content = match &header_group.content {
            DataGroup::LireConfig_Group60_ConfigHeader(content) => { content },
            _ => bail!("Wrong group type"),
        };
        assert_eq!(header_content.ssv_version.0.version, "07");
        assert_eq!(header_content.ssv_version.0.revision, "20");
        Ok(())
    }
}
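Putting the pieces of this removed module together, here is a hedged end-to-end sketch of the high-level API it exposed (`SSV::new`, `init_library`, `get_config`). The names all come from the file above; the `main` wrapper and the use of `anyhow` for error propagation are assumptions for illustration.

```rust
use fsv::ssv::SSV;
use fsv_sys::SupportedFsvVersion;
use utils::config::load_config;

fn main() -> anyhow::Result<()> {
    // Pick up SESAM_INI_PATH from the .env handled by the utils crate.
    load_config(None)?;
    let sesam_ini_path = std::env::var("SESAM_INI_PATH")?;

    // One SSV handle per supported FSV version; SSV_InitLIB2 must run before any other call.
    let ssv = SSV::new(SupportedFsvVersion::V1_40_13)?;
    ssv.init_library(&sesam_ini_path)?;

    // SSV_LireConfig, already parsed into typed blocks by the deku layer.
    let config = ssv.get_config()?;
    println!("{} configuration blocks", config.blocks.len());
    Ok(())
}
```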
@ -4,11 +4,10 @@ version = "0.1.0"
edition = "2021"

[dependencies]
-anyhow.workspace = true
+anyhow = "1.0"
libc = "0.2"
-thiserror.workspace = true
+thiserror = "1.0"

utils = { path = "../utils" }

[build-dependencies]
-dotenv.workspace = true
+dotenv = "0.15"
@ -9,7 +9,7 @@ use thiserror::Error;
use crate::cps::lire_carte;
use crate::libssv::{SSV_InitLIB2, SSV_LireConfig};

-use ::utils::config::{load_config, ConfigError};
+use ::utils::config::load_config;

#[derive(Error, Debug)]
pub enum SSVDemoError {
@ -18,7 +18,7 @@ pub enum SSVDemoError {
    #[error(transparent)]
    SSVLibErrorCode(#[from] crate::libssv::LibSSVError),
    #[error(transparent)]
-    Configuration(#[from] ConfigError),
+    Anyhow(#[from] anyhow::Error),
}

fn ssv_init_lib_2() -> Result<(), SSVDemoError> {
@ -71,7 +71,7 @@ pub fn demo() -> Result<(), SSVDemoError> {

    println!("------- Demo for the SSV library --------");

-    load_config(None)?;
+    load_config()?;

    ssv_init_lib_2()?;

@ -4,7 +4,6 @@ version = "0.1.0"
edition = "2021"

[dependencies]
-anyhow.workspace = true
+anyhow = "1.0"
directories = "5.0"
-dotenv.workspace = true
+dotenv = "0.15"
-thiserror.workspace = true
@@ -1,23 +1,11 @@
-use std::{env, path::PathBuf, sync::atomic::AtomicBool};
+use std::{env, path::PathBuf};

+use anyhow::{bail, Context, Result};
 use directories::ProjectDirs;
 use dotenv::from_path;
-use thiserror::Error;

 const CONFIG_FILE_NAME: &str = ".env";

-static CONFIG_INITIALIZED: AtomicBool = AtomicBool::new(false);
-
-#[derive(Debug, Error)]
-pub enum ConfigError {
-#[error("No config file {0} found in the following directories: {1:#?}")]
-ConfigFileNotFound(String, Vec<PathBuf>),
-#[error("Failed to load config file: {0}")]
-LoadConfigError(#[from] dotenv::Error),
-#[error("Environment variable error: {0}")]
-EnvVarError(#[from] std::env::VarError),
-}
-
 pub fn get_config_dirs() -> Vec<PathBuf> {
 let mut config_dirs = vec![
 PathBuf::from(""), // Current directory
@@ -31,7 +19,7 @@ pub fn get_config_dirs() -> Vec<PathBuf> {
 config_dirs
 }

-pub fn get_config_files() -> Result<Vec<PathBuf>, ConfigError> {
+pub fn get_config_files() -> Result<Vec<PathBuf>> {
 let config_dirs = get_config_dirs();
 let mut config_files = Vec::new();
 for config_dir in config_dirs.iter() {
@@ -41,20 +29,14 @@ pub fn get_config_files() -> Result<Vec<PathBuf>, ConfigError> {
 }
 }
 if config_files.is_empty() {
-return Err(ConfigError::ConfigFileNotFound(
-CONFIG_FILE_NAME.to_string(),
-config_dirs,
-));
+bail!(
+"No config file {CONFIG_FILE_NAME} found in the following directories: {config_dirs:#?}"
+);
 }
 Ok(config_files)
 }

-pub fn load_config(force: Option<bool>) -> Result<(), ConfigError> {
+pub fn load_config() -> Result<()> {
-let force = force.unwrap_or(false);
-if CONFIG_INITIALIZED.load(std::sync::atomic::Ordering::Relaxed) && force {
-println!("DEBUG: Config already initialized, skipping");
-return Ok(());
-}
 let config_files = get_config_files()?;
 // Load the first config file found
 // TODO: add a verbose log to list all config files found
@@ -62,7 +44,5 @@ pub fn load_config(force: Option<bool>) -> Result<(), ConfigError> {
 "DEBUG: Config files found (1st loaded): {:#?}",
 config_files
 );
-from_path(config_files[0].as_path()).map_err(ConfigError::LoadConfigError)?;
+from_path(config_files[0].as_path()).context("Failed to load config file")
-CONFIG_INITIALIZED.store(true, std::sync::atomic::Ordering::Relaxed);
-Ok(())
 }
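After these hunks the utils config module exposes only anyhow-based results: `ConfigError` and the `CONFIG_INITIALIZED` guard are gone, `get_config_files()` bails with a formatted message, and `load_config()` simply loads the first `.env` file found. A caller-side sketch under those assumptions; the `APP_SETTING` variable name is invented purely for illustration:

```rust
use anyhow::{Context, Result};
use utils::config::load_config;

fn main() -> Result<()> {
    // Locate and load the first `.env` file found in the search directories.
    // Any failure (no file found, unreadable file) surfaces as anyhow::Error.
    load_config()?;

    // Hypothetical variable name, not part of the real configuration.
    let setting = std::env::var("APP_SETTING")
        .context("APP_SETTING is not set; is the .env file complete?")?;
    println!("APP_SETTING = {setting}");

    Ok(())
}
```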
@@ -1,15 +0,0 @@
-[package]
-name = "entity"
-version = "0.1.0"
-edition = "2021"
-
-[lib]
-name = "entity"
-path = "src/lib.rs"
-
-[dependencies]
-sea-orm.workspace = true
-serde.workspace = true
-
-[dev-dependencies]
-sea-orm-cli.workspace = true
@@ -1,18 +0,0 @@
-//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.1
-
-use sea_orm::entity::prelude::*;
-use serde::{Deserialize, Serialize};
-
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
-#[sea_orm(table_name = "debug")]
-pub struct Model {
-#[sea_orm(primary_key)]
-pub id: i32,
-pub title: String,
-pub text: String,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,5 +0,0 @@
-//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.1
-
-pub mod prelude;
-
-pub mod debug;
@@ -1,3 +0,0 @@
-//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.1
-
-pub use super::debug::Entity as Debug;
@@ -1,2 +0,0 @@
-mod entities;
-pub use entities::*;
@@ -1,6 +1,5 @@
 <template>
 <div>
-<NuxtLoadingIndicator />
 <NuxtRouteAnnouncer />
 <NavBar />
 <NuxtPage />
@@ -17,6 +17,6 @@
 if (user.avatar) {
 return user.avatar;
 }
-return 'https://i.pravatar.cc/150?u=' + user.name;
+return 'https://avatar.iran.liara.run/username?username=' + user.name;
 };
 </script>
@@ -29,15 +29,15 @@

 const users: User[] = [
 { id: 1, name: 'John Doe', avatar: 'https://img.daisyui.com/images/stock/photo-1534528741775-53994a69daeb.webp' },
-{ id: 2, name: 'Jane Doe', avatar: 'https://i.pravatar.cc/150?u=JANEDOE728' },
+{ id: 2, name: 'Jane Doe', avatar: 'https://avatar.iran.liara.run/public' },
-{ id: 3, name: 'Michel Moulin' },
+{ id: 3, name: 'Michel Moulin', avatar: '' },
-{ id: 4, name: 'Jean Paris' },
+{ id: 4, name: 'Jean Paris', avatar: '' },
-{ id: 5, name: 'Marie Dupont' },
+{ id: 5, name: 'Marie Dupont', avatar: '' },
-{ id: 6, name: 'Émilie Fournier' },
+{ id: 6, name: 'Émilie Fournier', avatar: '' },
-{ id: 7, name: 'Pierre Lefevre' },
+{ id: 7, name: 'Pierre Lefevre', avatar: '' },
-{ id: 8, name: 'Sophie Lemoine' },
+{ id: 8, name: 'Sophie Lemoine', avatar: '' },
-{ id: 9, name: 'Lucie Simon' },
+{ id: 9, name: 'Lucie Simon', avatar: '' },
-{ id: 10, name: 'Kevin Boucher' },
+{ id: 10, name: 'Kevin Boucher', avatar: '' },
 ];

 const loginModal = useTemplateRef('login_modal');
@@ -6,7 +6,6 @@
 <nav class="navbar-center">
 <NuxtLink to="/" class="btn btn-ghost">Accueil</NuxtLink>
 <NuxtLink to="/CPS" class="btn btn-ghost">Carte CPS</NuxtLink>
-<NuxtLink to="/debug" class="btn btn-ghost">Debug</NuxtLink>
 </nav>
 <div class="navbar-end">
 <template v-if="!current_user">
@@ -1,67 +0,0 @@
-<template>
-<div>
-<h1 class="text-3xl mb-8">Debug</h1>
-<div class="stats shadow mb-8">
-<div class="stat">
-<div class="stat-title">DB Ping Status</div>
-<div class="stat-value">{{ data?.db_ping_status || "?" }}</div>
-</div>
-<div class="stat">
-<div class="stat-title">Entries Count</div>
-<div class="stat-value">{{ data?.entries.length || "?" }}</div>
-<div class="stat-actions">
-<button class="btn btn-sm" @click="addRandomEntry">Add entry</button>
-</div>
-</div>
-<div class="stat">
-<div class="stat-title">Network status</div>
-<div class="stat-value">{{ status }}</div>
-<div class="stat-description">{{ error }}</div>
-</div>
-</div>
-<div>
-<h2 class="text-2xl mb-4">Entries</h2>
-<table class="table">
-<thead>
-<tr>
-<th>Id</th>
-<th>Title</th>
-<th>Text</th>
-</tr>
-</thead>
-<tbody>
-<tr v-for="entry in data?.entries" :key="entry.id">
-<td>{{ entry.id }}</td>
-<td>{{ entry.title }}</td>
-<td>{{ entry.text }}</td>
-</tr>
-</tbody>
-</table>
-</div>
-</div>
-</template>
-
-<script setup lang="ts">
-type Entry = {
-id: number;
-title: string;
-text: string;
-};
-type DebugResponse = {
-db_ping_status: string;
-entries: Entry[];
-};
-
-
-// TODO : handle a default backend URL by building a custom `$fetch` and `useFetch` functions with a `baseURL` option : https://nuxt.com/docs/guide/recipes/custom-usefetch#custom-fetch
-
-const { data, refresh, error, status } = await useFetch<DebugResponse>('http://127.0.0.1:8080/debug');
-
-async function addRandomEntry() {
-await $fetch('http://127.0.0.1:8080/debug/add_random', {
-method: 'POST',
-});
-refresh();
-}
-
-</script>
@@ -1,24 +0,0 @@
-[package]
-name = "migration"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-name = "migration"
-path = "src/lib.rs"
-
-[dependencies]
-async-std = { version = "1", features = ["attributes", "tokio1"] }
-
-[dev-dependencies]
-sea-orm-cli.workspace = true
-
-[dependencies.sea-orm-migration]
-version = "1.0.0"
-features = [
-# `ASYNC_RUNTIME` and `DATABASE_DRIVER` are required to run migration using the cli
-# They must be the same as the features in the `sea-orm` dependency in the `app` crate
-"sqlx-sqlite", # `DATABASE_DRIVER` feature
-"runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
-]
@@ -1,41 +0,0 @@
-# Running Migrator CLI
-
-- Generate a new migration file
-```sh
-cargo run -- generate MIGRATION_NAME
-```
-- Apply all pending migrations
-```sh
-cargo run
-```
-```sh
-cargo run -- up
-```
-- Apply first 10 pending migrations
-```sh
-cargo run -- up -n 10
-```
-- Rollback last applied migrations
-```sh
-cargo run -- down
-```
-- Rollback last 10 applied migrations
-```sh
-cargo run -- down -n 10
-```
-- Drop all tables from the database, then reapply all migrations
-```sh
-cargo run -- fresh
-```
-- Rollback all applied migrations, then reapply all migrations
-```sh
-cargo run -- refresh
-```
-- Rollback all applied migrations
-```sh
-cargo run -- reset
-```
-- Check the status of all migrations
-```sh
-cargo run -- status
-```
@@ -1,12 +0,0 @@
-pub use sea_orm_migration::prelude::*;
-
-mod m20220101_000001_create_debug_table;
-
-pub struct Migrator;
-
-#[async_trait::async_trait]
-impl MigratorTrait for Migrator {
-fn migrations() -> Vec<Box<dyn MigrationTrait>> {
-vec![Box::new(m20220101_000001_create_debug_table::Migration)]
-}
-}
@@ -1,35 +0,0 @@
-use sea_orm_migration::{prelude::*, schema::*};
-
-#[derive(DeriveMigrationName)]
-pub struct Migration;
-
-#[async_trait::async_trait]
-impl MigrationTrait for Migration {
-async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
-manager
-.create_table(
-Table::create()
-.table(Debug::Table)
-.if_not_exists()
-.col(pk_auto(Debug::Id))
-.col(string(Debug::Title))
-.col(string(Debug::Text))
-.to_owned(),
-)
-.await
-}
-
-async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
-manager
-.drop_table(Table::drop().table(Debug::Table).to_owned())
-.await
-}
-}
-
-#[derive(DeriveIden)]
-enum Debug {
-Table,
-Id,
-Title,
-Text,
-}
@@ -1,6 +0,0 @@
-use sea_orm_migration::prelude::*;
-
-#[async_std::main]
-async fn main() {
-cli::run_cli(migration::Migrator).await;
-}