Implementation of a first approach to error handling #52
Cargo.lock (generated)
@@ -92,6 +92,7 @@ dependencies = [
 "notify 6.1.1",
 "serde",
 "systemfd",
+"thiserror",
 "tokio",
 "tower-http",
 "tower-livereload",
@@ -1066,6 +1067,7 @@ dependencies = [
 "http",
 "tauri",
 "tauri-build",
+"thiserror",
 "tokio",
 "tower",
 ]
@@ -3915,8 +3917,10 @@ dependencies = [
 name = "sesam-vitale"
 version = "0.1.0"
 dependencies = [
+"anyhow",
 "dotenv",
 "libc",
+"thiserror",
 ]
 
 [[package]]
@@ -10,6 +10,7 @@ axum = "0.7.5"
 listenfd = "1.0.1"
 notify = "6.1.1"
 serde = { version = "1.0.204", features = ["derive"] }
+thiserror = "1.0.63"
 tokio = { version = "1.39.1", features = ["macros", "rt-multi-thread"] }
 tower-http = { version = "0.5.2", features = ["fs"] }
 tower-livereload = "0.9.3"
@@ -3,12 +3,23 @@ use axum::body::Body;
 use axum::http::Request;
 use listenfd::ListenFd;
 use notify::Watcher;
-use std::env;
 use std::path::Path;
+use std::{env, io};
+use thiserror::Error;
 use tokio::net::TcpListener;
 use tower_livereload::predicate::Predicate;
 use tower_livereload::LiveReloadLayer;
 
+#[derive(Error, Debug)]
+pub enum AppError {
+    #[error("Unable to bind to TCP listener")]
+    TCPListener(#[from] std::io::Error),
+    #[error("Error with the notify watcher")]
+    NotifyWatcher(#[from] notify::Error),
+    #[error("Missing environment variable {var}")]
+    MissingEnvVar { var: &'static str },
+}
+
 /// We filter out `htmx` requests so that the JS script handling live reload is not injected into them
 /// See https://github.com/leotaku/tower-livereload/pull/3
 #[derive(Copy, Clone)]
@@ -20,44 +31,47 @@ impl<T> Predicate<Request<T>> for NotHtmxPredicate {
 }
 
 const DEFAULT_LISTENER: &str = "localhost:3000";
-async fn get_tcp_listener() -> TcpListener {
+async fn get_tcp_listener() -> Result<TcpListener, io::Error> {
     let mut listenfd = ListenFd::from_env();
 
-    match listenfd.take_tcp_listener(0).unwrap() {
+    match listenfd.take_tcp_listener(0)? {
         // if we are given a tcp listener on listen fd 0, we use that one
         Some(listener) => {
-            listener.set_nonblocking(true).unwrap();
-            TcpListener::from_std(listener).unwrap()
+            listener.set_nonblocking(true)?;
+            Ok(TcpListener::from_std(listener)?)
         }
         // otherwise fall back to local listening
-        None => TcpListener::bind(DEFAULT_LISTENER).await.unwrap(),
+        None => Ok(TcpListener::bind(DEFAULT_LISTENER).await?),
     }
 }
 
-fn get_livereload_layer(templates_path: &Path) -> LiveReloadLayer<NotHtmxPredicate> {
+fn get_livereload_layer(
+    templates_path: &Path,
+) -> Result<LiveReloadLayer<NotHtmxPredicate>, notify::Error> {
     let livereload = LiveReloadLayer::new();
     let reloader = livereload.reloader();
-    let mut watcher = notify::recommended_watcher(move |_| reloader.reload()).unwrap();
-    watcher
-        .watch(templates_path, notify::RecursiveMode::Recursive)
-        .unwrap();
-    livereload.request_predicate::<Body, NotHtmxPredicate>(NotHtmxPredicate)
+    let mut watcher = notify::recommended_watcher(move |_| reloader.reload())?;
+    watcher.watch(templates_path, notify::RecursiveMode::Recursive)?;
+    Ok(livereload.request_predicate::<Body, NotHtmxPredicate>(NotHtmxPredicate))
 }
 
 #[tokio::main]
-async fn main() {
-    let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
+async fn main() -> Result<(), AppError> {
+    let manifest_dir = env::var("CARGO_MANIFEST_DIR").map_err(|_| AppError::MissingEnvVar {
+        var: "CARGO_MANIFEST_DIR",
+    })?;
     let assets_path = Path::new(&manifest_dir).join("assets");
     let templates_path = Path::new(&manifest_dir).join("templates");
 
-    let livereload_layer = get_livereload_layer(&templates_path);
+    let livereload_layer =
+        get_livereload_layer(&templates_path).map_err(AppError::NotifyWatcher)?;
     let router = get_router(assets_path.as_path()).layer(livereload_layer);
 
-    let listener: TcpListener = get_tcp_listener().await;
-    println!("Listening on: http://{}", listener.local_addr().unwrap());
+    let listener: TcpListener = get_tcp_listener().await.map_err(AppError::TCPListener)?;
+    let local_addr = listener.local_addr().map_err(AppError::TCPListener)?;
+    println!("Listening on: http://{}", local_addr);
 
     // Run the server with the router
-    axum::serve(listener, router.into_make_service())
-        .await
-        .unwrap();
+    axum::serve(listener, router.into_make_service()).await?;
+    Ok(())
 }
@@ -21,4 +21,5 @@ tokio = "1.39.1"
 app = { path = "../app" }
 http = "1.1.0"
 bytes = "1.6.1"
+thiserror = "1.0.63"
@@ -1,21 +1,30 @@
+use axum::body::{to_bytes, Body};
+use axum::Router;
+use bytes::Bytes;
+use http::{request, response, Request, Response};
 use std::path::PathBuf;
 use std::sync::Arc;
 
-use axum::body::{to_bytes, Body};
-
-use axum::Router;
-
 use tauri::path::BaseDirectory;
 use tauri::Manager;
+use thiserror::Error;
 use tokio::sync::{Mutex, MutexGuard};
 use tower::{Service, ServiceExt};
 
+#[derive(Error, Debug)]
+pub enum DesktopError {
+    #[error("Axum error:\n{0}")]
+    Axum(#[from] axum::Error),
+    #[error("Infallible error")]
+    Infallible(#[from] std::convert::Infallible),
+}
+
 /// Process requests sent to Tauri (with the `axum://` protocol) and handle them with Axum
 /// When an error occurs, this function is expected to panic, which should result in a 500 error
 /// being sent to the client, so we let the client handle the error recovering
 async fn process_tauri_request(
     tauri_request: Request<Vec<u8>>,
     mut router: MutexGuard<'_, Router>,
-) -> Response<Vec<u8>> {
+) -> Result<Response<Vec<u8>>, DesktopError> {
     let (parts, body): (request::Parts, Vec<u8>) = tauri_request.into_parts();
     let axum_request: Request<Body> = Request::from_parts(parts, body.into());
 
@@ -23,17 +32,16 @@ async fn process_tauri_request(
         .as_service()
         .ready()
         .await
-        .expect("Failed to get ready service from router")
+        .map_err(DesktopError::Infallible)?
         .call(axum_request)
         .await
-        .expect("Could not get response from router");
+        .map_err(DesktopError::Infallible)?;
 
     let (parts, body): (response::Parts, Body) = axum_response.into_parts();
-    let body: Bytes = to_bytes(body, usize::MAX).await.unwrap_or_default();
+    let body: Bytes = to_bytes(body, usize::MAX).await?;
 
     let tauri_response: Response<Vec<u8>> = Response::from_parts(parts, body.into());
 
-    tauri_response
+    Ok(tauri_response)
 }
 
 #[cfg_attr(mobile, tauri::mobile_entry_point)]
@@ -43,7 +51,7 @@ pub fn run() {
             let assets_path: PathBuf = app
                 .path()
                 .resolve("assets", BaseDirectory::Resource)
-                .expect("Path should be resolvable");
+                .expect("Assets path should be resolvable");
 
             // Adds Axum router to application state
             // This makes it so we can retrieve it from any app instance (see below)
@@ -60,8 +68,19 @@ pub fn run() {
             // Spawn a new async task to process the request
             tauri::async_runtime::spawn(async move {
                 let router = router.lock().await;
-                let response = process_tauri_request(request, router).await;
-                responder.respond(response);
+                match process_tauri_request(request, router).await {
+                    Ok(response) => responder.respond(response),
+                    Err(err) => {
+                        let body = format!("Failed to process an axum:// request:\n{}", err);
+                        responder.respond(
+                            http::Response::builder()
+                                .status(http::StatusCode::BAD_REQUEST)
+                                .header(http::header::CONTENT_TYPE, "text/plain")
+                                .body::<Vec<u8>>(body.into())
+                                .expect("BAD_REQUEST response should be valid"),
+                        )
+                    }
+                }
             });
         })
         .run(tauri::generate_context!())
@@ -4,8 +4,10 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
+anyhow = "1.0.86"
 dotenv = "0.15"
 libc = "0.2"
+thiserror = "1.0.63"
 
 [build-dependencies]
 dotenv = "0.15"
@@ -5,7 +5,7 @@ use std::path::PathBuf;
 
 fn main() {
     // Load the .env.build file for build-time environment variables
-    let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
+    let manifest_dir = env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR must be set");
     let manifest_path = PathBuf::from(manifest_dir);
     dotenv::from_path(manifest_path.join(".env.build")).ok();
 
@@ -22,12 +22,13 @@ fn main() {
     );
 
     // Add the SESAM_FSV_LIB_PATH to the linker search path
-    let fsv_lib_path = PathBuf::from(env::var("SESAM_FSV_LIB_PATH").unwrap());
+    let fsv_lib_path =
+        PathBuf::from(env::var("SESAM_FSV_LIB_PATH").expect("SESAM_FSV_LIB_PATH must be set"));
     println!("cargo::rustc-link-search=native={}", fsv_lib_path.display());
 
     // Add the SESAM_FSV_LIB_PATH to the PATH environment variable
     if cfg!(target_os = "windows") {
-        let path = env::var("PATH").unwrap_or(String::new());
+        let path = env::var("PATH").unwrap_or_default();
        println!("cargo:rustc-env=PATH={};{}", fsv_lib_path.display(), path);
     } else if cfg!(target_os = "linux") {
         println!("cargo:rustc-env=LD_LIBRARY_PATH={}", fsv_lib_path.display());
@@ -36,7 +37,7 @@ fn main() {
     // Link the SESAM_FSV_SSVLIB dynamic library
     println!(
         "cargo::rustc-link-lib=dylib={}",
-        env::var("SESAM_FSV_SSVLIB").unwrap()
+        env::var("SESAM_FSV_SSVLIB").expect("SESAM_FSV_SSVLIB must be set")
     );
     // TODO : try `raw-dylib` instead of `dylib` on Windows to avoid the need of the `lib` headers compiled from the `def`
 }
@@ -1,9 +1,24 @@
 use libc::{c_void, size_t};
 use std::ffi::CString;
 use std::ptr;
+use thiserror::Error;
 
-use crate::libssv::SSV_LireCartePS;
-use crate::ssv_memory::{decode_ssv_memory, Block};
+use crate::libssv::{self, SSV_LireCartePS};
+use crate::ssv_memory::{decode_ssv_memory, Block, SSVMemoryError};
+
+#[derive(Error, Debug)]
+pub enum CartePSError {
+    #[error("Unknown (group, field) pair: ({group}, {field})")]
+    UnknownGroupFieldPair { group: u16, field: u16 },
+    #[error("CString creation error: {0}")]
+    CString(#[from] std::ffi::NulError),
+    #[error("Unable to get the last situation while parsing a CartePS")]
+    InvalidLastSituation,
+    #[error(transparent)]
+    SSVMemory(#[from] SSVMemoryError),
+    #[error(transparent)]
+    SSVLibErrorCode(#[from] libssv::LibSSVError),
+}
 
 #[derive(Debug, Default)]
 pub struct CartePS {
@@ -52,10 +67,10 @@ struct SituationPS {
     habilitation_à_signer_un_lot: String,
 }
 
-pub fn lire_carte(code_pin: &str, lecteur: &str) -> Result<CartePS, String> {
-    let resource_ps = CString::new(lecteur).expect("CString::new failed");
-    let resource_reader = CString::new("").expect("CString::new failed");
-    let card_number = CString::new(code_pin).expect("CString::new failed");
+pub fn lire_carte(code_pin: &str, lecteur: &str) -> Result<CartePS, CartePSError> {
+    let resource_ps = CString::new(lecteur)?;
+    let resource_reader = CString::new("")?;
+    let card_number = CString::new(code_pin)?;
 
     let mut buffer: *mut c_void = ptr::null_mut();
     let mut size: size_t = 0;
@@ -69,17 +84,32 @@ pub fn lire_carte(code_pin: &str, lecteur: &str) -> Result<CartePS, String> {
             &mut size,
         );
         println!("SSV_LireCartePS result: {}", result);
+        if result != 0 {
+            return Err(libssv::LibSSVError::StandardErrorCode {
+                code: result,
+                function: "SSV_LireCartePS",
+            }
+            .into());
+        }
 
         if !buffer.is_null() {
             hex_values = std::slice::from_raw_parts(buffer as *const u8, size);
             libc::free(buffer);
         }
     }
-    let groups = decode_ssv_memory(hex_values, hex_values.len());
+    let groups =
+        decode_ssv_memory(hex_values, hex_values.len()).map_err(CartePSError::SSVMemory)?;
     decode_carte_ps(groups)
 }
 
-fn decode_carte_ps(groups: Vec<Block>) -> Result<CartePS, String> {
+fn get_last_mut_situation(carte_ps: &mut CartePS) -> Result<&mut SituationPS, CartePSError> {
+    carte_ps
+        .situations
+        .last_mut()
+        .ok_or(CartePSError::InvalidLastSituation)
+}
+
+fn decode_carte_ps(groups: Vec<Block>) -> Result<CartePS, CartePSError> {
     let mut carte_ps = CartePS::default();
     for group in groups {
         for field in group.content {
@@ -118,137 +148,99 @@ fn decode_carte_ps(groups: Vec<Block>) -> Result<CartePS, String> {
                 }
                 (2..=16, 1) => {
                     carte_ps.situations.push(SituationPS::default());
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
+                    get_last_mut_situation(&mut carte_ps)?
                         .numero_logique_de_la_situation_de_facturation_du_ps = field.content[0];
                 }
                 (2..=16, 2) => {
-                    carte_ps.situations.last_mut().unwrap().mode_d_exercice =
+                    get_last_mut_situation(&mut carte_ps)?.mode_d_exercice =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 3) => {
-                    carte_ps.situations.last_mut().unwrap().statut_d_exercice =
+                    get_last_mut_situation(&mut carte_ps)?.statut_d_exercice =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 4) => {
-                    carte_ps.situations.last_mut().unwrap().secteur_d_activite =
+                    get_last_mut_situation(&mut carte_ps)?.secteur_d_activite =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 5) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
-                        .type_d_identification_structure =
+                    get_last_mut_situation(&mut carte_ps)?.type_d_identification_structure =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 6) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
-                        .numero_d_identification_structure =
+                    get_last_mut_situation(&mut carte_ps)?.numero_d_identification_structure =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 7) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
+                    get_last_mut_situation(&mut carte_ps)?
                         .cle_du_numero_d_identification_structure =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 8) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
-                        .raison_sociale_structure =
+                    get_last_mut_situation(&mut carte_ps)?.raison_sociale_structure =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 9) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
+                    get_last_mut_situation(&mut carte_ps)?
                         .numero_d_identification_de_facturation_du_ps =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 10) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
+                    get_last_mut_situation(&mut carte_ps)?
                        .cle_du_numero_d_identification_de_facturation_du_ps =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 11) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
+                    get_last_mut_situation(&mut carte_ps)?
                         .numero_d_identification_du_ps_remplaçant =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 12) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
+                    get_last_mut_situation(&mut carte_ps)?
                         .cle_du_numero_d_identification_du_ps_remplaçant =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 13) => {
-                    carte_ps.situations.last_mut().unwrap().code_conventionnel =
+                    get_last_mut_situation(&mut carte_ps)?.code_conventionnel =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 14) => {
-                    carte_ps.situations.last_mut().unwrap().code_specialite =
+                    get_last_mut_situation(&mut carte_ps)?.code_specialite =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 15) => {
-                    carte_ps.situations.last_mut().unwrap().code_zone_tarifaire =
+                    get_last_mut_situation(&mut carte_ps)?.code_zone_tarifaire =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 16) => {
-                    carte_ps.situations.last_mut().unwrap().code_zone_ik =
+                    get_last_mut_situation(&mut carte_ps)?.code_zone_ik =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 17) => {
-                    carte_ps.situations.last_mut().unwrap().code_agrement_1 =
+                    get_last_mut_situation(&mut carte_ps)?.code_agrement_1 =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 18) => {
-                    carte_ps.situations.last_mut().unwrap().code_agrement_2 =
+                    get_last_mut_situation(&mut carte_ps)?.code_agrement_2 =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 19) => {
-                    carte_ps.situations.last_mut().unwrap().code_agrement_3 =
+                    get_last_mut_situation(&mut carte_ps)?.code_agrement_3 =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 20) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
-                        .habilitation_à_signer_une_facture =
+                    get_last_mut_situation(&mut carte_ps)?.habilitation_à_signer_une_facture =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 (2..=16, 21) => {
-                    carte_ps
-                        .situations
-                        .last_mut()
-                        .unwrap()
-                        .habilitation_à_signer_un_lot =
+                    get_last_mut_situation(&mut carte_ps)?.habilitation_à_signer_un_lot =
                         String::from_utf8_lossy(field.content).to_string();
                 }
                 _ => {
-                    return Err(format!(
-                        "Unknown (group, field) pair: ({}, {})",
-                        group.id, field.id
-                    ))
+                    return Err(CartePSError::UnknownGroupFieldPair {
+                        group: group.id,
+                        field: field.id,
+                    });
                 }
             }
         }
@@ -279,7 +271,7 @@ mod test_decode_carte_ps {
             57, 53, 8, 48, 48, 50, 48, 50, 52, 49, 57, 1, 56, 0, 1, 48, 1, 49, 2, 53, 48, 2, 49,
             48, 2, 48, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49,
         ];
-        let blocks = decode_ssv_memory(bytes, bytes.len());
+        let blocks = decode_ssv_memory(bytes, bytes.len()).unwrap();
         let carte_ps = decode_carte_ps(blocks).unwrap();
 
         assert_eq!(carte_ps.titulaire.type_de_carte_ps, "0");
@@ -370,7 +362,7 @@ mod test_decode_carte_ps {
             57, 53, 8, 48, 48, 50, 48, 50, 52, 49, 57, 1, 56, 0, 1, 48, 1, 49, 2, 53, 48, 2, 49,
             48, 2, 48, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49,
         ];
-        let blocks = decode_ssv_memory(bytes, bytes.len());
+        let blocks = decode_ssv_memory(bytes, bytes.len()).unwrap();
         let carte_ps = decode_carte_ps(blocks).unwrap();
 
         assert_eq!(carte_ps.situations.len(), 3);
@@ -3,6 +3,13 @@
 /// Low level bindings to the SSVLIB dynamic library.
 // TODO : look for creating a dedicated *-sys crate : https://kornel.ski/rust-sys-crate
 use libc::{c_char, c_ushort, c_void, size_t};
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum LibSSVError {
+    #[error("SSV library error in {function}: {code}")]
+    StandardErrorCode { code: u16, function: &'static str },
+}
 
 #[cfg_attr(target_os = "linux", link(name = "ssvlux64"))]
 #[cfg_attr(target_os = "windows", link(name = "ssvw64"))]
@@ -3,6 +3,8 @@ mod libssv;
 mod ssv_memory;
 mod ssvlib_demo;
 
-fn main() {
-    ssvlib_demo::demo();
+use anyhow::{Context, Result};
+
+fn main() -> Result<()> {
+    ssvlib_demo::demo().context("Error while running the SSV library demo")
 }
@@ -1,6 +1,33 @@
 /// # SSV Memory
 /// Provide functions to manipulate raw memory from SSV library.
 use std::convert::TryFrom;
+use thiserror::Error;
+
+#[derive(Debug, Error)]
+pub enum BytesReadingError {
+    #[error("Empty bytes input")]
+    EmptyBytes,
+    #[error("Invalid memory: not enough bytes ({actual}) to read the expected size ({expected})")]
+    InvalidSize { expected: usize, actual: usize },
+    #[error("Invalid memory: size ({actual}) is expected to be less than {expected} bytes")]
+    SizeTooBig { expected: usize, actual: usize },
+    #[error("Invalid memory: not enough bytes to read the block id")]
+    InvalidBlockId(#[from] std::array::TryFromSliceError),
+    #[error("Error while reading field at offset {offset}")]
+    InvalidField {
+        source: Box<BytesReadingError>,
+        offset: usize,
+    },
+}
+
+#[derive(Debug, Error)]
+pub enum SSVMemoryError {
+    #[error("Error while parsing block at offset {offset}")]
+    BlockParsing {
+        source: BytesReadingError,
+        offset: usize,
+    },
+}
 
 #[derive(PartialEq, Debug)]
 struct ElementSize {
@@ -9,13 +36,12 @@ struct ElementSize {
 }
 
 // TODO: could/should we define a custom type to represent byte arrays?
 
 impl TryFrom<&[u8]> for ElementSize {
-    type Error = &'static str;
+    type Error = BytesReadingError;
 
     fn try_from(bytes: &[u8]) -> Result<Self, Self::Error> {
         if bytes.is_empty() {
-            return Err("Empty bytes input");
+            return Err(BytesReadingError::EmptyBytes);
         }
 
         let mut element_size = ElementSize { size: 0, pad: 1 };
@@ -30,9 +56,15 @@ impl TryFrom<&[u8]> for ElementSize {
             // N are the 7 lower bits of the first byte
             let size_bytes_len = (bytes[0] & 0b0111_1111) as usize;
             if size_bytes_len > bytes.len() - 1 {
-                return Err("Invalid memory: not enough bytes to read the size");
+                return Err(BytesReadingError::InvalidSize {
+                    expected: size_bytes_len,
+                    actual: bytes.len() - 1,
+                });
             } else if size_bytes_len > 4 {
-                return Err("Invalid memory: size is too big");
+                return Err(BytesReadingError::SizeTooBig {
+                    expected: 4,
+                    actual: size_bytes_len,
+                });
             }
             let size_bytes = &bytes[1..1 + size_bytes_len];
 
@@ -54,15 +86,21 @@ pub struct Block<'a> {
     pub content: Vec<Field<'a>>,
 }
 
-impl<'a> From<&'a [u8]> for Block<'a> {
-    fn from(bytes: &'a [u8]) -> Self {
+impl<'a> TryFrom<&'a [u8]> for Block<'a> {
+    type Error = BytesReadingError;
+
+    fn try_from(bytes: &'a [u8]) -> Result<Self, Self::Error> {
         let mut offset = 0;
-        let id = u16::from_be_bytes(bytes[..2].try_into().unwrap());
+        let id = u16::from_be_bytes(
+            bytes[..2]
+                .try_into()
+                .map_err(BytesReadingError::InvalidBlockId)?,
+        );
         offset += 2;
         let ElementSize {
             size: block_size,
             pad,
-        } = bytes[2..].try_into().unwrap();
+        } = bytes[2..].try_into()?;
         offset += pad;
         let raw_content = &bytes[offset..];
         let mut field_offset = 0;
@@ -70,17 +108,22 @@ impl<'a> From<&'a [u8]> for Block<'a> {
         let mut content = Vec::new();
         let mut field_id = 1;
         while field_offset < block_size {
-            let mut field: Field<'a> = raw_content[field_offset..].into();
+            let mut field: Field<'a> = raw_content[field_offset..].try_into().map_err(|err| {
+                BytesReadingError::InvalidField {
+                    source: Box::new(err),
+                    offset: field_offset,
+                }
+            })?;
             field.id = field_id;
             field_offset += field.size;
             field_id += 1;
             content.push(field);
         }
-        Block {
+        Ok(Block {
             id,
             size: offset + block_size,
             content,
-        }
+        })
     }
 }
@@ -91,31 +134,41 @@ pub struct Field<'a> {
     pub content: &'a [u8],
 }
 
-impl<'a> From<&'a [u8]> for Field<'a> {
-    fn from(bytes: &'a [u8]) -> Self {
-        let ElementSize { size, pad } = bytes.try_into().unwrap();
+impl<'a> TryFrom<&'a [u8]> for Field<'a> {
+    type Error = BytesReadingError;
+
+    fn try_from(bytes: &'a [u8]) -> Result<Self, Self::Error> {
+        let ElementSize { size, pad } = bytes.try_into()?;
         let contenu = &bytes[pad..pad + size];
-        Field {
+        Ok(Field {
             id: 0,
             size: pad + size,
             content: contenu,
-        }
+        })
     }
 }
 
-pub fn decode_ssv_memory(bytes: &[u8], size: usize) -> Vec<Block> {
+pub fn decode_ssv_memory(bytes: &[u8], size: usize) -> Result<Vec<Block>, SSVMemoryError> {
     let mut blocks: Vec<Block> = Vec::new();
     let mut offset = 0;
     while offset < size {
-        let block: Block = bytes[offset..].into();
+        let block: Block =
+            bytes[offset..]
+                .try_into()
+                .map_err(|err| SSVMemoryError::BlockParsing {
+                    source: err,
+                    offset,
+                })?;
         offset += block.size;
         blocks.push(block);
     }
-    blocks
+    Ok(blocks)
 }
 
 #[cfg(test)]
 mod test_element_size {
+    use std::any::Any;
+
     use super::*;
 
     #[test]
@@ -142,29 +195,51 @@ mod test_element_size {
     #[test]
     fn null_size() {
         let bytes: &[u8] = &[];
-        let result: Result<ElementSize, &str> = bytes.try_into();
-        assert_eq!(result, Err("Empty bytes input"),);
+        let result: Result<ElementSize, BytesReadingError> = bytes.try_into();
+        assert!(result.is_err());
+        assert_eq!(
+            result.unwrap_err().type_id(),
+            BytesReadingError::EmptyBytes.type_id()
+        );
     }
 
     #[test]
     fn invalid_memory() {
         let bytes: &[u8] = &[0b_1000_0001_u8];
-        let result: Result<ElementSize, &str> = bytes.try_into();
+        let result: Result<ElementSize, BytesReadingError> = bytes.try_into();
+        assert!(result.is_err());
         assert_eq!(
-            result,
-            Err("Invalid memory: not enough bytes to read the size"),
+            result.unwrap_err().to_string(),
+            BytesReadingError::InvalidSize {
+                expected: 1,
+                actual: 0
+            }
+            .to_string()
         );
 
         let bytes: &[u8] = &[0b_1000_0010_u8, 1];
-        let result: Result<ElementSize, &str> = bytes.try_into();
+        let result: Result<ElementSize, BytesReadingError> = bytes.try_into();
+        assert!(result.is_err());
         assert_eq!(
-            result,
-            Err("Invalid memory: not enough bytes to read the size"),
+            result.unwrap_err().to_string(),
+            BytesReadingError::InvalidSize {
+                expected: 2,
+                actual: 1
+            }
+            .to_string()
         );
 
         let bytes: &[u8] = &[0b_1000_0101_u8, 1, 1, 1, 1, 1];
-        let result: Result<ElementSize, &str> = bytes.try_into();
-        assert_eq!(result, Err("Invalid memory: size is too big"),);
+        let result: Result<ElementSize, BytesReadingError> = bytes.try_into();
+        assert!(result.is_err());
+        assert_eq!(
+            result.unwrap_err().to_string(),
+            BytesReadingError::SizeTooBig {
+                expected: 4,
+                actual: 5
+            }
+            .to_string()
+        );
     }
 }
@@ -179,7 +254,7 @@ mod test_field {
             80, 72, 65, 82, 77, 65, 67, 73, 69, 78, 48, 48, 53, 50, 52, 49, 57, 9, 70, 82, 65, 78,
             67, 79, 73, 83, 69, 1, 84,
         ];
-        let element: Field = bytes.into();
+        let element: Field = bytes.try_into().unwrap();
         assert_eq!(element.size, 52);
         assert_eq!(element.content[..5], [1, 48, 1, 56, 11]);
     }
@@ -194,7 +269,7 @@ mod test_field {
         // Add 256 bytes to the content
         bytes_vec.append(&mut vec![1; 256]);
         let bytes: &[u8] = &bytes_vec;
-        let element: Field = bytes.into();
+        let element: Field = bytes.try_into().unwrap();
         assert_eq!(element.size, 259);
         assert_eq!(element.content.len(), 256);
     }
@@ -208,15 +283,15 @@ mod test_block {
     fn test_francoise_pharmacien0052419_partial_block_1() {
         let bytes: &[u8] = &[1, 48, 1, 56, 11, 57, 57, 55, 48, 48, 53, 50, 52, 49, 57, 52];
 
-        let field1: Field = bytes.into();
+        let field1: Field = bytes.try_into().unwrap();
         assert_eq!(field1.size, 2);
         assert_eq!(field1.content, &[48]);
 
-        let field2: Field = bytes[field1.size..].into();
+        let field2: Field = bytes[field1.size..].try_into().unwrap();
         assert_eq!(field2.size, 2);
         assert_eq!(field2.content, &[56]);
 
-        let field3: Field = bytes[field1.size + field2.size..].into();
+        let field3: Field = bytes[field1.size + field2.size..].try_into().unwrap();
         assert_eq!(field3.size, 12);
         assert_eq!(
             field3.content,
@@ -243,12 +318,12 @@ mod test_block {
             48, 2, 49, 48, 2, 48, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49,
         ];
 
-        let first_block: Block = bytes.into();
+        let first_block: Block = bytes.try_into().unwrap();
         assert_eq!(first_block.id, 1);
         assert_eq!(first_block.size, 54);
         assert_eq!(first_block.content.len(), 8);
 
-        let second_block: Block = bytes[first_block.size..].into();
+        let second_block: Block = bytes[first_block.size..].try_into().unwrap();
         assert_eq!(second_block.id, 2);
         assert_eq!(second_block.size, 86);
         assert_eq!(second_block.content.len(), 21);
@@ -277,7 +352,7 @@ mod test_decode_ssv_memory {
             50, 50, 49, 57, 53, 8, 48, 48, 50, 48, 50, 52, 49, 57, 1, 56, 0, 1, 48, 1, 49, 2, 53,
             48, 2, 49, 48, 2, 48, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49,
         ];
-        let blocks = decode_ssv_memory(bytes, bytes.len());
+        let blocks = decode_ssv_memory(bytes, bytes.len()).unwrap();
         assert_eq!(blocks.len(), 2);
     }
 }
@@ -6,25 +6,49 @@ use std::env;
 use std::ffi::CString;
 use std::path::PathBuf;
 use std::ptr;
+use thiserror::Error;
 
 use crate::cps::lire_carte;
 use crate::libssv::{SSV_InitLIB2, SSV_LireConfig};
 
-fn ssv_init_lib_2() {
+#[derive(Error, Debug)]
+pub enum SSVDemoError {
+    #[error(transparent)]
+    CartePSReading(#[from] crate::cps::CartePSError),
+    #[error(transparent)]
+    SSVLibErrorCode(#[from] crate::libssv::LibSSVError),
+}
+
+fn ssv_init_lib_2() -> Result<(), SSVDemoError> {
     let ini_str = env::var("SESAM_INI_PATH").expect("SESAM_INI_PATH must be set");
     let ini = CString::new(ini_str).expect("CString::new failed");
     unsafe {
         let result = SSV_InitLIB2(ini.as_ptr());
         println!("SSV_InitLIB2 result: {}", result);
+        if result != 0 {
+            return Err(crate::libssv::LibSSVError::StandardErrorCode {
+                code: result,
+                function: "SSV_InitLIB2",
+            }
+            .into());
+        }
     }
+    Ok(())
 }
 
-fn ssv_lire_config() {
+fn ssv_lire_config() -> Result<(), SSVDemoError> {
     let mut buffer: *mut c_void = ptr::null_mut();
     let mut size: size_t = 0;
     unsafe {
         let result = SSV_LireConfig(&mut buffer, &mut size);
         println!("SSV_LireConfig result: {}", result);
+        if result != 0 {
+            return Err(crate::libssv::LibSSVError::StandardErrorCode {
+                code: result,
+                function: "SSV_LireConfig",
+            }
+            .into());
+        }
 
         if !buffer.is_null() {
             let hex_values = std::slice::from_raw_parts(buffer as *const u8, size);
@@ -36,25 +60,27 @@ fn ssv_lire_config() {
             libc::free(buffer);
         }
     }
+    Ok(())
 }
 
-pub fn demo() {
+pub fn demo() -> Result<(), SSVDemoError> {
     // TODO: this probably does not work in release builds, since CARGO_MANIFEST_DIR may not exist there and the `.env` file may not be found
     // Maybe we could use a system standard config path to store a config file
-    let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
+    let manifest_dir = env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR must be set");
     let manifest_path = PathBuf::from(manifest_dir);
     dotenv::from_path(manifest_path.join(".env")).ok();
 
     println!("------- Demo for the SSV library --------");
 
-    ssv_init_lib_2();
+    ssv_init_lib_2()?;
 
     let code_pin = "1234";
     let lecteur = "HID Global OMNIKEY 3x21 Smart Card Reader 0";
-    let carte_ps = lire_carte(code_pin, lecteur).unwrap();
+    let carte_ps = lire_carte(code_pin, lecteur)?;
     println!("CartePS: {:#?}", carte_ps);
 
-    ssv_lire_config();
+    ssv_lire_config()?;
 
     println!("-----------------------------------------");
+    Ok(())
 }
docs/errors.md (new file, 85 lines)
@@ -0,0 +1,85 @@
# Error handling

This document describes how errors are handled in the project.

## Native handling

As a matter of principle, Rust code avoids exception-style handling as much as possible, reserving it for situations where crashing the program is the best option.
In the normal case, we return `Result<Value, Error>` (for success/failure situations) or `Option<Value>` (for present/absent values).

When facing an error, we either handle it explicitly (see [Error recovery](#error-recovery)) or propagate it to a higher level, usually with the `?` operator.

On the other hand, we avoid raising exceptions (called "panics" in Rust) as much as possible, whether through `panic!` or through calls to functions that panic on error (such as `unwrap` or `expect`).

Many examples of Rust's native error-handling idioms are available in the [Rust by example](https://doc.rust-lang.org/rust-by-example/error.html) documentation.
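
For illustration, a minimal sketch of this `Result` + `?` style (the `read_port` helper and the `port.txt` file are hypothetical, not taken from the codebase):

```rust
use std::fs;
use std::num::ParseIntError;

/// Read a TCP port number from a file, propagating I/O and parsing
/// failures to the caller instead of panicking.
fn read_port(path: &str) -> Result<u16, String> {
    let raw = fs::read_to_string(path).map_err(|e| e.to_string())?;
    let port: u16 = raw.trim().parse().map_err(|e: ParseIntError| e.to_string())?;
    Ok(port)
}

fn main() {
    // The caller decides how to react: here we recover by falling back to a default.
    let port = read_port("port.txt").unwrap_or(3000);
    println!("Using port {port}");
}
```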

## Error-handling libraries

Two libraries are used to handle errors in the project:
- [`anyhow`](https://docs.rs/anyhow/latest/anyhow/): lets us return weakly typed errors that are very easy to enrich with explanatory messages. We use it to easily surface high-level errors to an end user.
  ```rust
  use anyhow::{Context, Result};

  fn get_cluster_info() -> Result<ClusterInfo> {
      let data = fs::read_to_string("cluster.json")
          .with_context(|| "failed to read cluster config")?;
      let info: ClusterInfo = serde_json::from_str(&data)
          .with_context(|| "failed to parse cluster config")?;
      Ok(info)
  }
  ```
- [`thiserror`](https://docs.rs/thiserror/latest/thiserror/): provides macros to define strongly typed errors. We use it to define errors specific to one part of the code, contextualized with structured data rather than plain error messages, so as to favour recovery from errors.
  ```rust
  use std::io;

  use thiserror::Error;

  #[derive(Error, Debug)]
  pub enum DataStoreError {
      #[error("data store disconnected")]
      Disconnect(#[from] io::Error),
      #[error("the data for key `{0}` is not available")]
      Redaction(String),
      #[error("invalid header (expected {expected:?}, found {found:?})")]
      InvalidHeader {
          expected: String,
          found: String,
      },
      #[error("unknown data store error")]
      Unknown,
  }
  ```

## Error recovery

Whenever possible, we try to favour recovering from an error rather than crashing. The recovery strategies are, as sketched below:
- Retry the operation, as is or with different parameters
- Work around the operation, replacing it with another one
- Failing that, inform the user of the error and:
  - stop / cancel the current operation
  - ignore the error and continue execution

When an error cannot be recovered from, it is propagated to a higher level, converting it if necessary into a more generic error type appropriate to that level.
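
As an illustration, a minimal sketch of the "retry, then fall back" strategies (the `fetch_config` function and its failure behaviour are hypothetical, not taken from the codebase):

```rust
use std::time::Duration;

/// Hypothetical fallible operation: pretend the first two attempts fail.
fn fetch_config(attempt: u32) -> Result<String, String> {
    if attempt < 2 {
        Err(format!("network error on attempt {attempt}"))
    } else {
        Ok("config fetched from server".to_string())
    }
}

fn main() {
    let mut config = None;
    // Strategy 1: retry the operation a few times.
    for attempt in 0..3 {
        match fetch_config(attempt) {
            Ok(value) => {
                config = Some(value);
                break;
            }
            Err(err) => {
                eprintln!("fetch failed ({err}), retrying");
                std::thread::sleep(Duration::from_millis(100));
            }
        }
    }
    // Strategy 2: if retrying did not help, fall back to a default instead of crashing.
    let config = config.unwrap_or_else(|| "default config".to_string());
    println!("Using: {config}");
}
```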

## Error conversion

When propagating an error to a higher level, we may need to convert it into a more generic error type appropriate to that level. To make this conversion easier, we implement the `From` trait. With `thiserror`, the `#[from]` attribute or the `source` parameter can be used to generate such conversions automatically.

Then, when handling an error, we can:
- either return the error directly with the `?` operator, which takes care of the conversion;
- or convert the error explicitly, for example with the `map_err` method on a `Result`, in particular when we want to enrich the error with additional information (see the sketch below).
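
A minimal sketch of both options (the `LowLevelError` and `HighLevelError` types are hypothetical, not taken from the codebase):

```rust
use thiserror::Error;

#[derive(Error, Debug)]
#[error("low-level failure: {0}")]
struct LowLevelError(String);

#[derive(Error, Debug)]
enum HighLevelError {
    // `#[from]` generates `impl From<LowLevelError> for HighLevelError`,
    // so `?` can convert the error implicitly.
    #[error("operation failed")]
    LowLevel(#[from] LowLevelError),
    // Variant used when we want to attach extra context by hand.
    #[error("operation failed while processing {item}")]
    WithContext { source: LowLevelError, item: String },
}

fn low_level() -> Result<(), LowLevelError> {
    Err(LowLevelError("disk unreachable".into()))
}

// Option 1: let `?` perform the conversion through `From`.
fn implicit() -> Result<(), HighLevelError> {
    low_level()?;
    Ok(())
}

// Option 2: convert explicitly with `map_err` to add information.
fn explicit(item: &str) -> Result<(), HighLevelError> {
    low_level().map_err(|source| HighLevelError::WithContext {
        source,
        item: item.to_string(),
    })?;
    Ok(())
}

fn main() {
    println!("{:?}", implicit());
    println!("{:?}", explicit("cluster.json"));
}
```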

## Exceptional uses of `unwrap` and `expect`

Because they cause panics on error, the `unwrap` and `expect` functions must only be used in exceptional cases:
- In tests, to signal a test failure
- At the very top level of the application, to signal a fatal error that cannot be recovered from
- In situations where the error cannot occur, for example after checking preconditions (see the sketch below)

Ideally, prefer `expect` over `unwrap`, since it lets you give an explicit error message.
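
For example, a minimal sketch (illustrative only) of an `expect` whose message documents an invariant established just above it:

```rust
fn main() {
    let args: Vec<String> = std::env::args().collect();
    // Precondition checked here...
    if args.len() < 2 {
        eprintln!("usage: prog <value>");
        return;
    }
    // ...so this `expect` documents an invariant rather than hiding a real error path.
    let value = args.get(1).expect("args.len() >= 2 was checked above");
    println!("value = {value}");
}
```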

## Resources

- [The Rust Programming Language - Ch. 9: Error Handling](https://doc.rust-lang.org/book/ch09-00-error-handling.html)
- [The NRC Book - Error Handling in Rust](https://nrc.github.io/error-docs/intro.html)
- [The Error Design Patterns Book - by Project Error Handling WG](https://github.com/rust-lang/project-error-handling/blob/master/error-design-patterns-book/src/SUMMARY.md)
- [The Rust Cookbook - Error Handling](https://rust-lang-nursery.github.io/rust-cookbook/error-handling.html)
- [The initial ticket for integrating error handling into the project](https://forge.p4pillon.org/P4Pillon/Krys4lide/issues/34)