feat: implement full SSV_LireConfig output parsing, using deku for declarative byte parsing

Co-authored-by: theo <t.lettermann@criteo.com>
Florian Briand 2024-10-03 23:01:53 +02:00
parent c83824ae34
commit 8f935ab81e
Signed by: florian_briand
GPG Key ID: CC981B9E6B98E70B
9 changed files with 824 additions and 30 deletions
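
Overview: the sketch below shows how the parsing added in this commit could be consumed from the fsv crate. It is an illustrative assumption (the print_config helper and the exact import paths are not part of this commit); SSV, get_config, Data and DataGroup are defined in the files changed below.

use fsv::fsv_parsing::blocks::DataGroup;
use fsv::ssv::{Error, SSV};

// Hypothetical helper: `lib` is assumed to be an initialized `SSV` handle
// (see `setup::init` in the tests further down).
fn print_config(lib: &SSV) -> Result<(), Error> {
    // `get_config` wraps SSV_LireConfig and parses the returned buffer with deku
    let data = lib.get_config()?;
    for block in &data.blocks {
        match &block.content {
            DataGroup::LireConfig_Group60_ConfigHeader(header) => {
                // `SoftwareVersion` implements `Display` as "version.revision"
                println!("SSV {} / GALSS {}", header.ssv_version.0, header.galss_version.0);
            }
            DataGroup::LireConfig_Group67_PCSCReaderConfig(reader) => {
                println!("PC/SC reader: {:?}", reader.name);
            }
            _ => {} // groups 61 and 64 are ignored in this sketch
        }
    }
    Ok(())
}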

@ -722,7 +722,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf4918709cc4dd777ad2b6303ed03cb37f3ca0ccede8c1b0d28ac6db8f4710e0" checksum = "bf4918709cc4dd777ad2b6303ed03cb37f3ca0ccede8c1b0d28ac6db8f4710e0"
dependencies = [ dependencies = [
"once_cell", "once_cell",
"proc-macro-crate 2.0.2", "proc-macro-crate 2.0.0",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.77", "syn 2.0.77",
@ -1384,6 +1384,32 @@ dependencies = [
"syn 2.0.77", "syn 2.0.77",
] ]
[[package]]
name = "deku"
version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9711031e209dc1306d66985363b4397d4c7b911597580340b93c9729b55f6eb"
dependencies = [
"bitvec",
"deku_derive",
"log",
"no_std_io2",
"rustversion",
]
[[package]]
name = "deku_derive"
version = "0.18.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58cb0719583cbe4e81fb40434ace2f0d22ccc3e39a74bb3796c22b451b4f139d"
dependencies = [
"darling 0.20.10",
"proc-macro-crate 3.2.0",
"proc-macro2",
"quote",
"syn 2.0.77",
]
[[package]]
name = "der"
version = "0.7.9"
@ -1747,6 +1773,29 @@ dependencies = [
"syn 2.0.77", "syn 2.0.77",
] ]
[[package]]
name = "env_filter"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f2c92ceda6ceec50f43169f9ee8424fe2db276791afde7b2cd8bc084cb376ab"
dependencies = [
"log",
"regex",
]
[[package]]
name = "env_logger"
version = "0.11.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d"
dependencies = [
"anstream",
"anstyle",
"env_filter",
"humantime",
"log",
]
[[package]]
name = "equivalent"
version = "1.0.1"
@ -1968,9 +2017,13 @@ name = "fsv"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"deku",
"env_logger",
"fsv-sys", "fsv-sys",
"libc", "libc",
"log",
"num_enum", "num_enum",
"serde",
"thiserror", "thiserror",
"utils", "utils",
] ]
@ -2365,7 +2418,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bb0228f477c0900c880fd78c8759b95c7636dbd7842707f49e132378aa2acdc" checksum = "0bb0228f477c0900c880fd78c8759b95c7636dbd7842707f49e132378aa2acdc"
dependencies = [ dependencies = [
"heck 0.4.1", "heck 0.4.1",
"proc-macro-crate 2.0.2", "proc-macro-crate 2.0.0",
"proc-macro-error", "proc-macro-error",
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -2633,6 +2686,12 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hyper"
version = "1.4.1"
@ -3467,6 +3526,15 @@ dependencies = [
"memoffset 0.9.1", "memoffset 0.9.1",
] ]
[[package]]
name = "no_std_io2"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f038b95e66372ec5f4adabd615fc9a46a1fe42bcfe549863921c0e44667b605"
dependencies = [
"memchr",
]
[[package]]
name = "nodrop"
version = "0.1.14"
@ -3612,7 +3680,7 @@ version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56"
dependencies = [ dependencies = [
"proc-macro-crate 2.0.2", "proc-macro-crate 3.2.0",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.77", "syn 2.0.77",
@ -4286,14 +4354,22 @@ dependencies = [
[[package]]
name = "proc-macro-crate"
-version = "2.0.2"
+version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b00f26d3400549137f92511a46ac1cd8ce37cb5598a96d382381458b992a5d24"
+checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8"
dependencies = [
-"toml_datetime",
"toml_edit 0.20.2",
]
[[package]]
name = "proc-macro-crate"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b"
dependencies = [
"toml_edit 0.22.22",
]
[[package]]
name = "proc-macro-error"
version = "1.0.4"
@ -6377,9 +6453,9 @@ dependencies = [
[[package]]
name = "toml_datetime"
-version = "0.6.3"
+version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b"
+checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
dependencies = [
"serde",
]
@ -6394,7 +6470,7 @@ dependencies = [
"serde", "serde",
"serde_spanned", "serde_spanned",
"toml_datetime", "toml_datetime",
"winnow", "winnow 0.5.40",
] ]
[[package]]
@ -6407,7 +6483,18 @@ dependencies = [
"serde", "serde",
"serde_spanned", "serde_spanned",
"toml_datetime", "toml_datetime",
"winnow", "winnow 0.5.40",
]
[[package]]
name = "toml_edit"
version = "0.22.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5"
dependencies = [
"indexmap 2.5.0",
"toml_datetime",
"winnow 0.6.20",
]
[[package]]
@ -7485,6 +7572,15 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "winnow"
version = "0.6.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b"
dependencies = [
"memchr",
]
[[package]]
name = "winreg"
version = "0.52.0"

@ -7,7 +7,14 @@ edition = "2021"
anyhow = "1.0.89" anyhow = "1.0.89"
libc = "0.2.159" libc = "0.2.159"
num_enum = { version = "0.7.3", features = ["complex-expressions"] } num_enum = { version = "0.7.3", features = ["complex-expressions"] }
thiserror = "1.0.64" deku = { version = "0.18.1", features = ["logging"] }
thiserror.workspace = true
serde.workspace = true
fsv-sys = { path = "../fsv-sys" } fsv-sys = { path = "../fsv-sys" }
utils = { path = "../utils" } utils = { path = "../utils" }
#[dev-dependencies]
log = "0.4.22"
env_logger = "0.11.5"

@ -0,0 +1,223 @@
use deku::deku_derive;
use super::{ groups, size_read };
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
/// # Data: FSV data structure
/// This is the core structure for reading FSV raw data
/// It directly handles the raw data returned by the FSV library
/// A `Data` structure is composed of multiple `DataBlock` structures
pub struct Data {
#[deku(read_all)]
pub blocks: Vec<DataBlock>,
}
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
/// # Data block structure
/// `DataBlock` structures are the main building blocks of a `Data` struct
pub struct DataBlock {
pub header: BlockHeader,
#[deku(ctx = "header.group_id.0")]
pub content: DataGroup,
}
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
/// # Block header structure
/// The `BlockHeader` structure is the header of a `DataBlock`
/// It contains the group ID and the size of the data contained in the `DataBlock` (its `content` field)
pub struct BlockHeader {
pub group_id: GroupId,
#[deku(reader = "size_read(deku::reader)")]
pub data_size: u64, // This field is not really used, but we have to parse it to move the reader cursor
}
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
/// # Group ID
/// Identifies the type of data contained in a `DataBlock`
/// It is used as the matching ID in the `DataGroup` enum. All the
/// IDs are documented in the SSV documentation, pages 23-28
pub struct GroupId(
#[deku(endian="big", bytes= 2)]
pub u16,
);
/// # Data group enum
/// This enum is used to match a `DataBlock` content with the
/// correct data structure, which is able to parse the data it contains
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
#[deku(ctx = "group_id: u16", id = "group_id")]
#[allow(non_camel_case_types)]
pub enum DataGroup {
#[deku(id = 60)]
LireConfig_Group60_ConfigHeader(groups::ssv_lire_config::group_60_header_config::ConfigHeader),
#[deku(id = 61)]
LireConfig_Group61_ReaderConfig(groups::ssv_lire_config::group_61_reader_config::ReaderConfig),
#[deku(id = 64)]
LireConfig_Group64_SVComponentsConfig(groups::ssv_lire_config::group_64_sv_config::SVComponentsConfig),
#[deku(id = 67)]
LireConfig_Group67_PCSCReaderConfig(groups::ssv_lire_config::group_67_pcsc_config::PCSCReaderConfig),
}
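// For illustration: deku first parses the `BlockHeader`, then `DataBlock` passes
// `header.group_id.0` as context to `DataGroup`, whose `id = ...` attributes select
// the matching variant. A buffer starting with the bytes [0, 60, ...] is therefore
// decoded into the `LireConfig_Group60_ConfigHeader` variant (see the tests below).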
#[cfg(test)]
mod tests {
use deku::DekuContainerRead as _;
use super::*;
mod deku_testing {
use super::*;
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
#[deku(endian = "big")]
pub struct DekuTest {
#[deku(bits = 4)]
pub a: u8,
#[deku(bits = 4)]
pub b: u8,
pub c: u16,
}
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
#[deku(endian = "big")]
pub struct DekuTestWithSizeReader {
#[deku(bytes = 2)]
pub id: u16,
#[deku(reader = "size_read(deku::reader)")]
pub size: u64,
}
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
pub struct DekuTestWithGroupId {
pub group_id: GroupId,
}
}
#[test]
fn test_deserialize_deku_test() {
let buffer: &[u8] = &[0b0110_1001, 0xBE, 0xEF];
let offset: usize = 0;
let ((rest, offset), val) = deku_testing::DekuTest::from_bytes((buffer, offset)).unwrap();
assert_eq!(val.a, 0b0110);
assert_eq!(val.b, 0b1001);
assert_eq!(val.c, 0xBEEF);
assert_eq!(offset, 0);
assert_eq!(rest, &[]);
}
#[test]
fn test_deserialize_deku_test_with_offset() {
let buffer: &[u8] = &[0b0000_1111, 0b0110_1001, 0xBE, 0xEF];
let offset: usize = 8;
let ((rest, offset), val) = deku_testing::DekuTest::from_bytes((buffer, offset)).unwrap();
assert_eq!(val.a, 0b0110);
assert_eq!(val.b, 0b1001);
assert_eq!(val.c, 0xBEEF);
assert_eq!(offset, 0);
assert_eq!(rest, &[]);
}
#[test]
fn test_deserialize_deku_test_with_rest() {
let buffer: &[u8] = &[0b0110_1001, 0xBE, 0xEF, 0x1F, 0x2F];
let offset: usize = 0;
let ((rest, offset), val) = deku_testing::DekuTest::from_bytes((buffer, offset)).unwrap();
assert_eq!(val.a, 0b0110);
assert_eq!(val.b, 0b1001);
assert_eq!(val.c, 0xBEEF);
assert_eq!(offset, 0);
assert_eq!(rest, &[0x1F, 0x2F]);
}
#[test]
fn test_size_read() {
let buffer: &[u8] = &[
0, 60, // ID (60)
0b0100_0000, // Size type bit (0) + Size (64)
3, 4, 5, 6, 7, 8, 9, 10, 11, 12 // Extra data (10 bytes ; should be 64)
];
let ((rest, _offset), val) = deku_testing::DekuTestWithSizeReader::from_bytes((buffer, 0)).unwrap();
assert_eq!(val.id, 60, "EX1: ID");
assert_eq!(val.size, 64, "EX1: Size");
assert_eq!(rest.len(), 10, "EX1: Rest");
let buffer: &[u8] = &[
0, 60, // ID (60)
0b1000_0010, // Size type bit (1) + Size block length (2)
0b0000_0001, 0b0100_0000, // Size (320)
3, 4, 5, 6, 7, 8, 9, 10, 11, 12 // Extra data (10 bytes ; should be 320)
];
let ((rest, _offset), val) = deku_testing::DekuTestWithSizeReader::from_bytes((buffer, 0)).unwrap();
assert_eq!(val.id, 60, "EX2: ID");
assert_eq!(val.size, 320, "EX2: Size");
println!("{:?}", rest);
// assert_eq!(val.size, 320, "EX2: Size");
}
#[test]
fn test_endianness() {
#[derive(Debug, PartialEq)]
#[deku_derive(DekuRead)]
struct DekuTest {
#[deku(endian = "big")]
field_be: u16,
#[deku(endian = "little")]
field_le: u16,
field_default: u16,
}
let buffer: &[u8] = &[
0xAB, 0xCD,
0xAB, 0xCD,
0xAB, 0xCD,
];
let (_rest, result) = DekuTest::from_bytes((buffer, 0)).unwrap();
assert_eq!(result.field_be, 0xABCD, "0xAB,0xCD - Big Endian");
assert_eq!(result.field_le, 0xCDAB, "0xAB,0xCD - Little Endian");
assert_eq!(deku::ctx::Endian::default(), deku::ctx::Endian::Little, "Default Endian");
assert_eq!(result.field_default, 0xCDAB, "0xAB,0xCD - Default Endian");
let buffer: &[u8] = &[
0, 64,
0, 64,
0, 64,
];
let (_rest, result) = DekuTest::from_bytes((buffer, 0)).unwrap();
assert_eq!(result.field_be, 64, "0,64 - Big Endian");
assert_eq!(result.field_le, 16384, "0,64 - Little Endian");
assert_eq!(deku::ctx::Endian::default(), deku::ctx::Endian::Little);
assert_eq!(result.field_default, 16384, "0,64 - Default Endian");
}
#[test]
fn test_group_id() {
// env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
let buffer: &[u8] = &[
0, 60, // ID (60)
];
let (_rest, val) = deku_testing::DekuTestWithGroupId::from_bytes((buffer, 0)).unwrap();
assert_eq!(val.group_id.0, 60, "EX1: ID");
let buffer: &[u8] = &[
7, 118, // ID (1910)
];
let (_rest, val) = deku_testing::DekuTestWithGroupId::from_bytes((buffer, 0)).unwrap();
assert_eq!(val.group_id.0, 1910, "EX2: ID");
}
}

@ -0,0 +1,102 @@
use std::{fmt, str::FromStr};
use deku::{deku_derive, DekuError};
use super::{ size_read, map_bytes_to_lossy_string };
pub mod ssv_lire_config;
/// # Convert a DataField to a specific type
/// Used as a deku `map` function to fill a field value from
/// a `DataField`
fn map_from_data_field<T>(data_field: DataField) -> Result<T, DekuError>
where
T: FromStr,
T::Err: std::fmt::Display,
{
let text = String::from_utf8(data_field.data)
.map_err(|e| DekuError::Parse(e.to_string().into()))?;
T::from_str(&text)
.map_err(|e| DekuError::Parse(e.to_string().into()))
}
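// For illustration, assuming the `DataField` type defined below: the bytes [2, 48, 49]
// are read as a `DataField` with `data_size` 2 and `data` [48, 49] ("01");
// mapping it with `map_from_data_field::<u8>` yields 1, while mapping it into a
// `String` keeps "01".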
// ------------------- DATA FIELD TYPES -------------------
/// # Data field structure
/// This is the core structure for reading data fields
/// It is usually consumed by other structures through the
/// `#[deku(map = "map_from_data_field")]` attribute
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
struct DataField {
#[deku(temp, reader = "size_read(deku::reader)")]
pub data_size: u64,
#[deku(bytes_read = "data_size")]
pub data: Vec<u8>,
}
#[deku_derive(DekuRead)]
#[derive(Debug, Clone, PartialEq)]
/// # Numeric string
/// TODO: check if all the characters are numeric
pub struct NumericString(
#[deku(map = "map_from_data_field")]
String
);
#[deku_derive(DekuRead)]
#[derive(Debug, Clone, PartialEq)]
pub struct AlphaNumericString(
#[deku(map = "map_from_data_field")]
String
);
impl From<&str> for AlphaNumericString {
fn from(s: &str) -> Self {
AlphaNumericString(s.to_string())
}
}
#[deku_derive(DekuRead)]
#[derive(Debug, Clone, PartialEq)]
#[deku(endian = "big")]
/// # Software version
/// An almost standard software version structure in FSV
/// It is composed of a version and a revision, encoded on 2 bytes each
pub struct SoftwareVersion {
#[deku(temp, reader = "size_read(deku::reader)", assert_eq = "4")]
data_size: u64,
#[deku(bytes= 2, map = "|x: [u8; 2]| map_bytes_to_lossy_string(&x)")]
pub version: String,
#[deku(bytes= 2, map = "|x: [u8; 2]| map_bytes_to_lossy_string(&x)")]
pub revision: String,
}
impl fmt::Display for SoftwareVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}.{}", self.version, self.revision)
}
}
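// For illustration: with this Display impl, a `SoftwareVersion` whose version is "07"
// and whose revision is "20" is rendered as "07.20".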
#[cfg(test)]
mod test {
use deku::DekuContainerRead as _;
use super::*;
#[test]
fn test() {
let version_bytes: [u8; 2] = [48, 55];
let version = map_bytes_to_lossy_string(&version_bytes).unwrap();
assert_eq!(version, "07");
}
#[test]
fn test_software_version() {
// env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
let data: [u8; 5] = [4, 48, 55, 50, 48];
let (_rest, software_version) = SoftwareVersion::from_bytes((&data, 0)).unwrap();
// assert_eq!(software_version.data_size, 4);
assert_eq!(software_version.version, "07");
assert_eq!(software_version.revision, "20");
}
}

@ -0,0 +1,295 @@
//! # Parsing structures for the data returned by the SSV_LireConfig function
//! The `ReaderConfig61` group described below is only filled when an
//! approved SESAM-Vitale reader is used, and not when PC/SC reader(s) are
//! used. For a TL or TLA configured in PC/SC mode, a `ReaderConfig61` group
//! is returned for each reader exposed by the PC/SC resource manager; the
//! information is then duplicated in each of the `ReaderConfig61` groups.
//! Information about the PC/SC readers is available in the
//! `PCSCReaderConfig67` groups.
use deku::deku_derive;
use super::{AlphaNumericString, NumericString, SoftwareVersion};
/// # Configuration header
/// 1 occurrence
pub mod group_60_header_config {
use super::*;
/// Group 60 - Configuration header
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct ConfigHeader {
pub ssv_version: SSVVersionNumber,
pub galss_version: GALSSVersionNumber,
pub pss_version: PSSVersionNumber,
}
// Fields
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct SSVVersionNumber(pub SoftwareVersion);
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct GALSSVersionNumber(pub SoftwareVersion);
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct PSSVersionNumber(pub SoftwareVersion);
}
/// # Reader configuration
/// 0 to 15 occurrences
pub mod group_61_reader_config {
use super::*;
/// Group 61 - Reader configuration
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct ReaderConfig {
pub manufacturer_name: AlphaNumericString, // 15 CA
pub reader_type: AlphaNumericString, // 30 CA
pub serial_number: AlphaNumericString, // 20 CA
pub os: NumericString, // 2 CN
pub software_count: NumericString, // 2 CN
pub software_name: AlphaNumericString, // 30 CA
pub software_version: ReaderSoftwareVersion, // 4 CA
pub reader_datetime: ReaderSoftwareDate, // 12 CN
pub software_checksum: AlphaNumericString, // 4 CA
}
// Fields
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct ReaderSoftwareVersion(pub SoftwareVersion);
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
/// Format "YYYYMMDDhhmm"
/// TODO: Build a generic date-time structure
/// TODO: Implement date parsing, e.g. with the chrono crate
pub struct ReaderSoftwareDate(pub AlphaNumericString);
}
/// # SESAM-Vitale configuration
/// N occurrences
pub mod group_64_sv_config {
use super::*;
/// Group 64 - SESAM-Vitale configuration
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct SVComponentsConfig {
pub id: ComponentID,
pub description: ComponentDescription,
pub version: ComponentVersion,
}
// Fields
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct ComponentID(pub NumericString);
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct ComponentDescription(pub AlphaNumericString);
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct ComponentVersion(pub AlphaNumericString);
}
/// # PC/SC reader configuration
/// N occurrences
pub mod group_67_pcsc_config {
use super::*;
/// Group 67 - PC/SC reader configuration
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct PCSCReaderConfig {
pub name: ReaderName,
pub card_type: CardType,
}
// Fields
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct ReaderName(pub AlphaNumericString);
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct CardType(pub NumericString);
}
#[cfg(test)]
mod tests {
use deku::DekuContainerRead as _;
use crate::fsv_parsing::blocks::{BlockHeader, Data, DataBlock, DataGroup};
mod data {
pub const BUFFER: &[u8] = &[
0, 60, // Block ID
15, // Block Size
4, // SSV Version
48, 55, 50, 48, // 0720
4, // GALSS Version
48, 48, 48, 48, // 0000
4, // PSS Version
48, 48, 48, 48, // 0000
0, 67, // Block ID
42, // Block Size
39, // PCSC Reader Name
71, 101, 109, 97, 108, 116, 111, 32, 80, 67,
32, 84, 119, 105, 110, 32, 82, 101, 97, 100,
101, 114, 32, 40, 54, 52, 53, 68, 57, 52,
67, 51, 41, 32, 48, 48, 32, 48, 48,
1, // Card type
50,
0, 64, // Block ID
44, // Block Size
2, // Component ID
49, 49,
35, // Component label
86, 69, 82, 83, 73, 79, 78, 32, 68, 69,
32, 76, 65, 32, 66, 73, 66, 76, 73, 79,
84, 72, 69, 81, 85, 69, 32, 68, 85, 32,
71, 65, 76, 83, 83,
4, // Component version
48, 48, 48, 48,
0, 64, // Block ID
69, // Block Size
3, // Component ID
49, 53, 49,
27, // Component label
73, 68, 69, 78, 84, 73, 70, 73, 65, 78,
84, 32, 85, 78, 73, 81, 85, 69, 32, 68,
85, 32, 80, 79, 83, 84, 69,
36, // Component version
50, 54, 57, 102, 99, 55, 101, 98, 45, 49,
100, 56, 53, 45, 52, 55, 57, 51, 45, 98,
55, 48, 101, 45, 51, 55, 49, 99, 51, 56,
102, 57, 49, 54, 51, 52,
0, 61, // Block ID
62, // Block Size
17, // Manufacturer Name
84, 69, 83, 84, 32, 77, 65, 78, 85, 70,
65, 67, 84, 85, 82, 69, 82,
4, // Reader Type
84, 69, 83, 84,
4, // Serial Number
84, 69, 83, 84,
2, // OS
79, 83,
2, // Software Count
48, 49,
4, // Software Name
84, 69, 83, 84,
4, // Software Version
48, 49, 53, 53,
12, // Reader Datetime
50, 48, 50, 52, // 2024
48, 54, 50, 53, // 06-25
49, 50, 52, 53, // 12:45
4, // Software Checksum
49, 50, 51, 52,
];
}
#[test]
fn test_lire_config_first_header() {
// env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
let buffer = data::BUFFER;
let offset: usize = 0;
let ((_rest, _offset), block_header) = BlockHeader::from_bytes((buffer, offset)).unwrap();
assert_eq!(block_header.group_id.0, 60, "Header ID");
// assert_eq!(block_header.data_size, 15, "Header Size");
}
#[test]
fn test_lire_config_first_block() {
// env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
let buffer = data::BUFFER;
let offset: usize = 0;
let ((_rest, _offset), block) = DataBlock::from_bytes((buffer, offset)).unwrap();
let header = block.header;
let content = match block.content {
DataGroup::LireConfig_Group60_ConfigHeader(content) => content,
_ => panic!("Unexpected data block type"),
};
assert_eq!(header.group_id.0, 60, "Header ID");
assert_eq!(header.data_size, 15, "Header Size");
assert_eq!(content.ssv_version.0.version, "07", "SSV Version");
assert_eq!(content.ssv_version.0.revision, "20", "SSV Revision");
assert_eq!(content.galss_version.0.to_string(), "00.00", "GALSS Version");
assert_eq!(content.pss_version.0.to_string(), "00.00", "PSS Version");
}
#[test]
fn test_lire_config_all() {
// env_logger::init(); // Uncomment and run with RUST_LOG=trace for deku debugging
let buffer = data::BUFFER;
let offset: usize = 0;
let ((_rest, _offset), data) = Data::from_bytes((buffer, offset)).unwrap();
let blocks = data.blocks;
assert_eq!(blocks.len(), 5, "Number of blocks");
for block in blocks {
match block.content {
DataGroup::LireConfig_Group60_ConfigHeader(content) => {
assert_eq!(block.header.group_id.0, 60, "Header ID");
assert_eq!(block.header.data_size, 15, "Header Size");
assert_eq!(content.ssv_version.0.version, "07", "SSV Version");
assert_eq!(content.ssv_version.0.revision, "20", "SSV Revision");
assert_eq!(content.galss_version.0.to_string(), "00.00", "GALSS Version");
assert_eq!(content.pss_version.0.to_string(), "00.00", "PSS Version");
},
DataGroup::LireConfig_Group61_ReaderConfig(content) => {
assert_eq!(block.header.group_id.0, 61, "Header ID");
assert_eq!(block.header.data_size, 62, "Header Size");
assert_eq!(content.manufacturer_name.0, "TEST MANUFACTURER", "Manufacturer Name");
assert_eq!(content.reader_type.0, "TEST", "Reader Type");
assert_eq!(content.serial_number.0, "TEST", "Serial Number");
assert_eq!(content.os.0, "OS", "OS");
assert_eq!(content.software_count.0, "01", "Software Count");
assert_eq!(content.software_name.0, "TEST", "Software Name");
assert_eq!(content.software_version.0.version, "01", "Software Version");
assert_eq!(content.software_version.0.revision, "55", "Software Revision");
assert_eq!(content.reader_datetime.0.0, "202406251245", "Reader Datetime");
assert_eq!(content.software_checksum.0, "1234", "Software Checksum");
},
DataGroup::LireConfig_Group64_SVComponentsConfig(content) => {
assert_eq!(block.header.group_id.0, 64, "Header ID");
match content.id.0.0.as_str() {
"11" => {
assert_eq!(block.header.data_size, 44, "Header Size");
assert_eq!(content.id.0.0, "11", "G64 - 11 : Component ID");
assert_eq!(content.description.0.0, "VERSION DE LA BIBLIOTHEQUE DU GALSS", "G64 - 11 : Component Description");
assert_eq!(content.version.0.0, "0000", "G64 - 11 : Component Version");
},
"151" => {
assert_eq!(block.header.data_size, 69, "Header Size");
assert_eq!(content.id.0.0, "151", "G64 - 151 : Component ID");
assert_eq!(content.description.0.0, "IDENTIFIANT UNIQUE DU POSTE", "G64 - 151 : Component Description");
assert_eq!(content.version.0.0, "269fc7eb-1d85-4793-b70e-371c38f91634", "G64 - 151 : Component Version");
},
_ => panic!("Unexpected Component ID"),
}
},
DataGroup::LireConfig_Group67_PCSCReaderConfig(content) => {
assert_eq!(block.header.group_id.0, 67, "Header ID");
assert_eq!(block.header.data_size, 42, "Header Size");
assert_eq!(content.name.0.0, "Gemalto PC Twin Reader (645D94C3) 00 00", "Reader Name");
assert_eq!(content.card_type.0.0, "2", "Card Type");
},
}
}
}
}

@ -0,0 +1,45 @@
use deku::ctx::BitSize;
use deku::prelude::*;
use deku::reader::ReaderRet;
use deku::{reader::Reader, DekuError};
pub mod blocks;
pub mod groups;
pub mod prelude;
pub use blocks::Data;
/// # Read the size of a FSV block / field
/// Documentation: SSV Documentation, page 29
fn size_read<R: std::io::Read + std::io::Seek>(reader: &mut Reader<R>) -> Result<u64, DekuError> {
let size_bytes = u8::from_reader_with_ctx(reader, BitSize(8))?;
let size: u64 = if size_bytes & 0b1000_0000 == 0 {
// If the Most Significant Bit is 0, the size is encoded on 7 bits
size_bytes.into()
} else {
// Else, the 7 following bits indicate the number of bytes of the block containing the size
let size_block_len: usize = (size_bytes & 0b0111_1111).into();
if size_block_len > 4 {
return Err(DekuError::Parse(format!("Unexpected size block length: {}", size_block_len).into()));
};
// The block containing the size is encoded on 1 to 4 bytes
let buffer: &mut [u8; 4] = &mut [0; 4];
let write_offset = 4 - size_block_len;
match reader.read_bytes(size_block_len, &mut buffer[write_offset..])? {
ReaderRet::Bits(_bit_vec) => return Err(DekuError::Parse("Unexpected result reading size bytes: got bits".into())),
ReaderRet::Bytes => u32::from_be_bytes(*buffer).into(),
}
};
Ok(size)
}
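// Worked example of this encoding (for illustration): a single byte 0x40 has the MSB
// clear, so the size is 0x40 = 64; the byte 0x82 has the MSB set and announces 2 size
// bytes, so the sequence 0x82 0x01 0x40 yields the big-endian value 0x0140 = 320
// (see `test_size_read` in `blocks.rs`).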
/// # Map bytes to a lossy string
/// This function maps bytes to a string, replacing invalid UTF-8 sequences with the replacement character
/// Example: [0x41, 0x42] -> "AB"
/// Example: [48, 49, 50, 51] -> "0123"
fn map_bytes_to_lossy_string(data: &[u8]) -> Result<String, DekuError> {
// let data = data.to_vec();
let version: String = String::from_utf8_lossy(data).to_string();
Ok(version)
}

@ -0,0 +1,6 @@
/*! Crate prelude
[What is a prelude?](std::prelude)
*/
pub use deku::DekuContainerRead as _;
pub use super::Data;

@ -1 +1,2 @@
-mod ssv;
+pub mod fsv_parsing;
+pub mod ssv;

@ -13,7 +13,9 @@ use fsv_sys::{
};
mod errors_ssv;
use errors_ssv::SSVErrorCodes;
+use crate::fsv_parsing::prelude::*;
#[derive(Error, Debug)]
pub enum Error {
@ -24,7 +26,7 @@ pub enum Error {
}
/// Enum to hold the different versions of the SSV library
-enum SsvLibraryVersion {
+pub enum SsvLibraryVersion {
V1_40_13(SSVLibrary<V1_40_13>),
V1_40_14(SSVLibrary<V1_40_14>),
}
@ -35,7 +37,7 @@ pub struct SSV {
}
impl SSV {
-fn new(version: SupportedFsvVersion) -> Result<Self, Error> {
+pub fn new(version: SupportedFsvVersion) -> Result<Self, Error> {
let library = match version {
SupportedFsvVersion::V1_40_13 => {
let lib_path = get_library_path(&version);
@ -119,31 +121,32 @@ impl SSV {
/// # Get the configuration of the SSV library
/// Implement: SSV_LireConfig
-pub fn get_config(&self) -> Result<(), Error> {
+pub fn get_config(&self) -> Result<Data, Error> {
-let mut buffer_ptr: *mut libc::c_void = ptr::null_mut();
+let mut out_buffer_ptr: *mut libc::c_void = ptr::null_mut();
-let mut size: libc::size_t = 0;
+let mut out_buffer_size: libc::size_t = 0;
let result = match &self.library {
SsvLibraryVersion::V1_40_13(library) => {
-unsafe { library.ssv_lire_config(&mut buffer_ptr, &mut size) }?
+unsafe { library.ssv_lire_config(&mut out_buffer_ptr, &mut out_buffer_size) }?
},
SsvLibraryVersion::V1_40_14(library) => {
-unsafe { library.ssv_lire_config(&mut buffer_ptr, &mut size) }?
+unsafe { library.ssv_lire_config(&mut out_buffer_ptr, &mut out_buffer_size) }?
},
};
if result != 0 {
// Free memory
-unsafe { libc::free(buffer_ptr) };
+unsafe { libc::free(out_buffer_ptr) };
let error = SSVErrorCodes::from(result);
return Err(Error::SSVError(error));
}
-// Print 10 bytes of the buffer
+// Parse the buffer into a Data struct
-let buffer = unsafe { std::slice::from_raw_parts(buffer_ptr as *const u8, 10) };
+let buffer = unsafe { std::slice::from_raw_parts(out_buffer_ptr as *const u8, out_buffer_size) };
-println!("{:?}", buffer);
+let (_rest, config_blocks) = Data::from_bytes((buffer, 0)).unwrap();
// Free memory
-unsafe { libc::free(buffer_ptr) };
+unsafe { libc::free(out_buffer_ptr) };
-Ok(())
+Ok(config_blocks)
}
}
@ -152,7 +155,9 @@ mod tests {
use std::env;
use utils::config::load_config;
-use anyhow::Result;
+use anyhow::{bail, Result};
+use crate::fsv_parsing::blocks::DataGroup;
use super::*;
@ -160,7 +165,7 @@ mod tests {
use super::*;
pub fn init() -> Result<SSV> {
-load_config().unwrap();
+load_config(None)?;
let sesam_ini_path = env::var("SESAM_INI_PATH").expect("SESAM_INI_PATH must be set");
let lib = SSV::new(SupportedFsvVersion::V1_40_13)?;
lib.init_library(&sesam_ini_path)?;
@ -169,12 +174,16 @@ mod tests {
}
#[test]
+#[ignore="Not working with other tests using SSV library in parallel - Need to fix"]
+// We should implement a way to initialize the library only once
+// Or implement them sequentially with [serial_test crate](https://docs.rs/serial_test/latest/serial_test)
fn test_init_library() -> Result<()> {
setup::init()?;
Ok(())
}
#[test]
+#[ignore="WARNING: Read the card with PIN 1234 - Risk of blocking the card"]
fn test_read_professional_card_good_pin() -> Result<()> {
let lib = setup::init()?;
let pin_code = "1234";
@ -182,8 +191,8 @@ mod tests {
Ok(())
}
-#[ignore]
#[test]
+#[ignore="WARNING: Read the card with PIN 0000 - Risk of blocking the card"]
fn test_read_professional_card_bad_pin() -> Result<()> {
let lib = setup::init()?;
let pin_code = "0000";
@ -194,15 +203,25 @@ mod tests {
Error::SSVError(err) => {
assert_eq!(err as SSVErrorCodes, SSVErrorCodes::CPSPinWrong);
},
-_ => panic!("Error type is not SSVError"),
+_ => bail!("Error type is not SSVError"),
}
Ok(())
}
#[test]
+// #[ignore="Needs a valid FSV installation"]
fn test_get_config() -> Result<()> {
let lib = setup::init()?;
-lib.get_config()?;
+let data = lib.get_config()?;
+// I don't know what to assert here ...
+let header_group = data.blocks.first().unwrap();
+assert_eq!(header_group.header.group_id.0, 60);
+let header_content = match &header_group.content {
+DataGroup::LireConfig_Group60_ConfigHeader(content) => { content },
+_ => bail!("Wrong group type"),
+};
+assert_eq!(header_content.ssv_version.0.version, "07");
+assert_eq!(header_content.ssv_version.0.revision, "20");
Ok(())
}
}