From f186fb20ce286cd2a1b4c01e948060fcfaadc65a Mon Sep 17 00:00:00 2001 From: Florian Briand Date: Mon, 7 Oct 2024 12:11:08 +0200 Subject: [PATCH] feat: Take the size into account for Block reading, to handle missing fields --- crates/fsv/src/fsv_parsing/blocks.rs | 29 ++++++---- crates/fsv/src/fsv_parsing/mod.rs | 87 ++++++++++++++++++++++++++++ 2 files changed, 106 insertions(+), 10 deletions(-) diff --git a/crates/fsv/src/fsv_parsing/blocks.rs b/crates/fsv/src/fsv_parsing/blocks.rs index 46af3e8..6dd2372 100644 --- a/crates/fsv/src/fsv_parsing/blocks.rs +++ b/crates/fsv/src/fsv_parsing/blocks.rs @@ -1,6 +1,6 @@ use deku::deku_derive; -use super::{ groups, size_read }; +use super::{ groups, size_read, read_with_size }; #[derive(Debug, PartialEq)] #[deku_derive(DekuRead)] @@ -19,7 +19,7 @@ pub struct Data { /// The `DataBlock` are the main structures inside a `Data` struct pub struct DataBlock { pub header: BlockHeader, - #[deku(ctx = "header.group_id.0")] + #[deku(ctx = "header.group_id.0, header.data_size")] pub content: DataGroup, } @@ -32,7 +32,7 @@ pub struct BlockHeader { pub group_id: GroupId, #[deku(reader = "size_read(deku::reader)")] - pub data_size: u64, // This field is not really used, but we have to parse it to move the reader cursor + pub data_size: u64, } #[derive(Debug, PartialEq)] @@ -51,19 +51,29 @@ pub struct GroupId( /// correct data structure, able to parse the data contained in #[derive(Debug, PartialEq)] #[deku_derive(DekuRead)] -#[deku(ctx = "group_id: u16", id = "group_id")] +#[deku(ctx = "group_id: u16, data_size: u64", id = "group_id")] #[allow(non_camel_case_types)] pub enum DataGroup { #[deku(id = 1)] - LireCartePS_Group1_Holder(groups::ssv_lire_carte_ps::group_1_holder::Holder), + LireCartePS_Group1_Holder( + #[deku(reader = "read_with_size(deku::reader, data_size as usize)")] + groups::ssv_lire_carte_ps::group_1_holder::Holder), #[deku(id = 60)] - 
LireConfig_Group60_ConfigHeader(groups::ssv_lire_config::group_60_header_config::ConfigHeader),
+    LireConfig_Group60_ConfigHeader(
+        #[deku(reader = "read_with_size(deku::reader, data_size as usize)")]
+        groups::ssv_lire_config::group_60_header_config::ConfigHeader),
     #[deku(id = 61)]
-    LireConfig_Group61_ReaderConfig(groups::ssv_lire_config::group_61_reader_config::ReaderConfig),
+    LireConfig_Group61_ReaderConfig(
+        #[deku(reader = "read_with_size(deku::reader, data_size as usize)")]
+        groups::ssv_lire_config::group_61_reader_config::ReaderConfig),
     #[deku(id = 64)]
-    LireConfig_Group64_SVComponentsConfig(groups::ssv_lire_config::group_64_sv_config::SVComponentsConfig),
+    LireConfig_Group64_SVComponentsConfig(
+        #[deku(reader = "read_with_size(deku::reader, data_size as usize)")]
+        groups::ssv_lire_config::group_64_sv_config::SVComponentsConfig),
     #[deku(id = 67)]
-    LireConfig_Group67_PCSCReaderConfig(groups::ssv_lire_config::group_67_pcsc_config::PCSCReaderConfig),
+    LireConfig_Group67_PCSCReaderConfig(
+        #[deku(reader = "read_with_size(deku::reader, data_size as usize)")]
+        groups::ssv_lire_config::group_67_pcsc_config::PCSCReaderConfig),
 }
 
 #[cfg(test)]
@@ -221,5 +231,4 @@ mod tests {
         let (_rest, val) = deku_testing::DekuTestWithGroupId::from_bytes((buffer, 0)).unwrap();
         assert_eq!(val.group_id.0, 1910, "EX2: ID");
     }
-
 }
\ No newline at end of file
diff --git a/crates/fsv/src/fsv_parsing/mod.rs b/crates/fsv/src/fsv_parsing/mod.rs
index 26d749f..6ad7b91 100644
--- a/crates/fsv/src/fsv_parsing/mod.rs
+++ b/crates/fsv/src/fsv_parsing/mod.rs
@@ -33,6 +33,26 @@ fn size_read<R: std::io::Read>(reader: &mut Reader<R>) -> Result<u64, DekuError> {
     Ok(size)
 }
 
+/// Deku Reader taking an expected size into account
+/// This function limits the reading to the size given in input
+fn read_with_size<T, R: std::io::Read>(
+    reader: &mut Reader<R>,
+    size: usize
+) -> Result<T, DekuError>
+    where T: for<'a> DekuContainerRead<'a>
+{
+    let max_size = core::mem::size_of::<T>();
+    let mut buf = vec![0; max_size];
+    let buf: &mut [u8] = &mut buf;
+    let ret = 
reader.read_bytes(size, buf)?;
+    let (_rest, block) = match ret {
+        ReaderRet::Bytes => {
+            T::from_bytes((buf, 0))?
+        },
+        _ => return Err(DekuError::Parse("Unexpected result reading size bytes: got bits".into())),
+    };
+    Ok(block)
+}
 
 /// # Map bytes to a lossy string
 /// This function is used to map bytes to a string, ignoring invalid UTF-8 characters
@@ -43,3 +63,70 @@ fn map_bytes_to_lossy_string(data: &[u8]) -> Result<String, DekuError> {
     let version: String = String::from_utf8_lossy(data).to_string();
     Ok(version)
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_read_with_size_reader() {
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        struct Data {
+            #[deku(read_all)]
+            pub blocks: Vec<Block>,
+        }
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        struct Block {
+            pub size: u8,
+            pub id: u8,
+            #[deku(ctx = "*id,*size-1")] // size-1 to remove the ID size
+            pub data: BlockType,
+        }
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        #[deku(ctx = "id: u8, size: u8", id = "id")]
+        enum BlockType {
+            #[deku(id = 1)]
+            Block1(
+                #[deku(reader = "read_with_size(deku::reader, size as usize)")]
+                Block1
+            ),
+            #[deku(id = 2)]
+            Block2(
+                #[deku(reader = "read_with_size(deku::reader, size as usize)")]
+                Block2
+            ),
+        }
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        struct Block1 {
+            pub field1_size: u8,
+            pub field1: u16,
+            pub field2_size: u8,
+            pub field2: u64,
+        }
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        struct Block2;
+
+        let buffer = &[
+            // 1st Block, type 1
+            4, // Size: 4
+            1, // ID: 1
+            2, 0x12, 0x34, // Field 1, size 2
+            // No Field 2
+            // 2nd Block, type 1
+            6, // Size: 6
+            1, // ID: 1
+            1, 0x56, // Field 1, size 1 (cast into u16)
+            2, 0x78, 0x9A // Field 2, size 2
+        ];
+
+        let (_rest, val) = Data::from_bytes((buffer, 0)).unwrap();
+        assert_eq!(val.blocks.len(), 2);
+        assert_eq!(val.blocks[0].size, 4);
+        assert_eq!(val.blocks[1].size, 6);
+    }
+}