feat: Take the size into account for Block reading, to handle missing fields

Florian Briand 2024-10-07 12:11:08 +02:00
parent a53360d114
commit f186fb20ce
Signed by: florian_briand
GPG Key ID: CC981B9E6B98E70B
2 changed files with 106 additions and 10 deletions
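Before the diffs, the idea in one place: each block header carries a data_size, and the new read_with_size helper reads exactly that many bytes from the stream into a zero-filled buffer sized for the whole target struct. Fields the block does not provide therefore decode as zeroes, and the reader never consumes bytes belonging to the next block. A minimal std-only sketch of that mechanism (read_block_payload, wire_size and struct_size are illustrative names, not the repo's code):

    use std::io::Read;

    /// Illustrative only: read `wire_size` bytes of one block into a buffer
    /// large enough for the whole struct; bytes the block does not provide
    /// stay zeroed, so missing trailing fields decode as 0.
    fn read_block_payload<R: Read>(
        reader: &mut R,
        wire_size: usize,
        struct_size: usize,
    ) -> std::io::Result<Vec<u8>> {
        let mut buf = vec![0u8; struct_size.max(wire_size)];
        reader.read_exact(&mut buf[..wire_size])?; // consume exactly `wire_size` bytes
        Ok(buf)
    }

    fn main() -> std::io::Result<()> {
        // 3 bytes on the wire for a struct that could hold 8; 0xFF belongs
        // to the next block and must not be consumed.
        let stream = [0x12u8, 0x34, 0x56, 0xFF];
        let mut cursor = std::io::Cursor::new(&stream[..]);
        let payload = read_block_payload(&mut cursor, 3, 8)?;
        assert_eq!(payload, [0x12, 0x34, 0x56, 0, 0, 0, 0, 0]);
        assert_eq!(cursor.position(), 3); // stopped at the block boundary
        Ok(())
    }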


@@ -1,6 +1,6 @@
 use deku::deku_derive;
 
-use super::{ groups, size_read };
+use super::{ groups, size_read, read_with_size };
 
 #[derive(Debug, PartialEq)]
 #[deku_derive(DekuRead)]
@@ -19,7 +19,7 @@ pub struct Data {
 /// The `DataBlock` are the main structures inside a `Data` struct
 pub struct DataBlock {
     pub header: BlockHeader,
-    #[deku(ctx = "header.group_id.0")]
+    #[deku(ctx = "header.group_id.0, header.data_size")]
     pub content: DataGroup,
 }
@@ -32,7 +32,7 @@ pub struct BlockHeader {
     pub group_id: GroupId,
     #[deku(reader = "size_read(deku::reader)")]
-    pub data_size: u64, // This field is not really used, but we have to parse it to move the reader cursor
+    pub data_size: u64,
 }
 
 #[derive(Debug, PartialEq)]
@@ -51,19 +51,29 @@ pub struct GroupId(
 /// correct data structure, able to parse the data contained in
 #[derive(Debug, PartialEq)]
 #[deku_derive(DekuRead)]
-#[deku(ctx = "group_id: u16", id = "group_id")]
+#[deku(ctx = "group_id: u16, data_size: u64", id = "group_id")]
 #[allow(non_camel_case_types)]
 pub enum DataGroup {
     #[deku(id = 1)]
-    LireCartePS_Group1_Holder(groups::ssv_lire_carte_ps::group_1_holder::Holder),
+    LireCartePS_Group1_Holder(
+        #[deku(reader = "read_with_size(deku::reader, data_size as usize)")]
+        groups::ssv_lire_carte_ps::group_1_holder::Holder),
     #[deku(id = 60)]
-    LireConfig_Group60_ConfigHeader(groups::ssv_lire_config::group_60_header_config::ConfigHeader),
+    LireConfig_Group60_ConfigHeader(
+        #[deku(reader = "read_with_size(deku::reader, data_size as usize)")]
+        groups::ssv_lire_config::group_60_header_config::ConfigHeader),
     #[deku(id = 61)]
-    LireConfig_Group61_ReaderConfig(groups::ssv_lire_config::group_61_reader_config::ReaderConfig),
+    LireConfig_Group61_ReaderConfig(
+        #[deku(reader = "read_with_size(deku::reader, data_size as usize)")]
+        groups::ssv_lire_config::group_61_reader_config::ReaderConfig),
     #[deku(id = 64)]
-    LireConfig_Group64_SVComponentsConfig(groups::ssv_lire_config::group_64_sv_config::SVComponentsConfig),
+    LireConfig_Group64_SVComponentsConfig(
+        #[deku(reader = "read_with_size(deku::reader, data_size as usize)")]
+        groups::ssv_lire_config::group_64_sv_config::SVComponentsConfig),
     #[deku(id = 67)]
-    LireConfig_Group67_PCSCReaderConfig(groups::ssv_lire_config::group_67_pcsc_config::PCSCReaderConfig),
+    LireConfig_Group67_PCSCReaderConfig(
+        #[deku(reader = "read_with_size(deku::reader, data_size as usize)")]
+        groups::ssv_lire_config::group_67_pcsc_config::PCSCReaderConfig),
 }
 
 #[cfg(test)]
@@ -221,5 +231,4 @@ mod tests {
         let (_rest, val) = deku_testing::DekuTestWithGroupId::from_bytes((buffer, 0)).unwrap();
         assert_eq!(val.group_id.0, 1910, "EX2: ID");
     }
 }
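The second changed file, below, adds the read_with_size helper itself. For readers new to deku, here is the context-plus-id dispatch the enum above relies on, condensed into a self-contained sketch (hypothetical Packet/Body types and field names; assumes the deku version the repo already uses, with the derives written in the plain #[derive] form):

    use deku::prelude::*;

    #[derive(Debug, PartialEq, DekuRead)]
    struct Packet {
        kind: u8,
        len: u8,
        // Forward header fields into the enum's context, exactly as
        // DataBlock forwards `header.group_id.0, header.data_size`.
        #[deku(ctx = "*kind, *len")]
        body: Body,
    }

    #[derive(Debug, PartialEq, DekuRead)]
    #[deku(ctx = "kind: u8, len: u8", id = "kind")] // variant chosen by `kind`
    enum Body {
        #[deku(id = 1)]
        Fixed(u16),
        #[deku(id = 2)]
        Variable(#[deku(count = "len")] Vec<u8>),
    }

    fn main() {
        // kind = 2 selects Body::Variable; len = 3 sizes its payload.
        let bytes: &[u8] = &[2, 3, 0xAA, 0xBB, 0xCC];
        let (_rest, pkt) = Packet::from_bytes((bytes, 0)).unwrap();
        assert_eq!(pkt.body, Body::Variable(vec![0xAA, 0xBB, 0xCC]));
    }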


@@ -33,6 +33,26 @@ fn size_read<R: std::io::Read + std::io::Seek>(reader: &mut Reader<R>) -> Result
     Ok(size)
 }
 
+/// Deku reader taking an expected size into account.
+/// This function limits the read to the given size, so a block may carry
+/// fewer bytes than its target struct holds; the remainder stays zeroed.
+fn read_with_size<T, R: std::io::Read + std::io::Seek>(
+    reader: &mut Reader<R>,
+    size: usize
+) -> Result<T, DekuError>
+    where T: for<'a> DekuContainerRead<'a>
+{
+    // Zero-filled buffer sized for the whole struct (assumes
+    // `size <= size_of::<T>()`); only the first `size` bytes
+    // come from the stream, the rest decode as zeroes.
+    let max_size = core::mem::size_of::<T>();
+    let mut buf = vec![0; max_size];
+    let buf: &mut [u8] = &mut buf;
+    let ret = reader.read_bytes(size, buf)?;
+    let (_rest, block) = match ret {
+        ReaderRet::Bytes => T::from_bytes((buf, 0))?,
+        _ => return Err(DekuError::Parse("Unexpected result reading size bytes: got bits".into())),
+    };
+    Ok(block)
+}
+
 /// # Map bytes to a lossy string
 /// This function is used to map bytes to a string, ignoring invalid UTF-8 characters
@@ -43,3 +63,70 @@ fn map_bytes_to_lossy_string(data: &[u8]) -> Result<String, DekuError> {
     let version: String = String::from_utf8_lossy(data).to_string();
     Ok(version)
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_read_with_size_reader() {
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        struct Data {
+            #[deku(read_all)]
+            pub blocks: Vec<Block>,
+        }
+
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        struct Block {
+            pub size: u8,
+            pub id: u8,
+            #[deku(ctx = "*id, *size - 1")] // size - 1 to exclude the ID byte
+            pub data: BlockType,
+        }
+
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        #[deku(ctx = "id: u8, size: u8", id = "id")]
+        enum BlockType {
+            #[deku(id = 1)]
+            Block1(
+                #[deku(reader = "read_with_size(deku::reader, size as usize)")]
+                Block1
+            ),
+            #[deku(id = 2)]
+            Block2(
+                #[deku(reader = "read_with_size(deku::reader, size as usize)")]
+                Block2
+            ),
+        }
+
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        struct Block1 {
+            pub field1_size: u8,
+            pub field1: u16,
+            pub field2_size: u8,
+            pub field2: u64,
+        }
+
+        #[derive(Debug, PartialEq)]
+        #[deku_derive(DekuRead)]
+        struct Block2;
+
+        let buffer = &[
+            // 1st Block, type 1
+            4,             // Size: 4
+            1,             // ID: 1
+            2, 0x12, 0x34, // Field 1, size 2
+                           // No Field 2 (missing fields decode as zero)
+            // 2nd Block, type 1
+            6,             // Size: 6
+            1,             // ID: 1
+            1, 0x56,       // Field 1, size 1 (cast into u16)
+            2, 0x78, 0x9A  // Field 2, size 2
+        ];
+
+        let (_rest, val) = Data::from_bytes((buffer, 0)).unwrap();
+        assert_eq!(val.blocks.len(), 2);
+        assert_eq!(val.blocks[0].size, 4);
+        assert_eq!(val.blocks[1].size, 6);
+    }
+}
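One aside on the untouched context in the second file: map_bytes_to_lossy_string leans on std's String::from_utf8_lossy, which substitutes U+FFFD for invalid UTF-8 sequences instead of failing, so version strings with stray bytes still parse. A one-line illustration:

    fn main() {
        let bytes = [0x76, 0x31, 0xFF, 0x32]; // "v1", one invalid byte, "2"
        assert_eq!(String::from_utf8_lossy(&bytes), "v1\u{FFFD}2");
    }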