use bitvec::index::BitIdx;
use std::{error::Error, vec::Vec};
use deku::{
    bitvec::{BitStore, Msb0},
    ctx::ByteSize,
    deku_derive,
    reader::{Reader, ReaderRet},
    DekuError, DekuReader,
};

#[deku_derive(DekuRead)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
#[deku(endian = "big")]
pub struct GroupId(u16);

trait MapToDekuParseError<T> {
    fn map_to_deku_parse_error(self) -> Result<T, DekuError>;
}

impl<T, E: Error> MapToDekuParseError<T> for Result<T, E> {
    fn map_to_deku_parse_error(self) -> Result<T, DekuError> {
        self.map_err(|e| DekuError::Parse(e.to_string().into()))
    }
}

// To parse the data:
// - allocate the multiple buffers
// - chain them to make a single buffer
// - use the read_size function to get a size
// - use the take method to limit the number of bytes read
// - use the DekuReader implementation on each struct/enum to destructure it
// - do this recursively until there is no more data
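// A minimal sketch of that plan, assuming two hypothetical memory-zone
// buffers `zone_a` and `zone_b` whose concatenation holds exactly one
// DataBlock (`DataBlock` is defined further down); not the final implementation.
#[allow(dead_code)]
fn parse_across_zones(zone_a: &[u8], zone_b: &[u8]) -> Result<DataBlock, DekuError> {
    use std::io::Read;
    // Chain the zones so a DataBlock cut off by a zone boundary reads straight through.
    let mut combined = zone_a.chain(zone_b);
    let mut reader = Reader::new(&mut combined);
    DataBlock::from_reader_with_ctx(&mut reader, ())
}
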
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct DekuDataField {
    #[deku(reader = "read_size(deku::reader)")]
    data_size: ByteSize,
    #[deku(bytes_read = "data_size.0")]
    pub data: Vec<u8>,
}
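
// On the wire, a DekuDataField is a size prefix followed by that many raw
// bytes: e.g. the (hypothetical) sequence 0x03 0xAA 0xBB 0xCC decodes to
// data_size = 3 and data = [0xAA, 0xBB, 0xCC].
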
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct BlockHeader {
    pub group_id: GroupId,
    #[deku(reader = "read_size(deku::reader)")]
    pub data_size: ByteSize,
}

// Memory has three nested concepts:
//   Memory Zone(s) -contain-> DataBlock(s) -contain-> DataField(s)
// DataBlocks (and DataFields) can be cut off by the end of a memory zone;
// the data then continues on the following memory zone.
//
// The spec indicates a DataBlock can be very large (up to 4GB).
// In that case, we can use memmap2 to back the data with the disk.
#[deku_derive(DekuRead)]
#[derive(Debug, PartialEq)]
pub struct DataBlock {
    pub header: BlockHeader,
    #[deku(bytes_read = "header.data_size.0")]
    pub data: Vec<DekuDataField>,
}
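
// A usage sketch with hypothetical bytes laid out per the structs above:
// group_id = 0x0001, block size = 4, then one field of size 3.
#[cfg(test)]
mod data_block_tests {
    use super::*;
    use deku::DekuContainerRead;

    #[test]
    fn parses_a_block_with_one_field() {
        let bytes = [0x00, 0x01, 0x04, 0x03, 0xAA, 0xBB, 0xCC];
        let (_rest, block) = DataBlock::from_bytes((&bytes, 0)).unwrap();
        assert_eq!(block.header.group_id, GroupId(0x0001));
        assert_eq!(block.header.data_size.0, 4);
        assert_eq!(block.data.len(), 1);
        assert_eq!(block.data[0].data, vec![0xAA, 0xBB, 0xCC]);
    }
}
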
fn read_size<R: std::io::Read>(reader: &mut Reader<R>) -> Result<ByteSize, DekuError> {
    let first_byte: u8 = u8::from_reader_with_ctx(reader, ())?;
    let is_length_expanded = first_byte.get_bit::<Msb0>(BitIdx::new(0).map_to_deku_parse_error()?);
    match is_length_expanded {
        true => {
            let size_of_data_size: ByteSize = ByteSize((first_byte & 0b0111_1111) as usize);
            if size_of_data_size.0 > 4 {
                return Err(DekuError::Parse(
                    "Size of the length encoding is > 4, this is not normal. Probable parsing error"
                        .to_string()
                        .into(),
                ));
            }
            // The buffer is at most 4 bytes; the write offset lets values encoded
            // on fewer than 4 bytes land in the low-order positions.
            let buffer: &mut [u8; 4] = &mut [0; 4];
            let write_offset = 4 - size_of_data_size.0;
            match reader.read_bytes(size_of_data_size.0, &mut buffer[write_offset..])? {
                ReaderRet::Bits(_bit_vec) => Err(DekuError::Parse(
                    "Got bits when trying to read bytes -> reader is unaligned, this is not normal."
                        .to_string()
                        .into(),
                )),
                ReaderRet::Bytes => Ok(ByteSize(u32::from_be_bytes(*buffer) as usize)),
            }
        }
        false => Ok(ByteSize(first_byte as usize)),
    }
}
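
// Sketches of the two size encodings accepted by read_size, assuming a
// deku Reader over an in-memory cursor (helper names are test-local).
#[cfg(test)]
mod read_size_tests {
    use super::*;

    fn read(bytes: &[u8]) -> Result<ByteSize, DekuError> {
        let mut cursor = std::io::Cursor::new(bytes);
        let mut reader = Reader::new(&mut cursor);
        read_size(&mut reader)
    }

    #[test]
    fn short_form_is_the_first_byte_itself() {
        // MSB clear -> the size is the first byte
        assert_eq!(read(&[0x05]).unwrap().0, 5);
    }

    #[test]
    fn expanded_form_reads_the_announced_byte_count() {
        // 0x82 = MSB set, length 2 -> size is the next 2 bytes, big endian
        assert_eq!(read(&[0x82, 0x01, 0x00]).unwrap().0, 256);
    }

    #[test]
    fn length_encodings_longer_than_4_bytes_are_rejected() {
        assert!(read(&[0x85, 0, 0, 0, 0, 0]).is_err());
    }
}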