I realized I do not fully understand how part of my code works. I have a macro that generates a struct dynamically, and I have a trait implemented for each type I want to use as a field type:
/// Conversion between a value and its binary wire representation.
pub trait BinaryConverter {
    /// Serializes `self` by appending its byte representation to `buffer`.
    fn write_into(&mut self, buffer: &mut Vec<u8>) -> Result<(), Error>;

    /// Deserializes a new value of the implementing type from `reader`.
    ///
    /// `read_from` is an associated function (no `self` receiver), so the
    /// concrete `Self` at a call site is chosen by type inference: the
    /// compiler looks at the type the surrounding expression is required
    /// to have (e.g. the declared type of a struct field being initialized)
    /// and dispatches to that type's implementation.
    fn read_from<R: BufRead>(reader: R) -> Result<Self, Error> where Self: Sized;
}
And I do not understand how the Rust compiler knows which type `read_from` should be called on:
let initial = Self {
    $(
        // Each field is either defaulted (if listed as dynamic) or read
        // from the binary stream.
        $field_name: if dynamic_fields.contains(&stringify!($field_name)) {
            // ...
            <$field_type>::default()
        } else {
            // Type inference resolves `Self` here: this expression sits in
            // the `$field_name:` position of a struct literal, so it must
            // evaluate to `$field_type`. `read_from` returns
            // `Result<Self, Error>`, hence the compiler unifies
            // `Self = $field_type` and calls
            // `<$field_type as BinaryConverter>::read_from`.
            $crate::traits::binary_converter::BinaryConverter::read_from(
                &mut reader
            ).unwrap()
        }
    ),*
};
Since I do not pass `&self`, just the reader, it is not clear to me how the receiver type is determined.
This is my macro:
/// Generates a packet struct plus binary (de)serialization helpers.
///
/// Optional leading `@option[...]` flags:
/// - `login_opcode=EXPR`  — header is the opcode written as a single `u8`.
/// - `world_opcode=EXPR`  — header is body size (`u16` big-endian) followed
///   by the opcode (`u32` little-endian).
/// - `compressed:EXPR`    — when true, the body after the header is
///   deflate-compressed.
/// - `dynamic_fields:...` — field names (as string literals) that are NOT
///   read from the stream in `from_binary`; they get `Default::default()`.
///
/// NOTE(review): `INCOMING_HEADER_LENGTH`, `OUTCOMING_OPCODE_LENGTH` and
/// `PacketOutcome` are not defined here; they must be in scope at the
/// expansion site — confirm against the defining module.
#[macro_export]
macro_rules! packet {
    (
        $(@option[login_opcode=$login_opcode_value:expr])?
        $(@option[world_opcode=$world_opcode_value:expr])?
        $(@option[compressed:$compressed_value:expr])?
        $(@option[dynamic_fields:$($dynamic_field:expr),*])?
        $(#[$outer:meta])*
        $vis:vis struct $PacketStruct:ident {
            $($field_vis:vis $field_name:ident: $field_type:ty),*$(,)?
        }
        $($PacketStructImpl: item)*
    ) => {
        $(#[$outer])*
        #[derive(Clone, Debug, Default)]
        $vis struct $PacketStruct {
            $($field_vis $field_name: $field_type),*
        }
        // Extra items passed after the struct body are emitted verbatim.
        $($PacketStructImpl)*
        impl $PacketStruct {
            /// Parses a packet struct out of `buffer`, skipping the header
            /// and (optionally) inflating a compressed body first.
            #[allow(dead_code)]
            pub fn from_binary(buffer: &Vec<u8>) -> Self {
                #![allow(unused_mut)]
                #![allow(unused_variables)]
                #![allow(unused_assignments)]
                // Default header size; overwritten below when the packet
                // declares a login opcode.
                let mut omit_bytes: usize = INCOMING_HEADER_LENGTH;
                $(
                    // Login packets: skip exactly the opcode's byte width.
                    omit_bytes = $login_opcode_value.to_le_bytes().len();
                )?
                $(
                    if $compressed_value {
                        // 4 bytes uncompressed + 2 bytes used by zlib
                        omit_bytes += 6;
                    }
                )?
                let mut internal_buffer: Vec<u8> = Vec::new();
                $(
                    // Inflate the body past the header when compressed.
                    if $compressed_value {
                        let data = &buffer[omit_bytes..];
                        let mut decoder = flate2::read::DeflateDecoder::new(data);
                        std::io::Read::read_to_end(&mut decoder, &mut internal_buffer).unwrap();
                    }
                )?
                // Shadow `buffer` with the effective body: decompressed
                // bytes if any, otherwise the raw bytes after the header.
                let buffer = if internal_buffer.is_empty() {
                    buffer[omit_bytes..].to_vec()
                } else {
                    internal_buffer
                };
                // Collect the names of fields declared dynamic so they can
                // be skipped during the read loop below.
                let mut dynamic_fields: Vec<&str> = vec![];
                $(
                    $(
                        for index in 0..$dynamic_field.len() {
                            dynamic_fields.push($dynamic_field[index]);
                        }
                    )*
                )?
                let mut reader = std::io::Cursor::new(&buffer);
                let initial = Self {
                    $(
                        $field_name: if dynamic_fields.contains(&stringify!($field_name)) {
                            <$field_type>::default()
                        } else {
                            // `Self` in `read_from`'s signature is inferred
                            // from the struct-literal field position: the
                            // expression must have type `$field_type`, so
                            // this resolves to
                            // `<$field_type as BinaryConverter>::read_from`.
                            BinaryConverter::read_from(
                                &mut reader
                            ).unwrap()
                        }
                    ),*
                };
                initial
            }
            /// Serializes all fields (in declaration order) into a body and
            /// prepends the packet header.
            #[allow(dead_code)]
            pub fn to_binary(&mut self) -> Vec<u8> {
                #![allow(unused_mut)]
                let mut body = Vec::new();
                $(
                    // Here dispatch is driven by the receiver: the first
                    // argument is `&mut $field_type`.
                    BinaryConverter::write_into(
                        &mut self.$field_name,
                        &mut body
                    ).unwrap();
                )*
                let header = Self::_build_header(&body);
                [header, body].concat()
            }
            /// Builds the header bytes for `body` according to whichever
            /// opcode option the packet was declared with.
            #[allow(unused_variables)]
            fn _build_header(body: &Vec<u8>) -> Vec<u8> {
                #![allow(unused_mut)]
                let mut header: Vec<u8> = Vec::new();
                $(
                    // Login packets: header is just the opcode as one byte.
                    byteorder::WriteBytesExt::write_u8(
                        &mut header,
                        $login_opcode_value as u8
                    ).unwrap();
                )?
                $(
                    // World packets: size (u16, big-endian) then opcode
                    // (u32, little-endian).
                    let size = body.len() + OUTCOMING_OPCODE_LENGTH;
                    byteorder::WriteBytesExt::write_u16::<byteorder::BigEndian>(
                        &mut header,
                        size as u16,
                    ).unwrap();
                    byteorder::WriteBytesExt::write_u32::<byteorder::LittleEndian>(
                        &mut header,
                        $world_opcode_value as u32
                    ).unwrap();
                )?
                header
            }
            // NOTE(review): if BOTH login_opcode and world_opcode options
            // are supplied, `unpack` is generated twice and the expansion
            // fails to compile — presumably the options are intended to be
            // mutually exclusive; confirm.
            $(
                /// Returns `(opcode, serialized bytes)` for a login packet.
                #[allow(dead_code)]
                pub fn unpack(&mut self) -> PacketOutcome {
                    ($login_opcode_value as u32, self.to_binary())
                }
            )?
            $(
                /// Returns `(opcode, serialized bytes)` for a world packet.
                #[allow(dead_code)]
                pub fn unpack(&mut self) -> PacketOutcome {
                    ($world_opcode_value as u32, self.to_binary())
                }
            )?
        }
    };
}
Here is a sandbox that reproduces how it works.
Could somebody explain how the Rust compiler knows which type `BinaryConverter::read_from` should be called on?