From a872a5c2454ac7d7d66216ccebd69bf97d485b10 Mon Sep 17 00:00:00 2001
From: Gabriel Bjørnager Jensen
Date: Sat, 31 Aug 2024 12:55:15 +0200
Subject: Make 'alloc' and 'std' default features; Make serialisations variably sized again; Refactor derive implementations; Completely rework streams; Fix tuple deserialisation; Encode 'FixedString' in UTF-8; Remove methods 'from_chars' and 'set_len' from 'FixedString'; Rename 'as_slice' and 'as_mut_slice' methods in 'FixedString' to 'as_str' and 'as_mut_str'; Add methods 'as_bytes', 'push_str', 'chars', 'capacity', and 'char_indices' to 'FixedString'; Rework 'FixedString' traits; Remove 'FixedIter'; Update lints; Add methods 'set_len' and 'set_len_unchecked' to 'Buffer'; Elaborate docs; Update readme; Do not require 'Serialise' for 'Deserialise'; Rename 'SERIALISED_SIZE' in 'Serialise' to 'MAX_SERIALISED_SIZE'; Use streams in 'Serialise' and 'Deserialise'; Drop 'Serialise' requirement for 'Buffer'; Add methods 'with_capacity' and 'capacity' to 'Buffer';

---
 CHANGELOG.md | 23 ++
 Cargo.toml | 2 -
 README.md | 96 +++---
 bzipper/Cargo.toml | 6 +-
 bzipper/src/buffer/mod.rs | 152 +++++++--
 bzipper/src/buffer/test.rs | 4 +-
 bzipper/src/deserialise/mod.rs | 142 ++++-----
 bzipper/src/deserialise/test.rs | 11 +-
 bzipper/src/deserialise/tuple.rs | 254 +++++++--------
 bzipper/src/dstream/mod.rs | 91 +++++-
 bzipper/src/error/mod.rs | 42 +--
 bzipper/src/fixed_iter/mod.rs | 46 ---
 bzipper/src/fixed_string/mod.rs | 413 ++++++++++++++----------
 bzipper/src/fixed_string/test.rs | 6 +-
 bzipper/src/lib.rs | 99 +++---
 bzipper/src/serialise/mod.rs | 168 ++++------
 bzipper/src/serialise/test.rs | 37 +--
 bzipper/src/serialise/tuple.rs | 426 +++++++++++--------------
 bzipper/src/sstream/mod.rs | 93 +++++-
 bzipper_macros/Cargo.toml | 2 +-
 bzipper_macros/src/impls/deserialise_enum.rs | 20 +-
 bzipper_macros/src/impls/deserialise_struct.rs | 57 ++--
 bzipper_macros/src/impls/serialise_enum.rs | 19 +-
 bzipper_macros/src/impls/serialise_struct.rs | 20 +-
 24 files changed, 1202 insertions(+), 1027 deletions(-)
 delete mode 100644 bzipper/src/fixed_iter/mod.rs

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6ffd76f..a32a290 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,29 @@ This is the changelog of bzipper. See `"README.md"` for more information.
+## 0.7.0
+
+* Make `alloc` and `std` default features
+* Make serialisations variably sized again
+* Refactor derive implementations
+* Completely rework streams
+* Fix tuple deserialisation
+* Encode `FixedString` in UTF-8
+* Remove methods `from_chars` and `set_len` from `FixedString`
+* Rename `as_slice` and `as_mut_slice` methods in `FixedString` to `as_str` and `as_mut_str`
+* Add methods `as_bytes`, `push_str`, `chars`, `capacity`, and `char_indices` to `FixedString`
+* Rework `FixedString` traits
+* Remove `FixedIter`
+* Update lints
+* Add methods `set_len` and `set_len_unchecked` to `Buffer`
+* Elaborate docs
+* Update readme
+* Do not require `Serialise` for `Deserialise`
+* Rename `SERIALISED_SIZE` in `Serialise` to `MAX_SERIALISED_SIZE`
+* Use streams in `Serialise` and `Deserialise`
+* Drop `Serialise` requirement for `Buffer`
+* Add methods `with_capacity` and `capacity` to `Buffer`
+
 ## 0.6.2
 
 * Fix `Deserialise` derive for unit variants
diff --git a/Cargo.toml b/Cargo.toml
index 90953b6..9194694 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -101,7 +101,6 @@ option_as_ref_cloned = "warn"
 option_if_let_else = "warn"
 option_option = "deny"
 or_fun_call = "deny"
-panic_in_result_fn = "deny"
 path_buf_push_overwrite = "deny"
 pattern_type_mismatch = "deny"
 ptr_as_ptr = "forbid"
@@ -122,7 +121,6 @@ return_self_not_must_use = "deny"
 same_functions_in_if_condition = "deny"
 same_name_method = "deny"
 self_named_module_files = "deny"
-semicolon_outside_block = "warn"
 single_char_pattern = "warn"
 str_split_at_newline = "warn"
 string_lit_as_bytes = "deny"
diff --git a/README.md b/README.md
index 94ae801..bffd266 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 [bzipper](https://crates.io/crates/bzipper/) is a binary (de)serialiser for the Rust language.
 
-Contrary to [Serde](https://crates.io/crates/serde/)/[Bincode](https://crates.io/crates/bincode/), the goal of bzipper is to serialise with a known size constraint.
+In contrast to [Serde](https://crates.io/crates/serde/)/[Bincode](https://crates.io/crates/bincode/), the primary goal of bzipper is to serialise with a known size constraint.
 Therefore, this crate may be more suited for networking or other cases where a fixed-sized buffer is needed.
 
 Keep in mind that this project is still work-in-progress.
@@ -20,15 +20,15 @@ For strings, the `FixedString` type is also provided.
 
 ## Usage
 
-This crate revolves around the `Serialise` and `Deserialise` traits, both of which are commonly used in conjunction with streams (more specifically, s-streams and d-streams).
+This crate revolves around the `Serialise` and `Deserialise` traits, both of which use *streams* – or more specifically – s-streams and d-streams.
 
 Many core types come implemented with bzipper, including primitives as well as some standard library types such as `Option` and `Result`.
 
-It is recommended in most cases to just derive these traits for custom types (enumerations and structures only).
-Here, each field is chained in declaration order:
+It is recommended in most cases to just derive these two traits for custom types (although this is only supported with enumerations and structures).
+Here, each field is *chained* according to declaration order:
 
-```rs
-use bzipper::{Deserialise, Serialise};
+```rust
+use bzipper::{Buffer, Deserialise, Serialise};
 
 #[derive(Debug, Deserialise, PartialEq, Serialise)]
 struct IoRegister {
@@ -36,45 +36,55 @@ struct IoRegister {
 	value: u16,
 }
 
-let mut buf: [u8; IoRegister::SERIALISED_SIZE] = Default::default();
-IoRegister { addr: 0x04000000, value: 0x0402 }.serialise(&mut buf).unwrap();
+let mut buf = Buffer::new();
+buf.write(IoRegister { addr: 0x04000000, value: 0x0402 }).unwrap();
+
+assert_eq!(buf.len(), 0x6);
 
 assert_eq!(buf, [0x04, 0x00, 0x00, 0x00, 0x04, 0x02]);
 
-assert_eq!(IoRegister::deserialise(&buf).unwrap(), IoRegister { addr: 0x04000000, value: 0x0402 });
+assert_eq!(buf.read().unwrap(), IoRegister { addr: 0x04000000, value: 0x0402 });
 ```
 
 ### Serialisation
 
-To serialise an object implementing `Serialise`, simply allocate a buffer for the serialisation.
-The required size of any given serialisation is specified by the `SERIALISED_SIZE` constant:
+To serialise an object implementing `Serialise`, simply allocate a buffer for the serialisation and wrap it in an s-stream (*serialisation stream*) with the `Sstream` type.
+
+```rust
+use bzipper::{Serialise, Sstream};
 
-```rs
-use bzipper::Serialise;
+let mut buf = [Default::default(); char::MAX_SERIALISED_SIZE];
+let mut stream = Sstream::new(&mut buf);
 
-let mut buf: [u8; char::SERIALISED_SIZE] = Default::default();
-'Ж'.serialise(&mut buf).unwrap();
+'Ж'.serialise(&mut stream).unwrap();
 
-assert_eq!(buf, [0x00, 0x00, 0x04, 0x16]);
+assert_eq!(stream, [0x00, 0x00, 0x04, 0x16]);
 ```
 
-The only special requirement of the `serialise` method is that the provided byte slice has an element count of exactly `SERIALISED_SIZE`.
+The maximum size of any given serialisation is specified by the `MAX_SERIALISED_SIZE` constant.
+
+We can also use streams to chain multiple elements together:
 
-We can also use streams to *chain* multiple elements together:
+```rust
+use bzipper::{Serialise, Sstream};
 
-```rs
-use bzipper::Serialise;
+let mut buf = [Default::default(); char::MAX_SERIALISED_SIZE * 0x5];
+let mut stream = Sstream::new(&mut buf);
 
-let mut buf: [u8; char::SERIALISED_SIZE * 5] = Default::default();
-let mut stream = bzipper::Sstream::new(&mut buf);
+// Note: For serialising multiple characters, the
+// `FixedString` type is usually preferred.
 
-stream.append(&'ل');
-stream.append(&'ا');
-stream.append(&'م');
-stream.append(&'د');
-stream.append(&'ا');
+'ل'.serialise(&mut stream).unwrap();
+'ا'.serialise(&mut stream).unwrap();
+'م'.serialise(&mut stream).unwrap();
+'د'.serialise(&mut stream).unwrap();
+'ا'.serialise(&mut stream).unwrap();
 
-assert_eq!(buf, [0x00, 0x00, 0x06, 0x44, 0x00, 0x00, 0x06, 0x27, 0x00, 0x00, 0x06, 0x45, 0x00, 0x00, 0x06, 0x2F, 0x00, 0x00, 0x06, 0x27]);
+assert_eq!(buf, [
+	0x00, 0x00, 0x06, 0x44, 0x00, 0x00, 0x06, 0x27,
+	0x00, 0x00, 0x06, 0x45, 0x00, 0x00, 0x06, 0x2F,
+	0x00, 0x00, 0x06, 0x27
+]);
 ```
 
 When serialising primitives, the resulting byte stream is in big endian (a.k.a. network endian).
@@ -82,25 +92,33 @@ It is recommended for implementors to adhere to this convention as well.
 
 ### Deserialisation
 
-Deserialisation works with an almost identical syntax to serialisation.
+Deserialisation works with a similar syntax to serialisation.
 
-To deserialise a buffer, simply call the `deserialise` method:
+D-streams (*deserialisation streams*) use the `Dstream` type and are constructed in a manner similar to s-streams.
+To deserialise a buffer, simply call the `deserialise` method with the stream:
 
-```rs
-use bzipper::Deserialise;
+```rust
+use bzipper::{Deserialise, Dstream};
 
 let data = [0x45, 0x54];
 
-assert_eq!(<u16>::deserialise(&data).unwrap(), 0x4554);
+let stream = Dstream::new(&data);
+assert_eq!(u16::deserialise(&stream).unwrap(), 0x4554);
 ```
 
-Just like with serialisations, the `Dstream` can be used to deserialise chained elements:
+And just like s-streams, d-streams can also be used to handle chaining:
 
-```rs
-use bzipper::Deserialise;
+```rust
+use bzipper::{Deserialise, Dstream};
 
 let data = [0x45, 0x54];
-let stream = bzipper::Dstream::new(&data);
+let stream = Dstream::new(&data);
+
+assert_eq!(u8::deserialise(&stream).unwrap(), 0x45);
+assert_eq!(u8::deserialise(&stream).unwrap(), 0x54);
+
+// The data can also be deserialised as a tuple (up
+// to twelve elements).
 
-assert_eq!(stream.take::<u8>().unwrap(), 0x45);
-assert_eq!(stream.take::<u8>().unwrap(), 0x54);
+let stream = Dstream::new(&data);
+assert_eq!(<(u8, u8)>::deserialise(&stream).unwrap(), (0x45, 0x54));
 ```
diff --git a/bzipper/Cargo.toml b/bzipper/Cargo.toml
index 8f6f98d..28fae76 100644
--- a/bzipper/Cargo.toml
+++ b/bzipper/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "bzipper"
-version = "0.6.2"
+version = "0.7.0"
 edition = "2021"
 rust-version = "1.81"
 documentation = "https://docs.rs/bzipper/"
@@ -16,11 +16,13 @@ license.workspace = true
 all-features = true
 
 [features]
+default = ["alloc", "std"]
+
 alloc = []
 std = []
 
 [dependencies]
-bzipper_macros = { path = "../bzipper_macros", version = "0.6.2"}
+bzipper_macros = { path = "../bzipper_macros", version = "0.7.0"}
 
 [lints]
 workspace = true
diff --git a/bzipper/src/buffer/mod.rs b/bzipper/src/buffer/mod.rs
index f39b108..c0da902 100644
--- a/bzipper/src/buffer/mod.rs
+++ b/bzipper/src/buffer/mod.rs
@@ -22,112 +22,201 @@
 #[cfg(test)]
 mod test;
 
-use crate::{Deserialise, Result, Serialise};
+use crate::{Deserialise, Dstream, Result, Serialise, Sstream};
 
 use alloc::vec;
 use alloc::boxed::Box;
+use core::borrow::Borrow;
 use core::fmt::{Debug, Formatter};
 use core::marker::PhantomData;
 use core::ops::{Deref, DerefMut};
 
-// We cannot use arrays for the `Buffer` type as
-// that would require `generic_const_exprs`.
-
 /// Typed (de)serialisation buffer.
 ///
 /// This structure is intended as a lightweight wrapper around byte buffers for specific (de)serialisations of specific types.
 ///
-/// The methods [`write`](Self::write) and [`read`](Self::read) can be used to the internal buffer.
+/// The methods [`write`](Self::write) and [`read`](Self::read) can be used to handle the internal buffer.
 /// Other methods exist for accessing the internal buffer directly.
 ///
 /// # Examples
 ///
 /// Create a buffer for holding a `Request` enumeration:
 ///
-/// ```
+/// ```rust
 /// use bzipper::{Buffer, FixedString, Serialise};
 ///
 /// #[derive(Serialise)]
 /// enum Request {
-/// 	Join { username: FixedString<0x10> },
+/// 	Join { username: FixedString<0x40> },
 ///
-/// 	Quit { username: FixedString<0x10> },
+/// 	Quit { username: FixedString<0x40> },
 ///
-/// 	SendMessage { message: FixedString<0x20> },
+/// 	SendMessage { message: FixedString<0x80> },
 /// }
 ///
 /// use Request::*;
 ///
 /// let join_request = Join { username: FixedString::try_from("epsiloneridani").unwrap() };
 ///
-/// let mut buf = Buffer::<Request>::new();
-/// buf.write(&join_request);
+/// let mut buf = Buffer::new();
+/// buf.write(join_request);
 ///
 /// // Do something with the buffer...
/// ``` #[cfg_attr(doc, doc(cfg(feature = "alloc")))] #[derive(Clone, Eq, PartialEq)] -pub struct Buffer { +pub struct Buffer { buf: Box<[u8]>, + len: usize, _phanton: PhantomData } -impl Buffer { - /// Allocates a new buffer suitable for (de)serialisation. +impl Buffer { + /// Allocates a new buffer suitable for serialisation. + /// + /// The given capacity should be large enough to hold any expected serialisation of `T`. + /// Therefore, if `T` implements [`Serialise`], it is recommended to use [`new`](Self::new) instead, which is equivalent to passing [`MAX_SERIALISED_SIZE`](Serialise::MAX_SERIALISED_SIZE) to this function: + #[inline] #[must_use] - pub fn new() -> Self { Self { buf: vec![0x00; T::SERIALISED_SIZE].into(), _phanton: PhantomData } } + pub fn with_capacity(len: usize) -> Self { + Self { + buf: vec![0x00; len].into(), + len: 0x0, - /// Serialises into the contained buffer. + _phanton: PhantomData, + } + } + + /// Sets the length of the used buffer. + /// + /// The provided size is checked before being written. + /// For the same operation *without* checks, see [`set_len_unchecked`](Self::set_len_unchecked). + /// + /// # Panics + /// + /// The provided size must not be greater than the buffer's capacity. + /// If this is the case, however, this method will panic. + #[inline(always)] + pub fn set_len(&mut self, len: usize) { + assert!(len <= self.capacity(), "cannot extend buffer beyond capacity"); + + self.len = len; + } + + /// Sets the length of the used buffer without checks. + /// + /// The validity of the provided size is **not** checked before being written. + /// For the same operation *with* checks, see [`set_len`](Self::set_len). + /// + /// # Safety + /// + /// If the value of `len` is greater than the buffer's capacity, behaviour is undefined. #[inline(always)] - pub fn write(&mut self, value: &T) -> Result<()> { value.serialise(&mut self.buf) } + pub unsafe fn set_len_unchecked(&mut self, len: usize) { self.len = len } - /// Retrieves a pointer to the first byte. + /// Retrieves a pointer to the first byte of the internal buffer. #[inline(always)] #[must_use] pub const fn as_ptr(&self) -> *const u8 { self.buf.as_ptr() } - /// Retrieves a mutable pointer to the first byte. + /// Retrieves a mutable pointer to the first byte of the internal buffer. #[inline(always)] #[must_use] pub fn as_mut_ptr(&mut self) -> *mut u8 { self.buf.as_mut_ptr() } /// Gets a slice of the internal buffer. + /// + /// The returned slice will only include the used part of the buffer (as specified by [`len`](Self::len)). #[inline(always)] #[must_use] pub const fn as_slice(&self) -> &[u8] { unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) } } /// Gets a mutable slice of the internal buffer. + /// + /// In contrast to [`as_slice`](Self::as_slice), this method returns a slice of the **entire** internal buffer. + /// + /// If the returned reference is written through, the new buffer length -- if different -- should be set using [`set_len`](Self::set_len). #[inline(always)] #[must_use] pub fn as_mut_slice(&mut self) -> &mut [u8] { &mut self.buf } /// Gets the length of the buffer. - /// - /// This is defined as (and therefore always equal to) the value of [SERIALISED_SIZE](Serialise::SERIALISED_SIZE) as specified by `T`. #[allow(clippy::len_without_is_empty)] #[inline(always)] #[must_use] - pub const fn len(&self) -> usize { T::SERIALISED_SIZE } + pub const fn len(&self) -> usize { self.len } + + /// Gets the capacity of the buffer. 
+ /// + /// If the buffer was constructed using [`new`](Self::new), this value is exactly the same as [`MAX_SERIALISED_SIZE`](Serialise::MAX_SERIALISED_SIZE). + #[inline(always)] + #[must_use] + pub const fn capacity(&self) -> usize { self.buf.len() } +} + +impl Buffer { + /// Allocates a new buffer suitable for serialisation. + /// + /// The capacity of the internal buffer is set so that any serialisation of `T` may be stored. + /// + /// This is equivalent to calling [`with_capacity`](Self::with_capacity) with [`MAX_SERIALISED_SIZE`](Serialise::MAX_SERIALISED_SIZE). + #[inline(always)] + #[must_use] + pub fn new() -> Self { Self::with_capacity(T::MAX_SERIALISED_SIZE) } + + /// Serialises into the contained buffer. + /// + /// # Errors + /// + /// Any error that occurs during serialisation is passed on and returned from this method. + /// + /// # Panics + /// + /// If the amount of bytes read by [`serialise`](Serialise::serialise) is greater than that specified by [`MAX_SERIALISED_SIZE`](Serialise::MAX_SERIALISED_SIZE), this method panics. + /// + /// In reality, however, this error can only be detected if the buffer's capacity is set to a value greater than `MAX_SERIALISED_SIZE` to begin with (e.g. using [`with_capacity`](Self::with_capacity)). + #[inline(always)] + pub fn write>(&mut self, value: U) -> Result<()> { + let mut stream = Sstream::new(&mut self.buf); + value.borrow().serialise(&mut stream)?; + + assert!(stream.len() <= T::MAX_SERIALISED_SIZE); + self.len = stream.len(); + + Ok(()) + } } impl Buffer { /// Deserialises from the contained buffer. + /// + /// # Errors + /// + /// Any error that occurs during deserialisation is passed on and returned from this method. #[inline(always)] - pub fn read(&self) -> Result { T::deserialise(&self.buf) } + pub fn read(&self) -> Result { + // We should only pass the used part of the buffer + // to `deserialise`. 
+ + let stream = Dstream::new(&self.buf[0x0..self.len()]); + let value = Deserialise::deserialise(&stream)?; + + Ok(value) + } } -impl AsMut<[u8]> for Buffer { +impl AsMut<[u8]> for Buffer { #[inline(always)] fn as_mut(&mut self) -> &mut [u8] { self.as_mut_slice() } } -impl AsRef<[u8]> for Buffer { +impl AsRef<[u8]> for Buffer { #[inline(always)] fn as_ref(&self) -> &[u8] { self.as_slice() } } -impl Debug for Buffer { +impl Debug for Buffer { #[inline(always)] fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { write!(f, "{:?}", self.as_slice()) } } @@ -137,19 +226,24 @@ impl Default for Buffer { fn default() -> Self { Self::new() } } -impl Deref for Buffer { +impl Deref for Buffer { type Target = [u8]; #[inline(always)] fn deref(&self) -> &Self::Target { self.as_slice() } } -impl DerefMut for Buffer { +impl DerefMut for Buffer { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut_slice() } } -impl PartialEq<&[u8]> for Buffer { +impl PartialEq<&[u8]> for Buffer { #[inline(always)] fn eq(&self, other: &&[u8]) -> bool { self.as_slice() == *other } } + +impl PartialEq<[u8; N]> for Buffer { + #[inline(always)] + fn eq(&self, other: &[u8; N]) -> bool { self.as_slice() == other.as_slice() } +} diff --git a/bzipper/src/buffer/test.rs b/bzipper/src/buffer/test.rs index 4f24e45..e92ae4b 100644 --- a/bzipper/src/buffer/test.rs +++ b/bzipper/src/buffer/test.rs @@ -25,11 +25,11 @@ use crate::{Buffer, Error}; fn test_buffer() { let mut buf = Buffer::::new(); - buf.write(&'\u{1F44D}').unwrap(); + buf.write('\u{1F44D}').unwrap(); assert_eq!(buf, [0x00, 0x01, 0xF4, 0x4D].as_slice()); buf.as_mut_slice().copy_from_slice(&[0x00, 0x00, 0xD8, 0x00]); - assert!(matches!(buf.read(), Err(Error::InvalidCodePoint { value: 0xD800 }))); + assert!(matches!(buf.read(), Err(Error::InvalidCodePoint(0xD800)))); buf.as_mut_slice().copy_from_slice(&[0x00, 0x00, 0xFF, 0x3A]); assert_eq!(buf.read().unwrap(), '\u{FF3A}'); diff --git a/bzipper/src/deserialise/mod.rs b/bzipper/src/deserialise/mod.rs index 49ca74e..e51a552 100644 --- a/bzipper/src/deserialise/mod.rs +++ b/bzipper/src/deserialise/mod.rs @@ -31,15 +31,12 @@ use core::num::NonZero; mod tuple; -/// Types capable of being deserialised. -/// -/// This trait requires [`Serialise`] also being implemented as it relies on the [`SERIALISED_SIZE`](crate::Serialise::SERIALISED_SIZE) constant. -pub trait Deserialise: Serialise + Sized { - /// Deserialises a slice into an object. +/// Denotes a type capable of deserialisation. +pub trait Deserialise: Sized { + /// Deserialises an object from the given d-stream. /// - /// This function must **never** take more bytes than specified by [`SERIALISED_SIZE`](crate::Serialise::SERIALISED_SIZE). + /// This method must **never** read more bytes than specified by [`MAX_SERIALISED_SIZE`](crate::Serialise::MAX_SERIALISED_SIZE) (if [`Serialise`] is defined, that is). /// Doing so is considered a logic error. - /// Likewise, providing more than this amount is also disfavoured. /// /// # Errors /// @@ -47,22 +44,20 @@ pub trait Deserialise: Serialise + Sized { /// /// # Panics /// - /// This method will usually panic if the provided slice has a length *less* than the value of `SERIALISED_SIZE`. + /// This method will usually panic if the provided slice has a length *less* than the value of `MAX_SERIALISED_SIZE`. /// Official implementations of this trait (including those that are derived) always panic in debug mode if the provided slice has a length that is different at all. 
- fn deserialise(data: &[u8]) -> Result; + fn deserialise(stream: &Dstream) -> Result; } macro_rules! impl_numeric { ($ty:ty) => { impl ::bzipper::Deserialise for $ty { - fn deserialise(data: &[u8]) -> ::bzipper::Result { - ::core::debug_assert_eq!(data.len(), ::SERIALISED_SIZE); - - const SIZE: usize = ::core::mem::size_of::<$ty>(); - - let data = data - .get(0x0..SIZE) - .ok_or(::bzipper::Error::EndOfStream { req: SIZE, rem: data.len() })? + #[inline] + fn deserialise(stream: &Dstream) -> ::bzipper::Result { + let data = stream + .read(Self::MAX_SERIALISED_SIZE) + .unwrap() + //.ok_or(::bzipper::Error::EndOfStream { req: Self::MAX_SERIALISED_SIZE, rem: data.len() })? .try_into() .unwrap(); @@ -75,34 +70,29 @@ macro_rules! impl_numeric { macro_rules! impl_non_zero { ($ty:ty) => { impl ::bzipper::Deserialise for NonZero<$ty> { - fn deserialise(data: &[u8]) -> ::bzipper::Result { - ::core::debug_assert_eq!(data.len(), ::SERIALISED_SIZE); + #[inline] + fn deserialise(stream: &Dstream) -> ::bzipper::Result { + let value = <$ty as ::bzipper::Deserialise>::deserialise(stream)?; - let value = <$ty as ::bzipper::Deserialise>::deserialise(data)?; + let value = NonZero::new(value) + .ok_or(Error::NullInteger)?; - NonZero::new(value) - .ok_or(Error::NullInteger) + Ok(value) } } }; } -impl Deserialise for [T; N] -where - T: Deserialise { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); - +impl Deserialise for [T; N] { + #[inline] + fn deserialise(stream: &Dstream) -> Result { // Initialise the array incrementally. let mut buf: [MaybeUninit; N] = unsafe { MaybeUninit::uninit().assume_init() }; - let mut pos = 0x0; for item in &mut buf { - let range = pos..pos + T::SERIALISED_SIZE; - - pos = range.end; - item.write(Deserialise::deserialise(&data[range])?); + let value = T::deserialise(stream)?; + item.write(value); } // This should be safe as `MaybeUninit` is @@ -118,83 +108,80 @@ where } impl Deserialise for bool { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); - - let value = u8::deserialise(data)?; + #[inline] + fn deserialise(stream: &Dstream) -> Result { + let value = u8::deserialise(stream)?; match value { 0x00 => Ok(false), 0x01 => Ok(true), - _ => Err(Error::InvalidBoolean { value }) + _ => Err(Error::InvalidBoolean(value)) } } } impl Deserialise for char { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + #[inline] + fn deserialise(stream: &Dstream) -> Result { + let value = u32::deserialise(stream)?; - let value = u32::deserialise(data)?; + let value = value + .try_into() + .map_err(|_| Error::InvalidCodePoint(value))?; - Self::from_u32(value) - .ok_or(Error::InvalidCodePoint { value }) + Ok(value) } } impl Deserialise for Infallible { #[allow(clippy::panic_in_result_fn)] #[inline(always)] - fn deserialise(_data: &[u8]) -> Result { panic!("cannot deserialise `Infallible` as it cannot be serialised to begin with") } + fn deserialise(_stream: &Dstream) -> Result { panic!("cannot deserialise `Infallible` as it cannot be serialised to begin with") } } impl Deserialise for isize { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + #[inline] + fn deserialise(stream: &Dstream) -> Result { + let value = i32::deserialise(stream)?; - let value = i32::deserialise(data)? 
- .try_into().expect("unable to convert from `i32` to `isize`"); + let value = value + .try_into() + .expect("unable to convert from `i32` to `isize`"); Ok(value) } } impl Deserialise for Option { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); - - let stream = Dstream::new(data); - - let sign = stream.take::()?; + #[allow(clippy::if_then_some_else_none)] + #[inline] + fn deserialise(stream: &Dstream) -> Result { + let sign = bool::deserialise(stream)?; - if sign { - Ok(Some(stream.take::()?)) + let value = if sign { + Some(T::deserialise(stream)?) } else { - Ok(None) - } + None + }; + + Ok(value) } } impl Deserialise for PhantomData { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); - - Ok(Self) - } + #[inline(always)] + fn deserialise(_stream: &Dstream) -> Result { Ok(Self) } } impl Deserialise for core::result::Result { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); - - let stream = Dstream::new(data); - - let sign = stream.take::()?; + #[inline] + fn deserialise(stream: &Dstream) -> Result { + let sign = bool::deserialise(stream)?; let value = if sign { - Err(stream.take::()?) + Err(E::deserialise(stream)?) } else { - Ok(stream.take::()?) + Ok(T::deserialise(stream)?) }; Ok(value) @@ -202,15 +189,18 @@ impl Deserialise for core::result::Result } impl Deserialise for () { - fn deserialise(_data: &[u8]) -> Result { Ok(()) } + #[inline(always)] + fn deserialise(_stream: &Dstream) -> Result { Ok(()) } } impl Deserialise for usize { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + #[inline] + fn deserialise(stream: &Dstream) -> Result { + let value = u32::deserialise(stream)?; - let value = u32::deserialise(data)? - .try_into().expect("unable to convert from `u32` to `usize`"); + let value = value + .try_into() + .expect("must be able to convert from `u32` to `usize`"); Ok(value) } diff --git a/bzipper/src/deserialise/test.rs b/bzipper/src/deserialise/test.rs index 3caebae..8624448 100644 --- a/bzipper/src/deserialise/test.rs +++ b/bzipper/src/deserialise/test.rs @@ -19,7 +19,9 @@ // er General Public License along with bzipper. If // not, see . -use crate::{Deserialise, Serialise}; +use core::char; + +use crate::{Deserialise, Dstream, Serialise}; #[test] fn test() { @@ -46,9 +48,10 @@ fn test() { ($ty:ty: $data:expr => $value:expr) => {{ use ::bzipper::{Deserialise, Serialise}; - let buf: [u8; <$ty as Serialise>::SERIALISED_SIZE] = $data; + let mut buf: [u8; <$ty as Serialise>::MAX_SERIALISED_SIZE] = $data; + let stream = Dstream::new(&mut buf); - let left = <$ty as Deserialise>::deserialise(&buf).unwrap(); + let left = <$ty as Deserialise>::deserialise(&stream).unwrap(); let right = $value; assert_eq!(left, right); @@ -80,6 +83,8 @@ fn test() { 0xBF, 0x4F, 0xAF, 0x5F, 0x9F, 0x6F, 0x8F, 0x7F, ] => 0xFF_0F_EF_1F_DF_2F_CF_3F_BF_4F_AF_5F_9F_6F_8F_7F); + test!(char: [0x00, 0x00, 0xFF, 0xFD] => char::REPLACEMENT_CHARACTER); + test!([char; 0x5]: [ 0x00, 0x00, 0x03, 0xBB, 0x00, 0x00, 0x03, 0x91, 0x00, 0x00, 0x03, 0xBC, 0x00, 0x00, 0x03, 0x94, diff --git a/bzipper/src/deserialise/tuple.rs b/bzipper/src/deserialise/tuple.rs index b1f7ac1..fedbad6 100644 --- a/bzipper/src/deserialise/tuple.rs +++ b/bzipper/src/deserialise/tuple.rs @@ -19,17 +19,17 @@ // er General Public License along with bzipper. If // not, see . 
-use crate::{Deserialise, Result, Serialise}; +use crate::{Deserialise, Dstream, Result}; impl Deserialise for (T0, ) where T0: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -37,13 +37,13 @@ impl Deserialise for (T0, T1) where T0: Deserialise, T1: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -52,14 +52,14 @@ where T0: Deserialise, T1: Deserialise, T2: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -69,15 +69,15 @@ where T1: Deserialise, T2: Deserialise, T3: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -88,16 +88,16 @@ where T2: Deserialise, T3: Deserialise, T4: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -109,17 +109,17 @@ where T3: Deserialise, T4: Deserialise, T5: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -132,18 +132,18 @@ where T4: Deserialise, T5: Deserialise, T6: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + 
Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -157,19 +157,19 @@ where T5: Deserialise, T6: Deserialise, T7: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -184,20 +184,20 @@ where T6: Deserialise, T7: Deserialise, T8: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -213,21 +213,21 @@ where T7: Deserialise, T8: Deserialise, T9: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -244,22 +244,22 @@ where T8: Deserialise, T9: Deserialise, T10: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + 
Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } @@ -277,22 +277,22 @@ where T9: Deserialise, T10: Deserialise, T11: Deserialise, { - fn deserialise(data: &[u8]) -> Result { - debug_assert_eq!(data.len(), Self::SERIALISED_SIZE); + fn deserialise(stream: &Dstream) -> Result { + let value = ( + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + Deserialise::deserialise(stream)?, + ); - Ok(( - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - Deserialise::deserialise(data)?, - )) + Ok(value) } } diff --git a/bzipper/src/dstream/mod.rs b/bzipper/src/dstream/mod.rs index e87edf8..3cdae50 100644 --- a/bzipper/src/dstream/mod.rs +++ b/bzipper/src/dstream/mod.rs @@ -19,16 +19,17 @@ // er General Public License along with bzipper. If // not, see . -use crate::{Deserialise, Error, Result}; +use crate::{Error, Result}; use core::cell::Cell; +use core::fmt::{Debug, Formatter}; -/// Byte stream for deserialisation. +/// Byte stream suitable for deserialisation. /// -/// This type borrows a slice, keeping track internally of the used bytes. +/// This type borrows a buffer, keeping track internally of the used bytes. pub struct Dstream<'a> { - data: &'a [u8], - pos: Cell, + pub(in crate) data: &'a [u8], + pub(in crate) pos: Cell, } impl<'a> Dstream<'a> { @@ -37,22 +38,84 @@ impl<'a> Dstream<'a> { #[must_use] pub const fn new(data: &'a [u8]) -> Self { Self { data, pos: Cell::new(0x0) } } - /// Deserialises an object from the stream. - /// - /// # Errors - /// - /// If the stream doesn't hold at least the amount of bytes specified by [`SERIALISED_SIZE`](crate::Serialise::SERIALISED_SIZE), an [`EndOfStream`](Error::EndOfStream) error is returned. + /// Takes (borrows) raw bytes from the stream. 
#[inline] - pub fn take(&self) -> Result { + pub fn read(&self, count: usize) -> Result<&[u8]> { let rem = self.data.len() - self.pos.get(); - let req = T::SERIALISED_SIZE; + let req = count; - if rem < req { return Err(Error::EndOfStream { req, rem }) }; + if rem < req { return Err(Error::EndOfStream { req, rem }) } let start = self.pos.get(); let stop = start + req; self.pos.set(stop); - T::deserialise(&self.data[start..stop]) + + let data = &self.data[start..stop]; + Ok(data) + } + + /// Gets a pointer to the first byte in the stream. + #[inline(always)] + #[must_use] + pub const fn as_ptr(&self) -> *const u8 { self.data.as_ptr() } + + /// Gets a slice of the stream. + #[inline(always)] + #[must_use] + pub const fn as_slice(&self) -> &[u8] { + let ptr = self.as_ptr(); + let len = self.len(); + + unsafe { core::slice::from_raw_parts(ptr, len) } } + + /// Gets the length of the stream. + #[inline(always)] + #[must_use] + pub const fn len(&self) -> usize { unsafe { self.pos.as_ptr().read() } } + + /// Tests if the stream is empty. + /// + /// If no deserialisations have been made at the time of calling, this method returns `false`. + #[inline(always)] + #[must_use] + pub const fn is_empty(&self) -> bool { self.len() == 0x0 } + + /// Tests if the stream is full. + /// + /// Note that zero-sized types such as [`()`](unit) can still be deserialised from this stream. + #[inline(always)] + #[must_use] + pub const fn is_full(&self) -> bool { self.len() == self.data.len() } +} + +impl Debug for Dstream<'_> { + #[inline(always)] + fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { Debug::fmt(self.as_slice(), f) } +} + +impl<'a> From<&'a [u8]> for Dstream<'a> { + #[inline(always)] + fn from(value: &'a [u8]) -> Self { Self::new(value) } +} + +impl<'a> From<&'a mut [u8]> for Dstream<'a> { + #[inline(always)] + fn from(value: &'a mut [u8]) -> Self { Self::new(value) } +} + +impl PartialEq for Dstream<'_> { + #[inline(always)] + fn eq(&self, other: &Self) -> bool { self.as_slice() == other.as_slice() } +} + +impl PartialEq<&[u8]> for Dstream<'_> { + #[inline(always)] + fn eq(&self, other: &&[u8]) -> bool { self.as_slice() == *other } +} + +impl PartialEq<[u8; N]> for Dstream<'_> { + #[inline(always)] + fn eq(&self, other: &[u8; N]) -> bool { self.as_slice() == other.as_slice() } } diff --git a/bzipper/src/error/mod.rs b/bzipper/src/error/mod.rs index 090215a..4e0fe11 100644 --- a/bzipper/src/error/mod.rs +++ b/bzipper/src/error/mod.rs @@ -28,47 +28,53 @@ use alloc::boxed::Box; /// Mapping of [`core::result::Result`]. pub type Result = core::result::Result; -/// (De)serialisation failures. +/// bzipper errors. /// /// These variants are used when deserialisation fails. /// Serialisations are assumed infallible. #[derive(Debug)] pub enum Error { /// An array could not hold the requested amount of elements. - ArrayTooShort { req: usize, len: usize }, + ArrayTooShort { + /// The required amount of bytes. + req: usize, + + /// The remaining amount of bytes. + len: usize, + }, /// A string encountered an invalid UTF-8 sequence. BadString { source: Utf8Error }, - /// An implementor-defined error. + /// An unspecified (de)serialisation error. /// /// This is mainly useful if none of the predefined errors are appropriate. #[cfg(feature = "alloc")] #[cfg_attr(doc, doc(cfg(feature = "alloc")))] - CustomError { source: Box }, + CustomError(Box), /// Bytes were requested on an empty stream. EndOfStream { req: usize, rem: usize }, /// A boolean encountered a value outside `0` and `1`. 
- InvalidBoolean { value: u8 }, + InvalidBoolean(u8), /// An invalid code point was encountered. /// /// This includes surrogate points in the inclusive range `U+D800` to `U+DFFF`, as well as values larger than `U+10FFFF`. - InvalidCodePoint { value: u32 }, + InvalidCodePoint(u32), /// An invalid enumeration descriminant was provided. - InvalidDiscriminant { value: u32 }, + InvalidDiscriminant(u32), - /// An `isize` value couldn't fit into `16` bits. - IsizeOutOfRange { value: isize }, + /// An `isize` value couldn't fit into `32` bits. + IsizeOutOfRange(isize), /// A non-zero integer encountered the value `0`. NullInteger, - /// A `usize` value couldn't fit into `16` bits. - UsizeOutOfRange { value: usize }, + /// A `usize` value couldn't fit into `32` bits. + UsizeOutOfRange(usize), } impl Display for Error { @@ -83,28 +89,28 @@ impl Display for Error { => write!(f, "unable to parse utf8: \"{source}\""), #[cfg(feature = "alloc")] - CustomError { ref source } + CustomError(ref source) => write!(f, "{source}"), EndOfStream { req, rem } => write!(f, "({req}) byte(s) were requested but only ({rem}) byte(s) were left"), - InvalidBoolean { value } + InvalidBoolean(value) => write!(f, "expected boolean but got {value:#02X}"), - InvalidCodePoint { value } + InvalidCodePoint(value) => write!(f, "code point U+{value:04X} is not valid"), - InvalidDiscriminant { value } + InvalidDiscriminant(value) => write!(f, "discriminant ({value}) is not valid for the given enumeration"), - IsizeOutOfRange { value } + IsizeOutOfRange(value) => write!(f, "signed size value ({value}) cannot be serialised: must be in the range ({}) to ({})", i16::MIN, i16::MAX), NullInteger => write!(f, "expected non-zero integer but got (0)"), - UsizeOutOfRange { value } + UsizeOutOfRange(value) => write!(f, "unsigned size value ({value}) cannot be serialised: must be at most ({})", u16::MAX), } } @@ -118,7 +124,7 @@ impl core::error::Error for Error { BadString { ref source } => Some(source), #[cfg(feature = "alloc")] - CustomError { ref source } => Some(source.as_ref()), + CustomError(ref source) => Some(source.as_ref()), _ => None, } diff --git a/bzipper/src/fixed_iter/mod.rs b/bzipper/src/fixed_iter/mod.rs deleted file mode 100644 index cc2110d..0000000 --- a/bzipper/src/fixed_iter/mod.rs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2024 Gabriel Bjørnager Jensen. -// -// This file is part of bzipper. -// -// bzipper is free software: you can redistribute -// it and/or modify it under the terms of the GNU -// Lesser General Public License as published by -// the Free Software Foundation, either version 3 -// of the License, or (at your option) any later -// version. -// -// bzipper is distributed in the hope that it will -// be useful, but WITHOUT ANY WARRANTY; without -// even the implied warranty of MERCHANTABILITY or -// FITNESS FOR A PARTICULAR PURPOSE. See the GNU -// Lesser General Public License for more details. -// -// You should have received a copy of the GNU Less- -// er General Public License along with bzipper. If -// not, see . - -use core::mem::MaybeUninit; - -/// Iterator to a fixed vector. -/// -/// This type is used by the [`FixedString`](crate::FixedString) type for iterating over an owned string. 
-#[must_use] -pub struct FixedIter { - pub(in crate) buf: [MaybeUninit; N], - - pub(in crate) pos: usize, - pub(in crate) len: usize, -} - -impl Iterator for FixedIter { - type Item = T; - - fn next(&mut self) -> Option { - if self.pos >= self.len { return None }; - - let item = unsafe { self.buf[self.pos].assume_init_read() }; - self.pos += 0x1; - - Some(item) - } -} diff --git a/bzipper/src/fixed_string/mod.rs b/bzipper/src/fixed_string/mod.rs index 15c31aa..377937a 100644 --- a/bzipper/src/fixed_string/mod.rs +++ b/bzipper/src/fixed_string/mod.rs @@ -22,39 +22,55 @@ #[cfg(test)] mod test; -use crate::{Deserialise, Error, FixedIter, Serialise}; - +use crate::{ + Deserialise, + Dstream, + Error, + Serialise, + Sstream, +}; + +use core::borrow::{Borrow, BorrowMut}; use core::cmp::Ordering; use core::fmt::{Debug, Display, Formatter}; -use core::mem::MaybeUninit; -use core::ops::{Deref, DerefMut, Index, IndexMut}; +use core::hash::{Hash, Hasher}; +use core::ops::{Add, AddAssign, Deref, DerefMut, Index, IndexMut}; use core::slice::SliceIndex; -use core::str::FromStr; +use core::str::{Chars, CharIndices, FromStr}; #[cfg(feature = "alloc")] -use alloc::string::{String, ToString}; +use alloc::string::String; + +#[cfg(feature = "std")] +use std::ffi::OsStr; + +#[cfg(feature = "std")] +use std::net::ToSocketAddrs; + +#[cfg(feature = "std")] +use std::path::Path; -/// Owned string with maximum size. +/// Heap-allocated string with maximum size. /// /// This is in contrast to [String] -- which has no size limit in practice -- and [str], which is unsized. /// -/// # Examples +/// The string itself is encoded in UTF-8 for interoperability wtih Rust's standard string facilities, as well as for memory concerns. /// -/// All instances of this type have the same size if the value of `N` is also the same. -/// This size can be found through +/// Keep in mind that the size limit specified by `N` denotes *bytes* and not *characters* -- i.e. a value of `8` may translate to between two and eight characters, depending on their codepoints. /// -/// `size_of::() * N + size_of::()`. +/// # Examples /// +/// All instances of this type have the same size if the value of `N` is also the same. /// Therefore, the following four strings have -- despite their different contents -- the same total size. /// -/// ``` +/// ```rust /// use bzipper::FixedString; /// use std::str::FromStr; /// -/// let str0 = FixedString::<0xF>::new(); // Empty string. -/// let str1 = FixedString::<0xF>::from_str("Hello there!"); -/// let str2 = FixedString::<0xF>::from_str("أنا من أوروپا"); -/// let str3 = FixedString::<0xF>::from_str("COGITO ERGO SUM"); +/// let str0 = FixedString::<0x40>::new(); // Empty string. +/// let str1 = FixedString::<0x40>::from_str("Hello there!").unwrap(); +/// let str2 = FixedString::<0x40>::from_str("أنا من أوروپا").unwrap(); +/// let str3 = FixedString::<0x40>::from_str("COGITO ERGO SUM").unwrap(); /// /// assert_eq!(size_of_val(&str0), size_of_val(&str1)); /// assert_eq!(size_of_val(&str0), size_of_val(&str2)); @@ -64,10 +80,10 @@ use alloc::string::{String, ToString}; /// assert_eq!(size_of_val(&str2), size_of_val(&str3)); /// ``` /// -/// These three strings can---by extend in theory---also interchange their contents between each other. -#[derive(Clone, Deserialise, Serialise)] +/// These three strings can -- by extend in theory -- also interchange their contents between each other. 
+#[derive(Clone)] pub struct FixedString { - buf: [char; N], + buf: [u8; N], len: usize, } @@ -81,62 +97,78 @@ impl FixedString { /// The constructed string will have a null length. /// All characters inside the internal buffer are instanced as `U+0000 NULL`. /// - /// For constructing a string with an already defined buffer, see [`from_chars`](Self::from_chars) and [`from_raw_parts`](Self::from_raw_parts). + /// For constructing a string with an already defined buffer, see [`from_raw_parts`](Self::from_raw_parts) and [`from_str`](Self::from_str). #[inline(always)] #[must_use] - pub const fn new() -> Self { Self { buf: ['\0'; N], len: 0x0 } } + pub const fn new() -> Self { Self { buf: [0x00; N], len: 0x0 } } - /// Consumes the buffer into a fixed string. + /// Constructs a new, fixed-size string from raw parts. /// - /// The internal length is to `N`. - /// For a similar function but with an explicit size, see [`from_raw_parts`](Self::from_raw_parts). - #[inline(always)] - #[must_use] - pub const fn from_chars(buf: [char; N]) -> Self { Self { buf, len: N } } - - /// Constructs a fixed string from raw parts. + /// The provided parts are not tested in any way. + /// + /// # Safety + /// + /// The value of `len` may not exceed that of `N`. + /// Additionally, the octets in `buf` (from index zero up to the value of `len`) must be valid UTF-8 codepoints. + /// + /// If any of these requirements are violated, behaviour is undefined. #[inline(always)] #[must_use] - pub const fn from_raw_parts(buf: [char; N], len: usize) -> Self { Self { buf, len } } + pub const unsafe fn from_raw_parts(buf: [u8; N], len: usize) -> Self { Self { buf, len } } - /// Deconstructs a fixed string into its raw parts. + /// Destructs the provided string into its raw parts. + /// + /// The returned values are valid to pass on to [`from_raw_parts`](Self::from_raw_parts). + /// + /// The returned byte array is guaranteed to be fully initialised. + /// However, only octets up to an index of [`len`](Self::len) are also guaranteed to be valid UTF-8 codepoints. #[inline(always)] #[must_use] - pub const fn into_raw_parts(self) -> ([char; N], usize) { (self.buf, self.len) } + pub const fn into_raw_parts(self) -> ([u8; N], usize) { (self.buf, self.len) } - /// Gets a pointer to the first character. + /// Gets a pointer to the first octet. #[inline(always)] #[must_use] - pub const fn as_ptr(&self) -> *const char { self.buf.as_ptr() } + pub const fn as_ptr(&self) -> *const u8 { self.buf.as_ptr() } - /// Gets a mutable pointer to the first character. + // This function can only be marked as `const` when + // `const_mut_refs` is implemented. See tracking + // issue #57349 for more information. + /// Gets a mutable pointer to the first octet. /// - /// This function can only be marked as `const` when `const_mut_refs` is implemented. - /// See tracking issue [`#57349`](https://github.com/rust-lang/rust/issues/57349/) for more information. #[inline(always)] #[must_use] - pub fn as_mut_ptr(&mut self) -> *mut char { self.buf.as_mut_ptr() } + pub fn as_mut_ptr(&mut self) -> *mut u8 { self.buf.as_mut_ptr() } - /// Borrows the string as a character slice. + /// Borrows the string as a byte slice. /// /// The range of the returned slice only includes characters that are "used." - /// For borrowing the entire internal buffer, see [`as_mut_slice`](Self::as_mut_slice). 
#[inline(always)] #[must_use] - pub const fn as_slice(&self) -> &[char] { + pub const fn as_bytes(&self) -> &[u8] { // We need to use `from_raw_parts` to mark this // function `const`. unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) } } - /// Mutably borrows the string as a character slice. + /// Borrows the string as a string slice. + /// + /// The range of the returned slice only includes characters that are "used." + #[inline(always)] + #[must_use] + pub const fn as_str(&self) -> &str { unsafe { core::str::from_utf8_unchecked(self.as_bytes()) } } + + /// Mutably borrows the string as a string slice. /// - /// The range of the returned slice includes the entire internal buffer. - /// For borrowing only the "used" characters, see [`as_slice`](Self::as_slice). + /// The range of the returned slice only includes characters that are "used." #[inline(always)] #[must_use] - pub fn as_mut_slice(&mut self) -> &mut [char] { &mut self.buf[0x0..self.len] } + pub fn as_mut_str(&mut self) -> &mut str { + let range = 0x0..self.len(); + + unsafe { core::str::from_utf8_unchecked_mut(&mut self.buf[range]) } + } /// Returns the length of the string. /// @@ -145,42 +177,67 @@ impl FixedString { #[must_use] pub const fn len(&self) -> usize { self.len } - /// Checks if the string is empty, i.e. `self.len() == 0x0`. + /// Checks if the string is empty, i.e. no characters are contained. #[inline(always)] #[must_use] pub const fn is_empty(&self) -> bool { self.len() == 0x0 } - /// Checks if the string is full, i.e. `self.len() == N`. + /// Checks if the string is full, i.e. it cannot hold any more characters. #[inline(always)] #[must_use] pub const fn is_full(&self) -> bool { self.len() == N } - /// Sets the internal length. + /// Returns the total capacity of the string. /// - /// The length is compared with `N` to guarantee that bounds are honoured. + /// This is defined as being exactly the value of `N`. + #[inline(always)] + #[must_use] + pub const fn capacity(&self) -> usize { N } + + /// Gets a substring of the string. + #[inline(always)] + #[must_use] + pub fn get>(&self, index: I) -> Option<&I::Output> { self.as_str().get(index) } + + /// Gets a mutable substring of the string. + #[inline(always)] + #[must_use] + pub fn get_mut>(&mut self, index: I) -> Option<&mut I::Output> { self.as_mut_str().get_mut(index) } + + /// Pushes a character into the string. + /// + /// The internal length is updated accordingly. /// /// # Panics /// - /// This method panics if the value of `len` is greater than that of `N`. + /// If the string cannot hold the provided character *after* encoding, this method will panic. #[inline(always)] - pub fn set_len(&mut self, len: usize) { - assert!(self.len <= N, "cannot set length longer than the fixed size"); - self.len = len; + pub fn push(&mut self, c: char) { + let mut buf = [0x00; 0x4]; + let s = c.encode_utf8(&mut buf); + + self.push_str(s); } - /// Pushes a character into the string. + /// Pushes a string slice into the string. /// /// The internal length is updated accordingly. /// /// # Panics /// - /// If the string cannot hold any more character (i.e. it is full), this method will panic. + /// If the string cannot hold the provided slice, this method will panic. 
#[inline(always)] - pub fn push(&mut self, c: char) { - assert!(!self.is_full(), "cannot push character to full string"); + pub fn push_str(&mut self, s: &str) { + let rem = self.buf.len() - self.len; + let req = s.len(); + + assert!(rem >= req, "cannot push string beyond fixed length"); - self.buf[self.len] = c; - self.len += 0x1; + let start = self.len; + let stop = start + req; + + let buf = &mut self.buf[start..stop]; + buf.copy_from_slice(s.as_bytes()); } /// Pops a character from the string. @@ -188,38 +245,76 @@ impl FixedString { /// The internal length is updated accordingly. /// /// If no characters are left (i.e. the string is empty), an instance of [`None`] is returned. + /// + /// **Note that this method is currently unimplemented.** + #[deprecated = "temporarily unimplemented"] #[inline(always)] - pub fn pop(&mut self) -> Option { - self.len - .checked_sub(0x1) - .map(|len| { - let c = self.buf[self.len]; - self.len = len; + pub fn pop(&mut self) -> Option { todo!() } - c - }) + /// Returns an iterator of the string's characters. + #[inline(always)] + pub fn chars(&self) -> Chars { self.as_str().chars() } + + /// Returns an iterator of the string's characters along with their positions. + #[inline(always)] + pub fn char_indices(&self) -> CharIndices { self.as_str().char_indices() } +} + +impl Add<&str> for FixedString { + type Output = Self; + + fn add(mut self, rhs: &str) -> Self::Output { + self.push_str(rhs); + self } } -impl AsMut<[char]> for FixedString { +impl AddAssign<&str> for FixedString { + fn add_assign(&mut self, rhs: &str) { self.push_str(rhs) } +} + +impl AsMut for FixedString { + #[inline(always)] + fn as_mut(&mut self) -> &mut str { self.as_mut_str() } +} + +#[cfg(feature = "std")] +#[cfg_attr(doc, doc(cfg(feature = "std")))] +impl AsRef for FixedString { + #[inline(always)] + fn as_ref(&self) -> &OsStr { self.as_str().as_ref() } +} + +#[cfg(feature = "std")] +#[cfg_attr(doc, doc(cfg(feature = "std")))] +impl AsRef for FixedString { + #[inline(always)] + fn as_ref(&self) -> &Path { self.as_str().as_ref() } +} + +impl AsRef for FixedString { + #[inline(always)] + fn as_ref(&self) -> &str { self.as_str() } +} + +impl AsRef<[u8]> for FixedString { + #[inline(always)] + fn as_ref(&self) -> &[u8] { self.as_bytes() } +} + +impl Borrow for FixedString { #[inline(always)] - fn as_mut(&mut self) -> &mut [char] { self.as_mut_slice() } + fn borrow(&self) -> &str { self.as_str() } } -impl AsRef<[char]> for FixedString { +impl BorrowMut for FixedString { #[inline(always)] - fn as_ref(&self) -> &[char] { self.as_slice() } + fn borrow_mut(&mut self) -> &mut str { self.as_mut_str() } } impl Debug for FixedString { #[inline] - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - write!(f, "\"")?; - for c in self { write!(f, "{}", c.escape_debug())? } - write!(f, "\"")?; - - Ok(()) - } + fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { Debug::fmt(self.as_str(), f) } } impl Default for FixedString { @@ -227,159 +322,129 @@ impl Default for FixedString { fn default() -> Self { Self { buf: [Default::default(); N], len: 0x0 } } } -/// See [`as_slice`](Self::as_slice). impl Deref for FixedString { - type Target = [char]; + type Target = str; #[inline(always)] - fn deref(&self) -> &Self::Target { self.as_slice() } + fn deref(&self) -> &Self::Target { self.as_str() } } -/// See [`as_mut_slice`](Self::as_mut_slice). 
impl DerefMut for FixedString { #[inline(always)] - fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut_slice() } + fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut_str() } } -impl Display for FixedString { +impl Deserialise for FixedString { #[inline] - fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { - for c in self { write!(f, "{c}")? } + fn deserialise(stream: &Dstream) -> Result { + let len = Deserialise::deserialise(stream)?; + if len > N { return Err(Error::ArrayTooShort { req: len, len: N }) }; - Ok(()) + let bytes = stream.read(len)?; + + let s = core::str::from_utf8(bytes) + .map_err(|e| Error::BadString { source: e })?; + + Self::from_str(s) } } -impl Eq for FixedString { } - -impl From<[char; N]> for FixedString { - #[inline(always)] - fn from(value: [char; N]) -> Self { Self::from_chars(value) } +impl Display for FixedString { + #[inline] + fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { Display::fmt(self.as_str(), f) } } +impl Eq for FixedString { } + impl FromStr for FixedString { type Err = Error; #[inline] fn from_str(s: &str) -> Result { - let mut buf = [Default::default(); N]; - let len = s.chars().count(); + let len = s.len(); + if len > N { return Err(Error::ArrayTooShort { req: len, len: N }) }; - for (i, c) in s.chars().enumerate() { - if i >= N { return Err(Error::ArrayTooShort { req: len, len: N }) } + let mut buf = [0x00; N]; + unsafe { core::ptr::copy_nonoverlapping(s.as_ptr(), buf.as_mut_ptr(), len) }; - buf[i] = c; - } + // The remaining bytes are already initialised to + // null. Ok(Self { buf, len }) } } -impl, const N: usize> Index for FixedString { - type Output = I::Output; +impl Hash for FixedString { + #[inline(always)] + fn hash(&self, state: &mut H) { self.as_str().hash(state) } +} + +impl, const N: usize> Index for FixedString { + type Output = I::Output; #[inline(always)] fn index(&self, index: I) -> &Self::Output { self.get(index).unwrap() } } -impl, const N: usize> IndexMut for FixedString { +impl, const N: usize> IndexMut for FixedString { #[inline(always)] fn index_mut(&mut self, index: I) -> &mut Self::Output { self.get_mut(index).unwrap() } } -impl IntoIterator for FixedString { - type Item = char; - - type IntoIter = FixedIter; - +impl Ord for FixedString { #[inline(always)] - fn into_iter(self) -> Self::IntoIter { - FixedIter { - buf: unsafe { self.buf.as_ptr().cast::<[MaybeUninit; N]>().read() }, - - pos: 0x0, - len: self.len, - } - } + fn cmp(&self, other: &Self) -> Ordering { self.as_str().cmp(other.as_str()) } } -impl<'a, const N: usize> IntoIterator for &'a FixedString { - type Item = &'a char; - - type IntoIter = core::slice::Iter<'a, char>; - +impl PartialEq> for FixedString { #[inline(always)] - fn into_iter(self) -> Self::IntoIter { self.iter() } + fn eq(&self, other: &FixedString) -> bool { self.as_str() == other.as_str() } } -impl<'a, const N: usize> IntoIterator for &'a mut FixedString { - type Item = &'a mut char; - - type IntoIter = core::slice::IterMut<'a, char>; - +impl PartialEq<&str> for FixedString { #[inline(always)] - fn into_iter(self) -> Self::IntoIter { self.iter_mut() } + fn eq(&self, other: &&str) -> bool { self.as_str() == *other } } -impl Ord for FixedString { +impl PartialOrd> for FixedString { #[inline(always)] - fn cmp(&self, other: &Self) -> Ordering { self.partial_cmp(other).unwrap() } + fn partial_cmp(&self, other: &FixedString) -> Option { self.as_str().partial_cmp(other.as_str()) } } -impl PartialEq> for FixedString { +impl PartialOrd<&str> for FixedString { #[inline(always)] 
- fn eq(&self, other: &FixedString) -> bool { self.as_slice() == other.as_slice() } + fn partial_cmp(&self, other: &&str) -> Option { self.as_str().partial_cmp(*other) } } -impl PartialEq<&[char]> for FixedString { - #[inline(always)] - fn eq(&self, other: &&[char]) -> bool { self.as_slice() == *other } -} +impl Serialise for FixedString { + const MAX_SERIALISED_SIZE: usize = N + usize::MAX_SERIALISED_SIZE; -impl PartialEq<&str> for FixedString { - #[inline] - fn eq(&self, other: &&str) -> bool { - for (i, c) in other.chars().enumerate() { - if self.get(i) != Some(&c) { return false }; - } + fn serialise(&self, stream: &mut Sstream) -> Result<(), Error> { + self.len().serialise(stream)?; + stream.write(self.as_bytes())?; - true + Ok(()) } } -impl PartialOrd> for FixedString { - #[inline(always)] - fn partial_cmp(&self, other: &FixedString) -> Option { self.partial_cmp(&other.as_slice()) } -} +#[cfg(feature = "std")] +#[cfg_attr(doc, doc(cfg(feature = "std")))] +impl ToSocketAddrs for FixedString { + type Iter = ::Iter; -impl PartialOrd<&[char]> for FixedString { #[inline(always)] - fn partial_cmp(&self, other: &&[char]) -> Option { self.as_slice().partial_cmp(other) } + fn to_socket_addrs(&self) -> std::io::Result { self.as_str().to_socket_addrs() } } -impl PartialOrd<&str> for FixedString { - #[inline] - fn partial_cmp(&self, other: &&str) -> Option { - let llen = self.len(); - let rlen = other.chars().count(); - - match llen.cmp(&rlen) { - Ordering::Equal => {}, - - ordering => return Some(ordering), - }; - - for (i, rc) in other.chars().enumerate() { - let lc = self[i]; - - match lc.cmp(&rc) { - Ordering::Equal => {}, +impl TryFrom for FixedString { + type Error = ::Err; - ordering => return Some(ordering), - } - } + #[inline(always)] + fn try_from(value: char) -> Result { + let mut buf = [0x00; 0x4]; + let s = value.encode_utf8(&mut buf); - Some(Ordering::Equal) + s.parse() } } @@ -391,6 +456,7 @@ impl TryFrom<&str> for FixedString { } #[cfg(feature = "alloc")] +#[cfg_attr(doc, doc(cfg(feature = "alloc")))] impl TryFrom for FixedString { type Error = ::Err; @@ -398,8 +464,17 @@ impl TryFrom for FixedString { fn try_from(value: String) -> Result { Self::from_str(&value) } } +/// Converts the fixed-size string into a dynamic string. +/// +/// The capacity of the resulting [`String`] object is equal to the value of `N`. 
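Taken together with the `Deserialise` implementation above, a `FixedString` is now encoded as a length prefix (a big-endian `u32`, via the `usize` implementation) followed by only the used UTF-8 bytes, matching the updated test vectors later in this patch. A round-trip sketch (the capacity `0x10` is arbitrary):

```rust
use bzipper::{Deserialise, Dstream, FixedString, Serialise, Sstream};

let s = FixedString::<0x10>::try_from("bzipper").unwrap();

// At most `N` content bytes plus the `u32` length prefix:
let mut buf = [0x00; FixedString::<0x10>::MAX_SERIALISED_SIZE];
let mut stream = Sstream::new(&mut buf);

s.serialise(&mut stream).unwrap();

// Length prefix in big endian, then only the seven used UTF-8 bytes:
assert_eq!(stream, [0x00, 0x00, 0x00, 0x07, b'b', b'z', b'i', b'p', b'p', b'e', b'r']);

// Deserialising reads the prefix and exactly that many bytes back:
let stream = Dstream::new(&buf);
assert_eq!(FixedString::<0x10>::deserialise(&stream).unwrap(), s);
```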
#[cfg(feature = "alloc")] +#[cfg_attr(doc, doc(cfg(feature = "alloc")))] impl From> for String { #[inline(always)] - fn from(value: FixedString) -> Self { value.to_string() } + fn from(value: FixedString) -> Self { + let mut s = Self::with_capacity(N); + s.push_str(value.as_str()); + + s + } } diff --git a/bzipper/src/fixed_string/test.rs b/bzipper/src/fixed_string/test.rs index c86a944..09f4b39 100644 --- a/bzipper/src/fixed_string/test.rs +++ b/bzipper/src/fixed_string/test.rs @@ -25,9 +25,9 @@ use core::cmp::Ordering; #[test] fn test_fixed_string() { - let str0 = FixedString::<0xC>::try_from("Hello there!").unwrap(); - let str1 = FixedString::<0xE>::try_from("MEIN_GRO\u{1E9E}_GOTT").unwrap(); - let str2 = FixedString::<0x5>::try_from("Hello").unwrap(); + let str0 = FixedString::<0x0C>::try_from("Hello there!").unwrap(); + let str1 = FixedString::<0x12>::try_from("MEIN_GRO\u{1E9E}_GOTT").unwrap(); + let str2 = FixedString::<0x05>::try_from("Hello").unwrap(); assert_eq!(str0.partial_cmp(&str0), Some(Ordering::Equal)); assert_eq!(str0.partial_cmp(&str1), Some(Ordering::Less)); diff --git a/bzipper/src/lib.rs b/bzipper/src/lib.rs index dd64693..c505f50 100644 --- a/bzipper/src/lib.rs +++ b/bzipper/src/lib.rs @@ -23,7 +23,7 @@ //! Binary (de)serialisation. //! -//! Contrary to [Serde](https://crates.io/crates/serde/)/[Bincode](https://crates.io/crates/bincode/), the goal of bzipper is to serialise with a known size constraint. +//! In contrast to [Serde](https://crates.io/crates/serde/)/[Bincode](https://crates.io/crates/bincode/), the primary goal of bzipper is to serialise with a known size constraint. //! Therefore, this crate may be more suited for networking or other cases where a fixed-sized buffer is needed. //! //! Keep in mind that this project is still work-in-progress. @@ -41,15 +41,15 @@ //! //! # Usage //! -//! This crate revolves around the [`Serialise`] and [`Deserialise`] traits, both of which are commonly used in conjunction with streams (more specifically, [s-streams](Sstream) and [d-streams](Dstream)). +//! This crate revolves around the [`Serialise`] and [`Deserialise`] traits, both of which use *streams* -- or more specifically -- [s-streams](Sstream) and [d-streams](Dstream). //! //! Many core types come implemented with bzipper, including primitives as well as some standard library types such as [`Option`] and [`Result`](core::result::Result). //! -//! It is recommended in most cases to just derive these traits for custom types (enumerations and structures only). -//! Here, each field is chained in declaration order: +//! It is recommended in most cases to just derive these two traits for custom types (although this is only supported with enumerations and structures). +//! Here, each field is *chained* according to declaration order: //! //! ``` -//! use bzipper::{Deserialise, Serialise}; +//! use bzipper::{Buffer, Deserialise, Serialise}; //! //! #[derive(Debug, Deserialise, PartialEq, Serialise)] //! struct IoRegister { @@ -57,45 +57,55 @@ //! value: u16, //! } //! -//! let mut buf: [u8; IoRegister::SERIALISED_SIZE] = Default::default(); -//! IoRegister { addr: 0x04000000, value: 0x0402 }.serialise(&mut buf).unwrap(); +//! let mut buf = Buffer::new(); //! +//! buf.write(IoRegister { addr: 0x04000000, value: 0x0402 }).unwrap(); +//! +//! assert_eq!(buf.len(), 0x6); //! assert_eq!(buf, [0x04, 0x00, 0x00, 0x00, 0x04, 0x02]); //! -//! assert_eq!(IoRegister::deserialise(&buf).unwrap(), IoRegister { addr: 0x04000000, value: 0x0402 }); +//! 
assert_eq!(buf.read().unwrap(), IoRegister { addr: 0x04000000, value: 0x0402 }); //! ``` //! //! ## Serialisation //! -//! To serialise an object implementing `Serialise`, simply allocate a buffer for the serialisation. -//! The required size of any given serialisation is specified by the [`SERIALISED_SIZE`](Serialise::SERIALISED_SIZE) constant: +//! To serialise an object implementing `Serialise`, simply allocate a buffer for the serialisation and wrap it in an s-stream (*serialisation stream*) with the [`Sstream`] type. //! //! ``` -//! use bzipper::Serialise; +//! use bzipper::{Serialise, Sstream}; +//! +//! let mut buf = [Default::default(); char::MAX_SERIALISED_SIZE]; +//! let mut stream = Sstream::new(&mut buf); //! -//! let mut buf: [u8; char::SERIALISED_SIZE] = Default::default(); -//! 'Ж'.serialise(&mut buf).unwrap(); +//! 'Ж'.serialise(&mut stream).unwrap(); //! -//! assert_eq!(buf, [0x00, 0x00, 0x04, 0x16]); +//! assert_eq!(stream, [0x00, 0x00, 0x04, 0x16]); //! ``` //! -//! The only special requirement of the [`serialise`](Serialise::serialise) method is that the provided byte slice has an element count of exactly `SERIALISED_SIZE`. +//! The maximum size of any given serialisation is specified by the [`MAX_SERIALISED_SIZE`](Serialise::MAX_SERIALISED_SIZE) constant. //! -//! We can also use streams to *chain* multiple elements together. +//! We can also use streams to chain multiple elements together: //! //! ``` -//! use bzipper::Serialise; +//! use bzipper::{Serialise, Sstream}; //! -//! let mut buf: [u8; char::SERIALISED_SIZE * 5] = Default::default(); -//! let mut stream = bzipper::Sstream::new(&mut buf); +//! let mut buf = [Default::default(); char::MAX_SERIALISED_SIZE * 0x5]; +//! let mut stream = Sstream::new(&mut buf); //! -//! stream.append(&'ل'); -//! stream.append(&'ا'); -//! stream.append(&'م'); -//! stream.append(&'د'); -//! stream.append(&'ا'); +//! // Note: For serialising multiple characters, the +//! // `FixedString` type is usually preferred. //! -//! assert_eq!(buf, [0x00, 0x00, 0x06, 0x44, 0x00, 0x00, 0x06, 0x27, 0x00, 0x00, 0x06, 0x45, 0x00, 0x00, 0x06, 0x2F, 0x00, 0x00, 0x06, 0x27]); +//! 'ل'.serialise(&mut stream).unwrap(); +//! 'ا'.serialise(&mut stream).unwrap(); +//! 'م'.serialise(&mut stream).unwrap(); +//! 'د'.serialise(&mut stream).unwrap(); +//! 'ا'.serialise(&mut stream).unwrap(); +//! +//! assert_eq!(buf, [ +//! 0x00, 0x00, 0x06, 0x44, 0x00, 0x00, 0x06, 0x27, +//! 0x00, 0x00, 0x06, 0x45, 0x00, 0x00, 0x06, 0x2F, +//! 0x00, 0x00, 0x06, 0x27 +//! ]); //! ``` //! //! When serialising primitives, the resulting byte stream is in big endian (a.k.a. network endian). @@ -103,27 +113,35 @@ //! //! ## Deserialisation //! -//! Deserialisation works with an almost identical syntax to serialisation. +//! Deserialisation works with a similar syntax to serialisation. //! -//! To deserialise a buffer, simply call the [`deserialise`](Deserialise::deserialise) method: +//! D-streams (*deserialisation streams*) use the [`Dstream`] type and are constructed in a manner similar to s-streams. +//! To deserialise a buffer, simply call the [`deserialise`](Deserialise::deserialise) method with the strema: //! //! ``` -//! use bzipper::Deserialise; +//! use bzipper::{Deserialise, Dstream}; //! //! let data = [0x45, 0x54]; -//! assert_eq!(::deserialise(&data).unwrap(), 0x4554); +//! let stream = Dstream::new(&data); +//! assert_eq!(u16::deserialise(&stream).unwrap(), 0x4554); //! ``` //! -//! 
Just like with serialisations, the [`Dstream`] can be used to deserialise chained elements: +//! And just like s-streams, d-streams can also be used to handle chaining: //! //! ``` -//! use bzipper::Deserialise; +//! use bzipper::{Deserialise, Dstream}; //! //! let data = [0x45, 0x54]; -//! let stream = bzipper::Dstream::new(&data); +//! let stream = Dstream::new(&data); +//! +//! assert_eq!(u8::deserialise(&stream).unwrap(), 0x45); +//! assert_eq!(u8::deserialise(&stream).unwrap(), 0x54); //! -//! assert_eq!(stream.take::().unwrap(), 0x45); -//! assert_eq!(stream.take::().unwrap(), 0x54); +//! // The data can also be deserialised as a tuple (up +//! // to twelve elements). +//! +//! let stream = Dstream::new(&data); +//! assert_eq!(<(u8, u8)>::deserialise(&stream).unwrap(), (0x45, 0x54)); //! ``` #![no_std] @@ -139,8 +157,6 @@ extern crate alloc; extern crate std; /// Implements [`Deserialise`] for the provided type. -/// -/// This macro assumes that `Serialise` was also derived, although this is not strictly required as it is unenforceable. #[doc(inline)] pub use bzipper_macros::Deserialise; @@ -151,7 +167,7 @@ pub use bzipper_macros::Deserialise; /// For structures, each element is chained in **order of declaration.** /// For example, the following struct will serialise its field `foo` before `bar`: /// -/// ``` +/// ```rust /// use bzipper::Serialise; /// /// #[derive(Serialise)] @@ -161,9 +177,9 @@ pub use bzipper_macros::Deserialise; /// } /// ``` /// -/// Should the order of declaration change, then most of---if not all---previous dervied serialisations become void. +/// Should the structure's declaration change, then all previous derived serialisations be considered void. /// -/// The value of [`SERIALISED_SIZE`](Serialise::SERIALISED_SIZE) is set to the combined value of all fields. +/// The value of [`MAX_SERIALISED_SIZE`](Serialise::MAX_SERIALISED_SIZE) is set to the combined value of all fields. /// /// If the structure is a unit structure (i.e. it has *no* fields), it is serialised equivalently to the [unit] type. /// @@ -176,12 +192,12 @@ pub use bzipper_macros::Deserialise; /// Variants with fields are serialised exactly like structures. /// That is, each field is chained in order of declaration. /// -/// Each variant has its own serialised size, and the largest of these values is chosen as the serialised size of the enumeration type. +/// Each variant has its own value of `MAX_SERIALISED_SIZE`, and the largest of these values is chosen as the value of the enumeration's own `MAX_SERIALISED_SIZE`. /// /// # Unions /// /// Unions cannot derive `Serialise` due to the uncertainty of their contents. -/// The trait should therefore be implemented manually. +/// The trait should therefore be implemented manually for such types. #[doc(inline)] pub use bzipper_macros::Serialise; @@ -196,7 +212,6 @@ pub(in crate) use use_mod; use_mod!(pub deserialise); use_mod!(pub dstream); use_mod!(pub error); -use_mod!(pub fixed_iter); use_mod!(pub fixed_string); use_mod!(pub serialise); use_mod!(pub sstream); diff --git a/bzipper/src/serialise/mod.rs b/bzipper/src/serialise/mod.rs index 9c89e09..b22d68e 100644 --- a/bzipper/src/serialise/mod.rs +++ b/bzipper/src/serialise/mod.rs @@ -24,16 +24,20 @@ mod test; use crate::{Error, Result, Sstream}; -use core::{convert::Infallible, marker::PhantomData}; +use core::{convert::Infallible, hint::unreachable_unchecked, marker::PhantomData}; mod tuple; -/// Denotes a type capable of being serialised. +/// Denotes a type capable of serialisation. 
/// /// It is recommended to simply derive this trait for custom types. -/// It can, however, be manually implemented: +/// It can, however, also be manually implemented: +/// +/// ```rust +/// // Manual implementation of custom type. This im- +/// // plementation is equivalent to what would have +/// // been derived. /// -/// ``` /// use bzipper::{Result, Serialise, Sstream}; /// /// struct Foo { @@ -42,60 +46,46 @@ mod tuple; /// } /// /// impl Serialise for Foo { -/// const SERIALISED_SIZE: usize = u16::SERIALISED_SIZE + f32::SERIALISED_SIZE; -/// -/// fn serialise(&self, buf: &mut [u8]) -> Result<()> { -/// debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); +/// const MAX_SERIALISED_SIZE: usize = u16::MAX_SERIALISED_SIZE + f32::MAX_SERIALISED_SIZE; /// +/// fn serialise(&self, stream: &mut Sstream) -> Result<()> { /// // Serialise fields using chaining. /// -/// let mut stream = Sstream::new(buf); -/// -/// stream.append(&self.bar)?; -/// stream.append(&self.baz)?; +/// self.bar.serialise(stream)?; +/// self.baz.serialise(stream)?; /// /// Ok(()) /// } /// } /// ``` /// -/// Implementors of this trait should make sure that [`SERIALISED_SIZE`](Serialise::SERIALISED_SIZE) is properly defined. -/// This value indicates the definitive size of any serialisation of the `Self` type. +/// Implementors of this trait should make sure that [`MAX_SERIALISED_SIZE`](Self::MAX_SERIALISED_SIZE) is properly defined. +/// This value indicates the definitively largest size of any serialisation of `Self`. pub trait Serialise: Sized { - /// The amount of bytes that result from a serialisation. + /// The maximum amount of bytes that can result from a serialisation. /// /// Implementors of this trait should make sure that no serialisation (or deserialisation) uses more than the amount specified by this constant. - /// When using these traits, always assume that exactly this amount has or will be used. - const SERIALISED_SIZE: usize; + const MAX_SERIALISED_SIZE: usize; - /// Serialises `self` into a slice. + /// Serialises `self` into the given s-stream. /// - /// In most cases it is wiser to chain serialisations using [`Sstream`] instead of using this method directly. + /// This method must **never** write more bytes than specified by [`MAX_SERIALISED_SIZE`](Self::MAX_SERIALISED_SIZE). + /// Doing so is considered a logic error. /// /// # Errors /// - /// If serialisation failed, e.g. by an unencodable value being provided, an error is returned. - /// - /// # Panics - /// - /// This method will usually panic if the provided slice has a length *less* than the value of `SERIALISED_SIZE`. - /// Official implementations of this trait (including those that are derived) always panic in debug mode if the provided slice has a length that is different at all. - fn serialise(&self, buf: &mut [u8]) -> Result<()>; + /// If serialisation fails, e.g. by an unencodable value being provided, an error is returned. + fn serialise(&self, stream: &mut Sstream) -> Result<()>; } macro_rules! 
impl_numeric { ($ty:ty) => { impl ::bzipper::Serialise for $ty { - const SERIALISED_SIZE: usize = size_of::<$ty>(); + const MAX_SERIALISED_SIZE: usize = size_of::<$ty>(); #[inline] - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - ::core::debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let data = self.to_be_bytes(); - buf.copy_from_slice(&data); + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + stream.write(&self.to_be_bytes())?; Ok(()) } @@ -106,101 +96,82 @@ macro_rules! impl_numeric { macro_rules! impl_non_zero { ($ty:ty) => { impl ::bzipper::Serialise for ::core::num::NonZero<$ty> { - const SERIALISED_SIZE: usize = ::core::mem::size_of::<$ty>(); + const MAX_SERIALISED_SIZE: usize = ::core::mem::size_of::<$ty>(); #[inline(always)] - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - self.get().serialise(buf) - } + fn serialise(&self, stream: &mut Sstream) -> Result<()> { self.get().serialise(stream) } } }; } impl Serialise for [T; N] { - const SERIALISED_SIZE: usize = T::SERIALISED_SIZE * N; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); + const MAX_SERIALISED_SIZE: usize = T::MAX_SERIALISED_SIZE * N; - let mut stream = Sstream::new(buf); - - for v in self { stream.append(v)? } + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + for v in self { v.serialise(stream)? } Ok(()) } } impl Serialise for bool { - const SERIALISED_SIZE: usize = u8::SERIALISED_SIZE; + const MAX_SERIALISED_SIZE: usize = u8::MAX_SERIALISED_SIZE; #[inline(always)] - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - u8::from(*self).serialise(buf) + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + u8::from(*self).serialise(stream) } } impl Serialise for char { - const SERIALISED_SIZE: usize = u32::SERIALISED_SIZE; + const MAX_SERIALISED_SIZE: usize = u32::MAX_SERIALISED_SIZE; #[inline(always)] - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - u32::from(*self).serialise(buf) + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + u32::from(*self).serialise(stream) } } // Especially useful for `Result`. -// *If* that is needed, of course. +// *If* that is even needed, of course. 
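As a rough illustration of the layouts produced by the implementations above: primitives are written with `to_be_bytes`, `bool` occupies a single byte, and tuples simply chain their elements. This sketch assumes the corresponding `Deserialise` implementations (including the tuple ones mentioned in the crate docs) accept the same layout:

```rust
use bzipper::{Deserialise, Dstream, Serialise, Sstream};

let mut buf = [0x00; u16::MAX_SERIALISED_SIZE + u32::MAX_SERIALISED_SIZE + bool::MAX_SERIALISED_SIZE];
let mut stream = Sstream::new(&mut buf);

// Primitives are written in big endian; `bool` is encoded as a `u8`.
0x4554_u16.serialise(&mut stream).unwrap();
0xDEAD_BEEF_u32.serialise(&mut stream).unwrap();
true.serialise(&mut stream).unwrap();

assert_eq!(buf, [0x45, 0x54, 0xDE, 0xAD, 0xBE, 0xEF, 0x01]);

// The same buffer can be read back in one go as a tuple:
let stream = Dstream::new(&buf);
assert_eq!(<(u16, u32, bool)>::deserialise(&stream).unwrap(), (0x4554, 0xDEAD_BEEF, true));
```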
impl Serialise for Infallible { - const SERIALISED_SIZE: usize = 0x0; + const MAX_SERIALISED_SIZE: usize = 0x0; #[inline(always)] - fn serialise(&self, _buf: &mut [u8]) -> Result<()> { unreachable!() } + fn serialise(&self, _stream: &mut Sstream) -> Result<()> { unsafe { unreachable_unchecked() } } } impl Serialise for isize { - const SERIALISED_SIZE: usize = i32::SERIALISED_SIZE; + const MAX_SERIALISED_SIZE: usize = i32::MAX_SERIALISED_SIZE; #[inline] - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - + fn serialise(&self, stream: &mut Sstream) -> Result<()> { let value = i32::try_from(*self) - .map_err(|_| Error::IsizeOutOfRange { value: *self })?; + .map_err(|_| Error::IsizeOutOfRange(*self))?; - value.serialise(buf) + value.serialise(stream) } } impl Serialise for Option { - const SERIALISED_SIZE: usize = bool::SERIALISED_SIZE + T::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); + const MAX_SERIALISED_SIZE: usize = bool::MAX_SERIALISED_SIZE + T::MAX_SERIALISED_SIZE; + fn serialise(&self, stream: &mut Sstream) -> Result<()> { // The first element is of type `bool` and is // called the "sign." It signifies whether there is - // a following element or not. The remaining bytes - // are preserved if `self` is `None`. - - let mut stream = Sstream::new(buf); + // a following element or not. match *self { None => { - stream.append(&false)?; + false.serialise(stream)?; // No need to zero-fill. }, Some(ref v) => { - stream.append(&true)?; - stream.append(v)?; + true.serialise(stream)?; + v.serialise(stream)?; }, }; @@ -209,37 +180,30 @@ impl Serialise for Option { } impl Serialise for PhantomData { - const SERIALISED_SIZE: usize = size_of::(); + const MAX_SERIALISED_SIZE: usize = size_of::(); #[inline(always)] - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - Ok(()) - } + fn serialise(&self, _stream: &mut Sstream) -> Result<()> { Ok(()) } } impl Serialise for core::result::Result where T: Serialise, E: Serialise, { - const SERIALISED_SIZE: usize = bool::SERIALISED_SIZE + if size_of::() > size_of::() { size_of::() } else { size_of::() }; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); + const MAX_SERIALISED_SIZE: usize = bool::MAX_SERIALISED_SIZE + if size_of::() > size_of::() { size_of::() } else { size_of::() }; + fn serialise(&self, stream: &mut Sstream) -> Result<()> { // Remember the descriminant. 
+ match *self { Ok(ref v) => { - stream.append(&false)?; - stream.append(v)?; + false.serialise(stream)?; + v.serialise(stream)?; }, Err(ref e) => { - stream.append(&true)?; - stream.append(e)?; + true.serialise(stream)?; + e.serialise(stream)?; }, }; @@ -248,26 +212,20 @@ where } impl Serialise for () { - const SERIALISED_SIZE: usize = size_of::(); + const MAX_SERIALISED_SIZE: usize = 0x0; #[inline(always)] - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - Ok(()) - } + fn serialise(&self, _stream: &mut Sstream) -> Result<()> { Ok(()) } } impl Serialise for usize { - const SERIALISED_SIZE: Self = u32::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); + const MAX_SERIALISED_SIZE: Self = u32::MAX_SERIALISED_SIZE; + fn serialise(&self, stream: &mut Sstream) -> Result<()> { let value = u32::try_from(*self) - .map_err(|_| Error::UsizeOutOfRange { value: *self })?; + .map_err(|_| Error::UsizeOutOfRange(*self))?; - value.serialise(buf) + value.serialise(stream) } } diff --git a/bzipper/src/serialise/test.rs b/bzipper/src/serialise/test.rs index f2332a5..2dee489 100644 --- a/bzipper/src/serialise/test.rs +++ b/bzipper/src/serialise/test.rs @@ -18,7 +18,7 @@ // er General Public License along with bzipper. If // not, see . -use crate::{FixedString, Serialise}; +use crate::{FixedString, Serialise, Sstream}; #[test] fn test_serialise() { @@ -32,19 +32,19 @@ fn test_serialise() { Teacher { initials: [char; 0x3] }, } - assert_eq!(Foo::SERIALISED_SIZE, 0x4); - assert_eq!(Bar::SERIALISED_SIZE, 0x10); + assert_eq!(Foo::MAX_SERIALISED_SIZE, 0x4); + assert_eq!(Bar::MAX_SERIALISED_SIZE, 0x10); macro_rules! test { ($ty:ty: $value:expr => $data:expr) => {{ use ::bzipper::Serialise; - let data: [u8; <$ty as Serialise>::SERIALISED_SIZE] = $data; + let mut buf = [0x00; <$ty as Serialise>::MAX_SERIALISED_SIZE]; - let mut buf = [0x00; <$ty as Serialise>::SERIALISED_SIZE]; - <$ty as Serialise>::serialise(&mut $value, &mut buf).unwrap(); + let mut stream = Sstream::new(&mut buf); + <$ty as Serialise>::serialise(&mut $value, &mut stream).unwrap(); - assert_eq!(buf, data); + assert_eq!(stream, $data); }}; } @@ -63,14 +63,11 @@ fn test_serialise() { 0x83, 0x2E, 0x3C, 0x2C, 0x84, 0x10, 0x58, 0x1A, ]); - test!(FixedString::<0x1>: FixedString::try_from("A").unwrap() => [0x00, 0x00, 0x00, 0x41, 0x00, 0x00, 0x00, 0x01]); + test!(FixedString::<0x1>: FixedString::try_from("A").unwrap() => [0x00, 0x00, 0x00, 0x01, 0x41]); - test!(FixedString::<0x9>: FixedString::try_from("l\u{00F8}gma\u{00F0}ur").unwrap() => [ - 0x00, 0x00, 0x00, 0x6C, 0x00, 0x00, 0x00, 0xF8, - 0x00, 0x00, 0x00, 0x67, 0x00, 0x00, 0x00, 0x6D, - 0x00, 0x00, 0x00, 0x61, 0x00, 0x00, 0x00, 0xF0, - 0x00, 0x00, 0x00, 0x75, 0x00, 0x00, 0x00, 0x72, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, + test!(FixedString::<0x24>: FixedString::try_from("l\u{00F8}gma\u{00F0}ur").unwrap() => [ + 0x00, 0x00, 0x00, 0x0A, 0x6C, 0xC3, 0xB8, 0x67, + 0x6D, 0x61, 0xC3, 0xB0, 0x75, 0x72, ]); test!([char; 0x5]: ['\u{03B4}', '\u{0190}', '\u{03BB}', '\u{03A4}', '\u{03B1}'] => [ @@ -79,7 +76,7 @@ fn test_serialise() { 0x00, 0x00, 0x03, 0xB1, ]); - test!(Result::: Ok(0x45_45) => [0x00, 0x45, 0x45, 0x00, 0x00]); + test!(Result::: Ok(0x45_45) => [0x00, 0x45, 0x45]); test!(Result::: Err(char::REPLACEMENT_CHARACTER) => [0x01, 0x00, 0x00, 0xFF, 0xFD]); test!(Option<()>: None => [0x00]); @@ -87,15 +84,9 @@ fn test_serialise() { test!(Foo: Foo('\u{FDF2}') => 
[0x00, 0x00, 0xFD, 0xF2]); - test!(Bar: Bar::Unit => [ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - ]); + test!(Bar: Bar::Unit => [0x00, 0x00, 0x00, 0x00]); - test!(Bar: Bar::Pretty(true) => [ - 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - ]); + test!(Bar: Bar::Pretty(true) => [0x00, 0x00, 0x00, 0x01, 0x01]); test!(Bar: Bar::Teacher { initials: ['T', 'L', '\0'] } => [ 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x54, diff --git a/bzipper/src/serialise/tuple.rs b/bzipper/src/serialise/tuple.rs index feee2e2..f2332b8 100644 --- a/bzipper/src/serialise/tuple.rs +++ b/bzipper/src/serialise/tuple.rs @@ -24,15 +24,11 @@ use crate::{Result, Serialise, Sstream}; impl Serialise for (T0, ) where T0: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE; - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; Ok(()) } @@ -42,17 +38,13 @@ impl Serialise for (T0, T1) where T0: Serialise, T1: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE; - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; Ok(()) } @@ -63,19 +55,15 @@ where T0: Serialise, T1: Serialise, T2: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + T2::MAX_SERIALISED_SIZE; - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; Ok(()) } @@ -87,21 +75,17 @@ where T1: Serialise, T2: Serialise, T3: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE - + T3::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; - stream.append(&self.3)?; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + T2::MAX_SERIALISED_SIZE + + T3::MAX_SERIALISED_SIZE; + + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; + self.3.serialise(stream)?; Ok(()) } @@ -114,23 +98,19 @@ where T2: Serialise, T3: Serialise, T4: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE - + T3::SERIALISED_SIZE - + T4::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> 
Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; - stream.append(&self.3)?; - stream.append(&self.4)?; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + T2::MAX_SERIALISED_SIZE + + T3::MAX_SERIALISED_SIZE + + T4::MAX_SERIALISED_SIZE; + + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; + self.3.serialise(stream)?; + self.4.serialise(stream)?; Ok(()) } @@ -144,25 +124,21 @@ where T3: Serialise, T4: Serialise, T5: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE - + T3::SERIALISED_SIZE - + T4::SERIALISED_SIZE - + T5::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; - stream.append(&self.3)?; - stream.append(&self.4)?; - stream.append(&self.5)?; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + T2::MAX_SERIALISED_SIZE + + T3::MAX_SERIALISED_SIZE + + T4::MAX_SERIALISED_SIZE + + T5::MAX_SERIALISED_SIZE; + + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; + self.3.serialise(stream)?; + self.4.serialise(stream)?; + self.5.serialise(stream)?; Ok(()) } @@ -177,27 +153,23 @@ where T4: Serialise, T5: Serialise, T6: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE - + T3::SERIALISED_SIZE - + T4::SERIALISED_SIZE - + T5::SERIALISED_SIZE - + T6::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; - stream.append(&self.3)?; - stream.append(&self.4)?; - stream.append(&self.5)?; - stream.append(&self.6)?; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + T2::MAX_SERIALISED_SIZE + + T3::MAX_SERIALISED_SIZE + + T4::MAX_SERIALISED_SIZE + + T5::MAX_SERIALISED_SIZE + + T6::MAX_SERIALISED_SIZE; + + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; + self.3.serialise(stream)?; + self.4.serialise(stream)?; + self.5.serialise(stream)?; + self.6.serialise(stream)?; Ok(()) } @@ -213,29 +185,25 @@ where T5: Serialise, T6: Serialise, T7: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE - + T3::SERIALISED_SIZE - + T4::SERIALISED_SIZE - + T5::SERIALISED_SIZE - + T6::SERIALISED_SIZE - + T7::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; - stream.append(&self.3)?; - stream.append(&self.4)?; - stream.append(&self.5)?; - stream.append(&self.6)?; - stream.append(&self.7)?; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + 
T2::MAX_SERIALISED_SIZE + + T3::MAX_SERIALISED_SIZE + + T4::MAX_SERIALISED_SIZE + + T5::MAX_SERIALISED_SIZE + + T6::MAX_SERIALISED_SIZE + + T7::MAX_SERIALISED_SIZE; + + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; + self.3.serialise(stream)?; + self.4.serialise(stream)?; + self.5.serialise(stream)?; + self.6.serialise(stream)?; + self.7.serialise(stream)?; Ok(()) } @@ -252,31 +220,27 @@ where T6: Serialise, T7: Serialise, T8: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE - + T3::SERIALISED_SIZE - + T4::SERIALISED_SIZE - + T5::SERIALISED_SIZE - + T6::SERIALISED_SIZE - + T7::SERIALISED_SIZE - + T8::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; - stream.append(&self.3)?; - stream.append(&self.4)?; - stream.append(&self.5)?; - stream.append(&self.6)?; - stream.append(&self.7)?; - stream.append(&self.8)?; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + T2::MAX_SERIALISED_SIZE + + T3::MAX_SERIALISED_SIZE + + T4::MAX_SERIALISED_SIZE + + T5::MAX_SERIALISED_SIZE + + T6::MAX_SERIALISED_SIZE + + T7::MAX_SERIALISED_SIZE + + T8::MAX_SERIALISED_SIZE; + + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; + self.3.serialise(stream)?; + self.4.serialise(stream)?; + self.5.serialise(stream)?; + self.6.serialise(stream)?; + self.7.serialise(stream)?; + self.8.serialise(stream)?; Ok(()) } @@ -294,33 +258,29 @@ where T7: Serialise, T8: Serialise, T9: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE - + T3::SERIALISED_SIZE - + T4::SERIALISED_SIZE - + T5::SERIALISED_SIZE - + T6::SERIALISED_SIZE - + T7::SERIALISED_SIZE - + T8::SERIALISED_SIZE - + T9::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; - stream.append(&self.3)?; - stream.append(&self.4)?; - stream.append(&self.5)?; - stream.append(&self.6)?; - stream.append(&self.7)?; - stream.append(&self.8)?; - stream.append(&self.9)?; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + T2::MAX_SERIALISED_SIZE + + T3::MAX_SERIALISED_SIZE + + T4::MAX_SERIALISED_SIZE + + T5::MAX_SERIALISED_SIZE + + T6::MAX_SERIALISED_SIZE + + T7::MAX_SERIALISED_SIZE + + T8::MAX_SERIALISED_SIZE + + T9::MAX_SERIALISED_SIZE; + + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; + self.3.serialise(stream)?; + self.4.serialise(stream)?; + self.5.serialise(stream)?; + self.6.serialise(stream)?; + self.7.serialise(stream)?; + self.8.serialise(stream)?; + self.9.serialise(stream)?; Ok(()) } @@ -339,35 +299,31 @@ where T8: Serialise, T9: Serialise, T10: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE - + T3::SERIALISED_SIZE - + T4::SERIALISED_SIZE - + T5::SERIALISED_SIZE - + T6::SERIALISED_SIZE - + T7::SERIALISED_SIZE - + 
T8::SERIALISED_SIZE - + T9::SERIALISED_SIZE - + T10::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; - stream.append(&self.3)?; - stream.append(&self.4)?; - stream.append(&self.5)?; - stream.append(&self.6)?; - stream.append(&self.7)?; - stream.append(&self.8)?; - stream.append(&self.9)?; - stream.append(&self.10)?; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + T2::MAX_SERIALISED_SIZE + + T3::MAX_SERIALISED_SIZE + + T4::MAX_SERIALISED_SIZE + + T5::MAX_SERIALISED_SIZE + + T6::MAX_SERIALISED_SIZE + + T7::MAX_SERIALISED_SIZE + + T8::MAX_SERIALISED_SIZE + + T9::MAX_SERIALISED_SIZE + + T10::MAX_SERIALISED_SIZE; + + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; + self.3.serialise(stream)?; + self.4.serialise(stream)?; + self.5.serialise(stream)?; + self.6.serialise(stream)?; + self.7.serialise(stream)?; + self.8.serialise(stream)?; + self.9.serialise(stream)?; + self.10.serialise(stream)?; Ok(()) } @@ -387,37 +343,33 @@ where T9: Serialise, T10: Serialise, T11: Serialise, { - const SERIALISED_SIZE: usize = - T0::SERIALISED_SIZE - + T1::SERIALISED_SIZE - + T2::SERIALISED_SIZE - + T3::SERIALISED_SIZE - + T4::SERIALISED_SIZE - + T5::SERIALISED_SIZE - + T6::SERIALISED_SIZE - + T7::SERIALISED_SIZE - + T8::SERIALISED_SIZE - + T9::SERIALISED_SIZE - + T10::SERIALISED_SIZE - + T11::SERIALISED_SIZE; - - fn serialise(&self, buf: &mut [u8]) -> Result<()> { - debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = Sstream::new(buf); - - stream.append(&self.0)?; - stream.append(&self.1)?; - stream.append(&self.2)?; - stream.append(&self.3)?; - stream.append(&self.4)?; - stream.append(&self.5)?; - stream.append(&self.6)?; - stream.append(&self.7)?; - stream.append(&self.8)?; - stream.append(&self.9)?; - stream.append(&self.10)?; - stream.append(&self.11)?; + const MAX_SERIALISED_SIZE: usize = + T0::MAX_SERIALISED_SIZE + + T1::MAX_SERIALISED_SIZE + + T2::MAX_SERIALISED_SIZE + + T3::MAX_SERIALISED_SIZE + + T4::MAX_SERIALISED_SIZE + + T5::MAX_SERIALISED_SIZE + + T6::MAX_SERIALISED_SIZE + + T7::MAX_SERIALISED_SIZE + + T8::MAX_SERIALISED_SIZE + + T9::MAX_SERIALISED_SIZE + + T10::MAX_SERIALISED_SIZE + + T11::MAX_SERIALISED_SIZE; + + fn serialise(&self, stream: &mut Sstream) -> Result<()> { + self.0.serialise(stream)?; + self.1.serialise(stream)?; + self.2.serialise(stream)?; + self.3.serialise(stream)?; + self.4.serialise(stream)?; + self.5.serialise(stream)?; + self.6.serialise(stream)?; + self.7.serialise(stream)?; + self.8.serialise(stream)?; + self.9.serialise(stream)?; + self.10.serialise(stream)?; + self.11.serialise(stream)?; Ok(()) } diff --git a/bzipper/src/sstream/mod.rs b/bzipper/src/sstream/mod.rs index 257be95..470a27f 100644 --- a/bzipper/src/sstream/mod.rs +++ b/bzipper/src/sstream/mod.rs @@ -19,16 +19,17 @@ // er General Public License along with bzipper. If // not, see . -use crate::{Error, Result, Serialise}; +use crate::{Dstream, Error, Result}; use core::cell::Cell; +use core::fmt::{Debug, Formatter}; -/// Byte stream for deserialisation. +/// Byte stream suitable for serialisation. /// -/// This type borrows a slice, keeping track internally of the used bytes. 
+/// This type mutably borrows a buffer, keeping track internally of the used bytes. pub struct Sstream<'a> { - buf: &'a mut [u8], - pos: Cell, + pub(in crate) buf: &'a mut [u8], + pub(in crate) pos: Cell, } impl<'a> Sstream<'a> { @@ -37,22 +38,86 @@ impl<'a> Sstream<'a> { #[must_use] pub fn new(buf: &'a mut [u8]) -> Self { Self { buf, pos: Cell::new(0x0) } } - /// Extends the stream by appending a new serialisation. - /// - /// # Errors - /// - /// If the stream cannot hold any arbitrary serialisation of `T`, an [`EndOfStream`](Error::EndOfStream) instance is returned. + /// Appends raw bytes to the stream. #[inline] - pub fn append(&mut self, value: &T) -> Result<()> { + pub fn write(&mut self, bytes: &[u8]) -> Result<()> { let rem = self.buf.len() - self.pos.get(); - let req = T::SERIALISED_SIZE; + let req = bytes.len(); - if rem < req { return Err(Error::EndOfStream { req, rem }) }; + if rem < req { return Err(Error::EndOfStream { req, rem }) } let start = self.pos.get(); let stop = start + req; self.pos.set(stop); - value.serialise(&mut self.buf[start..stop]) + + let buf = &mut self.buf[start..stop]; + buf.copy_from_slice(bytes); + + Ok(()) } + + /// Gets a pointer to the first byte in the stream. + #[inline(always)] + #[must_use] + pub const fn as_ptr(&self) -> *const u8 { self.buf.as_ptr() } + + /// Gets an immutable slice of the stream. + #[inline(always)] + #[must_use] + pub const fn as_slice(&self) -> &[u8] { + let ptr = self.as_ptr(); + let len = self.len(); + + unsafe { core::slice::from_raw_parts(ptr, len) } + } + + /// Gets the length of the stream. + #[inline(always)] + #[must_use] + pub const fn len(&self) -> usize { unsafe { self.pos.as_ptr().read() } } + + /// Tests if the stream is empty. + /// + /// If no serialisations have been made so far, this method returns `false`. + #[inline(always)] + #[must_use] + pub const fn is_empty(&self) -> bool { self.len() == 0x0 } + + /// Tests if the stream is full. + /// + /// Note that zero-sized types such as [`()`](unit) can still be serialised into this stream. 
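A brief usage sketch of the reworked s-stream, exercising `write` and the new inspection methods (the buffer size and byte values are arbitrary):

```rust
use bzipper::Sstream;

let mut buf = [0x00; 0x8];
let mut stream = Sstream::new(&mut buf);

assert!(stream.is_empty());

// Raw bytes can be appended directly, without going through `Serialise`:
stream.write(&[0xDE, 0xAD, 0xBE, 0xEF]).unwrap();

assert_eq!(stream.len(), 0x4);
assert_eq!(stream.as_slice(), &[0xDE, 0xAD, 0xBE, 0xEF]);
assert!(!stream.is_full());
```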
+ #[inline(always)] + #[must_use] + pub const fn is_full(&self) -> bool { self.len() == self.buf.len() } +} + +impl Debug for Sstream<'_> { + #[inline(always)] + fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { Debug::fmt(self.as_slice(), f) } +} + +impl<'a> From<&'a mut [u8]> for Sstream<'a> { + #[inline(always)] + fn from(value: &'a mut [u8]) -> Self { Self::new(value) } +} + +impl PartialEq for Sstream<'_> { + #[inline(always)] + fn eq(&self, other: &Self) -> bool { self.as_slice() == other.as_slice() } +} + +impl PartialEq<&[u8]> for Sstream<'_> { + #[inline(always)] + fn eq(&self, other: &&[u8]) -> bool { self.as_slice() == *other } +} + +impl PartialEq<[u8; N]> for Sstream<'_> { + #[inline(always)] + fn eq(&self, other: &[u8; N]) -> bool { self.as_slice() == other.as_slice() } +} + +impl<'a> From> for Dstream<'a> { + #[inline(always)] + fn from(value: Sstream<'a>) -> Self { Self { data: value.buf, pos: value.pos } } } diff --git a/bzipper_macros/Cargo.toml b/bzipper_macros/Cargo.toml index 5b72f9d..8167029 100644 --- a/bzipper_macros/Cargo.toml +++ b/bzipper_macros/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "bzipper_macros" -version = "0.6.2" +version = "0.7.0" edition = "2021" documentation = "https://docs.rs/bzipper_macros/" diff --git a/bzipper_macros/src/impls/deserialise_enum.rs b/bzipper_macros/src/impls/deserialise_enum.rs index 7065c86..4c88a41 100644 --- a/bzipper_macros/src/impls/deserialise_enum.rs +++ b/bzipper_macros/src/impls/deserialise_enum.rs @@ -38,36 +38,30 @@ pub fn deserialise_enum(data: &DataEnum) -> TokenStream { let mut chain_commands = Punctuated::::new(); for field in &variant.fields { - let field_ty = &field.ty; - let command = field.ident .as_ref() .map_or_else( - || quote! { stream.take::<#field_ty>()? }, - |field_name| quote! { #field_name: stream.take::<#field_ty>()? } + || quote! { Deserialise::deserialise(stream)? }, + |field_name| quote! { #field_name: Deserialise::deserialise(stream)? } ); chain_commands.push(command); } - let block = match variant.fields { + let value = match variant.fields { Fields::Named( ..) => quote! { Self::#variant_name { #chain_commands } }, Fields::Unnamed(..) => quote! { Self::#variant_name(#chain_commands) }, Fields::Unit => quote! { Self::#variant_name }, }; - match_arms.push(quote! { #discriminant => #block }); + match_arms.push(quote! { #discriminant => #value }); } - match_arms.push(quote! { value => return Err(::bzipper::Error::InvalidDiscriminant { value }) }); + match_arms.push(quote! { value => return Err(::bzipper::Error::InvalidDiscriminant(value)) }); quote! { - fn deserialise(data: &[u8]) -> ::bzipper::Result { - ::core::debug_assert_eq!(data.len(), ::SERIALISED_SIZE); - - let mut stream = ::bzipper::Dstream::new(data); - - let value = match (stream.take::()?) { #match_arms }; + fn deserialise(stream: &::bzipper::Dstream) -> ::bzipper::Result { + let value = match (::deserialise(stream)?) { #match_arms }; Ok(value) } } diff --git a/bzipper_macros/src/impls/deserialise_struct.rs b/bzipper_macros/src/impls/deserialise_struct.rs index 414a313..f8c167b 100644 --- a/bzipper_macros/src/impls/deserialise_struct.rs +++ b/bzipper_macros/src/impls/deserialise_struct.rs @@ -26,52 +26,35 @@ use syn::punctuated::Punctuated; #[must_use] pub fn deserialise_struct(data: &DataStruct) -> TokenStream { - if let Fields::Named(..) = data.fields { - let mut chain_commands = Punctuated::::new(); - - for field in &data.fields { - let name = field.ident.as_ref().unwrap(); - let ty = &field.ty; - - chain_commands.push(quote! 
{ #name: stream.take::<#ty>()? }); - } - + if matches!(data.fields, Fields::Unit) { quote! { - fn deserialise(data: &[u8]) -> ::bzipper::Result { - ::core::debug_assert_eq!(data.len(), ::SERIALISED_SIZE); - - let stream = ::bzipper::Dstream::new(data); - - Ok(Self { #chain_commands }) - } + #[inline(always)] + fn deserialise(_stream: &::bzipper::Dstream) -> ::bzipper::Result { Ok(Self) } } - } else if let Fields::Unnamed(..) = data.fields { + } else { let mut chain_commands = Punctuated::::new(); for field in &data.fields { - let ty = &field.ty; - - chain_commands.push(quote! { stream.take::<#ty>()? }); + let command = field.ident + .as_ref() + .map_or_else( + || quote! { Deserialise::deserialise(stream)? }, + |field_name| quote! { #field_name: Deserialise::deserialise(stream)? } + ); + + chain_commands.push(command); } - quote! { - fn deserialise(data: &[u8]) -> ::bzipper::Result { - ::core::debug_assert_eq!(data.len(), ::SERIALISED_SIZE); - - let stream = ::bzipper::Dstream::new(data); - - Ok(Self(#chain_commands)) - } - } - } else { - // Fields::Unit + let value = if let Fields::Named(..) = data.fields { + quote! { Self { #chain_commands } } + } else { + quote! { Self(#chain_commands) } + }; quote! { - #[inline(always)] - fn deserialise(data: &[u8]) -> ::bzipper::Result { - ::core::debug_assert_eq!(data.len(), ::SERIALISED_SIZE); - - Ok(Self) + fn deserialise(stream: &::bzipper::Dstream) -> ::bzipper::Result { + let value = #value; + Ok(value) } } } diff --git a/bzipper_macros/src/impls/serialise_enum.rs b/bzipper_macros/src/impls/serialise_enum.rs index a554351..825886c 100644 --- a/bzipper_macros/src/impls/serialise_enum.rs +++ b/bzipper_macros/src/impls/serialise_enum.rs @@ -38,15 +38,15 @@ pub fn serialise_enum(data: &DataEnum) -> TokenStream { let variant_name = &variant.ident; let discriminant = u32::try_from(index) - .expect("enumeration discriminants must be representable in `u32`"); + .expect("enumeration discriminants must be representable as `u32`"); // Discriminant size: - serialised_size.push(quote! { ::SERIALISED_SIZE }); + serialised_size.push(quote! { ::MAX_SERIALISED_SIZE }); let mut captures = Punctuated::::new(); let mut chain_commands = Punctuated::::new(); - chain_commands.push(quote! { stream.append(&#discriminant)? }); + chain_commands.push(quote! { #discriminant.serialise(stream)? }); for (index, field) in variant.fields.iter().enumerate() { let field_ty = &field.ty; @@ -55,14 +55,14 @@ pub fn serialise_enum(data: &DataEnum) -> TokenStream { .as_ref() .map_or_else(|| Ident::new(&format!("v{index}"), Span::call_site()), Clone::clone); - serialised_size.push(quote! { <#field_ty as ::bzipper::Serialise>::SERIALISED_SIZE }); + serialised_size.push(quote! { <#field_ty as ::bzipper::Serialise>::MAX_SERIALISED_SIZE }); captures.push(Capture { ref_token: Token![ref](Span::call_site()), ident: field_name.clone(), }); - chain_commands.push(quote! { stream.append(#field_name)? }); + chain_commands.push(quote! { #field_name.serialise(stream)? }); } chain_commands.push_punct(Token![;](Span::call_site())); @@ -90,14 +90,11 @@ pub fn serialise_enum(data: &DataEnum) -> TokenStream { size_tests.push(quote! { { core::unreachable!(); } }); quote! 
{ - const SERIALISED_SIZE: usize = const { #size_tests }; - - fn serialise(&self, buf: &mut [u8]) -> ::bzipper::Result<()> { - ::core::debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = ::bzipper::Sstream::new(buf); + const MAX_SERIALISED_SIZE: usize = const { #size_tests }; + fn serialise(&self, stream: &mut ::bzipper::Sstream) -> ::bzipper::Result<()> { match (*self) { #match_arms } + Ok(()) } } diff --git a/bzipper_macros/src/impls/serialise_struct.rs b/bzipper_macros/src/impls/serialise_struct.rs index 308a6bb..bd81a39 100644 --- a/bzipper_macros/src/impls/serialise_struct.rs +++ b/bzipper_macros/src/impls/serialise_struct.rs @@ -33,14 +33,10 @@ use syn::{ pub fn serialise_struct(data: &DataStruct) -> TokenStream { if matches!(data.fields, Fields::Unit) { quote! { - const SERIALISED_SIZE: usize = 0x0; + const MAX_SERIALISED_SIZE: usize = 0x0; #[inline(always)] - fn serialise(&self, buf: &mut [u8]) -> ::bzipper::Result<()> { - ::core::debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - Ok(()) - } + fn serialise(&self, stream: &mut ::bzipper::Sstream) -> ::bzipper::Result<()> { Ok(()) } } } else { let mut serialised_size = Punctuated::::new(); @@ -53,21 +49,17 @@ pub fn serialise_struct(data: &DataStruct) -> TokenStream { .as_ref() .map_or_else(|| Index::from(index).to_token_stream(), ToTokens::to_token_stream); - serialised_size.push(quote! { <#ty as ::bzipper::Serialise>::SERIALISED_SIZE }); + serialised_size.push(quote! { <#ty as ::bzipper::Serialise>::MAX_SERIALISED_SIZE }); - chain_commands.push(quote! { stream.append(&self.#name)? }); + chain_commands.push(quote! { self.#name.serialise(stream)? }); } chain_commands.push_punct(Token![;](Span::call_site())); quote! { - const SERIALISED_SIZE: usize = #serialised_size; - - fn serialise(&self, buf: &mut [u8]) -> ::bzipper::Result<()> { - ::core::debug_assert_eq!(buf.len(), Self::SERIALISED_SIZE); - - let mut stream = ::bzipper::Sstream::new(buf); + const MAX_SERIALISED_SIZE: usize = #serialised_size; + fn serialise(&self, stream: &mut ::bzipper::Sstream) -> ::bzipper::Result<()> { #chain_commands Ok(()) -- cgit v1.2.3
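For readers following the derive internals above, the following hand-written sketch approximates what the reworked macros emit for a small named struct; the real output uses fully qualified paths, but the overall shape is the same:

```rust
use bzipper::{Deserialise, Dstream, Result, Serialise, Sstream};

struct Foo {
    bar: u16,
    baz: f32,
}

impl Serialise for Foo {
    // The maximum size is the sum over all fields.
    const MAX_SERIALISED_SIZE: usize = u16::MAX_SERIALISED_SIZE + f32::MAX_SERIALISED_SIZE;

    fn serialise(&self, stream: &mut Sstream) -> Result<()> {
        // Fields are chained in declaration order.
        self.bar.serialise(stream)?;
        self.baz.serialise(stream)?;

        Ok(())
    }
}

impl Deserialise for Foo {
    fn deserialise(stream: &Dstream) -> Result<Self> {
        // Each field is read back in the same order.
        let value = Self {
            bar: Deserialise::deserialise(stream)?,
            baz: Deserialise::deserialise(stream)?,
        };

        Ok(value)
    }
}
```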