diff --git a/Cargo.toml b/Cargo.toml index e4e517d..812388d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "heapless-bytes" -version = "0.4.0" -authors = ["Nicolas Stalder "] +version = "0.5.0" +authors = ["The Trussed developers", "Nicolas Stalder ", "Nitrokey GmbH"] license = "Apache-2.0 OR MIT" description = "Newtype around heapless byte Vec with efficient serde." categories = ["embedded", "encoding", "no-std"] @@ -12,12 +12,11 @@ edition = "2021" [dependencies] -heapless = { version = "0.8", default-features = false } +heapless = { version = "0.9", default-features = false } serde = { version = "1.0", default-features = false } [dev-dependencies] serde_test = "1.0.176" [features] -# From/Into implementation to `heapless::Vec` -"heapless-0.8" = [] +"heapless-0.9" = [] diff --git a/src/lib.rs b/src/lib.rs index 30201c9..74bc103 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -12,31 +12,130 @@ use core::{ ops::{Deref, DerefMut}, }; -use heapless::Vec; +use heapless::{ + vec::{OwnedVecStorage, Vec as UpstreamVec, VecInner as UpstreamVecInner, ViewVecStorage}, + CapacityError, +}; + +type VecInner = UpstreamVecInner; +type Vec = UpstreamVec; use serde::{ de::{Deserialize, Deserializer, Visitor}, ser::{Serialize, Serializer}, }; +use storage::BytesStorage; -#[derive(Clone, Default, Eq, Ord)] -pub struct Bytes { - bytes: Vec, +mod storage { + use super::{BytesInner, BytesView}; + use heapless::vec::{OwnedVecStorage, VecStorage, ViewVecStorage}; + + /// Trait defining how data for a Byte buffer is stored. + /// + /// There's two implementations available: + /// + /// - [`OwnedStorage`]: stores the data in an array whose size is known at compile time. + /// - [`ViewStorage`]: stores the data in an unsized slice + /// + /// This allows [`BytesInner`] to be generic over either sized or unsized storage. 
The [`heapless-bytes`](crate) + /// crate contains a [`BytesInner`] struct that's generic on [`BytesStorage`], + /// and two type aliases for convenience: + /// + /// - [`Bytes`](crate::Bytes) = `BytesInner<OwnedBytesStorage<N>>` + /// - [`BytesView`](crate::BytesView) = `BytesInner<ViewBytesStorage>` + /// + /// `Bytes` can be unsized into `BytesView`, either by unsizing coercions such as `&mut Bytes -> &mut BytesView` or + /// `Box<Bytes> -> Box<BytesView>`, or explicitly with [`.as_view()`](crate::Bytes::as_view) or [`.as_mut_view()`](crate::Bytes::as_mut_view). + /// + /// This trait is sealed, so you cannot implement it for your own types. You can only use + /// the implementations provided by this crate. + /// + /// [`BytesInner`]: super::BytesInner + /// [`Bytes`]: super::Bytes + /// [`OwnedStorage`]: super::OwnedBytesStorage + /// [`ViewStorage`]: super::ViewBytesStorage + pub trait BytesStorage: BytesStorageSealed {} + pub trait BytesStorageSealed: VecStorage { + fn as_byte_view(this: &BytesInner) -> &BytesView + where + Self: BytesStorage; + fn as_byte_mut_view(this: &mut BytesInner) -> &mut BytesView + where + Self: BytesStorage; + } + + impl BytesStorage for OwnedVecStorage {} + impl BytesStorageSealed for OwnedVecStorage { + fn as_byte_view(this: &BytesInner) -> &BytesView + where + Self: BytesStorage, + { + this + } + fn as_byte_mut_view(this: &mut BytesInner) -> &mut BytesView + where + Self: BytesStorage, + { + this + } + } + + impl BytesStorage for ViewVecStorage {} + + impl BytesStorageSealed for ViewVecStorage { + fn as_byte_view(this: &BytesInner) -> &BytesView + where + Self: BytesStorage, + { + this + } + fn as_byte_mut_view(this: &mut BytesInner) -> &mut BytesView + where + Self: BytesStorage, + { + this + } + } + } +pub type OwnedBytesStorage = OwnedVecStorage; +pub type ViewBytesStorage = ViewVecStorage; + +pub struct BytesInner { + bytes: VecInner, +} + +pub type Bytes = BytesInner>; +pub type BytesView = BytesInner; + pub type Bytes8 = Bytes<8>; pub type 
Bytes16 = Bytes<16>; pub type Bytes32 = Bytes<32>; pub type Bytes64 = Bytes<64>; -#[cfg(feature = "heapless-0.8")] +impl Clone for Bytes { + fn clone(&self) -> Self { + Self { + bytes: self.bytes.clone(), + } + } +} + +impl Eq for BytesInner {} +impl Ord for BytesInner { + fn cmp(&self, other: &Self) -> Ordering { + self.bytes.cmp(&other.bytes) + } +} + +#[cfg(feature = "heapless-0.9")] impl From> for Bytes { fn from(vec: Vec) -> Self { Bytes { bytes: vec }.increase_capacity() } } -#[cfg(feature = "heapless-0.8")] +#[cfg(feature = "heapless-0.9")] impl From> for Vec { fn from(value: Bytes) -> Self { value.increase_capacity().bytes @@ -44,20 +143,68 @@ impl From> for Vec { } impl TryFrom<&[u8]> for Bytes { - type Error = (); - fn try_from(value: &[u8]) -> Result { + type Error = CapacityError; + fn try_from(value: &[u8]) -> Result { Ok(Self { bytes: Vec::from_slice(value)?, }) } } +impl Default for Bytes { + fn default() -> Self { + Self::new() + } +} + impl Bytes { /// Construct a new, empty `Bytes`. - pub fn new() -> Self { + pub const fn new() -> Self { Self { bytes: Vec::new() } } + /// Get the capacity of the buffer. + /// + /// Always equal to the `N` const generic. + pub const fn const_capacity(&self) -> usize { + N + } + /// Copy the contents of this `Bytes` instance into a new instance with a higher capacity. 
+ /// + /// ``` + /// # use heapless_bytes::Bytes; + /// let bytes32: Bytes<32> = Bytes::from([0; 32]); + /// let bytes64: Bytes<64> = bytes32.increase_capacity(); + /// assert_eq!(bytes64.len(), 32); + /// assert_eq!(bytes64.capacity(), 64); + /// ``` + /// + /// Decreasing the capacity causes a compiler error: + /// ```compile_fail + /// # use heapless_bytes::Bytes; + /// let bytes32: Bytes<32> = Bytes::from([0; 32]); + /// let bytes16: Bytes<16> = bytes32.increase_capacity(); + /// ``` + pub fn increase_capacity(&self) -> Bytes { + let () = AssertLessThanEq::::ASSERT; + let mut bytes = Vec::new(); + // bytes has length 0 and capacity M, self has length N, N <= M, so this can never panic + bytes.extend_from_slice(self.as_slice()).unwrap(); + Bytes { bytes } + } + + /// Get a "view" to the Buffer with the `N` const generic erased + pub fn as_view(&self) -> &BytesView { + self + } + + /// Get a mutable "view" to the Buffer with the `N` const generic erased + pub fn as_mut_view(&mut self) -> &mut BytesView { + self + } +} + +impl BytesInner { pub fn as_ptr(&self) -> *const u8 { self.bytes.as_ptr() } @@ -80,7 +227,7 @@ impl Bytes { /// Get the capacity of the buffer. /// /// Always equal to the `N` const generic. - pub const fn capacity(&self) -> usize { + pub fn capacity(&self) -> usize { self.bytes.capacity() } @@ -113,7 +260,7 @@ impl Bytes { } /// Extend the buffer with the contents of a slice - pub fn extend_from_slice(&mut self, other: &[u8]) -> Result<(), ()> { + pub fn extend_from_slice(&mut self, other: &[u8]) -> Result<(), CapacityError> { self.bytes.extend_from_slice(other) } @@ -159,7 +306,7 @@ impl Bytes { /// `new_len` is less than `len`, the buffer is simply truncated. /// /// See also [`resize_zero`](Self::resize_zero). 
- pub fn resize(&mut self, new_len: usize, value: u8) -> Result<(), ()> { + pub fn resize(&mut self, new_len: usize, value: u8) -> Result<(), CapacityError> { self.bytes.resize(new_len, value) } @@ -168,7 +315,7 @@ impl Bytes { /// If new_len is greater than len, the buffer is extended by the /// difference, with each additional slot filled with `0`. If /// `new_len` is less than `len`, the buffer is simply truncated. - pub fn resize_zero(&mut self, new_len: usize) -> Result<(), ()> { + pub fn resize_zero(&mut self, new_len: usize) -> Result<(), CapacityError> { self.bytes.resize_default(new_len) } @@ -289,33 +436,9 @@ impl Bytes { /// Low-noise conversion between lengths. /// /// For an infaillible version when `M` is known to be larger than `N`, see [`increase_capacity`](Self::increase_capacity) - pub fn resize_capacity(&self) -> Result, ()> { + pub fn resize_capacity(&self) -> Result, CapacityError> { Bytes::try_from(&**self) } - - /// Copy the contents of this `Bytes` instance into a new instance with a higher capacity. - /// - /// ``` - /// # use heapless_bytes::Bytes; - /// let bytes32: Bytes<32> = Bytes::from([0; 32]); - /// let bytes64: Bytes<64> = bytes32.increase_capacity(); - /// assert_eq!(bytes64.len(), 32); - /// assert_eq!(bytes64.capacity(), 64); - /// ``` - /// - /// Decreasing the capacity causes a compiler error: - /// ```compile_fail - /// # use heapless_bytes::Bytes; - /// let bytes32: Bytes<32> = Bytes::from([0; 32]); - /// let bytes16: Bytes<16> = bytes32.increase_capacity(); - /// ``` - pub fn increase_capacity(&self) -> Bytes { - let () = AssertLessThanEq::::ASSERT; - let mut bytes = Vec::new(); - // bytes has length 0 and capacity M, self has length N, N <= M, so this can never panic - bytes.extend_from_slice(self.as_slice()).unwrap(); - Bytes { bytes } - } } /// Construct a `Bytes` instance from an array with `N` elements. 
@@ -372,27 +495,7 @@ impl From<&[u8; M]> for Bytes { } } -// impl TryFrom for Bytes -// where -// N: ArrayLength, -// F: FnOnce(&mut [u8]) -> Result, -// { -// type Error = E; - -// fn try_from(f: F) -> Result { - -// let mut data = Self::new(); -// data.resize_to_capacity(); -// let result = f(&mut data); - -// result.map(|count| { -// data.resize_default(count).unwrap(); -// data -// }) -// } -// } - -impl Debug for Bytes { +impl Debug for BytesInner { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // TODO: There has to be a better way :'-) @@ -406,19 +509,19 @@ impl Debug for Bytes { } } -impl AsRef<[u8]> for Bytes { +impl AsRef<[u8]> for BytesInner { fn as_ref(&self) -> &[u8] { &self.bytes } } -impl AsMut<[u8]> for Bytes { +impl AsMut<[u8]> for BytesInner { fn as_mut(&mut self) -> &mut [u8] { &mut self.bytes } } -impl Deref for Bytes { +impl Deref for BytesInner { type Target = [u8]; fn deref(&self) -> &Self::Target { @@ -426,13 +529,13 @@ impl Deref for Bytes { } } -impl DerefMut for Bytes { +impl DerefMut for BytesInner { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.bytes } } -impl PartialEq for Bytes +impl PartialEq for BytesInner where Rhs: ?Sized + AsRef<[u8]>, { @@ -441,7 +544,7 @@ where } } -impl PartialOrd for Bytes +impl PartialOrd for BytesInner where Rhs: ?Sized + AsRef<[u8]>, { @@ -450,7 +553,7 @@ where } } -impl Hash for Bytes { +impl Hash for BytesInner { fn hash(&self, state: &mut H) { self.bytes.hash(state); } @@ -479,7 +582,7 @@ impl IntoIterator for Bytes { } } -impl<'a, const N: usize> IntoIterator for &'a Bytes { +impl<'a, S: BytesStorage + ?Sized> IntoIterator for &'a BytesInner { type Item = &'a u8; type IntoIter = <&'a [u8] as IntoIterator>::IntoIter; @@ -488,7 +591,7 @@ impl<'a, const N: usize> IntoIterator for &'a Bytes { } } -impl<'a, const N: usize> IntoIterator for &'a mut Bytes { +impl<'a, S: BytesStorage + ?Sized> IntoIterator for &'a mut BytesInner { type Item = &'a mut u8; type IntoIter = <&'a mut [u8] as 
IntoIterator>::IntoIter; @@ -497,16 +600,16 @@ impl<'a, const N: usize> IntoIterator for &'a mut Bytes { } } -impl Serialize for Bytes { - fn serialize(&self, serializer: S) -> Result +impl Serialize for BytesInner { + fn serialize(&self, serializer: SER) -> Result where - S: Serializer, + SER: Serializer, { serializer.serialize_bytes(self) } } -impl core::fmt::Write for Bytes { +impl core::fmt::Write for BytesInner { fn write_str(&mut self, s: &str) -> fmt::Result { self.bytes.write_str(s) } @@ -536,7 +639,7 @@ impl<'de, const N: usize> Deserialize<'de> for Bytes { where E: serde::de::Error, { - Bytes::try_from(v).map_err(|()| E::invalid_length(v.len(), &self)) + Bytes::try_from(v).map_err(|_: CapacityError| E::invalid_length(v.len(), &self)) } fn visit_seq(self, mut seq: A) -> Result @@ -590,7 +693,9 @@ mod tests { r"b'\x00abcde\n'", format!( "{:?}", - Bytes::<10>::try_from(b"\0abcde\n".as_slice()).unwrap() + Bytes::<10>::try_from(b"\0abcde\n".as_slice()) + .unwrap() + .as_view() ) ); } @@ -599,9 +704,9 @@ mod tests { fn from() { let _: Bytes<10> = [0; 10].into(); let _: Bytes<10> = (&[0; 8]).into(); - #[cfg(feature = "heapless-0.8")] + #[cfg(feature = "heapless-0.9")] let _: Bytes<10> = Vec::::new().into(); - #[cfg(feature = "heapless-0.8")] + #[cfg(feature = "heapless-0.9")] let _: Bytes<10> = Vec::::new().into(); } }