diff --git a/doc/release-notes/iceoryx2-unreleased.md b/doc/release-notes/iceoryx2-unreleased.md index 7489f1ffa..0eb4e3b5a 100644 --- a/doc/release-notes/iceoryx2-unreleased.md +++ b/doc/release-notes/iceoryx2-unreleased.md @@ -16,6 +16,7 @@ * Developer permissions for resources [#460](https://github.com/eclipse-iceoryx/iceoryx2/issues/460) * Add `--send-copy` flag to Benchmark to consider mem operations [#483](https://github.com/eclipse-iceoryx/iceoryx2/issues/483) * Support for slices in the C++ bindings [#490](https://github.com/eclipse-iceoryx/iceoryx2/issues/490) +* Add relocatable `SlotMap` [#504](https://github.com/eclipse-iceoryx/iceoryx2/issues/504) ### Bugfixes diff --git a/iceoryx2-bb/container/src/lib.rs b/iceoryx2-bb/container/src/lib.rs index e5f258652..85ad33752 100644 --- a/iceoryx2-bb/container/src/lib.rs +++ b/iceoryx2-bb/container/src/lib.rs @@ -89,6 +89,8 @@ pub mod byte_string; /// A queue similar to [`std::collections::VecDeque`] pub mod queue; +/// A container with persistent unique keys to access values. +pub mod slotmap; /// Extends the [ByteString](crate::byte_string) so that custom string types with a semantic /// ruleset on their content can be realized. #[macro_use] diff --git a/iceoryx2-bb/container/src/queue.rs b/iceoryx2-bb/container/src/queue.rs index 3f95cfd02..29efa6757 100644 --- a/iceoryx2-bb/container/src/queue.rs +++ b/iceoryx2-bb/container/src/queue.rs @@ -57,6 +57,7 @@ //! ``` //! use iceoryx2_bb_container::queue::RelocatableQueue; //! use iceoryx2_bb_elementary::math::align_to; +//! use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; //! use iceoryx2_bb_elementary::relocatable_container::RelocatableContainer; //! use core::mem::MaybeUninit; //! @@ -68,11 +69,16 @@ //! //! impl MyConstruct { //! pub fn new() -> Self { -//! Self { -//! queue: unsafe { RelocatableQueue::new(QUEUE_CAPACITY, -//! align_to::>(std::mem::size_of::>()) as isize) }, +//! let mut new_self = Self { +//! queue: unsafe { RelocatableQueue::new_uninit(QUEUE_CAPACITY) }, //! queue_memory: core::array::from_fn(|_| MaybeUninit::uninit()), -//! } +//! }; +//! +//! let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self.queue_memory) as usize); +//! unsafe { +//! new_self.queue.init(&allocator).expect("Enough memory provided.") +//! }; +//! new_self //! } //! } //! ``` @@ -91,18 +97,18 @@ //! //! let bump_allocator = BumpAllocator::new(memory.as_mut_ptr() as usize); //! -//! let queue = unsafe { RelocatableQueue::::new_uninit(QUEUE_CAPACITY) }; +//! let mut queue = unsafe { RelocatableQueue::::new_uninit(QUEUE_CAPACITY) }; //! unsafe { queue.init(&bump_allocator).expect("queue init failed") }; //! ``` //! 
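The queue changes below replace the old single-step `RelocatableContainer::new(capacity, distance_to_data)` constructor with the two-phase `new_uninit()` plus `init(&mut self, allocator)` pattern, and fold `Queue` and `RelocatableQueue` into a single `MetaQueue` that is generic over its pointer type. The sketch below illustrates that generic-pointer idea in isolation; the trait and type definitions are simplified stand-ins for `GenericPointer`, `GenericOwningPointer`, `GenericRelocatablePointer`, and `MetaQueue` from this patch, not the exact crate definitions.

```rust
// Simplified, self-contained sketch of the generic-pointer pattern used by this
// refactoring (hypothetical stand-in types, not the iceoryx2 definitions).
use core::fmt::Debug;
use core::marker::PhantomData;

/// Maps a marker type to the concrete pointer type a container should store.
trait GenericPointer {
    type Type<T: Debug>: Debug;
}

/// Owns heap memory; the resulting container is movable but not shared-memory safe.
#[derive(Debug)]
struct OwningPointer<T: Debug>(Vec<T>);

/// Stores only an offset to its payload; usable in shared memory, but non-movable.
#[derive(Debug)]
struct RelocatablePointer<T: Debug> {
    distance_to_data: isize,
    _payload: PhantomData<T>,
}

#[derive(Debug)]
struct GenericOwningPointer;
#[derive(Debug)]
struct GenericRelocatablePointer;

impl GenericPointer for GenericOwningPointer {
    type Type<T: Debug> = OwningPointer<T>;
}
impl GenericPointer for GenericRelocatablePointer {
    type Type<T: Debug> = RelocatablePointer<T>;
}

/// The container is written once, generic over the pointer flavor.
#[derive(Debug)]
struct MetaQueue<T: Debug, Ptr: GenericPointer> {
    data_ptr: Ptr::Type<T>,
    start: usize,
    len: usize,
    capacity: usize,
}

/// Heap-backed, movable variant.
type Queue<T> = MetaQueue<T, GenericOwningPointer>;
/// Shared-memory-compatible, non-movable variant.
type RelocatableQueue<T> = MetaQueue<T, GenericRelocatablePointer>;

fn main() {
    // Only the marker type differs between the two aliases.
    let q: Queue<u64> = MetaQueue {
        data_ptr: OwningPointer(Vec::with_capacity(4)),
        start: 0,
        len: 0,
        capacity: 4,
    };
    println!("{q:?}");
}
```

Because `init()` now writes the allocated data pointer into the container itself, `RelocatableContainer::init()` takes `&mut self` instead of `&self`, which is why every call site touched by this patch first binds the container as `mut` before initializing it.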
use iceoryx2_bb_elementary::allocator::{AllocationError, BaseAllocator}; -use iceoryx2_bb_elementary::math::align_to; +use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; use iceoryx2_bb_elementary::math::unaligned_mem_size; -use iceoryx2_bb_elementary::owning_pointer::OwningPointer; +use iceoryx2_bb_elementary::owning_pointer::{GenericOwningPointer, OwningPointer}; use iceoryx2_bb_elementary::placement_default::PlacementDefault; use iceoryx2_bb_elementary::pointer_trait::PointerTrait; pub use iceoryx2_bb_elementary::relocatable_container::RelocatableContainer; -use iceoryx2_bb_elementary::relocatable_ptr::RelocatablePointer; +use iceoryx2_bb_elementary::relocatable_ptr::{GenericRelocatablePointer, RelocatablePointer}; use iceoryx2_bb_log::{fail, fatal_panic}; use iceoryx2_pal_concurrency_sync::iox_atomic::IoxAtomicBool; use std::marker::PhantomData; @@ -110,18 +116,20 @@ use std::{alloc::Layout, fmt::Debug, mem::MaybeUninit}; /// Queue with run-time fixed size capacity. In contrast to its counterpart the /// [`RelocatableQueue`] it is movable but is not shared memory compatible. -pub type Queue = details::Queue>>; +pub type Queue = details::MetaQueue; /// **Non-movable** relocatable queue with runtime fixed size capacity. -pub type RelocatableQueue = details::Queue>>; +pub type RelocatableQueue = details::MetaQueue; #[doc(hidden)] pub mod details { + use iceoryx2_bb_elementary::generic_pointer::GenericPointer; + use super::*; /// **Non-movable** relocatable queue with runtime fixed size capacity. #[repr(C)] #[derive(Debug)] - pub struct Queue>> { - data_ptr: PointerType, + pub struct MetaQueue { + data_ptr: Ptr::Type>, start: usize, len: usize, capacity: usize, @@ -129,9 +137,9 @@ pub mod details { _phantom_data: PhantomData, } - unsafe impl>> Send for Queue {} + unsafe impl Send for MetaQueue {} - impl Queue>> { + impl MetaQueue { /// Creates a new [`Queue`] with the provided capacity pub fn new(capacity: usize) -> Self { Self { @@ -178,7 +186,7 @@ pub mod details { } } - impl> + Debug> Queue { + impl MetaQueue { /// Returns a copy of the element stored at index. The index is starting by 0 for the first /// element until [`Queue::len()`]. 
/// @@ -206,18 +214,7 @@ pub mod details { } } - impl RelocatableContainer for Queue>> { - unsafe fn new(capacity: usize, distance_to_data: isize) -> Self { - Self { - data_ptr: RelocatablePointer::new(distance_to_data), - start: 0, - len: 0, - capacity, - is_initialized: IoxAtomicBool::new(true), - _phantom_data: PhantomData, - } - } - + impl RelocatableContainer for MetaQueue { unsafe fn new_uninit(capacity: usize) -> Self { Self { data_ptr: RelocatablePointer::new_uninit(), @@ -230,7 +227,7 @@ pub mod details { } unsafe fn init( - &self, + &mut self, allocator: &Allocator, ) -> Result<(), AllocationError> { if self @@ -260,7 +257,12 @@ pub mod details { } } - impl Queue>> { + impl MetaQueue { + /// Returns the required memory size for a queue with a specified capacity + pub const fn const_memory_size(capacity: usize) -> usize { + unaligned_mem_size::(capacity) + } + /// Removes all elements from the queue /// /// # Safety @@ -325,7 +327,7 @@ pub mod details { } } - impl>> Queue { + impl MetaQueue { #[inline(always)] fn verify_init(&self, source: &str) { debug_assert!( @@ -336,11 +338,6 @@ pub mod details { ); } - /// Returns the required memory size for a queue with a specified capacity - pub const fn const_memory_size(capacity: usize) -> usize { - unaligned_mem_size::(capacity) - } - /// Returns true if the queue is empty, otherwise false pub fn is_empty(&self) -> bool { self.len == 0 @@ -443,9 +440,14 @@ pub mod details { } } - impl>> Drop for Queue { + impl Drop for MetaQueue { fn drop(&mut self) { - unsafe { self.clear_impl() } + if self + .is_initialized + .load(std::sync::atomic::Ordering::Relaxed) + { + unsafe { self.clear_impl() } + } } } } @@ -462,32 +464,38 @@ pub struct FixedSizeQueue { impl PlacementDefault for FixedSizeQueue { unsafe fn placement_default(ptr: *mut Self) { let state_ptr = core::ptr::addr_of_mut!((*ptr).state); - state_ptr.write(Self::initialize_state()); + state_ptr.write(RelocatableQueue::new_uninit(CAPACITY)); + + let allocator = BumpAllocator::new(core::ptr::addr_of!((*ptr)._data) as usize); + (*ptr) + .state + .init(&allocator) + .expect("All required memory is preallocated."); } } impl Default for FixedSizeQueue { fn default() -> Self { - Self { - state: Self::initialize_state(), + let mut new_self = Self { + state: unsafe { RelocatableQueue::new_uninit(CAPACITY) }, _data: unsafe { MaybeUninit::uninit().assume_init() }, - } + }; + + let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self._data) as usize); + unsafe { + new_self + .state + .init(&allocator) + .expect("All required memory is preallocated.") + }; + + new_self } } unsafe impl Send for FixedSizeQueue {} -unsafe impl Sync for FixedSizeQueue {} impl FixedSizeQueue { - fn initialize_state() -> RelocatableQueue { - unsafe { - RelocatableQueue::new( - CAPACITY, - align_to::>(std::mem::size_of::>()) as isize, - ) - } - } - /// Creates a new queue. pub fn new() -> Self { Self::default() diff --git a/iceoryx2-bb/container/src/slotmap.rs b/iceoryx2-bb/container/src/slotmap.rs new file mode 100644 index 000000000..663f77177 --- /dev/null +++ b/iceoryx2-bb/container/src/slotmap.rs @@ -0,0 +1,637 @@ +// Copyright (c) 2024 Contributors to the Eclipse Foundation +// +// See the NOTICE file(s) distributed with this work for additional +// information regarding copyright ownership. 
+// +// This program and the accompanying materials are made available under the +// terms of the Apache Software License 2.0 which is available at +// https://www.apache.org/licenses/LICENSE-2.0, or the MIT license +// which is available at https://opensource.org/licenses/MIT. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT + +//! A SlotMap is a container that has a static unique key for every stored value. Adding or +//! removing values to the SlotMap do not change the unique key of the remaining values. +//! Multiple variationes of that container are available. +//! +//! * [`SlotMap`](crate::slotmap::SlotMap), run-time fixed-size slotmap that is not shared-memory +//! compatible since the memory resides in the heap. +//! * [`FixedSizeSlotMap`](crate::slotmap::FixedSizeSlotMap), compile-time fixed-size slotmap that +//! is self-contained and shared-memory compatible. +//! * [`RelocatableSlotMap`](crate::slotmap::RelocatableSlotMap), run-time fixed-size slotmap that +//! is shared-memory compatible. +//! +//! The SlotMap shall satisfy the following requirements: +//! +//! * A new element can be inserted with a max runtime of `O(1)` +//! * A new element can be inserted at a user-provided key with a max runtime of `O(1)` +//! * An element can be removed by providing the corresponding key with a max runtime of `O(1)` +//! * One can iterate over all elements of the SlotMap. +//! +//! The SlotMap is the perfect container when elements shall be added, removed and accesses quickly +//! but iteration is allowed to be slow. +//! +//! # User Examples +//! +//! ``` +//! use iceoryx2_bb_container::slotmap::FixedSizeSlotMap; +//! +//! const CAPACITY: usize = 123; +//! let mut slotmap = FixedSizeSlotMap::::new(); +//! +//! let key = slotmap.insert(78181).unwrap(); +//! +//! println!("value: {:?}", slotmap.get(key)); +//! ``` + +use std::mem::MaybeUninit; + +use crate::queue::details::MetaQueue; +use crate::vec::details::MetaVec; +use crate::{queue::RelocatableQueue, vec::RelocatableVec}; +use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; +use iceoryx2_bb_elementary::generic_pointer::GenericPointer; +use iceoryx2_bb_elementary::owning_pointer::GenericOwningPointer; +use iceoryx2_bb_elementary::placement_default::PlacementDefault; +use iceoryx2_bb_elementary::relocatable_container::RelocatableContainer; +use iceoryx2_bb_elementary::relocatable_ptr::GenericRelocatablePointer; +use iceoryx2_bb_log::fail; + +/// A key of a [`SlotMap`], [`RelocatableSlotMap`] or [`FixedSizeSlotMap`] that identifies a +/// value. +#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord, Hash)] +pub struct SlotMapKey(usize); + +impl SlotMapKey { + /// Creates a new [`SlotMapKey`] with the specified value. + pub fn new(value: usize) -> Self { + Self(value) + } + + /// Returns the underlying value of the [`SlotMapKey`]. + pub fn value(&self) -> usize { + self.0 + } +} + +#[derive(Debug, Clone, Copy)] +struct FreeListEntry { + previous: usize, + next: usize, +} + +/// A runtime fixed-size, non-shared memory compatible [`SlotMap`]. The [`SlotMap`]s memory resides +/// in the heap. +pub type SlotMap = details::MetaSlotMap; + +/// A runtime fixed-size, shared-memory compatible [`RelocatableSlotMap`]. +pub type RelocatableSlotMap = details::MetaSlotMap; + +const INVALID: usize = usize::MAX; + +#[doc(hidden)] +pub mod details { + use super::*; + + /// The iterator of a [`SlotMap`], [`RelocatableSlotMap`] or [`FixedSizeSlotMap`]. 
+ pub struct Iter<'slotmap, T, Ptr: GenericPointer> { + slotmap: &'slotmap MetaSlotMap, + key: SlotMapKey, + } + + pub type OwningIter<'slotmap, T> = Iter<'slotmap, T, GenericOwningPointer>; + pub type RelocatableIter<'slotmap, T> = Iter<'slotmap, T, GenericRelocatablePointer>; + + impl<'slotmap, T, Ptr: GenericPointer> Iterator for Iter<'slotmap, T, Ptr> { + type Item = (SlotMapKey, &'slotmap T); + + fn next(&mut self) -> Option { + if let Some((next_key, value)) = self.slotmap.next_available_key_after(self.key) { + self.key.0 = next_key.0 + 1; + Some((next_key, value)) + } else { + None + } + } + } + + #[repr(C)] + #[derive(Debug)] + pub struct MetaSlotMap { + idx_to_data: MetaVec, + idx_to_data_free_list: MetaVec, + data: MetaVec, Ptr>, + data_next_free_index: MetaQueue, + idx_to_data_free_list_head: usize, + len: usize, + } + + impl MetaSlotMap { + fn next_available_key_after(&self, start: SlotMapKey) -> Option<(SlotMapKey, &T)> { + let idx_to_data = &self.idx_to_data; + + for n in start.0..idx_to_data.len() { + let data_idx = self.idx_to_data[n]; + if data_idx != INVALID { + return Some(( + SlotMapKey(n), + self.data[data_idx].as_ref().expect( + "By contract, data contains a value when idx_to_data contains a value", + ), + )); + } + } + + None + } + + pub(crate) unsafe fn initialize_data_structures(&mut self) { + let capacity = self.capacity_impl(); + for n in 0..capacity { + self.idx_to_data.push_impl(INVALID); + self.data.push_impl(None); + self.data_next_free_index.push_impl(n); + + let previous = if n == 0 { INVALID } else { n - 1 }; + let next = if n < capacity - 1 { n + 1 } else { INVALID }; + self.idx_to_data_free_list + .push_impl(FreeListEntry { previous, next }); + } + } + + pub(crate) unsafe fn iter_impl(&self) -> Iter { + Iter { + slotmap: self, + key: SlotMapKey(0), + } + } + + pub(crate) unsafe fn contains_impl(&self, key: SlotMapKey) -> bool { + self.idx_to_data[key.0] != INVALID + } + + pub(crate) unsafe fn get_impl(&self, key: SlotMapKey) -> Option<&T> { + match self.idx_to_data[key.0] { + INVALID => None, + n => Some(self.data[n].as_ref().expect( + "data and idx_to_data correspond and this value must be always available.", + )), + } + } + + pub(crate) unsafe fn get_mut_impl(&mut self, key: SlotMapKey) -> Option<&mut T> { + match self.idx_to_data[key.0] { + INVALID => None, + n => Some(self.data[n].as_mut().expect( + "data and idx_to_data correspond and this value must be always available.", + )), + } + } + + unsafe fn acquire_next_free_index(&mut self) -> Option { + if self.idx_to_data_free_list_head == INVALID { + return None; + } + + let free_idx = self.idx_to_data_free_list_head; + let next = self.idx_to_data_free_list[free_idx].next; + + if next != INVALID { + self.idx_to_data_free_list[next].previous = INVALID; + } + self.idx_to_data_free_list_head = next; + Some(free_idx) + } + + unsafe fn claim_index(&mut self, idx: usize) { + if idx >= self.capacity_impl() { + return; + } + + let entry = self.idx_to_data_free_list[idx]; + if entry.previous != INVALID { + self.idx_to_data_free_list[entry.previous].next = entry.next; + } + if entry.next != INVALID { + self.idx_to_data_free_list[entry.next].previous = entry.previous; + } + self.idx_to_data_free_list[idx].next = INVALID; + self.idx_to_data_free_list[idx].previous = INVALID; + } + + unsafe fn release_free_index(&mut self, idx: usize) { + if self.idx_to_data_free_list_head != INVALID { + self.idx_to_data_free_list[self.idx_to_data_free_list_head].previous = idx; + } + + self.idx_to_data_free_list[idx] = 
FreeListEntry { + previous: INVALID, + next: self.idx_to_data_free_list_head, + }; + + self.idx_to_data_free_list_head = idx; + } + + pub(crate) unsafe fn insert_impl(&mut self, value: T) -> Option { + self.acquire_next_free_index().map(|key| { + let key = SlotMapKey(key); + self.store_value(key, value); + key + }) + } + + pub(crate) unsafe fn insert_at_impl(&mut self, key: SlotMapKey, value: T) -> bool { + self.claim_index(key.value()); + self.store_value(key, value) + } + + pub(crate) unsafe fn store_value(&mut self, key: SlotMapKey, value: T) -> bool { + if key.0 > self.capacity_impl() { + return false; + } + + let data_idx = self.idx_to_data[key.0]; + if data_idx != INVALID { + self.data[data_idx] = Some(value); + } else { + let n = self.data_next_free_index.pop_impl().expect("data and idx_to_data correspond and there must be always a free index available."); + self.idx_to_data[key.0] = n; + self.data[n] = Some(value); + self.len += 1; + } + + true + } + + pub(crate) unsafe fn remove_impl(&mut self, key: SlotMapKey) -> bool { + if key.0 > self.idx_to_data.len() { + return false; + } + + let data_idx = self.idx_to_data[key.0]; + if data_idx != INVALID { + self.data[data_idx].take(); + let push_result = self.data_next_free_index.push_impl(data_idx); + debug_assert!(push_result); + self.release_free_index(key.0); + self.idx_to_data[key.0] = INVALID; + self.len -= 1; + true + } else { + false + } + } + + pub(crate) fn len_impl(&self) -> usize { + self.len + } + + pub(crate) fn capacity_impl(&self) -> usize { + self.idx_to_data.capacity() + } + + pub(crate) fn is_empty_impl(&self) -> bool { + self.len_impl() == 0 + } + + pub(crate) fn is_full_impl(&self) -> bool { + self.len_impl() == self.capacity_impl() + } + } + + impl RelocatableContainer for MetaSlotMap { + unsafe fn new_uninit(capacity: usize) -> Self { + Self { + len: 0, + idx_to_data_free_list_head: 0, + idx_to_data: RelocatableVec::new_uninit(capacity), + idx_to_data_free_list: RelocatableVec::new_uninit(capacity), + data: RelocatableVec::new_uninit(capacity), + data_next_free_index: RelocatableQueue::new_uninit(capacity), + } + } + + unsafe fn init( + &mut self, + allocator: &Allocator, + ) -> Result<(), iceoryx2_bb_elementary::allocator::AllocationError> { + let msg = "Unable to initialize RelocatableSlotMap"; + fail!(from "RelocatableSlotMap::init()", + when self.idx_to_data.init(allocator), + "{msg} since the underlying idx_to_data vector could not be initialized."); + fail!(from "RelocatableSlotMap::init()", + when self.idx_to_data_free_list.init(allocator), + "{msg} since the underlying idx_to_data_free_list vec could not be initialized."); + fail!(from "RelocatableSlotMap::init()", + when self.data.init(allocator), + "{msg} since the underlying data vector could not be initialized."); + fail!(from "RelocatableSlotMap::init()", + when self.data_next_free_index.init(allocator), + "{msg} since the underlying data_next_free_index queue could not be initialized."); + + self.initialize_data_structures(); + Ok(()) + } + + fn memory_size(capacity: usize) -> usize { + Self::const_memory_size(capacity) + } + } + + impl MetaSlotMap { + /// Creates a new runtime-fixed size [`SlotMap`] on the heap with the given capacity. 
+ pub fn new(capacity: usize) -> Self { + let mut new_self = Self { + len: 0, + idx_to_data_free_list_head: 0, + idx_to_data: MetaVec::new(capacity), + idx_to_data_free_list: MetaVec::new(capacity), + data: MetaVec::new(capacity), + data_next_free_index: MetaQueue::new(capacity), + }; + unsafe { new_self.initialize_data_structures() }; + new_self + } + + /// Returns the [`Iter`]ator to iterate over all entries. + pub fn iter(&self) -> OwningIter { + unsafe { self.iter_impl() } + } + + /// Returns `true` if the provided `key` is contained, otherwise `false`. + pub fn contains(&self, key: SlotMapKey) -> bool { + unsafe { self.contains_impl(key) } + } + + /// Returns a reference to the value stored under the given key. If there is no such key, + /// [`None`] is returned. + pub fn get(&self, key: SlotMapKey) -> Option<&T> { + unsafe { self.get_impl(key) } + } + + /// Returns a mutable reference to the value stored under the given key. If there is no + /// such key, [`None`] is returned. + pub fn get_mut(&mut self, key: SlotMapKey) -> Option<&mut T> { + unsafe { self.get_mut_impl(key) } + } + + /// Insert a value and returns the corresponding [`SlotMapKey`]. If the container is full + /// [`None`] is returned. + pub fn insert(&mut self, value: T) -> Option { + unsafe { self.insert_impl(value) } + } + + /// Insert a value at the specified [`SlotMapKey`] and returns true. If the provided key + /// is out-of-bounds it returns `false` and adds nothing. If there is already a value + /// stored at the `key`s index, the value is overridden with the provided value. + pub fn insert_at(&mut self, key: SlotMapKey, value: T) -> bool { + unsafe { self.insert_at_impl(key, value) } + } + + /// Removes a value at the specified [`SlotMapKey`]. If there was no value corresponding + /// to the [`SlotMapKey`] it returns false, otherwise true. + pub fn remove(&mut self, key: SlotMapKey) -> bool { + unsafe { self.remove_impl(key) } + } + + /// Returns the number of stored values. + pub fn len(&self) -> usize { + self.len_impl() + } + + /// Returns the capacity. + pub fn capacity(&self) -> usize { + self.capacity_impl() + } + + /// Returns true if the container is empty, otherwise false. + pub fn is_empty(&self) -> bool { + self.is_empty_impl() + } + + /// Returns true if the container is full, otherwise false. + pub fn is_full(&self) -> bool { + self.is_full_impl() + } + } + + impl MetaSlotMap { + /// Returns how many memory the [`RelocatableSlotMap`] will allocate from the allocator + /// in [`RelocatableSlotMap::init()`]. + pub const fn const_memory_size(capacity: usize) -> usize { + RelocatableVec::::const_memory_size(capacity) + + RelocatableVec::::const_memory_size(capacity) + + RelocatableVec::>::const_memory_size(capacity) + + RelocatableQueue::::const_memory_size(capacity) + } + + /// Returns the [`Iter`]ator to iterate over all entries. + /// + /// # Safety + /// + /// * [`RelocatableSlotMap::init()`] must be called once before + /// + pub unsafe fn iter(&self) -> RelocatableIter { + self.iter_impl() + } + + /// Returns `true` if the provided `key` is contained, otherwise `false`. + /// + /// # Safety + /// + /// * [`RelocatableSlotMap::init()`] must be called once before + /// + pub unsafe fn contains(&self, key: SlotMapKey) -> bool { + self.contains_impl(key) + } + + /// Returns a reference to the value stored under the given key. If there is no such key, + /// [`None`] is returned. 
+ /// + /// # Safety + /// + /// * [`RelocatableSlotMap::init()`] must be called once before + /// + pub unsafe fn get(&self, key: SlotMapKey) -> Option<&T> { + self.get_impl(key) + } + + /// Returns a mutable reference to the value stored under the given key. If there is no + /// such key, [`None`] is returned. + /// + /// # Safety + /// + /// * [`RelocatableSlotMap::init()`] must be called once before + /// + pub unsafe fn get_mut(&mut self, key: SlotMapKey) -> Option<&mut T> { + self.get_mut_impl(key) + } + + /// Insert a value and returns the corresponding [`SlotMapKey`]. If the container is full + /// [`None`] is returned. + /// + /// # Safety + /// + /// * [`RelocatableSlotMap::init()`] must be called once before + /// + pub unsafe fn insert(&mut self, value: T) -> Option { + self.insert_impl(value) + } + + /// Insert a value at the specified [`SlotMapKey`] and returns true. If the provided key + /// is out-of-bounds it returns `false` and adds nothing. If there is already a value + /// stored at the `key`s index, the value is overridden with the provided value. + /// + /// # Safety + /// + /// * [`RelocatableSlotMap::init()`] must be called once before + /// + pub unsafe fn insert_at(&mut self, key: SlotMapKey, value: T) -> bool { + self.insert_at_impl(key, value) + } + + /// Removes a value at the specified [`SlotMapKey`]. If there was no value corresponding + /// to the [`SlotMapKey`] it returns false, otherwise true. + /// + /// # Safety + /// + /// * [`RelocatableSlotMap::init()`] must be called once before + /// + pub unsafe fn remove(&mut self, key: SlotMapKey) -> bool { + self.remove_impl(key) + } + + /// Returns the number of stored values. + pub fn len(&self) -> usize { + self.len_impl() + } + + /// Returns the capacity. + pub fn capacity(&self) -> usize { + self.capacity_impl() + } + + /// Returns true if the container is empty, otherwise false. + pub fn is_empty(&self) -> bool { + self.is_empty_impl() + } + + /// Returns true if the container is full, otherwise false. + pub fn is_full(&self) -> bool { + self.is_full_impl() + } + } +} + +/// A compile-time fixed-size, shared memory compatible [`FixedSizeSlotMap`]. +#[repr(C)] +#[derive(Debug)] +pub struct FixedSizeSlotMap { + state: RelocatableSlotMap, + _idx_to_data: MaybeUninit<[usize; CAPACITY]>, + _idx_to_data_free_list: MaybeUninit<[FreeListEntry; CAPACITY]>, + _data: MaybeUninit<[Option; CAPACITY]>, + _data_next_free_index: MaybeUninit<[usize; CAPACITY]>, +} + +impl PlacementDefault for FixedSizeSlotMap { + unsafe fn placement_default(ptr: *mut Self) { + let state_ptr = core::ptr::addr_of_mut!((*ptr).state); + state_ptr.write(unsafe { RelocatableSlotMap::new_uninit(CAPACITY) }); + let allocator = BumpAllocator::new(core::ptr::addr_of!((*ptr)._data) as usize); + (*ptr) + .state + .init(&allocator) + .expect("All required memory is preallocated."); + } +} + +impl Default for FixedSizeSlotMap { + fn default() -> Self { + let mut new_self = Self { + _idx_to_data: MaybeUninit::uninit(), + _idx_to_data_free_list: MaybeUninit::uninit(), + _data: MaybeUninit::uninit(), + _data_next_free_index: MaybeUninit::uninit(), + state: unsafe { RelocatableSlotMap::new_uninit(CAPACITY) }, + }; + + let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self._idx_to_data) as usize); + unsafe { + new_self + .state + .init(&allocator) + .expect("All required memory is preallocated.") + }; + + new_self + } +} + +impl FixedSizeSlotMap { + /// Creates a new empty [`FixedSizeSlotMap`]. 
+ pub fn new() -> Self { + Self::default() + } + + /// Returns the [`details::RelocatableIter`]ator to iterate over all entries. + pub fn iter(&self) -> details::RelocatableIter { + unsafe { self.state.iter_impl() } + } + + /// Returns `true` if the provided `key` is contained, otherwise `false`. + pub fn contains(&self, key: SlotMapKey) -> bool { + unsafe { self.state.contains_impl(key) } + } + + /// Returns a reference to the value stored under the given key. If there is no such key, + /// [`None`] is returned. + pub fn get(&self, key: SlotMapKey) -> Option<&T> { + unsafe { self.state.get_impl(key) } + } + + /// Returns a mutable reference to the value stored under the given key. If there is no + /// such key, [`None`] is returned. + pub fn get_mut(&mut self, key: SlotMapKey) -> Option<&mut T> { + unsafe { self.state.get_mut_impl(key) } + } + + /// Insert a value and returns the corresponding [`SlotMapKey`]. If the container is full + /// [`None`] is returned. + pub fn insert(&mut self, value: T) -> Option { + unsafe { self.state.insert_impl(value) } + } + + /// Insert a value at the specified [`SlotMapKey`] and returns true. If the provided key + /// is out-of-bounds it returns `false` and adds nothing. If there is already a value + /// stored at the `key`s index, the value is overridden with the provided value. + pub fn insert_at(&mut self, key: SlotMapKey, value: T) -> bool { + unsafe { self.state.insert_at_impl(key, value) } + } + + /// Removes a value at the specified [`SlotMapKey`]. If there was no value corresponding + /// to the [`SlotMapKey`] it returns false, otherwise true. + pub fn remove(&mut self, key: SlotMapKey) -> bool { + unsafe { self.state.remove_impl(key) } + } + + /// Returns the number of stored values. + pub fn len(&self) -> usize { + self.state.len_impl() + } + + /// Returns the capacity. + pub fn capacity(&self) -> usize { + self.state.capacity_impl() + } + + /// Returns true if the container is empty, otherwise false. + pub fn is_empty(&self) -> bool { + self.state.is_empty_impl() + } + + /// Returns true if the container is full, otherwise false. + pub fn is_full(&self) -> bool { + self.state.is_full_impl() + } +} diff --git a/iceoryx2-bb/container/src/vec.rs b/iceoryx2-bb/container/src/vec.rs index 6bbe634ea..13e1c92f6 100644 --- a/iceoryx2-bb/container/src/vec.rs +++ b/iceoryx2-bb/container/src/vec.rs @@ -1,4 +1,4 @@ -// Copyright (c) 2023 Contributors to the Eclipse Foundation +// Copyright (c) 2023 - 2024 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. @@ -10,8 +10,10 @@ // // SPDX-License-Identifier: Apache-2.0 OR MIT -//! Contains two vector variations that are similar to [`std::vec::Vec`]. +//! Contains vector variations that are similar to [`std::vec::Vec`]. //! +//! * [`Vec`](crate::vec::Vec), run-time fixed-size vector that is not shared-memory compatible +//! since the memory resides in the heap. //! * [`FixedSizeVec`](crate::vec::FixedSizeVec), compile-time fixed size vector that is //! self-contained. //! * [`RelocatableVec`](crate::vec::RelocatableVec), run-time fixed size vector that uses by default heap memory. @@ -37,6 +39,7 @@ //! ``` //! use iceoryx2_bb_container::vec::RelocatableVec; //! use iceoryx2_bb_elementary::math::align_to; +//! use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; //! use iceoryx2_bb_elementary::relocatable_container::RelocatableContainer; //! use core::mem::MaybeUninit; //! @@ -48,11 +51,16 @@ //! //! 
impl MyConstruct { //! pub fn new() -> Self { -//! Self { -//! vec: unsafe { RelocatableVec::new(VEC_CAPACITY, -//! align_to::>(std::mem::size_of::>()) as isize) }, +//! let mut new_self = Self { +//! vec: unsafe { RelocatableVec::new_uninit(VEC_CAPACITY) }, //! vec_memory: core::array::from_fn(|_| MaybeUninit::uninit()), -//! } +//! }; +//! +//! let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self.vec_memory) as usize); +//! unsafe { +//! new_self.vec.init(&allocator).expect("Enough memory provided.") +//! }; +//! new_self //! } //! } //! ``` @@ -71,7 +79,7 @@ //! //! let bump_allocator = BumpAllocator::new(memory.as_mut_ptr() as usize); //! -//! let vec = unsafe { RelocatableVec::::new_uninit(VEC_CAPACITY) }; +//! let mut vec = unsafe { RelocatableVec::::new_uninit(VEC_CAPACITY) }; //! unsafe { vec.init(&bump_allocator).expect("vec init failed") }; //! ``` @@ -83,269 +91,401 @@ use std::{ sync::atomic::Ordering, }; +use iceoryx2_bb_elementary::generic_pointer::GenericPointer; use iceoryx2_bb_elementary::{ - math::{align_to, unaligned_mem_size}, - placement_default::PlacementDefault, - pointer_trait::PointerTrait, - relocatable_container::RelocatableContainer, + bump_allocator::BumpAllocator, owning_pointer::GenericOwningPointer, + relocatable_ptr::GenericRelocatablePointer, +}; +use iceoryx2_bb_elementary::{ + math::unaligned_mem_size, owning_pointer::OwningPointer, placement_default::PlacementDefault, + pointer_trait::PointerTrait, relocatable_container::RelocatableContainer, relocatable_ptr::RelocatablePointer, }; + use iceoryx2_bb_log::{fail, fatal_panic}; use iceoryx2_pal_concurrency_sync::iox_atomic::IoxAtomicBool; use serde::{de::Visitor, Deserialize, Serialize}; +/// Vector with run-time fixed size capacity. In contrast to its counterpart the +/// [`RelocatableVec`] it is movable but is not shared memory compatible. +pub type Vec = details::MetaVec; + /// **Non-movable** relocatable vector with runtime fixed size capacity. -#[repr(C)] -#[derive(Debug)] -pub struct RelocatableVec { - data_ptr: RelocatablePointer>, - capacity: usize, - len: usize, - is_initialized: IoxAtomicBool, -} +pub type RelocatableVec = details::MetaVec; + +#[doc(hidden)] +pub mod details { + use super::*; + + /// **Non-movable** relocatable vector with runtime fixed size capacity. 
+ #[repr(C)] + #[derive(Debug)] + pub struct MetaVec { + data_ptr: Ptr::Type>, + capacity: usize, + len: usize, + is_initialized: IoxAtomicBool, + _phantom_data: PhantomData, + } + + unsafe impl Send for MetaVec {} + + impl Drop for MetaVec { + fn drop(&mut self) { + if self + .is_initialized + .load(std::sync::atomic::Ordering::Relaxed) + { + unsafe { self.clear_impl() }; + } + } + } -unsafe impl Send for RelocatableVec {} + impl RelocatableContainer for MetaVec { + unsafe fn new_uninit(capacity: usize) -> Self { + Self { + data_ptr: RelocatablePointer::new_uninit(), + capacity, + len: 0, + is_initialized: IoxAtomicBool::new(false), + _phantom_data: PhantomData, + } + } -impl Drop for RelocatableVec { - fn drop(&mut self) { - unsafe { self.clear() }; - } -} + unsafe fn init( + &mut self, + allocator: &Allocator, + ) -> Result<(), iceoryx2_bb_elementary::allocator::AllocationError> { + if self.is_initialized.load(Ordering::Relaxed) { + fatal_panic!(from "Vec::init()", "Memory already initialized, Initializing it twice may lead to undefined behavior."); + } -impl RelocatableContainer for RelocatableVec { - unsafe fn new(capacity: usize, distance_to_data: isize) -> Self { - Self { - data_ptr: RelocatablePointer::new(distance_to_data), - capacity, - len: 0, - is_initialized: IoxAtomicBool::new(true), + self.data_ptr.init(fail!(from "Queue::init", when allocator + .allocate(Layout::from_size_align_unchecked( + std::mem::size_of::() * self.capacity, + std::mem::align_of::(), + )), "Failed to initialize queue since the allocation of the data memory failed." + )); + self.is_initialized + .store(true, std::sync::atomic::Ordering::Relaxed); + + Ok(()) + } + + fn memory_size(capacity: usize) -> usize { + Self::const_memory_size(capacity) } } - unsafe fn new_uninit(capacity: usize) -> Self { - Self { - data_ptr: RelocatablePointer::new_uninit(), - capacity, - len: 0, - is_initialized: IoxAtomicBool::new(false), + impl Deref for MetaVec { + type Target = [T]; + + fn deref(&self) -> &Self::Target { + self.verify_init(&format!("Vec<{}>::push()", std::any::type_name::())); + unsafe { core::slice::from_raw_parts((*self.data_ptr.as_ptr()).as_ptr(), self.len) } } } - unsafe fn init( - &self, - allocator: &Allocator, - ) -> Result<(), iceoryx2_bb_elementary::allocator::AllocationError> { - if self.is_initialized.load(Ordering::Relaxed) { - fatal_panic!(from "Vec::init()", "Memory already initialized, Initializing it twice may lead to undefined behavior."); + impl DerefMut for MetaVec { + fn deref_mut(&mut self) -> &mut Self::Target { + self.verify_init(&format!("Vec<{}>::push()", std::any::type_name::())); + unsafe { + core::slice::from_raw_parts_mut( + (*self.data_ptr.as_mut_ptr()).as_mut_ptr(), + self.len, + ) + } } + } - self.data_ptr.init(fail!(from "Queue::init", when allocator - .allocate(Layout::from_size_align_unchecked( - std::mem::size_of::() * self.capacity, - std::mem::align_of::(), - )), "Failed to initialize queue since the allocation of the data memory failed." 
- )); - self.is_initialized - .store(true, std::sync::atomic::Ordering::Relaxed); + impl PartialEq for MetaVec { + fn eq(&self, other: &Self) -> bool { + if other.len() != self.len() { + return false; + } - Ok(()) - } + for i in 0..self.len() { + if other[i] != self[i] { + return false; + } + } - fn memory_size(capacity: usize) -> usize { - Self::const_memory_size(capacity) + true + } } -} -impl Deref for RelocatableVec { - type Target = [T]; + impl Eq for MetaVec {} - fn deref(&self) -> &Self::Target { - self.verify_init(&format!("Vec<{}>::push()", std::any::type_name::())); - unsafe { core::slice::from_raw_parts((*self.data_ptr.as_ptr()).as_ptr(), self.len) } - } -} + impl MetaVec { + #[inline(always)] + fn verify_init(&self, source: &str) { + debug_assert!( + self.is_initialized + .load(std::sync::atomic::Ordering::Relaxed), + "From: {}, Undefined behavior - the object was not initialized with 'init' before.", + source + ); + } -impl DerefMut for RelocatableVec { - fn deref_mut(&mut self) -> &mut Self::Target { - self.verify_init(&format!("Vec<{}>::push()", std::any::type_name::())); - unsafe { - core::slice::from_raw_parts_mut((*self.data_ptr.as_mut_ptr()).as_mut_ptr(), self.len) + /// Returns the capacity of the vector + pub fn capacity(&self) -> usize { + self.capacity } - } -} -impl PartialEq for RelocatableVec { - fn eq(&self, other: &Self) -> bool { - if other.len() != self.len() { - return false; + /// Returns the number of elements stored inside the vector + pub fn len(&self) -> usize { + self.len + } + + /// Returns true if the vector is empty, otherwise false + pub fn is_empty(&self) -> bool { + self.len == 0 } - for i in 0..self.len() { - if other[i] != self[i] { + /// Returns true if the vector is full, otherwise false + pub fn is_full(&self) -> bool { + self.len == self.capacity + } + + pub(crate) unsafe fn push_impl(&mut self, value: T) -> bool { + if self.is_full() { return false; } + + self.verify_init(&format!("Vec<{}>::push()", std::any::type_name::())); + self.push_unchecked(value); + true } - true - } -} + unsafe fn fill_impl(&mut self, value: T) + where + T: Clone, + { + for _ in self.len..self.capacity { + self.push_unchecked(value.clone()); + } + } -impl Eq for RelocatableVec {} + unsafe fn fill_with_impl T>(&mut self, mut f: F) { + for _ in self.len..self.capacity { + self.push_unchecked(f()); + } + } -impl RelocatableVec { - #[inline(always)] - fn verify_init(&self, source: &str) { - debug_assert!( - self.is_initialized - .load(std::sync::atomic::Ordering::Relaxed), - "From: {}, Undefined behavior - the object was not initialized with 'init' before.", - source - ); - } + fn push_unchecked(&mut self, value: T) { + unsafe { + self.data_ptr + .as_mut_ptr() + .add(self.len) + .write(MaybeUninit::new(value)) + }; - /// Returns the required memory size for a vec with a specified capacity - pub const fn const_memory_size(capacity: usize) -> usize { - unaligned_mem_size::(capacity) - } + self.len += 1; + } - /// Returns the capacity of the vector - pub fn capacity(&self) -> usize { - self.capacity - } + unsafe fn extend_from_slice_impl(&mut self, other: &[T]) -> bool + where + T: Clone, + { + if self.capacity < self.len + other.len() { + return false; + } - /// Returns the number of elements stored inside the vector - pub fn len(&self) -> usize { - self.len - } + for element in other { + self.push_unchecked(element.clone()); + } - /// Returns true if the vector is empty, otherwise false - pub fn is_empty(&self) -> bool { - self.len == 0 - } + true + } - /// Returns true 
if the vector is full, otherwise false - pub fn is_full(&self) -> bool { - self.len == self.capacity - } + unsafe fn pop_impl(&mut self) -> Option { + if self.is_empty() { + return None; + } - /// Adds an element at the end of the vector. If the vector is full and the element cannot be - /// added it returns false, otherwise true. - /// - /// # Safety - /// - /// * [`RelocatableVec::init()`] must be called once before - /// - pub unsafe fn push(&mut self, value: T) -> bool { - if self.is_full() { - return false; + self.verify_init(&format!("Vec<{}>::pop()", std::any::type_name::())); + Some(self.pop_unchecked()) } - self.verify_init(&format!("Vec<{}>::push()", std::any::type_name::())); - self.push_unchecked(value); - true - } + unsafe fn clear_impl(&mut self) { + for _ in 0..self.len { + self.pop_unchecked(); + } + } - /// Fill the remaining space of the vector with value. - /// - /// # Safety - /// - /// * [`RelocatableVec::init()`] must be called once before - /// - pub unsafe fn fill(&mut self, value: T) - where - T: Clone, - { - for _ in self.len..self.capacity { - self.push_unchecked(value.clone()); + fn pop_unchecked(&mut self) -> T { + let value = std::mem::replace( + unsafe { &mut *self.data_ptr.as_mut_ptr().offset(self.len as isize - 1) }, + MaybeUninit::uninit(), + ); + self.len -= 1; + + unsafe { value.assume_init() } } - } - unsafe fn push_unchecked(&mut self, value: T) { - self.data_ptr - .as_mut_ptr() - .add(self.len) - .write(MaybeUninit::new(value)); + unsafe fn as_slice_impl(&self) -> &[T] { + unsafe { core::slice::from_raw_parts(self.data_ptr.as_ptr().cast(), self.len) } + } - self.len += 1; + unsafe fn as_mut_slice_impl(&mut self) -> &mut [T] { + unsafe { core::slice::from_raw_parts_mut(self.data_ptr.as_mut_ptr().cast(), self.len) } + } } - /// Append all elements from other via [`Clone`]. - /// - /// # Safety - /// - /// * [`RelocatableVec::init()`] must be called once before - /// - pub unsafe fn extend_from_slice(&mut self, other: &[T]) -> bool - where - T: Clone, - { - if self.capacity < self.len + other.len() { - return false; + impl MetaVec { + /// Creates a new [`Queue`] with the provided capacity + pub fn new(capacity: usize) -> Self { + Self { + data_ptr: OwningPointer::>::new_with_alloc(capacity), + capacity, + len: 0, + is_initialized: IoxAtomicBool::new(true), + _phantom_data: PhantomData, + } } - for element in other { - self.push_unchecked(element.clone()); + /// Adds an element at the end of the vector. If the vector is full and the element cannot be + /// added it returns false, otherwise true. + pub fn push(&mut self, value: T) -> bool { + unsafe { self.push_impl(value) } } - true - } + /// Fill the remaining space of the vector with value. + pub fn fill(&mut self, value: T) + where + T: Clone, + { + unsafe { self.fill_impl(value) } + } - /// Removes the last element of the vector and returns it to the user. If the vector is empty - /// it returns [`None`]. - /// - /// # Safety - /// - /// * [`RelocatableVec::init()`] must be called once before - /// - pub unsafe fn pop(&mut self) -> Option { - if self.is_empty() { - return None; + /// Fill the remaining space of the vector by calling the provided closure repeatedly + pub fn fill_with T>(&mut self, f: F) { + unsafe { self.fill_with_impl(f) } } - self.verify_init(&format!("Vec<{}>::pop()", std::any::type_name::())); - Some(self.pop_unchecked()) - } + /// Append all elements from other via [`Clone`]. 
+ pub fn extend_from_slice(&mut self, other: &[T]) -> bool + where + T: Clone, + { + unsafe { self.extend_from_slice_impl(other) } + } - /// Removes all elements from the vector - /// - /// # Safety - /// - /// * [`RelocatableVec::init()`] must be called once before - /// - pub unsafe fn clear(&mut self) { - for _ in 0..self.len { - self.pop_unchecked(); + /// Removes the last element of the vector and returns it to the user. If the vector is empty + /// it returns [`None`]. + pub fn pop(&mut self) -> Option { + unsafe { self.pop_impl() } } - } - unsafe fn pop_unchecked(&mut self) -> T { - let value = std::mem::replace( - &mut *self.data_ptr.as_mut_ptr().offset(self.len as isize - 1), - MaybeUninit::uninit(), - ); - self.len -= 1; + /// Removes all elements from the vector + pub fn clear(&mut self) { + unsafe { self.clear_impl() } + } - value.assume_init() - } + /// Returns a slice to the contents of the vector + pub fn as_slice(&self) -> &[T] { + unsafe { self.as_slice_impl() } + } - /// Returns a slice to the contents of the vector - /// - /// # Safety - /// - /// * [`RelocatableVec::init()`] must be called once before - /// - pub unsafe fn as_slice(&self) -> &[T] { - core::slice::from_raw_parts(self.data_ptr.as_ptr().cast(), self.len) + /// Returns a mutable slice to the contents of the vector + pub fn as_mut_slice(&mut self) -> &mut [T] { + unsafe { self.as_mut_slice_impl() } + } } - /// Returns a mutable slice to the contents of the vector - /// - /// # Safety - /// - /// * [`RelocatableVec::init()`] must be called once before - /// - pub unsafe fn as_mut_slice(&mut self) -> &mut [T] { - core::slice::from_raw_parts_mut(self.data_ptr.as_mut_ptr().cast(), self.len) + impl MetaVec { + /// Returns the required memory size for a vec with a specified capacity + pub const fn const_memory_size(capacity: usize) -> usize { + unaligned_mem_size::(capacity) + } + + /// Adds an element at the end of the vector. If the vector is full and the element cannot be + /// added it returns false, otherwise true. + /// + /// # Safety + /// + /// * [`RelocatableVec::init()`] must be called once before + /// + pub unsafe fn push(&mut self, value: T) -> bool { + self.push_impl(value) + } + + /// Fill the remaining space of the vector with value. + /// + /// # Safety + /// + /// * [`RelocatableVec::init()`] must be called once before + /// + pub unsafe fn fill(&mut self, value: T) + where + T: Clone, + { + self.fill_impl(value) + } + + /// Fill the remaining space of the vector by calling the provided closure repeatedly + /// + /// # Safety + /// + /// * [`RelocatableVec::init()`] must be called once before + /// + pub unsafe fn fill_with T>(&mut self, f: F) { + self.fill_with_impl(f) + } + + /// Append all elements from other via [`Clone`]. + /// + /// # Safety + /// + /// * [`RelocatableVec::init()`] must be called once before + /// + pub unsafe fn extend_from_slice(&mut self, other: &[T]) -> bool + where + T: Clone, + { + self.extend_from_slice_impl(other) + } + + /// Removes the last element of the vector and returns it to the user. If the vector is empty + /// it returns [`None`]. 
+ /// + /// # Safety + /// + /// * [`RelocatableVec::init()`] must be called once before + /// + pub unsafe fn pop(&mut self) -> Option { + self.pop_impl() + } + + /// Removes all elements from the vector + /// + /// # Safety + /// + /// * [`RelocatableVec::init()`] must be called once before + /// + pub unsafe fn clear(&mut self) { + self.clear_impl() + } + + /// Returns a slice to the contents of the vector + /// + /// # Safety + /// + /// * [`RelocatableVec::init()`] must be called once before + /// + pub unsafe fn as_slice(&self) -> &[T] { + self.as_slice_impl() + } + + /// Returns a mutable slice to the contents of the vector + /// + /// # Safety + /// + /// * [`RelocatableVec::init()`] must be called once before + /// + pub unsafe fn as_mut_slice(&mut self) -> &mut [T] { + self.as_mut_slice_impl() + } } } @@ -422,16 +562,31 @@ impl<'de, T: Deserialize<'de>, const CAPACITY: usize> Deserialize<'de> impl PlacementDefault for FixedSizeVec { unsafe fn placement_default(ptr: *mut Self) { let state_ptr = core::ptr::addr_of_mut!((*ptr).state); - state_ptr.write(Self::initialize_state()) + state_ptr.write(unsafe { RelocatableVec::new_uninit(CAPACITY) }); + let allocator = BumpAllocator::new(core::ptr::addr_of!((*ptr)._data) as usize); + (*ptr) + .state + .init(&allocator) + .expect("All required memory is preallocated."); } } impl Default for FixedSizeVec { fn default() -> Self { - Self { - state: Self::initialize_state(), + let mut new_self = Self { + state: unsafe { RelocatableVec::new_uninit(CAPACITY) }, _data: core::array::from_fn(|_| MaybeUninit::uninit()), - } + }; + + let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self._data) as usize); + unsafe { + new_self + .state + .init(&allocator) + .expect("All required memory is preallocated.") + }; + + new_self } } @@ -468,15 +623,6 @@ impl Clone for FixedSizeVec { unsafe impl Send for FixedSizeVec {} impl FixedSizeVec { - fn initialize_state() -> RelocatableVec { - unsafe { - RelocatableVec::new( - CAPACITY, - align_to::>(std::mem::size_of::>()) as isize, - ) - } - } - /// Creates a new vector. pub fn new() -> Self { Self::default() @@ -516,6 +662,11 @@ impl FixedSizeVec { unsafe { self.state.fill(value) } } + /// Fill the remaining space of the vector with value. + pub fn fill_with T>(&mut self, f: F) { + unsafe { self.state.fill_with(f) } + } + /// Append all elements from other via [`Clone`]. pub fn extend_from_slice(&mut self, other: &[T]) -> bool where diff --git a/iceoryx2-bb/container/tests/slotmap_tests.rs b/iceoryx2-bb/container/tests/slotmap_tests.rs new file mode 100644 index 000000000..1ed75f42d --- /dev/null +++ b/iceoryx2-bb/container/tests/slotmap_tests.rs @@ -0,0 +1,202 @@ +// Copyright (c) 2024 Contributors to the Eclipse Foundation +// +// See the NOTICE file(s) distributed with this work for additional +// information regarding copyright ownership. +// +// This program and the accompanying materials are made available under the +// terms of the Apache Software License 2.0 which is available at +// https://www.apache.org/licenses/LICENSE-2.0, or the MIT license +// which is available at https://opensource.org/licenses/MIT. 
+// +// SPDX-License-Identifier: Apache-2.0 OR MIT + +use iceoryx2_bb_container::slotmap::SlotMap; +use iceoryx2_bb_testing::assert_that; + +mod slot_map { + + use iceoryx2_bb_container::slotmap::{FixedSizeSlotMap, SlotMapKey}; + + use super::*; + + const SUT_CAPACITY: usize = 128; + type Sut = SlotMap; + type FixedSizeSut = FixedSizeSlotMap; + + #[test] + fn new_slotmap_is_empty() { + let sut = Sut::new(SUT_CAPACITY); + + assert_that!(sut, len 0); + assert_that!(sut, is_empty); + assert_that!(sut.is_full(), eq false); + assert_that!(sut.capacity(), eq SUT_CAPACITY); + } + + #[test] + fn new_fixed_size_slotmap_is_empty() { + let sut = FixedSizeSut::new(); + + assert_that!(sut, len 0); + assert_that!(sut, is_empty); + assert_that!(sut.is_full(), eq false); + assert_that!(sut.capacity(), eq SUT_CAPACITY); + } + + #[test] + fn inserting_elements_works() { + let mut sut = FixedSizeSut::new(); + + for i in 0..SUT_CAPACITY { + assert_that!(sut.is_full(), eq false); + let key = sut.insert(i).unwrap(); + *sut.get_mut(key).unwrap() += i; + assert_that!(*sut.get(key).unwrap(), eq 2 * i); + assert_that!(sut, len i + 1); + assert_that!(sut.is_empty(), eq false); + } + + assert_that!(sut.is_full(), eq true); + assert_that!(sut.insert(123), is_none); + } + + #[test] + fn insert_when_full_fails() { + let mut sut = FixedSizeSut::new(); + + for i in 0..SUT_CAPACITY { + assert_that!(sut.insert(i), is_some); + } + + assert_that!(sut.insert(34), is_none); + } + + #[test] + fn removing_elements_works() { + let mut sut = FixedSizeSut::new(); + let mut keys = vec![]; + + for i in 0..SUT_CAPACITY { + keys.push(sut.insert(i).unwrap()); + } + + for (n, key) in keys.iter().enumerate() { + assert_that!(sut.len(), eq sut.capacity() - n); + assert_that!(sut.is_empty(), eq false); + assert_that!(sut.contains(*key), eq true); + assert_that!(sut.remove(*key), eq true); + assert_that!(sut.remove(*key), eq false); + assert_that!(sut.contains(*key), eq false); + assert_that!(sut.is_full(), eq false); + + assert_that!(sut.get(*key), is_none); + assert_that!(sut.get_mut(*key), is_none); + } + + assert_that!(sut.is_empty(), eq true); + } + + #[test] + fn removing_out_of_bounds_key_returns_false() { + let mut sut = FixedSizeSut::new(); + + assert_that!(sut.remove(SlotMapKey::new(SUT_CAPACITY + 1)), eq false); + } + + #[test] + fn insert_at_works() { + let mut sut = FixedSizeSut::new(); + + let key = SlotMapKey::new(5); + let value = 71823; + assert_that!(sut.insert_at(key, 781), eq true); + assert_that!(sut.insert_at(key, value), eq true); + + assert_that!(*sut.get(key).unwrap(), eq value); + } + + #[test] + fn insert_at_and_remove_adjust_map_len_correctly() { + let mut sut = FixedSizeSut::new(); + + for n in 0..SUT_CAPACITY { + let key = SlotMapKey::new(n); + assert_that!(sut.len(), eq n); + assert_that!(sut.insert_at(key, 0), eq true); + } + assert_that!(sut.len(), eq SUT_CAPACITY); + + for n in (0..SUT_CAPACITY).rev() { + let key = SlotMapKey::new(n); + assert_that!(sut.remove(key), eq true); + assert_that!(sut.remove(key), eq false); + assert_that!(sut.len(), eq n); + } + assert_that!(sut.len(), eq 0); + } + + #[test] + fn insert_does_not_use_insert_at_indices() { + let mut sut = FixedSizeSut::new(); + + for n in 0..SUT_CAPACITY / 2 { + let key = SlotMapKey::new(2 * n + 1); + assert_that!(sut.insert_at(key, 0), eq true); + } + + for _ in 0..SUT_CAPACITY / 2 { + let key = sut.insert(0); + assert_that!(key, is_some); + assert_that!(key.unwrap().value() % 2, eq 0); + } + + assert_that!(sut.insert(0), is_none); + } + + #[test] + 
fn insert_at_out_of_bounds_key_returns_false() { + let mut sut = FixedSizeSut::new(); + let key = SlotMapKey::new(SUT_CAPACITY + 1); + assert_that!(sut.insert_at(key, 781), eq false); + } + + #[test] + fn iterating_works() { + let mut sut = FixedSizeSut::new(); + let mut keys = vec![]; + + for i in 0..SUT_CAPACITY { + keys.push(sut.insert(5 * i + 3).unwrap()); + } + + for (key, value) in sut.iter() { + assert_that!(*value, eq 5 * key.value() + 3); + } + } + + #[test] + fn insert_remove_and_insert_works() { + let mut sut = FixedSizeSut::new(); + + for _ in 0..SUT_CAPACITY { + assert_that!(sut.insert(3), is_some); + } + + for n in 0..SUT_CAPACITY / 2 { + assert_that!(sut.remove(SlotMapKey::new(2 * n)), eq true); + } + + for _ in 0..SUT_CAPACITY / 2 { + let key = sut.insert(2); + assert_that!(key, is_some); + } + + for (key, value) in sut.iter() { + if key.value() % 2 == 0 { + assert_that!(*value, eq 2); + } else { + assert_that!(*value, eq 3); + } + } + } +} diff --git a/iceoryx2-bb/container/tests/vec_tests.rs b/iceoryx2-bb/container/tests/vec_tests.rs index 81277727a..6c1ef391f 100644 --- a/iceoryx2-bb/container/tests/vec_tests.rs +++ b/iceoryx2-bb/container/tests/vec_tests.rs @@ -1,4 +1,4 @@ -// Copyright (c) 2023 Contributors to the Eclipse Foundation +// Copyright (c) 2023 - 2024 Contributors to the Eclipse Foundation // // See the NOTICE file(s) distributed with this work for additional // information regarding copyright ownership. @@ -10,15 +10,17 @@ // // SPDX-License-Identifier: Apache-2.0 OR MIT +use iceoryx2_bb_container::vec::*; +use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; +use iceoryx2_bb_elementary::placement_default::PlacementDefault; +use iceoryx2_bb_elementary::relocatable_container::RelocatableContainer; +use iceoryx2_bb_testing::assert_that; +use iceoryx2_bb_testing::lifetime_tracker::LifetimeTracker; +use iceoryx2_bb_testing::memory::RawMemory; +use serde_test::{assert_tokens, Token}; + mod fixed_size_vec { - use iceoryx2_bb_container::vec::*; - use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; - use iceoryx2_bb_elementary::placement_default::PlacementDefault; - use iceoryx2_bb_elementary::relocatable_container::RelocatableContainer; - use iceoryx2_bb_testing::assert_that; - use iceoryx2_bb_testing::lifetime_tracker::LifetimeTracker; - use iceoryx2_bb_testing::memory::RawMemory; - use serde_test::{assert_tokens, Token}; + use super::*; const SUT_CAPACITY: usize = 128; type Sut = FixedSizeVec; @@ -312,3 +314,23 @@ mod fixed_size_vec { ); } } + +mod vec { + use super::*; + + #[test] + fn push_and_pop_element_works() { + const CAPACITY: usize = 12; + const TEST_VALUE: usize = 89123; + let mut sut = Vec::::new(CAPACITY); + assert_that!(sut.capacity(), eq CAPACITY); + assert_that!(sut, len 0); + + sut.push(TEST_VALUE); + + assert_that!(sut, len 1); + assert_that!(sut[0], eq TEST_VALUE); + assert_that!(sut.pop(), eq Some(TEST_VALUE)); + assert_that!(sut, len 0); + } +} diff --git a/iceoryx2-bb/elementary/src/bump_allocator.rs b/iceoryx2-bb/elementary/src/bump_allocator.rs index 9bdd0d846..10ba93024 100644 --- a/iceoryx2-bb/elementary/src/bump_allocator.rs +++ b/iceoryx2-bb/elementary/src/bump_allocator.rs @@ -14,15 +14,14 @@ use crate::{allocator::BaseAllocator, math::align}; use iceoryx2_pal_concurrency_sync::iox_atomic::IoxAtomicUsize; use std::sync::atomic::Ordering; -/// Simple BumpAllocator for testing purposes. Do not use this in production. 
If you are looking -/// for a production ready BumpAllocator use the one from iceoryx2_bb_memory::bump_allocator -#[doc(hidden)] +/// A minimalistic [`BumpAllocator`]. pub struct BumpAllocator { start: usize, pos: IoxAtomicUsize, } impl BumpAllocator { + /// Creates a new [`BumpAllocator`] that manages the memory starting at `start`. pub fn new(start: usize) -> Self { Self { start, diff --git a/iceoryx2-bb/elementary/src/generic_pointer.rs b/iceoryx2-bb/elementary/src/generic_pointer.rs new file mode 100644 index 000000000..b24946b59 --- /dev/null +++ b/iceoryx2-bb/elementary/src/generic_pointer.rs @@ -0,0 +1,21 @@ +// Copyright (c) 2024 Contributors to the Eclipse Foundation +// +// See the NOTICE file(s) distributed with this work for additional +// information regarding copyright ownership. +// +// This program and the accompanying materials are made available under the +// terms of the Apache Software License 2.0 which is available at +// https://www.apache.org/licenses/LICENSE-2.0, or the MIT license +// which is available at https://opensource.org/licenses/MIT. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT + +use std::fmt::Debug; + +use crate::pointer_trait::PointerTrait; + +/// Trait that allows to use typed pointers as generic arguments for structs. +pub trait GenericPointer { + /// The underlying pointer type. + type Type: PointerTrait + Debug; +} diff --git a/iceoryx2-bb/elementary/src/lib.rs b/iceoryx2-bb/elementary/src/lib.rs index 3b1bfb0df..e55db7c28 100644 --- a/iceoryx2-bb/elementary/src/lib.rs +++ b/iceoryx2-bb/elementary/src/lib.rs @@ -19,6 +19,7 @@ pub mod enum_gen; pub mod alignment; pub mod allocator; pub mod bump_allocator; +pub mod generic_pointer; pub mod lazy_singleton; pub mod math; pub mod owning_pointer; diff --git a/iceoryx2-bb/elementary/src/owning_pointer.rs b/iceoryx2-bb/elementary/src/owning_pointer.rs index 5194cd0b3..bc3966091 100644 --- a/iceoryx2-bb/elementary/src/owning_pointer.rs +++ b/iceoryx2-bb/elementary/src/owning_pointer.rs @@ -15,9 +15,14 @@ use std::alloc::Layout; use std::alloc::{alloc, dealloc}; +use std::fmt::Debug; +use crate::generic_pointer::GenericPointer; use crate::pointer_trait::PointerTrait; +#[derive(Debug)] +pub struct GenericOwningPointer; + /// Representation of a pointer which owns its memory. #[repr(C)] #[derive(Debug)] @@ -63,3 +68,7 @@ impl PointerTrait for OwningPointer { self.ptr } } + +impl GenericPointer for GenericOwningPointer { + type Type = OwningPointer; +} diff --git a/iceoryx2-bb/elementary/src/relocatable_container.rs b/iceoryx2-bb/elementary/src/relocatable_container.rs index d989683b3..35792666c 100644 --- a/iceoryx2-bb/elementary/src/relocatable_container.rs +++ b/iceoryx2-bb/elementary/src/relocatable_container.rs @@ -18,19 +18,6 @@ use crate::{allocator::AllocationError, allocator::BaseAllocator}; /// mapped at a different virtual memory position the underlying constructs must be relocatable in /// the sense that they should not rely on absolut memory positions. pub trait RelocatableContainer { - /// Creates a new RelocatableContainer. It assumes that the memory of size - /// [`RelocatableContainer::memory_size()`] has the position self + distance_to_data. - /// This approach requires that the object itself and the data of the object are placed in - /// the same shared memory object. - /// - /// # Safety - /// - /// * `distance_to_data` is the offset to the data. The offset refers to the pointer value of - /// the [`RelocatableContainer`] - memory position. 
- /// * the provided memory must have the size of [`RelocatableContainer::memory_size()`] - /// - unsafe fn new(capacity: usize, distance_to_data: isize) -> Self; - /// Creates a new uninitialized RelocatableContainer. Before the container can be used the method /// [`RelocatableContainer::init()`] must be called. /// @@ -51,7 +38,7 @@ pub trait RelocatableContainer { /// * Shall be only used when the [`RelocatableContainer`] was created with /// [`RelocatableContainer::new_uninit()`] /// - unsafe fn init(&self, allocator: &T) -> Result<(), AllocationError>; + unsafe fn init(&mut self, allocator: &T) -> Result<(), AllocationError>; /// Returns the amount of memory the object requires. The whole memory consumption is /// `std::mem::size_of::() + RelocatableContainer::memory_size()`. diff --git a/iceoryx2-bb/elementary/src/relocatable_ptr.rs b/iceoryx2-bb/elementary/src/relocatable_ptr.rs index 146a0a9c9..4529f613a 100644 --- a/iceoryx2-bb/elementary/src/relocatable_ptr.rs +++ b/iceoryx2-bb/elementary/src/relocatable_ptr.rs @@ -64,9 +64,13 @@ //! } //! ``` +use crate::generic_pointer::GenericPointer; pub use crate::pointer_trait::PointerTrait; use iceoryx2_pal_concurrency_sync::iox_atomic::IoxAtomicIsize; -use std::{marker::PhantomData, ptr::NonNull}; +use std::{fmt::Debug, marker::PhantomData, ptr::NonNull}; + +#[derive(Debug)] +pub struct GenericRelocatablePointer; /// A [`RelocatablePointer`] stores only the distance from its memory starting position to the /// memory location it is pointing to. When the [`RelocatablePointer`] is now shared between @@ -138,3 +142,7 @@ impl PointerTrait for RelocatablePointer { self.as_ptr() as *mut T } } + +impl GenericPointer for GenericRelocatablePointer { + type Type = RelocatablePointer; +} diff --git a/iceoryx2-bb/lock-free/src/mpmc/bit_set.rs b/iceoryx2-bb/lock-free/src/mpmc/bit_set.rs index 3a14966f4..80057a461 100644 --- a/iceoryx2-bb/lock-free/src/mpmc/bit_set.rs +++ b/iceoryx2-bb/lock-free/src/mpmc/bit_set.rs @@ -37,7 +37,7 @@ //! 
``` use iceoryx2_bb_elementary::{ - math::align_to, + bump_allocator::BumpAllocator, math::unaligned_mem_size, owning_pointer::OwningPointer, relocatable_container::RelocatableContainer, @@ -125,7 +125,7 @@ pub mod details { } unsafe fn init( - &self, + &mut self, allocator: &T, ) -> Result<(), iceoryx2_bb_elementary::allocator::AllocationError> { if self.is_memory_initialized.load(Ordering::Relaxed) { @@ -160,16 +160,6 @@ pub mod details { fn memory_size(capacity: usize) -> usize { Self::const_memory_size(capacity) } - - unsafe fn new(capacity: usize, distance_to_data: isize) -> Self { - Self { - data_ptr: RelocatablePointer::new(distance_to_data), - capacity, - array_capacity: Self::array_capacity(capacity), - is_memory_initialized: IoxAtomicBool::new(true), - reset_position: IoxAtomicUsize::new(0), - } - } } impl + Debug> BitSet { @@ -313,16 +303,20 @@ unsafe impl Sync for FixedSizeBitSet {} impl Default for FixedSizeBitSet { fn default() -> Self { - Self { - bitset: unsafe { - RelocatableBitSet::new( - CAPACITY, - align_to::(std::mem::size_of::()) - as _, - ) - }, + let mut new_self = Self { + bitset: unsafe { RelocatableBitSet::new_uninit(CAPACITY) }, data: core::array::from_fn(|_| details::BitsetElement::new(0)), - } + }; + + let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self.data) as usize); + unsafe { + new_self + .bitset + .init(&allocator) + .expect("All required memory is preallocated.") + }; + + new_self } } diff --git a/iceoryx2-bb/lock-free/src/mpmc/container.rs b/iceoryx2-bb/lock-free/src/mpmc/container.rs index 5ba86939e..90cb8f253 100644 --- a/iceoryx2-bb/lock-free/src/mpmc/container.rs +++ b/iceoryx2-bb/lock-free/src/mpmc/container.rs @@ -54,6 +54,7 @@ //! ``` pub use crate::mpmc::unique_index_set::ReleaseMode; +use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; pub use iceoryx2_bb_elementary::CallbackProgression; use iceoryx2_bb_elementary::allocator::AllocationError; @@ -198,7 +199,7 @@ impl RelocatableContainer for Container { } unsafe fn init( - &self, + &mut self, allocator: &Allocator, ) -> Result<(), AllocationError> { if self.is_memory_initialized.load(Ordering::Relaxed) { @@ -234,30 +235,6 @@ impl RelocatableContainer for Container { Ok(()) } - unsafe fn new(capacity: usize, distance_to_data: isize) -> Self { - let unique_index_set_distance = distance_to_data - - align_to::(std::mem::size_of::>()) as isize - + align_to::(std::mem::size_of::()) as isize; - - let distance_to_active_index = align_to::( - distance_to_data as usize + (std::mem::size_of::() * (capacity + 1)), - ) as isize; - let distance_to_container_data = align_to::>>( - distance_to_active_index as usize + (std::mem::size_of::() * capacity), - ) as isize - - std::mem::size_of::>() as isize; - - Self { - container_id: UniqueId::new(), - active_index_ptr: RelocatablePointer::new(distance_to_active_index), - data_ptr: RelocatablePointer::new(distance_to_container_data), - capacity, - change_counter: IoxAtomicU64::new(0), - index_set: UniqueIndexSet::new(capacity, unique_index_set_distance), - is_memory_initialized: IoxAtomicBool::new(true), - } - } - fn memory_size(capacity: usize) -> usize { Self::const_memory_size(capacity) } @@ -305,8 +282,7 @@ impl Container { /// /// # Safety /// - /// * Ensure that the either [`Container::new()`] was used or [`Container::init()`] was used - /// before calling this method + /// * Ensure that [`Container::init()`] was called before calling this method /// * Use [`Container::remove()`] to release the acquired index again. 
Otherwise, the /// element will leak. /// @@ -337,8 +313,7 @@ impl Container { /// /// # Safety /// - /// * Ensure that the either [`Container::new()`] was used or [`Container::init()`] was used - /// before calling this method + /// * Ensure that [`Container::init()`] was called before calling this method /// * Ensure that no one else possesses the [`UniqueIndex`] and the index was unrecoverable /// lost /// * Ensure that the `handle` was acquired by the same [`Container`] @@ -368,8 +343,7 @@ impl Container { /// /// # Safety /// - /// * Ensure that the either [`Container::new()`] was used or [`Container::init()`] was used - /// before calling this method + /// * Ensure that [`Container::init()`] was called before calling this method /// pub unsafe fn get_state(&self) -> ContainerState { self.verify_memory_initialization("get_state"); @@ -384,8 +358,7 @@ impl Container { /// /// # Safety /// - /// * Ensure that the either [`Container::new()`] was used or [`Container::init()`] was used - /// before calling this method + /// * Ensure that [`Container::init()`] was called before calling this method /// * Ensure that the input argument `previous_state` was acquired by the same [`Container`] /// with [`Container::get_state()`], otherwise the method will panic. /// @@ -472,18 +445,23 @@ pub struct FixedSizeContainer { impl Default for FixedSizeContainer { fn default() -> Self { - Self { - container: unsafe { - Container::new( - CAPACITY, - align_to::(std::mem::size_of::>()) as isize, - ) - }, + let mut new_self = Self { + container: unsafe { Container::new_uninit(CAPACITY) }, next_free_index: core::array::from_fn(|i| UnsafeCell::new(i as u32 + 1)), next_free_index_plus_one: UnsafeCell::new(CAPACITY as u32 + 1), active_index: core::array::from_fn(|_| IoxAtomicU64::new(0)), data: core::array::from_fn(|_| UnsafeCell::new(MaybeUninit::uninit())), - } + }; + + let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self.next_free_index) as usize); + unsafe { + new_self + .container + .init(&allocator) + .expect("All required memory is preallocated.") + }; + + new_self } } diff --git a/iceoryx2-bb/lock-free/src/mpmc/unique_index_set.rs b/iceoryx2-bb/lock-free/src/mpmc/unique_index_set.rs index 68cd0d391..ceb194a35 100644 --- a/iceoryx2-bb/lock-free/src/mpmc/unique_index_set.rs +++ b/iceoryx2-bb/lock-free/src/mpmc/unique_index_set.rs @@ -27,7 +27,7 @@ //! let mut memory = [0u8; UniqueIndexSet::const_memory_size(CAPACITY)]; //! let allocator = BumpAllocator::new(memory.as_mut_ptr() as usize); //! -//! let index_set = unsafe { UniqueIndexSet::new_uninit(CAPACITY) }; +//! let mut index_set = unsafe { UniqueIndexSet::new_uninit(CAPACITY) }; //! unsafe { index_set.init(&allocator) }.expect("failed to allocate enough memory"); //! //! let new_index = match unsafe { index_set.acquire() } { @@ -82,8 +82,8 @@ //! 
``` use iceoryx2_bb_elementary::allocator::{AllocationError, BaseAllocator}; +use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; use iceoryx2_bb_elementary::enum_gen; -use iceoryx2_bb_elementary::math::align_to; use iceoryx2_bb_elementary::pointer_trait::PointerTrait; use iceoryx2_bb_elementary::relocatable_container::RelocatableContainer; use iceoryx2_bb_elementary::relocatable_ptr::RelocatablePointer; @@ -187,7 +187,7 @@ impl Drop for UniqueIndex<'_> { /// let mut memory = [0u8; UniqueIndexSet::const_memory_size(CAPACITY)]; /// let allocator = BumpAllocator::new(memory.as_mut_ptr() as usize); /// -/// let index_set = unsafe { UniqueIndexSet::new_uninit(CAPACITY) }; +/// let mut index_set = unsafe { UniqueIndexSet::new_uninit(CAPACITY) }; /// unsafe { index_set.init(&allocator) }.expect("failed to allocate enough memory"); /// /// let new_index = match unsafe { index_set.acquire() } { @@ -200,6 +200,7 @@ impl Drop for UniqueIndex<'_> { /// ``` /// use iceoryx2_bb_lock_free::mpmc::unique_index_set::*; /// use iceoryx2_bb_elementary::relocatable_container::*; +/// use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; /// use std::mem::MaybeUninit; /// /// const CAPACITY: usize = 128; @@ -214,15 +215,16 @@ impl Drop for UniqueIndex<'_> { /// /// impl FixedSizeSet { /// pub fn new() -> Self { -/// FixedSizeSet { -/// set: unsafe { -/// UniqueIndexSet::new(CAPACITY, -/// // distance to data beginning from the start of the set (UniqueIndexSet) -/// // member start -/// std::mem::size_of::() as isize) -/// }, +/// let mut new_self = FixedSizeSet { +/// set: unsafe { UniqueIndexSet::new_uninit(CAPACITY) }, /// data: [MaybeUninit::uninit(); UniqueIndexSet::const_memory_size(CAPACITY)] -/// } +/// }; +/// +/// let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self.data) as usize); +/// unsafe { +/// new_self.set.init(&allocator).expect("Enough memory provided.") +/// }; +/// new_self /// } /// } /// ``` @@ -277,7 +279,7 @@ impl RelocatableContainer for UniqueIndexSet { } } - unsafe fn init(&self, allocator: &T) -> Result<(), AllocationError> { + unsafe fn init(&mut self, allocator: &T) -> Result<(), AllocationError> { if self.is_memory_initialized.load(Ordering::Relaxed) { fatal_panic!(from self, "Memory already initialized. Initializing it twice may lead to undefined behavior."); } @@ -299,15 +301,6 @@ impl RelocatableContainer for UniqueIndexSet { Ok(()) } - unsafe fn new(capacity: usize, distance_to_data: isize) -> Self { - Self { - data_ptr: RelocatablePointer::new(distance_to_data), - capacity: capacity as u32, - head: IoxAtomicU64::new(0), - is_memory_initialized: IoxAtomicBool::new(true), - } - } - fn memory_size(capacity: usize) -> usize { Self::const_memory_size(capacity) } @@ -333,8 +326,7 @@ impl UniqueIndexSet { /// /// # Safety /// - /// * Ensure that either the [`UniqueIndexSet`] was created with [`UniqueIndexSet::new()`] or - /// [`UniqueIndexSet::init()`] was called. + /// * Ensure that [`UniqueIndexSet::init()`] was called once. /// pub unsafe fn acquire(&self) -> Result, UniqueIndexSetAcquireFailure> { self.verify_init("acquire"); @@ -392,8 +384,7 @@ impl UniqueIndexSet { /// /// # Safety /// - /// * Ensure that either the [`UniqueIndexSet`] was created with [`UniqueIndexSet::new()`] or - /// [`UniqueIndexSet::init()`] was called. + /// * Ensure that [`UniqueIndexSet::init()`] was called once. /// * The index must be manually released with [`UniqueIndexSet::release_raw_index()`] /// otherwise the index is leaked. 
pub unsafe fn acquire_raw_index(&self) -> Result { @@ -446,6 +437,7 @@ impl UniqueIndexSet { /// /// # Safety /// + /// * Ensure that [`UniqueIndexSet::init()`] was called once. /// * It must be ensured that the index was acquired before and is not released twice. /// * Shall be only used when the index was acquired with /// [`UniqueIndexSet::acquire_raw_index()`] @@ -529,16 +521,7 @@ pub struct FixedSizeUniqueIndexSet { impl Default for FixedSizeUniqueIndexSet { fn default() -> Self { - Self { - state: unsafe { - UniqueIndexSet::new( - CAPACITY, - align_to::>(std::mem::size_of::()) as isize, - ) - }, - next_free_index: core::array::from_fn(|i| UnsafeCell::new(i as u32 + 1)), - next_free_index_plus_one: UnsafeCell::new(CAPACITY as u32 + 1), - } + Self::new_with_reduced_capacity(CAPACITY).expect("Does not exceed supported capacity.") } } @@ -565,16 +548,21 @@ impl FixedSizeUniqueIndexSet { "Provided value of capacity is zero."); } - Ok(Self { - state: unsafe { - UniqueIndexSet::new( - capacity, - align_to::>(std::mem::size_of::()) as isize, - ) - }, + let mut new_self = Self { + state: unsafe { UniqueIndexSet::new_uninit(capacity) }, next_free_index: core::array::from_fn(|i| UnsafeCell::new(i as u32 + 1)), next_free_index_plus_one: UnsafeCell::new(capacity as u32 + 1), - }) + }; + + let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self.next_free_index) as usize); + unsafe { + new_self + .state + .init(&allocator) + .expect("All required memory is preallocated.") + }; + + Ok(new_self) } /// See [`UniqueIndexSet::acquire()`] diff --git a/iceoryx2-bb/lock-free/src/spsc/index_queue.rs b/iceoryx2-bb/lock-free/src/spsc/index_queue.rs index 50804c4ff..b79561852 100644 --- a/iceoryx2-bb/lock-free/src/spsc/index_queue.rs +++ b/iceoryx2-bb/lock-free/src/spsc/index_queue.rs @@ -45,7 +45,7 @@ use std::{alloc::Layout, cell::UnsafeCell, fmt::Debug, sync::atomic::Ordering}; use iceoryx2_bb_elementary::{ - math::align_to, owning_pointer::OwningPointer, pointer_trait::PointerTrait, + bump_allocator::BumpAllocator, owning_pointer::OwningPointer, pointer_trait::PointerTrait, relocatable_container::RelocatableContainer, relocatable_ptr::RelocatablePointer, }; use iceoryx2_bb_log::{fail, fatal_panic}; @@ -152,7 +152,7 @@ pub mod details { } unsafe fn init( - &self, + &mut self, allocator: &T, ) -> Result<(), iceoryx2_bb_elementary::allocator::AllocationError> { if self.is_memory_initialized.load(Ordering::Relaxed) { @@ -175,18 +175,6 @@ pub mod details { Ok(()) } - unsafe fn new(capacity: usize, distance_to_data: isize) -> Self { - Self { - data_ptr: RelocatablePointer::new(distance_to_data), - capacity, - write_position: IoxAtomicUsize::new(0), - read_position: IoxAtomicUsize::new(0), - has_producer: IoxAtomicBool::new(true), - has_consumer: IoxAtomicBool::new(true), - is_memory_initialized: IoxAtomicBool::new(true), - } - } - fn memory_size(capacity: usize) -> usize { Self::const_memory_size(capacity) } @@ -390,16 +378,20 @@ impl Default for FixedSizeIndexQueue { impl FixedSizeIndexQueue { /// Creates a new empty [`FixedSizeIndexQueue`]. 
pub fn new() -> Self { - Self { - state: unsafe { - RelocatableIndexQueue::new( - CAPACITY, - align_to::>(std::mem::size_of::()) - as isize, - ) - }, + let mut new_self = Self { + state: unsafe { RelocatableIndexQueue::new_uninit(CAPACITY) }, data: core::array::from_fn(|_| UnsafeCell::new(0)), - } + }; + + let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self.data) as usize); + unsafe { + new_self + .state + .init(&allocator) + .expect("All required memory is preallocated.") + }; + + new_self } /// See [`IndexQueue::acquire_producer()`] diff --git a/iceoryx2-bb/lock-free/src/spsc/safely_overflowing_index_queue.rs b/iceoryx2-bb/lock-free/src/spsc/safely_overflowing_index_queue.rs index a57baa44e..976dd1afc 100644 --- a/iceoryx2-bb/lock-free/src/spsc/safely_overflowing_index_queue.rs +++ b/iceoryx2-bb/lock-free/src/spsc/safely_overflowing_index_queue.rs @@ -47,7 +47,7 @@ use iceoryx2_pal_concurrency_sync::iox_atomic::{IoxAtomicBool, IoxAtomicUsize}; use std::{alloc::Layout, cell::UnsafeCell, fmt::Debug, sync::atomic::Ordering}; use iceoryx2_bb_elementary::{ - math::align_to, owning_pointer::OwningPointer, pointer_trait::PointerTrait, + bump_allocator::BumpAllocator, owning_pointer::OwningPointer, pointer_trait::PointerTrait, relocatable_container::RelocatableContainer, relocatable_ptr::RelocatablePointer, }; use iceoryx2_bb_log::{fail, fatal_panic}; @@ -165,7 +165,7 @@ pub mod details { } unsafe fn init( - &self, + &mut self, allocator: &T, ) -> Result<(), iceoryx2_bb_elementary::allocator::AllocationError> { if self.is_memory_initialized.load(Ordering::Relaxed) { @@ -188,18 +188,6 @@ pub mod details { Ok(()) } - unsafe fn new(capacity: usize, distance_to_data: isize) -> Self { - Self { - data_ptr: RelocatablePointer::new(distance_to_data), - capacity, - write_position: IoxAtomicUsize::new(0), - read_position: IoxAtomicUsize::new(0), - has_producer: IoxAtomicBool::new(true), - has_consumer: IoxAtomicBool::new(true), - is_memory_initialized: IoxAtomicBool::new(true), - } - } - fn memory_size(capacity: usize) -> usize { Self::const_memory_size(capacity) } @@ -445,18 +433,21 @@ impl Default for FixedSizeSafelyOverflowingIndexQueue FixedSizeSafelyOverflowingIndexQueue { /// Creates a new empty [`FixedSizeSafelyOverflowingIndexQueue`]. 
pub fn new() -> Self { - Self { - state: unsafe { - RelocatableSafelyOverflowingIndexQueue::new( - CAPACITY, - align_to::>(std::mem::size_of::< - RelocatableSafelyOverflowingIndexQueue, - >()) as isize, - ) - }, + let mut new_self = Self { + state: unsafe { RelocatableSafelyOverflowingIndexQueue::new_uninit(CAPACITY) }, data: core::array::from_fn(|_| UnsafeCell::new(0)), data_plus_one: UnsafeCell::new(0), - } + }; + + let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self.data) as usize); + unsafe { + new_self + .state + .init(&allocator) + .expect("All required memory is preallocated.") + }; + + new_self } /// See [`SafelyOverflowingIndexQueue::acquire_producer()`] diff --git a/iceoryx2-bb/lock-free/tests/mpmc_container_tests.rs b/iceoryx2-bb/lock-free/tests/mpmc_container_tests.rs index 1ae91a1b7..d8f3a1170 100644 --- a/iceoryx2-bb/lock-free/tests/mpmc_container_tests.rs +++ b/iceoryx2-bb/lock-free/tests/mpmc_container_tests.rs @@ -131,7 +131,7 @@ mod mpmc_container { // case - hack required since `T` cannot be used in const operations let mut memory = [0u8; Container::::const_memory_size(129_usize)]; let allocator = BumpAllocator::new(memory.as_mut_ptr() as usize); - let sut = unsafe { Container::::new_uninit(CAPACITY) }; + let mut sut = unsafe { Container::::new_uninit(CAPACITY) }; unsafe { assert_that!(sut.init(&allocator), is_ok) }; let mut stored_indices = vec![]; diff --git a/iceoryx2-bb/lock-free/tests/mpmc_unique_index_set_tests.rs b/iceoryx2-bb/lock-free/tests/mpmc_unique_index_set_tests.rs index 12a36a012..05c996e29 100644 --- a/iceoryx2-bb/lock-free/tests/mpmc_unique_index_set_tests.rs +++ b/iceoryx2-bb/lock-free/tests/mpmc_unique_index_set_tests.rs @@ -137,7 +137,7 @@ fn mpmc_unique_index_set_borrowed_indices_works() { fn mpmc_unique_index_set_acquire_and_release_works_with_uninitialized_memory() { let mut memory = [0u8; UniqueIndexSet::const_memory_size(128)]; let allocator = BumpAllocator::new(memory.as_mut_ptr() as usize); - let sut = unsafe { UniqueIndexSet::new_uninit(CAPACITY) }; + let mut sut = unsafe { UniqueIndexSet::new_uninit(CAPACITY) }; unsafe { assert_that!(sut.init(&allocator), is_ok) }; let mut ids = vec![]; diff --git a/iceoryx2-bb/memory/src/pool_allocator.rs b/iceoryx2-bb/memory/src/pool_allocator.rs index 9d0a6d978..c9f4c6e5b 100644 --- a/iceoryx2-bb/memory/src/pool_allocator.rs +++ b/iceoryx2-bb/memory/src/pool_allocator.rs @@ -47,8 +47,8 @@ //! Layout::from_size_align_unchecked(32, 4))}; //! 
``` +use iceoryx2_bb_elementary::bump_allocator::BumpAllocator; use iceoryx2_bb_elementary::math::align; -use iceoryx2_bb_elementary::math::align_to; use iceoryx2_bb_elementary::relocatable_container::*; use iceoryx2_bb_lock_free::mpmc::unique_index_set::*; @@ -125,7 +125,7 @@ impl PoolAllocator { /// * must be called exactly once before any other method can be called /// pub unsafe fn init( - &self, + &mut self, allocator: &Allocator, ) -> Result<(), AllocationError> { if self.is_memory_initialized.load(Ordering::Relaxed) { @@ -315,13 +315,13 @@ impl FixedSizePoolAllocator>(std::mem::size_of::()) as isize, - ) + UniqueIndexSet::new_uninit(std::cmp::min( + number_of_buckets, + MAX_NUMBER_OF_BUCKETS, + )) }, bucket_size: bucket_layout.size(), bucket_alignment: bucket_layout.align(), @@ -331,7 +331,17 @@ impl FixedSizePoolAllocator( - &self, + &mut self, _mgmt_allocator: &Allocator, ) -> Result<(), ShmAllocatorInitError> { let msg = "Unable to initialize allocator"; diff --git a/iceoryx2-cal/src/shm_allocator/mod.rs b/iceoryx2-cal/src/shm_allocator/mod.rs index 106e3b294..fa803a5af 100644 --- a/iceoryx2-cal/src/shm_allocator/mod.rs +++ b/iceoryx2-cal/src/shm_allocator/mod.rs @@ -76,7 +76,7 @@ pub trait ShmAllocator: Send + Sync + 'static { /// * must be called before any other method is called /// unsafe fn init( - &self, + &mut self, mgmt_allocator: &Allocator, ) -> Result<(), ShmAllocatorInitError>; diff --git a/iceoryx2-cal/src/shm_allocator/pool_allocator.rs b/iceoryx2-cal/src/shm_allocator/pool_allocator.rs index 20850e390..96f42abb0 100644 --- a/iceoryx2-cal/src/shm_allocator/pool_allocator.rs +++ b/iceoryx2-cal/src/shm_allocator/pool_allocator.rs @@ -88,7 +88,7 @@ impl ShmAllocator for PoolAllocator { } unsafe fn init( - &self, + &mut self, mgmt_allocator: &Allocator, ) -> Result<(), ShmAllocatorInitError> { let msg = "Unable to initialize allocator"; diff --git a/iceoryx2-cal/src/zero_copy_connection/used_chunk_list.rs b/iceoryx2-cal/src/zero_copy_connection/used_chunk_list.rs index 736193202..00c4c5b68 100644 --- a/iceoryx2-cal/src/zero_copy_connection/used_chunk_list.rs +++ b/iceoryx2-cal/src/zero_copy_connection/used_chunk_list.rs @@ -11,7 +11,7 @@ // SPDX-License-Identifier: Apache-2.0 OR MIT use iceoryx2_bb_elementary::{ - math::align_to, + bump_allocator::BumpAllocator, owning_pointer::OwningPointer, relocatable_container::RelocatableContainer, relocatable_ptr::{PointerTrait, RelocatablePointer}, @@ -72,7 +72,7 @@ pub mod details { } unsafe fn init( - &self, + &mut self, allocator: &T, ) -> Result<(), iceoryx2_bb_elementary::allocator::AllocationError> { if self.is_memory_initialized.load(Ordering::Relaxed) { @@ -107,14 +107,6 @@ pub mod details { fn memory_size(capacity: usize) -> usize { Self::const_memory_size(capacity) } - - unsafe fn new(capacity: usize, distance_to_data: isize) -> Self { - Self { - data_ptr: RelocatablePointer::new(distance_to_data), - capacity, - is_memory_initialized: IoxAtomicBool::new(true), - } - } } impl + Debug> UsedChunkList { @@ -175,15 +167,19 @@ pub struct FixedSizeUsedChunkList { impl Default for FixedSizeUsedChunkList { fn default() -> Self { - Self { - list: unsafe { - RelocatableUsedChunkList::new( - CAPACITY, - align_to::(std::mem::size_of::()) as _, - ) - }, + let mut new_self = Self { + list: unsafe { RelocatableUsedChunkList::new_uninit(CAPACITY) }, data: core::array::from_fn(|_| IoxAtomicBool::new(false)), - } + }; + + let allocator = BumpAllocator::new(core::ptr::addr_of!(new_self.data) as usize); + unsafe { + new_self + .list + 
.init(&allocator) + .expect("All required memory is preallocated.") + }; + new_self } } diff --git a/iceoryx2-cal/tests/event_id_tracker_tests.rs b/iceoryx2-cal/tests/event_id_tracker_tests.rs index d7e977863..5a8f840bd 100644 --- a/iceoryx2-cal/tests/event_id_tracker_tests.rs +++ b/iceoryx2-cal/tests/event_id_tracker_tests.rs @@ -39,7 +39,7 @@ mod event_id_tracker { const CAPACITY: usize = 5234; let mut memory = memory(); - let sut = unsafe { Sut::new_uninit(CAPACITY) }; + let mut sut = unsafe { Sut::new_uninit(CAPACITY) }; assert_that!(unsafe { sut.init(&allocator(&mut *memory)) }, is_ok); assert_that!(sut.trigger_id_max().as_value(), lt CAPACITY); } @@ -49,7 +49,7 @@ mod event_id_tracker { let mut memory = memory(); const CAPACITY: usize = 1234; - let sut = unsafe { Sut::new_uninit(CAPACITY) }; + let mut sut = unsafe { Sut::new_uninit(CAPACITY) }; assert_that!(unsafe { sut.init(&allocator(&mut *memory)) }, is_ok); assert_that!(unsafe { sut.acquire() }, eq None); @@ -66,7 +66,7 @@ mod event_id_tracker { let mut memory = memory(); const CAPACITY: usize = 1234; - let sut = unsafe { Sut::new_uninit(CAPACITY) }; + let mut sut = unsafe { Sut::new_uninit(CAPACITY) }; assert_that!(unsafe { sut.init(&allocator(&mut *memory)) }, is_ok); for i in 0..CAPACITY { @@ -89,7 +89,7 @@ mod event_id_tracker { let mut memory = memory(); const CAPACITY: usize = 3234; - let sut = unsafe { Sut::new_uninit(CAPACITY) }; + let mut sut = unsafe { Sut::new_uninit(CAPACITY) }; assert_that!(unsafe { sut.init(&allocator(&mut *memory)) }, is_ok); for i in 0..CAPACITY { @@ -117,7 +117,7 @@ mod event_id_tracker { let mut memory = memory(); const CAPACITY: usize = 234; - let sut = unsafe { Sut::new_uninit(CAPACITY) }; + let mut sut = unsafe { Sut::new_uninit(CAPACITY) }; assert_that!(unsafe { sut.init(&allocator(&mut *memory)) }, is_ok); for i in 0..CAPACITY { @@ -147,7 +147,7 @@ mod event_id_tracker { let mut memory = memory(); const CAPACITY: usize = 1234; - let sut = unsafe { Sut::new_uninit(CAPACITY) }; + let mut sut = unsafe { Sut::new_uninit(CAPACITY) }; assert_that!(unsafe { sut.init(&allocator(&mut *memory)) }, is_ok); assert_that!(unsafe { sut.acquire() }, eq None); diff --git a/iceoryx2-cal/tests/shm_allocator_pool_allocator_tests.rs b/iceoryx2-cal/tests/shm_allocator_pool_allocator_tests.rs index 44cc93569..90e2fba65 100644 --- a/iceoryx2-cal/tests/shm_allocator_pool_allocator_tests.rs +++ b/iceoryx2-cal/tests/shm_allocator_pool_allocator_tests.rs @@ -42,7 +42,7 @@ mod shm_allocator_pool_allocator { MEM_SIZE, ); let config = &Config { bucket_layout }; - let sut = Box::new(unsafe { + let mut sut = Box::new(unsafe { PoolAllocator::new_uninit(MAX_SUPPORTED_ALIGNMENT, base_address, config) }); diff --git a/iceoryx2-cal/tests/shm_allocator_trait_tests.rs b/iceoryx2-cal/tests/shm_allocator_trait_tests.rs index 225179d12..ddb2489ee 100644 --- a/iceoryx2-cal/tests/shm_allocator_trait_tests.rs +++ b/iceoryx2-cal/tests/shm_allocator_trait_tests.rs @@ -149,7 +149,7 @@ mod shm_allocator { let mut test = TestFixture::::new(); test.prepare(); - let sut = unsafe { + let mut sut = unsafe { Sut::new_uninit( 1, NonNull::new_unchecked(test.memory.as_mut_slice()), diff --git a/iceoryx2/src/service/dynamic_config/event.rs b/iceoryx2/src/service/dynamic_config/event.rs index 41a295e57..0da479fa8 100644 --- a/iceoryx2/src/service/dynamic_config/event.rs +++ b/iceoryx2/src/service/dynamic_config/event.rs @@ -72,7 +72,7 @@ impl DynamicConfig { } } - pub(crate) unsafe fn init(&self, allocator: &BumpAllocator) { + pub(crate) unsafe fn 
init(&mut self, allocator: &BumpAllocator) { fatal_panic!(from "event::DynamicConfig::init", when self.listeners.init(allocator), "This should never happen! Unable to initialize listener port id container."); diff --git a/iceoryx2/src/service/dynamic_config/mod.rs b/iceoryx2/src/service/dynamic_config/mod.rs index 576bd4051..42943b51a 100644 --- a/iceoryx2/src/service/dynamic_config/mod.rs +++ b/iceoryx2/src/service/dynamic_config/mod.rs @@ -92,12 +92,12 @@ impl DynamicConfig { Container::::memory_size(max_number_of_nodes) } - pub(crate) unsafe fn init(&self, allocator: &BumpAllocator) { + pub(crate) unsafe fn init(&mut self, allocator: &BumpAllocator) { fatal_panic!(from self, when self.nodes.init(allocator), "This should never happen! Unable to initialize NodeId container."); - match &self.messaging_pattern { - MessagingPattern::PublishSubscribe(ref v) => v.init(allocator), - MessagingPattern::Event(ref v) => v.init(allocator), + match &mut self.messaging_pattern { + MessagingPattern::PublishSubscribe(ref mut v) => v.init(allocator), + MessagingPattern::Event(ref mut v) => v.init(allocator), } } diff --git a/iceoryx2/src/service/dynamic_config/publish_subscribe.rs b/iceoryx2/src/service/dynamic_config/publish_subscribe.rs index 52db35567..9fae84e35 100644 --- a/iceoryx2/src/service/dynamic_config/publish_subscribe.rs +++ b/iceoryx2/src/service/dynamic_config/publish_subscribe.rs @@ -75,7 +75,7 @@ impl DynamicConfig { } } - pub(crate) unsafe fn init(&self, allocator: &BumpAllocator) { + pub(crate) unsafe fn init(&mut self, allocator: &BumpAllocator) { fatal_panic!(from self, when self.subscribers.init(allocator), "This should never happen! Unable to initialize subscriber port id container.");
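
The slotmap tests earlier in this patch exercise the surface of the newly added container: `insert` hands out a `SlotMapKey`, `insert_at` rejects keys beyond the capacity, `remove` frees a slot for later reuse, and `iter` yields key/value pairs. Below is a minimal usage sketch; the module path and the `FixedSizeSlotMap` type name are assumptions inferred from the tests' `FixedSizeSut` alias and the crate's naming scheme for its other fixed-size containers, not something this diff spells out.

```rust
// Assumed names: `slotmap` module, `FixedSizeSlotMap`, `SlotMapKey`.
use iceoryx2_bb_container::slotmap::{FixedSizeSlotMap, SlotMapKey};

fn main() {
    const CAPACITY: usize = 8;
    let mut map = FixedSizeSlotMap::<u64, CAPACITY>::new();

    // insert() returns the key of the occupied slot, or None when full.
    let key: SlotMapKey = map.insert(1234).expect("capacity not yet exhausted");

    // insert_at() refuses keys outside of the capacity (see the
    // insert_at_out_of_bounds_key test above).
    assert!(!map.insert_at(SlotMapKey::new(CAPACITY + 1), 42));

    // Iteration yields the key together with a reference to the value.
    for (k, value) in map.iter() {
        println!("{} -> {}", k.value(), value);
    }

    // Removing frees the slot; a later insert() may reuse it, as the
    // insert_remove_and_insert test demonstrates.
    assert!(map.remove(key));
}
```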
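
The new `GenericPointer` trait, together with the `GenericOwningPointer` and `GenericRelocatablePointer` marker types, lets the owning (movable) and the relocatable (shared-memory) flavor of a container share one implementation instead of two copies that differ only in their pointer member. The sketch below is a self-contained toy analogue of that pattern under simplified assumptions: the pointer types and the container are stand-ins, not the crate's actual `PointerTrait`, `OwningPointer`, or `RelocatablePointer` definitions.

```rust
use std::fmt::Debug;
use std::marker::PhantomData;

// Stand-in for `PointerTrait`: typed read access to some memory location.
trait PointerTrait<T> {
    fn as_ptr(&self) -> *const T;
}

// Stand-in for an owning pointer: owns its memory.
#[derive(Debug)]
struct OwningPtr<T>(Box<T>);

impl<T> PointerTrait<T> for OwningPtr<T> {
    fn as_ptr(&self) -> *const T {
        &*self.0
    }
}

// Stand-in for a relocatable pointer: stores only an offset to its target.
#[derive(Debug)]
struct OffsetPtr<T> {
    distance: isize,
    _phantom: PhantomData<T>,
}

impl<T> PointerTrait<T> for OffsetPtr<T> {
    fn as_ptr(&self) -> *const T {
        // Resolve the target relative to the pointer's own address.
        unsafe { (self as *const Self as *const u8).offset(self.distance) as *const T }
    }
}

// The `GenericPointer` idea: a marker type selects the concrete pointer type
// through a generic associated type.
trait GenericPointer {
    type Type<T: Debug>: PointerTrait<T> + Debug;
}

#[derive(Debug)]
struct GenericOwningPointer;
#[derive(Debug)]
struct GenericOffsetPointer;

impl GenericPointer for GenericOwningPointer {
    type Type<T: Debug> = OwningPtr<T>;
}

impl GenericPointer for GenericOffsetPointer {
    type Type<T: Debug> = OffsetPtr<T>;
}

// One shared implementation; the movable and the shared-memory variants
// become thin type aliases over the same struct.
#[derive(Debug)]
struct Container<T: Debug, Ptr: GenericPointer> {
    data_ptr: Ptr::Type<T>,
    len: usize,
}

type OwningContainer<T> = Container<T, GenericOwningPointer>;
type RelocContainer<T> = Container<T, GenericOffsetPointer>;

fn main() {
    let c: OwningContainer<u64> = Container {
        data_ptr: OwningPtr(Box::new(42)),
        len: 1,
    };
    println!("{:?}, first element: {}", c, unsafe { *c.data_ptr.as_ptr() });
}
```

The design choice this illustrates: by pushing the pointer selection into a marker type, only the type aliases differ between the two variants, so every algorithm on the container exists exactly once.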