diff --git a/comet_structs/Cargo.toml b/comet_structs/Cargo.toml
new file mode 100644
index 0000000..16c508b
--- /dev/null
+++ b/comet_structs/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "comet_structs"
+version = "0.1.0"
+edition = "2024"
+
+[dependencies]
diff --git a/comet_structs/src/column.rs b/comet_structs/src/column.rs
new file mode 100644
index 0000000..d465491
--- /dev/null
+++ b/comet_structs/src/column.rs
@@ -0,0 +1,318 @@
+use std::{
+    alloc::{
+        handle_alloc_error,
+        Layout
+    },
+    any::TypeId,
+    mem::MaybeUninit,
+    ptr::{
+        self,
+        NonNull
+    }
+};
+
+// A type-erased vector: the element type is known only through its `Layout`
+// and a type-erased `drop` function pointer.
+#[derive(Debug)]
+pub struct BlobVec {
+    item_layout: Layout,
+    capacity: usize,
+    len: usize,
+    data: NonNull<u8>,
+    swap_scratch: NonNull<u8>,
+    drop: unsafe fn(*mut u8)
+}
+
+impl BlobVec {
+    pub fn new(item_layout: Layout, drop: unsafe fn(*mut u8), capacity: usize) -> Self {
+        if item_layout.size() == 0 {
+            // Zero-sized types need no allocation; report "infinite" capacity.
+            BlobVec {
+                swap_scratch: NonNull::dangling(),
+                data: NonNull::dangling(),
+                capacity: usize::MAX,
+                len: 0,
+                item_layout,
+                drop,
+            }
+        } else {
+            let swap_scratch = NonNull::new(unsafe { std::alloc::alloc(item_layout) })
+                .unwrap_or_else(|| handle_alloc_error(item_layout));
+
+            let mut blob_vec = BlobVec {
+                swap_scratch,
+                data: NonNull::dangling(),
+                capacity: 0,
+                len: 0,
+                item_layout,
+                drop,
+            };
+            blob_vec.reserve_exact(capacity);
+            blob_vec
+        }
+    }
+
+    pub fn reserve_exact(&mut self, additional: usize) {
+        let available_space = self.capacity - self.len;
+        if available_space < additional {
+            self.grow_exact(additional - available_space);
+        }
+    }
+
+    fn grow_exact(&mut self, increment: usize) {
+        debug_assert!(self.item_layout.size() != 0);
+
+        let new_capacity = self.capacity + increment;
+        let new_layout =
+            array_layout(&self.item_layout, new_capacity).expect("array layout should be valid");
+        unsafe {
+            let new_data = if self.capacity == 0 {
+                std::alloc::alloc(new_layout)
+            } else {
+                std::alloc::realloc(
+                    self.get_ptr().as_ptr(),
+                    array_layout(&self.item_layout, self.capacity)
+                        .expect("array layout should be valid"),
+                    new_layout.size(),
+                )
+            };
+
+            self.data = NonNull::new(new_data).unwrap_or_else(|| handle_alloc_error(new_layout));
+        }
+        self.capacity = new_capacity;
+    }
+
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.len
+    }
+
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.len == 0
+    }
+
+    #[inline]
+    pub fn capacity(&self) -> usize {
+        self.capacity
+    }
+
+    #[inline]
+    pub unsafe fn get_ptr(&self) -> NonNull<u8> {
+        self.data
+    }
+
+    // Reserves a slot for one element and returns its index; the slot is
+    // left uninitialized and must be written before it is read or dropped.
+    #[inline]
+    pub unsafe fn push_uninit(&mut self) -> usize {
+        self.reserve_exact(1);
+        let index = self.len;
+        self.len += 1;
+        index
+    }
+
+    #[inline]
+    pub unsafe fn get_unchecked(&self, index: usize) -> *mut u8 {
+        debug_assert!(index < self.len());
+        unsafe { self.get_ptr().as_ptr().add(index * self.item_layout.size()) }
+    }
+
+    #[inline]
+    pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> *mut u8 {
+        debug_assert!(index < self.len());
+        unsafe { self.get_ptr().as_ptr().add(index * self.item_layout.size()) }
+    }
+
+    pub unsafe fn push_element<T>(&mut self, element: T) {
+        debug_assert_eq!(Layout::new::<T>(), self.item_layout);
+        let index = unsafe { self.push_uninit() };
+        let ptr = unsafe { self.get_unchecked(index) as *mut T };
+        unsafe { ptr::write(ptr, element) };
+    }
+
+    pub fn clear(&mut self) {
+        let len = self.len;
+        // We set len to 0 _before_ dropping elements for unwind safety. This ensures we don't
+        // accidentally drop elements twice in the event of a drop impl panicking.
+        self.len = 0;
+        for i in 0..len {
+            unsafe {
+                // NOTE: this doesn't use self.get_unchecked(i) because the debug_assert on index
+                // will panic here due to self.len being set to 0
+                let ptr = self.get_ptr().as_ptr().add(i * self.item_layout.size());
+                (self.drop)(ptr);
+            }
+        }
+    }
+
+    // Swap-removes the element at `index` and returns a pointer to its bytes
+    // in the scratch buffer; ownership passes to the caller, nothing is dropped.
+    #[inline]
+    pub unsafe fn swap_remove_and_forget_unchecked(&mut self, index: usize) -> *mut u8 {
+        debug_assert!(index < self.len());
+        let last = self.len - 1;
+        let swap_scratch = self.swap_scratch.as_ptr();
+        unsafe {
+            ptr::copy_nonoverlapping(
+                self.get_unchecked(index),
+                swap_scratch,
+                self.item_layout.size(),
+            );
+            ptr::copy(
+                self.get_unchecked(last),
+                self.get_unchecked(index),
+                self.item_layout.size(),
+            );
+        }
+        self.len -= 1;
+        swap_scratch
+    }
+
+    // Copies one element's bytes from `value` into the slot at `index`; the
+    // caller must ensure the source is forgotten, not dropped, afterwards.
+    #[inline]
+    pub unsafe fn initialize_unchecked(&mut self, index: usize, value: *mut u8) {
+        debug_assert!(index < self.len());
+        unsafe {
+            let ptr = self.get_unchecked(index);
+            ptr::copy_nonoverlapping(value, ptr, self.item_layout.size());
+        }
+    }
+}
+
+impl Drop for BlobVec {
+    fn drop(&mut self) {
+        self.clear();
+        if self.item_layout.size() > 0 {
+            unsafe {
+                let array_layout = array_layout(&self.item_layout, self.capacity)
+                    .expect("array layout should be valid");
+                if array_layout.size() > 0 {
+                    std::alloc::dealloc(self.get_ptr().as_ptr(), array_layout);
+                }
+                // The scratch buffer exists whenever the item is not
+                // zero-sized, even if the vec itself never grew.
+                std::alloc::dealloc(self.swap_scratch.as_ptr(), self.item_layout);
+            }
+        }
+    }
+}
+
+unsafe impl Send for BlobVec {}
+unsafe impl Sync for BlobVec {}
+
+fn array_layout(layout: &Layout, n: usize) -> Option<Layout> {
+    let (array_layout, offset) = repeat_layout(layout, n)?;
+    debug_assert_eq!(layout.size(), offset);
+    Some(array_layout)
+}
+
+fn repeat_layout(layout: &Layout, n: usize) -> Option<(Layout, usize)> {
+    let padded_size = layout.size() + padding_needed_for(layout, layout.align());
+    let alloc_size = padded_size.checked_mul(n)?;
+
+    unsafe {
+        Some((
+            Layout::from_size_align_unchecked(alloc_size, layout.align()),
+            padded_size,
+        ))
+    }
+}
+
+const fn padding_needed_for(layout: &Layout, align: usize) -> usize {
+    let len = layout.size();
+    let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
+    len_rounded_up.wrapping_sub(len)
+}
+
+// A `BlobVec` tagged with the `TypeId` it was created for, so the typed
+// accessors can detect a mismatched `T` at runtime.
+#[derive(Debug)]
+pub struct Column {
+    type_id: TypeId,
+    pub data: BlobVec
+}
+
+impl Column {
+    pub fn new<T: 'static>(capacity: usize) -> Self {
+        let layout = Layout::new::<T>();
+        let drop_fn = |ptr: *mut u8| unsafe {
+            ptr::drop_in_place(ptr as *mut T);
+        };
+        Self {
+            type_id: TypeId::of::<T>(),
+            data: BlobVec::new(layout, drop_fn, capacity),
+        }
+    }
+
+    pub fn data(&self) -> &BlobVec {
+        &self.data
+    }
+
+    pub fn push<T: 'static>(&mut self, item: T) {
+        assert_eq!(TypeId::of::<T>(), self.type_id, "Type mismatch");
+        unsafe {
+            let index = self.data.push_uninit();
+            let ptr = self.data.get_unchecked(index);
+            ptr::write(ptr as *mut T, item);
+        }
+    }
+
+    pub fn get<T: 'static>(&self, index: usize) -> Option<&T> {
+        assert_eq!(TypeId::of::<T>(), self.type_id, "Type mismatch");
+        if index >= self.data.len() {
+            return None;
+        }
+        unsafe {
+            let ptr = self.data.get_unchecked(index);
+            Some(&*(ptr as *const T))
+        }
+    }
+
+    pub fn get_mut<T: 'static>(&mut self, index: usize) -> Option<&mut T> {
+        assert_eq!(TypeId::of::<T>(), self.type_id, "Type mismatch");
+
+        if index >= self.data.len() {
+            return None;
+        }
+
+        // Access the element at the given index
+        unsafe {
+            let ptr = self.data.get_unchecked(index);
+            // Convert the pointer to a mutable reference and return it
+            Some(&mut *(ptr as *mut T))
+        }
+    }
+
+    pub fn remove<T: 'static>(&mut self, index: usize) -> Option<T> {
+        assert_eq!(TypeId::of::<T>(), self.type_id, "Type mismatch");
+        if index >= self.data.len() {
+            return None;
+        }
+        unsafe {
+            let ptr = self.data.swap_remove_and_forget_unchecked(index);
+            Some(ptr::read(ptr as *const T))
+        }
+    }
+
+    fn swap<T: 'static>(&mut self, index1: usize, index2: usize) {
+        assert_eq!(TypeId::of::<T>(), self.type_id, "Type mismatch");
+        assert!(index1 < self.data.len() && index2 < self.data.len(), "Index out of bounds");
+        if index1 == index2 {
+            return;
+        }
+
+        unsafe {
+            let ptr1 = self.data.get_unchecked(index1);
+            let ptr2 = self.data.get_unchecked(index2);
+
+            let mut temp = MaybeUninit::<T>::uninit();
+
+            // Swap the elements at index1 and index2 through a temporary
+            ptr::copy_nonoverlapping(ptr1, temp.as_mut_ptr() as *mut u8, self.data.item_layout.size());
+            ptr::copy_nonoverlapping(ptr2, ptr1, self.data.item_layout.size());
+            ptr::copy_nonoverlapping(temp.as_ptr() as *const u8, ptr2, self.data.item_layout.size());
+        }
+    }
+}
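A quick usage sketch for the `Column` API above, since the turbofish-heavy calls are easy to misread: the element type is fixed by `Column::new::<T>`, and every typed accessor re-checks it against the stored `TypeId`. The `Position` type and the values here are hypothetical, used only for illustration:

```rust
use comet_structs::Column;

// Hypothetical component type for this example only.
#[derive(Debug, PartialEq)]
struct Position { x: f32, y: f32 }

fn main() {
    let mut column = Column::new::<Position>(4);
    column.push(Position { x: 1.0, y: 2.0 });
    column.push(Position { x: 3.0, y: 4.0 });

    assert_eq!(column.get::<Position>(0), Some(&Position { x: 1.0, y: 2.0 }));

    // remove() swap-removes: the last element is moved into slot 0.
    let removed = column.remove::<Position>(0);
    assert_eq!(removed, Some(Position { x: 1.0, y: 2.0 }));
    assert_eq!(column.get::<Position>(0), Some(&Position { x: 3.0, y: 4.0 }));
}
```

Calling `column.push(123_i32)` here would panic with "Type mismatch" rather than corrupt memory, which is the point of carrying a `TypeId` alongside the type-erased `BlobVec`.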
diff --git a/comet_structs/src/flat_map.rs b/comet_structs/src/flat_map.rs
new file mode 100644
index 0000000..4b69170
--- /dev/null
+++ b/comet_structs/src/flat_map.rs
@@ -0,0 +1,64 @@
+pub struct MapNode<K, V> {
+    key: K,
+    value: V
+}
+
+impl<K, V> MapNode<K, V> {
+    pub fn new(key: K, value: V) -> Self {
+        Self {
+            key,
+            value
+        }
+    }
+
+    pub fn key(&self) -> &K {
+        &self.key
+    }
+
+    pub fn value(&self) -> &V {
+        &self.value
+    }
+}
+
+// A map stored as a flat vector of nodes; lookups are linear scans, which
+// is simple and cache-friendly for small numbers of keys.
+pub struct FlatMap<K, V> {
+    map: Vec<MapNode<K, V>>
+}
+
+impl<K: PartialEq, V> FlatMap<K, V> {
+    pub fn new() -> Self {
+        Self {
+            map: Vec::new()
+        }
+    }
+
+    pub fn insert(&mut self, key: K, value: V) {
+        let node = MapNode::new(key, value);
+        self.map.push(node);
+    }
+
+    pub fn remove(&mut self, key: &K) {
+        // Removing while iterating would invalidate the iterator; retain
+        // filters out matching nodes in a single pass instead.
+        self.map.retain(|node| node.key() != key);
+    }
+
+    pub fn get(&self, key: &K) -> Option<&V> {
+        for node in &self.map {
+            if node.key() == key {
+                return Some(&node.value);
+            }
+        }
+        None
+    }
+
+    pub fn get_mut(&mut self, key: &K) -> Option<&mut V> {
+        for node in &mut self.map {
+            if node.key() == key {
+                return Some(&mut node.value);
+            }
+        }
+        None
+    }
+}
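`FlatMap` trades hashing for a linear scan over a contiguous `Vec`, which is often the faster choice for a handful of keys. A usage sketch, assuming the `FlatMap` re-export added to `lib.rs` below; the keys and values are arbitrary examples:

```rust
use comet_structs::FlatMap;

fn main() {
    let mut map: FlatMap<&str, i32> = FlatMap::new();
    map.insert("health", 100);
    map.insert("mana", 50);

    assert_eq!(map.get(&"health"), Some(&100));

    // get_mut returns a mutable reference into the backing Vec.
    if let Some(mana) = map.get_mut(&"mana") {
        *mana -= 10;
    }
    assert_eq!(map.get(&"mana"), Some(&40));

    map.remove(&"health");
    assert_eq!(map.get(&"health"), None);
}
```

Note that `insert` does not check for duplicate keys: inserting the same key twice leaves both nodes in the vector, and `get` returns the first match.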
diff --git a/comet_structs/src/lib.rs b/comet_structs/src/lib.rs
new file mode 100644
index 0000000..52f5f9c
--- /dev/null
+++ b/comet_structs/src/lib.rs
@@ -0,0 +1,7 @@
+pub use column::Column;
+pub use flat_map::FlatMap;
+pub use sparse_set::SparseSet;
+
+mod column;
+mod flat_map;
+mod sparse_set;
diff --git a/comet_structs/src/sparse_set.rs b/comet_structs/src/sparse_set.rs
new file mode 100644
index 0000000..0771134
--- /dev/null
+++ b/comet_structs/src/sparse_set.rs
@@ -0,0 +1,89 @@
+use crate::Column;
+use std::ptr;
+
+#[derive(Debug)]
+pub struct SparseSet {
+    // sparse[external_index] -> index into the dense column
+    sparse: Vec<Option<usize>>,
+    // indices[dense_index] -> external index, used to patch `sparse` after
+    // a swap-remove in the dense column
+    indices: Vec<usize>,
+    dense: Column,
+    len: usize
+}
+
+impl SparseSet {
+    pub fn new<T: 'static>(capacity: usize) -> Self {
+        Self {
+            sparse: Vec::with_capacity(capacity),
+            indices: Vec::with_capacity(capacity),
+            dense: Column::new::<T>(capacity),
+            len: 0
+        }
+    }
+
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.len
+    }
+
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.len == 0
+    }
+
+    pub fn set<T: 'static>(&mut self, index: usize, element: T) {
+        if index >= self.sparse.len() {
+            self.sparse.resize_with(index + 1, || None);
+        }
+
+        if let Some(column_index) = self.sparse[index] {
+            // Explicitly drop the existing component before replacing it
+            unsafe {
+                let existing_ptr = self.dense.data.get_unchecked_mut(column_index) as *mut T;
+                ptr::drop_in_place(existing_ptr);
+                ptr::write(existing_ptr, element);
+            }
+        } else {
+            let column_index = unsafe { self.dense.data.push_uninit() };
+            unsafe {
+                self.dense.data.initialize_unchecked(column_index, &element as *const T as *mut u8);
+            }
+            // initialize_unchecked copied the bytes out of `element`; forget
+            // the original so it is not dropped a second time here.
+            std::mem::forget(element);
+            self.sparse[index] = Some(column_index);
+            self.indices.push(index);
+            self.len += 1;
+        }
+    }
+
+    pub fn remove<T: 'static>(&mut self, index: usize) -> Option<T> {
+        let column_index = self.sparse.get_mut(index)?.take()?;
+
+        let element = unsafe {
+            let ptr = self.dense.data.swap_remove_and_forget_unchecked(column_index);
+            ptr::read(ptr as *const T)
+        };
+
+        // The last dense element was swapped into `column_index`; patch the
+        // sparse entry that pointed at it.
+        self.indices.swap_remove(column_index);
+        if let Some(&moved) = self.indices.get(column_index) {
+            self.sparse[moved] = Some(column_index);
+        }
+        self.len -= 1;
+
+        Some(element)
+    }
+
+    pub fn get<T: 'static>(&self, index: usize) -> Option<&T> {
+        let column_index = (*self.sparse.get(index)?)?;
+        self.dense.get::<T>(column_index)
+    }
+
+    pub fn get_mut<T: 'static>(&mut self, index: usize) -> Option<&mut T> {
+        let column_index = (*self.sparse.get(index)?)?;
+        self.dense.get_mut::<T>(column_index)
+    }
+}
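`SparseSet` composes the two structures: `sparse` maps external indices (entity ids, say) to slots in the dense `Column`, and the `indices` back-mapping keeps `sparse` consistent when a removal swap-moves the last dense element. A sketch of the intended use; `Velocity` is a hypothetical component type:

```rust
use comet_structs::SparseSet;

// Hypothetical component type for this example only.
#[derive(Debug, PartialEq)]
struct Velocity { dx: f32, dy: f32 }

fn main() {
    let mut set = SparseSet::new::<Velocity>(8);
    set.set(0, Velocity { dx: 1.0, dy: 0.0 });
    set.set(5, Velocity { dx: 0.0, dy: -1.0 });

    assert_eq!(set.len(), 2);

    // Removing index 0 swap-removes in the dense column; index 5's
    // sparse entry is patched so it still resolves to its component.
    let removed = set.remove::<Velocity>(0);
    assert_eq!(removed, Some(Velocity { dx: 1.0, dy: 0.0 }));
    assert_eq!(set.get::<Velocity>(5), Some(&Velocity { dx: 0.0, dy: -1.0 }));
    assert_eq!(set.get::<Velocity>(0), None);
}
```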