Mirror of https://github.com/lisk77/comet.git (synced 2025-10-23 21:38:50 +00:00)
feat(ecs): limited archetype creation to at most three unique components to lower archetype creation to polynomial complexity
This commit is contained in:
parent 0da5200916
commit d04c706a94

5 changed files with 409 additions and 421 deletions
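For context on the complexity claim in the commit title: an entity with n distinct components previously got an archetype for every non-empty subset of its component set, i.e. 2^n - 1 archetypes, while restricting archetypes to subsets of at most three components caps this at C(n,1) + C(n,2) + C(n,3), which grows as O(n^3). A standalone sketch of the two counts (plain integers stand in for component ids; nothing here uses comet's types):

// Compares the number of archetype keys the old powerset approach produces
// with the new "subsets of size <= 3" approach, for a few component counts.
fn powerset_count(n: usize) -> usize {
    // every non-empty subset of n components
    (1usize << n) - 1
}

fn bounded_subset_count(n: usize) -> usize {
    // C(n,1) + C(n,2) + C(n,3): singles, pairs and triples only
    n + n * (n - 1) / 2 + n * (n - 1) * (n - 2) / 6
}

fn main() {
    for n in [3, 5, 10, 20] {
        println!(
            "{n:>2} components: powerset = {:>7}, size<=3 subsets = {:>5}",
            powerset_count(n),
            bounded_subset_count(n)
        );
    }
}

For ten components that is 175 subsets instead of 1023, and for twenty it is 1350 instead of 1048575.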
@@ -1,7 +1,5 @@
use comet_colors::{Color as ColorTrait, LinearRgba};
use comet_ecs::{
    Camera2D, Color, Component, Entity, Render2D, Scene, Text, Transform2D, Transform3D,
};
use comet_ecs::{Camera2D, Component, Entity, Render2D, Scene, Text, Transform2D, Transform3D};
use comet_input::keyboard::Key;
use comet_log::*;
use comet_renderer::renderer::Renderer;
@@ -215,16 +215,14 @@ impl Scene {
            self.create_archetype(new_component_set.clone());
        }

        let powerset = ComponentSet::powerset(new_component_set.to_vec());
        let subsets = ComponentSet::compute_subsets_up_to_size_3(new_component_set.to_vec());

        for subset in powerset {
            let component_set = ComponentSet::from_ids(subset.iter().cloned().collect());

            if !self.archetypes.contains_archetype(&component_set) {
                self.create_archetype(component_set.clone());
        for subset in subsets {
            if !self.archetypes.contains_archetype(&subset) {
                self.create_archetype(subset.clone());
            }

            self.add_entity_to_archetype(entity_id as u32, component_set);
            self.add_entity_to_archetype(entity_id as u32, subset);
        }

        info!(
@@ -256,16 +254,14 @@ impl Scene {
            self.create_archetype(new_component_set.clone());
        }

        let powerset = ComponentSet::powerset(new_component_set.to_vec());
        let subsets = ComponentSet::compute_subsets_up_to_size_3(new_component_set.to_vec());

        for subset in powerset {
            let component_set = ComponentSet::from_ids(subset.iter().cloned().collect());

            if !self.archetypes.contains_archetype(&component_set) {
                self.create_archetype(component_set.clone());
        for subset in subsets {
            if !self.archetypes.contains_archetype(&subset) {
                self.create_archetype(subset.clone());
            }

            self.add_entity_to_archetype(entity_id as u32, component_set);
            self.add_entity_to_archetype(entity_id as u32, subset);
        }
    }

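The two hunks above replace the powerset walk with ComponentSet::compute_subsets_up_to_size_3 when an entity's component set changes, registering the entity under every subset of size one to three. A minimal standalone sketch of that registration flow, using a plain HashMap keyed by sorted TypeId sets as a stand-in for comet's archetype storage (ArchetypeMap, register_entity and subsets_up_to_size_3 are illustrative names, not comet API):

use std::any::TypeId;
use std::collections::{BTreeSet, HashMap};

// Stand-in for the archetype storage touched in the hunks above:
// a map from a sorted component-id set to the entities stored under it.
type ArchetypeMap = HashMap<BTreeSet<TypeId>, Vec<u32>>;

// Register `entity_id` under every subset (size 1..=3) of its component set,
// creating the archetype entry on first use, mirroring the loop in the diff.
fn register_entity(archetypes: &mut ArchetypeMap, entity_id: u32, components: &[TypeId]) {
    for subset in subsets_up_to_size_3(components) {
        archetypes.entry(subset).or_default().push(entity_id);
    }
}

fn subsets_up_to_size_3(ids: &[TypeId]) -> Vec<BTreeSet<TypeId>> {
    let n = ids.len();
    let mut out = Vec::new();
    for i in 0..n {
        out.push(BTreeSet::from([ids[i]]));
        for j in (i + 1)..n {
            out.push(BTreeSet::from([ids[i], ids[j]]));
            for k in (j + 1)..n {
                out.push(BTreeSet::from([ids[i], ids[j], ids[k]]));
            }
        }
    }
    out
}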
@@ -295,6 +291,10 @@ impl Scene {
    /// Returns a list of entities that have the given components.
    pub fn get_entities_with(&self, components: Vec<TypeId>) -> Vec<usize> {
        let component_set = ComponentSet::from_ids(components);
        if component_set.size() > 3 {
            error!("An entity query should only contain at most 3 different components!");
            return Vec::new();
        }
        if self.archetypes.contains_archetype(&component_set) {
            return self
                .archetypes
@@ -316,7 +316,7 @@ impl Scene {
        }
    }

    /// Iterates over all entities that have the given components and calls the given function.
    /// Iterates over all entities that have the two given components and calls the given function.
    pub fn foreach<C: Component, K: Component>(&mut self, func: fn(&mut C, &mut K)) {
        let entities = self.get_entities_with(vec![C::type_id(), K::type_id()]);
        for entity in entities {
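The guard added to get_entities_with follows from the new archetype policy: archetypes only exist for component sets of size one to three, so a query over more than three distinct components has no archetype to look up. A stand-in sketch of that check over the same kind of map as in the previous sketch (not comet's Archetypes type):

use std::any::TypeId;
use std::collections::{BTreeSet, HashMap};

type ArchetypeMap = HashMap<BTreeSet<TypeId>, Vec<u32>>;

// Mirrors the capped query: reject anything with more than three distinct
// components, otherwise return the entities stored under that exact subset.
fn entities_with(archetypes: &ArchetypeMap, components: &[TypeId]) -> Vec<u32> {
    let key: BTreeSet<TypeId> = components.iter().copied().collect();
    if key.len() > 3 {
        eprintln!("An entity query should only contain at most 3 different components!");
        return Vec::new();
    }
    archetypes.get(&key).cloned().unwrap_or_default()
}

The two-parameter foreach above then amounts to looking up the pair archetype for (C, K) and running the callback over the entities stored there.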
@@ -1,299 +1,293 @@
use std::{
    alloc::{
        handle_alloc_error,
        Layout
    },
    any::TypeId,
    hash::{
        Hash,
        Hasher
    },
    mem::MaybeUninit,
    ptr::NonNull
};
use std::ptr;
use std::{
    alloc::{handle_alloc_error, Layout},
    any::TypeId,
    mem::MaybeUninit,
    ptr::NonNull,
};

// The following two structs are just blatantly stolen from Bevy - another Rust game engine.
// I just need them for the ComponentStorage system, and I was too lazy to write them myself.

#[derive(Debug, Clone)]
pub struct BlobVec {
    item_layout: Layout,
    capacity: usize,
    len: usize,
    data: NonNull<u8>,
    swap_scratch: NonNull<u8>,
    drop: unsafe fn(*mut u8)
    item_layout: Layout,
    capacity: usize,
    len: usize,
    data: NonNull<u8>,
    swap_scratch: NonNull<u8>,
    drop: unsafe fn(*mut u8),
}

impl BlobVec {
    pub fn new(item_layout: Layout, drop: unsafe fn(*mut u8), capacity: usize) -> Self {
        if item_layout.size() == 0 {
            BlobVec {
                swap_scratch: NonNull::dangling(),
                data: NonNull::dangling(),
                capacity: usize:: MAX,
                len: 0,
                item_layout,
                drop,
            }
        }
        else {
            let swap_scratch = NonNull::new(unsafe { std::alloc::alloc(item_layout) })
                .unwrap_or_else(|| handle_alloc_error(item_layout));
    pub fn new(item_layout: Layout, drop: unsafe fn(*mut u8), capacity: usize) -> Self {
        if item_layout.size() == 0 {
            BlobVec {
                swap_scratch: NonNull::dangling(),
                data: NonNull::dangling(),
                capacity: usize::MAX,
                len: 0,
                item_layout,
                drop,
            }
        } else {
            let swap_scratch = NonNull::new(unsafe { std::alloc::alloc(item_layout) })
                .unwrap_or_else(|| handle_alloc_error(item_layout));

            let mut blob_vec = BlobVec {
                swap_scratch,
                data: NonNull::dangling(),
                capacity: 0,
                len: 0,
                item_layout,
                drop,
            };
            blob_vec.reserve_exact(capacity);
            blob_vec
        }
    }
            let mut blob_vec = BlobVec {
                swap_scratch,
                data: NonNull::dangling(),
                capacity: 0,
                len: 0,
                item_layout,
                drop,
            };
            blob_vec.reserve_exact(capacity);
            blob_vec
        }
    }

    pub fn reserve_exact(&mut self, additional: usize) {
        let available_space = self.capacity - self.len;
        if available_space < additional {
            self.grow_exact(additional - available_space);
        }
    }
    pub fn reserve_exact(&mut self, additional: usize) {
        let available_space = self.capacity - self.len;
        if available_space < additional {
            self.grow_exact(additional - available_space);
        }
    }

    fn grow_exact(&mut self, increment: usize) {
        debug_assert!(self.item_layout.size() != 0);
    fn grow_exact(&mut self, increment: usize) {
        debug_assert!(self.item_layout.size() != 0);

        let new_capacity = self.capacity + increment;
        let new_layout =
            array_layout(&self.item_layout, new_capacity).expect("array layout should be valid");
        unsafe {
            let new_data = if self.capacity == 0 {
                std::alloc::alloc(new_layout)
            } else {
                std::alloc::realloc(
                    self.get_ptr().as_ptr(),
                    array_layout(&self.item_layout, self.capacity)
                        .expect("array layout should be valid"),
                    new_layout.size(),
                )
            };
        let new_capacity = self.capacity + increment;
        let new_layout =
            array_layout(&self.item_layout, new_capacity).expect("array layout should be valid");
        unsafe {
            let new_data = if self.capacity == 0 {
                std::alloc::alloc(new_layout)
            } else {
                std::alloc::realloc(
                    self.get_ptr().as_ptr(),
                    array_layout(&self.item_layout, self.capacity)
                        .expect("array layout should be valid"),
                    new_layout.size(),
                )
            };

            self.data = NonNull::new(new_data).unwrap_or_else(|| handle_alloc_error(new_layout));
        }
        self.capacity = new_capacity;
    }
            self.data = NonNull::new(new_data).unwrap_or_else(|| handle_alloc_error(new_layout));
        }
        self.capacity = new_capacity;
    }

    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }
    #[inline]
    pub fn len(&self) -> usize {
        self.len
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    #[inline]
    pub fn capacity(&self) -> usize {
        self.capacity
    }
    #[inline]
    pub fn capacity(&self) -> usize {
        self.capacity
    }

    #[inline]
    pub unsafe fn get_ptr(&self) -> NonNull<u8> {
        self.data
    }

    #[inline]
    pub unsafe fn get_ptr(&self) -> NonNull<u8> {
        self.data
    }
    #[inline]
    pub unsafe fn push_uninit(&mut self) -> usize {
        self.reserve_exact(1);
        let index = self.len;
        self.len += 1;
        index
    }

    #[inline]
    pub unsafe fn push_uninit(&mut self) -> usize {
        self.reserve_exact(1);
        let index = self.len;
        self.len += 1;
        index
    }
    #[inline]
    pub unsafe fn get_unchecked(&self, index: usize) -> *mut u8 {
        debug_assert!(index < self.len());
        self.get_ptr().as_ptr().add(index * self.item_layout.size())
    }

    #[inline]
    pub unsafe fn get_unchecked(&self, index: usize) -> *mut u8 {
        debug_assert!(index < self.len());
        self.get_ptr().as_ptr().add(index * self.item_layout.size())
    }
    #[inline]
    pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> *mut u8 {
        debug_assert!(index < self.len());
        self.get_ptr().as_ptr().add(index * self.item_layout.size())
    }

    #[inline]
    pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> *mut u8 {
        debug_assert!(index < self.len());
        self.get_ptr().as_ptr().add(index * self.item_layout.size())
    }
    pub unsafe fn push_element<T>(&mut self, element: T) {
        let index = self.push_uninit();
        let ptr = self.get_unchecked(index) as *mut T;
        ptr::write(ptr, element);
    }

    pub unsafe fn push_element<T>(&mut self, element: T) {
        let index = self.push_uninit();
        let ptr = self.get_unchecked(index) as *mut T;
        ptr::write(ptr,element);
    }
    pub fn clear(&mut self) {
        let len = self.len;
        // We set len to 0 _before_ dropping elements for unwind safety. This ensures we don't
        // accidentally drop elements twice in the event of a drop impl panicking.
        self.len = 0;
        for i in 0..len {
            unsafe {
                // NOTE: this doesn't use self.get_unchecked(i) because the debug_assert on index
                // will panic here due to self.len being set to 0
                let ptr = self.get_ptr().as_ptr().add(i * self.item_layout.size());
                (self.drop)(ptr);
            }
        }
    }

    pub fn clear(&mut self) {
        let len = self.len;
        // We set len to 0 _before_ dropping elements for unwind safety. This ensures we don't
        // accidentally drop elements twice in the event of a drop impl panicking.
        self.len = 0;
        for i in 0..len {
            unsafe {
                // NOTE: this doesn't use self.get_unchecked(i) because the debug_assert on index
                // will panic here due to self.len being set to 0
                let ptr = self.get_ptr().as_ptr().add(i * self.item_layout.size());
                (self.drop)(ptr);
            }
        }
    }
    #[inline]
    pub unsafe fn swap_remove_and_forget_unchecked(&mut self, index: usize) -> *mut u8 {
        debug_assert!(index < self.len());
        let last = self.len - 1;
        let swap_scratch = self.swap_scratch.as_ptr();
        ptr::copy_nonoverlapping(
            self.get_unchecked(index),
            swap_scratch,
            self.item_layout.size(),
        );
        ptr::copy(
            self.get_unchecked(last),
            self.get_unchecked(index),
            self.item_layout.size(),
        );
        self.len -= 1;
        swap_scratch
    }

    #[inline]
    pub unsafe fn swap_remove_and_forget_unchecked(&mut self, index: usize) -> *mut u8 {
        debug_assert!(index < self.len());
        let last = self.len - 1;
        let swap_scratch = self.swap_scratch.as_ptr();
        ptr::copy_nonoverlapping(
            self.get_unchecked(index),
            swap_scratch,
            self.item_layout.size(),
        );
        ptr::copy(
            self.get_unchecked(last),
            self.get_unchecked(index),
            self.item_layout.size(),
        );
        self.len -= 1;
        swap_scratch
    }

    #[inline]
    pub unsafe fn initialize_unchecked(&mut self, index: usize, value: *mut u8) {
        debug_assert!(index < self.len());
        let ptr = self.get_unchecked(index);
        ptr::copy_nonoverlapping(value, ptr, self.item_layout.size());
    }
    #[inline]
    pub unsafe fn initialize_unchecked(&mut self, index: usize, value: *mut u8) {
        debug_assert!(index < self.len());
        let ptr = self.get_unchecked(index);
        ptr::copy_nonoverlapping(value, ptr, self.item_layout.size());
    }
}

impl Drop for BlobVec {
    fn drop(&mut self) {
        self.clear();
        let array_layout =
            array_layout(&self.item_layout, self.capacity).expect("array layout should be valid");
        if array_layout.size() > 0 {
            unsafe {
                std::alloc::dealloc(self.get_ptr().as_ptr(), array_layout);
                std::alloc::dealloc(self.swap_scratch.as_ptr(), self.item_layout);
            }
        }
    }
    fn drop(&mut self) {
        self.clear();
        let array_layout =
            array_layout(&self.item_layout, self.capacity).expect("array layout should be valid");
        if array_layout.size() > 0 {
            unsafe {
                std::alloc::dealloc(self.get_ptr().as_ptr(), array_layout);
                std::alloc::dealloc(self.swap_scratch.as_ptr(), self.item_layout);
            }
        }
    }
}

unsafe impl Send for BlobVec {}
unsafe impl Sync for BlobVec {}

fn array_layout(layout: &Layout, n: usize) -> Option<Layout> {
    let (array_layout, offset) = repeat_layout(layout, n)?;
    debug_assert_eq!(layout.size(), offset);
    Some(array_layout)
    let (array_layout, offset) = repeat_layout(layout, n)?;
    debug_assert_eq!(layout.size(), offset);
    Some(array_layout)
}

fn repeat_layout(layout: &Layout, n: usize) -> Option<(Layout, usize)> {
    let padded_size = layout.size() + padding_needed_for(layout, layout.align());
    let alloc_size = padded_size.checked_mul(n)?;
    let padded_size = layout.size() + padding_needed_for(layout, layout.align());
    let alloc_size = padded_size.checked_mul(n)?;

    unsafe {
        Some((
            Layout::from_size_align_unchecked(alloc_size, layout.align()),
            padded_size,
        ))
    }
    unsafe {
        Some((
            Layout::from_size_align_unchecked(alloc_size, layout.align()),
            padded_size,
        ))
    }
}

const fn padding_needed_for(layout: &Layout, align: usize) -> usize {
    let len = layout.size();
    let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    len_rounded_up.wrapping_sub(len)
    let len = layout.size();
    let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    len_rounded_up.wrapping_sub(len)
}
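The three helpers above reproduce the array-layout arithmetic: round the item size up to the next multiple of its alignment and use that as the per-element stride. A standalone check of that bit trick with concrete numbers (this mirrors the arithmetic on plain integers, it is not the functions above):

// Round `size` up to the next multiple of `align` (align must be a power of two),
// exactly as padding_needed_for does with wrapping arithmetic and a mask.
fn padding_for(size: usize, align: usize) -> usize {
    let rounded = size.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    rounded.wrapping_sub(size)
}

fn main() {
    // A 5-byte item with 4-byte alignment is padded to an 8-byte stride,
    // so an array of 10 such items needs 80 bytes.
    let (size, align, n) = (5usize, 4usize, 10usize);
    let stride = size + padding_for(size, align);
    assert_eq!(stride, 8);
    assert_eq!(stride * n, 80);
    println!("stride = {stride}, allocation for {n} items = {}", stride * n);
}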

#[derive(Debug, Clone)]
pub struct Column {
    pub data: BlobVec
    pub data: BlobVec,
}

impl Column {
    pub fn new<T: 'static>(capacity: usize) -> Self {
        let layout = Layout::new::<T>();
        let drop_fn = |ptr: *mut u8| unsafe {
            ptr::drop_in_place(ptr as *mut T);
        };
        Self {
            data: BlobVec::new(layout, drop_fn, capacity),
        }
    }
    pub fn new<T: 'static>(capacity: usize) -> Self {
        let layout = Layout::new::<T>();
        let drop_fn = |ptr: *mut u8| unsafe {
            ptr::drop_in_place(ptr as *mut T);
        };
        Self {
            data: BlobVec::new(layout, drop_fn, capacity),
        }
    }

    pub fn data(&self) -> BlobVec {
        self.data.clone()
    }
    pub fn data(&self) -> BlobVec {
        self.data.clone()
    }

    pub fn push<T: 'static>(&mut self, item: T) {
        assert_eq!(TypeId::of::<T>(), TypeId::of::<T>(), "Type mismatch");
        unsafe {
            let index = self.data.push_uninit();
            let ptr = self.data.get_unchecked(index);
            ptr::write(ptr as *mut T, item);
        }
    }
    pub fn push<T: 'static>(&mut self, item: T) {
        assert_eq!(TypeId::of::<T>(), TypeId::of::<T>(), "Type mismatch");
        unsafe {
            let index = self.data.push_uninit();
            let ptr = self.data.get_unchecked(index);
            ptr::write(ptr as *mut T, item);
        }
    }

    pub fn get<T: 'static>(&self, index: usize) -> Option<&T> {
        assert_eq!(TypeId::of::<T>(), TypeId::of::<T>(), "Type mismatch");
        if index >= self.data.len() {
            return None;
        }
        unsafe {
            let ptr = self.data.get_unchecked(index);
            Some(&*(ptr as *const T))
        }
    }
    pub fn get<T: 'static>(&self, index: usize) -> Option<&T> {
        assert_eq!(TypeId::of::<T>(), TypeId::of::<T>(), "Type mismatch");
        if index >= self.data.len() {
            return None;
        }
        unsafe {
            let ptr = self.data.get_unchecked(index);
            Some(&*(ptr as *const T))
        }
    }

    pub fn get_mut<T: 'static>(&mut self, index: usize) -> Option<&mut T> {
        assert_eq!(TypeId::of::<T>(), TypeId::of::<T>(), "Type mismatch");
    pub fn get_mut<T: 'static>(&mut self, index: usize) -> Option<&mut T> {
        assert_eq!(TypeId::of::<T>(), TypeId::of::<T>(), "Type mismatch");

        if index >= self.data.len() {
            return None;
        }
        if index >= self.data.len() {
            return None;
        }

        // Access the element at the given index
        unsafe {
            let ptr = self.data.get_unchecked(index);
            // Convert the pointer to a mutable reference and return it
            Some(&mut *(ptr as *mut T))
        }
    }
        // Access the element at the given index
        unsafe {
            let ptr = self.data.get_unchecked(index);
            // Convert the pointer to a mutable reference and return it
            Some(&mut *(ptr as *mut T))
        }
    }

    pub fn remove<T: 'static>(&mut self, index: usize) -> Option<T> {
        assert_eq!(TypeId::of::<T>(), TypeId::of::<T>(), "Type mismatch");
        if index >= self.data.len() {
            return None;
        }
        unsafe {
            let ptr = self.data.swap_remove_and_forget_unchecked(index);
            Some(ptr::read(ptr as *const T))
        }
    }
    pub fn remove<T: 'static>(&mut self, index: usize) -> Option<T> {
        assert_eq!(TypeId::of::<T>(), TypeId::of::<T>(), "Type mismatch");
        if index >= self.data.len() {
            return None;
        }
        unsafe {
            let ptr = self.data.swap_remove_and_forget_unchecked(index);
            Some(ptr::read(ptr as *const T))
        }
    }

    pub fn swap(&mut self, index1: usize, index2: usize) {
        assert!(index1 < self.data.len() && index2 < self.data.len(), "Index out of bounds");
    pub fn swap(&mut self, index1: usize, index2: usize) {
        assert!(
            index1 < self.data.len() && index2 < self.data.len(),
            "Index out of bounds"
        );

        unsafe {
            let ptr1 = self.data.get_unchecked(index1);
            let ptr2 = self.data.get_unchecked(index2);
        unsafe {
            let ptr1 = self.data.get_unchecked(index1);
            let ptr2 = self.data.get_unchecked(index2);

            let mut temp = MaybeUninit::<u8>::uninit();
            let mut temp = MaybeUninit::<u8>::uninit();

            // Swap the elements at index1 and index2
            ptr::copy_nonoverlapping(ptr1, temp.as_mut_ptr(), self.data.item_layout.size());
            ptr::copy_nonoverlapping(ptr2, ptr1, self.data.item_layout.size());
            ptr::copy_nonoverlapping(temp.as_ptr(), ptr2, self.data.item_layout.size());
        }
    }
            // Swap the elements at index1 and index2
            ptr::copy_nonoverlapping(ptr1, temp.as_mut_ptr(), self.data.item_layout.size());
            ptr::copy_nonoverlapping(ptr2, ptr1, self.data.item_layout.size());
            ptr::copy_nonoverlapping(temp.as_ptr(), ptr2, self.data.item_layout.size());
        }
    }
}
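Column wraps the type-erased BlobVec behind a typed API, but note that the assert_eq!(TypeId::of::<T>(), TypeId::of::<T>(), ...) guards compare T with itself, so nothing stops a caller from reading with a different type than was pushed; keeping T consistent is the caller's responsibility. A hedged usage sketch, assuming Column is in scope from this module (the component type and values are made up):

// Each Column stores exactly one component type; the caller must pass the
// same T to new, push, get, get_mut and remove.
fn column_demo() {
    let mut positions = Column::new::<(f32, f32)>(4);
    positions.push((1.0f32, 2.0f32));
    positions.push((3.0f32, 4.0f32));

    assert_eq!(positions.get::<(f32, f32)>(0), Some(&(1.0, 2.0)));

    if let Some(p) = positions.get_mut::<(f32, f32)>(1) {
        p.0 += 10.0;
    }

    // remove swap-removes from the BlobVec and returns the element by value
    assert_eq!(positions.remove::<(f32, f32)>(0), Some((1.0, 2.0)));
}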
@@ -4,53 +4,82 @@ use std::hash::{Hash, Hasher};

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ComponentSet {
    set: HashSet<TypeId>
    set: HashSet<TypeId>,
}

impl ComponentSet {
    pub fn new() -> Self {
        Self {
            set: HashSet::new()
        }
    }
    pub fn new() -> Self {
        Self {
            set: HashSet::new(),
        }
    }

    pub fn from_ids(ids: Vec<TypeId>) -> Self {
        Self {
            set: ids.into_iter().collect()
        }
    }
    pub fn from_ids(ids: Vec<TypeId>) -> Self {
        Self {
            set: ids.into_iter().collect(),
        }
    }

    pub fn powerset(ids: Vec<TypeId>) -> Vec<HashSet<TypeId>> {
        let n = ids.len();
        let mut subsets: Vec<HashSet<TypeId>> = Vec::with_capacity(1 << n);

        for mask in 0..(1 << n) {
            let mut subset = HashSet::new();
            for i in 0..n {
                if (mask & (1 << i)) != 0 {
                    subset.insert(ids[i].clone());
                }
            }
            subsets.push(subset);
        }
        subsets.remove(0);
    pub fn compute_subsets_up_to_size_3(ids: Vec<TypeId>) -> Vec<ComponentSet> {
        let mut result = Vec::new();
        let n = ids.len();

        subsets
    }
        for i in 0..n {
            result.push(ComponentSet::from_ids(vec![ids[i]]));
        }

    pub fn is_subset(&self, other: &ComponentSet) -> bool {
        self.set.is_subset(&other.set)
    }
        for i in 0..n {
            for j in (i + 1)..n {
                result.push(ComponentSet::from_ids(vec![ids[i], ids[j]]));
            }
        }

    pub fn to_vec(&self) -> Vec<TypeId> {
        self.set.iter().cloned().collect()
    }
        for i in 0..n {
            for j in (i + 1)..n {
                for k in (j + 1)..n {
                    result.push(ComponentSet::from_ids(vec![ids[i], ids[j], ids[k]]));
                }
            }
        }

        result
    }

    pub fn powerset(ids: Vec<TypeId>) -> Vec<HashSet<TypeId>> {
        let n = ids.len();
        let mut subsets: Vec<HashSet<TypeId>> = Vec::with_capacity(1 << n);

        for mask in 0..(1 << n) {
            let mut subset = HashSet::new();
            for i in 0..n {
                if (mask & (1 << i)) != 0 {
                    subset.insert(ids[i].clone());
                }
            }
            subsets.push(subset);
        }
        subsets.remove(0);

        subsets
    }

    pub fn is_subset(&self, other: &ComponentSet) -> bool {
        self.set.is_subset(&other.set)
    }

    pub fn to_vec(&self) -> Vec<TypeId> {
        self.set.iter().cloned().collect()
    }

    pub fn size(&self) -> usize {
        self.set.len()
    }
}

impl Hash for ComponentSet {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let mut types: Vec<TypeId> = self.set.iter().cloned().collect();
        types.sort();
        types.hash(state);
    }
}
    fn hash<H: Hasher>(&self, state: &mut H) {
        let mut types: Vec<TypeId> = self.set.iter().cloned().collect();
        types.sort();
        types.hash(state);
    }
}
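A quick check one could add next to compute_subsets_up_to_size_3, assuming the test sits in the same module as ComponentSet (the sample types are arbitrary): four ids should yield C(4,1) + C(4,2) + C(4,3) = 14 subsets, none larger than three.

#[cfg(test)]
mod subset_tests {
    use super::ComponentSet;
    use std::any::TypeId;

    #[test]
    fn counts_match_binomials() {
        let ids = vec![
            TypeId::of::<u8>(),
            TypeId::of::<u16>(),
            TypeId::of::<u32>(),
            TypeId::of::<u64>(),
        ];
        let subsets = ComponentSet::compute_subsets_up_to_size_3(ids);
        assert_eq!(subsets.len(), 14);
        assert!(subsets.iter().all(|s| (1..=3).contains(&s.size())));
    }
}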
@@ -1,142 +1,109 @@
use crate::Column;
use std::hash::{
    Hash,
};

#[derive(Debug, Clone)]
pub struct SparseSet {
    sparse: Vec<Option<Vec<Option<usize>>>>,
    dense: Column,
    page_size: usize
    sparse: Vec<Option<Vec<Option<usize>>>>,
    dense: Column,
    page_size: usize,
}

impl SparseSet {
    pub fn new<T: 'static>(capacity: usize, page_size: usize) -> Self {
        Self {
            sparse: Vec::new(),
            dense: Column::new::<T>(capacity),
            page_size
        }
    }
    pub fn new<T: 'static>(capacity: usize, page_size: usize) -> Self {
        Self {
            sparse: Vec::new(),
            dense: Column::new::<T>(capacity),
            page_size,
        }
    }

    pub fn insert<T: 'static>(&mut self, index: usize, value: T) {
        let page = index / self.page_size;
    pub fn insert<T: 'static>(&mut self, index: usize, value: T) {
        let page = index / self.page_size;

        if page >= self.sparse.len() {
            self.sparse.resize(page + 1, None);
        }
        if page >= self.sparse.len() {
            self.sparse.resize(page + 1, None);
        }

        if self.sparse[page].is_none() {
            self.sparse[page] = Some(vec![None; self.page_size]);
        }
        if self.sparse[page].is_none() {
            self.sparse[page] = Some(vec![None; self.page_size]);
        }

        if let Some(page_vec) = &mut self.sparse[page] {
            page_vec[index % self.page_size] = Some(self.dense.data.len());
        }
        if let Some(page_vec) = &mut self.sparse[page] {
            page_vec[index % self.page_size] = Some(self.dense.data.len());
        }

        self.dense.push(value);
    }
        self.dense.push(value);
    }

    pub fn remove<T: 'static>(&mut self, index: usize) -> Option<T> {
        if let Some(page_vec) = self.sparse.get(index / self.page_size).and_then(|x| x.as_ref()) {
            if let Some(sparse_index) = page_vec.get(index % self.page_size).and_then(|x| x.as_ref()) {
                let dense_index = *sparse_index;
                let last_index = self.dense.data.len() - 1;
                if dense_index != last_index {
                    self.dense.swap(dense_index, last_index);
                    if let Some(page_vec) = self.sparse.get_mut(last_index / self.page_size).and_then(|x| x.as_mut()) {
                        page_vec[last_index % self.page_size] = Some(dense_index);
                    }
                }
                if let Some(page_vec) = self.sparse.get_mut(index / self.page_size).and_then(|x| x.as_mut()) {
                    page_vec[index % self.page_size] = None;
                }
                return self.dense.remove::<T>(last_index);
            }
        }
        None
    }
    pub fn remove<T: 'static>(&mut self, index: usize) -> Option<T> {
        if let Some(page_vec) = self
            .sparse
            .get(index / self.page_size)
            .and_then(|x| x.as_ref())
        {
            if let Some(sparse_index) = page_vec
                .get(index % self.page_size)
                .and_then(|x| x.as_ref())
            {
                let dense_index = *sparse_index;
                let last_index = self.dense.data.len() - 1;
                if dense_index != last_index {
                    self.dense.swap(dense_index, last_index);
                    if let Some(page_vec) = self
                        .sparse
                        .get_mut(last_index / self.page_size)
                        .and_then(|x| x.as_mut())
                    {
                        page_vec[last_index % self.page_size] = Some(dense_index);
                    }
                }
                if let Some(page_vec) = self
                    .sparse
                    .get_mut(index / self.page_size)
                    .and_then(|x| x.as_mut())
                {
                    page_vec[index % self.page_size] = None;
                }
                return self.dense.remove::<T>(last_index);
            }
        }
        None
    }

    pub fn get<T: 'static>(&self, index: usize) -> Option<&T> {
        if let Some(page_vec) = self.sparse.get(index / self.page_size).and_then(|x| x.as_ref()) {
            if let Some(sparse_index) = page_vec.get(index % self.page_size).and_then(|x| x.as_ref()) {
                self.dense.get::<T>(*sparse_index)
            }
            else {
                None
            }
        }
        else {
            None
        }
    }
    pub fn get<T: 'static>(&self, index: usize) -> Option<&T> {
        if let Some(page_vec) = self
            .sparse
            .get(index / self.page_size)
            .and_then(|x| x.as_ref())
        {
            if let Some(sparse_index) = page_vec
                .get(index % self.page_size)
                .and_then(|x| x.as_ref())
            {
                self.dense.get::<T>(*sparse_index)
            } else {
                None
            }
        } else {
            None
        }
    }

    pub fn get_mut<T: 'static>(&mut self, index: usize) -> Option<&mut T> {
        if let Some(page_vec) = self.sparse.get(index / self.page_size).and_then(|x| x.as_ref()) {
            if let Some(sparse_index) = page_vec.get(index % self.page_size).and_then(|x| x.as_ref()) {
                self.dense.get_mut::<T>(*sparse_index)
            }
            else {
                None
            }
        }
        else {
            None
        }
    }
    pub fn get_mut<T: 'static>(&mut self, index: usize) -> Option<&mut T> {
        if let Some(page_vec) = self
            .sparse
            .get(index / self.page_size)
            .and_then(|x| x.as_ref())
        {
            if let Some(sparse_index) = page_vec
                .get(index % self.page_size)
                .and_then(|x| x.as_ref())
            {
                self.dense.get_mut::<T>(*sparse_index)
            } else {
                None
            }
        } else {
            None
        }
    }
}

/*#[derive(Debug, Clone)]
pub struct SparseSet {
    sparse: Vec<Option<usize>>,
    dense: Column,
}

impl SparseSet {
    pub fn new<T: 'static>(capacity: usize) -> Self {
        Self {
            sparse: Vec::new(),
            dense: Column::new::<T>(capacity),
        }
    }

    pub fn insert<T: 'static>(&mut self, index: usize, value: T) {
        if index >= self.sparse.len() {
            self.sparse.resize(index + 1, None);
        }
        self.sparse[index] = Some(self.dense.data.len());
        self.dense.push(value);
    }

    pub fn remove<T: 'static>(&mut self, index: usize) -> Option<T>{
        if let Some(sparse_index) = self.sparse.get(index).and_then(|x| x.as_ref()) {
            let dense_index = *sparse_index;
            let last_index = self.dense.data.len() - 1;
            if dense_index != last_index {
                self.dense.swap(dense_index, last_index);
                if let Some(sparse) = self.sparse.get_mut(last_index) {
                    *sparse = Some(dense_index);
                }
            }
            self.sparse[index] = None;
            self.dense.remove::<T>(last_index)
        }
        else {
            None
        }
    }

    pub fn get<T: 'static>(&self, index: usize) -> Option<&T> {
        match self.sparse.get(index).and_then(|x| x.as_ref()) {
            Some(sparse_index) => self.dense.get::<T>(*sparse_index),
            None => None,
        }
    }

    pub fn get_mut<T: 'static>(&mut self, index: usize) -> Option<&mut T> {
        match self.sparse.get(index).and_then(|x| x.as_ref()) {
            Some(sparse_index) => self.dense.get_mut::<T>(*sparse_index),
            None => None,
        }
    }
}*/
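A usage sketch for the paged SparseSet above, assuming the type is in scope from this module (indices and values are made up). An index is split into page = index / page_size and offset = index % page_size, so only pages that are actually touched get a backing vector:

fn sparse_set_demo() {
    let mut healths = SparseSet::new::<u32>(16, 64);

    healths.insert::<u32>(3, 100);      // page 0, offset 3
    healths.insert::<u32>(70_000, 250); // page 1093, offset 48

    assert_eq!(healths.get::<u32>(3), Some(&100));
    assert_eq!(healths.get::<u32>(70_000), Some(&250));
    assert_eq!(healths.get::<u32>(5), None);

    if let Some(hp) = healths.get_mut::<u32>(3) {
        *hp -= 40;
    }
    assert_eq!(healths.get::<u32>(3), Some(&60));
}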