-rw-r--r--  ecs/src/uid.rs  | 12
-rw-r--r--  ecs/src/util.rs | 88
2 files changed, 95 insertions, 5 deletions
diff --git a/ecs/src/uid.rs b/ecs/src/uid.rs
index 0e5d88a..60167d3 100644
--- a/ecs/src/uid.rs
+++ b/ecs/src/uid.rs
@@ -1,10 +1,12 @@
use std::mem::transmute;
use std::sync::atomic::{AtomicU32, Ordering};
+use crate::util::{gen_mask_64, BitMask, NumberExt};
+
static NEXT: AtomicU32 = AtomicU32::new(1);
-// Bit 0 and 1 for the kind
-const KIND_BITS: u64 = 0x03;
+const ID_BITS: BitMask<u64> = BitMask::new(gen_mask_64!(32..=63));
+const KIND_BITS: BitMask<u64> = BitMask::new(gen_mask_64!(0..=1));
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[repr(u8)]
@@ -26,10 +28,10 @@ impl Uid
/// Returns a new unique entity/component ID.
pub fn new_unique(kind: Kind) -> Self
{
- let id_part = NEXT.fetch_add(1, Ordering::Relaxed);
+ let id = NEXT.fetch_add(1, Ordering::Relaxed);
Self {
- inner: (u64::from(id_part) << 32) | kind as u64,
+ inner: ID_BITS.field_prep(id as u64) | KIND_BITS.field_prep(kind as u64),
}
}
@@ -38,6 +40,6 @@ impl Uid
{
// SAFETY: The kind bits cannot be invalid since they are set using the Kind enum
// in the new_unique function
- unsafe { transmute((self.inner & KIND_BITS) as u8) }
+ unsafe { transmute(self.inner.field_get(KIND_BITS) as u8) }
}
}
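
The uid.rs change is intended to be behavior-preserving: ID_BITS and KIND_BITS cover the same bit ranges the old hand-written shift and mask did. A minimal sketch of that equivalence, assuming the BitMask, NumberExt and gen_mask_64 items added to ecs/src/util.rs below; the test name is illustrative and not part of the commit:

#[test]
fn uid_bit_layout_matches_old_packing()
{
    // gen_mask_64!(32..=63) selects the upper 32 bits, gen_mask_64!(0..=1) the two lowest bits.
    assert_eq!(ID_BITS.value(), 0xffff_ffff_0000_0000);
    assert_eq!(KIND_BITS.value(), 0b11);

    // field_prep shifts a value into the mask's range, matching the old `u64::from(id) << 32`.
    assert_eq!(ID_BITS.field_prep(7), 7u64 << 32);

    // field_get masks and shifts back down, matching the old `inner & KIND_BITS` with mask 0x03.
    let inner = ID_BITS.field_prep(7) | KIND_BITS.field_prep(2);
    assert_eq!(inner.field_get(KIND_BITS), 2);
}
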
diff --git a/ecs/src/util.rs b/ecs/src/util.rs
index 4480fc8..0344e89 100644
--- a/ecs/src/util.rs
+++ b/ecs/src/util.rs
@@ -1,3 +1,5 @@
+use std::ops::BitAnd;
+
pub trait Sortable
{
type Item;
@@ -46,3 +48,89 @@ impl<Item> Sortable for Vec<Item>
self.sort_by_key(func);
}
}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct BitMask<Value>
+{
+ mask: Value,
+}
+
+impl BitMask<u64>
+{
+ #[must_use]
+ pub const fn new(mask: u64) -> Self
+ {
+ Self { mask }
+ }
+
+ pub const fn value(self) -> u64
+ {
+ self.mask
+ }
+
+ /// Prepares a bitfield value in the range of bits specified by this `BitMask`.
+ #[must_use]
+ pub const fn field_prep(self, field_value: u64) -> u64
+ {
+ (field_value << self.mask.trailing_zeros()) & self.mask
+ }
+}
+
+impl BitAnd<u64> for BitMask<u64>
+{
+ type Output = u64;
+
+ fn bitand(self, rhs: u64) -> Self::Output
+ {
+ self.mask & rhs
+ }
+}
+
+pub trait NumberExt: Sized
+{
+ /// Returns a range of bits (field) specified by the provided [`BitMask`].
+ fn field_get(self, field_mask: BitMask<Self>) -> Self;
+}
+
+impl NumberExt for u64
+{
+ fn field_get(self, field_mask: BitMask<Self>) -> Self
+ {
+ (field_mask & self) >> field_mask.value().trailing_zeros()
+ }
+}
+
+macro_rules! gen_mask_64 {
+ ($low: literal..=$high: literal) => {
+ const {
+ if $high <= $low {
+ panic!("High bit index cannot be less than or equal to low bit index");
+ }
+
+ (((!0u64) - (1u64 << ($low)) + 1)
+ & (!0u64 >> (u64::BITS as u64 - 1 - ($high))))
+ }
+ };
+}
+
+pub(crate) use gen_mask_64;
+
+#[cfg(test)]
+mod tests
+{
+
+ use super::BitMask;
+ use crate::util::NumberExt;
+
+ #[test]
+ fn field_get_works()
+ {
+ assert_eq!(0b11011u64.field_get(BitMask::new(0b11100)), 0b00110);
+ }
+
+ #[test]
+ fn bitmask_field_prep_works()
+ {
+ assert_eq!(BitMask::new(0b11000).field_prep(3), 0b11000);
+ }
+}
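
The helpers added to util.rs follow a field_prep/field_get pattern (similar in spirit to the Linux kernel's FIELD_PREP/FIELD_GET macros): field_prep shifts a value up into the bit range described by a mask, and field_get extracts it back out. A minimal round-trip sketch, assuming only the definitions above; the FLAGS constant and pack_unpack function are illustrative names, not part of the commit:

use crate::util::{gen_mask_64, BitMask, NumberExt};

// A four-bit field occupying bits 8..=11 (mask 0x0f00).
const FLAGS: BitMask<u64> = BitMask::new(gen_mask_64!(8..=11));

fn pack_unpack(value: u64) -> u64
{
    // Shift `value` into bits 8..=11, then read the same field back out.
    let packed = FLAGS.field_prep(value);
    packed.field_get(FLAGS)
}

// pack_unpack(0b1010) == 0b1010; values wider than four bits are truncated to the
// field by the `& mask` in field_prep, so pack_unpack(0b1_0101) == 0b0101.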