From 291152b241bcc20a63c4f365871d594691b4ff30 Mon Sep 17 00:00:00 2001 From: Andrew Sonin Date: Fri, 13 Oct 2023 18:01:39 +0300 Subject: [PATCH] Initial commit --- .github/workflows/ci.yml | 99 +++ .gitignore | 20 + Cargo.toml | 39 ++ README.md | 4 + src/collections.rs | 2 + src/collections/blazemap.rs | 527 ++++++++++++++ src/collections/blazemap/entries.rs | 208 ++++++ src/collections/blazemap/iters.rs | 655 ++++++++++++++++++ src/lib.rs | 38 + src/loom.rs | 148 ++++ src/sync.rs | 8 + src/traits.rs | 164 +++++ src/type_gen.rs | 226 ++++++ src/type_gen/key_wrapper.rs | 324 +++++++++ src/type_gen/key_wrapper_bounded.rs | 158 +++++ src/type_gen/plain_id.rs | 178 +++++ src/type_info_containers.rs | 3 + src/type_info_containers/key_wrapper.rs | 144 ++++ .../key_wrapper_bounded.rs | 211 ++++++ src/type_info_containers/plain_id.rs | 85 +++ src/utils.rs | 2 + src/utils/offset_provider.rs | 70 ++ tests/action.rs | 646 +++++++++++++++++ tests/loom.rs | 330 +++++++++ tests/loom.sh | 17 + tests/miri.rs | 131 ++++ 26 files changed, 4437 insertions(+) create mode 100644 .github/workflows/ci.yml create mode 100644 .gitignore create mode 100644 Cargo.toml create mode 100644 README.md create mode 100644 src/collections.rs create mode 100644 src/collections/blazemap.rs create mode 100644 src/collections/blazemap/entries.rs create mode 100644 src/collections/blazemap/iters.rs create mode 100644 src/lib.rs create mode 100644 src/loom.rs create mode 100644 src/sync.rs create mode 100644 src/traits.rs create mode 100644 src/type_gen.rs create mode 100644 src/type_gen/key_wrapper.rs create mode 100644 src/type_gen/key_wrapper_bounded.rs create mode 100644 src/type_gen/plain_id.rs create mode 100644 src/type_info_containers.rs create mode 100644 src/type_info_containers/key_wrapper.rs create mode 100644 src/type_info_containers/key_wrapper_bounded.rs create mode 100644 src/type_info_containers/plain_id.rs create mode 100644 src/utils.rs create mode 100644 src/utils/offset_provider.rs create mode 100644 tests/action.rs create mode 100644 tests/loom.rs create mode 100755 tests/loom.sh create mode 100644 tests/miri.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..6977c32 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,99 @@ +name: CI + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +env: + CARGO_TERM_COLOR: always + RUSTFLAGS: -Dwarnings + RUST_BACKTRACE: 1 + RUST_LOG: blazemap + +jobs: + fmt: + name: "Fmt" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: rustup show active-toolchain -v + - run: rustup component add rustfmt + - run: cargo fmt --version + - run: cargo fmt -- --check + + build: + name: "Build" + needs: fmt + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Build project + run: cargo build --all-targets --all-features + + docs: + name: "Docs" + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Build Documentation + run: cargo doc --all --no-deps --release + + clippy: + name: "Clippy" + needs: fmt + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Add clippy + run: rustup component add clippy + - name: Clippy version + run: cargo clippy --version + - name: Run clippy + run: cargo clippy + - name: Run clippy with all features + run: cargo clippy --all-targets --all-features + - name: Run clippy on tests + run: cargo clippy --tests 
--all-targets --all-features + + tests: + name: "Tests" + needs: build + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Run tests + run: cargo test + - name: Run tests with all features + run: cargo test --all-features + - name: Run tests with all features in release mode + run: cargo test --all-features --release + + loom: + name: "Loom" + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: rustup show active-toolchain -v + - run: ./tests/loom.sh + + miri: + name: "Miri" + needs: clippy + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Install Miri + run: | + rustup toolchain install nightly --component miri + rustup override set nightly + cargo miri setup + - name: Run tests with Miri + run: cargo miri test --all-features \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a4532c8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,20 @@ +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + +# macOS Finder files +.DS_Store + +# JetBrains files +.idea/ \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..855abfd --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "blazemap" +version = "0.3.0" +authors = ["Andrew Sonin "] +categories = ["data-structures"] +description = """ +Implements a vector-based slab-like map with an interface similar to that of HashMap, \ +and also provides tools for generating lightweight identifiers that can be type-safely used as keys for this map. +""" +keywords = ["map", "slab", "hashmap"] +license = "MIT" +repository = "https://github.com/andrewsonin/blazemap" +readme = "README.md" +edition = "2021" + +[lints.rust] +rust_2018_idioms = "warn" +unreachable_pub = "warn" +missing_docs = "warn" +missing_debug_implementations = "warn" + +[lints.clippy] +pedantic = { level = "warn", priority = -1 } + +[dependencies] +once_cell = "1" +parking_lot = "0.12" +serde = { version = "1", optional = true, features = ["derive"] } + +[target.'cfg(loom)'.dependencies] +loom = { version = "0.7", features = ["checkpoint"] } + +[dev-dependencies] +rand = "0.8" +serde_json = "1" +static_assertions = "1" + +[features] +serde = ["dep:serde"] \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..2c5b276 --- /dev/null +++ b/README.md @@ -0,0 +1,4 @@ +# blazemap + +_Implements a vector-based slab-like map with an interface similar to that of `HashMap`, +and also provides tools for generating lightweight identifiers that can be type-safely used as keys for this map._ \ No newline at end of file diff --git a/src/collections.rs b/src/collections.rs new file mode 100644 index 0000000..0ad97d3 --- /dev/null +++ b/src/collections.rs @@ -0,0 +1,2 @@ +/// Defines [`BlazeMap`](crate::prelude::BlazeMap). 
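+///
+/// A minimal usage sketch; the `Id` key type below is illustrative and is
+/// generated with the `define_plain_id!` macro:
+///
+/// ```rust
+/// use blazemap::prelude::{BlazeMap, define_plain_id};
+///
+/// define_plain_id! {
+///     struct Id;
+///     Derive: { Ord }
+/// }
+///
+/// let first = Id::new();
+/// let second = Id::new();
+///
+/// let mut map = BlazeMap::new();
+/// map.insert(first, "a");
+/// map.insert(second, "b");
+/// assert_eq!(map.len(), 2);
+/// assert_eq!(map.get(first), Some(&"a"));
+/// ```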
+pub mod blazemap; diff --git a/src/collections/blazemap.rs b/src/collections/blazemap.rs new file mode 100644 index 0000000..099d634 --- /dev/null +++ b/src/collections/blazemap.rs @@ -0,0 +1,527 @@ +use std::borrow::Borrow; +use std::fmt::{Debug, Formatter}; +use std::marker::PhantomData; + +#[cfg(feature = "serde")] +use { + crate::prelude::BlazeMapIdWrapper, + serde::{ + de::{MapAccess, Visitor}, + ser::SerializeMap, + Deserialize, Deserializer, Serialize, Serializer, + }, +}; + +use crate::collections::blazemap::entries::VacantEntryInner; +pub use crate::collections::blazemap::{ + entries::{Entry, OccupiedEntry, VacantEntry}, + iters::{Drain, IntoIter, IntoKeys, IntoValues, Iter, IterMut, Keys, Values, ValuesMut}, +}; +use crate::traits::CapacityInfoProvider; +use crate::traits::KeyByOffsetProvider; +use crate::traits::{BlazeMapId, BlazeMapIdStatic, TypeInfoContainer}; + +mod entries; +mod iters; + +/// A [`Vec`]-based analogue of a [`HashMap`](std::collections::HashMap). +#[derive(Clone, PartialEq, Eq)] +pub struct BlazeMap { + pub(in crate::collections::blazemap) inner: Vec>, + pub(in crate::collections::blazemap) len: usize, + phantom: PhantomData, +} + +impl BlazeMap { + /// Creates a new instance of the [`BlazeMap`]. + #[inline] + #[must_use] + pub const fn new() -> Self { + Self { + inner: vec![], + len: 0, + phantom: PhantomData, + } + } + + /// Returns the number of elements in the map. + #[inline] + #[must_use] + pub fn len(&self) -> usize { + self.len + } + + /// Returns `true` if the map contains no elements. + #[inline] + #[must_use] + pub fn is_empty(&self) -> bool { + self.len == 0 + } + + /// Clears the map, removing all key-value pairs. Keeps the allocated memory for reuse. + #[inline] + pub fn clear(&mut self) { + self.inner.clear(); + self.len = 0; + } + + /// Shrinks the capacity of the map as much as possible. + /// It will drop down as much as possible while maintaining the internal rules + /// and possibly leaving some space in accordance with the resize policy. + #[inline] + pub fn shrink_to_fit(&mut self) { + if !self.is_empty() { + let elems_to_crop = self + .inner + .iter() + .rev() + .position(Option::is_some) + .unwrap_or(0); + self.inner.truncate(self.inner.len() - elems_to_crop); + } + self.inner.shrink_to_fit(); + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + } + + /// Clears the map, returning all key-value pairs as an iterator. + /// Keeps the allocated memory for reuse. + /// + /// If the returned iterator is dropped before being fully consumed, + /// it drops the remaining key-value pairs. + /// The returned iterator keeps a mutable borrow on the map to optimize its implementation. + #[inline] + pub fn drain(&mut self) -> Drain<'_, K, V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + Drain { + map: self, + current_position: 0, + } + } +} + +impl BlazeMap +where + K: BlazeMapId, +{ + /// An iterator visiting all key-value pairs. The iterator element type is `(K, &V)`. + #[inline] + #[must_use] + pub fn iter(&self) -> Iter<'_, K, V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + Iter { + inner: self.inner.as_ptr(), + current_position: 0, + len: self.len, + phantom: PhantomData, + } + } + + /// An iterator visiting all key-value pairs, with mutable references to the values. + /// The iterator element type is `(K, &mut V)`. 
+ #[inline] + pub fn iter_mut(&mut self) -> IterMut<'_, K, V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + IterMut { + inner: self.inner.as_mut_ptr(), + current_position: 0, + len: self.len, + phantom: PhantomData, + } + } + + /// An iterator visiting all keys. The iterator element type is `K`. + #[inline] + #[must_use] + pub fn keys(&self) -> Keys<'_, K, V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + Keys { inner: self.iter() } + } + + /// An iterator visiting all values. The iterator element type is `&V`. + #[inline] + #[must_use] + pub fn values(&self) -> Values<'_, K, V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + Values { inner: self.iter() } + } + + /// An iterator visiting all values mutably. The iterator element type is `&mut V`. + #[inline] + pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + ValuesMut { + inner: self.iter_mut(), + } + } +} + +impl BlazeMap +where + K: BlazeMapIdStatic, +{ + /// Creates a new instance of the [`BlazeMap`] + /// with capacity equal to the current total number of unique `K` instances. + #[inline] + #[must_use] + pub fn with_current_key_type_capacity() -> Self { + let current_capacity = K::static_container() + .capacity_info_provider() + .offset_capacity(); + Self { + inner: Vec::with_capacity(current_capacity), + len: 0, + phantom: PhantomData, + } + } +} + +impl BlazeMap +where + K: BlazeMapId, +{ + /// Returns `true` if the map contains a value for the specified key. + #[inline] + pub fn contains_key(&self, key: K) -> bool { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + self.inner + .get(key.get_offset()) + .and_then(Option::as_ref) + .is_some() + } + + /// Returns a reference to the value corresponding to the key. + #[inline] + pub fn get(&self, key: K) -> Option<&V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + self.inner.get(key.get_offset()).and_then(Option::as_ref) + } + + /// Returns a mutable reference to the value corresponding to the key. + #[inline] + pub fn get_mut(&mut self, key: K) -> Option<&mut V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + self.inner + .get_mut(key.get_offset()) + .and_then(Option::as_mut) + } + + /// Inserts a key-value pair into the map. + /// + /// If the map did not have this key present, None is returned. + /// + /// If the map did have this key present, the value is updated, and the old value is returned. + /// The key is not updated, though. + #[inline] + pub fn insert(&mut self, key: K, value: V) -> Option { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + let result = match self.entry(key) { + Entry::Vacant(entry) => { + entry.insert(value); + None + } + Entry::Occupied(mut entry) => Some(entry.insert(value)), + }; + result + } + + /// Removes a key from the map, + /// returning the value at the key if the key was previously in the map. 
+ #[inline] + pub fn remove(&mut self, key: K) -> Option { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + let result = if let Entry::Occupied(entry) = self.entry(key) { + Some(entry.remove()) + } else { + None + }; + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + result + } + + /// Gets the given key’s corresponding entry in the map for in-place manipulation. + #[inline] + pub fn entry(&mut self, key: K) -> Entry<'_, K, V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + let offset = key.get_offset(); + if offset < self.inner.len() { + let value = unsafe { self.inner.get_unchecked_mut(offset) }; + if value.is_some() { + let occupied = OccupiedEntry { + key, + len: &mut self.len, + value, + }; + Entry::Occupied(occupied) + } else { + let vacant = VacantEntry { + key, + len: &mut self.len, + inner: VacantEntryInner::ShouldBeInserted(value), + }; + Entry::Vacant(vacant) + } + } else { + let vacant = VacantEntry { + key, + len: &mut self.len, + inner: VacantEntryInner::ShouldBeEnlarged(&mut self.inner), + }; + Entry::Vacant(vacant) + } + } + + /// Creates a consuming iterator visiting all the keys. + /// The map cannot be used after calling this. The iterator element type is `K`. + #[inline] + #[must_use] + pub fn into_keys(self) -> IntoKeys { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + IntoKeys { + inner: self.into_iter(), + } + } + + /// Creates a consuming iterator visiting all the values. + /// The map cannot be used after calling this. The iterator element type is `V`. + #[inline] + #[must_use] + pub fn into_values(self) -> IntoValues { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + IntoValues { + inner: self.into_iter(), + } + } +} + +impl IntoIterator for BlazeMap +where + K: BlazeMapId, +{ + type Item = (K, V); + type IntoIter = IntoIter; + + #[inline] + fn into_iter(self) -> IntoIter { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + IntoIter { inner: self } + } +} + +impl<'a, K, V> IntoIterator for &'a BlazeMap +where + K: BlazeMapId, +{ + type Item = (K, &'a V); + type IntoIter = Iter<'a, K, V>; + + #[inline] + fn into_iter(self) -> Iter<'a, K, V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + self.iter() + } +} + +impl<'a, K, V> IntoIterator for &'a mut BlazeMap +where + K: BlazeMapId, +{ + type Item = (K, &'a mut V); + type IntoIter = IterMut<'a, K, V>; + + #[inline] + fn into_iter(self) -> IterMut<'a, K, V> { + debug_assert_eq!( + self.inner.iter().filter_map(Option::as_ref).count(), + self.len + ); + self.iter_mut() + } +} + +impl FromIterator<(K, V)> for BlazeMap +where + K: BlazeMapIdStatic, +{ + #[inline] + fn from_iter>(iter: T) -> Self { + let mut result = BlazeMap::with_current_key_type_capacity(); + iter.into_iter().for_each(|(key, value)| { + result.insert(key, value); + }); + debug_assert_eq!( + result.inner.iter().filter_map(Option::as_ref).count(), + result.len + ); + result + } +} + +impl Default for BlazeMap +where + K: BlazeMapId, +{ + #[inline] + fn default() -> Self { + Self::new() + } +} + +macro_rules! 
blaze_map_orig_key_blocking_iter { + ($self:ident, $iter:ident, $guard:ident) => { + let $guard = K::static_container().key_by_offset_provider(); + let $iter = $self + .inner + .iter() + .enumerate() + .filter_map(|(idx, value)| Some((idx, value.as_ref()?))) + .map(|(idx, value)| { + let key = unsafe { $guard.key_by_offset_unchecked(idx) }; + (key, value) + }); + }; +} + +impl Debug for BlazeMap +where + K: BlazeMapIdStatic, + ::OrigType: Debug, + V: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + blaze_map_orig_key_blocking_iter!(self, iter, guard); + let mut debug_map = f.debug_map(); + for (key, value) in iter { + debug_map.entry(key.borrow(), value); + } + debug_map.finish() + } +} + +#[cfg(feature = "serde")] +impl Serialize for BlazeMap +where + K: BlazeMapIdStatic, + ::OrigType: Serialize, + V: Serialize, +{ + #[inline] + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + blaze_map_orig_key_blocking_iter!(self, iter, guard); + let mut serializer = serializer.serialize_map(Some(self.len))?; + for (key, value) in iter { + serializer.serialize_entry(key.borrow(), value)?; + } + serializer.end() + } +} + +#[cfg(feature = "serde")] +impl<'de, K, V> Deserialize<'de> for BlazeMap +where + K: BlazeMapIdWrapper + BlazeMapIdStatic, + ::OrigType: Deserialize<'de>, + V: Deserialize<'de>, +{ + #[inline] + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_map(BlazeMapDeserializer(PhantomData)) + } +} + +#[cfg(feature = "serde")] +struct BlazeMapDeserializer(PhantomData<(K, V)>); + +#[cfg(feature = "serde")] +impl<'de, K, V> Visitor<'de> for BlazeMapDeserializer +where + K: BlazeMapIdWrapper + BlazeMapIdStatic, + ::OrigType: Deserialize<'de>, + V: Deserialize<'de>, +{ + type Value = BlazeMap; + + #[inline] + fn expecting(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result { + write!(formatter, "BlazeMap-compatible map") + } + + #[inline] + fn visit_map(self, mut map: A) -> Result + where + A: MapAccess<'de>, + { + let mut result = BlazeMap::with_current_key_type_capacity(); + + while let Some((key, value)) = map.next_entry::()? { + let key = unsafe { K::new(K::static_container(), key) }; + result.insert(key, value); + } + result.shrink_to_fit(); + debug_assert_eq!( + result.inner.iter().filter_map(Option::as_ref).count(), + result.len + ); + Ok(result) + } +} diff --git a/src/collections/blazemap/entries.rs b/src/collections/blazemap/entries.rs new file mode 100644 index 0000000..98d4855 --- /dev/null +++ b/src/collections/blazemap/entries.rs @@ -0,0 +1,208 @@ +use crate::prelude::BlazeMapId; + +#[derive(Debug)] +/// A view into a single entry in a map, which may either be vacant or occupied. +/// +/// This enum is constructed +/// from the [`entry`] method on [`BlazeMap`](crate::collections::blazemap::BlazeMap). +/// +/// [`entry`]: crate::collections::blazemap::BlazeMap::entry +pub enum Entry<'a, K, V> +where + K: BlazeMapId, +{ + /// An occupied entry. + Occupied(OccupiedEntry<'a, K, V>), + /// A vacant entry. + Vacant(VacantEntry<'a, K, V>), +} + +#[derive(Debug)] +/// A view into an occupied entry in a [`BlazeMap`](crate::collections::blazemap::BlazeMap). +/// It is part of the [`Entry`] enum. 
+pub struct OccupiedEntry<'a, K, V> +where + K: BlazeMapId, +{ + pub(in crate::collections::blazemap) key: K, + + pub(in crate::collections::blazemap) len: &'a mut usize, + + pub(in crate::collections::blazemap) value: &'a mut Option, +} + +#[derive(Debug)] +/// A view into a vacant entry in a [`BlazeMap`](crate::collections::blazemap::BlazeMap). +/// It is part of the [`Entry`] enum. +pub struct VacantEntry<'a, K, V> +where + K: BlazeMapId, +{ + pub(in crate::collections::blazemap) key: K, + + pub(in crate::collections::blazemap) len: &'a mut usize, + + pub(in crate::collections::blazemap) inner: VacantEntryInner<'a, V>, +} + +#[derive(Debug)] +pub(in crate::collections::blazemap) enum VacantEntryInner<'a, V> { + ShouldBeInserted(&'a mut Option), + ShouldBeEnlarged(&'a mut Vec>), +} + +impl<'a, K, V> Entry<'a, K, V> +where + K: BlazeMapId, +{ + /// Ensures a value is in the entry by inserting the default if empty, + /// and returns a mutable reference to the value in the entry. + #[inline] + pub fn or_insert(self, default: V) -> &'a mut V { + match self { + Entry::Occupied(entry) => entry.into_mut(), + Entry::Vacant(entry) => entry.insert(default), + } + } + + /// Ensures a value is in the entry by inserting the result of the default function if empty, + /// and returns a mutable reference to the value in the entry. + #[inline] + pub fn or_insert_with(self, default: impl FnOnce() -> V) -> &'a mut V { + match self { + Entry::Occupied(entry) => entry.into_mut(), + Entry::Vacant(entry) => entry.insert(default()), + } + } + + /// Returns this entry’s key. + #[inline] + pub fn key(&self) -> K { + match self { + Entry::Occupied(entry) => entry.key(), + Entry::Vacant(entry) => entry.key(), + } + } + + /// Provides in-place mutable access + /// to an occupied entry before any potential inserts into the map. + #[inline] + #[must_use] + pub fn and_modify(self, f: impl FnOnce(&mut V)) -> Self { + match self { + Entry::Occupied(mut entry) => { + f(entry.get_mut()); + Entry::Occupied(entry) + } + Entry::Vacant(entry) => Entry::Vacant(entry), + } + } +} + +impl<'a, K, V> Entry<'a, K, V> +where + K: BlazeMapId, + V: Default, +{ + /// Ensures a value is in the entry by inserting the default value if empty, + /// and returns a mutable reference to the value in the entry. + #[inline] + pub fn or_default(self) -> &'a mut V { + match self { + Entry::Occupied(entry) => entry.into_mut(), + Entry::Vacant(entry) => entry.insert(Default::default()), + } + } +} + +impl<'a, K, V> OccupiedEntry<'a, K, V> +where + K: BlazeMapId, +{ + /// Gets the key in the entry. + #[inline] + pub fn key(&self) -> K { + self.key + } + + /// Take the ownership of the key and value from the map. + #[inline] + pub fn remove_entry(self) -> (K, V) { + let Self { key, len, value } = self; + *len -= 1; + let value = unsafe { value.take().unwrap_unchecked() }; + (key, value) + } + + /// Gets a reference to the value in the entry. + #[inline] + pub fn get(&self) -> &V { + unsafe { self.value.as_ref().unwrap_unchecked() } + } + + /// Gets a mutable reference to the value in the entry. + /// + /// If you need a reference to the [`OccupiedEntry`] + /// which may outlive the destruction of the [`Entry`] value, see [`into_mut`]. + /// + /// [`into_mut`]: Self::into_mut + #[inline] + pub fn get_mut(&mut self) -> &mut V { + unsafe { self.value.as_mut().unwrap_unchecked() } + } + + /// Converts the [`OccupiedEntry`] into a mutable reference + /// to the value in the entry with a lifetime bound to the map itself. 
+ /// + /// If you need multiple references to the [`OccupiedEntry`], see [`get_mut`]. + /// + /// [`get_mut`]: Self::get_mut + #[inline] + pub fn into_mut(self) -> &'a mut V { + unsafe { self.value.as_mut().unwrap_unchecked() } + } + + /// Sets the value of the entry, and returns the entry’s old value. + #[inline] + pub fn insert(&mut self, value: V) -> V { + std::mem::replace(self.get_mut(), value) + } + + /// Takes the value out of the entry, and returns it. + #[inline] + pub fn remove(self) -> V { + let Self { len, value, .. } = self; + *len -= 1; + unsafe { value.take().unwrap_unchecked() } + } +} + +impl<'a, K, V> VacantEntry<'a, K, V> +where + K: BlazeMapId, +{ + /// Gets the key that would be used when inserting a value through the [`VacantEntry`]. + #[inline] + pub fn key(&self) -> K { + self.key + } + + /// Sets the value of the entry with the [`VacantEntry`]’s key, + /// and returns a mutable reference to it. + #[inline] + pub fn insert(self, value: V) -> &'a mut V { + let Self { key, len, inner } = self; + *len += 1; + let reference = match inner { + VacantEntryInner::ShouldBeInserted(reference) => reference, + VacantEntryInner::ShouldBeEnlarged(vec) => { + let offset = key.get_offset(); + let new_len = offset + 1; // It's safe to don't use `checked_add`, since `vec` will panic at `isize::MAX` + vec.resize_with(new_len, || None); + unsafe { vec.get_unchecked_mut(offset) } + } + }; + *reference = Some(value); + unsafe { reference.as_mut().unwrap_unchecked() } + } +} diff --git a/src/collections/blazemap/iters.rs b/src/collections/blazemap/iters.rs new file mode 100644 index 0000000..22feef7 --- /dev/null +++ b/src/collections/blazemap/iters.rs @@ -0,0 +1,655 @@ +use std::borrow::Borrow; +use std::fmt::{Debug, Formatter}; +use std::marker::PhantomData; +use std::panic::{RefUnwindSafe, UnwindSafe}; + +use crate::collections::blazemap::BlazeMap; +use crate::prelude::{BlazeMapId, BlazeMapIdStatic}; +use crate::traits::{KeyByOffsetProvider, TypeInfoContainer}; + +/// An iterator over the entries of a [`BlazeMap`]. +/// +/// This `struct` is created by the [`iter`] method on [`BlazeMap`]. See its +/// documentation for more. +/// +/// [`iter`]: BlazeMap::iter +pub struct Iter<'a, K, V> { + pub(in crate::collections::blazemap) inner: *const Option, + + pub(in crate::collections::blazemap) current_position: usize, + + pub(in crate::collections::blazemap) len: usize, + + pub(in crate::collections::blazemap) phantom: PhantomData<(K, &'a V)>, +} + +/// A mutable iterator over the entries of a [`BlazeMap`]. +/// +/// This `struct` is created by the [`iter_mut`] method on [`BlazeMap`]. See its +/// documentation for more. +/// +/// [`iter_mut`]: BlazeMap::iter_mut +pub struct IterMut<'a, K, V> { + pub(in crate::collections::blazemap) inner: *mut Option, + + pub(in crate::collections::blazemap) current_position: usize, + + pub(in crate::collections::blazemap) len: usize, + + pub(in crate::collections::blazemap) phantom: PhantomData<(K, &'a mut V)>, +} + +/// An iterator over the keys of a [`BlazeMap`]. +/// +/// This `struct` is created by the [`keys`] method on [`BlazeMap`]. See its +/// documentation for more. +/// +/// [`keys`]: BlazeMap::keys +pub struct Keys<'a, K, V> { + pub(in crate::collections::blazemap) inner: Iter<'a, K, V>, +} + +/// An iterator over the values of a [`BlazeMap`]. +/// +/// This `struct` is created by the [`values`] method on [`BlazeMap`]. See its +/// documentation for more. 
+/// +/// [`values`]: BlazeMap::values +pub struct Values<'a, K, V> { + pub(in crate::collections::blazemap) inner: Iter<'a, K, V>, +} + +/// A mutable iterator over the values of a [`BlazeMap`]. +/// +/// This `struct` is created by the [`values_mut`] method on [`BlazeMap`]. See its +/// documentation for more. +/// +/// [`values_mut`]: BlazeMap::values_mut +pub struct ValuesMut<'a, K, V> { + pub(in crate::collections::blazemap) inner: IterMut<'a, K, V>, +} + +/// An owning iterator over the entries of a [`BlazeMap`]. +/// +/// This `struct` is created by the [`into_iter`] method on [`BlazeMap`] +/// (provided by the [`IntoIterator`] trait). See its documentation for more. +/// +/// [`into_iter`]: IntoIterator::into_iter +pub struct IntoIter { + pub(in crate::collections::blazemap) inner: BlazeMap, +} + +/// An owning iterator over the keys of a [`BlazeMap`]. +/// +/// This `struct` is created by the [`into_keys`] method on [`BlazeMap`]. +/// See its documentation for more. +/// +/// [`into_keys`]: BlazeMap::into_keys +pub struct IntoKeys { + pub(in crate::collections::blazemap) inner: IntoIter, +} + +/// An owning iterator over the values of a [`BlazeMap`]. +/// +/// This `struct` is created by the [`into_values`] method on [`BlazeMap`]. +/// See its documentation for more. +/// +/// [`into_values`]: BlazeMap::into_values +pub struct IntoValues { + pub(in crate::collections::blazemap) inner: IntoIter, +} + +/// A draining iterator over the entries of a [`BlazeMap`]. +/// +/// This `struct` is created by the [`drain`] method on [`BlazeMap`]. See its +/// documentation for more. +/// +/// [`drain`]: BlazeMap::drain +pub struct Drain<'a, K, V> { + pub(in crate::collections::blazemap) map: &'a mut BlazeMap, + + pub(in crate::collections::blazemap) current_position: usize, +} + +impl<'a, K, V> Iterator for Iter<'a, K, V> +where + K: BlazeMapId, +{ + type Item = (K, &'a V); + + #[inline] + fn next(&mut self) -> Option<(K, &'a V)> { + let Self { + inner, + current_position, + len, + .. + } = self; + if *len == 0 { + return None; + } + unsafe { + loop { + match &*inner.add(*current_position) { + None => { + *current_position += 1; + continue; + } + Some(value) => { + let key = K::from_offset_unchecked(*current_position); + *current_position += 1; + *len -= 1; + return Some((key, value)); + } + } + } + } + } +} + +impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> +where + K: BlazeMapId, +{ + #[inline] + fn len(&self) -> usize { + self.len + } +} + +impl<'a, K, V> Iterator for IterMut<'a, K, V> +where + K: BlazeMapId, +{ + type Item = (K, &'a mut V); + + #[inline] + fn next(&mut self) -> Option<(K, &'a mut V)> { + if self.len == 0 { + return None; + } + unsafe { + loop { + match &mut *self.inner.add(self.current_position) { + None => { + self.current_position += 1; + continue; + } + Some(value) => { + let key = K::from_offset_unchecked(self.current_position); + self.current_position += 1; + self.len -= 1; + return Some((key, value)); + } + } + } + } + } +} + +impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> +where + K: BlazeMapId, +{ + #[inline] + fn len(&self) -> usize { + self.len + } +} + +impl<'a, K, V> Iterator for Keys<'a, K, V> +where + K: BlazeMapId, +{ + type Item = K; + + #[inline] + fn next(&mut self) -> Option { + let Iter { + inner, + current_position, + len, + .. 
+ } = &mut self.inner; + if *len == 0 { + return None; + } + unsafe { + loop { + match &*inner.add(*current_position) { + None => { + *current_position += 1; + continue; + } + Some(_) => { + let key = K::from_offset_unchecked(*current_position); + *current_position += 1; + *len -= 1; + return Some(key); + } + } + } + } + } +} + +impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> +where + K: BlazeMapId, +{ + #[inline] + fn len(&self) -> usize { + self.inner.len + } +} + +impl<'a, K, V> Iterator for Values<'a, K, V> { + type Item = &'a V; + + #[inline] + fn next(&mut self) -> Option<&'a V> { + let Iter { + inner, + current_position, + len, + .. + } = &mut self.inner; + if *len == 0 { + return None; + } + loop { + match unsafe { &*inner.add(*current_position) } { + None => { + *current_position += 1; + continue; + } + Some(value) => { + *current_position += 1; + *len -= 1; + return Some(value); + } + } + } + } +} + +impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> { + #[inline] + fn len(&self) -> usize { + self.inner.len + } +} + +impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { + type Item = &'a mut V; + + #[inline] + fn next(&mut self) -> Option<&'a mut V> { + let inner = &mut self.inner; + if inner.len == 0 { + return None; + } + loop { + match unsafe { &mut *inner.inner.add(inner.current_position) } { + None => { + inner.current_position += 1; + continue; + } + Some(value) => { + inner.current_position += 1; + inner.len -= 1; + return Some(value); + } + } + } + } +} + +impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> { + #[inline] + fn len(&self) -> usize { + self.inner.len + } +} + +impl Iterator for IntoIter +where + K: BlazeMapId, +{ + type Item = (K, V); + + #[inline] + fn next(&mut self) -> Option<(K, V)> { + let BlazeMap { inner, len, .. } = &mut self.inner; + while let Some(back) = inner.pop() { + if let Some(value) = back { + let key = unsafe { K::from_offset_unchecked(inner.len()) }; + *len -= 1; + return Some((key, value)); + } + } + None + } +} + +impl ExactSizeIterator for IntoIter +where + K: BlazeMapId, +{ + #[inline] + fn len(&self) -> usize { + self.inner.len + } +} + +impl Iterator for IntoKeys +where + K: BlazeMapId, +{ + type Item = K; + + #[inline] + fn next(&mut self) -> Option { + let BlazeMap { inner, len, .. } = &mut self.inner.inner; + while let Some(back) = inner.pop() { + if back.is_some() { + let key = unsafe { K::from_offset_unchecked(inner.len()) }; + *len -= 1; + return Some(key); + } + } + None + } +} + +impl ExactSizeIterator for IntoKeys +where + K: BlazeMapId, +{ + #[inline] + fn len(&self) -> usize { + self.inner.len() + } +} + +impl Iterator for IntoValues { + type Item = V; + + #[inline] + fn next(&mut self) -> Option { + let BlazeMap { inner, len, .. 
} = &mut self.inner.inner; + while let Some(back) = inner.pop() { + if let Some(value) = back { + *len -= 1; + return Some(value); + } + } + None + } +} + +impl ExactSizeIterator for IntoValues +where + K: BlazeMapId, +{ + #[inline] + fn len(&self) -> usize { + self.inner.len() + } +} + +impl<'a, K, V> Iterator for Drain<'a, K, V> +where + K: BlazeMapId, +{ + type Item = (K, V); + + #[inline] + fn next(&mut self) -> Option<(K, V)> { + if self.map.len == 0 { + return None; + } + unsafe { + loop { + let value = &mut *self.map.inner.as_mut_ptr().add(self.current_position); + match value.take() { + None => { + self.current_position += 1; + continue; + } + Some(value) => { + let key = K::from_offset_unchecked(self.current_position); + self.map.len -= 1; + self.current_position += 1; + return Some((key, value)); + } + } + } + } + } +} + +impl<'a, K, V> ExactSizeIterator for Drain<'a, K, V> +where + K: BlazeMapId, +{ + #[inline] + fn len(&self) -> usize { + self.map.len + } +} + +impl<'a, K, V> Drop for Drain<'a, K, V> { + #[inline] + fn drop(&mut self) { + self.map.clear(); + } +} + +unsafe impl<'a, K, V> Send for Iter<'a, K, V> +where + K: Sync, + V: Sync, +{ +} + +unsafe impl<'a, K, V> Sync for Iter<'a, K, V> +where + K: Sync, + V: Sync, +{ +} + +impl<'a, K, V> Unpin for Iter<'a, K, V> {} + +impl<'a, K, V> UnwindSafe for Iter<'a, K, V> +where + K: RefUnwindSafe, + V: RefUnwindSafe, +{ +} + +unsafe impl<'a, K, V> Send for IterMut<'a, K, V> +where + K: Sync, + V: Send, +{ +} + +unsafe impl<'a, K, V> Sync for IterMut<'a, K, V> +where + K: Sync, + V: Sync, +{ +} + +impl<'a, K, V> Unpin for IterMut<'a, K, V> {} + +impl<'a, K, V> Clone for Iter<'a, K, V> { + #[inline] + fn clone(&self) -> Self { + Self { + inner: self.inner, + current_position: self.current_position, + len: self.len, + phantom: PhantomData, + } + } +} + +impl<'a, K, V> Debug for Iter<'a, K, V> +where + K: BlazeMapIdStatic, + K::OrigType: Debug, + V: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let key_provider = K::static_container().key_by_offset_provider(); + let mut debug_map = f.debug_map(); + for (key, value) in self.clone() { + let key = unsafe { key_provider.key_by_offset_unchecked(key.get_offset()) }; + debug_map.entry(key.borrow(), value); + } + debug_map.finish() + } +} + +impl<'a, K, V> Debug for IterMut<'a, K, V> +where + K: BlazeMapIdStatic, + K::OrigType: Debug, + V: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let Self { + inner, + current_position, + len, + .. 
+ } = self; + let iter = Iter:: { + inner: *inner, + current_position: *current_position, + len: *len, + phantom: PhantomData, + }; + iter.fmt(f) + } +} + +impl<'a, K, V> Clone for Keys<'a, K, V> { + #[inline] + fn clone(&self) -> Self { + Self { + inner: self.inner.clone(), + } + } +} + +impl<'a, K, V> Clone for Values<'a, K, V> { + #[inline] + fn clone(&self) -> Self { + Self { + inner: self.inner.clone(), + } + } +} + +impl<'a, K, V> Debug for Keys<'a, K, V> +where + K: BlazeMapIdStatic, + K::OrigType: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let key_provider = K::static_container().key_by_offset_provider(); + let mut debug_list = f.debug_list(); + for key in self.clone() { + let key = unsafe { key_provider.key_by_offset_unchecked(key.get_offset()) }; + debug_list.entry(key.borrow()); + } + debug_list.finish() + } +} + +impl<'a, K, V> Debug for Values<'a, K, V> +where + V: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, K, V> Debug for ValuesMut<'a, K, V> +where + V: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let IterMut { + inner, + current_position, + len, + .. + } = self.inner; + let iter = Values:: { + inner: Iter { + inner, + current_position, + len, + phantom: PhantomData, + }, + }; + iter.fmt(f) + } +} + +impl Debug for IntoIter +where + K: BlazeMapIdStatic, + K::OrigType: Debug, + V: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + self.inner.fmt(f) + } +} + +impl Debug for IntoKeys +where + K: BlazeMapIdStatic, + K::OrigType: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + self.inner.inner.keys().fmt(f) + } +} + +impl Debug for IntoValues +where + K: BlazeMapId, + V: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + self.inner.inner.values().fmt(f) + } +} + +impl<'a, K, V> Debug for Drain<'a, K, V> +where + K: BlazeMapIdStatic, + K::OrigType: Debug, + V: Debug, +{ + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + self.map.fmt(f) + } +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..7ae5380 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,38 @@ +//! Implements a [vector-based slab-like map](prelude::BlazeMap) +//! with an interface similar to that of [`HashMap`](std::collections::HashMap), +//! and also provides tools +//! for generating lightweight identifiers that can be type-safely used as keys for this map. + +/// Collection types. +pub mod collections; +/// Utilities for testing the codebase with [`loom`](crate::external::loom). +#[cfg(loom)] +pub mod loom; +#[doc(hidden)] +pub mod sync; +#[doc(hidden)] +pub mod traits; +mod type_gen; +#[doc(hidden)] +pub mod type_info_containers; +#[doc(hidden)] +pub mod utils; + +/// Crate prelude. +pub mod prelude { + pub use crate::{ + collections::blazemap::BlazeMap, + define_key_wrapper, define_key_wrapper_bounded, define_plain_id, + traits::{AllInstancesIter, BlazeMapId, BlazeMapIdStatic, BlazeMapIdWrapper}, + }; +} + +/// Public re-exports of external crates used. 
+pub mod external { + #[cfg(feature = "serde")] + pub use serde; + + #[cfg(loom)] + pub use loom; + pub use {once_cell, parking_lot}; +} diff --git a/src/loom.rs b/src/loom.rs new file mode 100644 index 0000000..c9720eb --- /dev/null +++ b/src/loom.rs @@ -0,0 +1,148 @@ +use std::cmp::Ordering; +use std::marker::PhantomData; + +use crate::prelude::AllInstancesIter; +#[cfg(feature = "serde")] +use serde::{Serialize, Serializer}; + +use crate::traits::{BlazeMapId, TypeInfoContainer}; + +/// Provides `PartialOrd`, `Ord` and `Serialize` traits, which are derived as for original type, +/// for [`BlazeMapId`]s in the [`loom`](crate::external::loom) context. +#[derive(Debug, Copy, Clone)] +pub struct TestableId<'a, I, C> { + id: I, + type_info_container: &'a C, +} + +impl<'a, I, C> TestableId<'a, I, C> +where + I: BlazeMapId, + C: TypeInfoContainer, +{ + /// Creates a new instance of [`TestableId`]. + /// + /// # Safety + /// Mustn't be used outside of loom tests, + /// since there is no guarantee that one [`BlazeMapId`] + /// doesn't interact with different containers of the same type. + #[inline] + pub fn new(id: I, type_info_container: &'a C) -> Self { + Self { + id, + type_info_container, + } + } + + /// Creates an iterator over all identifiers registered. + #[inline] + #[must_use] + pub fn all_instances_iter(&self) -> AllInstancesIter { + use crate::traits::CapacityInfoProvider; + let num_elems = self + .type_info_container + .capacity_info_provider() + .offset_capacity(); + AllInstancesIter { + range: 0..num_elems, + phantom: PhantomData, + } + } +} + +impl<'a, I, C> PartialEq for TestableId<'a, I, C> +where + I: BlazeMapId + PartialEq, + C: TypeInfoContainer, +{ + #[inline] + fn eq(&self, other: &Self) -> bool { + assert!(std::ptr::eq( + self.type_info_container, + other.type_info_container, + )); + self.id.eq(&other.id) + } +} + +impl<'a, I, C> Eq for TestableId<'a, I, C> +where + I: BlazeMapId + Eq, + C: TypeInfoContainer, +{ +} + +impl<'a, I, C> PartialOrd for TestableId<'a, I, C> +where + I: BlazeMapId + PartialEq, + C: TypeInfoContainer, + C::OrigType: PartialOrd, +{ + #[inline] + fn partial_cmp(&self, other: &Self) -> Option { + use crate::traits::KeyByOffsetProvider; + use std::borrow::Borrow; + assert!(std::ptr::eq( + self.type_info_container, + other.type_info_container, + )); + let guard = self.type_info_container.key_by_offset_provider(); + let (lhs, rhs) = unsafe { + ( + guard.key_by_offset_unchecked(self.id.get_offset()), + guard.key_by_offset_unchecked(other.id.get_offset()), + ) + }; + lhs.borrow().partial_cmp(rhs.borrow()) + } +} + +impl<'a, I, C> Ord for TestableId<'a, I, C> +where + I: BlazeMapId + Eq, + C: TypeInfoContainer, + C::OrigType: Ord, +{ + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + use crate::traits::KeyByOffsetProvider; + use std::borrow::Borrow; + assert!(std::ptr::eq( + self.type_info_container, + other.type_info_container, + )); + let guard = self.type_info_container.key_by_offset_provider(); + let (lhs, rhs) = unsafe { + ( + guard.key_by_offset_unchecked(self.id.get_offset()), + guard.key_by_offset_unchecked(other.id.get_offset()), + ) + }; + lhs.borrow().cmp(rhs.borrow()) + } +} + +#[cfg(feature = "serde")] +impl<'a, I, C> Serialize for TestableId<'a, I, C> +where + I: BlazeMapId, + C: TypeInfoContainer, + C::OrigType: Serialize, +{ + #[inline] + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + use crate::traits::KeyByOffsetProvider; + use ::std::borrow::Borrow; + + unsafe { + self.type_info_container + 
.key_by_offset_provider() + .key_by_offset_unchecked(self.id.get_offset()) + .borrow() + .serialize(serializer) + } + } +} diff --git a/src/sync.rs b/src/sync.rs new file mode 100644 index 0000000..e37da78 --- /dev/null +++ b/src/sync.rs @@ -0,0 +1,8 @@ +#[cfg(loom)] +pub use loom::sync::{atomic::AtomicUsize, atomic::Ordering, RwLock, RwLockReadGuard}; + +#[cfg(not(loom))] +pub use { + parking_lot::RwLock, + std::sync::atomic::{AtomicUsize, Ordering}, +}; diff --git a/src/traits.rs b/src/traits.rs new file mode 100644 index 0000000..35f4177 --- /dev/null +++ b/src/traits.rs @@ -0,0 +1,164 @@ +use std::borrow::Borrow; +use std::fmt::{Debug, Formatter}; +use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; +use std::ops::{Deref, Range}; + +/// Provides an interface for `blazemap` id types defined by type-generating macros. +pub trait BlazeMapId: Copy { + /// Original key type. + type OrigType: 'static + Clone + Eq + Hash; + /// Type of the container that holds all the information necessary + /// for `Self` to be a [`BlazeMapId`] type. + #[doc(hidden)] + type TypeInfoContainer: TypeInfoContainer; + + /// Returns the offset corresponding to the given identifier. + #[doc(hidden)] + fn get_offset(self) -> usize; + + /// Creates an identifier corresponding to the provided offset. + #[doc(hidden)] + unsafe fn from_offset_unchecked(offset: usize) -> Self; +} + +/// Provides an interface for `blazemap` key-wrapper id types +/// defined by the [`define_key_wrapper`](crate::define_key_wrapper) +/// and [`define_key_wrapper_bounded`](crate::define_key_wrapper_bounded) macros. +pub trait BlazeMapIdWrapper: BlazeMapId { + /// Creates a new instance of [`Self`] based on the [`Self::OrigType`](BlazeMapId::OrigType) instance. + unsafe fn new(type_info_container: &Self::TypeInfoContainer, key: Self::OrigType) -> Self; +} + +/// Provides an interface for statically registered `blazemap` id types. +pub trait BlazeMapIdStatic: BlazeMapId { + /// Creates an iterator over all identifiers registered. + #[inline] + #[must_use] + fn all_instances_iter() -> AllInstancesIter { + let num_elems = Self::static_container() + .capacity_info_provider() + .offset_capacity(); + AllInstancesIter { + range: 0..num_elems, + phantom: PhantomData, + } + } + + /// Returns the static container + /// that holds all the necessary static information for the [`BlazeMapId`] type. + #[doc(hidden)] + fn static_container() -> &'static Self::TypeInfoContainer; +} + +/// Implements an interface for [`BlazeMapId`] key-wrapper static containers. +#[doc(hidden)] +pub trait WrapKey { + /// Creates an instance of [`BlazeMapId`] type that is unique to the given key. + fn wrap_key(&self, key: I::OrigType) -> I; +} + +pub trait TypeInfoContainer: 'static { + /// Original key type. + type OrigType; + + /// Returns the provider of the current total number of registered unique `Self` identifiers. + /// Note that this provider isn't sequentially consistent. + #[doc(hidden)] + fn capacity_info_provider(&self) -> impl Deref; + + /// Returns a provider that may unsafely return + /// the registered key corresponding to the offset specified. + #[doc(hidden)] + fn key_by_offset_provider( + &self, + ) -> impl Deref>; +} + +/// Provides the current total number of registered unique [`BlazeMapId`] identifiers. +/// Note that there is no guarantee of sequential consistency. +#[doc(hidden)] +pub trait CapacityInfoProvider { + /// Returns the current total number of registered unique [`BlazeMapId`] identifiers. 
+ fn offset_capacity(&self) -> usize; +} + +/// May unsafely return the registered key corresponding to the offset specified. +#[doc(hidden)] +pub trait KeyByOffsetProvider { + /// Returns the registered key corresponding to the offset specified. + unsafe fn key_by_offset_unchecked(&self, offset: usize) -> impl Borrow; +} + +/// Iterator over consecutive `blazemap` identifiers. +pub struct AllInstancesIter { + pub(crate) range: Range, + pub(crate) phantom: PhantomData, +} + +impl Clone for AllInstancesIter { + #[inline] + fn clone(&self) -> Self { + Self { + range: self.range.clone(), + phantom: PhantomData, + } + } +} + +impl Debug for AllInstancesIter { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.range) + } +} + +impl PartialEq for AllInstancesIter { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.range == other.range + } +} + +impl Eq for AllInstancesIter {} + +impl Hash for AllInstancesIter { + #[inline] + fn hash(&self, state: &mut H) { + self.range.hash(state); + } +} + +impl Iterator for AllInstancesIter +where + T: BlazeMapId, +{ + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + let next_offset = self.range.next()?; + Some(unsafe { T::from_offset_unchecked(next_offset) }) + } +} + +impl DoubleEndedIterator for AllInstancesIter +where + T: BlazeMapId, +{ + #[inline] + fn next_back(&mut self) -> Option { + let next_back_offset = self.range.next_back()?; + Some(unsafe { T::from_offset_unchecked(next_back_offset) }) + } +} + +impl ExactSizeIterator for AllInstancesIter +where + T: BlazeMapId, +{ + #[inline] + fn len(&self) -> usize { + self.range.len() + } +} diff --git a/src/type_gen.rs b/src/type_gen.rs new file mode 100644 index 0000000..ffaba9e --- /dev/null +++ b/src/type_gen.rs @@ -0,0 +1,226 @@ +#![allow(clippy::module_name_repetitions)] + +mod key_wrapper; +mod key_wrapper_bounded; +mod plain_id; + +#[cfg(all(test, not(loom)))] +mod tests { + use crate::prelude::BlazeMapId; + use crate::{define_key_wrapper, define_key_wrapper_bounded, define_plain_id}; + + #[cfg(feature = "serde")] + mod serde_compatible { + use crate::traits::BlazeMapId; + use crate::{define_key_wrapper, define_key_wrapper_bounded, define_plain_id}; + + #[test] + fn key_wrapper() { + define_key_wrapper! { + struct BlazeMapKeyExample(String); + Derive(as for Original Type): { + Default, + Debug, + Display, + Ord, + Serialize, + Deserialize + } + } + + let first = BlazeMapKeyExample::new("first".to_string()); + let second = BlazeMapKeyExample::new("second".to_string()); + assert_eq!(first.get_offset(), 0); + assert_eq!(second.get_offset(), 1); + assert_eq!(serde_json::ser::to_string(&first).unwrap(), r#""first""#); + assert_eq!(serde_json::ser::to_string(&second).unwrap(), r#""second""#); + } + + #[test] + fn plain_id() { + define_plain_id! { + struct BlazeMapIdExample; + Derive: { + Ord, + Serialize + } + } + + let first = BlazeMapIdExample::new(); + let second = BlazeMapIdExample::new(); + assert_eq!(first.get_offset(), 0); + assert_eq!(second.get_offset(), 1); + assert_eq!(serde_json::ser::to_string(&first).unwrap(), "0"); + assert_eq!(serde_json::ser::to_string(&second).unwrap(), "1"); + } + + #[test] + fn key_wrapper_bounded() { + define_key_wrapper_bounded! 
{ + struct BlazeMapKeyExample(String); + MAX_CAP = 2; + Derive(as for Original Type): { + Default, + Debug, + Display, + Ord, + Serialize, + Deserialize + } + } + + let first = BlazeMapKeyExample::new("first".to_string()); + let second = BlazeMapKeyExample::new("second".to_string()); + assert_eq!(first.get_offset(), 0); + assert_eq!(second.get_offset(), 1); + assert_eq!(serde_json::ser::to_string(&first).unwrap(), r#""first""#); + assert_eq!(serde_json::ser::to_string(&second).unwrap(), r#""second""#); + } + + #[test] + #[should_panic(expected = "capacity 2 overflow")] + fn key_wrapper_bounded_overflow() { + define_key_wrapper_bounded! { + struct BlazeMapKeyExample(String); + MAX_CAP = 2; + Derive(as for Original Type): { + Default, + Debug, + Display, + Ord, + Serialize, + Deserialize + } + } + + let _first = BlazeMapKeyExample::new("first".to_string()); + let _second = BlazeMapKeyExample::new("second".to_string()); + let _third = BlazeMapKeyExample::new("third".to_string()); + } + } + + #[test] + fn key_wrapper() { + define_key_wrapper! { + struct BlazeMapKeyExample1(usize); + Derive(as for Original Type): { + Default, + Debug, + Display, + Ord + } + } + + define_key_wrapper! { + struct BlazeMapKeyExample2(usize); + Derive(as for Original Type): { + Default, + Debug, + Display, + PartialOrd + } + } + + define_key_wrapper! { + struct BlazeMapKeyExample3(usize); + Derive(as for Original Type): { + Default, + Debug, + Display + }; + Derive(as for usize): { + Ord + } + } + + define_key_wrapper! { + struct BlazeMapKeyExample4(usize); + Derive(as for Original Type): { + Default, + Debug, + Display + }; + Derive(as for usize): { + PartialOrd + } + } + } + + #[test] + fn plain_id() { + define_plain_id! { + struct BlazeMapIdExample1; + Derive: { + Ord + } + } + + define_plain_id! { + struct BlazeMapIdExample2; + Derive: { + PartialOrd + } + } + + let first = BlazeMapIdExample1::new(); + let second = BlazeMapIdExample1::new(); + assert_eq!(first.get_offset(), 0); + assert_eq!(second.get_offset(), 1); + + let first = BlazeMapIdExample2::new(); + let second = BlazeMapIdExample2::new(); + assert_eq!(first.get_offset(), 0); + assert_eq!(second.get_offset(), 1); + } + + #[test] + fn key_wrapper_bounded() { + define_key_wrapper_bounded! { + struct BlazeMapKeyExample1(usize); + MAX_CAP = 2; + Derive(as for Original Type): { + Default, + Debug, + Display, + Ord + } + } + + define_key_wrapper_bounded! { + struct BlazeMapKeyExample2(usize); + MAX_CAP = 2; + Derive(as for Original Type): { + Default, + Debug, + Display, + PartialOrd + } + } + + define_key_wrapper_bounded! { + struct BlazeMapKeyExample3(usize); + MAX_CAP = 2; + Derive(as for Original Type): { + Default, + Debug, + Display + }; + Derive(as for usize): { + Ord + } + } + + define_key_wrapper_bounded! { + struct BlazeMapKeyExample4(usize); + MAX_CAP = 2; + Derive(as for Original Type): { + Default, + Debug, + Display + }; + Derive(as for usize): { + PartialOrd + } + } + } +} diff --git a/src/type_gen/key_wrapper.rs b/src/type_gen/key_wrapper.rs new file mode 100644 index 0000000..f5cfa33 --- /dev/null +++ b/src/type_gen/key_wrapper.rs @@ -0,0 +1,324 @@ +/// Creates a new type that acts as an `usize`-based replacement for the old type +/// that can be used as a key for `blazemap` collections. +/// +/// This macro supports optional inference of standard traits using the following syntax: +/// +/// * `Derive(as for Original Type)` — derives traits as for the original type +/// for which `blazemap_key` is being registered. 
Each call to methods on these traits +/// requires an additional `.read` call on the internal synchronization primitive, +/// so — all other things being equal — their calls may be less optimal +/// than the corresponding calls on instances of the original key's type. +/// This method supports inference of the following traits: +/// * `Default` +/// * `PartialOrd` (mutually exclusive with `Ord`) +/// * `Ord` (also derives `PartialOrd`, so mutually exclusive with `PartialOrd`) +/// * `Debug` +/// * `Display` +/// * `Serialize` (with `serde` feature only) +/// * `Deserialize` (with `serde` feature only) +/// * `Derive(as for usize)` — derives traits in the same way as for +/// the serial number assigned when registering an instance of the original type +/// the first time [`BlazeMapIdWrapper::new`](crate::prelude::BlazeMapIdWrapper::new) was called. +/// Because methods inferred by this option do not require additional +/// locking on synchronization primitives, +/// they do not incur any additional overhead compared to methods inferred for plain `usize`. +/// This method supports inference of the following traits: +/// * `PartialOrd` (mutually exclusive with `Ord`) +/// * `Ord` (also derives `PartialOrd`, so mutually exclusive with `PartialOrd`) +/// +/// # Example +/// +/// ```rust +/// use blazemap::prelude::{BlazeMap, define_key_wrapper}; +/// +/// define_key_wrapper! { +/// pub struct Key(&'static str); +/// Derive(as for Original Type): { // Optional section +/// Debug, +/// Display, +/// }; +/// Derive(as for usize): { // Optional section +/// Ord, +/// } +/// } +/// +/// let key_1 = Key::new("first"); +/// let key_2 = Key::new("second"); +/// let key_3 = Key::new("third"); +/// +/// let mut map = BlazeMap::new(); +/// map.insert(key_2, "2"); +/// map.insert(key_1, "1"); +/// map.insert(key_3, "3"); +/// +/// assert_eq!(format!("{map:?}"), r#"{"first": "1", "second": "2", "third": "3"}"#) +/// ``` +#[macro_export] +macro_rules! define_key_wrapper { + ( + $(#[$attrs:meta])* + $vis:vis + struct $new_type:ident($orig_type:ty) + $(; Derive(as for Original Type): {$($to_derive_orig:ident),+ $(,)?} )? + $(; Derive(as for usize): {$( $to_derive_sn:ident),+ $(,)?} )? + $(;)? + ) => { + $crate::key_wrapper_inner! { + $(#[$attrs])* + $vis + struct $new_type($orig_type) + } + $($($crate::key_wrapper_derive! {@DERIVE $to_derive_orig $new_type})*)? + $($($crate::assigned_offset_derive! {@DERIVE $to_derive_sn $new_type})*)? + }; + ( + $(#[$attrs:meta])* + $vis:vis + struct $new_type:ident($orig_type:ty) + $(; Derive(as for usize): {$( $to_derive_sn:ident),+ $(,)?} )? + $(; Derive(as for Original Type): {$($to_derive_orig:ident),+ $(,)?} )? + $(;)? + ) => { + $crate::key_wrapper_inner! { + $(#[$attrs])* + $vis + struct $new_type($orig_type) + } + $($($crate::key_wrapper_derive! {@DERIVE $to_derive_orig $new_type})*)? + $($($crate::assigned_offset_derive! {@DERIVE $to_derive_sn $new_type})*)? + } +} + +#[doc(hidden)] +#[macro_export] +macro_rules! 
key_wrapper_inner { + ( + $(#[$attrs:meta])* + $vis:vis + struct $new_type:ident($orig_type:ty) + ) => { + $(#[$attrs])* + #[derive(Clone, Copy, Eq, PartialEq, Hash)] + #[repr(transparent)] + $vis struct $new_type($crate::utils::OffsetProvider); + + #[cfg(not(loom))] + impl $new_type + { + #[inline] + $vis fn new(value: $orig_type) -> Self { + use $crate::traits::BlazeMapIdStatic; + unsafe { ::new(Self::static_container(), value) } + } + } + + impl $crate::prelude::BlazeMapId for $new_type + { + type OrigType = $orig_type; + type TypeInfoContainer = $crate::sync::RwLock<$crate::type_info_containers::key_wrapper::StaticContainer<$orig_type>>; + + #[inline] + fn get_offset(self) -> usize { + self.0.into_offset() + } + + #[inline] + unsafe fn from_offset_unchecked(offset: usize) -> Self { + Self($crate::utils::OffsetProvider::::new(offset)) + } + } + + #[cfg(not(loom))] + impl $crate::traits::BlazeMapIdStatic for $new_type + { + #[inline] + fn static_container() -> &'static Self::TypeInfoContainer + { + use $crate::sync::RwLock; + use $crate::type_info_containers::key_wrapper::StaticContainer; + + static MAP: RwLock> = RwLock::new(StaticContainer::new()); + &MAP + } + } + + impl $crate::prelude::BlazeMapIdWrapper for $new_type + { + #[inline] + unsafe fn new(type_info_container: &Self::TypeInfoContainer, key: $orig_type) -> Self { + use $crate::traits::WrapKey; + type_info_container.wrap_key(key) + } + } + } +} + +#[doc(hidden)] +#[macro_export] +macro_rules! key_wrapper_derive { + (@DERIVE Default $new_type:ident) => { + impl Default for $new_type { + #[inline] + fn default() -> Self { + Self::new(Default::default()) + } + } + }; + (@DERIVE PartialOrd $new_type:ident) => { + impl PartialOrd for $new_type { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { + use ::std::borrow::Borrow; + use $crate::traits::KeyByOffsetProvider; + use $crate::traits::TypeInfoContainer; + let Self(lhs) = self; + let Self(rhs) = other; + let guard = ::static_container() + .key_by_offset_provider(); + let (lhs, rhs) = unsafe { + ( + guard.key_by_offset_unchecked(lhs.into_offset()), + guard.key_by_offset_unchecked(rhs.into_offset()), + ) + }; + lhs.borrow().partial_cmp(rhs.borrow()) + } + } + }; + (@DERIVE Ord $new_type:ident) => { + impl PartialOrd for $new_type { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { + Some(self.cmp(other)) + } + } + + impl Ord for $new_type { + #[inline] + fn cmp(&self, other: &Self) -> ::std::cmp::Ordering { + use ::std::borrow::Borrow; + use $crate::traits::KeyByOffsetProvider; + use $crate::traits::TypeInfoContainer; + + let Self(lhs) = self; + let Self(rhs) = other; + let guard = ::static_container() + .key_by_offset_provider(); + let (lhs, rhs) = unsafe { + ( + guard.key_by_offset_unchecked(lhs.into_offset()), + guard.key_by_offset_unchecked(rhs.into_offset()), + ) + }; + lhs.borrow().cmp(rhs.borrow()) + } + } + }; + (@DERIVE Debug $new_type:ident) => { + impl ::std::fmt::Debug for $new_type { + #[inline] + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + use ::std::borrow::Borrow; + use $crate::traits::KeyByOffsetProvider; + use $crate::traits::TypeInfoContainer; + + let mut f = f.debug_struct(::std::stringify!($new_type)); + let offset = self.0.into_offset(); + let guard = ::static_container() + .key_by_offset_provider(); + let original_key = unsafe { guard.key_by_offset_unchecked(offset) }; + f.field("original_key", original_key.borrow()); + drop(original_key); + drop(guard); + 
f.field("offset", &offset).finish() + } + } + }; + (@DERIVE Display $new_type:ident) => { + impl ::std::fmt::Display for $new_type { + #[inline] + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + use ::std::borrow::Borrow; + use $crate::traits::KeyByOffsetProvider; + use $crate::traits::TypeInfoContainer; + + let guard = ::static_container() + .key_by_offset_provider(); + let original_key = unsafe { guard.key_by_offset_unchecked(self.0.into_offset()) }; + write!(f, "{}", original_key.borrow()) + } + } + }; + (@DERIVE Deserialize $new_type:ident) => { + impl<'de> $crate::external::serde::Deserialize<'de> for $new_type { + #[inline] + fn deserialize(deserializer: D) -> Result + where + D: $crate::external::serde::Deserializer<'de>, + { + use $crate::traits::BlazeMapIdStatic; + let original_key: ::OrigType = + $crate::external::serde::Deserialize::deserialize(deserializer)?; + Ok(unsafe { + ::new( + Self::static_container(), + original_key, + ) + }) + } + } + }; + (@DERIVE Serialize $new_type:ident) => { + impl $crate::external::serde::Serialize for $new_type { + #[inline] + fn serialize(&self, serializer: S) -> Result + where + S: $crate::external::serde::Serializer, + { + use ::std::borrow::Borrow; + use $crate::traits::KeyByOffsetProvider; + use $crate::traits::TypeInfoContainer; + + unsafe { + ::static_container() + .key_by_offset_provider() + .key_by_offset_unchecked(self.0.into_offset()) + .borrow() + .serialize(serializer) + } + } + } + }; +} + +#[doc(hidden)] +#[macro_export] +macro_rules! assigned_offset_derive { + (@DERIVE PartialOrd $new_type:ident) => { + impl PartialOrd for $new_type { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { + let Self(lhs) = self; + let Self(rhs) = other; + lhs.into_offset().partial_cmp(&rhs.into_offset()) + } + } + }; + (@DERIVE Ord $new_type:ident) => { + impl PartialOrd for $new_type { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { + Some(self.cmp(other)) + } + } + + impl Ord for $new_type { + #[inline] + fn cmp(&self, other: &Self) -> ::std::cmp::Ordering { + let Self(lhs) = self; + let Self(rhs) = other; + lhs.into_offset().cmp(&rhs.into_offset()) + } + } + }; +} diff --git a/src/type_gen/key_wrapper_bounded.rs b/src/type_gen/key_wrapper_bounded.rs new file mode 100644 index 0000000..05a8111 --- /dev/null +++ b/src/type_gen/key_wrapper_bounded.rs @@ -0,0 +1,158 @@ +/// Creates a new type that acts as an `usize`-based replacement for the old type +/// that can be used as a key for `blazemap` collections. +/// Being an analogue of [`define_key_wrapper`](crate::define_key_wrapper) +/// for the case when the user could statically guarantee +/// that the number of unique keys doesn't exceed `MAX_CAP`, it's optimized for read operations +/// so that they don't create any multi-thread contention. +/// +/// This macro supports optional inference of standard traits using the following syntax: +/// +/// * `Derive(as for Original Type)` — derives traits as for the original type +/// for which `blazemap_key` is being registered. 
+/// This method supports inference of the following traits: +/// * `Default` +/// * `PartialOrd` (mutually exclusive with `Ord`) +/// * `Ord` (also derives `PartialOrd`, so mutually exclusive with `PartialOrd`) +/// * `Debug` +/// * `Display` +/// * `Serialize` (with `serde` feature only) +/// * `Deserialize` (with `serde` feature only) +/// * `Derive(as for usize)` — derives traits in the same way as for +/// the serial number assigned when registering an instance of the original type +/// the first time [`BlazeMapIdWrapper::new`](crate::prelude::BlazeMapIdWrapper::new) was called. +/// Methods inferred by this option do not incur any additional overhead +/// compared to methods inferred for plain `usize`. +/// This method supports inference of the following traits: +/// * `PartialOrd` (mutually exclusive with `Ord`) +/// * `Ord` (also derives `PartialOrd`, so mutually exclusive with `PartialOrd`) +/// +/// # Example +/// +/// ```rust +/// use blazemap::prelude::{BlazeMap, define_key_wrapper_bounded}; +/// +/// define_key_wrapper_bounded! { +/// pub struct Key(&'static str); +/// MAX_CAP = 40_000; +/// Derive(as for Original Type): { // Optional section +/// Debug, +/// Display, +/// }; +/// Derive(as for usize): { // Optional section +/// Ord, +/// } +/// } +/// +/// let key_1 = Key::new("first"); +/// let key_2 = Key::new("second"); +/// let key_3 = Key::new("third"); +/// +/// let mut map = BlazeMap::new(); +/// map.insert(key_2, "2"); +/// map.insert(key_1, "1"); +/// map.insert(key_3, "3"); +/// +/// assert_eq!(format!("{map:?}"), r#"{"first": "1", "second": "2", "third": "3"}"#) +/// ``` +#[macro_export] +macro_rules! define_key_wrapper_bounded { + ( + $(#[$attrs:meta])* + $vis:vis + struct $new_type:ident($orig_type:ty); + MAX_CAP = $capacity:literal + $(; Derive(as for Original Type): {$($to_derive_orig:ident),+ $(,)?} )? + $(; Derive(as for usize): {$( $to_derive_sn:ident),+ $(,)?} )? + $(;)? + ) => { + $crate::key_wrapper_bounded_inner! { + $(#[$attrs])* + $vis + struct $new_type($orig_type); + MAX_CAP = $capacity + } + $($($crate::key_wrapper_derive! {@DERIVE $to_derive_orig $new_type})*)? + $($($crate::assigned_offset_derive! {@DERIVE $to_derive_sn $new_type})*)? + }; + ( + $(#[$attrs:meta])* + $vis:vis + struct $new_type:ident($orig_type:ty) + $(; Derive(as for usize): {$( $to_derive_sn:ident),+ $(,)?} )? + $(; Derive(as for Original Type): {$($to_derive_orig:ident),+ $(,)?} )? + $(;)? + ) => { + $crate::key_wrapper_bounded_inner! { + $(#[$attrs])* + $vis + struct $new_type($orig_type); + MAX_CAP = $capacity + } + $($($crate::key_wrapper_derive! {@DERIVE $to_derive_orig $new_type})*)? + $($($crate::assigned_offset_derive! {@DERIVE $to_derive_sn $new_type})*)? + } +} + +#[doc(hidden)] +#[macro_export] +macro_rules! 
key_wrapper_bounded_inner { + ( + $(#[$attrs:meta])* + $vis:vis + struct $new_type:ident($orig_type:ty); + MAX_CAP = $capacity:literal + ) => { + $(#[$attrs])* + #[derive(Clone, Copy, Eq, PartialEq, Hash)] + #[repr(transparent)] + $vis struct $new_type($crate::utils::OffsetProvider); + + #[cfg(not(loom))] + impl $new_type + { + #[inline] + $vis fn new(value: $orig_type) -> Self { + use $crate::traits::BlazeMapIdStatic; + unsafe { ::new(Self::static_container(), value) } + } + } + + impl $crate::prelude::BlazeMapId for $new_type + { + type OrigType = $orig_type; + type TypeInfoContainer = $crate::type_info_containers::key_wrapper_bounded::StaticContainer<$orig_type, $capacity>; + + #[inline] + fn get_offset(self) -> usize { + self.0.into_offset() + } + + #[inline] + unsafe fn from_offset_unchecked(offset: usize) -> Self { + Self($crate::utils::OffsetProvider::::new(offset)) + } + } + + #[cfg(not(loom))] + impl $crate::traits::BlazeMapIdStatic for $new_type + { + #[inline] + fn static_container() -> &'static Self::TypeInfoContainer + { + use $crate::type_info_containers::key_wrapper_bounded::StaticContainer; + use $crate::external::once_cell::sync::Lazy; + static MAP: Lazy> = Lazy::new(StaticContainer::new); + &MAP + } + } + + impl $crate::prelude::BlazeMapIdWrapper for $new_type + { + #[inline] + unsafe fn new(type_info_container: &Self::TypeInfoContainer, key: $orig_type) -> Self { + use $crate::traits::WrapKey; + type_info_container.wrap_key(key) + } + } + } +} diff --git a/src/type_gen/plain_id.rs b/src/type_gen/plain_id.rs new file mode 100644 index 0000000..77b7f35 --- /dev/null +++ b/src/type_gen/plain_id.rs @@ -0,0 +1,178 @@ +/// Creates a new type based on incrementally generated `usize` instances +/// that can be used as a key for `blazemap` collections. +/// +/// This macro supports optional inference of standard traits using the following syntax: +/// +/// * `Derive` — derives traits in the same way as for +/// the serial number assigned when creating a new instance of the type. +/// Because methods inferred by this option do not require additional +/// locking on synchronization primitives, +/// they do not incur any additional overhead compared to methods inferred for plain `usize`. +/// This method supports inference of the following traits: +/// * `PartialOrd` (mutually exclusive with `Ord`) +/// * `Ord` (also derives `PartialOrd`, so mutually exclusive with `PartialOrd`) +/// * `Serialize` (with `serde` feature only) +/// +/// # Example +/// +/// ```rust +/// use blazemap::prelude::{BlazeMap, define_plain_id}; +/// +/// define_plain_id! { +/// pub struct Id; +/// Derive: { // Derive section is optional +/// Ord +/// }; +/// } +/// +/// let key_1 = Id::new(); +/// let key_2 = Id::new(); +/// let key_3 = Id::new(); +/// +/// let mut map = BlazeMap::new(); +/// map.insert(key_2, "2"); +/// map.insert(key_1, "1"); +/// map.insert(key_3, "3"); +/// +/// assert_eq!(format!("{map:?}"), r#"{0: "1", 1: "2", 2: "3"}"#) +/// ``` +#[macro_export] +macro_rules! define_plain_id { + ( + $(#[$attrs:meta])* + $vis:vis + struct $new_type:ident + $(; Derive: {$($to_derive_sn:ident),+ $(,)?} )? + $(;)? + ) => { + $crate::plain_id_inner! { + $(#[$attrs])* + $vis + struct $new_type + } + $($($crate::plain_id_derive! {@DERIVE $to_derive_sn $new_type})*)? + }; +} + +#[doc(hidden)] +#[macro_export] +macro_rules! 
plain_id_inner { + ( + $(#[$attrs:meta])* + $vis:vis + struct $new_type:ident + ) => { + $(#[$attrs])* + #[derive(Clone, Copy, Eq, PartialEq, Hash)] + #[repr(transparent)] + $vis struct $new_type($crate::utils::OffsetProvider); + + impl $new_type + { + #[inline] + #[cfg(not(loom))] + $vis fn new() -> Self { + let next_id = ::static_container().next_id(); + Self(unsafe { $crate::utils::OffsetProvider::::new(next_id) }) + } + + #[inline] + #[cfg(loom)] + $vis fn new(type_info_container: &::TypeInfoContainer) -> Self { + let next_id = type_info_container.next_id(); + Self(unsafe { $crate::utils::OffsetProvider::::new(next_id) }) + } + } + + impl $crate::prelude::BlazeMapId for $new_type + { + type OrigType = usize; + type TypeInfoContainer = $crate::type_info_containers::plain_id::StaticContainer; + + #[inline] + fn get_offset(self) -> usize { + self.0.into_offset() + } + + #[inline] + unsafe fn from_offset_unchecked(offset: usize) -> Self { + Self($crate::utils::OffsetProvider::::new(offset)) + } + } + + #[cfg(not(loom))] + impl $crate::traits::BlazeMapIdStatic for $new_type + { + #[inline] + fn static_container() -> &'static Self::TypeInfoContainer + { + use $crate::type_info_containers::plain_id::StaticContainer; + static INFO: StaticContainer = StaticContainer::new(); + &INFO + } + } + + impl ::std::fmt::Debug for $new_type + { + #[inline] + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result + { + f.debug_tuple(::std::stringify!($new_type)) + .field(&self.0.into_offset()) + .finish() + } + } + + impl ::std::fmt::Display for $new_type + { + #[inline] + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result + { + write!(f, "{}", self.0.into_offset()) + } + } + } +} + +#[doc(hidden)] +#[macro_export] +macro_rules! plain_id_derive { + (@DERIVE PartialOrd $new_type:ident) => { + impl PartialOrd for $new_type { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { + let Self(lhs) = self; + let Self(rhs) = other; + lhs.into_offset().partial_cmp(&rhs.into_offset()) + } + } + }; + (@DERIVE Ord $new_type:ident) => { + impl PartialOrd for $new_type { + #[inline] + fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { + Some(self.cmp(other)) + } + } + + impl Ord for $new_type { + #[inline] + fn cmp(&self, other: &Self) -> ::std::cmp::Ordering { + let Self(lhs) = self; + let Self(rhs) = other; + lhs.into_offset().cmp(&rhs.into_offset()) + } + } + }; + (@DERIVE Serialize $new_type:ident) => { + impl $crate::external::serde::Serialize for $new_type { + #[inline] + fn serialize(&self, serializer: S) -> Result + where + S: $crate::external::serde::Serializer, + { + self.0.into_offset().serialize(serializer) + } + } + }; +} diff --git a/src/type_info_containers.rs b/src/type_info_containers.rs new file mode 100644 index 0000000..f66fd1e --- /dev/null +++ b/src/type_info_containers.rs @@ -0,0 +1,3 @@ +pub mod key_wrapper; +pub mod key_wrapper_bounded; +pub mod plain_id; diff --git a/src/type_info_containers/key_wrapper.rs b/src/type_info_containers/key_wrapper.rs new file mode 100644 index 0000000..c87794e --- /dev/null +++ b/src/type_info_containers/key_wrapper.rs @@ -0,0 +1,144 @@ +use std::borrow::Borrow; +use std::collections::hash_map::Entry; +use std::collections::HashMap; +use std::hash::Hash; +use std::ops::Deref; + +#[cfg(not(loom))] +use once_cell::sync::Lazy; + +use crate::prelude::BlazeMapId; +use crate::sync::RwLock; +use crate::traits::{CapacityInfoProvider, KeyByOffsetProvider, TypeInfoContainer, WrapKey}; + 
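The container defined below keeps a two-way mapping between original keys and dense `usize` offsets behind a read-write lock: lookups of already registered keys only take the read lock, while first-time registrations re-check under the write lock. A minimal stand-alone sketch of that pattern, using `std::sync::RwLock` instead of the crate's `sync` shim (`Registry` and `wrap_key_offset` are illustrative names, not crate APIs):

```rust
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::hash::Hash;
use std::sync::RwLock;

/// Illustrative stand-in for the registry defined below: original key <-> dense offset.
struct Registry<K> {
    offset_to_orig: Vec<K>,
    orig_to_offset: HashMap<K, usize>,
}

fn wrap_key_offset<K: Clone + Eq + Hash>(lock: &RwLock<Registry<K>>, key: K) -> usize {
    // Fast path: a shared read lock is enough for keys that are already registered.
    let existing = lock.read().unwrap().orig_to_offset.get(&key).copied();
    if let Some(offset) = existing {
        return offset;
    }
    // Slow path: take the write lock and re-check through the entry API,
    // since another thread may have registered the key in the meantime.
    let mut guard = lock.write().unwrap();
    let registry = &mut *guard;
    match registry.orig_to_offset.entry(key) {
        Entry::Occupied(entry) => *entry.get(),
        Entry::Vacant(entry) => {
            let offset = registry.offset_to_orig.len();
            registry.offset_to_orig.push(entry.key().clone());
            entry.insert(offset);
            offset
        }
    }
}

fn main() {
    let registry = RwLock::new(Registry {
        offset_to_orig: Vec::new(),
        orig_to_offset: HashMap::new(),
    });
    assert_eq!(wrap_key_offset(&registry, "first".to_string()), 0);
    assert_eq!(wrap_key_offset(&registry, "second".to_string()), 1);
    // Re-registering an existing key returns the offset assigned the first time.
    assert_eq!(wrap_key_offset(&registry, "first".to_string()), 0);
}
```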
+/// Global, statically initialized container with correspondence mapping +/// between blazemap offset wrappers and original keys. +#[cfg(not(loom))] +#[doc(hidden)] +#[derive(Debug)] +pub struct StaticContainer { + offset_to_orig: Vec, + orig_to_offset: Lazy>, +} + +/// Loom-testable version of the above container. +/// Note that it cannot be actually static +/// due to the [`loom` inability](https://github.com/tokio-rs/loom/issues/290) +/// to test statically initialized code. +#[cfg(loom)] +#[doc(hidden)] +#[derive(Debug)] +pub struct StaticContainer { + offset_to_orig: Vec, + orig_to_offset: HashMap, +} + +impl StaticContainer { + /// Creates a new instance of [`StaticContainer`]. + #[inline] + #[must_use] + #[cfg(not(loom))] + pub const fn new() -> Self { + Self { + offset_to_orig: vec![], + orig_to_offset: Lazy::new(Default::default), + } + } + + /// Creates a new instance of [`StaticContainer`]. + /// + /// # Safety + /// Mustn't be used outside of loom tests, + /// since there is no guarantee that one [`BlazeMapId`](crate::prelude::BlazeMapId) + /// doesn't interact with different containers of the same type. + #[inline] + #[must_use] + #[cfg(loom)] + pub fn new() -> Self { + Self { + offset_to_orig: vec![], + orig_to_offset: HashMap::new(), + } + } +} + +impl WrapKey for RwLock> +where + K: Clone + Eq + Hash, + I: BlazeMapId, +{ + #[inline] + fn wrap_key(&self, key: K) -> I { + #[cfg(not(loom))] + let offset = self.read().orig_to_offset.get(&key).copied(); + #[cfg(loom)] + let offset = self.read().unwrap().orig_to_offset.get(&key).copied(); + unsafe { + if let Some(offset) = offset { + I::from_offset_unchecked(offset) + } else { + #[cfg(not(loom))] + let mut guard = self.write(); + #[cfg(loom)] + let mut guard = self.write().unwrap(); + let container = &mut *guard; + let offset = match container.orig_to_offset.entry(key) { + Entry::Vacant(entry) => { + let offset = container.offset_to_orig.len(); + container.offset_to_orig.push(entry.key().clone()); + entry.insert(offset); + offset + } + Entry::Occupied(entry) => *entry.get(), + }; + drop(guard); + I::from_offset_unchecked(offset) + } + } + } +} + +impl TypeInfoContainer for RwLock> +where + K: 'static, +{ + type OrigType = K; + + #[inline] + fn capacity_info_provider(&self) -> impl Deref { + #[cfg(not(loom))] + let result = self.read(); + #[cfg(loom)] + let result = self.read().unwrap(); + result + } + + #[inline] + fn key_by_offset_provider( + &self, + ) -> impl Deref> { + #[cfg(not(loom))] + let result = self.read(); + #[cfg(loom)] + let result = self.read().unwrap(); + result + } +} + +impl CapacityInfoProvider for StaticContainer { + #[inline] + fn offset_capacity(&self) -> usize { + self.offset_to_orig.len() + } +} + +impl KeyByOffsetProvider for StaticContainer { + #[inline] + unsafe fn key_by_offset_unchecked(&self, offset: usize) -> impl Borrow { + #[cfg(not(loom))] + let result = self.offset_to_orig.get_unchecked(offset); + #[cfg(loom)] + let result = self.offset_to_orig.get(offset).unwrap(); + result + } +} diff --git a/src/type_info_containers/key_wrapper_bounded.rs b/src/type_info_containers/key_wrapper_bounded.rs new file mode 100644 index 0000000..fd31878 --- /dev/null +++ b/src/type_info_containers/key_wrapper_bounded.rs @@ -0,0 +1,211 @@ +use std::collections::hash_map::Entry; +use std::collections::HashMap; +use std::hash::Hash; + +use std::borrow::Borrow; +use std::ops::Deref; +#[cfg(not(loom))] +use std::{ + cell::UnsafeCell, + mem::{needs_drop, MaybeUninit}, +}; + +use crate::sync::{AtomicUsize, Ordering, 
RwLock}; + +use crate::prelude::BlazeMapId; +use crate::traits::{CapacityInfoProvider, KeyByOffsetProvider, TypeInfoContainer, WrapKey}; + +#[cfg(loom)] +use crate::sync::RwLockReadGuard; + +/// Global, statically initialized container with correspondence mapping +/// between blazemap index wrappers and original keys. +/// +/// Being an analogue of [`KeyWrapperStaticContainer`](crate::type_info_containers::key_wrapper::StaticContainer) +/// for the case when the user could statically guarantee +/// that the number of unique keys doesn't exceed `CAP`, it's optimized for read operations +/// so that they don't create any multi-thread contention. +#[cfg(not(loom))] +#[doc(hidden)] +#[derive(Debug)] +pub struct StaticContainer { + offset_to_orig: Vec>>, + orig_to_offset: RwLock>, + next_offset: AtomicUsize, +} + +/// Loom-testable version of the above container. +/// Note that it cannot be actually static +/// due to the [`loom` inability](https://github.com/tokio-rs/loom/issues/290) +/// to test statically initialized code. +#[cfg(loom)] +#[doc(hidden)] +#[derive(Debug)] +pub struct StaticContainer { + offset_to_orig: Vec>>, + orig_to_offset: RwLock>, + next_offset: AtomicUsize, +} + +#[cfg(not(loom))] +impl Default for StaticContainer { + #[inline] + fn default() -> Self { + Self { + offset_to_orig: std::iter::repeat_with(|| UnsafeCell::new(MaybeUninit::uninit())) + .take(CAP) + .collect(), + orig_to_offset: RwLock::new(HashMap::with_capacity(CAP)), + next_offset: AtomicUsize::new(0), + } + } +} + +impl StaticContainer { + /// Creates a new instance of [`StaticContainer`]. + #[inline] + #[must_use] + #[cfg(not(loom))] + pub fn new() -> Self { + Self::default() + } + + /// Creates a new instance of [`StaticContainer`]. + /// + /// # Safety + /// Mustn't be used outside of loom tests, + /// since there is no guarantee that one [`BlazeMapId`](crate::prelude::BlazeMapId) + /// doesn't interact with different containers of the same type. 
+ #[inline] + #[must_use] + #[cfg(loom)] + pub fn new() -> Self { + Self { + offset_to_orig: std::iter::repeat_with(|| RwLock::new(None)) + .take(CAP) + .collect(), + orig_to_offset: RwLock::new(HashMap::with_capacity(CAP)), + next_offset: AtomicUsize::new(0), + } + } +} + +impl WrapKey for StaticContainer +where + K: Clone + Eq + Hash, + I: BlazeMapId, +{ + #[inline] + fn wrap_key(&self, key: K) -> I { + #[cfg(not(loom))] + let offset = self.orig_to_offset.read().get(&key).copied(); + #[cfg(loom)] + let offset = self.orig_to_offset.read().unwrap().get(&key).copied(); + unsafe { + if let Some(offset) = offset { + I::from_offset_unchecked(offset) + } else { + #[cfg(not(loom))] + let mut guard = self.orig_to_offset.write(); + #[cfg(loom)] + let mut guard = self.orig_to_offset.write().unwrap(); + let offset = match guard.entry(key) { + Entry::Vacant(entry) => { + let offset = self.next_offset.load(Ordering::Relaxed); + let cell = self + .offset_to_orig + .get(offset) + .unwrap_or_else(|| panic!("capacity {CAP} overflow")); + #[cfg(not(loom))] + (*cell.get()).write(entry.key().clone()); + #[cfg(loom)] + { + let mut guard = cell.try_write().unwrap(); + let value = &mut *guard; + if value.is_some() { + panic!("value is already set") + } + *value = Some(entry.key().clone()); + } + entry.insert(offset); + self.next_offset.store(offset + 1, Ordering::Release); + offset + } + Entry::Occupied(entry) => *entry.get(), + }; + drop(guard); + I::from_offset_unchecked(offset) + } + } + } +} + +impl Drop for StaticContainer { + #[inline] + fn drop(&mut self) { + #[cfg(not(loom))] + if !needs_drop::() { + return; + } + #[cfg(not(loom))] + let num_init = *self.next_offset.get_mut(); + #[cfg(loom)] + let num_init = self.next_offset.load(Ordering::Acquire); + self.offset_to_orig.as_mut_slice()[..num_init] + .iter_mut() + .for_each(|cell| { + #[cfg(not(loom))] + unsafe { + cell.get_mut().assume_init_drop(); + }; + #[cfg(loom)] + let _ = cell.try_write().unwrap().take(); + }); + } +} + +unsafe impl Sync for StaticContainer {} + +impl TypeInfoContainer for StaticContainer { + type OrigType = K; + + #[inline] + fn capacity_info_provider(&self) -> impl Deref { + self + } + + #[inline] + fn key_by_offset_provider( + &self, + ) -> impl Deref> { + self + } +} + +impl CapacityInfoProvider for StaticContainer { + #[inline] + fn offset_capacity(&self) -> usize { + self.next_offset.load(Ordering::Acquire) + } +} + +#[cfg(loom)] +struct BorrowGuard<'a, K>(RwLockReadGuard<'a, Option>); + +#[cfg(loom)] +impl Borrow for BorrowGuard<'_, K> { + fn borrow(&self) -> &K { + self.0.as_ref().unwrap() + } +} + +impl KeyByOffsetProvider for StaticContainer { + #[inline] + unsafe fn key_by_offset_unchecked(&self, offset: usize) -> impl Borrow { + #[cfg(not(loom))] + let result = (*self.offset_to_orig.get_unchecked(offset).get()).assume_init_ref(); + #[cfg(loom)] + let result = BorrowGuard(self.offset_to_orig.get(offset).unwrap().read().unwrap()); + result + } +} diff --git a/src/type_info_containers/plain_id.rs b/src/type_info_containers/plain_id.rs new file mode 100644 index 0000000..4021e76 --- /dev/null +++ b/src/type_info_containers/plain_id.rs @@ -0,0 +1,85 @@ +use crate::sync::{AtomicUsize, Ordering}; +use std::borrow::Borrow; +use std::ops::Deref; + +use crate::traits::{CapacityInfoProvider, KeyByOffsetProvider, TypeInfoContainer}; + +/// Global, statically initialized offset generator. 
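The generator defined below hands out identifiers from a single atomic counter and refuses to wrap on overflow by returning `None` from `checked_add` inside `fetch_update`. As a rough, illustrative sketch of that behaviour using the standard library's `AtomicUsize` directly rather than the crate's `sync`/loom shim:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

fn main() {
    let next_offset = AtomicUsize::new(0);
    // `fetch_update` retries the closure until its compare-exchange succeeds;
    // a `None` from `checked_add` surfaces as an `Err`, turning overflow into a
    // panic instead of silently wrapping back to 0. The orderings mirror the
    // Release/Acquire pair used below.
    let id = next_offset
        .fetch_update(Ordering::Release, Ordering::Acquire, |n| n.checked_add(1))
        .expect("usize overflow");
    assert_eq!(id, 0);
    assert_eq!(next_offset.load(Ordering::Acquire), 1);
}
```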
+#[doc(hidden)] +#[derive(Debug)] +pub struct StaticContainer { + next_offset: AtomicUsize, +} + +impl StaticContainer { + /// Creates a new instance of [`StaticContainer`]. + #[inline] + #[must_use] + #[cfg(not(loom))] + pub const fn new() -> Self { + Self { + next_offset: AtomicUsize::new(0), + } + } + + /// Creates a new instance of [`StaticContainer`]. + /// + /// # Safety + /// Mustn't be used outside of loom tests, + /// since there is no guarantee that one [`BlazeMapId`](crate::prelude::BlazeMapId) + /// doesn't interact with different containers of the same type. + #[inline] + #[must_use] + #[cfg(loom)] + pub fn new() -> Self { + Self { + next_offset: AtomicUsize::new(0), + } + } + + /// Returns the next identifier. + #[inline] + pub fn next_id(&self) -> usize { + self.next_offset + .fetch_update(Ordering::Release, Ordering::Acquire, |next_id| { + next_id.checked_add(1) + }) + .expect("usize overflow") + } +} + +impl TypeInfoContainer for StaticContainer { + type OrigType = usize; + + #[inline] + fn capacity_info_provider(&self) -> impl Deref { + self + } + + #[inline] + fn key_by_offset_provider( + &self, + ) -> impl Deref> { + &KeyByOffsetProviderTrivial + } +} + +impl CapacityInfoProvider for StaticContainer { + #[inline] + fn offset_capacity(&self) -> usize { + self.next_offset.load(Ordering::Acquire) + } +} + +/// Zero-sized type that trivially implements [`KeyByOffsetProvider`]. +#[doc(hidden)] +#[repr(transparent)] +#[derive(Debug)] +pub struct KeyByOffsetProviderTrivial; + +impl KeyByOffsetProvider for KeyByOffsetProviderTrivial { + #[inline] + unsafe fn key_by_offset_unchecked(&self, offset: usize) -> impl Borrow { + offset + } +} diff --git a/src/utils.rs b/src/utils.rs new file mode 100644 index 0000000..a14bfba --- /dev/null +++ b/src/utils.rs @@ -0,0 +1,2 @@ +pub use offset_provider::OffsetProvider; +mod offset_provider; diff --git a/src/utils/offset_provider.rs b/src/utils/offset_provider.rs new file mode 100644 index 0000000..3e6acd5 --- /dev/null +++ b/src/utils/offset_provider.rs @@ -0,0 +1,70 @@ +use std::hash::Hash; +use std::num::NonZeroUsize; + +/// Holds and provides the `usize` offset. +/// +/// Necessary to protect the internal `usize`, which, in the absence of this wrapper, +/// would be public in the module calling +/// the [`define_key_wrapper`](crate::define_key_wrapper). +/// +/// Publicity of the internal `usize` may lead to: +/// * UB if the programmer of the downstream crate would accidentally mutate it. +/// * Incorrect auto-derives of standard traits such as `Default`, `Debug`, `Display`, +/// `PartialOrd`, `Ord`, `serde::Serialize` and `serde::Deserialize`. 
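The wrapper defined below comes in a plain `usize` form and a `NonZeroUsize` form that stores `offset + 1`. One plausible reason for the non-zero form, not spelled out in the source, is the niche optimization: an `Option` of a non-zero integer is the same size as the integer itself, so optional identifiers cost no extra space. A minimal sketch of that effect:

```rust
use std::mem::size_of;
use std::num::NonZeroUsize;

fn main() {
    // `NonZeroUsize` can never be 0, so `Option` reuses the zero bit pattern as `None`.
    assert_eq!(size_of::<Option<NonZeroUsize>>(), size_of::<usize>());
    // A plain `usize` has no such niche, so the `Option` needs a separate discriminant.
    assert_eq!(size_of::<Option<usize>>(), 2 * size_of::<usize>());

    // Storing `offset + 1` keeps offset 0 representable despite the non-zero invariant.
    let offset = 0usize;
    let stored = NonZeroUsize::new(offset + 1).unwrap();
    assert_eq!(stored.get() - 1, offset);
}
```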
+#[derive(Clone, Copy, Eq, PartialEq, Hash)] +#[repr(transparent)] +#[doc(hidden)] +#[allow(missing_debug_implementations)] +pub struct OffsetProvider(T); + +impl OffsetProvider { + #[inline] + #[must_use] + pub unsafe fn new(offset: usize) -> Self { + Self(offset) + } + + #[inline] + #[must_use] + pub fn into_offset(self) -> usize { + self.0 + } +} + +impl OffsetProvider { + #[inline] + #[must_use] + pub unsafe fn new(offset: usize) -> Self { + let inner = offset.checked_add(1).expect("usize overflow"); + Self(unsafe { NonZeroUsize::new_unchecked(inner) }) + } + + #[inline] + #[must_use] + pub fn into_offset(self) -> usize { + self.0.get() - 1 + } +} + +#[cfg(test)] +mod tests { + use std::fmt::{Debug, Display}; + use std::num::NonZeroUsize; + + #[cfg(feature = "serde")] + use serde::{Deserialize, Serialize}; + use static_assertions::assert_not_impl_any; + + use crate::utils::offset_provider::OffsetProvider; + + // These assertions are needed in order to prevent standard traits + // from being automatically derived for types + // generated by the [`define_key_wrapper`](crate::define_plain_id) macro. + assert_not_impl_any!(OffsetProvider: Default, Debug, Display, PartialOrd); + assert_not_impl_any!(OffsetProvider: Default, Debug, Display, PartialOrd); + + #[cfg(feature = "serde")] + assert_not_impl_any!(OffsetProvider: Serialize, Deserialize<'static>); + #[cfg(feature = "serde")] + assert_not_impl_any!(OffsetProvider: Serialize, Deserialize<'static>); +} diff --git a/tests/action.rs b/tests/action.rs new file mode 100644 index 0000000..9a0c14a --- /dev/null +++ b/tests/action.rs @@ -0,0 +1,646 @@ +#![cfg(feature = "serde")] +#![allow(renamed_and_removed_lints)] +#![allow(illegal_floating_point_literal_pattern)] +#![allow(missing_debug_implementations)] +#![allow(unreachable_pub)] +#![allow(clippy::missing_panics_doc)] +#![allow(clippy::too_many_lines)] +#![allow(clippy::enum_variant_names)] +#![allow(clippy::explicit_write)] +#![allow(clippy::module_name_repetitions)] + +use std::fmt::{Debug, Formatter, Write}; + +use rand::Rng; + +use blazemap::prelude::BlazeMap; +use blazemap::traits::{BlazeMapId, BlazeMapIdStatic}; + +#[derive(Debug)] +pub enum Action { + Clear, + ShrinkToFit, + Iter(Iter), + IterMut(IterMut), + Keys(Iter), + Values(Iter), + ValuesMut(IterMut), + Drain(IterMut), + ContainsKey { key: K }, + Get { key: K }, + GetMut { key: K }, + Insert { key: K, value: V }, + Remove { key: K }, + Entry { key: K, event: Entry }, + IntoKeys(IterMut), + IntoValues(IterMut), + IntoIter(IterMut), + Debug, + Serialize, +} + +macro_rules! 
process_iter_action { + ($log_suffix:ident, $rng:ident, $event:ident, $iterator:ident) => { + match $event { + Iter::Next => { + if let Some(v) = $iterator.next() { + let mut io = std::io::sink(); + write!(io, "{:?}", v).unwrap(); + } + } + Iter::Len => { + let _ = $iterator.len(); + } + Iter::Clone => $iterator = $iterator.clone(), + Iter::Debug => { + let mut io = std::io::sink(); + write!(io, "{:?}", $iterator).unwrap(); + } + } + while $iterator.len() != 0 { + let event = IterPeekWeights::new(&(), $rng).generate($rng); + writeln!(std::io::stdout(), "{} {event:?}", $log_suffix).unwrap(); + std::io::stdout().flush().unwrap(); + match event { + Iter::Next => { + if let Some(v) = $iterator.next() { + let mut io = std::io::sink(); + write!(io, "{:?}", v).unwrap(); + } + } + Iter::Len => { + let _ = $iterator.len(); + } + Iter::Clone => $iterator = $iterator.clone(), + Iter::Debug => { + let mut io = std::io::sink(); + write!(io, "{:?}", $iterator).unwrap(); + } + } + } + }; +} + +macro_rules! process_iter_mut_action { + ($log_suffix:ident, $rng:ident, $event:ident, $iterator:ident) => { + match $event { + IterMut::Next => { + if let Some(v) = $iterator.next() { + let mut io = std::io::sink(); + write!(io, "{:?}", v).unwrap(); + } + } + IterMut::Len => { + let _ = $iterator.len(); + } + IterMut::Debug => { + let mut io = std::io::sink(); + write!(io, "{:?}", $iterator).unwrap(); + } + } + while $iterator.len() != 0 { + let event = IterMutPeekWeights::new(&(), $rng).generate($rng); + writeln!(std::io::stdout(), "{} {event:?}", $log_suffix).unwrap(); + std::io::stdout().flush().unwrap(); + match event { + IterMut::Next => { + if let Some(v) = $iterator.next() { + let mut io = std::io::sink(); + write!(io, "{:?}", v).unwrap(); + } + } + IterMut::Len => { + let _ = $iterator.len(); + } + IterMut::Debug => { + let mut io = std::io::sink(); + write!(io, "{:?}", $iterator).unwrap(); + } + } + } + }; +} + +impl Action { + #[inline] + pub fn apply( + self, + log_suffix: &str, + rng: &mut impl Rng, + map: &mut BlazeMap, + key_to_id: impl FnOnce(String) -> I, + ) where + I: BlazeMapId + BlazeMapIdStatic + Debug, + { + use std::io::Write; + writeln!(std::io::stdout(), "{log_suffix} {self:?}").unwrap(); + std::io::stdout().flush().unwrap(); + match self { + Action::Clear => map.clear(), + Action::ShrinkToFit => map.shrink_to_fit(), + Action::Iter(event) => { + let mut iterator = map.iter(); + process_iter_action!(log_suffix, rng, event, iterator); + } + Action::IterMut(event) => { + let mut iterator = map.iter_mut(); + process_iter_mut_action!(log_suffix, rng, event, iterator); + } + Action::Keys(event) => { + let mut iterator = map.keys(); + process_iter_action!(log_suffix, rng, event, iterator); + } + Action::Values(event) => { + let mut iterator = map.values(); + process_iter_action!(log_suffix, rng, event, iterator); + } + Action::ValuesMut(event) => { + let mut iterator = map.values_mut(); + process_iter_mut_action!(log_suffix, rng, event, iterator); + } + Action::Drain(event) => { + let mut iterator = map.drain(); + process_iter_mut_action!(log_suffix, rng, event, iterator); + } + Action::ContainsKey { key } => { + let mut io = std::io::sink(); + write!(io, "{:?}", map.contains_key(key_to_id(key))).unwrap(); + } + Action::Get { key } => { + let mut io = std::io::sink(); + write!(io, "{:?}", map.get(key_to_id(key))).unwrap(); + } + Action::GetMut { key } => { + let mut io = std::io::sink(); + write!(io, "{:?}", map.get_mut(key_to_id(key))).unwrap(); + } + Action::Insert { key, value } => { + let mut 
io = std::io::sink(); + write!(io, "{:?}", map.insert(key_to_id(key), value)).unwrap(); + } + Action::Remove { key } => { + let mut io = std::io::sink(); + write!(io, "{:?}", map.remove(key_to_id(key))).unwrap(); + } + Action::Entry { key, event } => { + let mut io = std::io::sink(); + let entry = map.entry(key_to_id(key)); + match event { + Entry::OrInsert { value } => { + write!(io, "{}", entry.or_insert(value)).unwrap(); + } + Entry::OrInsertWith { default } => { + write!(io, "{}", entry.or_insert_with(default)).unwrap(); + } + Entry::Key => { + write!(io, "{:?}", entry.key()).unwrap(); + } + Entry::AndModify { f } => { + let _ = entry.and_modify(f); + } + Entry::OrDefault => { + write!(io, "{}", entry.or_default()).unwrap(); + } + Entry::EntryMatch(event) => match entry { + blazemap::collections::blazemap::Entry::Occupied(mut entry) => { + match event.on_occupied { + OccupiedEntry::Key => write!(io, "{:?}", entry.key()).unwrap(), + OccupiedEntry::RemoveEntry => { + write!(io, "{:?}", entry.remove_entry()).unwrap(); + } + OccupiedEntry::Get => write!(io, "{}", entry.get()).unwrap(), + OccupiedEntry::GetMut => write!(io, "{}", entry.get_mut()).unwrap(), + OccupiedEntry::IntoMut => { + write!(io, "{}", entry.into_mut()).unwrap(); + } + OccupiedEntry::Insert { value } => { + write!(io, "{}", entry.insert(value)).unwrap(); + } + OccupiedEntry::Remove => write!(io, "{}", entry.remove()).unwrap(), + } + } + blazemap::collections::blazemap::Entry::Vacant(entry) => { + match event.on_vacant { + VacantEntry::Key => write!(io, "{:?}", entry.key()).unwrap(), + VacantEntry::Insert { value } => { + write!(io, "{:?}", entry.insert(value)).unwrap(); + } + } + } + }, + } + } + Action::IntoKeys(event) => { + let old = std::mem::replace(map, BlazeMap::new()); + let mut iterator = old.into_keys(); + process_iter_mut_action!(log_suffix, rng, event, iterator); + } + Action::IntoValues(event) => { + let old = std::mem::replace(map, BlazeMap::new()); + let mut iterator = old.into_values(); + process_iter_mut_action!(log_suffix, rng, event, iterator); + } + Action::IntoIter(event) => { + let old = std::mem::replace(map, BlazeMap::new()); + let mut iterator = old.into_iter(); + process_iter_mut_action!(log_suffix, rng, event, iterator); + } + Action::Debug => { + let mut io = std::io::sink(); + write!(io, "{map:?}").unwrap(); + } + Action::Serialize => { + let mut io = std::io::sink(); + write!(io, "{}", serde_json::to_string(&map).unwrap()).unwrap(); + } + } + } +} + +#[inline] +fn generate_random_string(num_digits: u8, rng: &mut impl Rng) -> String { + const END: &str = " -----------------------------"; + let mut result = String::with_capacity(num_digits as usize + END.len()); + for _ in 0..num_digits { + result.write_char(rng.gen_range('0'..='9')).unwrap(); + } + result.write_str(END).unwrap(); + result +} + +#[derive(Debug, Clone)] +pub enum Iter { + Next, + Len, + Clone, + Debug, +} + +#[derive(Debug, Clone)] +pub enum IterMut { + Next, + Len, + Debug, +} + +pub enum Entry { + OrInsert { value: V }, + OrInsertWith { default: Box V> }, + Key, + AndModify { f: Box }, + OrDefault, + EntryMatch(EntryMatch), +} + +impl Debug for Entry { + #[inline] + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + #[derive(Debug)] + #[allow(dead_code)] + enum Helper { + OrInsert { value: V }, + OrInsertWith, + Key, + AndModify, + OrDefault, + EntryMatch(EntryMatch), + } + let res = match self { + Entry::OrInsert { value } => Helper::OrInsert { + value: value.clone(), + }, + Entry::OrInsertWith { .. 
} => Helper::OrInsertWith, + Entry::Key => Helper::Key, + Entry::AndModify { .. } => Helper::AndModify, + Entry::OrDefault => Helper::OrDefault, + Entry::EntryMatch(value) => Helper::EntryMatch(value.clone()), + }; + res.fmt(f) + } +} + +#[derive(Debug, Clone)] +pub struct EntryMatch { + on_occupied: OccupiedEntry, + on_vacant: VacantEntry, +} + +#[derive(Debug, Clone)] +pub enum OccupiedEntry { + Key, + RemoveEntry, + Get, + GetMut, + IntoMut, + Insert { value: V }, + Remove, +} + +#[derive(Debug, Clone)] +pub enum VacantEntry { + Key, + Insert { value: V }, +} + +pub trait EventWeights { + type Config; + type Event; + fn new(config: &Self::Config, rng: &mut impl Rng) -> Self; + fn generate(&self, rng: &mut impl Rng) -> Self::Event; +} + +pub struct ActionPeekWeights { + random_string_len: u8, +} + +struct IterPeekWeights; + +struct IterMutPeekWeights; + +struct EntryPeekWeights { + random_string_len: u8, +} + +struct OccupiedEntryPeekWeights { + random_string_len: u8, +} + +struct VacantEntryPeekWeights { + random_string_len: u8, +} + +impl ActionPeekWeights { + const CLEAR: f64 = 0.4; + const SHRINK_TO_FIT: f64 = 5.0; + const ITER: f64 = 10.0; + const ITER_MUT: f64 = 15.0; + const KEYS: f64 = 20.0; + const VALUES: f64 = 25.0; + const VALUES_MUT: f64 = 30.0; + const DRAIN: f64 = 31.0; + const CONTAINS_KEY: f64 = 40.0; + const GET: f64 = 50.0; + const GET_MUT: f64 = 60.0; + const INSERT: f64 = 70.0; + const REMOVE: f64 = 80.0; + const ENTRY: f64 = 100.0; + const INTO_KEYS: f64 = 101.0; + const INTO_VALUES: f64 = 102.0; + const INTO_ITER: f64 = 103.0; + const DEBUG: f64 = 120.0; + const SERIALIZE: f64 = 125.0; + + const MAX_WEIGHT: f64 = Self::SERIALIZE; +} + +impl EventWeights for ActionPeekWeights { + type Config = u8; + type Event = Action; + + #[inline] + fn new(random_string_len: &u8, _rng: &mut impl Rng) -> Self { + Self { + random_string_len: *random_string_len, + } + } + + #[inline] + fn generate(&self, rng: &mut impl Rng) -> Self::Event { + match rng.gen_range(0.0..Self::MAX_WEIGHT) { + ..=Self::CLEAR => Action::Clear, + ..=Self::SHRINK_TO_FIT => Action::ShrinkToFit, + ..=Self::ITER => Action::Iter(IterPeekWeights::new(&(), rng).generate(rng)), + ..=Self::ITER_MUT => Action::IterMut(IterMutPeekWeights::new(&(), rng).generate(rng)), + ..=Self::KEYS => Action::Keys(IterPeekWeights::new(&(), rng).generate(rng)), + ..=Self::VALUES => Action::Values(IterPeekWeights::new(&(), rng).generate(rng)), + ..=Self::VALUES_MUT => { + Action::ValuesMut(IterMutPeekWeights::new(&(), rng).generate(rng)) + } + ..=Self::DRAIN => Action::Drain(IterMutPeekWeights::new(&(), rng).generate(rng)), + ..=Self::CONTAINS_KEY => { + let key = generate_random_string(self.random_string_len, rng); + Action::ContainsKey { key } + } + ..=Self::GET => { + let key = generate_random_string(self.random_string_len, rng); + Action::Get { key } + } + ..=Self::GET_MUT => { + let key = generate_random_string(self.random_string_len, rng); + Action::GetMut { key } + } + ..=Self::INSERT => { + let key = generate_random_string(self.random_string_len, rng); + let value = generate_random_string(self.random_string_len, rng); + Action::Insert { key, value } + } + ..=Self::REMOVE => { + let key = generate_random_string(self.random_string_len, rng); + Action::Remove { key } + } + ..=Self::ENTRY => { + let key = generate_random_string(self.random_string_len, rng); + Action::Entry { + key, + event: EntryPeekWeights::new(&self.random_string_len, rng).generate(rng), + } + } + ..=Self::INTO_KEYS => 
Action::IntoKeys(IterMutPeekWeights::new(&(), rng).generate(rng)), + ..=Self::INTO_VALUES => { + Action::IntoValues(IterMutPeekWeights::new(&(), rng).generate(rng)) + } + ..=Self::INTO_ITER => Action::IntoIter(IterMutPeekWeights::new(&(), rng).generate(rng)), + ..=Self::DEBUG => Action::Debug, + ..=Self::SERIALIZE => Action::Serialize, + value => unreachable!("`{}` isn't in range", value), + } + } +} + +impl IterPeekWeights { + const NEXT: f64 = 10.0; + const LEN: f64 = 10.5; + const CLONE: f64 = 11.0; + const DEBUG: f64 = 11.5; + + const MAX_WEIGHT: f64 = Self::DEBUG; +} + +impl EventWeights for IterPeekWeights { + type Config = (); + type Event = Iter; + + #[inline] + fn new(_config: &Self::Config, _rng: &mut impl Rng) -> Self { + Self + } + + #[inline] + fn generate(&self, rng: &mut impl Rng) -> Self::Event { + match rng.gen_range(0.0..Self::MAX_WEIGHT) { + ..=Self::NEXT => Iter::Next, + ..=Self::LEN => Iter::Len, + ..=Self::CLONE => Iter::Clone, + ..=Self::DEBUG => Iter::Debug, + value => unreachable!("`{}` isn't in range", value), + } + } +} + +impl IterMutPeekWeights { + const NEXT: f64 = 10.0; + const LEN: f64 = 10.5; + const DEBUG: f64 = 11.0; + + const MAX_WEIGHT: f64 = Self::DEBUG; +} + +impl EventWeights for IterMutPeekWeights { + type Config = (); + type Event = IterMut; + + #[inline] + fn new(_config: &Self::Config, _rng: &mut impl Rng) -> Self { + Self + } + + #[inline] + fn generate(&self, rng: &mut impl Rng) -> Self::Event { + match rng.gen_range(0.0..Self::MAX_WEIGHT) { + ..=Self::NEXT => IterMut::Next, + ..=Self::LEN => IterMut::Len, + ..=Self::DEBUG => IterMut::Debug, + value => unreachable!("`{}` isn't in range", value), + } + } +} + +impl EntryPeekWeights { + const OR_INSERT: f64 = 1.0; + const OR_INSERT_WITH: f64 = 1.5; + const KEY: f64 = 5.0; + const AND_MODIFY: f64 = 7.0; + const OR_DEFAULT: f64 = 7.5; + const ENTRY_MATCH: f64 = 9.0; + + const MAX_WEIGHT: f64 = Self::ENTRY_MATCH; +} + +impl EventWeights for EntryPeekWeights { + type Config = u8; + type Event = Entry; + + #[inline] + fn new(random_string_len: &u8, _rng: &mut impl Rng) -> Self { + Self { + random_string_len: *random_string_len, + } + } + + #[inline] + fn generate(&self, rng: &mut impl Rng) -> Self::Event { + match rng.gen_range(0.0..Self::MAX_WEIGHT) { + ..=Self::OR_INSERT => Entry::OrInsert { + value: generate_random_string(self.random_string_len, rng), + }, + ..=Self::OR_INSERT_WITH => { + let random_string = generate_random_string(self.random_string_len, rng); + Entry::OrInsertWith { + default: Box::new(move || random_string), + } + } + ..=Self::KEY => Entry::Key, + ..=Self::AND_MODIFY => { + let random_string = generate_random_string(self.random_string_len, rng); + Entry::AndModify { + f: Box::new(move |v| { + let _ = std::mem::replace(v, random_string); + }), + } + } + ..=Self::OR_DEFAULT => Entry::OrDefault, + ..=Self::ENTRY_MATCH => { + let entry = EntryMatch { + on_occupied: OccupiedEntryPeekWeights::new(&self.random_string_len, rng) + .generate(rng), + on_vacant: VacantEntryPeekWeights::new(&self.random_string_len, rng) + .generate(rng), + }; + Entry::EntryMatch(entry) + } + value => unreachable!("`{}` isn't in range", value), + } + } +} + +impl OccupiedEntryPeekWeights { + const KEY: f64 = 1.0; + const REMOVE_ENTRY: f64 = 1.5; + const GET: f64 = 3.0; + const GET_MUT: f64 = 4.0; + const INTO_MUT: f64 = 4.5; + const INSERT: f64 = 5.5; + const REMOVE: f64 = 6.0; + + const MAX_WEIGHT: f64 = Self::REMOVE; +} + +impl EventWeights for OccupiedEntryPeekWeights { + type Config = u8; + type Event = 
OccupiedEntry; + + #[inline] + fn new(random_string_len: &u8, _rng: &mut impl Rng) -> Self { + Self { + random_string_len: *random_string_len, + } + } + + #[inline] + fn generate(&self, rng: &mut impl Rng) -> Self::Event { + match rng.gen_range(0.0..Self::MAX_WEIGHT) { + ..=Self::KEY => OccupiedEntry::Key, + ..=Self::REMOVE_ENTRY => OccupiedEntry::RemoveEntry, + ..=Self::GET => OccupiedEntry::Get, + ..=Self::GET_MUT => OccupiedEntry::GetMut, + ..=Self::INTO_MUT => OccupiedEntry::IntoMut, + ..=Self::INSERT => { + let value = generate_random_string(self.random_string_len, rng); + OccupiedEntry::Insert { value } + } + ..=Self::REMOVE => OccupiedEntry::Remove, + value => unreachable!("`{}` isn't in range", value), + } + } +} + +impl VacantEntryPeekWeights { + const KEY: f64 = 0.5; + const INSERT: f64 = 1.5; + + const MAX_WEIGHT: f64 = Self::INSERT; +} + +impl EventWeights for VacantEntryPeekWeights { + type Config = u8; + type Event = VacantEntry; + + #[inline] + fn new(random_string_len: &u8, _rng: &mut impl Rng) -> Self { + Self { + random_string_len: *random_string_len, + } + } + + #[inline] + fn generate(&self, rng: &mut impl Rng) -> Self::Event { + match rng.gen_range(0.0..Self::MAX_WEIGHT) { + ..=Self::KEY => VacantEntry::Key, + ..=Self::INSERT => { + let value = generate_random_string(self.random_string_len, rng); + VacantEntry::Insert { value } + } + value => unreachable!("`{}` isn't in range", value), + } + } +} diff --git a/tests/loom.rs b/tests/loom.rs new file mode 100644 index 0000000..40cda7a --- /dev/null +++ b/tests/loom.rs @@ -0,0 +1,330 @@ +#![cfg(loom)] + +use blazemap::sync::RwLock; +use loom::sync::Arc; +use std::string::ToString; + +use loom::thread; + +use blazemap::loom::TestableId; +use blazemap::prelude::BlazeMapIdWrapper; +use blazemap::traits::{CapacityInfoProvider, TypeInfoContainer}; +use blazemap::{define_key_wrapper, define_key_wrapper_bounded, define_plain_id}; + +fn run_model(f: F) +where + F: Fn() + Sync + Send + 'static, +{ + use std::{ + io::Write, + sync::{ + atomic::{AtomicU32, Ordering}, + Arc, + }, + }; + + let iters = Arc::new(AtomicU32::new(0)); + let iters1 = iters.clone(); + + loom::model(move || { + iters.fetch_add(1, Ordering::Relaxed); + f(); + }); + + let iters = iters1.load(Ordering::Relaxed); + #[allow(clippy::explicit_write)] // print even when stdout is captured + write!(std::io::stdout(), "[{iters} iters] ").unwrap(); +} + +const STRING_0: &str = "0 zero"; +const STRING_1: &str = "1 first"; +const STRING_2: &str = "2 second"; + +static LAZY_STRING_0: once_cell::sync::Lazy = + once_cell::sync::Lazy::new(|| STRING_0.to_string()); +static LAZY_STRING_1: once_cell::sync::Lazy = + once_cell::sync::Lazy::new(|| STRING_1.to_string()); +static LAZY_STRING_2: once_cell::sync::Lazy = + once_cell::sync::Lazy::new(|| STRING_2.to_string()); + +#[test] +fn key_wrapper_cmp() { + define_key_wrapper! 
{ + struct Id(String) + } + run_model(|| { + use blazemap::type_info_containers::key_wrapper::StaticContainer; + + let type_info_container = Arc::new(RwLock::new(StaticContainer::new())); + let key_0 = Arc::new(unsafe { Id::new(&type_info_container, LAZY_STRING_0.clone()) }); + + let type_info_container_clone = type_info_container.clone(); + let key_0_clone = key_0.clone(); + let t1 = thread::spawn(move || { + let key_1 = unsafe { Id::new(&type_info_container_clone, LAZY_STRING_1.clone()) }; + let key_1 = TestableId::new(key_1, &type_info_container_clone); + let key_0 = TestableId::new(*key_0_clone, &type_info_container_clone); + assert!(key_1 > key_0) + }); + + let type_info_container_clone = type_info_container.clone(); + let key_0_clone = key_0.clone(); + let t2 = thread::spawn(move || { + let key_2 = unsafe { Id::new(&type_info_container_clone, LAZY_STRING_2.clone()) }; + let key_2 = TestableId::new(key_2, &type_info_container_clone); + let key_0 = TestableId::new(*key_0_clone, &type_info_container_clone); + assert!(key_2 > key_0) + }); + + t1.join().unwrap(); + t2.join().unwrap(); + assert_eq!( + type_info_container + .capacity_info_provider() + .offset_capacity(), + 3 + ); + }); +} + +#[test] +fn key_wrapper_all_instances_iter() { + define_key_wrapper! { + struct Id(String) + } + run_model(|| { + use blazemap::type_info_containers::key_wrapper::StaticContainer; + + let type_info_container = Arc::new(RwLock::new(StaticContainer::new())); + let _key_0 = unsafe { Id::new(&type_info_container, LAZY_STRING_0.clone()) }; + + let type_info_container_clone = type_info_container.clone(); + let t1 = thread::spawn(move || { + let key_1 = unsafe { Id::new(&type_info_container_clone, LAZY_STRING_1.clone()) }; + let key_1 = TestableId::new(key_1, &type_info_container_clone); + let mut num_iters = 0; + for instance in key_1.all_instances_iter() { + let instance = TestableId::new(instance, &type_info_container_clone); + num_iters += 1; + let _ = instance > key_1; + let _ = instance == key_1; + } + assert!(num_iters >= 1); + }); + + let type_info_container_clone = type_info_container.clone(); + let t2 = thread::spawn(move || { + let key_2 = unsafe { Id::new(&type_info_container_clone, LAZY_STRING_2.clone()) }; + let key_2 = TestableId::new(key_2, &type_info_container_clone); + let mut num_iters = 0; + for instance in key_2.all_instances_iter() { + let instance = TestableId::new(instance, &type_info_container_clone); + num_iters += 1; + let _ = instance > key_2; + let _ = instance == key_2; + } + assert!(num_iters >= 1); + }); + + t1.join().unwrap(); + t2.join().unwrap(); + assert_eq!( + type_info_container + .capacity_info_provider() + .offset_capacity(), + 3 + ); + }); +} + +#[test] +fn key_wrapper_bounded_cmp() { + define_key_wrapper_bounded! 
{ + struct Id(String); + MAX_CAP = 3 + } + run_model(|| { + use blazemap::type_info_containers::key_wrapper_bounded::StaticContainer; + + let type_info_container = Arc::new(StaticContainer::new()); + let key_0 = Arc::new(unsafe { Id::new(&type_info_container, LAZY_STRING_0.clone()) }); + + let type_info_container_clone = type_info_container.clone(); + let key_0_clone = key_0.clone(); + let t1 = thread::spawn(move || { + let key_1 = unsafe { Id::new(&type_info_container_clone, LAZY_STRING_1.clone()) }; + let key_1 = TestableId::new(key_1, &type_info_container_clone); + let key_0 = TestableId::new(*key_0_clone, &type_info_container_clone); + assert!(key_1 > key_0) + }); + + let type_info_container_clone = type_info_container.clone(); + let key_0_clone = key_0.clone(); + let t2 = thread::spawn(move || { + let key_2 = unsafe { Id::new(&type_info_container_clone, LAZY_STRING_2.clone()) }; + let key_2 = TestableId::new(key_2, &type_info_container_clone); + let key_0 = TestableId::new(*key_0_clone, &type_info_container_clone); + assert!(key_2 > key_0) + }); + + t1.join().unwrap(); + t2.join().unwrap(); + assert_eq!( + type_info_container + .capacity_info_provider() + .offset_capacity(), + 3 + ); + }); +} + +#[test] +fn key_wrapper_bounded_all_instances_iter() { + define_key_wrapper_bounded! { + struct Id(String); + MAX_CAP = 3 + } + run_model(|| { + use blazemap::type_info_containers::key_wrapper_bounded::StaticContainer; + + let type_info_container = Arc::new(StaticContainer::new()); + let _key_0 = unsafe { Id::new(&type_info_container, LAZY_STRING_0.clone()) }; + + let type_info_container_clone = type_info_container.clone(); + let t1 = thread::spawn(move || { + let key_1 = unsafe { Id::new(&type_info_container_clone, LAZY_STRING_1.clone()) }; + let key_1 = TestableId::new(key_1, &type_info_container_clone); + let mut num_iters = 0; + for instance in key_1.all_instances_iter() { + let instance = TestableId::new(instance, &type_info_container_clone); + if instance == key_1 { + // Skip this case as it may cause an RwLock deadlock due to multiple reads + // from the current thread, which cannot happen in the prod stage. + continue; + } + num_iters += 1; + let _ = instance > key_1; + let _ = instance == key_1; + } + assert!(num_iters >= 1); + }); + + let type_info_container_clone = type_info_container.clone(); + let t2 = thread::spawn(move || { + let key_2 = unsafe { Id::new(&type_info_container_clone, LAZY_STRING_2.clone()) }; + let key_2 = TestableId::new(key_2, &type_info_container_clone); + let mut num_iters = 0; + for instance in key_2.all_instances_iter() { + let instance = TestableId::new(instance, &type_info_container_clone); + if instance == key_2 { + // Skip this case as it may cause an RwLock deadlock due to multiple reads + // from the current thread, which cannot happen in the prod stage. + continue; + } + num_iters += 1; + let _ = instance > key_2; + let _ = instance == key_2; + } + assert!(num_iters >= 1); + }); + + t1.join().unwrap(); + t2.join().unwrap(); + assert_eq!( + type_info_container + .capacity_info_provider() + .offset_capacity(), + 3 + ); + }); +} + +#[test] +fn plain_id_cmp() { + define_plain_id! 
{ + struct Id + } + run_model(|| { + use blazemap::type_info_containers::plain_id::StaticContainer; + + let type_info_container = Arc::new(StaticContainer::new()); + let key_0 = Arc::new(Id::new(&type_info_container)); + + let type_info_container_clone = type_info_container.clone(); + let key_0_clone = key_0.clone(); + let t1 = thread::spawn(move || { + let key_1 = Id::new(&type_info_container_clone); + let key_1 = TestableId::new(key_1, &type_info_container_clone); + let key_0 = TestableId::new(*key_0_clone, &type_info_container_clone); + assert!(key_1 > key_0) + }); + + let type_info_container_clone = type_info_container.clone(); + let key_0_clone = key_0.clone(); + let t2 = thread::spawn(move || { + let key_2 = Id::new(&type_info_container_clone); + let key_2 = TestableId::new(key_2, &type_info_container_clone); + let key_0 = TestableId::new(*key_0_clone, &type_info_container_clone); + assert!(key_2 > key_0) + }); + + t1.join().unwrap(); + t2.join().unwrap(); + assert_eq!( + type_info_container + .capacity_info_provider() + .offset_capacity(), + 3 + ); + }); +} + +#[test] +fn plain_id_all_instances_iter() { + define_plain_id! { + struct Id + } + run_model(|| { + use blazemap::type_info_containers::plain_id::StaticContainer; + + let type_info_container = Arc::new(StaticContainer::new()); + let _key_0 = Id::new(&type_info_container); + + let type_info_container_clone = type_info_container.clone(); + let t1 = thread::spawn(move || { + let key_1 = Id::new(&type_info_container_clone); + let key_1 = TestableId::new(key_1, &type_info_container_clone); + let mut num_iters = 0; + for instance in key_1.all_instances_iter() { + let instance = TestableId::new(instance, &type_info_container_clone); + num_iters += 1; + let _ = instance > key_1; + let _ = instance == key_1; + } + assert!(num_iters >= 1); + }); + + let type_info_container_clone = type_info_container.clone(); + let t2 = thread::spawn(move || { + let key_2 = Id::new(&type_info_container_clone); + let key_2 = TestableId::new(key_2, &type_info_container_clone); + let mut num_iters = 0; + for instance in key_2.all_instances_iter() { + let instance = TestableId::new(instance, &type_info_container_clone); + num_iters += 1; + let _ = instance > key_2; + let _ = instance == key_2; + } + assert!(num_iters >= 1); + }); + + t1.join().unwrap(); + t2.join().unwrap(); + assert_eq!( + type_info_container + .capacity_info_provider() + .offset_capacity(), + 3 + ); + }); +} diff --git a/tests/loom.sh b/tests/loom.sh new file mode 100755 index 0000000..66f10ae --- /dev/null +++ b/tests/loom.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +# Runs loom tests with defaults for loom's configuration values. +# +# The tests are compiled in release mode to improve performance, but debug +# assertions are enabled. +# +# Any arguments to this script are passed to the `cargo test` invocation. 
+ +# Useful: +# LOOM_LOG=debug +# LOOM_CHECKPOINT_FILE=target/loom-checkpoint.json + +time RUSTFLAGS="${RUSTFLAGS} --cfg loom -C debug-assertions" \ + RUST_BACKTRACE=full \ + LOOM_LOCATION=1 \ + cargo test --release --test loom "$@" -- --nocapture \ No newline at end of file diff --git a/tests/miri.rs b/tests/miri.rs new file mode 100644 index 0000000..f5f7d2e --- /dev/null +++ b/tests/miri.rs @@ -0,0 +1,131 @@ +#![cfg(feature = "serde")] +#![allow(clippy::explicit_write)] +#![allow(clippy::explicit_counter_loop)] + +use std::io::Write; + +use rand::prelude::StdRng; +use rand::{random, Rng, SeedableRng}; + +use blazemap::prelude::BlazeMap; +use blazemap::{define_key_wrapper, define_key_wrapper_bounded}; + +use crate::action::{ActionPeekWeights, EventWeights}; + +mod action; + +#[test] +fn key_wrapper() { + define_key_wrapper! { + struct Id(String); + Derive(as for Original Type): { + Default, + Debug, + Display, + Ord, + Serialize + } + } + let seed: u64 = random(); + writeln!(std::io::stdout(), "`key_wrapper` random seed: {seed}").unwrap(); + std::io::stdout().flush().unwrap(); + let mut rng = StdRng::seed_from_u64(seed); + + let mut input_combinations = Vec::with_capacity(300); + for num_random_digits in 1..3 { + for _ in 0..100 { + let num_actions: usize = rng.gen_range(0..10_000); + let seed: u64 = rng.gen_range(0..=u64::MAX); + input_combinations.push((num_random_digits, num_actions, seed)); + } + } + + #[allow(unused_variables)] + let mut i = 0; + input_combinations + .iter() + .copied() + .for_each(|(num_random_digits, num_actions, seed)| { + i += 1; + let mut rng = StdRng::seed_from_u64(seed); + let mut map = BlazeMap::::new(); + #[allow(unused_variables)] + let mut j = 0; + for _ in 1..=num_actions { + #[cfg(miri)] + if j % 100 == 1 { + writeln!( + std::io::stdout(), + "`key_wrapper` epoch: [{i}/{combs}], action_iter: [{j}/{num_actions}]", + combs = input_combinations.len() + ) + .unwrap(); + std::io::stdout().flush().unwrap(); + } + j += 1; + let action = + ActionPeekWeights::new(&num_random_digits, &mut rng).generate(&mut rng); + action.apply("key_wrapper", &mut rng, &mut map, Id::new); + } + }); +} + +#[test] +fn key_wrapper_bounded() { + define_key_wrapper_bounded! 
{ + struct Id(String); + MAX_CAP = 10_000; + Derive(as for Original Type): { + Default, + Debug, + Display, + Ord, + Serialize + } + } + let seed: u64 = random(); + writeln!( + std::io::stdout(), + "`key_wrapper_bounded` random seed: {seed}" + ) + .unwrap(); + std::io::stdout().flush().unwrap(); + let mut rng = StdRng::seed_from_u64(seed); + + let mut input_combinations = Vec::with_capacity(300); + for num_random_digits in 1..3 { + for _ in 0..100 { + let num_actions: usize = rng.gen_range(0..10_000); + let seed: u64 = rng.gen_range(0..=u64::MAX); + input_combinations.push((num_random_digits, num_actions, seed)); + } + } + + #[allow(unused_variables)] + let mut i = 0; + input_combinations + .iter() + .copied() + .for_each(|(num_random_digits, num_actions, seed)| { + i += 1; + let mut rng = StdRng::seed_from_u64(seed); + let mut map = BlazeMap::::new(); + #[allow(unused_variables)] + let mut j = 0; + for _ in 1..=num_actions { + #[cfg(miri)] + if j % 100 == 1 { + writeln!( + std::io::stdout(), + "`key_wrapper_bounded` epoch: [{i}/{combs}], action_iter: [{j}/{num_actions}]", + combs = input_combinations.len() + ).unwrap(); + std::io::stdout().flush().unwrap(); + } + j += 1; + let action = + ActionPeekWeights::new(&num_random_digits, &mut rng).generate(&mut rng); + action.apply("key_wrapper_bounded", &mut rng, &mut map, Id::new); + } + }); +}
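For context, a minimal sketch (not part of the test suite) of the serde round-trip behavior these randomized tests rely on: a `define_key_wrapper!` type with `Serialize`/`Deserialize` derived as for the original type is expected to serialize as the registered original key rather than as its internal offset. The sketch assumes the `serde` feature is enabled and uses the `serde_json` dev-dependency; `Id` is a local illustrative type.

```rust
use blazemap::define_key_wrapper;

define_key_wrapper! {
    struct Id(String);
    Derive(as for Original Type): {
        Debug,
        Serialize,
        Deserialize,
    }
}

fn main() {
    let id = Id::new("alpha".to_string());

    // Serialization goes through the registered original key, not the offset.
    let json = serde_json::to_string(&id).unwrap();
    assert_eq!(json, r#""alpha""#);

    // Deserializing the same key resolves to the same offset, so the ids compare equal.
    let back: Id = serde_json::from_str(&json).unwrap();
    assert_eq!(back, id);
}
```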