zng_unique_id/lib.rs

#![doc(html_favicon_url = "https://zng-ui.github.io/res/zng-logo-icon.png")]
#![doc(html_logo_url = "https://zng-ui.github.io/res/zng-logo.png")]
//!
//! Macros for generating unique ID types.
//!
//! # Crate
//!
#![doc = include_str!(concat!("../", std::env!("CARGO_PKG_README")))]
#![warn(unused_extern_crates)]
#![warn(missing_docs)]

use std::{
    hash::{BuildHasher, Hash, Hasher},
    num::{NonZeroU32, NonZeroU64},
    ops,
    sync::atomic::{AtomicU32, Ordering},
};

use rayon::iter::{FromParallelIterator, IntoParallelIterator, IntoParallelRefIterator, IntoParallelRefMutIterator};

#[doc(hidden)]
#[cfg(target_has_atomic = "64")]
pub use std::sync::atomic::AtomicU64;

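// Fallback for targets without native 64-bit atomics: emulate the one
// operation the ID factory needs (`fetch_add`) with a mutex-guarded `u64`.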
#[doc(hidden)]
#[cfg(not(target_has_atomic = "64"))]
pub struct AtomicU64(parking_lot::Mutex<u64>);
#[cfg(not(target_has_atomic = "64"))]
impl AtomicU64 {
    pub const fn new(u: u64) -> Self {
        Self(parking_lot::Mutex::new(u))
    }

    fn fetch_add(&self, u: u64, _: Ordering) -> u64 {
        let mut a = self.0.lock();
        let r = *a;
        *a += u;
        r
    }
}

#[cfg(feature = "named")]
mod named;

#[doc(hidden)]
pub mod hot_reload;

pub use hot_reload::lazy_static_init;

#[cfg(feature = "named")]
pub use named::*;

#[doc(hidden)]
pub use pastey::paste;

/// Declares a new unique ID type that is backed by a `NonZeroU32`.
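///
/// # Examples
///
/// A minimal usage sketch; the `FooId` name is illustrative:
///
/// ```
/// use zng_unique_id::*;
///
/// unique_id_32! {
///     /// Example ID type.
///     pub struct FooId;
/// }
///
/// fn main() {
///     let a = FooId::new_unique();
///     let b = FooId::new_unique();
///     // every call generates a distinct ID.
///     assert!(a != b);
///     // the raw value round-trips through `get` and `from_raw`.
///     assert!(a == FooId::from_raw(a.get()));
/// }
/// ```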
#[macro_export]
macro_rules! unique_id_32 {
    ($(#[$attrs:meta])* $vis:vis struct $Type:ident $(< $T:ident $(:($($bounds:tt)+))? >)? $(: $ParentId:path)? ;) => {
       $crate::unique_id! {
            request {
                $(#[$attrs])*
                ///
                /// # Memory
                ///
                /// The internal number is a [`NonZeroU32`], that means that
                #[doc=concat!("`Option<", stringify!($Type), ">`")]
                /// and
                #[doc=concat!("`", stringify!($Type), "`")]
                /// are the same size as `u32`.
                ///
                /// # As Hash
                ///
                /// The generated internal number has good statistical distribution and can be used as its own hash,
                /// although it is not cryptographically safe, as it is simply a sequential counter scrambled using a modified
                /// `splitmix64`.
                ///
                /// [`NonZeroU32`]: std::num::NonZeroU32
                ///
                /// # Static
                ///
                /// The unique ID cannot be generated at compile time, but you can use the `static_id!` macro to declare
                /// a lazy static that instantiates the ID.
                ///
                /// # Exhaustion Handling
                ///
                /// If more IDs are generated than `u32::MAX` an error is logged, the internal counter is reset, and IDs are reused.
                $vis struct $Type $(< $T $(:($($bounds)+))? >)? $(: $ParentId)? ;
            }
            non_zero {
                std::num::NonZeroU32
            }
            atomic {
                std::sync::atomic::AtomicU32
            }
            next_id {
                $crate::next_id32
            }
            literal {
                u32
            }
            to_hash {
                $crate::hash32
            }
            to_sequential {
                $crate::un_hash32
            }
       }
    }
}

/// Declares a new unique ID type that is backed by a `NonZeroU64`.
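///
/// # Examples
///
/// A minimal usage sketch; the `LogId` name is illustrative:
///
/// ```
/// use zng_unique_id::*;
///
/// unique_id_64! {
///     /// Example ID type.
///     pub struct LogId;
/// }
///
/// fn main() {
///     let a = LogId::new_unique();
///     let b = LogId::new_unique();
///     // IDs generated earlier on the same thread have smaller sequential numbers.
///     assert!(a.sequential() < b.sequential());
/// }
/// ```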
#[macro_export]
macro_rules! unique_id_64 {
    ($(#[$attrs:meta])* $vis:vis struct $Type:ident $(< $T:ident $(:($($bounds:tt)+))? >)? $(: $ParentId:path)? ;) => {
        $crate::unique_id! {
            request {
                $(#[$attrs])*
                ///
                /// # Memory
                ///
                /// The internal number is a [`NonZeroU64`], that means that
                #[doc=concat!("`Option<", stringify!($Type), ">`")]
                /// and
                #[doc=concat!("`", stringify!($Type), "`")]
                /// are the same size as `u64`.
                ///
                /// # As Hash
                ///
                /// The generated internal number has good statistical distribution and can be used as its own hash,
                /// although it is not cryptographically safe, as it is simply a sequential counter scrambled using `splitmix64`.
                ///
                /// [`NonZeroU64`]: std::num::NonZeroU64
                ///
                /// # Static
                ///
                /// The unique ID cannot be generated at compile time, but you can use the `static_id!` macro to declare
                /// a lazy static that instantiates the ID.
                ///
                /// # Exhaustion Handling
                ///
                /// If more IDs are generated than `u64::MAX` an error is logged, the internal counter is reset, and IDs are reused.
                $vis struct $Type $(< $T $(:($($bounds)+))? >)? $(: $ParentId)? ;
            }
            non_zero {
                std::num::NonZeroU64
            }
            atomic {
                $crate::AtomicU64
            }
            next_id {
                $crate::next_id64
            }
            literal {
                u64
            }
            to_hash {
                $crate::splitmix64
            }
            to_sequential {
                $crate::un_splitmix64
            }
        }
    };
}

/// Implements the [`bytemuck`] traits for the unique ID.
///
/// [`bytemuck`]: https://docs.rs/bytemuck/
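///
/// # Examples
///
/// A usage sketch; it assumes the calling crate also depends on `bytemuck`, so
/// it is not compiled here:
///
/// ```ignore
/// unique_id_64! {
///     /// Example ID type.
///     pub struct TextureId;
/// }
/// impl_unique_id_bytemuck!(TextureId);
/// ```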
#[macro_export]
macro_rules! impl_unique_id_bytemuck {
    ($Type:ident $(< $T:ident $(:($($bounds:tt)+))? >)?) => {
        // SAFETY: $Type is a transparent wrapper over a std non-zero integer.
        unsafe impl$(<$T $(: $($bounds)+)?>)? bytemuck::NoUninit for $Type $(<$T>)? { }
        unsafe impl$(<$T $(: $($bounds)+)?>)? bytemuck::ZeroableInOption for $Type $(<$T>)? { }
        unsafe impl$(<$T $(: $($bounds)+)?>)? bytemuck::PodInOption for $Type $(<$T>)? { }
    }
}

#[doc(hidden)]
#[macro_export]
macro_rules! unique_id {
    (
        request {
            $(#[$attrs:meta])* $vis:vis struct $Type:ident $(< $T:ident $(:($($bounds:tt)+))? >)? $(: $ParentId:path)? ;
        }
        non_zero {
            $non_zero:path
        }
        atomic {
            $atomic:path
        }
        next_id {
            $next_id:path
        }
        literal {
            $lit:ident
        }
        to_hash {
            $to_hash:path
        }
        to_sequential {
            $to_sequential:path
        }
    ) => {

        $(#[$attrs])*
        #[repr(transparent)]
        $vis struct $Type $(<$T $(: $($bounds)+)?>)? ($non_zero $(, std::marker::PhantomData<$T>)?);

        impl$(<$T $(: $($bounds)+)?>)? Clone for $Type $(<$T>)? {
            fn clone(&self) -> Self {
                *self
            }
        }
        impl$(<$T $(: $($bounds)+)?>)? Copy for $Type $(<$T>)? {
        }
        impl$(<$T $(: $($bounds)+)?>)? PartialEq for $Type $(<$T>)? {
            fn eq(&self, other: &Self) -> bool {
                self.0 == other.0
            }
        }
        impl$(<$T $(: $($bounds)+)?>)? Eq for $Type $(<$T>)? {
        }
        impl$(<$T $(: $($bounds)+)?>)? std::hash::Hash for $Type $(<$T>)? {
            fn hash<H>(&self, state: &mut H)
            where
                H: std::hash::Hasher
            {
                std::hash::Hash::hash(&self.0, state)
            }
        }
        impl$(<$T $(: $($bounds)+)?>)? $crate::UniqueId for $Type $(<$T>)? {
            fn new_unique() -> Self {
                Self::new_unique()
            }
        }

        #[allow(dead_code)]
        impl$(<$T $(: $($bounds)+)?>)? $Type $(<$T>)? {
            $crate::unique_id! {
                new_unique {
                    $($ParentId, )? $(<$T>)?
                }
                atomic {
                    $atomic
                }
                next_id {
                    $next_id
                }
            }

            /// Retrieve the underlying value.
            pub fn get(self) -> $lit {
                self.0.get()
            }

            /// Un-scramble the underlying value to get the original sequential count number.
            ///
            /// If two IDs, `id0` and `id1`, were generated by the same thread then `id0.sequential() < id1.sequential()`.
            pub fn sequential(self) -> $lit {
                $to_sequential(self.0.get())
            }

            /// Creates an ID from a raw value.
            ///
            /// The value must not be zero; this function panics if it is. The value must have been provided by [`get`],
            /// otherwise the ID will not be unique.
            ///
            /// [`get`]: Self::get
            pub fn from_raw(raw: $lit) -> Self {
                use $non_zero as __non_zero;

                Self(__non_zero::new(raw).unwrap() $(, std::marker::PhantomData::<$T>)?)
            }

            /// Creates an ID from a [`sequential`] number.
            ///
            /// # Safety
            ///
            /// The value must not be zero; this function panics if it is. The value must have been provided by [`sequential`],
            /// otherwise the ID will not be unique.
            ///
            /// [`sequential`]: Self::sequential
            pub fn from_sequential(num: $lit) -> Self {
                use $non_zero as __non_zero;

                Self(__non_zero::new($to_hash(num)).unwrap() $(, std::marker::PhantomData::<$T>)?)
            }
        }
    };

    (
        new_unique {
            $ParentId:path, $(<$T:ident>)?
        }
        atomic {
            $atomic:path
        }
        next_id {
            $next_id:path
        }
    ) => {
        /// Generates a new unique ID.
        pub fn new_unique() -> Self {
            use $ParentId as __parent;
            let id = __parent $(::<$T>)? ::new_unique().get();
            Self::from_raw(id)
        }
    };

    (
        new_unique {
            $(<$T:ident>)?
        }
        atomic {
            $atomic:path
        }
        next_id {
            $next_id:path
        }
    ) => {
        /// Generates a new unique ID.
        pub fn new_unique() -> Self {
            use $atomic as __atomic;

            $crate::hot_static! {
                static NEXT: __atomic = __atomic::new(1);
            }
            let __ref = $crate::hot_static_ref!(NEXT);
            Self($next_id(__ref) $(, std::marker::PhantomData::<$T>)?)
        }
    };
}

#[doc(hidden)]
pub fn next_id32(next: &'static AtomicU32) -> NonZeroU32 {
    loop {
        // the sequential next id is already in the variable.
        let id = next.fetch_add(1, Ordering::Relaxed);

        if id == 0 {
            tracing::error!("id factory reached `u32::MAX`, will start reusing");
        } else {
            let id = hash32(id);
            if let Some(id) = NonZeroU32::new(id) {
                return id;
            }
        }
    }
}
#[doc(hidden)]
pub fn next_id64(next: &'static AtomicU64) -> NonZeroU64 {
    loop {
        // the sequential next id is already in the variable.
        let id = next.fetch_add(1, Ordering::Relaxed);

        if id == 0 {
            tracing::error!("id factory reached `u64::MAX`, will start reusing");
        } else {
            // remove the sequential clustering.
            let id = splitmix64(id);
            if let Some(id) = NonZeroU64::new(id) {
                return id;
            }
        }
    }
}

#[doc(hidden)]
pub fn hash32(n: u32) -> u32 {
    use std::num::Wrapping as W;

    let mut z = W(n);
    z = ((z >> 16) ^ z) * W(0x45d9f3b);
    z = ((z >> 16) ^ z) * W(0x45d9f3b);
    z = (z >> 16) ^ z;
    z.0
}
#[doc(hidden)]
pub fn un_hash32(z: u32) -> u32 {
    use std::num::Wrapping as W;

    let mut n = W(z);
    n = ((n >> 16) ^ n) * W(0x119de1f3);
    n = ((n >> 16) ^ n) * W(0x119de1f3);
    n = (n >> 16) ^ n;
    n.0
}

#[doc(hidden)]
pub fn splitmix64(n: u64) -> u64 {
    use std::num::Wrapping as W;

    let mut z = W(n);
    z = (z ^ (z >> 30)) * W(0xBF58476D1CE4E5B9u64);
    z = (z ^ (z >> 27)) * W(0x94D049BB133111EBu64);
    z = z ^ (z >> 31);
    z.0
}
#[doc(hidden)]
pub fn un_splitmix64(z: u64) -> u64 {
    use std::num::Wrapping as W;

    let mut n = W(z);
    n = (n ^ (n >> 31) ^ (n >> 62)) * W(0x319642b2d24d8ec3u64);
    n = (n ^ (n >> 27) ^ (n >> 54)) * W(0x96de1b173f119089u64);
    n = n ^ (n >> 30) ^ (n >> 60);
    n.0
}
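
// Sanity checks: `hash32`/`un_hash32` and `splitmix64`/`un_splitmix64` are
// intended to be exact inverse pairs, which is what lets `sequential()`
// recover the raw counter value from a scrambled ID. Minimal test sketch:
#[cfg(test)]
mod scramble_round_trip {
    use super::*;

    #[test]
    fn hash32_round_trip() {
        for n in [1u32, 2, 1_000, u32::MAX] {
            assert_eq!(un_hash32(hash32(n)), n);
        }
    }

    #[test]
    fn splitmix64_round_trip() {
        for n in [1u64, 2, 1_000, u64::MAX] {
            assert_eq!(un_splitmix64(splitmix64(n)), n);
        }
    }
}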

/// Map specialized for unique IDs that are already a randomized hash.
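///
/// # Examples
///
/// A minimal usage sketch; the `WidgetId` name is illustrative:
///
/// ```
/// use zng_unique_id::*;
///
/// unique_id_64! {
///     /// Example ID type.
///     pub struct WidgetId;
/// }
///
/// fn main() {
///     let mut map: IdMap<WidgetId, &str> = IdMap::new();
///     let id = WidgetId::new_unique();
///     map.insert(id, "label");
///     assert_eq!(map.get(&id), Some(&"label"));
/// }
/// ```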
#[derive(Clone, Debug)]
pub struct IdMap<K, V>(hashbrown::HashMap<K, V, BuildIdHasher>);

impl<K: Eq + Hash, V: PartialEq> PartialEq for IdMap<K, V> {
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}
impl<K, V> IdMap<K, V> {
    /// New `const` default.
    pub const fn new() -> Self {
        Self(hashbrown::HashMap::with_hasher(BuildIdHasher))
    }
}
impl<K, V> Default for IdMap<K, V> {
    fn default() -> Self {
        Self::new()
    }
}
impl<K, V> ops::Deref for IdMap<K, V> {
    type Target = hashbrown::HashMap<K, V, BuildIdHasher>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<K, V> ops::DerefMut for IdMap<K, V> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl<K, V> IntoIterator for IdMap<K, V> {
    type Item = (K, V);

    type IntoIter = hashbrown::hash_map::IntoIter<K, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
impl<'a, K, V> IntoIterator for &'a IdMap<K, V> {
    type Item = (&'a K, &'a V);

    type IntoIter = hashbrown::hash_map::Iter<'a, K, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}
impl<'a, K, V> IntoIterator for &'a mut IdMap<K, V> {
    type Item = (&'a K, &'a mut V);

    type IntoIter = hashbrown::hash_map::IterMut<'a, K, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter_mut()
    }
}
impl<K: Send, V: Send> IntoParallelIterator for IdMap<K, V> {
    type Iter = hashbrown::hash_map::rayon::IntoParIter<K, V>;

    type Item = (K, V);

    fn into_par_iter(self) -> Self::Iter {
        self.0.into_par_iter()
    }
}
impl<'a, K: Sync, V: Sync> IntoParallelIterator for &'a IdMap<K, V> {
    type Iter = hashbrown::hash_map::rayon::ParIter<'a, K, V>;

    type Item = (&'a K, &'a V);

    fn into_par_iter(self) -> Self::Iter {
        self.0.par_iter()
    }
}
impl<'a, K: Sync, V: Send> IntoParallelIterator for &'a mut IdMap<K, V> {
    type Iter = hashbrown::hash_map::rayon::ParIterMut<'a, K, V>;

    type Item = (&'a K, &'a mut V);

    fn into_par_iter(self) -> Self::Iter {
        self.0.par_iter_mut()
    }
}
impl<K: Eq + Hash, V> FromIterator<(K, V)> for IdMap<K, V> {
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        Self(FromIterator::from_iter(iter))
    }
}
impl<K: Eq + Hash + Send, V: Send> FromParallelIterator<(K, V)> for IdMap<K, V> {
    fn from_par_iter<I>(par_iter: I) -> Self
    where
        I: IntoParallelIterator<Item = (K, V)>,
    {
        Self(FromParallelIterator::from_par_iter(par_iter))
    }
}

/// Set specialized for unique IDs that are already a randomized hash.
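///
/// # Examples
///
/// A minimal usage sketch; the `WidgetId` name is illustrative:
///
/// ```
/// use zng_unique_id::*;
///
/// unique_id_64! {
///     /// Example ID type.
///     pub struct WidgetId;
/// }
///
/// fn main() {
///     let mut set: IdSet<WidgetId> = IdSet::new();
///     let id = WidgetId::new_unique();
///     set.insert(id);
///     assert!(set.contains(&id));
/// }
/// ```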
#[derive(Clone, Debug)]
pub struct IdSet<K>(hashbrown::HashSet<K, BuildIdHasher>);
impl<K> IdSet<K> {
    /// New `const` default.
    pub const fn new() -> Self {
        Self(hashbrown::HashSet::with_hasher(BuildIdHasher))
    }
}
impl<K> Default for IdSet<K> {
    fn default() -> Self {
        Self::new()
    }
}
impl<K> ops::Deref for IdSet<K> {
    type Target = hashbrown::HashSet<K, BuildIdHasher>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<K> ops::DerefMut for IdSet<K> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl<K> IntoIterator for IdSet<K> {
    type Item = K;

    type IntoIter = hashbrown::hash_set::IntoIter<K>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
impl<'a, K> IntoIterator for &'a IdSet<K> {
    type Item = &'a K;

    type IntoIter = hashbrown::hash_set::Iter<'a, K>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}
impl<K: Send> IntoParallelIterator for IdSet<K> {
    type Iter = hashbrown::hash_set::rayon::IntoParIter<K>;

    type Item = K;

    fn into_par_iter(self) -> Self::Iter {
        self.0.into_par_iter()
    }
}
impl<'a, K: Sync> IntoParallelIterator for &'a IdSet<K> {
    type Iter = hashbrown::hash_set::rayon::ParIter<'a, K>;

    type Item = &'a K;

    fn into_par_iter(self) -> Self::Iter {
        self.0.par_iter()
    }
}
impl<K: Eq + Hash> FromIterator<K> for IdSet<K> {
    fn from_iter<T: IntoIterator<Item = K>>(iter: T) -> Self {
        Self(FromIterator::from_iter(iter))
    }
}
impl<K: Eq + Hash + Send> FromParallelIterator<K> for IdSet<K> {
    fn from_par_iter<I>(par_iter: I) -> Self
    where
        I: IntoParallelIterator<Item = K>,
    {
        Self(FromParallelIterator::from_par_iter(par_iter))
    }
}
impl<K: Eq + Hash> PartialEq for IdSet<K> {
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}
impl<K: Eq + Hash> Eq for IdSet<K> {}

/// Entry in [`IdMap`].
pub type IdEntry<'a, K, V> = hashbrown::hash_map::Entry<'a, K, V, BuildIdHasher>;

/// Occupied entry in an [`IdEntry`].
pub type IdOccupiedEntry<'a, K, V> = hashbrown::hash_map::OccupiedEntry<'a, K, V, BuildIdHasher>;

/// Vacant entry in an [`IdEntry`].
pub type IdVacantEntry<'a, K, V> = hashbrown::hash_map::VacantEntry<'a, K, V, BuildIdHasher>;

/// Build [`IdHasher`].
#[derive(Default, Clone, Debug, Copy)]
pub struct BuildIdHasher;
impl BuildHasher for BuildIdHasher {
    type Hasher = IdHasher;

    fn build_hasher(&self) -> Self::Hasher {
        IdHasher::default()
    }
}

/// No-op hasher.
///
/// This hasher only supports `write_u32` and `write_u64`; other `write` methods panic.
///
/// This hasher does nothing; it uses the `u32` or `u64` value directly as the hash.
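///
/// # Examples
///
/// A minimal sketch of the pass-through behavior:
///
/// ```
/// use std::hash::{Hash, Hasher};
/// use zng_unique_id::IdHasher;
///
/// let mut hasher = IdHasher::default();
/// 42u64.hash(&mut hasher);
/// assert_eq!(hasher.finish(), 42);
/// ```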
#[derive(Default)]
pub struct IdHasher(u64);
impl Hasher for IdHasher {
    fn write(&mut self, _: &[u8]) {
        unimplemented!("only `write_u32` and `write_u64` are supported");
    }

    fn write_u32(&mut self, id: u32) {
        self.0 = id as u64;
    }

    fn write_u64(&mut self, id: u64) {
        self.0 = id;
    }

    fn finish(&self) -> u64 {
        self.0
    }
}

/// Trait implemented for all generated unique ID types.
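///
/// A sketch of generic use; `ItemId` and `fresh` are illustrative names:
///
/// ```
/// use zng_unique_id::*;
///
/// unique_id_32! {
///     /// Example ID type.
///     pub struct ItemId;
/// }
///
/// // generates a new ID of any unique ID type.
/// fn fresh<I: UniqueId>() -> I {
///     I::new_unique()
/// }
///
/// fn main() {
///     let id: ItemId = fresh();
///     assert!(id != fresh::<ItemId>());
/// }
/// ```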
pub trait UniqueId: Clone + Copy + PartialEq + Eq + Hash {
    /// New unique ID.
    fn new_unique() -> Self;
}

/// Declares a static unique ID that is lazily initialized.
///
/// The first dereference of the static generates the ID and caches it.
///
/// # Examples
///
/// ```
/// # fn main() { }
/// # use zng_unique_id::*;
/// #
/// # unique_id_32! {
/// # pub struct StateId<T: (std::any::Any)>;
/// # }
/// #
/// static_id! {
///     /// Metadata foo ID.
///     pub static ref FOO_ID: StateId<bool>;
/// }
/// ```
#[macro_export]
macro_rules! static_id {
    ($(
        $(#[$attr:meta])*
        $vis:vis static ref $IDENT:ident: $IdTy:ty;
    )+) => {
        $(
            $crate::lazy_static! {
                $(#[$attr])*
                $vis static ref $IDENT: $IdTy = <$IdTy as $crate::UniqueId>::new_unique();
            }
        )+
    };
}