#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
#[cfg(feature = "std")]
mod rstd {
	pub use std::*;
}
#[cfg(not(feature = "std"))]
mod rstd {
	pub use core::*;
	pub mod collections {
		pub use alloc::collections::*;

		pub use vec_deque::VecDeque;
	}
}

#[cfg(not(feature = "std"))]
pub use alloc::boxed::Box;
#[cfg(not(feature = "std"))]
use core::ffi::c_void;
#[cfg(feature = "std")]
use std::hash::BuildHasher;
#[cfg(feature = "std")]
use std::os::raw::c_void;
#[cfg(feature = "std")]
use std::sync::Arc;

#[cfg(feature = "std")]
use rstd::hash::Hash;
use rstd::{
	marker::PhantomData,
	mem::size_of,
	ops::{Deref, DerefMut, Range},
};

/// A C function that takes a pointer to a heap allocation and returns its size.
pub type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;

/// A closure implementing a stateful predicate on pointers.
pub type VoidPtrToBoolFnMut = dyn FnMut(*const c_void) -> bool;

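/// Operations used when measuring heap usage of data structures.
///
/// A minimal sketch of constructing one by hand; `usable_size` is a
/// hypothetical callback standing in for an allocator query such as
/// jemalloc's `malloc_usable_size` (in practice the crate's `allocators`
/// module wires this up via `new_malloc_size_ops`):
///
/// ```ignore
/// unsafe extern "C" fn usable_size(ptr: *const c_void) -> usize {
///     // Assumption: ask the allocator for the size of the block at `ptr`.
///     malloc_usable_size(ptr as *mut _)
/// }
///
/// let mut ops = MallocSizeOfOps::new(usable_size, None, None);
/// ```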
pub struct MallocSizeOfOps {
	/// A function that returns the size of a heap allocation.
	size_of_op: VoidPtrToSizeFn,

	/// Like `size_of_op`, but can take an interior pointer. Optional because
	/// not all allocators support this operation. If it's not provided, some
	/// measurements will be estimates rather than exact figures.
	enclosing_size_of_op: Option<VoidPtrToSizeFn>,

	/// Checks if a pointer has been seen before, and remembers it for next
	/// time. Useful for avoiding double-counting of `Rc`s and `Arc`s.
	/// Optional, because many measurements don't need it.
	have_seen_ptr_op: Option<Box<VoidPtrToBoolFnMut>>,
}

impl MallocSizeOfOps {
	pub fn new(
		size_of: VoidPtrToSizeFn,
		malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
		have_seen_ptr: Option<Box<VoidPtrToBoolFnMut>>,
	) -> Self {
		MallocSizeOfOps {
			size_of_op: size_of,
			enclosing_size_of_op: malloc_enclosing_size_of,
			have_seen_ptr_op: have_seen_ptr,
		}
	}

	/// Check if an allocation is empty. This relies on empty collections
	/// using small, fixed sentinel addresses (such as the type's alignment)
	/// instead of allocating: any address at or below 256 is treated as a
	/// sentinel rather than a real heap allocation.
	fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
		ptr as *const usize as usize <= 256
	}

	/// Call `size_of_op` on `ptr`, first checking that the allocation isn't
	/// empty, because some types (such as `Vec`) use empty allocations.
	pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
		if MallocSizeOfOps::is_empty(ptr) {
			0
		} else {
			(self.size_of_op)(ptr as *const c_void)
		}
	}

	/// Is an `enclosing_size_of_op` available?
	pub fn has_malloc_enclosing_size_of(&self) -> bool {
		self.enclosing_size_of_op.is_some()
	}

	/// Call `enclosing_size_of_op`, which must be available, on `ptr`, which
	/// must not be empty.
	pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
		assert!(!MallocSizeOfOps::is_empty(ptr));
		(self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
	}

	/// Call `have_seen_ptr_op` on `ptr`.
	pub fn have_seen_ptr<T>(&mut self, ptr: *const T) -> bool {
		let have_seen_ptr_op = self
			.have_seen_ptr_op
			.as_mut()
			.expect("missing have_seen_ptr_op");
		have_seen_ptr_op(ptr as *const c_void)
	}
}

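/// Trait for measuring the "deep" size of an object: the space taken by all
/// heap allocations reachable from it, as measured through `ops`.
///
/// A minimal sketch of a hand-written implementation; `Cache` and its fields
/// are illustrative, not part of this crate:
///
/// ```ignore
/// struct Cache {
///     entries: Vec<u8>,
///     name: String,
/// }
///
/// impl MallocSizeOf for Cache {
///     fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
///         // Sum the heap usage of every owned field.
///         self.entries.size_of(ops) + self.name.size_of(ops)
///     }
/// }
/// ```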
pub trait MallocSizeOf {
	/// Measure the heap usage of all descendant heap-allocated structures,
	/// but not the space taken up by the value itself.
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;

	/// If `size_of` always returns the same value for this type, report it
	/// here so that containers of `Self` can skip per-element traversal.
	fn constant_size() -> Option<usize>
	where
		Self: Sized,
	{
		None
	}
}

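/// Trait for measuring the "shallow" size of an object: only the heap block
/// owned directly by the value, not the contents of its elements.
///
/// A sketch of the distinction, assuming `ops` was built as shown above
/// (illustrative only):
///
/// ```ignore
/// let v: Vec<String> = vec![String::from("hello")];
/// // Shallow: just the Vec's own buffer of String headers.
/// let shallow = v.shallow_size_of(&mut ops);
/// // Deep: the buffer plus each String's character allocation.
/// let deep = v.size_of(&mut ops);
/// assert!(deep >= shallow);
/// ```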
pub trait MallocShallowSizeOf {
	/// Measure the heap usage of direct descendant structures, but not the
	/// space taken up by the value itself.
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

pub trait MallocUnconditionalSizeOf {
	/// Measure the heap usage of all heap-allocated descendant structures
	/// without any sharing check: for `Arc`-like types the contents are
	/// counted even though other references may also count them.
	fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

pub trait MallocUnconditionalShallowSizeOf {
	/// `unconditional_size_of` combined with `shallow_size_of`.
	fn unconditional_shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}

impl<'a, T: ?Sized> MallocSizeOf for &'a T {
	fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
		// Zero makes sense for a non-owning reference.
		0
	}
	fn constant_size() -> Option<usize> {
		Some(0)
	}
}

impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.shallow_size_of(ops) + (**self).size_of(ops)
	}
}

#[impl_trait_for_tuples::impl_for_tuples(12)]
impl MallocSizeOf for Tuple {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut result = 0;
		for_tuples!( #( result += Tuple.size_of(ops); )* );
		result
	}
	fn constant_size() -> Option<usize> {
		let mut result = Some(0);
		for_tuples!( #( result = result.and_then(|s| Tuple::constant_size().map(|t| s + t)); )* );
		result
	}
}

impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if let Some(val) = self.as_ref() {
			val.size_of(ops)
		} else {
			0
		}
	}
	fn constant_size() -> Option<usize> {
		// An `Option` has a constant heap size only if `T`'s is zero, since
		// `None` contributes nothing.
		T::constant_size().filter(|s| *s == 0)
	}
}

impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		match *self {
			Ok(ref x) => x.size_of(ops),
			Err(ref e) => e.size_of(ops),
		}
	}
	fn constant_size() -> Option<usize> {
		// A `Result` has a constant size only if both variants agree on it.
		T::constant_size().and_then(|t| E::constant_size().filter(|e| *e == t))
	}
}

impl<T: MallocSizeOf + Copy> MallocSizeOf for rstd::cell::Cell<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.get().size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		T::constant_size()
	}
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::cell::RefCell<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.borrow().size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		T::constant_size()
	}
}

#[cfg(feature = "std")]
impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where
	B::Owned: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		match *self {
			// Borrowed data is owned elsewhere and counted there.
			std::borrow::Cow::Borrowed(_) => 0,
			std::borrow::Cow::Owned(ref b) => b.size_of(ops),
		}
	}
}

impl<T: MallocSizeOf> MallocSizeOf for [T] {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = 0;
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		// Fast path: if `T` has a constant heap footprint, skip the
		// per-element traversal.
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

impl<T> MallocShallowSizeOf for rstd::collections::VecDeque<T> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			if let Some(front) = self.front() {
				// The front element is an interior pointer into the ring
				// buffer; measure its enclosing allocation.
				unsafe { ops.malloc_enclosing_size_of(&*front) }
			} else {
				// This assumes that no memory is allocated when the
				// `VecDeque` is empty.
				0
			}
		} else {
			// An estimate.
			self.capacity() * size_of::<T>()
		}
	}
}

impl<T: MallocSizeOf> MallocSizeOf for rstd::collections::VecDeque<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

#[cfg(feature = "std")]
impl<T, S> MallocShallowSizeOf for std::collections::HashSet<T, S>
where
	T: Eq + Hash,
	S: BuildHasher,
{
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			// The first value gives us an interior pointer into the table's
			// single heap allocation, whose size the allocator can report.
			self.iter()
				.next()
				.map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
		} else {
			// An estimate: one slot plus one hash word per capacity unit.
			self.capacity() * (size_of::<T>() + size_of::<usize>())
		}
	}
}

#[cfg(feature = "std")]
impl<T, S> MallocSizeOf for std::collections::HashSet<T, S>
where
	T: Eq + Hash + MallocSizeOf,
	S: BuildHasher,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

impl<I: MallocSizeOf> MallocSizeOf for rstd::cmp::Reverse<I> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.0.size_of(ops)
	}
	fn constant_size() -> Option<usize> {
		I::constant_size()
	}
}

#[cfg(feature = "std")]
impl<K, V, S> MallocShallowSizeOf for std::collections::HashMap<K, V, S> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		// See the `HashSet` implementation for how this works.
		if ops.has_malloc_enclosing_size_of() {
			self.values()
				.next()
				.map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}

#[cfg(feature = "std")]
impl<K, V, S> MallocSizeOf for std::collections::HashMap<K, V, S>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self
				.iter()
				.fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

impl<K, V> MallocShallowSizeOf for rstd::collections::BTreeMap<K, V> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			self.values()
				.next()
				.map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.len() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}

impl<K, V> MallocSizeOf for rstd::collections::BTreeMap<K, V>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self
				.iter()
				.fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

impl<T> MallocShallowSizeOf for rstd::collections::BTreeSet<T> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			// See the `HashSet` implementation for how this works.
			self.iter()
				.next()
				.map_or(0, |t| unsafe { ops.malloc_enclosing_size_of(t) })
		} else {
			// An estimate.
			self.len() * (size_of::<T>() + size_of::<usize>())
		}
	}
}

impl<T> MallocSizeOf for rstd::collections::BTreeSet<T>
where
	T: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let Some(t) = T::constant_size() {
			n += self.len() * t;
		} else {
			n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
		}
		n
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocUnconditionalSizeOf for Arc<T> {
	fn unconditional_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.unconditional_shallow_size_of(ops) + (**self).size_of(ops)
	}
}

// If a mutex is stored directly as a member of a data type that is being
// measured, it is the unique owner of its contents, so those contents are
// measured here. (A mutex found behind an `Arc` is the `Arc`'s
// responsibility instead.)
#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.lock().unwrap().size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::Mutex<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.lock().size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for once_cell::sync::OnceCell<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.get().map(|v| v.size_of(ops)).unwrap_or(0)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for std::sync::RwLock<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.read().unwrap().size_of(ops)
	}
}

#[cfg(feature = "std")]
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::RwLock<T> {
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		self.read().size_of(ops)
	}
}

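/// Implement a no-op `MallocSizeOf` (always zero, constant size zero) for
/// one or more types that own no heap allocations.
///
/// A sketch of how a downstream crate might use it; `Flags` is a
/// hypothetical plain-data type, not part of this crate:
///
/// ```ignore
/// #[derive(Clone, Copy)]
/// struct Flags(u32);
///
/// malloc_size_of_is_0!(Flags);
/// ```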
#[macro_export]
macro_rules! malloc_size_of_is_0(
	($($ty:ty),+) => (
		$(
			impl $crate::MallocSizeOf for $ty {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	(any: $($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
	($($ty:ident<$($gen:ident),+>),+) => (
		$(
			impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
				#[inline(always)]
				fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
					0
				}
				#[inline(always)]
				fn constant_size() -> Option<usize> { Some(0) }
			}
		)+
	);
);

malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);

malloc_size_of_is_0!(rstd::sync::atomic::AtomicBool);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(rstd::sync::atomic::AtomicUsize);

malloc_size_of_is_0!(Range<u8>, Range<u16>, Range<u32>, Range<u64>, Range<usize>);
malloc_size_of_is_0!(Range<i8>, Range<i16>, Range<i32>, Range<i64>, Range<isize>);
malloc_size_of_is_0!(Range<f32>, Range<f64>);
malloc_size_of_is_0!(any: PhantomData<T>);

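/// Transparent wrapper that marks the inner value as measurable while
/// behaving like `T` through `Deref`/`DerefMut`.
///
/// A sketch of intended use (illustrative only):
///
/// ```ignore
/// let m = Measurable(vec![1u8, 2, 3]);
/// assert_eq!(m.len(), 3); // derefs to the inner Vec
/// ```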
#[derive(Clone)]
pub struct Measurable<T: MallocSizeOf>(pub T);

impl<T: MallocSizeOf> Deref for Measurable<T> {
	type Target = T;

	fn deref(&self) -> &T {
		&self.0
	}
}

impl<T: MallocSizeOf> DerefMut for Measurable<T> {
	fn deref_mut(&mut self) -> &mut T {
		&mut self.0
	}
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocShallowSizeOf for hashbrown::HashMap<K, V, S> {
	fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		if ops.has_malloc_enclosing_size_of() {
			self.values()
				.next()
				.map_or(0, |v| unsafe { ops.malloc_enclosing_size_of(v) })
		} else {
			self.capacity() * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
		}
	}
}

#[cfg(feature = "hashbrown")]
impl<K, V, S> MallocSizeOf for hashbrown::HashMap<K, V, S>
where
	K: MallocSizeOf,
	V: MallocSizeOf,
{
	fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
		let mut n = self.shallow_size_of(ops);
		if let (Some(k), Some(v)) = (K::constant_size(), V::constant_size()) {
			n += self.len() * (k + v)
		} else {
			n = self
				.iter()
				.fold(n, |acc, (k, v)| acc + k.size_of(ops) + v.size_of(ops))
		}
		n
	}
}

// Fixed-size byte arrays are stored inline and own no heap allocations.
malloc_size_of_is_0!(
	[u8; 1], [u8; 2], [u8; 3], [u8; 4], [u8; 5], [u8; 6], [u8; 7], [u8; 8], [u8; 9], [u8; 10],
	[u8; 11], [u8; 12], [u8; 13], [u8; 14], [u8; 15], [u8; 16], [u8; 17], [u8; 18], [u8; 19],
	[u8; 20], [u8; 21], [u8; 22], [u8; 23], [u8; 24], [u8; 25], [u8; 26], [u8; 27], [u8; 28],
	[u8; 29], [u8; 30], [u8; 31], [u8; 32]
);

macro_rules! impl_smallvec {
	($size: expr) => {
		#[cfg(feature = "smallvec")]
		impl<T> MallocSizeOf for smallvec::SmallVec<[T; $size]>
		where
			T: MallocSizeOf,
		{
			fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
				// Inline storage lives on the stack; only a spilled
				// `SmallVec` owns a heap buffer.
				let mut n = if self.spilled() {
					self.capacity() * core::mem::size_of::<T>()
				} else {
					0
				};
				if let Some(t) = T::constant_size() {
					n += self.len() * t;
				} else {
					n = self.iter().fold(n, |acc, elem| acc + elem.size_of(ops))
				}
				n
			}
		}
	};
}

impl_smallvec!(32);
impl_smallvec!(36);

#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Instant);
#[cfg(feature = "std")]
malloc_size_of_is_0!(std::time::Duration);

#[cfg(all(test, feature = "std"))]
mod tests {
	use std::{collections::BTreeSet, mem};

	use smallvec::SmallVec;

	use crate::{MallocSizeOf, MallocSizeOfOps, allocators::new_malloc_size_ops};

	impl_smallvec!(3);

	#[test]
	fn test_smallvec_stack_allocated_type() {
		let mut v: SmallVec<[u8; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push(1);
		v.push(2);
		v.push(3);
		assert_eq!(v.size_of(&mut ops), 0);
		assert!(!v.spilled());
		v.push(4);
		assert!(
			v.spilled(),
			"SmallVec spills when going beyond the capacity of the inner backing array"
		);
		assert_eq!(v.size_of(&mut ops), 4);
	}

	#[test]
	fn test_smallvec_boxed_stack_allocated_type() {
		let mut v: SmallVec<[Box<u8>; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push(Box::new(1u8));
		v.push(Box::new(2u8));
		v.push(Box::new(3u8));
		assert!(v.size_of(&mut ops) >= 3);
		assert!(!v.spilled());
		v.push(Box::new(4u8));
		assert!(
			v.spilled(),
			"SmallVec spills when going beyond the capacity of the inner backing array"
		);
		let mut ops = new_malloc_size_ops();
		let expected_min_allocs = mem::size_of::<Box<u8>>() * 4 + 4;
		assert!(v.size_of(&mut ops) >= expected_min_allocs);
	}

	#[test]
	fn test_smallvec_heap_allocated_type() {
		let mut v: SmallVec<[String; 3]> = SmallVec::new();
		let mut ops = new_malloc_size_ops();
		assert_eq!(v.size_of(&mut ops), 0);
		v.push("COW".into());
		v.push("PIG".into());
		v.push("DUCK".into());
		assert!(!v.spilled());
		assert!(v.size_of(&mut ops) >= "COW".len() + "PIG".len() + "DUCK".len());
		v.push("ÖWL".into());
		assert!(v.spilled());
		let mut ops = new_malloc_size_ops();
		let expected_min_allocs =
			mem::size_of::<String>() * 4 + "ÖWL".len() + "COW".len() + "PIG".len() + "DUCK".len();
		assert!(v.size_of(&mut ops) >= expected_min_allocs);
	}

	#[test]
	fn test_large_vec() {
		const N: usize = 128 * 1024 * 1024;
		let val = vec![1u8; N];
		let mut ops = new_malloc_size_ops();
		assert!(val.size_of(&mut ops) >= N);
		assert!(val.size_of(&mut ops) < 2 * N);
	}

	#[test]
	fn btree_set() {
		let mut set = BTreeSet::new();
		for t in 0..100 {
			set.insert(vec![t]);
		}
		// Each inserted `Vec<u8>` allocates, so 100 entries should measure
		// well above 3000 bytes in total.
		assert!(crate::malloc_size(&set) > 3000);
	}

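	// A minimal extra sketch (not part of the original suite): deep
	// measurement of a nested `Vec` also counts the inner buffers.
	#[test]
	fn nested_vec() {
		let v: Vec<Vec<u8>> = vec![vec![0u8; 10]; 10];
		let mut ops = new_malloc_size_ops();
		assert!(v.size_of(&mut ops) >= 100);
	}
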
	#[test]
	fn special_malloc_size_of_0() {
		struct Data<P> {
			phantom: std::marker::PhantomData<P>,
		}

		malloc_size_of_is_0!(any: Data<P>);

		// The `any:` rule places no `MallocSizeOf` bound on `P`, so
		// `Data<[u8; 333]>` is measurable (as zero) even though `[u8; 333]`
		// itself implements nothing.
		assert_eq!(
			crate::malloc_size(&Data::<[u8; 333]> {
				phantom: std::marker::PhantomData
			}),
			0
		);
	}

	#[test]
	fn constant_size() {
		struct AlwaysTwo(Vec<u8>);

		impl MallocSizeOf for AlwaysTwo {
			fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
				self.0.size_of(ops)
			}
			fn constant_size() -> Option<usize> {
				Some(2)
			}
		}

		assert_eq!(AlwaysTwo::constant_size(), Some(2));
		assert_eq!(std::cmp::Reverse::<u8>::constant_size(), Some(0));
		assert_eq!(std::cell::RefCell::<u8>::constant_size(), Some(0));
		assert_eq!(std::cell::Cell::<u8>::constant_size(), Some(0));
		assert_eq!(Result::<(), ()>::constant_size(), Some(0));
		assert_eq!(
			<(AlwaysTwo, (), [u8; 32], AlwaysTwo)>::constant_size(),
			Some(2 + 2)
		);
		assert_eq!(Option::<u8>::constant_size(), Some(0));
		assert_eq!(<&String>::constant_size(), Some(0));

		assert_eq!(<String>::constant_size(), None);
		assert_eq!(std::borrow::Cow::<String>::constant_size(), None);
		assert_eq!(Result::<(), String>::constant_size(), None);
		assert_eq!(Option::<AlwaysTwo>::constant_size(), None);
	}
}