1use cfg_if::cfg_if;
14use cfx_types::{
15 AddressWithSpace, AllChainID, Space, SpaceMap, H160, H256, H512, U256, U512,
16};
17use hashbrown::HashMap as FastHashMap;
18use parking_lot;
19use slab::Slab;
20use std::{
21 cmp::Reverse,
22 collections::{BinaryHeap, HashSet, VecDeque},
23 hash::{BuildHasher, Hash},
24 mem::{self, size_of},
25 ops::Range,
26 os::raw::c_void,
27 sync::Arc,
28};
29
30type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;
32
/// Operations and state threaded through every `MallocSizeOf::size_of`
/// call so implementations can query the allocator and avoid
/// double-counting shared data.
pub struct MallocSizeOfOps {
    /// Returns the real (usable) size of a heap allocation, given a
    /// pointer to its start.
    pub size_of_op: VoidPtrToSizeFn,

    /// Like `size_of_op`, but may be given an interior pointer and returns
    /// the size of the enclosing allocation. `None` when the platform
    /// allocator cannot support this (see `usable_size` below).
    pub enclosing_size_of_op: Option<VoidPtrToSizeFn>,

    /// Addresses already measured; used by the `Arc<T>` impl to count data
    /// reachable through multiple handles only once.
    pub visited: HashSet<usize>,
}
46
impl MallocSizeOfOps {
    /// Build an ops struct from the allocator's usable-size callback and an
    /// optional enclosing-allocation-size callback.
    pub fn new(
        size_of: VoidPtrToSizeFn,
        malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
    ) -> Self {
        MallocSizeOfOps {
            size_of_op: size_of,
            enclosing_size_of_op: malloc_enclosing_size_of,
            visited: HashSet::new(),
        }
    }

    /// Heuristic for pointers that do not refer to a real heap allocation:
    /// empty Rust collections use small dangling sentinel pointers (e.g.
    /// the type's alignment) instead of allocating, so any address at or
    /// below 256 is treated as "no allocation".
    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
        // The cast to `*const usize` discards any fat-pointer metadata;
        // only the address part is inspected.
        ptr as *const usize as usize <= 256
    }

    /// Allocator-reported size of the allocation behind `ptr`, or 0 for
    /// empty/dangling sentinel pointers.
    ///
    /// # Safety
    /// `ptr` must either satisfy `is_empty` or point to the start of a
    /// live allocation owned by the allocator behind `size_of_op`.
    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
        if MallocSizeOfOps::is_empty(ptr) {
            0
        } else {
            (self.size_of_op)(ptr as *const c_void)
        }
    }

    /// True when an enclosing-size callback was supplied, i.e. interior
    /// pointers can be measured.
    pub fn has_malloc_enclosing_size_of(&self) -> bool {
        self.enclosing_size_of_op.is_some()
    }

    /// Size of the allocation that contains `ptr`.
    ///
    /// # Safety
    /// `ptr` must point into a live heap allocation. Panics (via `unwrap`)
    /// if no enclosing-size callback was supplied; callers are expected to
    /// check `has_malloc_enclosing_size_of` first.
    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
        assert!(!MallocSizeOfOps::is_empty(ptr));
        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
    }
}
94
/// Trait for measuring the "deep" heap usage of a value: the bytes of
/// every heap allocation reachable from it. A value's own inline bytes
/// are not counted (primitives report 0 via `malloc_size_of_is_0!`).
pub trait MallocSizeOf {
    /// Measure heap bytes reachable from `self`, using the allocator
    /// callbacks and the dedup state in `ops`.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
102
/// Trait for measuring only a container's own heap buffer (its "shallow"
/// size), without recursing into the elements it stores.
pub trait MallocShallowSizeOf {
    /// Measure the container's backing storage (e.g. a `Vec`'s buffer),
    /// not its contents.
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
111
112impl MallocSizeOf for String {
113 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
114 unsafe { ops.malloc_size_of(self.as_ptr()) }
115 }
116}
117
118impl<T: MallocSizeOf> MallocSizeOf for SpaceMap<T> {
119 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
120 self.map_sum(|x| x.size_of(ops))
121 }
122}
123
124impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
125 fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
126 unsafe { ops.malloc_size_of(&**self) }
127 }
128}
129
130impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
131 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
132 self.shallow_size_of(ops) + (**self).size_of(ops)
133 }
134}
135
136impl MallocSizeOf for () {
137 fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { 0 }
138}
139
140impl<T1, T2> MallocSizeOf for (T1, T2)
141where
142 T1: MallocSizeOf,
143 T2: MallocSizeOf,
144{
145 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
146 self.0.size_of(ops) + self.1.size_of(ops)
147 }
148}
149
150impl<T1, T2, T3> MallocSizeOf for (T1, T2, T3)
151where
152 T1: MallocSizeOf,
153 T2: MallocSizeOf,
154 T3: MallocSizeOf,
155{
156 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
157 self.0.size_of(ops) + self.1.size_of(ops) + self.2.size_of(ops)
158 }
159}
160
161impl<T1, T2, T3, T4> MallocSizeOf for (T1, T2, T3, T4)
162where
163 T1: MallocSizeOf,
164 T2: MallocSizeOf,
165 T3: MallocSizeOf,
166 T4: MallocSizeOf,
167{
168 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
169 self.0.size_of(ops)
170 + self.1.size_of(ops)
171 + self.2.size_of(ops)
172 + self.3.size_of(ops)
173 }
174}
175
176impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
177 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
178 if let Some(val) = self.as_ref() {
179 val.size_of(ops)
180 } else {
181 0
182 }
183 }
184}
185
186impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
187 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
188 match *self {
189 Ok(ref x) => x.size_of(ops),
190 Err(ref e) => e.size_of(ops),
191 }
192 }
193}
194
195impl<T: MallocSizeOf + Copy> MallocSizeOf for std::cell::Cell<T> {
196 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
197 self.get().size_of(ops)
198 }
199}
200
201impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
202 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
203 self.borrow().size_of(ops)
204 }
205}
206
207impl<T: MallocSizeOf> MallocSizeOf for std::cell::UnsafeCell<T> {
208 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
209 unsafe { &*self.get() }.size_of(ops)
210 }
211}
212
213impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
214where B::Owned: MallocSizeOf
215{
216 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
217 match *self {
218 std::borrow::Cow::Borrowed(_) => 0,
219 std::borrow::Cow::Owned(ref b) => b.size_of(ops),
220 }
221 }
222}
223
224impl<T: MallocSizeOf> MallocSizeOf for [T] {
225 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
226 let mut n = 0;
227 for elem in self.iter() {
228 n += elem.size_of(ops);
229 }
230 n
231 }
232}
233
234impl<T> MallocShallowSizeOf for Vec<T> {
235 fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
236 unsafe { ops.malloc_size_of(self.as_ptr()) }
237 }
238}
239
240impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
241 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
242 let mut n = self.shallow_size_of(ops);
243 for elem in self.iter() {
244 n += elem.size_of(ops);
245 }
246 n
247 }
248}
249
/// Private mirror of `slab::Slab`'s internal entry type, used only to
/// compute a per-slot byte size for the capacity-based estimate below.
///
/// NOTE(review): this assumes the `slab` crate stores its slots in an
/// equivalent `Vacant(usize) | Occupied(T)` enum — verify when bumping
/// the `slab` dependency.
#[allow(dead_code)]
#[derive(Clone)]
enum Entry<T> {
    Vacant(usize),
    Occupied(T),
}
256
impl<T> MallocShallowSizeOf for Slab<T> {
    /// Estimate the slab's backing storage from its capacity. `Slab` does
    /// not expose its buffer pointer, so the allocator cannot be queried;
    /// the local `Entry<T>` mirror approximates the per-slot size instead.
    fn shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        mem::size_of::<Entry<T>>() * self.capacity()
    }
}
262
263impl<T: MallocSizeOf> MallocSizeOf for Slab<T> {
264 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
265 let mut n = self.shallow_size_of(ops);
266 for (_, elem) in self.iter() {
267 n += elem.size_of(ops);
268 }
269 n
270 }
271}
272
273impl<T> MallocShallowSizeOf for BinaryHeap<T> {
274 fn shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
275 mem::size_of::<T>() * self.capacity()
276 }
277}
278
279impl<T: MallocSizeOf> MallocSizeOf for BinaryHeap<T> {
280 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
281 let mut n = self.shallow_size_of(ops);
282 for elem in self.iter() {
283 n += elem.size_of(ops);
284 }
285 n
286 }
287}
288
289impl<T> MallocShallowSizeOf for VecDeque<T> {
290 fn shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
291 mem::size_of::<T>() * self.capacity()
292 }
293}
294
295impl<T: MallocSizeOf> MallocSizeOf for VecDeque<T> {
296 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
297 let mut n = self.shallow_size_of(ops);
298 for elem in self.iter() {
299 n += elem.size_of(ops);
300 }
301 n
302 }
303}
304
305impl<T: MallocSizeOf> MallocSizeOf for Arc<T> {
307 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
308 let ptr = self.as_ref() as *const T as usize;
309 if ops.visited.contains(&ptr) {
310 return 0;
311 }
312 ops.visited.insert(ptr);
313 mem::size_of::<T>() + self.as_ref().size_of(ops)
314 }
315}
316
/// Generates `MallocShallowSizeOf`/`MallocSizeOf` impls for a hash-set
/// type with the standard `<T, S: BuildHasher>` shape.
macro_rules! malloc_size_of_hash_set {
    ($ty:ty) => {
        impl<T, S> MallocShallowSizeOf for $ty
        where
            T: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // Measure the table allocation through a pointer to
                    // any one element; an empty set owns nothing.
                    self.iter().next().map_or(0, |t| unsafe {
                        ops.malloc_enclosing_size_of(t)
                    })
                } else {
                    // Fallback estimate: one `T` slot per capacity unit,
                    // plus roughly one word of table overhead per slot.
                    self.capacity() * (size_of::<T>() + size_of::<usize>())
                }
            }
        }

        impl<T, S> MallocSizeOf for $ty
        where
            T: Eq + Hash + MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table storage plus each element's own heap data.
                let mut n = self.shallow_size_of(ops);
                for t in self.iter() {
                    n += t.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
358
/// Generates `MallocShallowSizeOf`/`MallocSizeOf` impls for a hash-map
/// type with the standard `<K, V, S: BuildHasher>` shape.
macro_rules! malloc_size_of_hash_map {
    ($ty:ty) => {
        impl<K, V, S> MallocShallowSizeOf for $ty
        where
            K: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // Measure the table allocation through a pointer to
                    // any one value; an empty map owns nothing.
                    self.values().next().map_or(0, |v| unsafe {
                        ops.malloc_enclosing_size_of(v)
                    })
                } else {
                    // Fallback estimate: one (K, V) slot per capacity
                    // unit, plus roughly one word of overhead per slot.
                    self.capacity()
                        * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
                }
            }
        }

        impl<K, V, S> MallocSizeOf for $ty
        where
            K: Eq + Hash + MallocSizeOf,
            V: MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table storage plus the deep size of every key and value.
                let mut n = self.shallow_size_of(ops);
                for (k, v) in self.iter() {
                    n += k.size_of(ops);
                    n += v.size_of(ops);
                }
                n
            }
        }
    };
}

malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);
malloc_size_of_hash_map!(FastHashMap<K, V, S>);
400
401impl<T> MallocSizeOf for std::marker::PhantomData<T> {
403 fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { 0 }
404}
405
406impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
407 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
408 self.lock().unwrap().size_of(ops)
409 }
410}
411
412impl<T: MallocSizeOf> MallocSizeOf for parking_lot::Mutex<T> {
413 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
414 self.lock().size_of(ops)
415 }
416}
417
418impl<T: MallocSizeOf> MallocSizeOf for std::sync::RwLock<T> {
419 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
420 self.read().unwrap().size_of(ops)
421 }
422}
423
424impl<T: MallocSizeOf> MallocSizeOf for parking_lot::RwLock<T> {
425 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
426 self.read().size_of(ops)
427 }
428}
429
430impl<T: MallocSizeOf> MallocSizeOf for Reverse<T> {
431 fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
432 self.0.size_of(ops)
433 }
434}
435
/// Implements `MallocSizeOf` for `SmallVec` with the given inline
/// capacity. Inline (non-spilled) storage lives inside the SmallVec
/// itself, so only a spilled heap buffer is counted, plus each element's
/// own heap usage.
macro_rules! impl_smallvec {
    ($size:expr) => {
        impl<T> MallocSizeOf for smallvec::SmallVec<[T; $size]>
        where T: MallocSizeOf
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // The heap buffer only exists once the vector has spilled.
                let spilled_bytes = if self.spilled() {
                    self.capacity() * core::mem::size_of::<T>()
                } else {
                    0
                };
                self.iter()
                    .fold(spilled_bytes, |acc, elem| acc + elem.size_of(ops))
            }
        }
    };
}
455
456impl_smallvec!(32); #[macro_export]
460macro_rules! malloc_size_of_is_0(
461 ($($ty:ty),+) => (
462 $(
463 impl $crate::MallocSizeOf for $ty {
464 #[inline(always)]
465 fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
466 0
467 }
468 }
469 )+
470 );
471 ($($ty:ident<$($gen:ident),+>),+) => (
472 $(
473 impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
474 #[inline(always)]
475 fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
476 0
477 }
478 }
479 )+
480 );
481);
482
483malloc_size_of_is_0!(bool, char, str);
484malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
485malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
486malloc_size_of_is_0!(f32, f64);
487
488malloc_size_of_is_0!(std::sync::atomic::AtomicBool);
489malloc_size_of_is_0!(std::sync::atomic::AtomicIsize);
490malloc_size_of_is_0!(std::sync::atomic::AtomicUsize);
491
492malloc_size_of_is_0!(std::num::NonZeroUsize);
493malloc_size_of_is_0!(std::num::NonZeroU32);
494
495malloc_size_of_is_0!(std::time::Duration);
496malloc_size_of_is_0!(std::time::Instant);
497malloc_size_of_is_0!(std::time::SystemTime);
498
499malloc_size_of_is_0!(
500 Range<u8>,
501 Range<u16>,
502 Range<u32>,
503 Range<u64>,
504 Range<usize>
505);
506malloc_size_of_is_0!(
507 Range<i8>,
508 Range<i16>,
509 Range<i32>,
510 Range<i64>,
511 Range<isize>
512);
513malloc_size_of_is_0!(Range<f32>, Range<f64>);
514
515malloc_size_of_is_0!(
516 H256,
517 U256,
518 H512,
519 H160,
520 U512,
521 AddressWithSpace,
522 AllChainID,
523 Space
524);
525
/// Platform-specific access to the allocator's "usable size" probe, used
/// as the `size_of_op` callback in `MallocSizeOfOps`.
mod usable_size {

    use super::*;

    cfg_if! {
        if #[cfg(target_os = "windows")] {

            extern crate winapi;

            use self::winapi::um::heapapi::{GetProcessHeap, HeapSize, HeapValidate};

            /// Measure a block on the Win32 process heap.
            pub unsafe extern "C" fn malloc_usable_size(mut ptr: *const c_void) -> usize {

                let heap = GetProcessHeap();

                // If `ptr` is not a valid block start, treat it as an
                // interior pointer preceded by a pointer to the real
                // allocation start. NOTE(review): this relies on the
                // allocator storing that back-pointer just before the
                // block — confirm for the allocator in use.
                if HeapValidate(heap, 0, ptr) == 0 {
                    ptr = *(ptr as *const *const c_void).offset(-1);
                }

                HeapSize(heap, 0, ptr) as usize
            }

        } else if #[cfg(feature = "jemalloc")] {

            /// Delegate to jemalloc's usable-size query.
            pub unsafe extern "C" fn malloc_usable_size(ptr: *const c_void) -> usize {
                tikv_jemallocator::usable_size(ptr)
            }

        } else if #[cfg(target_os = "linux")] {

            // glibc exposes `malloc_usable_size` directly.
            extern "C" {
                pub fn malloc_usable_size(ptr: *const c_void) -> usize;
            }

        } else if #[cfg(target_os = "macos")] {

            // On macOS the equivalent libc entry point is `malloc_size`.
            extern "C" {
                #[link_name = "malloc_size"]
                pub fn malloc_usable_size(ptr: *const c_void) -> usize;
            }


        } else {
            // No allocator probe is available on this target.
            pub unsafe extern "C" fn malloc_usable_size(_ptr: *const c_void) -> usize {
                unreachable!("estimate heapsize or feature allocator needed")
            }

        }

    }

    /// No enclosing-allocation-size callback is available via this module.
    #[inline]
    pub fn new_enclosing_size_fn() -> Option<VoidPtrToSizeFn> { None }
}
587
588pub fn new_malloc_size_ops() -> MallocSizeOfOps {
590 MallocSizeOfOps::new(
591 usable_size::malloc_usable_size,
592 usable_size::new_enclosing_size_fn(),
593 )
594}