#[cfg(all(not(target_env = "msvc"), feature = "jemalloc-global"))]
use jemallocator;
#[cfg(all(not(target_env = "msvc"), feature = "jemalloc-global"))]
use jemallocator::Jemalloc;
#[cfg(all(not(target_env = "msvc"), feature = "jemalloc-global"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
use cfg_if::cfg_if;
use cfx_types::{
AddressWithSpace, AllChainID, Space, SpaceMap, H160, H256, H512, U256, U512,
};
use hashbrown::HashMap as FastHashMap;
use parking_lot;
use slab::Slab;
use std::{
cmp::Reverse,
collections::{BinaryHeap, HashSet, VecDeque},
hash::{BuildHasher, Hash},
mem::{self, size_of},
ops::Range,
os::raw::c_void,
sync::Arc,
};
type VoidPtrToSizeFn = unsafe extern "C" fn(ptr: *const c_void) -> usize;
/// Operations and state used while measuring heap allocations.
pub struct MallocSizeOfOps {
    /// Returns the allocated size of the heap block behind a pointer.
    pub size_of_op: VoidPtrToSizeFn,
    /// Like `size_of_op`, but may be given an interior pointer into the
    /// block. Optional because not all allocators support this query.
    pub enclosing_size_of_op: Option<VoidPtrToSizeFn>,
    /// Addresses already measured, used to avoid double-counting shared
    /// allocations (e.g. multiple `Arc` clones of the same value).
    pub visited: HashSet<usize>,
}
impl MallocSizeOfOps {
    /// Builds an ops struct from the allocator callbacks; starts with an
    /// empty `visited` set.
    pub fn new(
        size_of: VoidPtrToSizeFn,
        malloc_enclosing_size_of: Option<VoidPtrToSizeFn>,
    ) -> Self {
        MallocSizeOfOps {
            size_of_op: size_of,
            enclosing_size_of_op: malloc_enclosing_size_of,
            visited: HashSet::new(),
        }
    }

    /// Heuristic: `true` if `ptr` is almost certainly not a real heap
    /// allocation. Empty Rust collections don't allocate — they use small
    /// constant "dangling" addresses (alignments) as sentinels — so any
    /// address at or below 256 is treated as a non-allocation.
    fn is_empty<T: ?Sized>(ptr: *const T) -> bool {
        ptr as *const usize as usize <= 256
    }

    /// Measures the heap block `ptr` points at; returns 0 for sentinel
    /// (non-allocated) pointers.
    ///
    /// # Safety
    /// `ptr` must be a pointer previously returned by the allocator that
    /// `size_of_op` understands, or a sentinel as filtered by `is_empty`.
    pub unsafe fn malloc_size_of<T: ?Sized>(&self, ptr: *const T) -> usize {
        if MallocSizeOfOps::is_empty(ptr) {
            0
        } else {
            (self.size_of_op)(ptr as *const c_void)
        }
    }

    /// Whether an enclosing-size callback was supplied.
    pub fn has_malloc_enclosing_size_of(&self) -> bool {
        self.enclosing_size_of_op.is_some()
    }

    /// Measures the heap block that encloses `ptr` (which may be an
    /// interior pointer).
    ///
    /// # Safety
    /// `ptr` must point into a live heap allocation. Panics if no
    /// enclosing-size callback was supplied (check
    /// `has_malloc_enclosing_size_of` first).
    pub unsafe fn malloc_enclosing_size_of<T>(&self, ptr: *const T) -> usize {
        assert!(!MallocSizeOfOps::is_empty(ptr));
        (self.enclosing_size_of_op.unwrap())(ptr as *const c_void)
    }
}
/// Measures the "deep" heap usage of a value: all heap blocks reachable
/// through it, excluding the value's own inline (stack/struct) size.
pub trait MallocSizeOf {
    /// Returns the heap usage of `self`, in bytes.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
/// Measures only the "shallow" heap usage of a container: the allocation(s)
/// it directly owns, not the contents' own heap usage.
pub trait MallocShallowSizeOf {
    /// Returns the shallow heap usage of `self`, in bytes.
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize;
}
impl MallocSizeOf for String {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // SAFETY: `as_ptr` points at the string's heap buffer, or a small
        // dangling sentinel for an empty string, which `malloc_size_of`
        // filters out and reports as 0.
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}
impl<T: MallocSizeOf> MallocSizeOf for SpaceMap<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // Sum the heap usage over every per-space entry.
        self.map_sum(|x| x.size_of(ops))
    }
}
impl<T: ?Sized> MallocShallowSizeOf for Box<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // SAFETY: a `Box` owns a live heap allocation (or a dangling
        // sentinel for zero-sized contents, which `malloc_size_of`
        // reports as 0).
        unsafe { ops.malloc_size_of(&**self) }
    }
}
impl<T: MallocSizeOf + ?Sized> MallocSizeOf for Box<T> {
    /// Deep size of a box: its own heap block plus whatever the boxed
    /// value itself owns on the heap.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let own_allocation = self.shallow_size_of(ops);
        let contents = self.as_ref().size_of(ops);
        own_allocation + contents
    }
}
// The unit type owns no heap memory.
impl MallocSizeOf for () {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { 0 }
}
impl<T1, T2> MallocSizeOf for (T1, T2)
where
    T1: MallocSizeOf,
    T2: MallocSizeOf,
{
    /// A tuple's heap usage is the sum of its fields' heap usage.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let (first, second) = self;
        first.size_of(ops) + second.size_of(ops)
    }
}
impl<T1, T2, T3> MallocSizeOf for (T1, T2, T3)
where
    T1: MallocSizeOf,
    T2: MallocSizeOf,
    T3: MallocSizeOf,
{
    /// A tuple's heap usage is the sum of its fields' heap usage.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let (first, second, third) = self;
        first.size_of(ops) + second.size_of(ops) + third.size_of(ops)
    }
}
impl<T1, T2, T3, T4> MallocSizeOf for (T1, T2, T3, T4)
where
    T1: MallocSizeOf,
    T2: MallocSizeOf,
    T3: MallocSizeOf,
    T4: MallocSizeOf,
{
    /// A tuple's heap usage is the sum of its fields' heap usage.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let (first, second, third, fourth) = self;
        first.size_of(ops)
            + second.size_of(ops)
            + third.size_of(ops)
            + fourth.size_of(ops)
    }
}
impl<T: MallocSizeOf> MallocSizeOf for Option<T> {
    /// `None` owns nothing; `Some` delegates to the contained value.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match self {
            Some(value) => value.size_of(ops),
            None => 0,
        }
    }
}
impl<T: MallocSizeOf, E: MallocSizeOf> MallocSizeOf for Result<T, E> {
    /// Either variant delegates to its payload.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        match self {
            Ok(value) => value.size_of(ops),
            Err(error) => error.size_of(ops),
        }
    }
}
impl<T: MallocSizeOf + Copy> MallocSizeOf for std::cell::Cell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // `Cell` cannot lend out a reference to its contents, so copy the
        // value out (hence the `Copy` bound) and measure the copy.
        self.get().size_of(ops)
    }
}
impl<T: MallocSizeOf> MallocSizeOf for std::cell::RefCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // NOTE: panics if the cell is currently mutably borrowed.
        self.borrow().size_of(ops)
    }
}
impl<T: MallocSizeOf> MallocSizeOf for std::cell::UnsafeCell<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // SAFETY: a shared reference is created only for the duration of
        // the measurement; callers must ensure there is no concurrent
        // mutable access through this cell.
        unsafe { &*self.get() }.size_of(ops)
    }
}
impl<'a, B: ?Sized + ToOwned> MallocSizeOf for std::borrow::Cow<'a, B>
where B::Owned: MallocSizeOf
{
    /// Only the `Owned` variant holds heap data attributable to this
    /// `Cow`; borrowed data is charged to its true owner.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if let std::borrow::Cow::Owned(owned) = self {
            owned.size_of(ops)
        } else {
            0
        }
    }
}
impl<T: MallocSizeOf> MallocSizeOf for [T] {
    /// A bare slice does not own its backing buffer, so only the
    /// elements' own heap usage is counted.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.iter().map(|elem| elem.size_of(ops)).sum()
    }
}
impl<T> MallocShallowSizeOf for Vec<T> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // SAFETY: `as_ptr` is the vector's heap buffer, or a dangling
        // sentinel for an empty vec, which `malloc_size_of` reports as 0.
        unsafe { ops.malloc_size_of(self.as_ptr()) }
    }
}
impl<T: MallocSizeOf> MallocSizeOf for Vec<T> {
    /// Deep size of a vector: the backing buffer plus every element's
    /// own heap usage.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.iter().fold(self.shallow_size_of(ops), |total, elem| {
            total + elem.size_of(ops)
        })
    }
}
// Mirror of the `slab` crate's private per-slot entry type; used only to
// approximate a `Slab`'s per-slot storage size below. `dead_code` is
// allowed because the variants are never constructed.
#[allow(dead_code)]
#[derive(Clone)]
enum Entry<T> {
    Vacant(usize),
    Occupied(T),
}
impl<T> MallocShallowSizeOf for Slab<T> {
    fn shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // `Slab` does not expose its buffer pointer, so estimate the
        // backing storage from its capacity and the size of the mirrored
        // internal entry type.
        mem::size_of::<Entry<T>>() * self.capacity()
    }
}
impl<T: MallocSizeOf> MallocSizeOf for Slab<T> {
    /// Estimated backing storage plus the heap usage of each occupied slot.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let occupied: usize =
            self.iter().map(|(_key, value)| value.size_of(ops)).sum();
        self.shallow_size_of(ops) + occupied
    }
}
impl<T> MallocShallowSizeOf for BinaryHeap<T> {
    fn shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // `BinaryHeap` does not expose its buffer pointer; estimate the
        // backing storage from the reserved capacity.
        mem::size_of::<T>() * self.capacity()
    }
}
impl<T: MallocSizeOf> MallocSizeOf for BinaryHeap<T> {
    /// Estimated backing buffer plus every element's own heap usage.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let elements: usize =
            self.iter().map(|elem| elem.size_of(ops)).sum();
        self.shallow_size_of(ops) + elements
    }
}
impl<T> MallocShallowSizeOf for VecDeque<T> {
    fn shallow_size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
        // `VecDeque` does not expose its buffer pointer; estimate the
        // backing ring buffer from the reserved capacity.
        mem::size_of::<T>() * self.capacity()
    }
}
impl<T: MallocSizeOf> MallocSizeOf for VecDeque<T> {
    /// Estimated ring buffer plus every element's own heap usage.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let elements: usize =
            self.iter().map(|elem| elem.size_of(ops)).sum();
        self.shallow_size_of(ops) + elements
    }
}
impl<T: MallocSizeOf> MallocSizeOf for Arc<T> {
    /// Deep size of an `Arc`'s pointee, counted at most once per distinct
    /// allocation: the `visited` set de-duplicates clones sharing the same
    /// backing allocation.
    ///
    /// NOTE: the shared block is approximated with `mem::size_of::<T>()`;
    /// the refcount header stored alongside `T` is not included.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let ptr = self.as_ref() as *const T as usize;
        // `HashSet::insert` returns `false` when the value was already
        // present, so one lookup replaces the previous `contains` +
        // `insert` pair (same behavior, one hash probe instead of two).
        if !ops.visited.insert(ptr) {
            return 0;
        }
        mem::size_of::<T>() + self.as_ref().size_of(ops)
    }
}
/// Implements `MallocShallowSizeOf` and `MallocSizeOf` for a hash-set type.
macro_rules! malloc_size_of_hash_set {
    ($ty:ty) => {
        impl<T, S> MallocShallowSizeOf for $ty
        where
            T: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // All elements live in a single table allocation, so
                    // measuring the block enclosing any one element covers
                    // the whole table. An empty set owns no allocation.
                    self.iter().next().map_or(0, |t| unsafe {
                        ops.malloc_enclosing_size_of(t)
                    })
                } else {
                    // Fallback estimate: one element plus roughly one word
                    // of table overhead per capacity slot.
                    self.capacity() * (size_of::<T>() + size_of::<usize>())
                }
            }
        }
        impl<T, S> MallocSizeOf for $ty
        where
            T: Eq + Hash + MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table storage plus each element's own heap usage.
                let mut n = self.shallow_size_of(ops);
                for t in self.iter() {
                    n += t.size_of(ops);
                }
                n
            }
        }
    };
}
malloc_size_of_hash_set!(std::collections::HashSet<T, S>);
/// Implements `MallocShallowSizeOf` and `MallocSizeOf` for a hash-map type.
macro_rules! malloc_size_of_hash_map {
    ($ty:ty) => {
        impl<K, V, S> MallocShallowSizeOf for $ty
        where
            K: Eq + Hash,
            S: BuildHasher,
        {
            fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                if ops.has_malloc_enclosing_size_of() {
                    // All entries live in a single table allocation, so
                    // measuring the block enclosing any one value covers
                    // the whole table. An empty map owns no allocation.
                    self.values().next().map_or(0, |v| unsafe {
                        ops.malloc_enclosing_size_of(v)
                    })
                } else {
                    // Fallback estimate: one key/value pair plus roughly
                    // one word of table overhead per capacity slot.
                    self.capacity()
                        * (size_of::<V>() + size_of::<K>() + size_of::<usize>())
                }
            }
        }
        impl<K, V, S> MallocSizeOf for $ty
        where
            K: Eq + Hash + MallocSizeOf,
            V: MallocSizeOf,
            S: BuildHasher,
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Table storage plus each key's and value's own heap usage.
                let mut n = self.shallow_size_of(ops);
                for (k, v) in self.iter() {
                    n += k.size_of(ops);
                    n += v.size_of(ops);
                }
                n
            }
        }
    };
}
malloc_size_of_hash_map!(std::collections::HashMap<K, V, S>);
malloc_size_of_hash_map!(FastHashMap<K, V, S>);
// `PhantomData` is zero-sized and owns nothing.
impl<T> MallocSizeOf for std::marker::PhantomData<T> {
    fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize { 0 }
}
impl<T: MallocSizeOf> MallocSizeOf for std::sync::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // NOTE: blocks until the lock is acquired and panics if the mutex
        // is poisoned.
        self.lock().unwrap().size_of(ops)
    }
}
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::Mutex<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // NOTE: blocks until the lock is acquired; deadlocks if the
        // current thread already holds it.
        self.lock().size_of(ops)
    }
}
impl<T: MallocSizeOf> MallocSizeOf for std::sync::RwLock<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // NOTE: blocks for a read lock and panics if the lock is poisoned.
        self.read().unwrap().size_of(ops)
    }
}
impl<T: MallocSizeOf> MallocSizeOf for parking_lot::RwLock<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        // NOTE: blocks until a read lock is acquired.
        self.read().size_of(ops)
    }
}
// `Reverse` is a transparent ordering wrapper; delegate to the inner value.
impl<T: MallocSizeOf> MallocSizeOf for Reverse<T> {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.0.size_of(ops)
    }
}
/// Implements `MallocSizeOf` for a `SmallVec` with the given inline
/// capacity.
macro_rules! impl_smallvec {
    ($size:expr) => {
        impl<T> MallocSizeOf for smallvec::SmallVec<[T; $size]>
        where T: MallocSizeOf
        {
            fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
                // Count the backing buffer only once the vector has
                // spilled to the heap; inline storage is part of the
                // `SmallVec` value itself and owns no heap memory.
                let mut n = if self.spilled() {
                    self.capacity() * core::mem::size_of::<T>()
                } else {
                    0
                };
                for elem in self.iter() {
                    n += elem.size_of(ops);
                }
                n
            }
        }
    };
}
impl_smallvec!(32);

/// Implements `MallocSizeOf` as a constant 0 for types that never own heap
/// memory. The second arm covers generic wrappers (e.g. `Range<T>`) whose
/// type parameters must also implement `MallocSizeOf`.
#[macro_export]
macro_rules! malloc_size_of_is_0(
    ($($ty:ty),+) => (
        $(
            impl $crate::MallocSizeOf for $ty {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
    ($($ty:ident<$($gen:ident),+>),+) => (
        $(
            impl<$($gen: $crate::MallocSizeOf),+> $crate::MallocSizeOf for $ty<$($gen),+> {
                #[inline(always)]
                fn size_of(&self, _: &mut $crate::MallocSizeOfOps) -> usize {
                    0
                }
            }
        )+
    );
);
// Primitives, atomics, and time types own no heap memory.
malloc_size_of_is_0!(bool, char, str);
malloc_size_of_is_0!(u8, u16, u32, u64, u128, usize);
malloc_size_of_is_0!(i8, i16, i32, i64, i128, isize);
malloc_size_of_is_0!(f32, f64);
malloc_size_of_is_0!(std::sync::atomic::AtomicBool);
malloc_size_of_is_0!(std::sync::atomic::AtomicIsize);
malloc_size_of_is_0!(std::sync::atomic::AtomicUsize);
malloc_size_of_is_0!(std::num::NonZeroUsize);
malloc_size_of_is_0!(std::num::NonZeroU32);
malloc_size_of_is_0!(std::time::Duration);
malloc_size_of_is_0!(std::time::Instant);
malloc_size_of_is_0!(std::time::SystemTime);
// Ranges over heap-free types are themselves heap-free (generic macro arm).
malloc_size_of_is_0!(
    Range<u8>,
    Range<u16>,
    Range<u32>,
    Range<u64>,
    Range<usize>
);
malloc_size_of_is_0!(
    Range<i8>,
    Range<i16>,
    Range<i32>,
    Range<i64>,
    Range<isize>
);
malloc_size_of_is_0!(Range<f32>, Range<f64>);
// Fixed-size hash/address/integer types from `cfx_types` are plain values.
malloc_size_of_is_0!(
    H256,
    U256,
    H512,
    H160,
    U512,
    AddressWithSpace,
    AllChainID,
    Space
);
/// Platform-specific bindings for querying the usable size of a heap
/// allocation from a raw pointer.
mod usable_size {
    use super::*;
    cfg_if! {
        if #[cfg(target_os = "windows")] {
            extern crate winapi;
            use self::winapi::um::heapapi::{GetProcessHeap, HeapSize, HeapValidate};

            /// Queries the Win32 process heap for the size of `ptr`'s block.
            pub unsafe extern "C" fn malloc_usable_size(mut ptr: *const c_void) -> usize {
                let heap = GetProcessHeap();
                if HeapValidate(heap, 0, ptr) == 0 {
                    // `ptr` is not a valid block start; assume the real
                    // block pointer is stored in the word just before it.
                    ptr = *(ptr as *const *const c_void).offset(-1);
                }
                HeapSize(heap, 0, ptr) as usize
            }
        } else if #[cfg(feature = "jemalloc-global")] {
            // jemalloc exposes its own usable-size query.
            pub unsafe extern "C" fn malloc_usable_size(ptr: *const c_void) -> usize {
                jemallocator::usable_size(ptr)
            }
        } else if #[cfg(target_os = "linux")] {
            // glibc's allocator query.
            extern "C" {
                pub fn malloc_usable_size(ptr: *const c_void) -> usize;
            }
        } else if #[cfg(target_os = "macos")] {
            // macOS names the equivalent function `malloc_size`.
            extern "C" {
                #[link_name = "malloc_size"]
                pub fn malloc_usable_size(ptr: *const c_void) -> usize;
            }
        } else {
            // No supported allocator query on this platform.
            pub unsafe extern "C" fn malloc_usable_size(_ptr: *const c_void) -> usize {
                unreachable!("estimate heapsize or feature allocator needed")
            }
        }
    }

    /// No enclosing-size (interior-pointer) query is available on any of
    /// the platforms above.
    #[inline]
    pub fn new_enclosing_size_fn() -> Option<VoidPtrToSizeFn> { None }
}
/// Convenience constructor: builds a `MallocSizeOfOps` backed by the
/// platform's `malloc_usable_size` and no enclosing-size callback.
pub fn new_malloc_size_ops() -> MallocSizeOfOps {
    MallocSizeOfOps::new(
        usable_size::malloc_usable_size,
        usable_size::new_enclosing_size_fn(),
    )
}