cfx_storage/impls/delta_mpt/
mem_optimized_trie_node.rs1#[derive(Default)]
9pub struct MemOptimizedTrieNode<CacheAlgoDataT: CacheAlgoDataTrait> {
10 slab_next_vacant_index: u32,
18
19 path_mask: u8,
40 path_size: u16,
43 path: MaybeInPlaceByteArray,
44 path_memory_manager: FieldsOffsetMaybeInPlaceByteArrayMemoryManager<
45 u16,
46 TrivialSizeFieldConverterU16,
47 MemOptimizedTrieNodePathMemoryManager<CacheAlgoDataT>,
48 MemOptimizedTrieNodePathMemoryManager<CacheAlgoDataT>,
49 >,
50
51 pub(super) children_table: ChildrenTableDeltaMpt,
56 value_size: u32,
61 value: MaybeInPlaceByteArray,
62 value_memory_manager: FieldsOffsetMaybeInPlaceByteArrayMemoryManager<
63 u32,
64 ValueSizeFieldConverter,
65 MemOptimizedTrieNodeValueMemoryManager<CacheAlgoDataT>,
66 MemOptimizedTrieNodeValueMemoryManager<CacheAlgoDataT>,
67 >,
68
69 merkle_hash: MerkleHash,
73
74 pub(in super::super) cache_algo_data: CacheAlgoDataT,
75}
76
impl<CacheAlgoDataT: CacheAlgoDataTrait> MallocSizeOf
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
    /// Accounts for heap memory only: path/value bytes are counted only
    /// when they exceed the in-place capacity and therefore live behind
    /// a heap pointer; inline bytes cost nothing extra.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let path_malloc_size = if self.path_memory_manager.get_size()
            > MaybeInPlaceByteArray::MAX_INPLACE_SIZE
        {
            // SAFETY: size exceeds MAX_INPLACE_SIZE, so the byte array
            // currently stores a heap pointer in `ptr` — NOTE(review):
            // relies on MaybeInPlaceByteArray's in-place/heap invariant;
            // confirm in that type's definition.
            unsafe { ops.malloc_size_of(self.path.ptr) }
        } else {
            0
        };
        let value_malloc_size = if self.value_memory_manager.get_size()
            > MaybeInPlaceByteArray::MAX_INPLACE_SIZE
        {
            // SAFETY: same invariant as for `path` above.
            unsafe { ops.malloc_size_of(self.value.ptr) }
        } else {
            0
        };
        self.children_table.size_of(ops)
            + self.cache_algo_data.size_of(ops)
            + value_malloc_size
            + path_malloc_size
    }
}
101
/// Action to apply to a trie node after a value/child modification is
/// analyzed (see `check_delete_value` and
/// `check_replace_or_delete_child_action`).
pub enum TrieNodeAction {
    /// Node stays; only its contents change.
    Modify,
    /// Node becomes empty and should be deleted.
    Delete,
    /// Node ends with exactly one child; its compressed path should be
    /// merged with that child's path.
    MergePath {
        child_index: u8,
        child_node_ref: NodeRefDeltaMpt,
    },
}
111
#[cfg(test)]
use super::node_memory_manager::TrieNodeDeltaMpt;
/// Guards the memory layout this module is optimized for: the node must
/// stay at 80 bytes, and wrapping it in a slab `Entry` must add no
/// overhead (the node implements `EntryTrait` on itself precisely so the
/// slab stores it without a wrapper).
#[test]
fn test_mem_optimized_trie_node_size() {
    assert_eq!(std::mem::size_of::<TrieNodeDeltaMpt>(), 80);
    // Fixed: was `assert_ne!`, which contradicts the zero-overhead
    // `EntryTrait` design below (EntryType = Self).
    assert_eq!(std::mem::size_of::<Entry<TrieNodeDeltaMpt>>(), 80)
}
120
// Generates `MemOptimizedTrieNodePathMemoryManager`: the parallel-field
// manager that ties the `path` byte array to its `path_size` length field
// for allocation/deallocation bookkeeping inside `MemOptimizedTrieNode`.
make_parallel_field_maybe_in_place_byte_array_memory_manager!(
    MemOptimizedTrieNodePathMemoryManager<CacheAlgoDataT> where <CacheAlgoDataT: CacheAlgoDataTrait>,
    MemOptimizedTrieNode<CacheAlgoDataT>,
    path_memory_manager,
    path,
    path_size: u16,
    TrivialSizeFieldConverterU16,
);
129
// Generates `MemOptimizedTrieNodeValueMemoryManager`: ties the `value`
// byte array to its `value_size` field, using `ValueSizeFieldConverter`
// so the tombstone sentinel is treated as logical size 0.
make_parallel_field_maybe_in_place_byte_array_memory_manager!(
    MemOptimizedTrieNodeValueMemoryManager<CacheAlgoDataT> where <CacheAlgoDataT: CacheAlgoDataTrait>,
    MemOptimizedTrieNode<CacheAlgoDataT>,
    value_memory_manager,
    value,
    value_size: u32,
    ValueSizeFieldConverter,
);
138
139impl<CacheAlgoDataT: CacheAlgoDataTrait> Clone
140 for MemOptimizedTrieNode<CacheAlgoDataT>
141{
142 fn clone(&self) -> Self {
143 Self::new(
144 self.merkle_hash.clone(),
145 self.children_table.clone(),
146 self.value_clone().into_option(),
147 self.compressed_path_ref().into(),
148 )
149 }
150}
151
// SAFETY: NOTE(review): the `MaybeInPlaceByteArray` fields hold raw
// pointers, which blocks auto-derivation of Send/Sync. These impls assert
// the heap buffers are exclusively owned by this node — TODO confirm no
// aliasing of `path`/`value` pointers across threads.
unsafe impl<CacheAlgoDataT: CacheAlgoDataTrait> Send
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
}
// SAFETY: see the note on the `Send` impl above.
unsafe impl<CacheAlgoDataT: CacheAlgoDataTrait> Sync
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
}
166
/// Converts between the stored `u32` value-size field and logical sizes,
/// reserving the all-ones bit pattern as a tombstone for deleted values.
#[derive(Default, Debug)]
struct ValueSizeFieldConverter {}

impl ValueSizeFieldConverter {
    /// Maximum storable value length: one below `u32::MAX` because the
    /// all-ones pattern is reserved for the tombstone.
    const MAX_VALUE_SIZE: usize = 0xfffffffe;
    /// Sentinel stored in `value_size` for a deleted (tombstoned) value.
    const VALUE_TOMBSTONE: u32 = 0xffffffff;
}
180
181impl SizeFieldConverterTrait<u32> for ValueSizeFieldConverter {
182 fn max_size() -> usize { Self::MAX_VALUE_SIZE }
183
184 fn is_size_over_limit(size: usize) -> bool { size > Self::MAX_VALUE_SIZE }
185
186 fn get(size_field: &u32) -> usize {
187 if *size_field == Self::VALUE_TOMBSTONE {
188 0
189 } else {
190 (*size_field) as usize
191 }
192 }
193
194 fn set(size_field: &mut u32, size: usize) { *size_field = size as u32; }
195}
196
impl<CacheAlgoDataT: CacheAlgoDataTrait> MemOptimizedTrieNode<CacheAlgoDataT> {
    /// Length in bytes of the compressed path.
    pub fn get_compressed_path_size(&self) -> u16 { self.path_size }

    /// Replaces the compressed path with a copy of `new_path`'s bytes,
    /// releasing any heap storage held by the previous path first.
    pub fn copy_compressed_path<CompressedPath: CompressedPathTrait>(
        &mut self, new_path: CompressedPath,
    ) {
        // SAFETY: the old path storage is dropped before `path`,
        // `path_size` and `path_mask` are overwritten below, so no stale
        // heap pointer survives — NOTE(review): assumes `drop_value`
        // leaves the field in an assignable state; confirm in the
        // memory-manager implementation.
        unsafe {
            self.path_memory_manager.drop_value();
        }
        self.path_size = new_path.path_size();
        self.path_mask = new_path.path_mask();
        let path_slice = new_path.path_slice();
        self.path =
            MaybeInPlaceByteArray::copy_from(path_slice, path_slice.len());
    }

    /// Owned copy of the value: `None` when `value_size` is 0,
    /// `TombStone` for the tombstone sentinel, otherwise the bytes.
    pub fn value_clone(&self) -> MptValue<Box<[u8]>> {
        let size = self.value_size;
        if size == 0 {
            MptValue::None
        } else if size == ValueSizeFieldConverter::VALUE_TOMBSTONE {
            MptValue::TombStone
        } else {
            MptValue::Some(self.value.get_slice(size as usize).into())
        }
    }

    /// Moves the value out of the node; afterwards the node has no value
    /// (`value_size` reset to 0, tombstone included).
    fn value_into_boxed_slice(&mut self) -> MptValue<Box<[u8]>> {
        let size = self.value_size;
        let maybe_value;
        if size == 0 {
            maybe_value = MptValue::None;
        } else {
            if size == ValueSizeFieldConverter::VALUE_TOMBSTONE {
                maybe_value = MptValue::TombStone
            } else {
                maybe_value =
                    MptValue::Some(self.value.into_boxed_slice(size as usize));
            }
            // Clearing the size marks the bytes as moved-out so the
            // memory manager won't double-free them.
            self.value_size = 0;
        }
        maybe_value
    }

    /// Rejects values longer than the maximum encodable size.
    pub fn check_value_size(value: &[u8]) -> Result<()> {
        let value_size = value.len();
        if ValueSizeFieldConverter::is_size_over_limit(value_size) {
            return Err(Error::MPTInvalidValueLength {
                length: value_size,
                length_limit: ValueSizeFieldConverter::max_size(),
            });
        }
        Ok(())
    }

    /// Rejects keys that are too long or empty; both cases are reported
    /// as `MPTInvalidKeyLength`.
    pub fn check_key_size(access_key: &[u8]) -> Result<()> {
        let key_size = access_key.len();
        if TrivialSizeFieldConverterU16::is_size_over_limit(key_size) {
            return Err(Error::MPTInvalidKeyLength {
                length: key_size,
                length_limit: TrivialSizeFieldConverterU16::max_size(),
            });
        }
        if key_size == 0 {
            return Err(Error::MPTInvalidKeyLength {
                length: key_size,
                length_limit: TrivialSizeFieldConverterU16::max_size(),
            });
        }

        Ok(())
    }
}
277
impl<CacheAlgoDataT: CacheAlgoDataTrait> TrieNodeTrait
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
    type ChildrenTableType = ChildrenTableDeltaMpt;
    type NodeRefType = NodeRefDeltaMptCompact;

    /// Borrows the compressed path bytes together with the nibble mask.
    fn compressed_path_ref(&self) -> CompressedPathRef<'_> {
        let size = self.path_size;
        CompressedPathRef::new(
            self.path.get_slice(size as usize),
            self.path_mask,
        )
    }

    // Non-zero size covers both real values and the tombstone sentinel,
    // so a tombstoned node still "has a value".
    fn has_value(&self) -> bool { self.value_size > 0 }

    fn get_children_count(&self) -> u8 {
        self.children_table.get_children_count()
    }

    /// Borrowed view of the value; same tri-state decoding as
    /// `value_clone` but without copying the bytes.
    fn value_as_slice(&self) -> MptValue<&[u8]> {
        let size = self.value_size;
        if size == 0 {
            MptValue::None
        } else if size == ValueSizeFieldConverter::VALUE_TOMBSTONE {
            MptValue::TombStone
        } else {
            MptValue::Some(self.value.get_slice(size as usize))
        }
    }

    /// Takes ownership of `path`'s storage by moving its memory manager
    /// into this node. NOTE(review): only `path_mask` is copied
    /// explicitly — presumably `move_to` transfers the bytes and size
    /// field as well; confirm in the memory-manager implementation.
    fn set_compressed_path(&mut self, mut path: CompressedPathRaw) {
        self.path_mask = path.path_mask();

        path.byte_array_memory_manager
            .move_to(&mut self.path_memory_manager);
    }

    // SAFETY contract: caller must ensure no child exists at
    // `child_index` (hence "unchecked").
    unsafe fn add_new_child_unchecked<T>(&mut self, child_index: u8, child: T)
    where ChildrenTableItem<NodeRefDeltaMptCompact>:
        WrappedCreateFrom<T, NodeRefDeltaMptCompact> {
        self.children_table = CompactedChildrenTable::insert_child_unchecked(
            self.children_table.to_ref(),
            child_index,
            ChildrenTableItem::<NodeRefDeltaMptCompact>::take(child),
        );
    }

    // SAFETY contract: caller must ensure a child exists at
    // `child_index`.
    unsafe fn get_child_mut_unchecked(
        &mut self, child_index: u8,
    ) -> &mut Self::NodeRefType {
        self.children_table.get_child_mut_unchecked(child_index)
    }

    // SAFETY contract: caller must ensure a child exists at
    // `child_index` to be replaced.
    unsafe fn replace_child_unchecked<T>(&mut self, child_index: u8, child: T)
    where ChildrenTableItem<NodeRefDeltaMptCompact>:
        WrappedCreateFrom<T, NodeRefDeltaMptCompact> {
        self.children_table.set_child_unchecked(
            child_index,
            ChildrenTableItem::<NodeRefDeltaMptCompact>::take(child),
        );
    }

    // SAFETY contract: caller must ensure a child exists at
    // `child_index`.
    unsafe fn delete_child_unchecked(&mut self, child_index: u8) {
        self.children_table = CompactedChildrenTable::delete_child_unchecked(
            self.children_table.to_ref(),
            child_index,
        );
    }

    // SAFETY contract: caller must ensure the node holds a real value;
    // the unwrap assumes the moved-out MptValue is `Some`.
    unsafe fn delete_value_unchecked(&mut self) -> Box<[u8]> {
        self.value_into_boxed_slice().unwrap()
    }

    /// Installs `valid_value` and returns the previous value. An empty
    /// slice is stored as a tombstone (deletion marker) rather than as
    /// zero-length bytes.
    fn replace_value_valid(
        &mut self, valid_value: Box<[u8]>,
    ) -> MptValue<Box<[u8]>> {
        let old_value = self.value_into_boxed_slice();
        let value_size = valid_value.len();
        if value_size == 0 {
            self.value_size = ValueSizeFieldConverter::VALUE_TOMBSTONE;
        } else {
            self.value = MaybeInPlaceByteArray::new(valid_value, value_size);
            self.value_size = value_size as u32;
        }

        old_value
    }

    fn get_children_table_ref(&self) -> &Self::ChildrenTableType {
        &self.children_table
    }
}
371
impl<'node, CacheAlgoDataT: CacheAlgoDataTrait> GetChildTrait<'node>
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
    type ChildIdType = NodeRefDeltaMptCompact;

    /// Checked child lookup; `None` when no child exists at the index.
    fn get_child(
        &'node self, child_index: u8,
    ) -> Option<NodeRefDeltaMptCompact> {
        self.children_table.get_child(child_index)
    }
}
383
// Marker impl: trie walking comes entirely from the trait's default
// methods (built on `GetChildTrait` / `TrieNodeTrait` above).
impl<'node, CacheAlgoDataT: CacheAlgoDataTrait> TrieNodeWalkTrait<'node>
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
}
388
impl<CacheAlgoDataT: CacheAlgoDataTrait> MemOptimizedTrieNode<CacheAlgoDataT> {
    /// Builds a node from its logical parts. An empty value slice is
    /// stored as a tombstone by `replace_value_valid`.
    pub fn new(
        merkle: MerkleHash, children_table: ChildrenTableDeltaMpt,
        maybe_value: Option<Box<[u8]>>, compressed_path: CompressedPathRaw,
    ) -> MemOptimizedTrieNode<CacheAlgoDataT> {
        let mut ret = MemOptimizedTrieNode::default();

        ret.merkle_hash = merkle;
        ret.children_table = children_table;
        match maybe_value {
            None => {}
            Some(value) => {
                ret.replace_value_valid(value);
            }
        }
        ret.set_compressed_path(compressed_path);

        ret
    }

    /// Copies `self` with selected fields replaced:
    /// `new_value`: `Some(Some(v))` sets a value, `Some(None)` leaves the
    /// copy valueless, `None` copies the existing value bytes (a
    /// tombstone is preserved, since its logical size is 0 but
    /// `value_size` is copied verbatim).
    /// NOTE(review): `unsafe` presumably because the raw byte copy via
    /// `get_slice` relies on `value_size`/memory-manager consistency —
    /// confirm the exact caller contract.
    pub unsafe fn copy_and_replace_fields(
        &self, new_value: Option<Option<Box<[u8]>>>,
        new_path: Option<CompressedPathRaw>,
        children_table: Option<ChildrenTableDeltaMpt>,
    ) -> MemOptimizedTrieNode<CacheAlgoDataT> {
        let mut ret = MemOptimizedTrieNode::default();

        match new_value {
            Some(maybe_value) => match maybe_value {
                Some(value) => {
                    ret.replace_value_valid(value);
                }
                None => {}
            },
            None => {
                let byte_size = self.value_memory_manager.get_size();
                ret.value_size = self.value_size;
                ret.value = MaybeInPlaceByteArray::copy_from(
                    self.value.get_slice(byte_size),
                    byte_size,
                );
            }
        }

        match new_path {
            Some(path) => ret.set_compressed_path(path),
            None => ret.copy_compressed_path(self.compressed_path_ref()),
        }

        match children_table {
            Some(table) => ret.children_table = table,
            None => ret.children_table = self.children_table.clone(),
        }

        ret
    }

    /// What should happen to this node if its value is deleted:
    /// no children -> delete the node; one child -> merge paths;
    /// otherwise just modify. Errors if there is no value to delete.
    pub fn check_delete_value(&self) -> Result<TrieNodeAction> {
        if self.has_value() {
            Ok(match self.get_children_count() {
                0 => TrieNodeAction::Delete,
                1 => self.merge_path_action(),
                _ => TrieNodeAction::Modify,
            })
        } else {
            Err(Error::MPTKeyNotFound.into())
        }
    }

    /// MergePath action toward the node's single remaining child.
    fn merge_path_action(&self) -> TrieNodeAction {
        let (i, node_ref) = self
            .children_table
            .iter()
            .next()
            .expect("Only called when children_count == 1");
        TrieNodeAction::MergePath {
            child_index: i,
            child_node_ref: (*node_ref).into(),
        }
    }

    /// MergePath action toward the sibling that survives after the child
    /// at `child_index` is deleted (caller guarantees exactly two
    /// children exist, hence the `unreachable!`).
    fn merge_path_action_after_child_deletion(
        &self, child_index: u8,
    ) -> TrieNodeAction {
        for (i, node_ref) in self.children_table.iter() {
            if i != child_index {
                return TrieNodeAction::MergePath {
                    child_index: i,
                    child_node_ref: (*node_ref).into(),
                };
            }
        }
        unreachable!()
    }

    // SAFETY contract: replaces the whole children table; caller must
    // ensure this node is meant to hold exactly this one child.
    pub unsafe fn set_first_child_unchecked(
        &mut self, child_index: u8, child: NodeRefDeltaMptCompact,
    ) {
        self.children_table =
            ChildrenTableDeltaMpt::new_from_one_child(child_index, child);
    }

    /// Action for replacing or (when `new_child_node` is `None`)
    /// deleting a child: deleting one of exactly two children leaves a
    /// single child, which triggers a path merge; everything else is a
    /// plain modification.
    pub fn check_replace_or_delete_child_action(
        &self, child_index: u8, new_child_node: Option<NodeRefDeltaMptCompact>,
    ) -> TrieNodeAction {
        if new_child_node.is_none() {
            match self.get_children_count() {
                2 => {
                    return self
                        .merge_path_action_after_child_deletion(child_index);
                }
                _ => {}
            }
        }
        return TrieNodeAction::Modify;
    }

    pub fn get_merkle(&self) -> &MerkleHash { &self.merkle_hash }

    pub fn set_merkle(&mut self, merkle: &MerkleHash) {
        self.merkle_hash = merkle.clone();
    }
}
523
impl<CacheAlgoDataT: CacheAlgoDataTrait> EntryTrait
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
    // The node is its own slab entry type, so occupied entries carry no
    // wrapper overhead (see the size test above).
    type EntryType = MemOptimizedTrieNode<CacheAlgoDataT>;

    fn from_value(value: Self) -> Self { value }

    /// A vacant slot: all payload fields empty, only the free-list link
    /// (`slab_next_vacant_index`) is meaningful.
    fn from_vacant_index(next: usize) -> Self {
        Self {
            slab_next_vacant_index: next as u32,
            children_table: Default::default(),
            merkle_hash: Default::default(),
            path_mask: CompressedPathRaw::NO_MISSING_NIBBLE,
            path_size: 0,
            path: Default::default(),
            path_memory_manager: Default::default(),
            value_size: 0,
            value: Default::default(),
            value_memory_manager: Default::default(),
            cache_algo_data: Default::default(),
        }
    }

    fn is_vacant(&self) -> bool {
        // NOTE(review): vacancy is encoded in the free-list field: a
        // slot is vacant when its link differs from the NULL compact
        // node ref — this assumes occupied nodes always hold the NULL
        // value here (the Default); confirm against the slab's
        // allocation sites.
        self.slab_next_vacant_index as CompactNodeRef
            != MaybeNodeRefDeltaMptCompact::NULL
    }

    /// Moves the occupied node out and leaves a vacant slot linking to
    /// `next` in its place.
    fn take_occupied_and_replace_with_vacant_index(
        &mut self, next: usize,
    ) -> MemOptimizedTrieNode<CacheAlgoDataT> {
        std::mem::replace(self, Self::from_vacant_index(next))
    }

    fn get_next_vacant_index(&self) -> usize {
        self.slab_next_vacant_index as usize
    }

    fn get_occupied_ref(&self) -> &MemOptimizedTrieNode<CacheAlgoDataT> { self }

    fn get_occupied_mut(
        &mut self,
    ) -> &mut MemOptimizedTrieNode<CacheAlgoDataT> {
        self
    }
}
571
impl<CacheAlgoDataT: CacheAlgoDataTrait> Encodable
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
    /// Encodes as an RLP list `[merkle, children, value]`, with a fourth
    /// item appended only when the compressed path is non-empty. The
    /// `Decodable` impl below distinguishes the two shapes by item
    /// count.
    fn rlp_append(&self, s: &mut RlpStream) {
        s.begin_unbounded_list()
            .append(self.get_merkle())
            .append(&self.get_children_table_ref().to_ref())
            .append(&self.value_as_slice().into_option());

        let compressed_path_ref = self.compressed_path_ref();
        if compressed_path_ref.path_size() > 0 {
            s.append(&compressed_path_ref);
        }

        s.finalize_unbounded_list();
    }
}
589
impl<CacheAlgoDataT: CacheAlgoDataTrait> Decodable
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
    /// Inverse of `rlp_append`: a 4-item list carries a compressed path
    /// at index 3; a 3-item list means the path is empty.
    fn decode(rlp: &Rlp) -> ::std::result::Result<Self, DecoderError> {
        let compressed_path = if rlp.item_count()? != 4 {
            CompressedPathRaw::new(&[], 0)
        } else {
            rlp.val_at(3)?
        };

        Ok(MemOptimizedTrieNode::new(
            MerkleHash::from_slice(rlp.val_at::<Vec<u8>>(0)?.as_slice()),
            rlp.val_at::<ChildrenTableManagedDeltaMpt>(1)?.into(),
            rlp.val_at::<Option<Vec<u8>>>(2)?
                .map(|v| v.into_boxed_slice()),
            compressed_path,
        ))
    }
}
609
610impl<CacheAlgoDataT: CacheAlgoDataTrait> PartialEq
611 for MemOptimizedTrieNode<CacheAlgoDataT>
612{
613 fn eq(&self, other: &Self) -> bool {
614 self.value_as_slice() == other.value_as_slice()
615 && self.children_table == other.children_table
616 && self.merkle_hash == other.merkle_hash
617 && self.compressed_path_ref() == other.compressed_path_ref()
618 }
619}
620
impl<CacheAlgoDataT: CacheAlgoDataTrait> Debug
    for MemOptimizedTrieNode<CacheAlgoDataT>
{
    // Prints the logical fields only; slab/cache bookkeeping is omitted,
    // mirroring the PartialEq impl.
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        write!(f,
            "TrieNode{{ merkle: {:?}, value: {:?}, children_table: {:?}, compressed_path: {:?} }}",
            self.merkle_hash, self.value_as_slice(),
            &self.children_table, self.compressed_path_ref())
    }
}
631
632use super::{
633 super::{
634 super::utils::WrappedCreateFrom,
635 errors::*,
636 merkle_patricia_trie::{maybe_in_place_byte_array::*, walk::*},
637 },
638 cache::algorithm::CacheAlgoDataTrait,
639 node_ref::*,
640 slab::*,
641 *,
642};
643use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
644use primitives::{MerkleHash, MptValue};
645use rlp::*;
646use std::{
647 fmt::{Debug, Formatter},
648 marker::{Send, Sync},
649 vec::Vec,
650};