// cfx_storage/impls/merkle_patricia_trie/compressed_path.rs
compressed_path.rs1pub trait CompressedPathTrait: Debug {
6 fn path_slice(&self) -> &[u8];
7 fn path_mask(&self) -> u8;
8
9 fn path_size(&self) -> u16 { self.path_slice().len() as u16 }
10
11 fn path_steps(&self) -> u16 {
12 CompressedPathRaw::calculate_path_steps(
13 self.path_size(),
14 self.path_mask(),
15 )
16 }
17
18 fn as_ref(&self) -> CompressedPathRef<'_> {
19 CompressedPathRef {
20 path_slice: self.path_slice(),
21 path_mask: self.path_mask(),
22 }
23 }
24
25 fn rlp_append(&self, s: &mut RlpStream) {
26 s.begin_list(2);
27 s.append(&self.path_mask()).append(&self.path_slice());
28 }
29}
30
// A plain byte slice is a whole-byte path: no nibble is missing at either
// end, so the mask is NO_MISSING_NIBBLE (0).
impl CompressedPathTrait for [u8] {
    fn path_slice(&self) -> &[u8] { self }

    fn path_mask(&self) -> u8 { CompressedPathRaw::NO_MISSING_NIBBLE }
}
36
// Same as the `[u8]` impl, for a borrowed slice: a whole-byte path with no
// missing nibbles.
impl<'a> CompressedPathTrait for &'a [u8] {
    fn path_slice(&self) -> &[u8] { self }

    fn path_mask(&self) -> u8 { CompressedPathRaw::NO_MISSING_NIBBLE }
}
42
/// Borrowed view of a compressed path: a byte slice plus the nibble mask.
/// Produced by `CompressedPathTrait::as_ref`; also used as the canonical
/// representation for equality and hashing of `CompressedPathRaw`.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub struct CompressedPathRef<'a> {
    pub path_slice: &'a [u8],
    // Same semantics as CompressedPathRaw::path_mask; kept private and set
    // through `new`.
    path_mask: u8,
}
48
49impl<'a> CompressedPathRef<'a> {
50 pub fn new(path_slice: &'a [u8], path_mask: u8) -> Self {
51 Self {
52 path_slice,
53 path_mask,
54 }
55 }
56}
57
/// Owned compressed path. The bytes live in `path`, which stores them
/// either in place or behind a heap pointer depending on length (see
/// `MaybeInPlaceByteArray`); `path_size` is the authoritative length.
///
/// NOTE(review): the memory manager is offset-based over this struct's
/// fields (see the macro invocation below), so the field order here is
/// load-bearing — do not reorder.
#[derive(Default)]
pub struct CompressedPathRaw {
    path_size: u16,
    path: MaybeInPlaceByteArray,
    path_mask: u8,
    // Zero-sized (asserted in the tests module); manages allocation and
    // drop of `path` using `path_size` as the length field.
    pub byte_array_memory_manager:
        FieldsOffsetMaybeInPlaceByteArrayMemoryManager<
            u16,
            TrivialSizeFieldConverterU16,
            CompressedPathRawByteArrayMemoryManager,
            CompressedPathRawByteArrayMemoryManager,
        >,
}
71
// SAFETY: CompressedPathRaw exclusively owns the bytes behind
// MaybeInPlaceByteArray (copied in at construction, freed by the memory
// manager), so moving/sharing it across threads does not alias mutable
// state. NOTE(review): soundness ultimately depends on the ownership
// invariants of MaybeInPlaceByteArray — confirm in
// maybe_in_place_byte_array.rs.
unsafe impl Send for CompressedPathRaw {}
unsafe impl Sync for CompressedPathRaw {}
75
#[cfg(test)]
mod tests {
    use super::{super::maybe_in_place_byte_array::*, *};

    /// The parallel-field memory manager must stay a zero-sized type so it
    /// adds no bytes to `CompressedPathRaw`.
    #[test]
    fn test_compressed_path_raw_memory_manager_size() {
        type Manager = FieldsOffsetMaybeInPlaceByteArrayMemoryManager<
            u16,
            TrivialSizeFieldConverterU16,
            CompressedPathRawByteArrayMemoryManager,
            CompressedPathRawByteArrayMemoryManager,
        >;
        assert_eq!(std::mem::size_of::<Manager>(), 0);
    }
}
95
// Generates `CompressedPathRawByteArrayMemoryManager`, which manages the
// `path` byte array of `CompressedPathRaw` through the
// `byte_array_memory_manager` field, reading the length from `path_size`
// via `TrivialSizeFieldConverterU16`.
make_parallel_field_maybe_in_place_byte_array_memory_manager!(
    CompressedPathRawByteArrayMemoryManager,
    CompressedPathRaw,
    byte_array_memory_manager,
    path,
    path_size: u16,
    TrivialSizeFieldConverterU16,
);
104
impl CompressedPathRaw {
    // Low nibble of a byte (bits 0-3).
    const BITS_0_3_MASK: u8 = 0x0f;
    // High nibble of a byte (bits 4-7).
    const BITS_4_7_MASK: u8 = 0xf0;
    /// Mask value meaning every nibble of the path is present.
    pub const NO_MISSING_NIBBLE: u8 = 0;
}
110
111impl<'a> CompressedPathTrait for CompressedPathRef<'a> {
112 fn path_slice(&self) -> &[u8] { self.path_slice }
113
114 fn path_mask(&self) -> u8 { self.path_mask }
115
116 fn path_size(&self) -> u16 { self.path_slice.len() as u16 }
117}
118
impl CompressedPathTrait for CompressedPathRaw {
    fn path_slice(&self) -> &[u8] {
        // `path_size` tells MaybeInPlaceByteArray how many bytes are valid
        // (and implicitly whether they are in place or on the heap).
        self.path.get_slice(self.path_size as usize)
    }

    fn path_mask(&self) -> u8 { self.path_mask }
}
126
127impl<'a> From<&'a [u8]> for CompressedPathRaw {
128 fn from(x: &'a [u8]) -> Self {
129 CompressedPathRaw::new(x, Self::NO_MISSING_NIBBLE)
130 }
131}
132
133impl<'a> From<CompressedPathRef<'a>> for CompressedPathRaw {
134 fn from(x: CompressedPathRef<'a>) -> Self {
135 CompressedPathRaw::new(x.path_slice, x.path_mask)
136 }
137}
138
impl CompressedPathRaw {
    /// Creates an owned path by copying `path_slice`. `path_mask` marks the
    /// missing nibbles (see `first_nibble`/`second_nibble` below); the mask
    /// is stored as-is, no bits of the slice are cleared — use
    /// `new_and_apply_mask` for that.
    pub fn new(path_slice: &[u8], path_mask: u8) -> Self {
        let path_size = path_slice.len();

        Self {
            path_size: path_size as u16,
            path: MaybeInPlaceByteArray::copy_from(path_slice, path_size),
            path_mask,
            byte_array_memory_manager: Default::default(),
        }
    }

    /// Mutable reference to the last byte of the path.
    ///
    /// SAFETY (for the `get_unchecked_mut`): requires `path_size > 0`,
    /// otherwise the index underflows. Callers in this file either check
    /// `path_size > 0` explicitly (`new_and_apply_mask`) or construct a
    /// non-empty buffer first (`extend_path`). NOTE(review): the
    /// `extend_path` no-second-nibble branch relies on a non-zero mask
    /// implying a non-empty path — confirm.
    #[inline]
    fn last_byte_mut(&mut self) -> &mut u8 {
        unsafe {
            self.path
                .get_slice_mut(self.path_size as usize)
                .get_unchecked_mut(self.path_size as usize - 1)
        }
    }

    /// Like `new`, but additionally zeroes the bits of the last byte that
    /// the mask's high nibble marks as unused.
    pub fn new_and_apply_mask(path_slice: &[u8], path_mask: u8) -> Self {
        let path_size = path_slice.len();
        let mut ret = Self {
            path_size: path_size as u16,
            path: MaybeInPlaceByteArray::copy_from(path_slice, path_size),
            path_mask,
            byte_array_memory_manager: Default::default(),
        };
        if path_size > 0 {
            // first_nibble(path_mask) moves the mask's high nibble down to
            // the low-bit position of the last byte before clearing it.
            *ret.last_byte_mut() &= !Self::first_nibble(path_mask);
        }

        ret
    }

    /// Allocates a zero-filled path of `path_size` bytes with the given mask.
    pub fn new_zeroed(path_size: u16, path_mask: u8) -> Self {
        Self {
            path_size,
            path: MaybeInPlaceByteArray::new_zeroed(path_size as usize),
            path_mask,
            byte_array_memory_manager: Default::default(),
        }
    }

    /// Mask selecting a byte's first (low-bits) nibble region.
    #[inline]
    pub const fn first_nibble_mask() -> u8 { Self::BITS_0_3_MASK }

    /// Mask selecting a byte's second (high-bits) nibble region.
    #[inline]
    pub const fn second_nibble_mask() -> u8 { Self::BITS_4_7_MASK }

    /// Nibble-step count: two steps per byte, minus one for each end of the
    /// path whose nibble the mask marks missing (high and low mask nibbles
    /// are independent flags).
    #[inline]
    fn calculate_path_steps(path_size: u16, path_mask: u8) -> u16 {
        path_size * 2
            - (Self::clear_second_nibble(path_mask) != 0) as u16
            - (Self::second_nibble(path_mask) != 0) as u16
    }

    /// Places a nibble value into a byte's first-nibble (high) position.
    #[inline]
    pub fn from_first_nibble(x: u8) -> u8 { x << 4 }

    /// Extracts a byte's first nibble (high 4 bits) as a value.
    #[inline]
    pub fn first_nibble(x: u8) -> u8 { x >> 4 }

    /// Keeps only the high 4 bits, zeroing the second-nibble position.
    #[inline]
    pub fn clear_second_nibble(x: u8) -> u8 { x & Self::BITS_4_7_MASK }

    /// Extracts a byte's second nibble (low 4 bits).
    #[inline]
    pub fn second_nibble(x: u8) -> u8 { x & Self::BITS_0_3_MASK }

    /// Overwrites a byte's second (low) nibble with `second_nibble`.
    #[inline]
    pub fn set_second_nibble(x: u8, second_nibble: u8) -> u8 {
        Self::clear_second_nibble(x) | second_nibble
    }

    /// True when the mask's high nibble is clear, i.e. the last byte's
    /// second nibble is part of the path.
    #[inline]
    pub fn has_second_nibble(path_mask: u8) -> bool {
        Self::clear_second_nibble(path_mask)
            == CompressedPathRaw::NO_MISSING_NIBBLE
    }

    /// Logical negation of `has_second_nibble`.
    #[inline]
    pub fn no_second_nibble(path_mask: u8) -> bool {
        Self::clear_second_nibble(path_mask)
            != CompressedPathRaw::NO_MISSING_NIBBLE
    }

    /// Appends one child nibble to `x`. If `x` ends on a complete byte, the
    /// path grows by one byte holding `child_index` in its first (high)
    /// nibble, and the mask marks the new byte's second nibble missing;
    /// otherwise `child_index` fills the pending second (low) nibble of the
    /// existing last byte and the mask's high nibble is cleared.
    pub fn extend_path<X: CompressedPathTrait>(x: &X, child_index: u8) -> Self {
        let new_size;
        let path_mask;
        let x_path_mask = x.path_mask();
        if Self::has_second_nibble(x_path_mask) {
            new_size = x.path_size() + 1;
            path_mask = x_path_mask | Self::second_nibble_mask();
        } else {
            new_size = x.path_size();
            // Keep only the start-of-path (low) half of the mask.
            path_mask = Self::second_nibble(x_path_mask);
        }
        let mut ret = Self::new_zeroed(new_size, path_mask);
        ret.path.get_slice_mut(new_size as usize)[0..x.path_size() as usize]
            .copy_from_slice(x.path_slice());
        if Self::has_second_nibble(x_path_mask) {
            *ret.last_byte_mut() = Self::from_first_nibble(child_index);
        } else {
            let last_byte = *ret.last_byte_mut();
            *ret.last_byte_mut() =
                Self::set_second_nibble(last_byte, child_index);
        }

        ret
    }

    /// Concatenates `x`, the connecting `child_index` nibble, and `y` into
    /// one owned path. When `x` ends with a missing second nibble,
    /// `child_index` is written into that slot; the result's mask combines
    /// y's end (high) half with x's start (low) half.
    ///
    /// NOTE(review): when `x` ends on a complete byte, `child_index` is not
    /// written anywhere and the size is exactly `x.len() + y.len()` —
    /// presumably "connected" means the joint nibble is already encoded at
    /// y's start in that case; confirm with callers.
    pub fn join_connected_paths<
        X: CompressedPathTrait,
        Y: CompressedPathTrait,
    >(
        x: &X, child_index: u8, y: &Y,
    ) -> Self {
        let x_slice = x.path_slice();
        let x_slice_len = x_slice.len();
        let x_path_mask = x.path_mask();
        let y_slice = y.path_slice();

        let size = x_slice_len + y_slice.len();

        let mut path;
        {
            let slice;
            #[allow(clippy::uninit_vec)]
            unsafe {
                if size > MaybeInPlaceByteArray::MAX_INPLACE_SIZE {
                    // Heap buffer. The Vec is deliberately left
                    // uninitialized (set_len) because every byte is
                    // overwritten below before any read.
                    let mut value = Vec::with_capacity(size);
                    value.set_len(size);
                    let mut value_box = value.into_boxed_slice();

                    let ptr = value_box.as_mut_ptr();
                    // Intentional "leak": ownership of the allocation is
                    // transferred to MaybeInPlaceByteArray { ptr }; it is
                    // reclaimed by the byte-array memory manager on drop.
                    let _ = Box::into_raw(value_box);
                    path = MaybeInPlaceByteArray { ptr };
                    slice = std::slice::from_raw_parts_mut(ptr, size);
                } else {
                    // Small enough to store inline.
                    let in_place: [u8;
                        MaybeInPlaceByteArray::MAX_INPLACE_SIZE] =
                        Default::default();
                    path = MaybeInPlaceByteArray { in_place };
                    slice = &mut path.in_place[0..size];
                }
            }

            if Self::has_second_nibble(x_path_mask) {
                // x ends on a full byte: copy it verbatim.
                slice[0..x_slice_len].copy_from_slice(x_slice);
            } else {
                // x's last byte has a free second (low) nibble: fill it
                // with the connecting child_index.
                slice[0..x_slice_len - 1]
                    .copy_from_slice(&x_slice[0..x_slice_len - 1]);
                slice[x_slice_len - 1] = CompressedPathRaw::set_second_nibble(
                    x_slice[x_slice_len - 1],
                    child_index,
                );
            }
            slice[x_slice_len..].copy_from_slice(y_slice);
        }

        Self {
            path_size: size as u16,
            path,
            // End-of-path info comes from y, start-of-path info from x.
            path_mask: Self::set_second_nibble(
                y.path_mask(),
                CompressedPathRaw::second_nibble(x_path_mask),
            ),
            byte_array_memory_manager: Default::default(),
        }
    }
}
329
// Manual Clone: MaybeInPlaceByteArray cannot derive it because the number
// of valid bytes (`path_size`) lives outside the array itself.
impl Clone for CompressedPathRaw {
    fn clone(&self) -> Self {
        Self {
            path_mask: self.path_mask,
            path_size: self.path_size,
            path: MaybeInPlaceByteArray::clone(
                &self.path,
                self.path_size as usize,
            ),
            byte_array_memory_manager: Default::default(),
        }
    }
}
343
// Delegates to the shared trait encoding ([path_mask, path_slice]) so both
// path types produce identical RLP.
impl<'a> Encodable for CompressedPathRef<'a> {
    fn rlp_append(&self, s: &mut RlpStream) {
        CompressedPathTrait::rlp_append(self, s);
    }
}
349
// Same delegation as for CompressedPathRef; keeps the wire format uniform.
impl Encodable for CompressedPathRaw {
    fn rlp_append(&self, s: &mut RlpStream) {
        CompressedPathTrait::rlp_append(self, s);
    }
}
355
impl Decodable for CompressedPathRaw {
    // Index order mirrors CompressedPathTrait::rlp_append: item 0 is the
    // path mask, item 1 the path bytes.
    fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
        Ok(CompressedPathRaw::new(
            rlp.val_at::<Vec<u8>>(1)?.as_slice(),
            rlp.val_at(0)?,
        ))
    }
}
364
365impl Serialize for CompressedPathRaw {
366 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
367 where S: Serializer {
368 let path_mask = self.path_mask();
369 let path_mask = format!("0x{:x}", path_mask);
370 let path_slice = self.path_slice();
371 let mut struc = serializer.serialize_struct("CompressedPathRaw", 2)?;
372 struc.serialize_field("pathMask", &path_mask)?;
373 struc.serialize_field(
374 "pathSlice",
375 &("0x".to_owned() + path_slice.to_hex::<String>().as_ref()),
376 )?;
377
378 struc.end()
379 }
380}
381
impl<'a> Deserialize<'a> for CompressedPathRaw {
    // Inverse of Serialize above: the visitor returns the parsed
    // (path_mask, path_slice) pair, which is then copied into an owned path.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where D: Deserializer<'a> {
        let (path_mask, path_slice) = deserializer.deserialize_struct(
            "CompressedPathRaw",
            FIELDS,
            CompressedPathRawVisitor,
        )?;

        Ok(CompressedPathRaw::new(&path_slice[..], path_mask))
    }
}
394
/// Serde field names for `CompressedPathRaw`'s struct representation.
/// (`&'static` is implied on `const` items — clippy
/// `redundant_static_lifetimes` — so the lifetimes were dropped.)
const FIELDS: &[&str] = &["pathMask", "pathSlice"];
396
/// Key identifiers for `CompressedPathRaw`'s serde map representation.
enum Field {
    Mask,  // "pathMask"
    Slice, // "pathSlice"
}
401
402impl<'de> Deserialize<'de> for Field {
403 fn deserialize<D>(deserializer: D) -> Result<Field, D::Error>
404 where D: Deserializer<'de> {
405 struct FieldVisitor;
406
407 impl<'de> Visitor<'de> for FieldVisitor {
408 type Value = Field;
409
410 fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
411 formatter.write_str("`pathMask` or `pathSlice`")
412 }
413
414 fn visit_str<E>(self, value: &str) -> Result<Field, E>
415 where E: de::Error {
416 match value {
417 "pathMask" => Ok(Field::Mask),
418 "pathSlice" => Ok(Field::Slice),
419 _ => Err(de::Error::unknown_field(value, FIELDS)),
420 }
421 }
422 }
423
424 deserializer.deserialize_identifier(FieldVisitor)
425 }
426}
427
428struct CompressedPathRawVisitor;
429impl<'a> Visitor<'a> for CompressedPathRawVisitor {
430 type Value = (u8, Vec<u8>);
431
432 fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
433 write!(formatter, "Struct CompressedPath")
434 }
435
436 fn visit_map<V>(self, mut visitor: V) -> Result<Self::Value, V::Error>
437 where V: MapAccess<'a> {
438 let mut path_mask = None;
439 let mut path_slice = None;
440
441 while let Some(key) = visitor.next_key()? {
442 match key {
443 Field::Mask => {
444 if path_mask.is_some() {
445 return Err(de::Error::duplicate_field("pathMask"));
446 }
447
448 path_mask = Some(visitor.next_value()?);
449 }
450 Field::Slice => {
451 if path_slice.is_some() {
452 return Err(de::Error::duplicate_field("pathSlice"));
453 }
454
455 path_slice = Some(visitor.next_value()?);
456 }
457 }
458 }
459
460 let path_mask: String =
461 path_mask.ok_or_else(|| de::Error::missing_field("pathMask"))?;
462
463 let path_mask = if let Some(s) = path_mask.strip_prefix("0x") {
464 u8::from_str_radix(&s, 16).map_err(|e| {
465 de::Error::custom(format!("pathMask: invalid hex: {}", e))
466 })?
467 } else {
468 return Err(de::Error::custom(
469 "pathMask: invalid format. Expected a 0x-prefixed hex string",
470 ));
471 };
472
473 let path_slice: String =
474 path_slice.ok_or_else(|| de::Error::missing_field("pathSlice"))?;
475
476 let path_slice: Vec<u8> = if let (Some(s), true) =
477 (path_slice.strip_prefix("0x"), path_slice.len() & 1 == 0)
478 {
479 FromHex::from_hex(s).map_err(|e| {
480 de::Error::custom(format!("pathSlice: invalid hex: {}", e))
481 })?
482 } else {
483 return Err(de::Error::custom("pathSlice: invalid format. Expected a 0x-prefixed hex string with even length"));
484 };
485
486 Ok((path_mask, path_slice))
487 }
488}
489
// Equality via the borrowed view (slice + mask), matching the derived
// PartialEq on CompressedPathRef. `as_ref` here is
// CompressedPathTrait::as_ref, not std's AsRef.
impl PartialEq<Self> for CompressedPathRaw {
    fn eq(&self, other: &Self) -> bool { self.as_ref().eq(&other.as_ref()) }
}
493
// The PartialEq above is a total equivalence (byte/mask comparison).
impl Eq for CompressedPathRaw {}
495
// Hash through the borrowed view so Hash stays consistent with the
// PartialEq above (required for HashMap correctness).
impl Hash for CompressedPathRaw {
    fn hash<H: Hasher>(&self, state: &mut H) { self.as_ref().hash(state) }
}
499
// Reuses CompressedPathRef's derived Debug for the owned type.
impl Debug for CompressedPathRaw {
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        self.as_ref().fmt(f)
    }
}
505
impl CompressedPathRaw {
    /// Mutable access to the path bytes (in-place or heap, per path_size).
    pub fn path_slice_mut(&mut self) -> &mut [u8] {
        self.path.get_slice_mut(self.path_size as usize)
    }
}
511
// Equality for trait objects, again via the borrowed view; enables keying
// collections by &dyn CompressedPathTrait (see the Borrow impl below).
impl<'a> PartialEq<Self> for dyn CompressedPathTrait + 'a {
    fn eq(&self, other: &(dyn CompressedPathTrait + 'a)) -> bool {
        self.as_ref().eq(&other.as_ref())
    }
}
517
// Total equivalence, same reasoning as Eq for CompressedPathRaw.
impl<'a> Eq for dyn CompressedPathTrait + 'a {}
519
// Hash consistent with the trait-object PartialEq above.
impl<'a> Hash for dyn CompressedPathTrait + 'a {
    fn hash<H: Hasher>(&self, state: &mut H) { self.as_ref().hash(state) }
}
523
// Lets a map keyed by CompressedPathRaw be queried with any
// &dyn CompressedPathTrait (e.g. a borrowed path) without allocating.
impl<'a> Borrow<dyn CompressedPathTrait + 'a> for CompressedPathRaw {
    fn borrow(&self) -> &(dyn CompressedPathTrait + 'a) { self }
}
527
528use super::maybe_in_place_byte_array::*;
529use rlp::*;
530use rustc_hex::{FromHex, ToHex};
531use serde::{
532 de::{self, MapAccess, Visitor},
533 ser::SerializeStruct,
534 Deserialize, Deserializer, Serialize, Serializer,
535};
536
537use std::{
538 borrow::Borrow,
539 fmt::{self, Debug, Error, Formatter},
540 hash::{Hash, Hasher},
541 result::Result,
542};