/__w/smoldot/smoldot/repo/lib/src/trie/trie_node.rs
Line | Count | Source (jump to first uncovered line) |
1 | | // Smoldot |
2 | | // Copyright (C) 2019-2022 Parity Technologies (UK) Ltd. |
3 | | // SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 |
4 | | |
5 | | // This program is free software: you can redistribute it and/or modify |
6 | | // it under the terms of the GNU General Public License as published by |
7 | | // the Free Software Foundation, either version 3 of the License, or |
8 | | // (at your option) any later version. |
9 | | |
10 | | // This program is distributed in the hope that it will be useful, |
11 | | // but WITHOUT ANY WARRANTY; without even the implied warranty of |
12 | | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
13 | | // GNU General Public License for more details. |
14 | | |
15 | | // You should have received a copy of the GNU General Public License |
16 | | // along with this program. If not, see <http://www.gnu.org/licenses/>. |
17 | | |
18 | | use super::{nibble, HashFunction}; |
19 | | use alloc::vec::Vec; |
20 | | use core::{cmp, fmt, iter, slice}; |
21 | | |
22 | | /// Encodes the components of a node value into the node value itself. |
23 | | /// |
24 | | /// This function returns an iterator of buffers. The actual node value is the concatenation of |
25 | | /// these buffers put together. |
26 | | /// |
27 | | /// > **Note**: The returned iterator might contain a reference to the storage value and children |
28 | | /// > values in the [`Decoded`]. By returning an iterator of buffers, we avoid copying |
29 | | /// > these storage value and children values. |
30 | | /// |
31 | | /// This encoding is independent of the trie version. |
32 | 896k | pub fn encode<'a>( |
33 | 896k | decoded: Decoded< |
34 | 896k | 'a, |
35 | 896k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, |
36 | 896k | impl AsRef<[u8]> + Clone + 'a, |
37 | 896k | >, |
38 | 896k | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { |
39 | 896k | // The return value is composed of three parts: |
40 | 896k | // - Before the storage value. |
41 | 896k | // - The storage value (which can be empty). |
42 | 896k | // - The children nodes. |
43 | 896k | |
44 | 896k | // Contains the encoding before the storage value. |
45 | 896k | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); |
46 | 896k | |
47 | 896k | let has_children = decoded.children.iter().any(Option::is_some); |
48 | | |
49 | | // We first push the node header. |
50 | | // See https://spec.polkadot.network/#defn-node-header |
51 | | { |
52 | 896k | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = |
53 | 896k | match (has_children, decoded.storage_value) { |
54 | 670k | (false, StorageValue::Unhashed(_)) => (0b01, 6), |
55 | 148k | (true, StorageValue::None) => (0b10, 6), |
56 | 77.3k | (true, StorageValue::Unhashed(_)) => (0b11, 6), |
57 | 100 | (false, StorageValue::Hashed(_)) => (0b001, 5), |
58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), |
59 | | (false, StorageValue::None) => { |
60 | 4 | if decoded.partial_key.len() != 0 { |
61 | 1 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); |
62 | | } else { |
63 | 3 | (0, 6) |
64 | | } |
65 | | } |
66 | | }; |
67 | | |
68 | 896k | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; |
69 | 896k | let first_byte = (first_byte_msb << pk_len_first_byte_bits) |
70 | 896k | | u8::try_from(cmp::min( |
71 | 896k | decoded.partial_key.len(), |
72 | 896k | max_representable_in_first_byte, |
73 | 896k | )) |
74 | 896k | .unwrap(); |
75 | 896k | before_storage_value.push(first_byte); |
76 | 896k | |
77 | 896k | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we |
78 | 896k | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if |
79 | 896k | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` |
80 | 896k | // afterwards. |
81 | 896k | let mut remain_pk_len = decoded |
82 | 896k | .partial_key |
83 | 896k | .len() |
84 | 896k | .checked_sub(max_representable_in_first_byte); |
85 | 897k | while let Some(pk_len_inner874 ) = remain_pk_len { |
86 | 874 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); |
87 | 874 | remain_pk_len = pk_len_inner.checked_sub(255); |
88 | 874 | } |
89 | | } |
90 | | |
91 | | // We then push the partial key. |
92 | 896k | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( |
93 | 896k | decoded.partial_key.clone(), |
94 | 896k | )); |
95 | 896k | |
96 | 896k | // After the partial key, the node value optionally contains a bitfield of child nodes. |
97 | 896k | if has_children { |
98 | 226k | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); |
99 | 670k | } |
100 | | |
101 | | // Then, the storage value. |
102 | 896k | let storage_value = match decoded.storage_value { |
103 | 100 | StorageValue::Hashed(hash) => &hash[..], |
104 | 148k | StorageValue::None => &[][..], |
105 | 747k | StorageValue::Unhashed(storage_value) => { |
106 | 747k | before_storage_value.extend_from_slice( |
107 | 747k | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), |
108 | 747k | ); |
109 | 747k | storage_value |
110 | | } |
111 | | }; |
112 | | |
113 | | // Finally, the children node values. |
114 | 896k | let children_nodes = decoded |
115 | 896k | .children |
116 | 896k | .into_iter() |
117 | 896k | .flatten() |
118 | 896k | .flat_map(|child_value| { |
119 | 810k | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); |
120 | 810k | [either::Left(size), either::Right(child_value)].into_iter() |
121 | 896k | }); Unexecuted instantiation: _RNCINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtB6_6nibble14BytesToNibblesINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterhEERShE0B8_ _RNCINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB6_6nibble6NibbleENtB4_17MerkleValueOutputE0B8_ Line | Count | Source | 118 | 403k | .flat_map(|child_value| { | 119 | 403k | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | 403k | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 403k | }); |
Unexecuted instantiation: _RNCINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter7sources5empty5EmptyNtNtB6_6nibble6NibbleERShE0B8_ _RNCINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBY_5slice4iter4IterNtNtB6_6nibble6NibbleEERNtB4_17MerkleValueOutputE0B8_ Line | Count | Source | 118 | 349k | .flat_map(|child_value| { | 119 | 349k | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | 349k | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 349k | }); |
_RNCINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB6_6nibble6NibbleERShE0B8_ Line | Count | Source | 118 | 27.3k | .flat_map(|child_value| { | 119 | 27.3k | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | 27.3k | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 27.3k | }); |
_RNCINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeNtB4_17DecodedPartialKeyRShE0B8_ Line | Count | Source | 118 | 27.3k | .flat_map(|child_value| { | 119 | 27.3k | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | 27.3k | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 27.3k | }); |
Unexecuted instantiation: _RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBZ_5slice4iter4IterNtNtB6_6nibble6NibbleEERNtB4_17MerkleValueOutputE0CsDDUKWWCHAU_18smoldot_light_wasm Unexecuted instantiation: _RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeNtB4_17DecodedPartialKeyRShE0B8_ _RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBZ_5slice4iter4IterNtNtB6_6nibble6NibbleEERNtB4_17MerkleValueOutputE0B8_ Line | Count | Source | 118 | 987 | .flat_map(|child_value| { | 119 | 987 | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | 987 | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 987 | }); |
_RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB6_6nibble6NibbleENtB4_17MerkleValueOutputE0CsiLzmwikkc22_14json_rpc_basic Line | Count | Source | 118 | 94 | .flat_map(|child_value| { | 119 | 94 | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | 94 | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 94 | }); |
Unexecuted instantiation: _RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBZ_5slice4iter4IterNtNtB6_6nibble6NibbleEERNtB4_17MerkleValueOutputE0CsiLzmwikkc22_14json_rpc_basic Unexecuted instantiation: _RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB6_6nibble6NibbleENtB4_17MerkleValueOutputE0CscDgN54JpMGG_6author Unexecuted instantiation: _RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBZ_5slice4iter4IterNtNtB6_6nibble6NibbleEERNtB4_17MerkleValueOutputE0CscDgN54JpMGG_6author _RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB6_6nibble6NibbleENtB4_17MerkleValueOutputE0CsibGXYHQB8Ea_25json_rpc_general_requests Line | Count | Source | 118 | 893 | .flat_map(|child_value| { | 119 | 893 | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | 893 | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 893 | }); |
Unexecuted instantiation: _RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBZ_5slice4iter4IterNtNtB6_6nibble6NibbleEERNtB4_17MerkleValueOutputE0CsibGXYHQB8Ea_25json_rpc_general_requests |
122 | 896k | |
123 | 896k | // The return value is the combination of these components. |
124 | 896k | Ok(iter::once(either::Left(before_storage_value)) |
125 | 896k | .chain(iter::once(either::Right(storage_value))) |
126 | 896k | .map(either::Left) |
127 | 896k | .chain(children_nodes.map(either::Right))) |
128 | 896k | } _RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtB4_6nibble14BytesToNibblesINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterhEERShEB6_ Line | Count | Source | 32 | 1 | pub fn encode<'a>( | 33 | 1 | decoded: Decoded< | 34 | 1 | 'a, | 35 | 1 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 1 | impl AsRef<[u8]> + Clone + 'a, | 37 | 1 | >, | 38 | 1 | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 1 | // The return value is composed of three parts: | 40 | 1 | // - Before the storage value. | 41 | 1 | // - The storage value (which can be empty). | 42 | 1 | // - The children nodes. | 43 | 1 | | 44 | 1 | // Contains the encoding before the storage value. | 45 | 1 | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 1 | | 47 | 1 | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. | 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 1 | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 1 | match (has_children, decoded.storage_value) { | 54 | 1 | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 0 | (true, StorageValue::None) => (0b10, 6), | 56 | 0 | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 0 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 0 | if decoded.partial_key.len() != 0 { | 61 | 0 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 0 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 1 | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 1 | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 1 | | u8::try_from(cmp::min( | 71 | 1 | decoded.partial_key.len(), | 72 | 1 | max_representable_in_first_byte, | 
73 | 1 | )) | 74 | 1 | .unwrap(); | 75 | 1 | before_storage_value.push(first_byte); | 76 | 1 | | 77 | 1 | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 1 | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 1 | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 1 | // afterwards. | 81 | 1 | let mut remain_pk_len = decoded | 82 | 1 | .partial_key | 83 | 1 | .len() | 84 | 1 | .checked_sub(max_representable_in_first_byte); | 85 | 1 | while let Some(pk_len_inner0 ) = remain_pk_len { | 86 | 0 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 0 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 0 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. | 92 | 1 | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 1 | decoded.partial_key.clone(), | 94 | 1 | )); | 95 | 1 | | 96 | 1 | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 1 | if has_children { | 98 | 0 | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 1 | } | 100 | | | 101 | | // Then, the storage value. | 102 | 1 | let storage_value = match decoded.storage_value { | 103 | 0 | StorageValue::Hashed(hash) => &hash[..], | 104 | 0 | StorageValue::None => &[][..], | 105 | 1 | StorageValue::Unhashed(storage_value) => { | 106 | 1 | before_storage_value.extend_from_slice( | 107 | 1 | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 1 | ); | 109 | 1 | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. 
| 114 | 1 | let children_nodes = decoded | 115 | 1 | .children | 116 | 1 | .into_iter() | 117 | 1 | .flatten() | 118 | 1 | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 1 | }); | 122 | 1 | | 123 | 1 | // The return value is the combination of these components. | 124 | 1 | Ok(iter::once(either::Left(before_storage_value)) | 125 | 1 | .chain(iter::once(either::Right(storage_value))) | 126 | 1 | .map(either::Left) | 127 | 1 | .chain(children_nodes.map(either::Right))) | 128 | 1 | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleENtB2_17MerkleValueOutputEB6_ Line | Count | Source | 32 | 470k | pub fn encode<'a>( | 33 | 470k | decoded: Decoded< | 34 | 470k | 'a, | 35 | 470k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 470k | impl AsRef<[u8]> + Clone + 'a, | 37 | 470k | >, | 38 | 470k | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 470k | // The return value is composed of three parts: | 40 | 470k | // - Before the storage value. | 41 | 470k | // - The storage value (which can be empty). | 42 | 470k | // - The children nodes. | 43 | 470k | | 44 | 470k | // Contains the encoding before the storage value. | 45 | 470k | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 470k | | 47 | 470k | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. 
| 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 470k | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 470k | match (has_children, decoded.storage_value) { | 54 | 351k | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 69.8k | (true, StorageValue::None) => (0b10, 6), | 56 | 48.9k | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 0 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 0 | if decoded.partial_key.len() != 0 { | 61 | 0 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 0 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 470k | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 470k | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 470k | | u8::try_from(cmp::min( | 71 | 470k | decoded.partial_key.len(), | 72 | 470k | max_representable_in_first_byte, | 73 | 470k | )) | 74 | 470k | .unwrap(); | 75 | 470k | before_storage_value.push(first_byte); | 76 | 470k | | 77 | 470k | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 470k | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 470k | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 470k | // afterwards. | 81 | 470k | let mut remain_pk_len = decoded | 82 | 470k | .partial_key | 83 | 470k | .len() | 84 | 470k | .checked_sub(max_representable_in_first_byte); | 85 | 470k | while let Some(pk_len_inner0 ) = remain_pk_len { | 86 | 0 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 0 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 0 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. 
| 92 | 470k | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 470k | decoded.partial_key.clone(), | 94 | 470k | )); | 95 | 470k | | 96 | 470k | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 470k | if has_children { | 98 | 118k | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 351k | } | 100 | | | 101 | | // Then, the storage value. | 102 | 470k | let storage_value = match decoded.storage_value { | 103 | 0 | StorageValue::Hashed(hash) => &hash[..], | 104 | 69.8k | StorageValue::None => &[][..], | 105 | 400k | StorageValue::Unhashed(storage_value) => { | 106 | 400k | before_storage_value.extend_from_slice( | 107 | 400k | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 400k | ); | 109 | 400k | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. | 114 | 470k | let children_nodes = decoded | 115 | 470k | .children | 116 | 470k | .into_iter() | 117 | 470k | .flatten() | 118 | 470k | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 470k | }); | 122 | 470k | | 123 | 470k | // The return value is the combination of these components. | 124 | 470k | Ok(iter::once(either::Left(before_storage_value)) | 125 | 470k | .chain(iter::once(either::Right(storage_value))) | 126 | 470k | .map(either::Left) | 127 | 470k | .chain(children_nodes.map(either::Right))) | 128 | 470k | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleERShEB6_ Line | Count | Source | 32 | 28.8k | pub fn encode<'a>( | 33 | 28.8k | decoded: Decoded< | 34 | 28.8k | 'a, | 35 | 28.8k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 28.8k | impl AsRef<[u8]> + Clone + 'a, | 37 | 28.8k | >, | 38 | 28.8k | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 28.8k | // The return value is composed of three parts: | 40 | 28.8k | // - Before the storage value. | 41 | 28.8k | // - The storage value (which can be empty). | 42 | 28.8k | // - The children nodes. | 43 | 28.8k | | 44 | 28.8k | // Contains the encoding before the storage value. | 45 | 28.8k | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 28.8k | | 47 | 28.8k | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. 
| 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 28.8k | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 28.8k | match (has_children, decoded.storage_value) { | 54 | 21.0k | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 5.58k | (true, StorageValue::None) => (0b10, 6), | 56 | 2.17k | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 0 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 0 | if decoded.partial_key.len() != 0 { | 61 | 0 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 0 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 28.8k | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 28.8k | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 28.8k | | u8::try_from(cmp::min( | 71 | 28.8k | decoded.partial_key.len(), | 72 | 28.8k | max_representable_in_first_byte, | 73 | 28.8k | )) | 74 | 28.8k | .unwrap(); | 75 | 28.8k | before_storage_value.push(first_byte); | 76 | 28.8k | | 77 | 28.8k | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 28.8k | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 28.8k | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 28.8k | // afterwards. | 81 | 28.8k | let mut remain_pk_len = decoded | 82 | 28.8k | .partial_key | 83 | 28.8k | .len() | 84 | 28.8k | .checked_sub(max_representable_in_first_byte); | 85 | 28.8k | while let Some(pk_len_inner0 ) = remain_pk_len { | 86 | 0 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 0 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 0 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. 
| 92 | 28.8k | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 28.8k | decoded.partial_key.clone(), | 94 | 28.8k | )); | 95 | 28.8k | | 96 | 28.8k | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 28.8k | if has_children { | 98 | 7.76k | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 21.0k | } | 100 | | | 101 | | // Then, the storage value. | 102 | 28.8k | let storage_value = match decoded.storage_value { | 103 | 0 | StorageValue::Hashed(hash) => &hash[..], | 104 | 5.58k | StorageValue::None => &[][..], | 105 | 23.2k | StorageValue::Unhashed(storage_value) => { | 106 | 23.2k | before_storage_value.extend_from_slice( | 107 | 23.2k | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 23.2k | ); | 109 | 23.2k | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. | 114 | 28.8k | let children_nodes = decoded | 115 | 28.8k | .children | 116 | 28.8k | .into_iter() | 117 | 28.8k | .flatten() | 118 | 28.8k | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 28.8k | }); | 122 | 28.8k | | 123 | 28.8k | // The return value is the combination of these components. | 124 | 28.8k | Ok(iter::once(either::Left(before_storage_value)) | 125 | 28.8k | .chain(iter::once(either::Right(storage_value))) | 126 | 28.8k | .map(either::Left) | 127 | 28.8k | .chain(children_nodes.map(either::Right))) | 128 | 28.8k | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter7sources4once4OnceNtNtB4_6nibble6NibbleERShEB6_ Line | Count | Source | 32 | 1 | pub fn encode<'a>( | 33 | 1 | decoded: Decoded< | 34 | 1 | 'a, | 35 | 1 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 1 | impl AsRef<[u8]> + Clone + 'a, | 37 | 1 | >, | 38 | 1 | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 1 | // The return value is composed of three parts: | 40 | 1 | // - Before the storage value. | 41 | 1 | // - The storage value (which can be empty). | 42 | 1 | // - The children nodes. | 43 | 1 | | 44 | 1 | // Contains the encoding before the storage value. | 45 | 1 | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 1 | | 47 | 1 | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. | 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 0 | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 1 | match (has_children, decoded.storage_value) { | 54 | 0 | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 0 | (true, StorageValue::None) => (0b10, 6), | 56 | 0 | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 0 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 1 | if decoded.partial_key.len() != 0 { | 61 | 1 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 0 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 0 | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 0 | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 0 | | u8::try_from(cmp::min( | 71 | 0 | decoded.partial_key.len(), | 72 | 0 | max_representable_in_first_byte, | 73 | 0 | )) | 74 | 0 | 
.unwrap(); | 75 | 0 | before_storage_value.push(first_byte); | 76 | 0 |
| 77 | 0 | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 0 | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 0 | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 0 | // afterwards. | 81 | 0 | let mut remain_pk_len = decoded | 82 | 0 | .partial_key | 83 | 0 | .len() | 84 | 0 | .checked_sub(max_representable_in_first_byte); | 85 | 0 | while let Some(pk_len_inner) = remain_pk_len { | 86 | 0 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 0 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 0 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. | 92 | 0 | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 0 | decoded.partial_key.clone(), | 94 | 0 | )); | 95 | 0 |
| 96 | 0 | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 0 | if has_children { | 98 | 0 | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 0 | } | 100 | | | 101 | | // Then, the storage value. | 102 | 0 | let storage_value = match decoded.storage_value { | 103 | 0 | StorageValue::Hashed(hash) => &hash[..], | 104 | 0 | StorageValue::None => &[][..], | 105 | 0 | StorageValue::Unhashed(storage_value) => { | 106 | 0 | before_storage_value.extend_from_slice( | 107 | 0 | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 0 | ); | 109 | 0 | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. | 114 | 0 | let children_nodes = decoded | 115 | 0 | .children | 116 | 0 | .into_iter() | 117 | 0 | .flatten() | 118 | 0 | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 0 | }); | 122 | 0 |
| 123 | 0 | // The return value is the combination of these components. | 124 | 0 | Ok(iter::once(either::Left(before_storage_value)) | 125 | 0 | .chain(iter::once(either::Right(storage_value))) | 126 | 0 | .map(either::Left) | 127 | 0 | .chain(children_nodes.map(either::Right))) | 128 | 1 | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter7sources5empty5EmptyNtNtB4_6nibble6NibbleERShEB6_ Line | Count | Source | 32 | 3 | pub fn encode<'a>( | 33 | 3 | decoded: Decoded< | 34 | 3 | 'a, | 35 | 3 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 3 | impl AsRef<[u8]> + Clone + 'a, | 37 | 3 | >, | 38 | 3 | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 3 | // The return value is composed of three parts: | 40 | 3 | // - Before the storage value. | 41 | 3 | // - The storage value (which can be empty). | 42 | 3 | // - The children nodes. | 43 | 3 | | 44 | 3 | // Contains the encoding before the storage value. | 45 | 3 | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 3 | | 47 | 3 | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. | 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 3 | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 3 | match (has_children, decoded.storage_value) { | 54 | 0 | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 0 | (true, StorageValue::None) => (0b10, 6), | 56 | 0 | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 0 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 3 | if decoded.partial_key.len() != 0 { | 61 | 0 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 3 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 3 | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 3 | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 3 | | u8::try_from(cmp::min( | 71 | 3 | decoded.partial_key.len(), | 72 | 3 | max_representable_in_first_byte, | 73 | 3 | )) | 74 | 3 | 
.unwrap(); | 75 | 3 | before_storage_value.push(first_byte); | 76 | 3 | | 77 | 3 | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 3 | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 3 | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 3 | // afterwards. | 81 | 3 | let mut remain_pk_len = decoded | 82 | 3 | .partial_key | 83 | 3 | .len() | 84 | 3 | .checked_sub(max_representable_in_first_byte); | 85 | 3 | while let Some(pk_len_inner0 ) = remain_pk_len { | 86 | 0 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 0 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 0 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. | 92 | 3 | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 3 | decoded.partial_key.clone(), | 94 | 3 | )); | 95 | 3 | | 96 | 3 | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 3 | if has_children { | 98 | 0 | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 3 | } | 100 | | | 101 | | // Then, the storage value. | 102 | 3 | let storage_value = match decoded.storage_value { | 103 | 0 | StorageValue::Hashed(hash) => &hash[..], | 104 | 3 | StorageValue::None => &[][..], | 105 | 0 | StorageValue::Unhashed(storage_value) => { | 106 | 0 | before_storage_value.extend_from_slice( | 107 | 0 | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 0 | ); | 109 | 0 | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. 
| 114 | 3 | let children_nodes = decoded | 115 | 3 | .children | 116 | 3 | .into_iter() | 117 | 3 | .flatten() | 118 | 3 | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 3 | }); | 122 | 3 | | 123 | 3 | // The return value is the combination of these components. | 124 | 3 | Ok(iter::once(either::Left(before_storage_value)) | 125 | 3 | .chain(iter::once(either::Right(storage_value))) | 126 | 3 | .map(either::Left) | 127 | 3 | .chain(children_nodes.map(either::Right))) | 128 | 3 | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBW_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputEB6_ Line | Count | Source | 32 | 366k | pub fn encode<'a>( | 33 | 366k | decoded: Decoded< | 34 | 366k | 'a, | 35 | 366k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 366k | impl AsRef<[u8]> + Clone + 'a, | 37 | 366k | >, | 38 | 366k | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 366k | // The return value is composed of three parts: | 40 | 366k | // - Before the storage value. | 41 | 366k | // - The storage value (which can be empty). | 42 | 366k | // - The children nodes. | 43 | 366k | | 44 | 366k | // Contains the encoding before the storage value. | 45 | 366k | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 366k | | 47 | 366k | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. 
| 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 366k | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 366k | match (has_children, decoded.storage_value) { | 54 | 275k | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 67.1k | (true, StorageValue::None) => (0b10, 6), | 56 | 23.9k | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 100 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 0 | if decoded.partial_key.len() != 0 { | 61 | 0 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 0 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 366k | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 366k | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 366k | | u8::try_from(cmp::min( | 71 | 366k | decoded.partial_key.len(), | 72 | 366k | max_representable_in_first_byte, | 73 | 366k | )) | 74 | 366k | .unwrap(); | 75 | 366k | before_storage_value.push(first_byte); | 76 | 366k | | 77 | 366k | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 366k | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 366k | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 366k | // afterwards. | 81 | 366k | let mut remain_pk_len = decoded | 82 | 366k | .partial_key | 83 | 366k | .len() | 84 | 366k | .checked_sub(max_representable_in_first_byte); | 85 | 366k | while let Some(pk_len_inner202 ) = remain_pk_len { | 86 | 202 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 202 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 202 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. 
| 92 | 366k | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 366k | decoded.partial_key.clone(), | 94 | 366k | )); | 95 | 366k | | 96 | 366k | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 366k | if has_children { | 98 | 91.1k | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 275k | } | 100 | | | 101 | | // Then, the storage value. | 102 | 366k | let storage_value = match decoded.storage_value { | 103 | 100 | StorageValue::Hashed(hash) => &hash[..], | 104 | 67.1k | StorageValue::None => &[][..], | 105 | 299k | StorageValue::Unhashed(storage_value) => { | 106 | 299k | before_storage_value.extend_from_slice( | 107 | 299k | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 299k | ); | 109 | 299k | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. | 114 | 366k | let children_nodes = decoded | 115 | 366k | .children | 116 | 366k | .into_iter() | 117 | 366k | .flatten() | 118 | 366k | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 366k | }); | 122 | 366k | | 123 | 366k | // The return value is the combination of these components. | 124 | 366k | Ok(iter::once(either::Left(before_storage_value)) | 125 | 366k | .chain(iter::once(either::Right(storage_value))) | 126 | 366k | .map(either::Left) | 127 | 366k | .chain(children_nodes.map(either::Right))) | 128 | 366k | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6encodeNtB2_17DecodedPartialKeyRShEB6_ Line | Count | Source | 32 | 28.8k | pub fn encode<'a>( | 33 | 28.8k | decoded: Decoded< | 34 | 28.8k | 'a, | 35 | 28.8k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 28.8k | impl AsRef<[u8]> + Clone + 'a, | 37 | 28.8k | >, | 38 | 28.8k | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 28.8k | // The return value is composed of three parts: | 40 | 28.8k | // - Before the storage value. | 41 | 28.8k | // - The storage value (which can be empty). | 42 | 28.8k | // - The children nodes. | 43 | 28.8k | | 44 | 28.8k | // Contains the encoding before the storage value. | 45 | 28.8k | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 28.8k | | 47 | 28.8k | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. | 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 28.8k | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 28.8k | match (has_children, decoded.storage_value) { | 54 | 21.0k | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 5.58k | (true, StorageValue::None) => (0b10, 6), | 56 | 2.18k | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 0 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 0 | if decoded.partial_key.len() != 0 { | 61 | 0 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 0 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 28.8k | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 28.8k | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 28.8k | | u8::try_from(cmp::min( | 71 | 28.8k | decoded.partial_key.len(), | 72 | 28.8k | 
max_representable_in_first_byte, | 73 | 28.8k | )) | 74 | 28.8k | .unwrap(); | 75 | 28.8k | before_storage_value.push(first_byte); | 76 | 28.8k | | 77 | 28.8k | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 28.8k | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 28.8k | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 28.8k | // afterwards. | 81 | 28.8k | let mut remain_pk_len = decoded | 82 | 28.8k | .partial_key | 83 | 28.8k | .len() | 84 | 28.8k | .checked_sub(max_representable_in_first_byte); | 85 | 28.8k | while let Some(pk_len_inner0 ) = remain_pk_len { | 86 | 0 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 0 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 0 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. | 92 | 28.8k | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 28.8k | decoded.partial_key.clone(), | 94 | 28.8k | )); | 95 | 28.8k | | 96 | 28.8k | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 28.8k | if has_children { | 98 | 7.76k | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 21.0k | } | 100 | | | 101 | | // Then, the storage value. | 102 | 28.8k | let storage_value = match decoded.storage_value { | 103 | 0 | StorageValue::Hashed(hash) => &hash[..], | 104 | 5.58k | StorageValue::None => &[][..], | 105 | 23.2k | StorageValue::Unhashed(storage_value) => { | 106 | 23.2k | before_storage_value.extend_from_slice( | 107 | 23.2k | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 23.2k | ); | 109 | 23.2k | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. 
| 114 | 28.8k | let children_nodes = decoded | 115 | 28.8k | .children | 116 | 28.8k | .into_iter() | 117 | 28.8k | .flatten() | 118 | 28.8k | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 28.8k | }); | 122 | 28.8k | | 123 | 28.8k | // The return value is the combination of these components. | 124 | 28.8k | Ok(iter::once(either::Left(before_storage_value)) | 125 | 28.8k | .chain(iter::once(either::Right(storage_value))) | 126 | 28.8k | .map(either::Left) | 127 | 28.8k | .chain(children_nodes.map(either::Right))) | 128 | 28.8k | } |
Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBX_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputECsDDUKWWCHAU_18smoldot_light_wasm _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBX_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputEB6_ Line | Count | Source | 32 | 1.00k | pub fn encode<'a>( | 33 | 1.00k | decoded: Decoded< | 34 | 1.00k | 'a, | 35 | 1.00k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 1.00k | impl AsRef<[u8]> + Clone + 'a, | 37 | 1.00k | >, | 38 | 1.00k | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 1.00k | // The return value is composed of three parts: | 40 | 1.00k | // - Before the storage value. | 41 | 1.00k | // - The storage value (which can be empty). | 42 | 1.00k | // - The children nodes. | 43 | 1.00k | | 44 | 1.00k | // Contains the encoding before the storage value. | 45 | 1.00k | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 1.00k | | 47 | 1.00k | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. 
| 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 1.00k | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 1.00k | match (has_children, decoded.storage_value) { | 54 | 735 | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 273 | (true, StorageValue::None) => (0b10, 6), | 56 | 0 | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 0 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 0 | if decoded.partial_key.len() != 0 { | 61 | 0 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 0 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 1.00k | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 1.00k | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 1.00k | | u8::try_from(cmp::min( | 71 | 1.00k | decoded.partial_key.len(), | 72 | 1.00k | max_representable_in_first_byte, | 73 | 1.00k | )) | 74 | 1.00k | .unwrap(); | 75 | 1.00k | before_storage_value.push(first_byte); | 76 | 1.00k | | 77 | 1.00k | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 1.00k | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 1.00k | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 1.00k | // afterwards. | 81 | 1.00k | let mut remain_pk_len = decoded | 82 | 1.00k | .partial_key | 83 | 1.00k | .len() | 84 | 1.00k | .checked_sub(max_representable_in_first_byte); | 85 | 1.34k | while let Some(pk_len_inner336 ) = remain_pk_len { | 86 | 336 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 336 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 336 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. 
| 92 | 1.00k | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 1.00k | decoded.partial_key.clone(), | 94 | 1.00k | )); | 95 | 1.00k | | 96 | 1.00k | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 1.00k | if has_children { | 98 | 273 | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 735 | } | 100 | | | 101 | | // Then, the storage value. | 102 | 1.00k | let storage_value = match decoded.storage_value { | 103 | 0 | StorageValue::Hashed(hash) => &hash[..], | 104 | 273 | StorageValue::None => &[][..], | 105 | 735 | StorageValue::Unhashed(storage_value) => { | 106 | 735 | before_storage_value.extend_from_slice( | 107 | 735 | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 735 | ); | 109 | 735 | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. | 114 | 1.00k | let children_nodes = decoded | 115 | 1.00k | .children | 116 | 1.00k | .into_iter() | 117 | 1.00k | .flatten() | 118 | 1.00k | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 1.00k | }); | 122 | 1.00k | | 123 | 1.00k | // The return value is the combination of these components. | 124 | 1.00k | Ok(iter::once(either::Left(before_storage_value)) | 125 | 1.00k | .chain(iter::once(either::Right(storage_value))) | 126 | 1.00k | .map(either::Left) | 127 | 1.00k | .chain(children_nodes.map(either::Right))) | 128 | 1.00k | } |
Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeNtB2_17DecodedPartialKeyRShEB6_ _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleENtB2_17MerkleValueOutputECsiLzmwikkc22_14json_rpc_basic Line | Count | Source | 32 | 96 | pub fn encode<'a>( | 33 | 96 | decoded: Decoded< | 34 | 96 | 'a, | 35 | 96 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 96 | impl AsRef<[u8]> + Clone + 'a, | 37 | 96 | >, | 38 | 96 | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 96 | // The return value is composed of three parts: | 40 | 96 | // - Before the storage value. | 41 | 96 | // - The storage value (which can be empty). | 42 | 96 | // - The children nodes. | 43 | 96 | | 44 | 96 | // Contains the encoding before the storage value. | 45 | 96 | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 96 | | 47 | 96 | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. 
| 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 96 | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 96 | match (has_children, decoded.storage_value) { | 54 | 70 | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 26 | (true, StorageValue::None) => (0b10, 6), | 56 | 0 | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 0 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 0 | if decoded.partial_key.len() != 0 { | 61 | 0 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 0 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 96 | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 96 | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 96 | | u8::try_from(cmp::min( | 71 | 96 | decoded.partial_key.len(), | 72 | 96 | max_representable_in_first_byte, | 73 | 96 | )) | 74 | 96 | .unwrap(); | 75 | 96 | before_storage_value.push(first_byte); | 76 | 96 | | 77 | 96 | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 96 | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 96 | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 96 | // afterwards. | 81 | 96 | let mut remain_pk_len = decoded | 82 | 96 | .partial_key | 83 | 96 | .len() | 84 | 96 | .checked_sub(max_representable_in_first_byte); | 85 | 128 | while let Some(pk_len_inner32 ) = remain_pk_len { | 86 | 32 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 32 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 32 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. 
| 92 | 96 | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 96 | decoded.partial_key.clone(), | 94 | 96 | )); | 95 | 96 | | 96 | 96 | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 96 | if has_children { | 98 | 26 | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 70 | } | 100 | | | 101 | | // Then, the storage value. | 102 | 96 | let storage_value = match decoded.storage_value { | 103 | 0 | StorageValue::Hashed(hash) => &hash[..], | 104 | 26 | StorageValue::None => &[][..], | 105 | 70 | StorageValue::Unhashed(storage_value) => { | 106 | 70 | before_storage_value.extend_from_slice( | 107 | 70 | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 70 | ); | 109 | 70 | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. | 114 | 96 | let children_nodes = decoded | 115 | 96 | .children | 116 | 96 | .into_iter() | 117 | 96 | .flatten() | 118 | 96 | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 96 | }); | 122 | 96 | | 123 | 96 | // The return value is the combination of these components. | 124 | 96 | Ok(iter::once(either::Left(before_storage_value)) | 125 | 96 | .chain(iter::once(either::Right(storage_value))) | 126 | 96 | .map(either::Left) | 127 | 96 | .chain(children_nodes.map(either::Right))) | 128 | 96 | } |
Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBX_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputECsiLzmwikkc22_14json_rpc_basic Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleENtB2_17MerkleValueOutputECscDgN54JpMGG_6author Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBX_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputECscDgN54JpMGG_6author _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleENtB2_17MerkleValueOutputECsibGXYHQB8Ea_25json_rpc_general_requests Line | Count | Source | 32 | 912 | pub fn encode<'a>( | 33 | 912 | decoded: Decoded< | 34 | 912 | 'a, | 35 | 912 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 36 | 912 | impl AsRef<[u8]> + Clone + 'a, | 37 | 912 | >, | 38 | 912 | ) -> Result<impl Iterator<Item = impl AsRef<[u8]> + 'a + Clone> + Clone + 'a, EncodeError> { | 39 | 912 | // The return value is composed of three parts: | 40 | 912 | // - Before the storage value. | 41 | 912 | // - The storage value (which can be empty). | 42 | 912 | // - The children nodes. | 43 | 912 | | 44 | 912 | // Contains the encoding before the storage value. | 45 | 912 | let mut before_storage_value: Vec<u8> = Vec::with_capacity(decoded.partial_key.len() / 2 + 32); | 46 | 912 | | 47 | 912 | let has_children = decoded.children.iter().any(Option::is_some); | 48 | | | 49 | | // We first push the node header. 
| 50 | | // See https://spec.polkadot.network/#defn-node-header | 51 | | { | 52 | 912 | let (first_byte_msb, pk_len_first_byte_bits): (u8, _) = | 53 | 912 | match (has_children, decoded.storage_value) { | 54 | 665 | (false, StorageValue::Unhashed(_)) => (0b01, 6), | 55 | 247 | (true, StorageValue::None) => (0b10, 6), | 56 | 0 | (true, StorageValue::Unhashed(_)) => (0b11, 6), | 57 | 0 | (false, StorageValue::Hashed(_)) => (0b001, 5), | 58 | 0 | (true, StorageValue::Hashed(_)) => (0b0001, 4), | 59 | | (false, StorageValue::None) => { | 60 | 0 | if decoded.partial_key.len() != 0 { | 61 | 0 | return Err(EncodeError::PartialKeyButNoChildrenNoStorageValue); | 62 | | } else { | 63 | 0 | (0, 6) | 64 | | } | 65 | | } | 66 | | }; | 67 | | | 68 | 912 | let max_representable_in_first_byte = (1 << pk_len_first_byte_bits) - 1; | 69 | 912 | let first_byte = (first_byte_msb << pk_len_first_byte_bits) | 70 | 912 | | u8::try_from(cmp::min( | 71 | 912 | decoded.partial_key.len(), | 72 | 912 | max_representable_in_first_byte, | 73 | 912 | )) | 74 | 912 | .unwrap(); | 75 | 912 | before_storage_value.push(first_byte); | 76 | 912 | | 77 | 912 | // Note that if the partial key length is exactly equal to `pk_len_first_byte_bits`, we | 78 | 912 | // need to push a `0` afterwards in order to avoid an ambiguity. Similarly, if | 79 | 912 | // `remain_pk_len` is at any point equal to 255, we must push an additional `0` | 80 | 912 | // afterwards. | 81 | 912 | let mut remain_pk_len = decoded | 82 | 912 | .partial_key | 83 | 912 | .len() | 84 | 912 | .checked_sub(max_representable_in_first_byte); | 85 | 1.21k | while let Some(pk_len_inner304 ) = remain_pk_len { | 86 | 304 | before_storage_value.push(u8::try_from(cmp::min(pk_len_inner, 255)).unwrap()); | 87 | 304 | remain_pk_len = pk_len_inner.checked_sub(255); | 88 | 304 | } | 89 | | } | 90 | | | 91 | | // We then push the partial key. 
| 92 | 912 | before_storage_value.extend(nibble::nibbles_to_bytes_prefix_extend( | 93 | 912 | decoded.partial_key.clone(), | 94 | 912 | )); | 95 | 912 | | 96 | 912 | // After the partial key, the node value optionally contains a bitfield of child nodes. | 97 | 912 | if has_children { | 98 | 247 | before_storage_value.extend_from_slice(&decoded.children_bitmap().to_le_bytes()); | 99 | 665 | } | 100 | | | 101 | | // Then, the storage value. | 102 | 912 | let storage_value = match decoded.storage_value { | 103 | 0 | StorageValue::Hashed(hash) => &hash[..], | 104 | 247 | StorageValue::None => &[][..], | 105 | 665 | StorageValue::Unhashed(storage_value) => { | 106 | 665 | before_storage_value.extend_from_slice( | 107 | 665 | crate::util::encode_scale_compact_usize(storage_value.len()).as_ref(), | 108 | 665 | ); | 109 | 665 | storage_value | 110 | | } | 111 | | }; | 112 | | | 113 | | // Finally, the children node values. | 114 | 912 | let children_nodes = decoded | 115 | 912 | .children | 116 | 912 | .into_iter() | 117 | 912 | .flatten() | 118 | 912 | .flat_map(|child_value| { | 119 | | let size = crate::util::encode_scale_compact_usize(child_value.as_ref().len()); | 120 | | [either::Left(size), either::Right(child_value)].into_iter() | 121 | 912 | }); | 122 | 912 | | 123 | 912 | // The return value is the combination of these components. | 124 | 912 | Ok(iter::once(either::Left(before_storage_value)) | 125 | 912 | .chain(iter::once(either::Right(storage_value))) | 126 | 912 | .map(either::Left) | 127 | 912 | .chain(children_nodes.map(either::Right))) | 128 | 912 | } |
Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6encodeINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtBX_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputECsibGXYHQB8Ea_25json_rpc_general_requests |
129 | | |
130 | | /// Error potentially returned by [`encode`]. |
131 | 0 | #[derive(Debug, derive_more::Display, Clone)] Unexecuted instantiation: _RNvXs8_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB5_11EncodeErrorNtNtCsaYZPK01V26L_4core3fmt7Display3fmt Unexecuted instantiation: _RNvXs8_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB5_11EncodeErrorNtNtCsaYZPK01V26L_4core3fmt7Display3fmt |
132 | | pub enum EncodeError { |
133 | | /// Nodes that have no children nor storage value are invalid unless they are the root node. |
134 | | PartialKeyButNoChildrenNoStorageValue, |
135 | | } |
136 | | |
137 | | /// Encodes the components of a node value into the node value itself. |
138 | | /// |
139 | | /// This is a convenient wrapper around [`encode`]. See the documentation of [`encode`] for more |
140 | | /// details. |
141 | 28.8k | pub fn encode_to_vec( |
142 | 28.8k | decoded: Decoded< |
143 | 28.8k | '_, |
144 | 28.8k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, |
145 | 28.8k | impl AsRef<[u8]> + Clone, |
146 | 28.8k | >, |
147 | 28.8k | ) -> Result<Vec<u8>, EncodeError> { |
148 | 28.8k | let capacity = decoded.partial_key.len() / 2 |
149 | 28.8k | + match decoded.storage_value { |
150 | 0 | StorageValue::Hashed(_) => 32, |
151 | 5.58k | StorageValue::None => 0, |
152 | 23.2k | StorageValue::Unhashed(v) => v.len(), |
153 | | } |
154 | 28.8k | + 16 * 32 |
155 | | + 32; // The last `+ 32` is an arbitrary margin, for length prefixes and the header. |
156 | | |
157 | 112k | let result28.8k = encode(decoded)28.8k ?0 .fold(Vec::with_capacity(capacity), 28.8k |mut a, b| { |
158 | 112k | a.extend_from_slice(b.as_ref()); |
159 | 112k | a |
160 | 112k | }); _RNCINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node13encode_to_vecINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB6_6nibble6NibbleERShE0B8_ Line | Count | Source | 157 | 112k | let result = encode(decoded)?.fold(Vec::with_capacity(capacity), |mut a, b| { | 158 | 112k | a.extend_from_slice(b.as_ref()); | 159 | 112k | a | 160 | 112k | }); |
_RNCINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node13encode_to_vecNtB4_17DecodedPartialKeyRShE0B8_ Line | Count | Source | 157 | 6 | let result = encode(decoded)?.fold(Vec::with_capacity(capacity), |mut a, b| { | 158 | 6 | a.extend_from_slice(b.as_ref()); | 159 | 6 | a | 160 | 6 | }); |
Unexecuted instantiation: _RNCINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node13encode_to_vecppE0B8_ |
161 | 28.8k | |
162 | 28.8k | // Check that `capacity` was calculated correctly and that no re-allocation has happened. |
163 | 28.8k | debug_assert_eq!(result.capacity(), capacity); |
164 | | |
165 | 28.8k | Ok(result) |
166 | 28.8k | } _RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node13encode_to_vecINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleERShEB6_ Line | Count | Source | 141 | 28.8k | pub fn encode_to_vec( | 142 | 28.8k | decoded: Decoded< | 143 | 28.8k | '_, | 144 | 28.8k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 145 | 28.8k | impl AsRef<[u8]> + Clone, | 146 | 28.8k | >, | 147 | 28.8k | ) -> Result<Vec<u8>, EncodeError> { | 148 | 28.8k | let capacity = decoded.partial_key.len() / 2 | 149 | 28.8k | + match decoded.storage_value { | 150 | 0 | StorageValue::Hashed(_) => 32, | 151 | 5.58k | StorageValue::None => 0, | 152 | 23.2k | StorageValue::Unhashed(v) => v.len(), | 153 | | } | 154 | 28.8k | + 16 * 32 | 155 | | + 32; // The last `+ 32` is an arbitrary margin, for length prefixes and the header. | 156 | | | 157 | 28.8k | let result = encode(decoded)?0 .fold(Vec::with_capacity(capacity), |mut a, b| { | 158 | | a.extend_from_slice(b.as_ref()); | 159 | | a | 160 | 28.8k | }); | 161 | 28.8k | | 162 | 28.8k | // Check that `capacity` was calculated correctly and that no re-allocation has happened. | 163 | 28.8k | debug_assert_eq!(result.capacity(), capacity); | 164 | | | 165 | 28.8k | Ok(result) | 166 | 28.8k | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node13encode_to_vecNtB2_17DecodedPartialKeyRShEB6_ Line | Count | Source | 141 | 1 | pub fn encode_to_vec( | 142 | 1 | decoded: Decoded< | 143 | 1 | '_, | 144 | 1 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 145 | 1 | impl AsRef<[u8]> + Clone, | 146 | 1 | >, | 147 | 1 | ) -> Result<Vec<u8>, EncodeError> { | 148 | 1 | let capacity = decoded.partial_key.len() / 2 | 149 | 1 | + match decoded.storage_value { | 150 | 0 | StorageValue::Hashed(_) => 32, | 151 | 0 | StorageValue::None => 0, | 152 | 1 | StorageValue::Unhashed(v) => v.len(), | 153 | | } | 154 | 1 | + 16 * 32 | 155 | | + 32; // The last `+ 32` is an arbitrary margin, for length prefixes and the header. | 156 | | | 157 | 1 | let result = encode(decoded)?0 .fold(Vec::with_capacity(capacity), |mut a, b| { | 158 | | a.extend_from_slice(b.as_ref()); | 159 | | a | 160 | 1 | }); | 161 | 1 | | 162 | 1 | // Check that `capacity` was calculated correctly and that no re-allocation has happened. | 163 | 1 | debug_assert_eq!(result.capacity(), capacity); | 164 | | | 165 | 1 | Ok(result) | 166 | 1 | } |
Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node13encode_to_vecppEB6_ |
167 | | |
168 | | /// Calculates the Merkle value of the given node. |
169 | | /// |
170 | | /// `is_root_node` must be `true` if the encoded node is the root node of the trie. |
171 | | /// |
172 | | /// This is similar to [`encode`], except that the encoding is then optionally hashed. |
173 | | /// |
174 | | /// Hashing is performed if the encoded value is 32 bytes or more, or if `is_root_node` is `true`. |
175 | | /// This is the reason why `is_root_node` must be provided. |
176 | 838k | pub fn calculate_merkle_value( |
177 | 838k | decoded: Decoded< |
178 | 838k | '_, |
179 | 838k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, |
180 | 838k | impl AsRef<[u8]> + Clone, |
181 | 838k | >, |
182 | 838k | hash_function: HashFunction, |
183 | 838k | is_root_node: bool, |
184 | 838k | ) -> Result<MerkleValueOutput, EncodeError> { |
185 | | /// The Merkle value of a node is defined as either the hash of the node value, or the node value |
186 | | /// itself if it is shorted than 32 bytes (or if we are the root). |
187 | | /// |
188 | | /// This struct serves as a helper to handle these situations. Rather than putting intermediary |
189 | | /// values in buffers then hashing the node value as a whole, we push the elements of the node |
190 | | /// value to this struct which automatically switches to hashing if the value exceeds 32 bytes. |
191 | | enum HashOrInline { |
192 | | Inline(arrayvec::ArrayVec<u8, 31>), |
193 | | Blake2Hasher(blake2_rfc::blake2b::Blake2b), |
194 | | Keccak256Hasher(sha3::Keccak256), |
195 | | } |
196 | | |
197 | 838k | let mut merkle_value_sink = match (is_root_node, hash_function) { |
198 | | (true, HashFunction::Blake2) => { |
199 | 99.3k | HashOrInline::Blake2Hasher(blake2_rfc::blake2b::Blake2b::new(32)) |
200 | | } |
201 | | (true, HashFunction::Keccak256) => { |
202 | 3 | HashOrInline::Keccak256Hasher(<sha3::Keccak256 as sha3::Digest>::new()) |
203 | | } |
204 | 739k | (false, _) => HashOrInline::Inline(arrayvec::ArrayVec::new()), |
205 | | }; |
206 | | |
207 | 3.18M | for buffer in encode(decoded)838k ?0 { |
208 | 3.18M | let buffer = buffer.as_ref(); |
209 | 3.18M | match &mut merkle_value_sink { |
210 | 1.95M | HashOrInline::Inline(curr) => { |
211 | 1.95M | if curr.try_extend_from_slice(buffer).is_ok() { |
212 | 1.86M | continue; |
213 | 83.4k | } |
214 | 83.4k | |
215 | 83.4k | match hash_function { |
216 | 83.4k | HashFunction::Blake2 => { |
217 | 83.4k | let mut hasher = blake2_rfc::blake2b::Blake2b::new(32); |
218 | 83.4k | hasher.update(curr); |
219 | 83.4k | hasher.update(buffer); |
220 | 83.4k | merkle_value_sink = HashOrInline::Blake2Hasher(hasher); |
221 | 83.4k | } |
222 | 0 | HashFunction::Keccak256 => { |
223 | 0 | let mut hasher = <sha3::Keccak256 as sha3::Digest>::new(); |
224 | 0 | sha3::Digest::update(&mut hasher, curr); |
225 | 0 | sha3::Digest::update(&mut hasher, buffer); |
226 | 0 | merkle_value_sink = HashOrInline::Keccak256Hasher(hasher); |
227 | 0 | } |
228 | | } |
229 | | } |
230 | 1.23M | HashOrInline::Blake2Hasher(hasher) => { |
231 | 1.23M | hasher.update(buffer); |
232 | 1.23M | } |
233 | 12 | HashOrInline::Keccak256Hasher(hasher) => { |
234 | 12 | sha3::Digest::update(hasher, buffer); |
235 | 12 | } |
236 | | } |
237 | | } |
238 | | |
239 | | Ok(MerkleValueOutput { |
240 | 838k | inner: match merkle_value_sink { |
241 | 655k | HashOrInline::Inline(b) => MerkleValueOutputInner::Inline(b), |
242 | 182k | HashOrInline::Blake2Hasher(h) => MerkleValueOutputInner::Blake2Hasher(h.finalize()), |
243 | 3 | HashOrInline::Keccak256Hasher(h) => { |
244 | 3 | MerkleValueOutputInner::Keccak256Hasher(sha3::Digest::finalize(h).into()) |
245 | | } |
246 | | }, |
247 | | }) |
248 | 838k | } _RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node22calculate_merkle_valueINtNtB4_6nibble14BytesToNibblesINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterhEERShEB6_ Line | Count | Source | 176 | 1 | pub fn calculate_merkle_value( | 177 | 1 | decoded: Decoded< | 178 | 1 | '_, | 179 | 1 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 180 | 1 | impl AsRef<[u8]> + Clone, | 181 | 1 | >, | 182 | 1 | hash_function: HashFunction, | 183 | 1 | is_root_node: bool, | 184 | 1 | ) -> Result<MerkleValueOutput, EncodeError> { | 185 | | /// The Merkle value of a node is defined as either the hash of the node value, or the node value | 186 | | /// itself if it is shorted than 32 bytes (or if we are the root). | 187 | | /// | 188 | | /// This struct serves as a helper to handle these situations. Rather than putting intermediary | 189 | | /// values in buffers then hashing the node value as a whole, we push the elements of the node | 190 | | /// value to this struct which automatically switches to hashing if the value exceeds 32 bytes. | 191 | | enum HashOrInline { | 192 | | Inline(arrayvec::ArrayVec<u8, 31>), | 193 | | Blake2Hasher(blake2_rfc::blake2b::Blake2b), | 194 | | Keccak256Hasher(sha3::Keccak256), | 195 | | } | 196 | | | 197 | 1 | let mut merkle_value_sink = match (is_root_node, hash_function) { | 198 | | (true, HashFunction::Blake2) => { | 199 | 1 | HashOrInline::Blake2Hasher(blake2_rfc::blake2b::Blake2b::new(32)) | 200 | | } | 201 | | (true, HashFunction::Keccak256) => { | 202 | 0 | HashOrInline::Keccak256Hasher(<sha3::Keccak256 as sha3::Digest>::new()) | 203 | | } | 204 | 0 | (false, _) => HashOrInline::Inline(arrayvec::ArrayVec::new()), | 205 | | }; | 206 | | | 207 | 2 | for buffer in encode(decoded)1 ?0 { | 208 | 2 | let buffer = buffer.as_ref(); | 209 | 2 | match &mut merkle_value_sink { | 210 | 0 | HashOrInline::Inline(curr) => { | 211 | 0 | if curr.try_extend_from_slice(buffer).is_ok() { | 212 | 0 | continue; | 213 | 0 | } | 214 | 0 |
| 215 | 0 | match hash_function { | 216 | 0 | HashFunction::Blake2 => { | 217 | 0 | let mut hasher = blake2_rfc::blake2b::Blake2b::new(32); | 218 | 0 | hasher.update(curr); | 219 | 0 | hasher.update(buffer); | 220 | 0 | merkle_value_sink = HashOrInline::Blake2Hasher(hasher); | 221 | 0 | } | 222 | 0 | HashFunction::Keccak256 => { | 223 | 0 | let mut hasher = <sha3::Keccak256 as sha3::Digest>::new(); | 224 | 0 | sha3::Digest::update(&mut hasher, curr); | 225 | 0 | sha3::Digest::update(&mut hasher, buffer); | 226 | 0 | merkle_value_sink = HashOrInline::Keccak256Hasher(hasher); | 227 | 0 | } | 228 | | } | 229 | | } | 230 | 2 | HashOrInline::Blake2Hasher(hasher) => { | 231 | 2 | hasher.update(buffer); | 232 | 2 | } | 233 | 0 | HashOrInline::Keccak256Hasher(hasher) => { | 234 | 0 | sha3::Digest::update(hasher, buffer); | 235 | 0 | } | 236 | | } | 237 | | } | 238 | | | 239 | | Ok(MerkleValueOutput { | 240 | 1 | inner: match merkle_value_sink { | 241 | 0 | HashOrInline::Inline(b) => MerkleValueOutputInner::Inline(b), | 242 | 1 | HashOrInline::Blake2Hasher(h) => MerkleValueOutputInner::Blake2Hasher(h.finalize()), | 243 | 0 | HashOrInline::Keccak256Hasher(h) => { | 244 | 0 | MerkleValueOutputInner::Keccak256Hasher(sha3::Digest::finalize(h).into()) | 245 | | } | 246 | | }, | 247 | | }) | 248 | 1 | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleENtB2_17MerkleValueOutputEB6_ Line | Count | Source | 176 | 470k | pub fn calculate_merkle_value( | 177 | 470k | decoded: Decoded< | 178 | 470k | '_, | 179 | 470k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 180 | 470k | impl AsRef<[u8]> + Clone, | 181 | 470k | >, | 182 | 470k | hash_function: HashFunction, | 183 | 470k | is_root_node: bool, | 184 | 470k | ) -> Result<MerkleValueOutput, EncodeError> { | 185 | | /// The Merkle value of a node is defined as either the hash of the node value, or the node value | 186 | | /// itself if it is shorted than 32 bytes (or if we are the root). | 187 | | /// | 188 | | /// This struct serves as a helper to handle these situations. Rather than putting intermediary | 189 | | /// values in buffers then hashing the node value as a whole, we push the elements of the node | 190 | | /// value to this struct which automatically switches to hashing if the value exceeds 32 bytes. 
| 191 | | enum HashOrInline { | 192 | | Inline(arrayvec::ArrayVec<u8, 31>), | 193 | | Blake2Hasher(blake2_rfc::blake2b::Blake2b), | 194 | | Keccak256Hasher(sha3::Keccak256), | 195 | | } | 196 | | | 197 | 470k | let mut merkle_value_sink = match (is_root_node, hash_function) { | 198 | | (true, HashFunction::Blake2) => { | 199 | 66.5k | HashOrInline::Blake2Hasher(blake2_rfc::blake2b::Blake2b::new(32)) | 200 | | } | 201 | | (true, HashFunction::Keccak256) => { | 202 | 0 | HashOrInline::Keccak256Hasher(<sha3::Keccak256 as sha3::Digest>::new()) | 203 | | } | 204 | 403k | (false, _) => HashOrInline::Inline(arrayvec::ArrayVec::new()), | 205 | | }; | 206 | | | 207 | 1.74M | for buffer in encode(decoded)470k ?0 { | 208 | 1.74M | let buffer = buffer.as_ref(); | 209 | 1.74M | match &mut merkle_value_sink { | 210 | 1.04M | HashOrInline::Inline(curr) => { | 211 | 1.04M | if curr.try_extend_from_slice(buffer).is_ok() { | 212 | 1.00M | continue; | 213 | 41.6k | } | 214 | 41.6k | | 215 | 41.6k | match hash_function { | 216 | 41.6k | HashFunction::Blake2 => { | 217 | 41.6k | let mut hasher = blake2_rfc::blake2b::Blake2b::new(32); | 218 | 41.6k | hasher.update(curr); | 219 | 41.6k | hasher.update(buffer); | 220 | 41.6k | merkle_value_sink = HashOrInline::Blake2Hasher(hasher); | 221 | 41.6k | } | 222 | 0 | HashFunction::Keccak256 => { | 223 | 0 | let mut hasher = <sha3::Keccak256 as sha3::Digest>::new(); | 224 | 0 | sha3::Digest::update(&mut hasher, curr); | 225 | 0 | sha3::Digest::update(&mut hasher, buffer); | 226 | 0 | merkle_value_sink = HashOrInline::Keccak256Hasher(hasher); | 227 | 0 | } | 228 | | } | 229 | | } | 230 | 698k | HashOrInline::Blake2Hasher(hasher) => { | 231 | 698k | hasher.update(buffer); | 232 | 698k | } | 233 | 0 | HashOrInline::Keccak256Hasher(hasher) => { | 234 | 0 | sha3::Digest::update(hasher, buffer); | 235 | 0 | } | 236 | | } | 237 | | } | 238 | | | 239 | | Ok(MerkleValueOutput { | 240 | 470k | inner: match merkle_value_sink { | 241 | 362k | 
HashOrInline::Inline(b) => MerkleValueOutputInner::Inline(b), | 242 | 108k | HashOrInline::Blake2Hasher(h) => MerkleValueOutputInner::Blake2Hasher(h.finalize()), | 243 | 0 | HashOrInline::Keccak256Hasher(h) => { | 244 | 0 | MerkleValueOutputInner::Keccak256Hasher(sha3::Digest::finalize(h).into()) | 245 | | } | 246 | | }, | 247 | | }) | 248 | 470k | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtNtCsaYZPK01V26L_4core4iter7sources5empty5EmptyNtNtB4_6nibble6NibbleERShEB6_ Line | Count | Source | 176 | 2 | pub fn calculate_merkle_value( | 177 | 2 | decoded: Decoded< | 178 | 2 | '_, | 179 | 2 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 180 | 2 | impl AsRef<[u8]> + Clone, | 181 | 2 | >, | 182 | 2 | hash_function: HashFunction, | 183 | 2 | is_root_node: bool, | 184 | 2 | ) -> Result<MerkleValueOutput, EncodeError> { | 185 | | /// The Merkle value of a node is defined as either the hash of the node value, or the node value | 186 | | /// itself if it is shorted than 32 bytes (or if we are the root). | 187 | | /// | 188 | | /// This struct serves as a helper to handle these situations. Rather than putting intermediary | 189 | | /// values in buffers then hashing the node value as a whole, we push the elements of the node | 190 | | /// value to this struct which automatically switches to hashing if the value exceeds 32 bytes. | 191 | | enum HashOrInline { | 192 | | Inline(arrayvec::ArrayVec<u8, 31>), | 193 | | Blake2Hasher(blake2_rfc::blake2b::Blake2b), | 194 | | Keccak256Hasher(sha3::Keccak256), | 195 | | } | 196 | | | 197 | 2 | let mut merkle_value_sink = match (is_root_node, hash_function) { | 198 | | (true, HashFunction::Blake2) => { | 199 | 1 | HashOrInline::Blake2Hasher(blake2_rfc::blake2b::Blake2b::new(32)) | 200 | | } | 201 | | (true, HashFunction::Keccak256) => { | 202 | 1 | HashOrInline::Keccak256Hasher(<sha3::Keccak256 as sha3::Digest>::new()) | 203 | | } | 204 | 0 | (false, _) => HashOrInline::Inline(arrayvec::ArrayVec::new()), | 205 | | }; | 206 | | | 207 | 4 | for buffer in encode(decoded)2 ?0 { | 208 | 4 | let buffer = buffer.as_ref(); | 209 | 4 | match &mut merkle_value_sink { | 210 | 0 | HashOrInline::Inline(curr) => { | 211 | 0 | if curr.try_extend_from_slice(buffer).is_ok() { | 212 | 0 | continue; | 213 | 0 | } | 214 | 0 |
| 215 | 0 | match hash_function { | 216 | 0 | HashFunction::Blake2 => { | 217 | 0 | let mut hasher = blake2_rfc::blake2b::Blake2b::new(32); | 218 | 0 | hasher.update(curr); | 219 | 0 | hasher.update(buffer); | 220 | 0 | merkle_value_sink = HashOrInline::Blake2Hasher(hasher); | 221 | 0 | } | 222 | 0 | HashFunction::Keccak256 => { | 223 | 0 | let mut hasher = <sha3::Keccak256 as sha3::Digest>::new(); | 224 | 0 | sha3::Digest::update(&mut hasher, curr); | 225 | 0 | sha3::Digest::update(&mut hasher, buffer); | 226 | 0 | merkle_value_sink = HashOrInline::Keccak256Hasher(hasher); | 227 | 0 | } | 228 | | } | 229 | | } | 230 | 2 | HashOrInline::Blake2Hasher(hasher) => { | 231 | 2 | hasher.update(buffer); | 232 | 2 | } | 233 | 2 | HashOrInline::Keccak256Hasher(hasher) => { | 234 | 2 | sha3::Digest::update(hasher, buffer); | 235 | 2 | } | 236 | | } | 237 | | } | 238 | | | 239 | | Ok(MerkleValueOutput { | 240 | 2 | inner: match merkle_value_sink { | 241 | 0 | HashOrInline::Inline(b) => MerkleValueOutputInner::Inline(b), | 242 | 1 | HashOrInline::Blake2Hasher(h) => MerkleValueOutputInner::Blake2Hasher(h.finalize()), | 243 | 1 | HashOrInline::Keccak256Hasher(h) => { | 244 | 1 | MerkleValueOutputInner::Keccak256Hasher(sha3::Digest::finalize(h).into()) | 245 | | } | 246 | | }, | 247 | | }) | 248 | 2 | } |
_RINvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB1d_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputEB6_ Line | Count | Source | 176 | 366k | pub fn calculate_merkle_value( | 177 | 366k | decoded: Decoded< | 178 | 366k | '_, | 179 | 366k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 180 | 366k | impl AsRef<[u8]> + Clone, | 181 | 366k | >, | 182 | 366k | hash_function: HashFunction, | 183 | 366k | is_root_node: bool, | 184 | 366k | ) -> Result<MerkleValueOutput, EncodeError> { | 185 | | /// The Merkle value of a node is defined as either the hash of the node value, or the node value | 186 | | /// itself if it is shorted than 32 bytes (or if we are the root). | 187 | | /// | 188 | | /// This struct serves as a helper to handle these situations. Rather than putting intermediary | 189 | | /// values in buffers then hashing the node value as a whole, we push the elements of the node | 190 | | /// value to this struct which automatically switches to hashing if the value exceeds 32 bytes. 
| 191 | | enum HashOrInline { | 192 | | Inline(arrayvec::ArrayVec<u8, 31>), | 193 | | Blake2Hasher(blake2_rfc::blake2b::Blake2b), | 194 | | Keccak256Hasher(sha3::Keccak256), | 195 | | } | 196 | | | 197 | 366k | let mut merkle_value_sink = match (is_root_node, hash_function) { | 198 | | (true, HashFunction::Blake2) => { | 199 | 32.7k | HashOrInline::Blake2Hasher(blake2_rfc::blake2b::Blake2b::new(32)) | 200 | | } | 201 | | (true, HashFunction::Keccak256) => { | 202 | 2 | HashOrInline::Keccak256Hasher(<sha3::Keccak256 as sha3::Digest>::new()) | 203 | | } | 204 | 333k | (false, _) => HashOrInline::Inline(arrayvec::ArrayVec::new()), | 205 | | }; | 206 | | | 207 | 1.43M | for buffer in encode(decoded)366k ?0 { | 208 | 1.43M | let buffer = buffer.as_ref(); | 209 | 1.43M | match &mut merkle_value_sink { | 210 | 897k | HashOrInline::Inline(curr) => { | 211 | 897k | if curr.try_extend_from_slice(buffer).is_ok() { | 212 | 857k | continue; | 213 | 40.2k | } | 214 | 40.2k | | 215 | 40.2k | match hash_function { | 216 | 40.2k | HashFunction::Blake2 => { | 217 | 40.2k | let mut hasher = blake2_rfc::blake2b::Blake2b::new(32); | 218 | 40.2k | hasher.update(curr); | 219 | 40.2k | hasher.update(buffer); | 220 | 40.2k | merkle_value_sink = HashOrInline::Blake2Hasher(hasher); | 221 | 40.2k | } | 222 | 0 | HashFunction::Keccak256 => { | 223 | 0 | let mut hasher = <sha3::Keccak256 as sha3::Digest>::new(); | 224 | 0 | sha3::Digest::update(&mut hasher, curr); | 225 | 0 | sha3::Digest::update(&mut hasher, buffer); | 226 | 0 | merkle_value_sink = HashOrInline::Keccak256Hasher(hasher); | 227 | 0 | } | 228 | | } | 229 | | } | 230 | 535k | HashOrInline::Blake2Hasher(hasher) => { | 231 | 535k | hasher.update(buffer); | 232 | 535k | } | 233 | 10 | HashOrInline::Keccak256Hasher(hasher) => { | 234 | 10 | sha3::Digest::update(hasher, buffer); | 235 | 10 | } | 236 | | } | 237 | | } | 238 | | | 239 | | Ok(MerkleValueOutput { | 240 | 366k | inner: match merkle_value_sink { | 241 | 293k | 
HashOrInline::Inline(b) => MerkleValueOutputInner::Inline(b), | 242 | 73.0k | HashOrInline::Blake2Hasher(h) => MerkleValueOutputInner::Blake2Hasher(h.finalize()), | 243 | 2 | HashOrInline::Keccak256Hasher(h) => { | 244 | 2 | MerkleValueOutputInner::Keccak256Hasher(sha3::Digest::finalize(h).into()) | 245 | | } | 246 | | }, | 247 | | }) | 248 | 366k | } |
Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB1e_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputECsDDUKWWCHAU_18smoldot_light_wasm _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB1e_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputEB6_ Line | Count | Source | 176 | 1.00k | pub fn calculate_merkle_value( | 177 | 1.00k | decoded: Decoded< | 178 | 1.00k | '_, | 179 | 1.00k | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 180 | 1.00k | impl AsRef<[u8]> + Clone, | 181 | 1.00k | >, | 182 | 1.00k | hash_function: HashFunction, | 183 | 1.00k | is_root_node: bool, | 184 | 1.00k | ) -> Result<MerkleValueOutput, EncodeError> { | 185 | | /// The Merkle value of a node is defined as either the hash of the node value, or the node value | 186 | | /// itself if it is shorted than 32 bytes (or if we are the root). | 187 | | /// | 188 | | /// This struct serves as a helper to handle these situations. Rather than putting intermediary | 189 | | /// values in buffers then hashing the node value as a whole, we push the elements of the node | 190 | | /// value to this struct which automatically switches to hashing if the value exceeds 32 bytes. 
| 191 | | enum HashOrInline { | 192 | | Inline(arrayvec::ArrayVec<u8, 31>), | 193 | | Blake2Hasher(blake2_rfc::blake2b::Blake2b), | 194 | | Keccak256Hasher(sha3::Keccak256), | 195 | | } | 196 | | | 197 | 1.00k | let mut merkle_value_sink = match (is_root_node, hash_function) { | 198 | | (true, HashFunction::Blake2) => { | 199 | 21 | HashOrInline::Blake2Hasher(blake2_rfc::blake2b::Blake2b::new(32)) | 200 | | } | 201 | | (true, HashFunction::Keccak256) => { | 202 | 0 | HashOrInline::Keccak256Hasher(<sha3::Keccak256 as sha3::Digest>::new()) | 203 | | } | 204 | 987 | (false, _) => HashOrInline::Inline(arrayvec::ArrayVec::new()), | 205 | | }; | 206 | | | 207 | 3.99k | for buffer in encode(decoded)1.00k ?0 { | 208 | 3.99k | let buffer = buffer.as_ref(); | 209 | 3.99k | match &mut merkle_value_sink { | 210 | 2.12k | HashOrInline::Inline(curr) => { | 211 | 2.12k | if curr.try_extend_from_slice(buffer).is_ok() { | 212 | 1.34k | continue; | 213 | 777 | } | 214 | 777 | | 215 | 777 | match hash_function { | 216 | 777 | HashFunction::Blake2 => { | 217 | 777 | let mut hasher = blake2_rfc::blake2b::Blake2b::new(32); | 218 | 777 | hasher.update(curr); | 219 | 777 | hasher.update(buffer); | 220 | 777 | merkle_value_sink = HashOrInline::Blake2Hasher(hasher); | 221 | 777 | } | 222 | 0 | HashFunction::Keccak256 => { | 223 | 0 | let mut hasher = <sha3::Keccak256 as sha3::Digest>::new(); | 224 | 0 | sha3::Digest::update(&mut hasher, curr); | 225 | 0 | sha3::Digest::update(&mut hasher, buffer); | 226 | 0 | merkle_value_sink = HashOrInline::Keccak256Hasher(hasher); | 227 | 0 | } | 228 | | } | 229 | | } | 230 | 1.86k | HashOrInline::Blake2Hasher(hasher) => { | 231 | 1.86k | hasher.update(buffer); | 232 | 1.86k | } | 233 | 0 | HashOrInline::Keccak256Hasher(hasher) => { | 234 | 0 | sha3::Digest::update(hasher, buffer); | 235 | 0 | } | 236 | | } | 237 | | } | 238 | | | 239 | | Ok(MerkleValueOutput { | 240 | 1.00k | inner: match merkle_value_sink { | 241 | 210 | HashOrInline::Inline(b) => 
MerkleValueOutputInner::Inline(b), | 242 | 798 | HashOrInline::Blake2Hasher(h) => MerkleValueOutputInner::Blake2Hasher(h.finalize()), | 243 | 0 | HashOrInline::Keccak256Hasher(h) => { | 244 | 0 | MerkleValueOutputInner::Keccak256Hasher(sha3::Digest::finalize(h).into()) | 245 | | } | 246 | | }, | 247 | | }) | 248 | 1.00k | } |
_RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleENtB2_17MerkleValueOutputECsiLzmwikkc22_14json_rpc_basic Line | Count | Source | 176 | 96 | pub fn calculate_merkle_value( | 177 | 96 | decoded: Decoded< | 178 | 96 | '_, | 179 | 96 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 180 | 96 | impl AsRef<[u8]> + Clone, | 181 | 96 | >, | 182 | 96 | hash_function: HashFunction, | 183 | 96 | is_root_node: bool, | 184 | 96 | ) -> Result<MerkleValueOutput, EncodeError> { | 185 | | /// The Merkle value of a node is defined as either the hash of the node value, or the node value | 186 | | /// itself if it is shorted than 32 bytes (or if we are the root). | 187 | | /// | 188 | | /// This struct serves as a helper to handle these situations. Rather than putting intermediary | 189 | | /// values in buffers then hashing the node value as a whole, we push the elements of the node | 190 | | /// value to this struct which automatically switches to hashing if the value exceeds 32 bytes. 
| 191 | | enum HashOrInline { | 192 | | Inline(arrayvec::ArrayVec<u8, 31>), | 193 | | Blake2Hasher(blake2_rfc::blake2b::Blake2b), | 194 | | Keccak256Hasher(sha3::Keccak256), | 195 | | } | 196 | | | 197 | 96 | let mut merkle_value_sink = match (is_root_node, hash_function) { | 198 | | (true, HashFunction::Blake2) => { | 199 | 2 | HashOrInline::Blake2Hasher(blake2_rfc::blake2b::Blake2b::new(32)) | 200 | | } | 201 | | (true, HashFunction::Keccak256) => { | 202 | 0 | HashOrInline::Keccak256Hasher(<sha3::Keccak256 as sha3::Digest>::new()) | 203 | | } | 204 | 94 | (false, _) => HashOrInline::Inline(arrayvec::ArrayVec::new()), | 205 | | }; | 206 | | | 207 | 380 | for buffer in encode(decoded)96 ?0 { | 208 | 380 | let buffer = buffer.as_ref(); | 209 | 380 | match &mut merkle_value_sink { | 210 | 202 | HashOrInline::Inline(curr) => { | 211 | 202 | if curr.try_extend_from_slice(buffer).is_ok() { | 212 | 128 | continue; | 213 | 74 | } | 214 | 74 | | 215 | 74 | match hash_function { | 216 | 74 | HashFunction::Blake2 => { | 217 | 74 | let mut hasher = blake2_rfc::blake2b::Blake2b::new(32); | 218 | 74 | hasher.update(curr); | 219 | 74 | hasher.update(buffer); | 220 | 74 | merkle_value_sink = HashOrInline::Blake2Hasher(hasher); | 221 | 74 | } | 222 | 0 | HashFunction::Keccak256 => { | 223 | 0 | let mut hasher = <sha3::Keccak256 as sha3::Digest>::new(); | 224 | 0 | sha3::Digest::update(&mut hasher, curr); | 225 | 0 | sha3::Digest::update(&mut hasher, buffer); | 226 | 0 | merkle_value_sink = HashOrInline::Keccak256Hasher(hasher); | 227 | 0 | } | 228 | | } | 229 | | } | 230 | 178 | HashOrInline::Blake2Hasher(hasher) => { | 231 | 178 | hasher.update(buffer); | 232 | 178 | } | 233 | 0 | HashOrInline::Keccak256Hasher(hasher) => { | 234 | 0 | sha3::Digest::update(hasher, buffer); | 235 | 0 | } | 236 | | } | 237 | | } | 238 | | | 239 | | Ok(MerkleValueOutput { | 240 | 96 | inner: match merkle_value_sink { | 241 | 20 | HashOrInline::Inline(b) => MerkleValueOutputInner::Inline(b), | 242 | 
76 | HashOrInline::Blake2Hasher(h) => MerkleValueOutputInner::Blake2Hasher(h.finalize()), | 243 | 0 | HashOrInline::Keccak256Hasher(h) => { | 244 | 0 | MerkleValueOutputInner::Keccak256Hasher(sha3::Digest::finalize(h).into()) | 245 | | } | 246 | | }, | 247 | | }) | 248 | 96 | } |
Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB1e_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputECsiLzmwikkc22_14json_rpc_basic Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleENtB2_17MerkleValueOutputECscDgN54JpMGG_6author Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB1e_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputECscDgN54JpMGG_6author _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB4_6nibble6NibbleENtB2_17MerkleValueOutputECsibGXYHQB8Ea_25json_rpc_general_requests Line | Count | Source | 176 | 912 | pub fn calculate_merkle_value( | 177 | 912 | decoded: Decoded< | 178 | 912 | '_, | 179 | 912 | impl ExactSizeIterator<Item = nibble::Nibble> + Clone, | 180 | 912 | impl AsRef<[u8]> + Clone, | 181 | 912 | >, | 182 | 912 | hash_function: HashFunction, | 183 | 912 | is_root_node: bool, | 184 | 912 | ) -> Result<MerkleValueOutput, EncodeError> { | 185 | | /// The Merkle value of a node is defined as either the hash of the node value, or the node value | 186 | | /// itself if it is shorted than 32 bytes (or if we are the root). | 187 | | /// | 188 | | /// This struct serves as a helper to handle these situations. Rather than putting intermediary | 189 | | /// values in buffers then hashing the node value as a whole, we push the elements of the node | 190 | | /// value to this struct which automatically switches to hashing if the value exceeds 32 bytes. 
| 191 | | enum HashOrInline { | 192 | | Inline(arrayvec::ArrayVec<u8, 31>), | 193 | | Blake2Hasher(blake2_rfc::blake2b::Blake2b), | 194 | | Keccak256Hasher(sha3::Keccak256), | 195 | | } | 196 | | | 197 | 912 | let mut merkle_value_sink = match (is_root_node, hash_function) { | 198 | | (true, HashFunction::Blake2) => { | 199 | 19 | HashOrInline::Blake2Hasher(blake2_rfc::blake2b::Blake2b::new(32)) | 200 | | } | 201 | | (true, HashFunction::Keccak256) => { | 202 | 0 | HashOrInline::Keccak256Hasher(<sha3::Keccak256 as sha3::Digest>::new()) | 203 | | } | 204 | 893 | (false, _) => HashOrInline::Inline(arrayvec::ArrayVec::new()), | 205 | | }; | 206 | | | 207 | 3.61k | for buffer in encode(decoded)912 ?0 { | 208 | 3.61k | let buffer = buffer.as_ref(); | 209 | 3.61k | match &mut merkle_value_sink { | 210 | 1.91k | HashOrInline::Inline(curr) => { | 211 | 1.91k | if curr.try_extend_from_slice(buffer).is_ok() { | 212 | 1.21k | continue; | 213 | 703 | } | 214 | 703 | | 215 | 703 | match hash_function { | 216 | 703 | HashFunction::Blake2 => { | 217 | 703 | let mut hasher = blake2_rfc::blake2b::Blake2b::new(32); | 218 | 703 | hasher.update(curr); | 219 | 703 | hasher.update(buffer); | 220 | 703 | merkle_value_sink = HashOrInline::Blake2Hasher(hasher); | 221 | 703 | } | 222 | 0 | HashFunction::Keccak256 => { | 223 | 0 | let mut hasher = <sha3::Keccak256 as sha3::Digest>::new(); | 224 | 0 | sha3::Digest::update(&mut hasher, curr); | 225 | 0 | sha3::Digest::update(&mut hasher, buffer); | 226 | 0 | merkle_value_sink = HashOrInline::Keccak256Hasher(hasher); | 227 | 0 | } | 228 | | } | 229 | | } | 230 | 1.69k | HashOrInline::Blake2Hasher(hasher) => { | 231 | 1.69k | hasher.update(buffer); | 232 | 1.69k | } | 233 | 0 | HashOrInline::Keccak256Hasher(hasher) => { | 234 | 0 | sha3::Digest::update(hasher, buffer); | 235 | 0 | } | 236 | | } | 237 | | } | 238 | | | 239 | | Ok(MerkleValueOutput { | 240 | 912 | inner: match merkle_value_sink { | 241 | 190 | HashOrInline::Inline(b) => 
MerkleValueOutputInner::Inline(b), | 242 | 722 | HashOrInline::Blake2Hasher(h) => MerkleValueOutputInner::Blake2Hasher(h.finalize()), | 243 | 0 | HashOrInline::Keccak256Hasher(h) => { | 244 | 0 | MerkleValueOutputInner::Keccak256Hasher(sha3::Digest::finalize(h).into()) | 245 | | } | 246 | | }, | 247 | | }) | 248 | 912 | } |
Unexecuted instantiation: _RINvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node22calculate_merkle_valueINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB1e_5slice4iter4IterNtNtB4_6nibble6NibbleEERNtB2_17MerkleValueOutputECsibGXYHQB8Ea_25json_rpc_general_requests |
249 | | |
/// Output of the calculation.
///
/// Opaque wrapper around the Merkle value bytes. Use the `AsRef<[u8]>` implementation to
/// access them; the value is at most 32 bytes long.
#[derive(Clone)]
pub struct MerkleValueOutput {
    // Actual representation of the value; see the variants of [`MerkleValueOutputInner`].
    inner: MerkleValueOutputInner,
}
255 | | |
#[derive(Clone)]
enum MerkleValueOutputInner {
    /// Node value was strictly shorter than 32 bytes and is stored verbatim, unhashed.
    Inline(arrayvec::ArrayVec<u8, 31>),
    /// 32-byte Blake2b hash of the node value.
    Blake2Hasher(blake2_rfc::blake2b::Blake2bResult),
    /// 32-byte Keccak-256 hash of the node value.
    Keccak256Hasher([u8; 32]),
    /// Value provided as-is through [`MerkleValueOutput::from_bytes`].
    Bytes(arrayvec::ArrayVec<u8, 32>),
}
263 | | |
264 | | impl MerkleValueOutput { |
265 | | /// Builds a [`MerkleValueOutput`] from a slice of bytes. |
266 | | /// |
267 | | /// # Panic |
268 | | /// |
269 | | /// Panics if `bytes.len() > 32`. |
270 | | /// |
271 | 16.2k | pub fn from_bytes(bytes: &[u8]) -> MerkleValueOutput { |
272 | 16.2k | assert!(bytes.len() <= 32); |
273 | 16.2k | MerkleValueOutput { |
274 | 16.2k | inner: MerkleValueOutputInner::Bytes({ |
275 | 16.2k | let mut v = arrayvec::ArrayVec::new(); |
276 | 16.2k | v.try_extend_from_slice(bytes).unwrap(); |
277 | 16.2k | v |
278 | 16.2k | }), |
279 | 16.2k | } |
280 | 16.2k | } _RNvMNtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB2_17MerkleValueOutput10from_bytes Line | Count | Source | 271 | 16.2k | pub fn from_bytes(bytes: &[u8]) -> MerkleValueOutput { | 272 | 16.2k | assert!(bytes.len() <= 32); | 273 | 16.2k | MerkleValueOutput { | 274 | 16.2k | inner: MerkleValueOutputInner::Bytes({ | 275 | 16.2k | let mut v = arrayvec::ArrayVec::new(); | 276 | 16.2k | v.try_extend_from_slice(bytes).unwrap(); | 277 | 16.2k | v | 278 | 16.2k | }), | 279 | 16.2k | } | 280 | 16.2k | } |
Unexecuted instantiation: _RNvMNtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB2_17MerkleValueOutput10from_bytes |
281 | | } |
282 | | |
283 | | impl AsRef<[u8]> for MerkleValueOutput { |
284 | 2.59M | fn as_ref(&self) -> &[u8] { |
285 | 2.59M | match &self.inner { |
286 | 1.99M | MerkleValueOutputInner::Inline(a) => a.as_slice(), |
287 | 566k | MerkleValueOutputInner::Blake2Hasher(a) => a.as_bytes(), |
288 | 3 | MerkleValueOutputInner::Keccak256Hasher(a) => &a[..], |
289 | 32.4k | MerkleValueOutputInner::Bytes(a) => a.as_slice(), |
290 | | } |
291 | 2.59M | } _RNvXs_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB4_17MerkleValueOutputINtNtCsaYZPK01V26L_4core7convert5AsRefShE6as_ref Line | Count | Source | 284 | 2.58M | fn as_ref(&self) -> &[u8] { | 285 | 2.58M | match &self.inner { | 286 | 1.99M | MerkleValueOutputInner::Inline(a) => a.as_slice(), | 287 | 561k | MerkleValueOutputInner::Blake2Hasher(a) => a.as_bytes(), | 288 | 3 | MerkleValueOutputInner::Keccak256Hasher(a) => &a[..], | 289 | 32.4k | MerkleValueOutputInner::Bytes(a) => a.as_slice(), | 290 | | } | 291 | 2.58M | } |
_RNvXs_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB4_17MerkleValueOutputINtNtCsaYZPK01V26L_4core7convert5AsRefShE6as_ref Line | Count | Source | 284 | 5.96k | fn as_ref(&self) -> &[u8] { | 285 | 5.96k | match &self.inner { | 286 | 1.26k | MerkleValueOutputInner::Inline(a) => a.as_slice(), | 287 | 4.70k | MerkleValueOutputInner::Blake2Hasher(a) => a.as_bytes(), | 288 | 0 | MerkleValueOutputInner::Keccak256Hasher(a) => &a[..], | 289 | 0 | MerkleValueOutputInner::Bytes(a) => a.as_slice(), | 290 | | } | 291 | 5.96k | } |
|
292 | | } |
293 | | |
294 | | impl TryFrom<MerkleValueOutput> for [u8; 32] { |
295 | | type Error = (); // TODO: proper error? |
296 | | |
297 | 32.7k | fn try_from(output: MerkleValueOutput) -> Result<Self, Self::Error> { |
298 | 32.7k | if output.as_ref().len() == 32 { |
299 | 32.7k | let mut out = [0; 32]; |
300 | 32.7k | out.copy_from_slice(output.as_ref()); |
301 | 32.7k | Ok(out) |
302 | | } else { |
303 | 0 | Err(()) |
304 | | } |
305 | 32.7k | } _RNvXs0_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeAhj20_INtNtCsaYZPK01V26L_4core7convert7TryFromNtB5_17MerkleValueOutputE8try_from Line | Count | Source | 297 | 32.7k | fn try_from(output: MerkleValueOutput) -> Result<Self, Self::Error> { | 298 | 32.7k | if output.as_ref().len() == 32 { | 299 | 32.7k | let mut out = [0; 32]; | 300 | 32.7k | out.copy_from_slice(output.as_ref()); | 301 | 32.7k | Ok(out) | 302 | | } else { | 303 | 0 | Err(()) | 304 | | } | 305 | 32.7k | } |
Unexecuted instantiation: _RNvXs0_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeAhj20_INtNtCsaYZPK01V26L_4core7convert7TryFromNtB5_17MerkleValueOutputE8try_from |
306 | | } |
307 | | |
308 | | impl fmt::Debug for MerkleValueOutput { |
309 | 0 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
310 | 0 | fmt::Debug::fmt(self.as_ref(), f) |
311 | 0 | } Unexecuted instantiation: _RNvXs1_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB5_17MerkleValueOutputNtNtCsaYZPK01V26L_4core3fmt5Debug3fmt Unexecuted instantiation: _RNvXs1_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB5_17MerkleValueOutputNtNtCsaYZPK01V26L_4core3fmt5Debug3fmt |
312 | | } |
313 | | |
314 | | /// Decodes a node value found in a proof into its components. |
315 | | /// |
316 | | /// This can decode nodes no matter their version or hash algorithm. |
317 | 154k | pub fn decode(mut node_value: &'_ [u8]) -> Result<Decoded<DecodedPartialKey<'_>, &'_ [u8]>, Error> { |
318 | 154k | if node_value.is_empty() { |
319 | 1 | return Err(Error::Empty); |
320 | 154k | } |
321 | | |
322 | | // See https://spec.polkadot.network/#defn-node-header |
323 | 154k | let (has_children, storage_value_hashed, pk_len_first_byte_bits154k ) = match node_value[0] >> 6 { |
324 | | 0b00 => { |
325 | 88 | if (node_value[0] >> 5) == 0b001 { |
326 | 82 | (false, Some(true), 5) |
327 | 6 | } else if (node_value[0] >> 4) == 0b0001 { |
328 | 1 | (true, Some(true), 4) |
329 | 5 | } else if node_value[0] == 0 { |
330 | 3 | (false, None, 6) |
331 | | } else { |
332 | 2 | return Err(Error::InvalidHeaderBits); |
333 | | } |
334 | | } |
335 | 65.2k | 0b10 => (true, None, 6), |
336 | 81.6k | 0b01 => (false, Some(false), 6), |
337 | 7.53k | 0b11 => (true, Some(false), 6), |
338 | 0 | _ => unreachable!(), |
339 | | }; |
340 | | |
341 | | // Length of the partial key, in nibbles. |
342 | 154k | let pk_len = { |
343 | 154k | let mut accumulator = usize::from(node_value[0] & ((1 << pk_len_first_byte_bits) - 1)); |
344 | 154k | node_value = &node_value[1..]; |
345 | 154k | let mut continue_iter = accumulator == ((1 << pk_len_first_byte_bits) - 1); |
346 | 157k | while continue_iter { |
347 | 2.77k | if node_value.is_empty() { |
348 | 0 | return Err(Error::PartialKeyLenTooShort); |
349 | 2.77k | } |
350 | 2.77k | continue_iter = node_value[0] == 255; |
351 | 2.77k | accumulator = accumulator |
352 | 2.77k | .checked_add(usize::from(node_value[0])) |
353 | 2.77k | .ok_or(Error::PartialKeyLenOverflow)?0 ; |
354 | 2.77k | node_value = &node_value[1..]; |
355 | | } |
356 | 154k | accumulator |
357 | 154k | }; |
358 | 154k | |
359 | 154k | // No children and no storage value can only indicate the root of an empty trie, in which case |
360 | 154k | // a non-empty partial key is invalid. |
361 | 154k | if pk_len != 0 && !has_children86.6k && storage_value_hashed.is_none()75.8k { |
362 | 0 | return Err(Error::EmptyTrieWithPartialKey); |
363 | 154k | } |
364 | | |
365 | | // Iterator to the partial key found in the node value of `proof_iter`. |
366 | 154k | let partial_key = { |
367 | | // Length of the partial key, in bytes. |
368 | 154k | let pk_len_bytes = if pk_len == 0 { |
369 | 67.9k | 0 |
370 | | } else { |
371 | 86.6k | 1 + ((pk_len - 1) / 2) |
372 | | }; |
373 | 154k | if node_value.len() < pk_len_bytes { |
374 | 1 | return Err(Error::PartialKeyTooShort); |
375 | 154k | } |
376 | 154k | |
377 | 154k | let pk = &node_value[..pk_len_bytes]; |
378 | 154k | node_value = &node_value[pk_len_bytes..]; |
379 | 154k | |
380 | 154k | if (pk_len % 2) == 1 && (pk[0] & 0xf0) != 052.2k { |
381 | 0 | return Err(Error::InvalidPartialKeyPadding); |
382 | 154k | } |
383 | 154k | |
384 | 154k | pk |
385 | | }; |
386 | | |
387 | | // After the partial key, the node value optionally contains a bitfield of child nodes. |
388 | 154k | let children_bitmap154k = if has_children { |
389 | 72.7k | if node_value.len() < 2 { |
390 | 0 | return Err(Error::ChildrenBitmapTooShort); |
391 | 72.7k | } |
392 | 72.7k | let val = u16::from_le_bytes(<[u8; 2]>::try_from(&node_value[..2]).unwrap()); |
393 | 72.7k | if val == 0 { |
394 | 1 | return Err(Error::ZeroChildrenBitmap); |
395 | 72.7k | } |
396 | 72.7k | node_value = &node_value[2..]; |
397 | 72.7k | val |
398 | | } else { |
399 | 81.7k | 0 |
400 | | }; |
401 | | |
402 | | // Now at the value that interests us. |
403 | 154k | let storage_value154k = match storage_value_hashed { |
404 | | Some(false) => { |
405 | 89.2k | let (node_value_update, len89.2k ) = crate::util::nom_scale_compact_usize(node_value) |
406 | 89.2k | .map_err(|_: nom::Err<nom::error::Error<&[u8]>>| Error::StorageValueLenDecode12 )?12 ; _RNCNvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6decode0B7_ Line | Count | Source | 406 | 12 | .map_err(|_: nom::Err<nom::error::Error<&[u8]>>| Error::StorageValueLenDecode)?; |
Unexecuted instantiation: _RNCNvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6decode0B7_ |
407 | 89.2k | node_value = node_value_update; |
408 | 89.2k | if node_value.len() < len { |
409 | 32 | return Err(Error::StorageValueTooShort); |
410 | 89.1k | } |
411 | 89.1k | let storage_value = &node_value[..len]; |
412 | 89.1k | node_value = &node_value[len..]; |
413 | 89.1k | StorageValue::Unhashed(storage_value) |
414 | | } |
415 | | Some(true) => { |
416 | 82 | if node_value.len() < 32 { |
417 | 0 | return Err(Error::StorageValueTooShort); |
418 | 82 | } |
419 | 82 | let storage_value_hash = <&[u8; 32]>::try_from(&node_value[..32]).unwrap(); |
420 | 82 | node_value = &node_value[32..]; |
421 | 82 | StorageValue::Hashed(storage_value_hash) |
422 | | } |
423 | 65.2k | None => StorageValue::None, |
424 | | }; |
425 | | |
426 | 154k | let mut children = [None; 16]; |
427 | 2.47M | for (n, child) in children.iter_mut().enumerate()154k { |
428 | 2.47M | if children_bitmap & (1 << n) == 0 { |
429 | 1.88M | continue; |
430 | 583k | } |
431 | | |
432 | | // Find the Merkle value of that child in `node_value`. |
433 | 583k | let (node_value_update, len583k ) = crate::util::nom_scale_compact_usize(node_value) |
434 | 583k | .map_err(|_: nom::Err<nom::error::Error<&[u8]>>| Error::ChildLenDecode5 )?5 ; _RNCNvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6decodes_0B7_ Line | Count | Source | 434 | 5 | .map_err(|_: nom::Err<nom::error::Error<&[u8]>>| Error::ChildLenDecode)?; |
Unexecuted instantiation: _RNCNvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6decodes_0B7_ |
435 | 583k | if len > 32 { |
436 | 2 | return Err(Error::ChildTooLarge); |
437 | 583k | } |
438 | 583k | node_value = node_value_update; |
439 | 583k | if node_value.len() < len { |
440 | 2 | return Err(Error::ChildrenTooShort); |
441 | 583k | } |
442 | 583k | |
443 | 583k | *child = Some(&node_value[..len]); |
444 | 583k | node_value = &node_value[len..]; |
445 | | } |
446 | | |
447 | 154k | if !node_value.is_empty() { |
448 | 3 | return Err(Error::TooLong); |
449 | 154k | } |
450 | 154k | |
451 | 154k | Ok(Decoded { |
452 | 154k | partial_key: if (pk_len % 2) == 1 { |
453 | 52.2k | DecodedPartialKey::from_bytes_skip_first(partial_key) |
454 | | } else { |
455 | 102k | DecodedPartialKey::from_bytes(partial_key) |
456 | | }, |
457 | 154k | children, |
458 | 154k | storage_value, |
459 | | }) |
460 | 154k | } _RNvNtNtCsN16ciHI6Qf_7smoldot4trie9trie_node6decode Line | Count | Source | 317 | 154k | pub fn decode(mut node_value: &'_ [u8]) -> Result<Decoded<DecodedPartialKey<'_>, &'_ [u8]>, Error> { | 318 | 154k | if node_value.is_empty() { | 319 | 1 | return Err(Error::Empty); | 320 | 154k | } | 321 | | | 322 | | // See https://spec.polkadot.network/#defn-node-header | 323 | 154k | let (has_children, storage_value_hashed, pk_len_first_byte_bits154k ) = match node_value[0] >> 6 { | 324 | | 0b00 => { | 325 | 88 | if (node_value[0] >> 5) == 0b001 { | 326 | 82 | (false, Some(true), 5) | 327 | 6 | } else if (node_value[0] >> 4) == 0b0001 { | 328 | 1 | (true, Some(true), 4) | 329 | 5 | } else if node_value[0] == 0 { | 330 | 3 | (false, None, 6) | 331 | | } else { | 332 | 2 | return Err(Error::InvalidHeaderBits); | 333 | | } | 334 | | } | 335 | 65.2k | 0b10 => (true, None, 6), | 336 | 81.6k | 0b01 => (false, Some(false), 6), | 337 | 7.53k | 0b11 => (true, Some(false), 6), | 338 | 0 | _ => unreachable!(), | 339 | | }; | 340 | | | 341 | | // Length of the partial key, in nibbles. 
| 342 | 154k | let pk_len = { | 343 | 154k | let mut accumulator = usize::from(node_value[0] & ((1 << pk_len_first_byte_bits) - 1)); | 344 | 154k | node_value = &node_value[1..]; | 345 | 154k | let mut continue_iter = accumulator == ((1 << pk_len_first_byte_bits) - 1); | 346 | 157k | while continue_iter { | 347 | 2.77k | if node_value.is_empty() { | 348 | 0 | return Err(Error::PartialKeyLenTooShort); | 349 | 2.77k | } | 350 | 2.77k | continue_iter = node_value[0] == 255; | 351 | 2.77k | accumulator = accumulator | 352 | 2.77k | .checked_add(usize::from(node_value[0])) | 353 | 2.77k | .ok_or(Error::PartialKeyLenOverflow)?0 ; | 354 | 2.77k | node_value = &node_value[1..]; | 355 | | } | 356 | 154k | accumulator | 357 | 154k | }; | 358 | 154k | | 359 | 154k | // No children and no storage value can only indicate the root of an empty trie, in which case | 360 | 154k | // a non-empty partial key is invalid. | 361 | 154k | if pk_len != 0 && !has_children86.6k && storage_value_hashed.is_none()75.8k { | 362 | 0 | return Err(Error::EmptyTrieWithPartialKey); | 363 | 154k | } | 364 | | | 365 | | // Iterator to the partial key found in the node value of `proof_iter`. | 366 | 154k | let partial_key = { | 367 | | // Length of the partial key, in bytes. | 368 | 154k | let pk_len_bytes = if pk_len == 0 { | 369 | 67.9k | 0 | 370 | | } else { | 371 | 86.6k | 1 + ((pk_len - 1) / 2) | 372 | | }; | 373 | 154k | if node_value.len() < pk_len_bytes { | 374 | 1 | return Err(Error::PartialKeyTooShort); | 375 | 154k | } | 376 | 154k | | 377 | 154k | let pk = &node_value[..pk_len_bytes]; | 378 | 154k | node_value = &node_value[pk_len_bytes..]; | 379 | 154k | | 380 | 154k | if (pk_len % 2) == 1 && (pk[0] & 0xf0) != 052.2k { | 381 | 0 | return Err(Error::InvalidPartialKeyPadding); | 382 | 154k | } | 383 | 154k | | 384 | 154k | pk | 385 | | }; | 386 | | | 387 | | // After the partial key, the node value optionally contains a bitfield of child nodes. 
| 388 | 154k | let children_bitmap154k = if has_children { | 389 | 72.7k | if node_value.len() < 2 { | 390 | 0 | return Err(Error::ChildrenBitmapTooShort); | 391 | 72.7k | } | 392 | 72.7k | let val = u16::from_le_bytes(<[u8; 2]>::try_from(&node_value[..2]).unwrap()); | 393 | 72.7k | if val == 0 { | 394 | 1 | return Err(Error::ZeroChildrenBitmap); | 395 | 72.7k | } | 396 | 72.7k | node_value = &node_value[2..]; | 397 | 72.7k | val | 398 | | } else { | 399 | 81.7k | 0 | 400 | | }; | 401 | | | 402 | | // Now at the value that interests us. | 403 | 154k | let storage_value154k = match storage_value_hashed { | 404 | | Some(false) => { | 405 | 89.2k | let (node_value_update, len89.2k ) = crate::util::nom_scale_compact_usize(node_value) | 406 | 89.2k | .map_err(|_: nom::Err<nom::error::Error<&[u8]>>| Error::StorageValueLenDecode)?12 ; | 407 | 89.2k | node_value = node_value_update; | 408 | 89.2k | if node_value.len() < len { | 409 | 32 | return Err(Error::StorageValueTooShort); | 410 | 89.1k | } | 411 | 89.1k | let storage_value = &node_value[..len]; | 412 | 89.1k | node_value = &node_value[len..]; | 413 | 89.1k | StorageValue::Unhashed(storage_value) | 414 | | } | 415 | | Some(true) => { | 416 | 82 | if node_value.len() < 32 { | 417 | 0 | return Err(Error::StorageValueTooShort); | 418 | 82 | } | 419 | 82 | let storage_value_hash = <&[u8; 32]>::try_from(&node_value[..32]).unwrap(); | 420 | 82 | node_value = &node_value[32..]; | 421 | 82 | StorageValue::Hashed(storage_value_hash) | 422 | | } | 423 | 65.2k | None => StorageValue::None, | 424 | | }; | 425 | | | 426 | 154k | let mut children = [None; 16]; | 427 | 2.47M | for (n, child) in children.iter_mut().enumerate()154k { | 428 | 2.47M | if children_bitmap & (1 << n) == 0 { | 429 | 1.88M | continue; | 430 | 583k | } | 431 | | | 432 | | // Find the Merkle value of that child in `node_value`. 
| 433 | 583k | let (node_value_update, len583k ) = crate::util::nom_scale_compact_usize(node_value) | 434 | 583k | .map_err(|_: nom::Err<nom::error::Error<&[u8]>>| Error::ChildLenDecode)?5 ; | 435 | 583k | if len > 32 { | 436 | 2 | return Err(Error::ChildTooLarge); | 437 | 583k | } | 438 | 583k | node_value = node_value_update; | 439 | 583k | if node_value.len() < len { | 440 | 2 | return Err(Error::ChildrenTooShort); | 441 | 583k | } | 442 | 583k | | 443 | 583k | *child = Some(&node_value[..len]); | 444 | 583k | node_value = &node_value[len..]; | 445 | | } | 446 | | | 447 | 154k | if !node_value.is_empty() { | 448 | 3 | return Err(Error::TooLong); | 449 | 154k | } | 450 | 154k | | 451 | 154k | Ok(Decoded { | 452 | 154k | partial_key: if (pk_len % 2) == 1 { | 453 | 52.2k | DecodedPartialKey::from_bytes_skip_first(partial_key) | 454 | | } else { | 455 | 102k | DecodedPartialKey::from_bytes(partial_key) | 456 | | }, | 457 | 154k | children, | 458 | 154k | storage_value, | 459 | | }) | 460 | 154k | } |
Unexecuted instantiation: _RNvNtNtCseuYC0Zibziv_7smoldot4trie9trie_node6decode |
461 | | |
462 | | /// Decoded node value. Returned by [`decode`] or passed as parameter to [`encode`]. |
463 | | #[derive(Debug, Clone)] |
464 | | pub struct Decoded<'a, I, C> { |
465 | | /// Iterator to the nibbles of the partial key of the node. |
466 | | pub partial_key: I, |
467 | | |
468 | | /// All 16 possible children. `Some` if a child is present, and `None` otherwise. The `&[u8]` |
469 | | /// can be: |
470 | | /// |
471 | | /// - Of length 32, in which case the slice is the hash of the node value of the child (also |
472 | | /// known as the Merkle value). |
473 | | /// - Empty when decoding a compact trie proof. |
474 | | /// - Of length inferior to 32, in which case the slice is directly the node value. |
475 | | /// |
476 | | pub children: [Option<C>; 16], |
477 | | |
478 | | /// Storage value of this node. |
479 | | pub storage_value: StorageValue<'a>, |
480 | | } |
481 | | |
482 | | impl<'a, I, C> Decoded<'a, I, C> { |
483 | | /// Returns a bits map of the children that are present, as found in the node value. |
484 | 257k | pub fn children_bitmap(&self) -> u16 { |
485 | 257k | let mut out = 0u16; |
486 | 4.36M | for n4.11M in 0..16 { |
487 | 4.11M | if self.children[n].is_none() { |
488 | 3.27M | continue; |
489 | 842k | } |
490 | 842k | out |= 1 << n; |
491 | | } |
492 | 257k | out |
493 | 257k | } Unexecuted instantiation: _RNvMs2_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtB7_6nibble14BytesToNibblesINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterhEERShE15children_bitmapB9_ _RNvMs2_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB7_6nibble6NibbleENtB5_17MerkleValueOutputE15children_bitmapB9_ Line | Count | Source | 484 | 118k | pub fn children_bitmap(&self) -> u16 { | 485 | 118k | let mut out = 0u16; | 486 | 2.02M | for n1.90M in 0..16 { | 487 | 1.90M | if self.children[n].is_none() { | 488 | 1.49M | continue; | 489 | 403k | } | 490 | 403k | out |= 1 << n; | 491 | | } | 492 | 118k | out | 493 | 118k | } |
_RNvMs2_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB7_6nibble6NibbleERShE15children_bitmapB9_ Line | Count | Source | 484 | 7.76k | pub fn children_bitmap(&self) -> u16 { | 485 | 7.76k | let mut out = 0u16; | 486 | 132k | for n124k in 0..16 { | 487 | 124k | if self.children[n].is_none() { | 488 | 96.9k | continue; | 489 | 27.3k | } | 490 | 27.3k | out |= 1 << n; | 491 | | } | 492 | 7.76k | out | 493 | 7.76k | } |
Unexecuted instantiation: _RNvMs2_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtNtCsaYZPK01V26L_4core4iter7sources4once4OnceNtNtB7_6nibble6NibbleERShE15children_bitmapB9_ Unexecuted instantiation: _RNvMs2_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtNtCsaYZPK01V26L_4core4iter7sources5empty5EmptyNtNtB7_6nibble6NibbleERShE15children_bitmapB9_ _RNvMs2_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB16_5slice4iter4IterNtNtB7_6nibble6NibbleEERNtB5_17MerkleValueOutputE15children_bitmapB9_ Line | Count | Source | 484 | 91.1k | pub fn children_bitmap(&self) -> u16 { | 485 | 91.1k | let mut out = 0u16; | 486 | 1.54M | for n1.45M in 0..16 { | 487 | 1.45M | if self.children[n].is_none() { | 488 | 1.10M | continue; | 489 | 349k | } | 490 | 349k | out |= 1 << n; | 491 | | } | 492 | 91.1k | out | 493 | 91.1k | } |
_RNvMs2_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeINtB5_7DecodedNtB5_17DecodedPartialKeyRShE15children_bitmapB9_ Line | Count | Source | 484 | 38.7k | pub fn children_bitmap(&self) -> u16 { | 485 | 38.7k | let mut out = 0u16; | 486 | 658k | for n619k in 0..16 { | 487 | 619k | if self.children[n].is_none() { | 488 | 559k | continue; | 489 | 59.4k | } | 490 | 59.4k | out |= 1 << n; | 491 | | } | 492 | 38.7k | out | 493 | 38.7k | } |
Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB17_5slice4iter4IterNtNtB7_6nibble6NibbleEERNtB5_17MerkleValueOutputE15children_bitmapCsDDUKWWCHAU_18smoldot_light_wasm Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedNtB5_17DecodedPartialKeyRShE15children_bitmapCsDDUKWWCHAU_18smoldot_light_wasm _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB17_5slice4iter4IterNtNtB7_6nibble6NibbleEERNtB5_17MerkleValueOutputE15children_bitmapB9_ Line | Count | Source | 484 | 273 | pub fn children_bitmap(&self) -> u16 { | 485 | 273 | let mut out = 0u16; | 486 | 4.64k | for n4.36k in 0..16 { | 487 | 4.36k | if self.children[n].is_none() { | 488 | 3.38k | continue; | 489 | 987 | } | 490 | 987 | out |= 1 << n; | 491 | | } | 492 | 273 | out | 493 | 273 | } |
Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedNtB5_17DecodedPartialKeyRShE15children_bitmapB9_ _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB7_6nibble6NibbleENtB5_17MerkleValueOutputE15children_bitmapCsiLzmwikkc22_14json_rpc_basic Line | Count | Source | 484 | 26 | pub fn children_bitmap(&self) -> u16 { | 485 | 26 | let mut out = 0u16; | 486 | 442 | for n416 in 0..16 { | 487 | 416 | if self.children[n].is_none() { | 488 | 322 | continue; | 489 | 94 | } | 490 | 94 | out |= 1 << n; | 491 | | } | 492 | 26 | out | 493 | 26 | } |
Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB17_5slice4iter4IterNtNtB7_6nibble6NibbleEERNtB5_17MerkleValueOutputE15children_bitmapCsiLzmwikkc22_14json_rpc_basic Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedNtB5_17DecodedPartialKeyRShE15children_bitmapCsiLzmwikkc22_14json_rpc_basic Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB7_6nibble6NibbleENtB5_17MerkleValueOutputE15children_bitmapCscDgN54JpMGG_6author Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB17_5slice4iter4IterNtNtB7_6nibble6NibbleEERNtB5_17MerkleValueOutputE15children_bitmapCscDgN54JpMGG_6author Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedNtB5_17DecodedPartialKeyRShE15children_bitmapCscDgN54JpMGG_6author _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtCsdZExvAaxgia_5alloc3vec9into_iter8IntoIterNtNtB7_6nibble6NibbleENtB5_17MerkleValueOutputE15children_bitmapCsibGXYHQB8Ea_25json_rpc_general_requests Line | Count | Source | 484 | 247 | pub fn children_bitmap(&self) -> u16 { | 485 | 247 | let mut out = 0u16; | 486 | 4.19k | for n3.95k in 0..16 { | 487 | 3.95k | if self.children[n].is_none() { | 488 | 3.05k | continue; | 489 | 893 | } | 490 | 893 | out |= 1 << n; | 491 | | } | 492 | 247 | out | 493 | 247 | } |
Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedINtNtNtNtCsaYZPK01V26L_4core4iter8adapters6copied6CopiedINtNtNtB17_5slice4iter4IterNtNtB7_6nibble6NibbleEERNtB5_17MerkleValueOutputE15children_bitmapCsibGXYHQB8Ea_25json_rpc_general_requests Unexecuted instantiation: _RNvMs2_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeINtB5_7DecodedNtB5_17DecodedPartialKeyRShE15children_bitmapCsibGXYHQB8Ea_25json_rpc_general_requests |
494 | | } |
495 | | |
496 | | /// See [`Decoded::storage_value`]. |
497 | | #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] |
498 | | pub enum StorageValue<'a> { |
499 | | /// Storage value of the item is present in the node value. |
500 | | Unhashed(&'a [u8]), |
501 | | /// Hash of the storage value of the item is present in the node value. |
502 | | Hashed(&'a [u8; 32]), |
503 | | /// Item doesn't have any storage value. |
504 | | None, |
505 | | } |
506 | | |
507 | | /// Iterator to the nibbles of the partial key. See [`Decoded::partial_key`]. |
508 | | #[derive(Clone)] |
509 | | pub struct DecodedPartialKey<'a> { |
510 | | inner: nibble::BytesToNibbles<iter::Copied<slice::Iter<'a, u8>>>, |
511 | | skip_first: bool, |
512 | | } |
513 | | |
514 | | impl<'a> DecodedPartialKey<'a> { |
515 | | /// Returns a [`DecodedPartialKey`] iterator that produces the nibbles encoded as the given |
516 | | /// bytes. Each byte is turned into two nibbles. |
517 | | /// |
518 | | /// > **Note**: This function is a convenient wrapper around [`nibble::bytes_to_nibbles`]. |
519 | 102k | pub fn from_bytes(bytes: &'a [u8]) -> Self { |
520 | 102k | DecodedPartialKey { |
521 | 102k | inner: nibble::bytes_to_nibbles(bytes.iter().copied()), |
522 | 102k | skip_first: false, |
523 | 102k | } |
524 | 102k | } _RNvMs3_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKey10from_bytes Line | Count | Source | 519 | 102k | pub fn from_bytes(bytes: &'a [u8]) -> Self { | 520 | 102k | DecodedPartialKey { | 521 | 102k | inner: nibble::bytes_to_nibbles(bytes.iter().copied()), | 522 | 102k | skip_first: false, | 523 | 102k | } | 524 | 102k | } |
Unexecuted instantiation: _RNvMs3_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKey10from_bytes |
525 | | |
526 | | /// Equivalent to [`DecodedPartialKey::from_bytes`], but skips the first nibble. |
527 | | /// |
528 | | /// This is useful for situations where the partial key contains a `0` prefix that exists for |
529 | | /// alignment but doesn't actually represent a nibble. |
530 | | /// |
531 | | /// > **Note**: This is equivalent to `from_bytes(bytes).skip(1)`. The possibility to skip the |
532 | | /// > first nibble is built into this code due to how frequent it is necessary. |
533 | 52.2k | pub fn from_bytes_skip_first(bytes: &'a [u8]) -> Self { |
534 | 52.2k | DecodedPartialKey { |
535 | 52.2k | inner: nibble::bytes_to_nibbles(bytes.iter().copied()), |
536 | 52.2k | skip_first: true, |
537 | 52.2k | } |
538 | 52.2k | } _RNvMs3_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKey21from_bytes_skip_first Line | Count | Source | 533 | 52.2k | pub fn from_bytes_skip_first(bytes: &'a [u8]) -> Self { | 534 | 52.2k | DecodedPartialKey { | 535 | 52.2k | inner: nibble::bytes_to_nibbles(bytes.iter().copied()), | 536 | 52.2k | skip_first: true, | 537 | 52.2k | } | 538 | 52.2k | } |
Unexecuted instantiation: _RNvMs3_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKey21from_bytes_skip_first |
539 | | } |
540 | | |
541 | | impl<'a> Iterator for DecodedPartialKey<'a> { |
542 | | type Item = nibble::Nibble; |
543 | | |
544 | 756k | fn next(&mut self) -> Option<nibble::Nibble> { |
545 | | loop { |
546 | 783k | let nibble681k = self.inner.next()?101k ; |
547 | 681k | if self.skip_first { |
548 | 26.9k | debug_assert_eq!(u8::from(nibble), 0); |
549 | 26.9k | self.skip_first = false; |
550 | 26.9k | continue; |
551 | 654k | } |
552 | 654k | break Some(nibble); |
553 | | } |
554 | 756k | } _RNvXs4_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKeyNtNtNtNtCsaYZPK01V26L_4core4iter6traits8iterator8Iterator4next Line | Count | Source | 544 | 756k | fn next(&mut self) -> Option<nibble::Nibble> { | 545 | | loop { | 546 | 783k | let nibble681k = self.inner.next()?101k ; | 547 | 681k | if self.skip_first { | 548 | 26.9k | debug_assert_eq!(u8::from(nibble), 0); | 549 | 26.9k | self.skip_first = false; | 550 | 26.9k | continue; | 551 | 654k | } | 552 | 654k | break Some(nibble); | 553 | | } | 554 | 756k | } |
Unexecuted instantiation: _RNvXs4_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKeyNtNtNtNtCsaYZPK01V26L_4core4iter6traits8iterator8Iterator4next |
555 | | |
556 | 148k | fn size_hint(&self) -> (usize, Option<usize>) { |
557 | 148k | let mut len = self.inner.len(); |
558 | 148k | if self.skip_first { |
559 | 51.0k | len -= 1; |
560 | 97.7k | } |
561 | 148k | (len, Some(len)) |
562 | 148k | } _RNvXs4_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKeyNtNtNtNtCsaYZPK01V26L_4core4iter6traits8iterator8Iterator9size_hint Line | Count | Source | 556 | 148k | fn size_hint(&self) -> (usize, Option<usize>) { | 557 | 148k | let mut len = self.inner.len(); | 558 | 148k | if self.skip_first { | 559 | 51.0k | len -= 1; | 560 | 97.7k | } | 561 | 148k | (len, Some(len)) | 562 | 148k | } |
Unexecuted instantiation: _RNvXs4_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKeyNtNtNtNtCsaYZPK01V26L_4core4iter6traits8iterator8Iterator9size_hint |
563 | | } |
564 | | |
565 | | impl<'a> ExactSizeIterator for DecodedPartialKey<'a> {} |
566 | | |
567 | | impl<'a> fmt::Debug for DecodedPartialKey<'a> { |
568 | 0 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
569 | 0 | const HEX_TABLE: &[u8] = b"0123456789abcdef"; |
570 | 0 | write!(f, "0x")?; |
571 | 0 | for nibble in self.clone() { |
572 | 0 | let chr = HEX_TABLE[usize::from(u8::from(nibble))]; |
573 | 0 | write!(f, "{}", char::from(chr))?; |
574 | | } |
575 | 0 | Ok(()) |
576 | 0 | } Unexecuted instantiation: _RNvXs6_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKeyNtNtCsaYZPK01V26L_4core3fmt5Debug3fmt Unexecuted instantiation: _RNvXs6_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB5_17DecodedPartialKeyNtNtCsaYZPK01V26L_4core3fmt5Debug3fmt |
577 | | } |
578 | | |
579 | | /// Possible error returned by [`decode`]. |
580 | 0 | #[derive(Debug, Clone, derive_more::Display)] Unexecuted instantiation: _RNvXso_NtNtCsN16ciHI6Qf_7smoldot4trie9trie_nodeNtB5_5ErrorNtNtCsaYZPK01V26L_4core3fmt7Display3fmt Unexecuted instantiation: _RNvXso_NtNtCseuYC0Zibziv_7smoldot4trie9trie_nodeNtB5_5ErrorNtNtCsaYZPK01V26L_4core3fmt7Display3fmt |
581 | | pub enum Error { |
582 | | /// Node value is empty. |
583 | | Empty, |
584 | | /// Bits in the header have an invalid format. |
585 | | InvalidHeaderBits, |
586 | | /// Node value ends while parsing partial key length. |
587 | | PartialKeyLenTooShort, |
588 | | /// Length of partial key is too large to be reasonable. |
589 | | PartialKeyLenOverflow, |
590 | | /// Node value ends within partial key. |
591 | | PartialKeyTooShort, |
592 | | /// If partial key is of uneven length, then it must be padded with `0`. |
593 | | InvalidPartialKeyPadding, |
594 | | /// End of data within the children bitmap. |
595 | | ChildrenBitmapTooShort, |
596 | | /// The children bitmap is equal to 0 despite the header indicating the presence of children. |
597 | | ZeroChildrenBitmap, |
598 | | /// Error while decoding length of child. |
599 | | ChildLenDecode, |
600 | | /// Node value ends within a child value. |
601 | | ChildrenTooShort, |
602 | | /// Child value is superior to 32 bytes. |
603 | | ChildTooLarge, |
604 | | /// Error while decoding length of storage value. |
605 | | StorageValueLenDecode, |
606 | | /// Node value ends within the storage value. |
607 | | StorageValueTooShort, |
608 | | /// Node value is longer than expected. |
609 | | TooLong, |
610 | | /// Node value indicates that it is the root of an empty trie but contains a non-empty partial |
611 | | /// key. |
612 | | EmptyTrieWithPartialKey, |
613 | | } |
614 | | |
615 | | #[cfg(test)] |
616 | | mod tests { |
617 | | use super::super::nibble; |
618 | | |
619 | | #[test] |
620 | 1 | fn basic() { |
621 | 1 | let encoded_bytes = &[ |
622 | 1 | 194, 99, 192, 0, 0, 128, 129, 254, 111, 21, 39, 188, 215, 18, 139, 76, 128, 157, 108, |
623 | 1 | 33, 139, 232, 34, 73, 0, 21, 202, 54, 18, 71, 145, 117, 47, 222, 189, 93, 119, 68, 128, |
624 | 1 | 108, 211, 105, 98, 122, 206, 246, 73, 77, 237, 51, 77, 26, 166, 1, 52, 179, 173, 43, |
625 | 1 | 89, 219, 104, 196, 190, 208, 128, 135, 177, 13, 185, 111, 175, |
626 | 1 | ]; |
627 | 1 | |
628 | 1 | let decoded = super::decode(encoded_bytes).unwrap(); |
629 | 1 | |
630 | 1 | assert_eq!( |
631 | 1 | super::encode(decoded.clone()) |
632 | 1 | .unwrap() |
633 | 6 | .fold(Vec::new(), |mut a, b| { |
634 | 6 | a.extend_from_slice(b.as_ref()); |
635 | 6 | a |
636 | 6 | }), |
637 | 1 | encoded_bytes |
638 | 1 | ); |
639 | 1 | assert_eq!( |
640 | 1 | decoded.partial_key.clone().collect::<Vec<_>>(), |
641 | 1 | vec![ |
642 | 1 | nibble::Nibble::try_from(0x6).unwrap(), |
643 | 1 | nibble::Nibble::try_from(0x3).unwrap() |
644 | 1 | ] |
645 | 1 | ); |
646 | 1 | assert_eq!( |
647 | 1 | decoded.storage_value, |
648 | 1 | super::StorageValue::Unhashed(&[][..]) |
649 | 1 | ); |
650 | | |
651 | 16 | assert_eq!(decoded.children.iter().filter(1 |c| c.is_some()).count(), 2)1 ; |
652 | 1 | assert_eq!( |
653 | 1 | decoded.children[6], |
654 | 1 | Some( |
655 | 1 | &[ |
656 | 1 | 129, 254, 111, 21, 39, 188, 215, 18, 139, 76, 128, 157, 108, 33, 139, 232, 34, |
657 | 1 | 73, 0, 21, 202, 54, 18, 71, 145, 117, 47, 222, 189, 93, 119, 68 |
658 | 1 | ][..] |
659 | 1 | ) |
660 | 1 | ); |
661 | 1 | assert_eq!( |
662 | 1 | decoded.children[7], |
663 | 1 | Some( |
664 | 1 | &[ |
665 | 1 | 108, 211, 105, 98, 122, 206, 246, 73, 77, 237, 51, 77, 26, 166, 1, 52, 179, |
666 | 1 | 173, 43, 89, 219, 104, 196, 190, 208, 128, 135, 177, 13, 185, 111, 175 |
667 | 1 | ][..] |
668 | 1 | ) |
669 | 1 | ); |
670 | | |
671 | 1 | assert_eq!(super::encode_to_vec(decoded).unwrap(), encoded_bytes); |
672 | 1 | } |
673 | | |
674 | | #[test] |
675 | 1 | fn no_children_no_storage_value() { |
676 | 1 | assert!(matches!0 ( |
677 | 1 | super::encode(super::Decoded { |
678 | 1 | children: [None::<&'static [u8]>; 16], |
679 | 1 | storage_value: super::StorageValue::None, |
680 | 1 | partial_key: core::iter::empty() |
681 | 1 | }), |
682 | | Ok(_) |
683 | | )); |
684 | | |
685 | 1 | assert!(matches!0 ( |
686 | 1 | super::encode(super::Decoded { |
687 | 1 | children: [None::<&'static [u8]>; 16], |
688 | 1 | storage_value: super::StorageValue::None, |
689 | 1 | partial_key: core::iter::once(nibble::Nibble::try_from(2).unwrap()) |
690 | 1 | }), |
691 | | Err(super::EncodeError::PartialKeyButNoChildrenNoStorageValue) |
692 | | )); |
693 | 1 | } |
694 | | } |