use proc_macro::TokenStream;
use quote::quote;
use syn::parse::{Parse, ParseStream};
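
/// A single blob of input bytes for the hashing macros.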
pub(super) struct InputBytes(pub Vec<u8>);
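
/// Several blobs of input bytes; see [`Self::concatenated`].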
pub(super) struct MultipleInputBytes(pub Vec<Vec<u8>>);

impl MultipleInputBytes {
    /// Concatenate all inputs into a single byte vector, preserving their order.
    pub(super) fn concatenated(mut self) -> Vec<u8> {
        if self.0.is_empty() {
            Vec::new()
        } else {
            let mut result = core::mem::take(&mut self.0[0]);
            for other in self.0[1..].iter_mut() {
                result.append(other);
            }
            result
        }
    }
}
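
// `InputBytes` accepts three input forms: an array of integer/byte literals
// (e.g. `[1u8, 2, 3]`), a bare identifier (hashed as the bytes of its name),
// or a byte string literal (e.g. `b"input"`).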
impl Parse for InputBytes {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        match syn::ExprArray::parse(input) {
            Ok(array) => {
                let mut bytes = Vec::<u8>::new();
                for expr in array.elems.iter() {
                    match expr {
                        syn::Expr::Lit(lit) => match &lit.lit {
                            syn::Lit::Int(b) => bytes.push(b.base10_parse()?),
                            syn::Lit::Byte(b) => bytes.push(b.value()),
                            _ =>
                                return Err(syn::Error::new(
                                    input.span(),
                                    "Expected array of u8 elements.".to_string(),
                                )),
                        },
                        _ =>
                            return Err(syn::Error::new(
                                input.span(),
                                "Expected array of u8 elements.".to_string(),
                            )),
                    }
                }
                return Ok(InputBytes(bytes))
            },
            Err(_e) => (),
        }

        match syn::Ident::parse(input) {
            Ok(ident) => return Ok(InputBytes(ident.to_string().as_bytes().to_vec())),
            Err(_e) => (),
        }

        Ok(InputBytes(syn::LitByteStr::parse(input)?.value()))
    }
}
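
// A comma-separated list of inputs; `parse_terminated` also accepts a trailing comma.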
impl Parse for MultipleInputBytes {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let elts =
            syn::punctuated::Punctuated::<InputBytes, syn::token::Comma>::parse_terminated(input)?;
        Ok(MultipleInputBytes(elts.into_iter().map(|elt| elt.0).collect()))
    }
}
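
/// 8-byte `twox_64` hash of `bytes`, emitted as a byte-array literal.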
pub(super) fn twox_64(bytes: Vec<u8>) -> TokenStream {
    bytes_to_array(sp_core_hashing::twox_64(bytes.as_slice()))
}
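
/// 16-byte `twox_128` hash of `bytes`, emitted as a byte-array literal.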
pub(super) fn twox_128(bytes: Vec<u8>) -> TokenStream {
    bytes_to_array(sp_core_hashing::twox_128(bytes.as_slice()))
}
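
/// 64-byte BLAKE2b-512 hash of `bytes`, emitted as a byte-array literal.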
pub(super) fn blake2b_512(bytes: Vec<u8>) -> TokenStream {
    bytes_to_array(sp_core_hashing::blake2_512(bytes.as_slice()))
}
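
/// 32-byte BLAKE2b-256 hash of `bytes`, emitted as a byte-array literal.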
pub(super) fn blake2b_256(bytes: Vec<u8>) -> TokenStream {
    bytes_to_array(sp_core_hashing::blake2_256(bytes.as_slice()))
}
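
/// 8-byte BLAKE2b-64 hash of `bytes`, emitted as a byte-array literal.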
pub(super) fn blake2b_64(bytes: Vec<u8>) -> TokenStream {
    bytes_to_array(sp_core_hashing::blake2_64(bytes.as_slice()))
}
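
/// 32-byte Keccak-256 hash of `bytes`, emitted as a byte-array literal.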
pub(super) fn keccak_256(bytes: Vec<u8>) -> TokenStream {
    bytes_to_array(sp_core_hashing::keccak_256(bytes.as_slice()))
}
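
/// 64-byte Keccak-512 hash of `bytes`, emitted as a byte-array literal.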
pub(super) fn keccak_512(bytes: Vec<u8>) -> TokenStream {
    bytes_to_array(sp_core_hashing::keccak_512(bytes.as_slice()))
}
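
/// 32-byte SHA-256 hash of `bytes`, emitted as a byte-array literal.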
pub(super) fn sha2_256(bytes: Vec<u8>) -> TokenStream {
    bytes_to_array(sp_core_hashing::sha2_256(bytes.as_slice()))
}
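
/// Render an iterator of bytes as a bracketed array-literal `TokenStream`, e.g. `[ 1u8, 2u8 ]`.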
fn bytes_to_array(bytes: impl IntoIterator<Item = u8>) -> TokenStream {
    let bytes = bytes.into_iter();
    quote!(
        [ #( #bytes ),* ]
    )
    .into()
}
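
// Illustrative-only sanity check (not part of the original file): `concatenated` is the
// one pure helper here, so it can be unit tested directly; the hashing helpers return
// `proc_macro::TokenStream` and only run inside macro expansion, so they are not covered.
#[cfg(test)]
mod tests {
    use super::MultipleInputBytes;

    #[test]
    fn concatenated_joins_inputs_in_order() {
        let inputs = MultipleInputBytes(vec![vec![1, 2], vec![], vec![3]]);
        assert_eq!(inputs.concatenated(), vec![1, 2, 3]);
        assert_eq!(MultipleInputBytes(Vec::new()).concatenated(), Vec::<u8>::new());
    }
}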