iota_analytics_indexer_derive/lib.rs

use proc_macro::TokenStream;
use quote::quote;
use syn::{Data, DeriveInput, Fields, parse_macro_input};

/// Derives `ParquetSchema` for a struct with named fields: `schema()`
/// returns the field names as column names, and `get_column(idx)` returns
/// the value of the field at position `idx` as a `ParquetValue`.
#[proc_macro_derive(SerializeParquet)]
pub fn schema_derive(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let struct_name = &input.ident;
    let (schema, getter_implementation) = match &input.data {
        Data::Struct(data_struct) => match &data_struct.fields {
            Fields::Named(fields) => {
                // For each named field, emit two code fragments as strings:
                // a schema entry (`"<name>".to_string()`) and a getter arm
                // (`if idx == N { return ... }`), collected into two
                // parallel lists.
                let (schema_iter, getter_iter): (Vec<_>, Vec<_>) = fields
                    .named
                    .iter()
                    .enumerate()
                    .map(|(idx, field)| {
                        let field_name = field.ident.as_ref().unwrap().to_string();
                        (
                            format!("\"{}\".to_string()", field_name),
                            format!(
                                "if idx == {} {{ return self.{}.clone().into(); }}",
                                idx, field_name
                            ),
                        )
                    })
                    .unzip();
                (schema_iter.join(", "), getter_iter.join("\n"))
            }
            _ => panic!("only structs with named fields are supported for Parquet serialization"),
        },
        _ => panic!("only structs are supported for Parquet serialization"),
    };
    // Parse the assembled string fragments back into token streams so they
    // can be spliced into the generated impl below.
    let schema_tokens: proc_macro2::TokenStream = schema.parse().unwrap();
    let getter_implementation_tokens: proc_macro2::TokenStream =
        getter_implementation.parse().unwrap();
    quote! {
        impl ParquetSchema for #struct_name {
            fn schema() -> Vec<String> {
                vec![#schema_tokens]
            }

            fn get_column(&self, idx: usize) -> ParquetValue {
                #getter_implementation_tokens
                panic!("unsupported column index {:?}", idx);
            }
        }
    }
    .into()
}
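
For context, here is a minimal sketch of how the generated impl is consumed from a downstream crate. The `ParquetValue` enum, the `ParquetSchema` trait, and the `Checkpoint` struct below are illustrative assumptions, not definitions from this crate; the real trait and value type live elsewhere in the workspace and must be in scope wherever the derive is used.

// Hypothetical sketch (downstream crate): the shapes the generated
// code relies on. A proc-macro crate cannot use its own derive, so
// this would live in a crate depending on iota_analytics_indexer_derive.
pub enum ParquetValue {
    U64(u64),
    Str(String),
}

impl From<u64> for ParquetValue {
    fn from(v: u64) -> Self {
        ParquetValue::U64(v)
    }
}

impl From<String> for ParquetValue {
    fn from(v: String) -> Self {
        ParquetValue::Str(v)
    }
}

pub trait ParquetSchema {
    fn schema() -> Vec<String>;
    fn get_column(&self, idx: usize) -> ParquetValue;
}

// An illustrative struct; deriving SerializeParquet on it...
#[derive(SerializeParquet)]
struct Checkpoint {
    epoch: u64,
    digest: String,
}

// ...expands to roughly:
//
// impl ParquetSchema for Checkpoint {
//     fn schema() -> Vec<String> {
//         vec!["epoch".to_string(), "digest".to_string()]
//     }
//     fn get_column(&self, idx: usize) -> ParquetValue {
//         if idx == 0 { return self.epoch.clone().into(); }
//         if idx == 1 { return self.digest.clone().into(); }
//         panic!("unsupported column index {:?}", idx);
//     }
// }

Note the design choice: the macro assembles the schema entries and getter arms as plain strings and re-parses them into token streams, rather than interpolating identifiers and indices directly with `quote!`. Both approaches produce the same expansion; the string-building route keeps the macro short at the cost of an intermediate stringly-typed step.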