Simplify tokenstream folding

commit 703a238d02 (parent d68a924bdb)
Author: Dmitry Samoylov
Date:   2020-02-04 17:47:14 +07:00
3 changed files with 30 additions and 88 deletions
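
Every hunk in this commit makes the same change: a chain of
.filter(|x| x.is_some()) / .map(|x| x.unwrap()) / .fold(...) that
concatenated per-field token streams is collapsed into
.filter_map(|x| x) / .collect(). This works because
proc_macro2::TokenStream implements FromIterator<TokenStream>, so
collect() concatenates the streams directly, and the manual fold
(along with the quote::TokenStreamExt import that provided append_all)
is no longer needed. A minimal standalone sketch of the equivalence,
assuming only the proc-macro2 and quote crates (the pieces vector is
illustrative, not code from this repository):

    use proc_macro2::TokenStream;
    use quote::{quote, TokenStreamExt};

    fn main() {
      // Illustrative input: per-field expansion yields Option<TokenStream>,
      // with None for fields that produce no code.
      let pieces: Vec<Option<TokenStream>> = vec![
        Some(quote! { let a = 1; }),
        None,
        Some(quote! { let b = 2; }),
      ];

      // Old shape: drop the Nones, unwrap, then fold with append_all.
      let folded = pieces
        .clone()
        .into_iter()
        .filter(|x| x.is_some())
        .map(|x| x.unwrap())
        .fold(TokenStream::new(), |mut tokens, token| {
          tokens.append_all(token);
          tokens
        });

      // New shape: filter_map keeps the Some values; collect()
      // concatenates them via FromIterator<TokenStream>.
      let collected: TokenStream = pieces.into_iter().filter_map(|x| x).collect();

      assert_eq!(folded.to_string(), collected.to_string());
    }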

File 1 of 3

@@ -2,7 +2,7 @@ use attribute::*;
 use de::build_default_value::build_default_value;
 use field_type::*;
 use proc_macro2::{Span, TokenStream};
-use quote::{ToTokens, TokenStreamExt};
+use quote::ToTokens;
 use std::collections::BTreeMap;
 use syn::DataStruct;
 use syn::Ident;
@@ -24,12 +24,8 @@ pub fn parse(
         None
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let variables: TokenStream = data_struct
     .fields
@@ -191,12 +187,8 @@ pub fn parse(
         None => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut sum, val| {
-      sum.append_all(val);
-      sum
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let field_visitors: TokenStream = data_struct
     .fields
@@ -383,12 +375,8 @@ pub fn parse(
         None => None,
      }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut sum, val| {
-      sum.append_all(val);
-      sum
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let call_visitors: TokenStream = data_struct
     .fields
@@ -873,12 +861,8 @@ pub fn parse(
         None => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut sum, val| {
-      sum.append_all(val);
-      sum
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let attributes_loading: TokenStream = data_struct
     .fields
@@ -1093,12 +1077,8 @@ pub fn parse(
         _ => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut sum, val| {
-      sum.append_all(val);
-      sum
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let set_text: TokenStream = data_struct
     .fields
@@ -1173,12 +1153,8 @@ pub fn parse(
         | None => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let struct_builder: TokenStream = data_struct
     .fields
@@ -1195,12 +1171,8 @@ pub fn parse(
         None
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   quote! {
     use xml::reader::XmlEvent;
@@ -1316,12 +1288,8 @@ fn build_call_visitor(
         None
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   Some(quote! {
     #label_name => {

File 2 of 3

@@ -1,7 +1,6 @@
 use attribute::*;
 use field_type::*;
 use proc_macro2::{Span, TokenStream};
-use quote::TokenStreamExt;
 use std::collections::BTreeMap;
 use syn::DataEnum;
 use syn::Fields;
@@ -36,7 +35,7 @@ pub fn serialize(
         }
       }),
       Fields::Named(ref fields) => {
-        let enum_fields = fields
+        let enum_fields: TokenStream = fields
          .named
          .iter()
          .map(|field| {
@@ -100,12 +99,8 @@ pub fn serialize(
            _ => None,
          }
         })
-        .filter(|x| x.is_some())
-        .map(|x| x.unwrap())
-        .fold(TokenStream::new(), |mut tokens, token| {
-          tokens.append_all(token);
-          tokens
-        });
+        .filter_map(|x| x)
+        .collect();
 
         Some(quote! {
           &#name::#label{..} => {
@@ -210,12 +205,8 @@ pub fn serialize(
         }
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let add_namespaces: TokenStream = namespaces
     .iter()
@@ -224,12 +215,8 @@ pub fn serialize(
        .ns(#prefix, #namespace)
      ))
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   quote! {
     use xml::writer::XmlEvent;

File 3 of 3

@@ -1,7 +1,6 @@
 use attribute::*;
 use field_type::*;
 use proc_macro2::{Span, TokenStream};
-use quote::TokenStreamExt;
 use std::collections::BTreeMap;
 use std::string::ToString;
 use syn::DataStruct;
@@ -225,12 +224,8 @@ pub fn serialize(
        _ => None,
      }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let add_namespaces: TokenStream = namespaces
     .iter()
@@ -239,12 +234,8 @@ pub fn serialize(
        .ns(#prefix, #namespace)
      ))
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let struct_inspector: TokenStream = data_struct
     .fields
@@ -417,12 +408,8 @@ pub fn serialize(
        None => None,
      }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   quote! {
     use xml::writer::XmlEvent;
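
Two asides on the new shape. First, collect() needs a concrete target
type, which is why these bindings carry an explicit ": TokenStream"
annotation, and why the second file adds one to enum_fields once the
type-pinning fold is gone. Second, .filter_map(|x| x) over an iterator
of Options can equivalently be written .flatten(), so an alternative
spelling of the same line would be (illustrative, reusing the pieces
vector from the sketch above, not code from this commit):

    let tokens: TokenStream = pieces.into_iter().flatten().collect();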