diff --git a/yaserde_derive/src/de/expand_struct.rs b/yaserde_derive/src/de/expand_struct.rs
index 37814e5..71d115e 100644
--- a/yaserde_derive/src/de/expand_struct.rs
+++ b/yaserde_derive/src/de/expand_struct.rs
@@ -2,7 +2,7 @@ use attribute::*;
 use de::build_default_value::build_default_value;
 use field_type::*;
 use proc_macro2::{Span, TokenStream};
-use quote::{ToTokens, TokenStreamExt};
+use quote::ToTokens;
 use std::collections::BTreeMap;
 use syn::DataStruct;
 use syn::Ident;
@@ -24,12 +24,8 @@ pub fn parse(
         None
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let variables: TokenStream = data_struct
     .fields
@@ -191,12 +187,8 @@ pub fn parse(
         None => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut sum, val| {
-      sum.append_all(val);
-      sum
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let field_visitors: TokenStream = data_struct
     .fields
@@ -383,12 +375,8 @@ pub fn parse(
         None => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut sum, val| {
-      sum.append_all(val);
-      sum
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let call_visitors: TokenStream = data_struct
     .fields
@@ -873,12 +861,8 @@ pub fn parse(
         None => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut sum, val| {
-      sum.append_all(val);
-      sum
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let attributes_loading: TokenStream = data_struct
     .fields
@@ -1093,12 +1077,8 @@ pub fn parse(
         _ => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut sum, val| {
-      sum.append_all(val);
-      sum
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let set_text: TokenStream = data_struct
     .fields
@@ -1173,12 +1153,8 @@ pub fn parse(
         | None => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let struct_builder: TokenStream = data_struct
     .fields
@@ -1195,12 +1171,8 @@ pub fn parse(
         None
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   quote! {
     use xml::reader::XmlEvent;
@@ -1316,12 +1288,8 @@ fn build_call_visitor(
         None
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   Some(quote! {
     #label_name => {
diff --git a/yaserde_derive/src/ser/expand_enum.rs b/yaserde_derive/src/ser/expand_enum.rs
index f61e40f..3fc1d41 100644
--- a/yaserde_derive/src/ser/expand_enum.rs
+++ b/yaserde_derive/src/ser/expand_enum.rs
@@ -1,7 +1,6 @@
 use attribute::*;
 use field_type::*;
 use proc_macro2::{Span, TokenStream};
-use quote::TokenStreamExt;
 use std::collections::BTreeMap;
 use syn::DataEnum;
 use syn::Fields;
@@ -36,7 +35,7 @@ pub fn serialize(
          }
        }),
       Fields::Named(ref fields) => {
-        let enum_fields = fields
+        let enum_fields: TokenStream = fields
           .named
           .iter()
           .map(|field| {
@@ -100,12 +99,8 @@ pub fn serialize(
              _ => None,
            }
          })
-         .filter(|x| x.is_some())
-         .map(|x| x.unwrap())
-         .fold(TokenStream::new(), |mut tokens, token| {
-           tokens.append_all(token);
-           tokens
-         });
+         .filter_map(|x| x)
+         .collect();
 
        Some(quote! {
          &#name::#label{..} => {
@@ -210,12 +205,8 @@ pub fn serialize(
        }
      }
    })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let add_namespaces: TokenStream = namespaces
     .iter()
     .map(|(prefix, namespace)| Some(quote!(
@@ -224,12 +215,8 @@ pub fn serialize(
       .ns(#prefix, #namespace)
     ))
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   quote! {
     use xml::writer::XmlEvent;
diff --git a/yaserde_derive/src/ser/expand_struct.rs b/yaserde_derive/src/ser/expand_struct.rs
index d80f337..72ddeed 100644
--- a/yaserde_derive/src/ser/expand_struct.rs
+++ b/yaserde_derive/src/ser/expand_struct.rs
@@ -1,7 +1,6 @@
 use attribute::*;
 use field_type::*;
 use proc_macro2::{Span, TokenStream};
-use quote::TokenStreamExt;
 use std::collections::BTreeMap;
 use std::string::ToString;
 use syn::DataStruct;
@@ -225,12 +224,8 @@ pub fn serialize(
         _ => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let add_namespaces: TokenStream = namespaces
     .iter()
     .map(|(prefix, namespace)| Some(quote!(
@@ -239,12 +234,8 @@ pub fn serialize(
       .ns(#prefix, #namespace)
     ))
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   let struct_inspector: TokenStream = data_struct
     .fields
@@ -417,12 +408,8 @@ pub fn serialize(
         None => None,
       }
     })
-    .filter(|x| x.is_some())
-    .map(|x| x.unwrap())
-    .fold(TokenStream::new(), |mut tokens, token| {
-      tokens.append_all(token);
-      tokens
-    });
+    .filter_map(|x| x)
+    .collect();
 
   quote! {
     use xml::writer::XmlEvent;
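Side note on the refactor (not part of the patch): every hunk replaces the same pattern, `.filter(|x| x.is_some()).map(|x| x.unwrap()).fold(..., append_all)` with `.filter_map(|x| x).collect()`. This works because `proc_macro2::TokenStream` implements `FromIterator<TokenStream>`, so collecting an iterator of token streams concatenates them exactly as the fold with `append_all` did; the explicit `let x: TokenStream` annotations added in the diff give `collect()` its target type. Below is a minimal standalone sketch of the equivalence, assuming only the crate's existing proc-macro2 and quote dependencies; the function names are illustrative, not from the patch.

// Old shape: drop the Nones by hand, then fold the streams together.
fn concat_fold(items: Vec<Option<proc_macro2::TokenStream>>) -> proc_macro2::TokenStream {
    use quote::TokenStreamExt;
    items
        .into_iter()
        .filter(|x| x.is_some())
        .map(|x| x.unwrap())
        .fold(proc_macro2::TokenStream::new(), |mut tokens, token| {
            tokens.append_all(token);
            tokens
        })
}

// New shape: filter_map drops the Nones, collect concatenates the rest.
fn concat_collect(items: Vec<Option<proc_macro2::TokenStream>>) -> proc_macro2::TokenStream {
    items.into_iter().filter_map(|x| x).collect()
}

fn main() {
    use quote::quote;
    let items = vec![Some(quote!(struct A;)), None, Some(quote!(struct B;))];
    // Both forms produce the same concatenated token stream.
    assert_eq!(
        concat_fold(items.clone()).to_string(),
        concat_collect(items).to_string()
    );
}

For the `Option` case, `.flatten()` is an equivalent spelling of `.filter_map(|x| x)`; the diff keeps the `filter_map` form, which mirrors the code it replaces most directly.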