update dependencies
This commit is contained in:
parent cb37689656
commit e691fba686
@@ -13,7 +13,7 @@ readme = "../README.md"
 [dependencies]
 yaserde_derive = { version = "0.2", path = "../yaserde_derive", optional = true }
 regex = "0.2"
-xml-rs = "0.7.0"
+xml-rs = "0.8.0"
 log = "0.4.1"
 
 [dev-dependencies]
@@ -11,9 +11,9 @@ documentation = "https://docs.rs/yaserde"
 readme = "../README.md"
 
 [dependencies]
-syn = { version = "0.12.14", features = ["visit", "extra-traits"] }
-proc-macro2 = "0.2.3"
-quote = "0.4.2"
+syn = { version = "0.14.0", features = ["visit", "extra-traits"] }
+proc-macro2 = "0.4.3"
+quote = "0.6.2"
 
 [lib]
 name = "yaserde_derive"
@@ -1,7 +1,7 @@
-use proc_macro2::TokenTreeIter;
-use proc_macro2::TokenNode::*;
-use proc_macro2::Spacing;
-use proc_macro2::Delimiter::Parenthesis;
+use proc_macro2::TokenTree;
+use proc_macro2::Delimiter;
+use proc_macro2::token_stream::IntoIter;
 use std::collections::BTreeMap;
 use syn::Attribute;
@@ -15,13 +15,15 @@ pub struct YaSerdeAttribute {
   pub text: bool,
 }
 
-fn get_value(iter: &mut TokenTreeIter) -> Option<String> {
-  match (iter.next(), iter.next()) {
-    (Some(operator), Some(value)) => match (operator.kind, value.kind) {
-      (Op('=', Spacing::Alone), Literal(l)) => Some(l.to_string().replace("\"", "")),
-      _ => None,
-    },
-    _ => None,
+fn get_value(iter: &mut IntoIter) -> Option<String> {
+  if let (Some(TokenTree::Punct(operator)), Some(TokenTree::Literal(value))) = (iter.next(), iter.next()) {
+    if operator.as_char() == '=' {
+      Some(value.to_string().replace("\"", ""))
+    } else {
+      None
+    }
+  } else {
+    None
   }
 }
@@ -37,39 +39,41 @@ impl YaSerdeAttribute {
     for attr in attrs.iter() {
       let mut attr_iter = attr.clone().tts.into_iter();
       if let Some(token) = attr_iter.next() {
-        if let Group(Parenthesis, token_stream) = token.kind {
-          let mut attr_iter = token_stream.into_iter();
+        if let TokenTree::Group(group) = token {
+          if group.delimiter() == Delimiter::Parenthesis {
+            let mut attr_iter = group.stream().into_iter();
 
-          while let Some(item) = attr_iter.next() {
-            if let Term(term) = item.kind {
-              match term.as_str() {
-                "attribute" => {
-                  attribute = true;
-                }
-                "namespace" => {
-                  if let Some(namespace) = get_value(&mut attr_iter) {
-                    let splitted: Vec<&str> = namespace.split(": ").collect();
-                    if splitted.len() == 2 {
-                      namespaces.insert(splitted[0].to_owned(), splitted[1].to_owned());
-                    }
-                    if splitted.len() == 1 {
-                      namespaces.insert("".to_owned(), splitted[0].to_owned());
+            while let Some(item) = attr_iter.next() {
+              if let TokenTree::Ident(ident) = item {
+                match ident.to_string().as_str() {
+                  "attribute" => {
+                    attribute = true;
+                  }
+                  "namespace" => {
+                    if let Some(namespace) = get_value(&mut attr_iter) {
+                      let splitted: Vec<&str> = namespace.split(": ").collect();
+                      if splitted.len() == 2 {
+                        namespaces.insert(splitted[0].to_owned(), splitted[1].to_owned());
+                      }
+                      if splitted.len() == 1 {
+                        namespaces.insert("".to_owned(), splitted[0].to_owned());
+                      }
+                    }
+                  }
+                  "prefix" => {
+                    prefix = get_value(&mut attr_iter);
+                  }
+                  "rename" => {
+                    rename = get_value(&mut attr_iter);
+                  }
+                  "root" => {
+                    root = get_value(&mut attr_iter);
+                  }
+                  "text" => {
+                    text = true;
+                  }
+                  _ => {}
+                }
-                "prefix" => {
-                  prefix = get_value(&mut attr_iter);
-                }
-                "rename" => {
-                  rename = get_value(&mut attr_iter);
-                }
-                "root" => {
-                  root = get_value(&mut attr_iter);
-                }
-                "text" => {
-                  text = true;
-                }
-                _ => {}
-              }
             }
           }
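For context, a minimal standalone sketch (not part of this commit) of the proc-macro2 0.4 token model that the rewritten get_value and parse code rely on: TokenTree is now a plain enum with Group / Ident / Punct / Literal variants instead of the 0.2 TokenNode kinds. The attribute text below is illustrative only; it assumes proc-macro2 0.4 as pinned above.

extern crate proc_macro2;

use proc_macro2::{Delimiter, TokenStream, TokenTree};
use std::str::FromStr;

fn main() {
    // Token stream comparable to what #[yaserde(rename = "book")] carries.
    let tokens = TokenStream::from_str(r#"(rename = "book")"#).unwrap();

    for tree in tokens {
        // 0.2: token.kind with TokenNode::Group(Parenthesis, stream)
        // 0.4: match TokenTree::Group and ask the group for its delimiter.
        if let TokenTree::Group(group) = tree {
            if group.delimiter() == Delimiter::Parenthesis {
                let mut inner = group.stream().into_iter();
                // 0.2: Term(term) / Op('=', Spacing::Alone) / Literal(l)
                // 0.4: Ident / Punct::as_char() / Literal
                if let Some(TokenTree::Ident(ident)) = inner.next() {
                    assert_eq!(ident.to_string(), "rename");
                }
                if let Some(TokenTree::Punct(punct)) = inner.next() {
                    assert_eq!(punct.as_char(), '=');
                }
                if let Some(TokenTree::Literal(lit)) = inner.next() {
                    assert_eq!(lit.to_string().replace("\"", ""), "book");
                }
            }
        }
    }
}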
@@ -1,19 +1,19 @@
 use attribute::*;
 use field_type::*;
-use quote::Tokens;
+use quote::TokenStreamExt;
 use std::collections::BTreeMap;
 use syn::Fields;
 use syn::Ident;
 use syn::DataEnum;
-use proc_macro2::Span;
+use proc_macro2::{TokenStream, Span};
 
 pub fn parse(
   data_enum: &DataEnum,
   name: &Ident,
   root: &str,
   _namespaces: &BTreeMap<String, String>,
-) -> Tokens {
-  let variables: Tokens = data_enum
+) -> TokenStream {
+  let variables: TokenStream = data_enum
     .variants
     .iter()
     .map(|variant| match variant.fields {
@@ -23,7 +23,7 @@ pub fn parse(
           .named
           .iter()
           .map(|field| {
-            let field_label = field.ident;
+            let field_label = &field.ident;
 
             match get_field_type(field) {
               Some(FieldType::FieldTypeString) => {
@@ -93,7 +93,7 @@ pub fn parse(
               Some(&FieldType::FieldTypeU64) => {
                 build_default_value(&field_label, &quote!{Vec<u64>}, &quote!{vec![]})
               }
-              Some(&FieldType::FieldTypeStruct { struct_name }) => Some(quote!{
+              Some(&FieldType::FieldTypeStruct { ref struct_name }) => Some(quote!{
                 #[allow(unused_mut)]
                 let mut #field_label : Vec<#struct_name> = vec![];
               }),
@@ -110,7 +110,7 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
    .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut sum, val| {
+    .fold(TokenStream::empty(), |mut sum, val| {
       sum.append_all(val);
       sum
     });
@@ -123,12 +123,12 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut sum, val| {
+    .fold(TokenStream::empty(), |mut sum, val| {
       sum.append_all(val);
       sum
     });
 
-  let enum_visitors: Tokens = data_enum
+  let enum_visitors: TokenStream = data_enum
     .variants
     .iter()
     .map(|variant| {
@@ -164,7 +164,7 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut sum, val| {
+    .fold(TokenStream::empty(), |mut sum, val| {
       sum.append_all(val);
       sum
     });
@@ -178,21 +178,21 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut sum, val| {
+    .fold(TokenStream::empty(), |mut sum, val| {
       sum.append_all(val);
       sum
     });
 
-  let match_to_enum: Tokens = data_enum
+  let match_to_enum: TokenStream = data_enum
     .variants
     .iter()
     .map(|variant| {
       let field_attrs = YaSerdeAttribute::parse(&variant.attrs);
       let renamed_label = match field_attrs.rename {
         Some(value) => Ident::new(&format!("{}", value), Span::call_site()),
-        None => variant.ident,
+        None => variant.ident.clone(),
       };
-      let label = variant.ident;
+      let label = &variant.ident;
       let label_name = renamed_label.to_string();
 
       match variant.fields {
@@ -206,7 +206,7 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
@@ -274,9 +274,9 @@ pub fn parse(
 
 fn build_default_value(
   label: &Option<Ident>,
-  field_type: &Tokens,
-  default: &Tokens,
-) -> Option<Tokens> {
+  field_type: &TokenStream,
+  default: &TokenStream,
+) -> Option<TokenStream> {
   Some(quote!{
     #[allow(unused_mut)]
     let mut #label : #field_type = #default;
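A short standalone sketch (not from the commit) of the pattern this port applies everywhere: with quote 0.6 and proc-macro2 0.4, quote! yields a proc_macro2::TokenStream, and fragments are concatenated through quote::TokenStreamExt::append_all instead of quote::Tokens. The field names are illustrative; it assumes the crate versions pinned above.

extern crate proc_macro2;
#[macro_use]
extern crate quote;

use proc_macro2::{Ident, Span, TokenStream};
use quote::TokenStreamExt;

fn main() {
    let fields = vec!["a", "b", "c"];

    // Same shape as the .map(...).fold(TokenStream::empty(), ...) chains in the diff.
    let generated: TokenStream = fields
        .iter()
        .map(|name| {
            let ident = Ident::new(name, Span::call_site());
            quote! { let #ident = 0u32; }
        })
        .fold(TokenStream::empty(), |mut sum, tokens| {
            sum.append_all(tokens);
            sum
        });

    println!("{}", generated);
}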
@@ -1,24 +1,26 @@
 use attribute::*;
 use field_type::*;
-use quote::Tokens;
+use quote::TokenStreamExt;
 use std::collections::BTreeMap;
 use syn::Ident;
 use syn::DataStruct;
-use proc_macro2::Span;
+use proc_macro2::{TokenStream, Span};
 
 pub fn parse(
   data_struct: &DataStruct,
   name: &Ident,
   root: &str,
   namespaces: &BTreeMap<String, String>,
-) -> Tokens {
-  let validate_namespace: Tokens = namespaces
+) -> TokenStream {
+  let validate_namespace: TokenStream = namespaces
     .iter()
     .map(|(_prefix, namespace)| {
       Some(quote!(
 
        let mut found = false;
        println!("{:?}", namespace);
        for (key, value) in namespace {
          println!("{:?}", value);
          if #namespace == value {
            found = true;
          }
@@ -31,16 +33,16 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
 
-  let variables: Tokens = data_struct
+  let variables: TokenStream = data_struct
     .fields
     .iter()
     .map(|field| {
-      let label = field.ident;
+      let label = &field.ident;
       match get_field_type(field) {
         Some(FieldType::FieldTypeString) => {
           build_default_value(&label, &quote!{String}, &quote!{"".to_string()})
@@ -93,7 +95,7 @@ pub fn parse(
         Some(&FieldType::FieldTypeU64) => {
           build_default_value(&label, &quote!{Vec<u64>}, &quote!{vec![]})
         }
-        Some(&FieldType::FieldTypeStruct { struct_name }) => Some(quote!{
+        Some(&FieldType::FieldTypeStruct { ref struct_name }) => Some(quote!{
           #[allow(unused_mut)]
           let mut #label : Vec<#struct_name> = vec![];
         }),
@@ -110,12 +112,12 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut sum, val| {
+    .fold(TokenStream::empty(), |mut sum, val| {
       sum.append_all(val);
       sum
     });
 
-  let field_visitors: Tokens = data_struct
+  let field_visitors: TokenStream = data_struct
     .fields
     .iter()
     .map(|field| {
@@ -123,7 +125,7 @@ pub fn parse(
       let label_name = if let Some(value) = field_attrs.rename {
         Ident::new(&format!("{}", value), Span::call_site()).to_string()
       } else {
-        field.ident.unwrap().to_string()
+        field.ident.clone().unwrap().to_string()
       };
 
       let visitor_label = Ident::new(&format!("__Visitor{}", label_name), Span::call_site());
@@ -213,8 +215,8 @@ pub fn parse(
         Some(&FieldType::FieldTypeU64) => {
           build_declare_visitor(&quote!{u64}, &quote!{visit_u64}, &visitor_label)
         }
-        Some(&FieldType::FieldTypeStruct { struct_name }) => {
-          let struct_ident = Ident::new(&format!("{}", struct_name), Span::def_site());
+        Some(&FieldType::FieldTypeStruct { ref struct_name }) => {
+          let struct_ident = Ident::new(&format!("{}", struct_name), Span::call_site());
           Some(quote!{
             #[allow(non_snake_case, non_camel_case_types)]
             struct #visitor_label;
@@ -231,17 +233,17 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut sum, val| {
+    .fold(TokenStream::empty(), |mut sum, val| {
       sum.append_all(val);
       sum
     });
 
-  let call_visitors: Tokens = data_struct
+  let call_visitors: TokenStream = data_struct
     .fields
     .iter()
     .map(|field| {
       let field_attrs = YaSerdeAttribute::parse(&field.attrs);
-      let label = field.ident;
+      let label = &field.ident;
 
       if field_attrs.attribute {
         return None;
@@ -250,7 +252,7 @@ pub fn parse(
       let label_name = if let Some(value) = field_attrs.rename {
         Ident::new(&format!("{}", value), Span::call_site()).to_string()
       } else {
-        field.ident.unwrap().to_string()
+        field.ident.clone().unwrap().to_string()
       };
 
       let visitor_label = Ident::new(&format!("__Visitor{}", label_name), Span::call_site());
@@ -493,8 +495,8 @@ pub fn parse(
             &label_name,
           )
         }
-        Some(&FieldType::FieldTypeStruct { struct_name }) => {
-          let struct_ident = Ident::new(&format!("{}", struct_name), Span::def_site());
+        Some(&FieldType::FieldTypeStruct { ref struct_name }) => {
+          let struct_ident = Ident::new(&format!("{}", struct_name), Span::call_site());
           Some(quote!{
             #label_name => {
               reader.set_map_value();
@@ -518,12 +520,12 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut sum, val| {
+    .fold(TokenStream::empty(), |mut sum, val| {
       sum.append_all(val);
       sum
     });
 
-  let attributes_loading: Tokens = data_struct
+  let attributes_loading: TokenStream = data_struct
     .fields
     .iter()
     .map(|field| {
@@ -532,11 +534,11 @@ pub fn parse(
         return None;
       }
 
-      let label = field.ident;
+      let label = &field.ident;
       let label_name = if let Some(value) = field_attrs.rename {
         Ident::new(&format!("{}", value), Span::call_site()).to_string()
       } else {
-        field.ident.unwrap().to_string()
+        field.ident.clone().unwrap().to_string()
       };
 
       let visitor_label = Ident::new(&format!("__Visitor{}", label_name), Span::call_site());
@@ -600,16 +602,16 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut sum, val| {
+    .fold(TokenStream::empty(), |mut sum, val| {
       sum.append_all(val);
       sum
     });
 
-  let set_text: Tokens = data_struct
+  let set_text: TokenStream = data_struct
     .fields
     .iter()
     .map(|field| {
-      let label = field.ident;
+      let label = &field.ident;
       let field_attrs = YaSerdeAttribute::parse(&field.attrs);
 
       match get_field_type(field) {
@@ -669,16 +671,16 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
 
-  let struct_builder: Tokens = data_struct
+  let struct_builder: TokenStream = data_struct
     .fields
     .iter()
     .map(|field| {
-      let label = field.ident;
+      let label = &field.ident;
 
       if get_field_type(field).is_some() {
         Some(quote!{
@@ -690,7 +692,7 @@ pub fn parse(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
@@ -755,9 +757,9 @@ pub fn parse(
 
 fn build_default_value(
   label: &Option<Ident>,
-  field_type: &Tokens,
-  default: &Tokens,
-) -> Option<Tokens> {
+  field_type: &TokenStream,
+  default: &TokenStream,
+) -> Option<TokenStream> {
   Some(quote!{
     #[allow(unused_mut)]
     let mut #label : #field_type = #default;
@@ -765,10 +767,10 @@ fn build_default_value(
 }
 
 fn build_declare_visitor(
-  field_type: &Tokens,
-  visitor: &Tokens,
+  field_type: &TokenStream,
+  visitor: &TokenStream,
   visitor_label: &Ident,
-) -> Option<Tokens> {
+) -> Option<TokenStream> {
   Some(quote!{
     #[allow(non_snake_case, non_camel_case_types)]
     struct #visitor_label;
@@ -783,13 +785,13 @@ fn build_declare_visitor(
 }
 
 fn build_call_visitor(
-  field_type: &Tokens,
+  field_type: &TokenStream,
   visitor: &Ident,
-  action: &Tokens,
+  action: &TokenStream,
   visitor_label: &Ident,
   label: &Option<Ident>,
   label_name: &str,
-) -> Option<Tokens> {
+) -> Option<TokenStream> {
   Some(quote!{
     #label_name => {
       let visitor = #visitor_label{};
@@ -820,9 +822,9 @@ fn build_call_visitor(
 fn build_call_visitor_for_attribute(
   label: &Option<Ident>,
   label_name: &str,
-  visitor: &Tokens,
+  visitor: &TokenStream,
   visitor_label: &Ident,
-) -> Option<Tokens> {
+) -> Option<TokenStream> {
   Some(quote!{
     for attr in attributes {
       if attr.name.local_name == #label_name {
@@ -839,8 +841,8 @@ fn build_call_visitor_for_attribute(
 fn build_set_text_to_value(
   field_attrs: &YaSerdeAttribute,
   label: &Option<Ident>,
-  action: &Tokens,
-) -> Option<Tokens> {
+  action: &TokenStream,
+) -> Option<TokenStream> {
   if field_attrs.text {
     Some(quote!{
       #label = #action;
@@ -2,12 +2,11 @@ pub mod expand_enum;
 pub mod expand_struct;
 
 use attribute;
-use proc_macro2::Span;
-use quote;
+use proc_macro2::{TokenStream, Span};
 use syn;
 use syn::Ident;
 
-pub fn expand_derive_deserialize(ast: &syn::DeriveInput) -> Result<quote::Tokens, String> {
+pub fn expand_derive_deserialize(ast: &syn::DeriveInput) -> Result<TokenStream, String> {
   let name = &ast.ident;
   let attrs = &ast.attrs;
   let data = &ast.data;
@@ -27,7 +26,7 @@ pub fn expand_derive_deserialize(ast: &syn::DeriveInput) -> Result<quote::Tokens
 
   let dummy_const = Ident::new(
     &format!("_IMPL_YA_DESERIALIZE_FOR_{}", name),
-    Span::def_site(),
+    Span::call_site(),
   );
 
   let generated = quote! {
@@ -20,7 +20,7 @@ pub enum FieldType {
 
 impl FieldType {
   fn from_ident(t: &syn::PathSegment) -> Option<FieldType> {
-    match t.ident.as_ref() {
+    match t.ident.to_string().as_str() {
       "String" => Some(FieldType::FieldTypeString),
       "bool" => Some(FieldType::FieldTypeBool),
       "i8" => Some(FieldType::FieldTypeI8),
@@ -42,7 +42,7 @@ impl FieldType {
         }
       }),
       _struct_name => Some(FieldType::FieldTypeStruct {
-        struct_name: t.ident,
+        struct_name: t.ident.clone(),
       }),
     }
   }
@@ -64,7 +64,7 @@ fn get_vec_type(t: &syn::PathSegment) -> Option<syn::Ident> {
     if let syn::GenericArgument::Type(ref argument) = *tt {
       if let Path(ref path2) = *argument {
         if let Some(Pair::End(ttt)) = path2.path.segments.first() {
-          return Some(ttt.ident);
+          return Some(ttt.ident.clone());
        }
      }
    }
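A rough sketch (not from the commit) of why the ident handling in field_type.rs changed: with syn 0.14, syn::Ident is a re-export of proc_macro2::Ident, which no longer derefs to &str and is not Copy, so it is compared through to_string() and cloned when stored. It assumes syn 0.14 / proc-macro2 0.4 as pinned above; the names are illustrative.

extern crate proc_macro2;
extern crate syn;

use proc_macro2::Span;
use syn::Ident;

fn main() {
    // syn 0.12: ident.as_ref() yielded a &str directly.
    // syn 0.14: go through to_string(), and clone() if the Ident must be kept.
    let ident = Ident::new("String", Span::call_site());

    let kind = match ident.to_string().as_str() {
        "String" => "string field",
        "bool" => "bool field",
        _ => "struct field",
    };

    // Storing the ident elsewhere now needs an explicit clone.
    let stored = ident.clone();
    println!("{} -> {}", stored, kind);
}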
@@ -1,28 +1,28 @@
 use attribute::*;
 use field_type::*;
-use quote::Tokens;
+use quote::TokenStreamExt;
 use std::collections::BTreeMap;
 use syn::Fields;
 use syn::Ident;
 use syn::DataEnum;
-use proc_macro2::Span;
+use proc_macro2::{TokenStream, Span};
 
 pub fn serialize(
   data_enum: &DataEnum,
   name: &Ident,
   root: &str,
   namespaces: &BTreeMap<String, String>,
-) -> Tokens {
-  let write_enum_content: Tokens = data_enum
+) -> TokenStream {
+  let write_enum_content: TokenStream = data_enum
     .variants
     .iter()
     .map(|variant| {
       let variant_attrs = YaSerdeAttribute::parse(&variant.attrs);
       let renamed_label = match variant_attrs.rename {
         Some(value) => Ident::new(&format!("{}", value), Span::call_site()),
-        None => variant.ident,
+        None => variant.ident.clone(),
       };
-      let label = variant.ident;
+      let label = &variant.ident;
       let label_name = if let Some(prefix) = variant_attrs.prefix {
         prefix + ":" + renamed_label.to_string().as_ref()
       } else {
@@ -46,7 +46,7 @@ pub fn serialize(
             return None;
           }
 
-          let field_label = field.ident;
+          let field_label = &field.ident;
           if field_attrs.text {
             return Some(quote!(
               let data_event = XmlEvent::characters(&self.#field_label);
@@ -56,7 +56,7 @@ pub fn serialize(
 
           let renamed_field_label = match field_attrs.rename {
             Some(value) => Some(Ident::new(&format!("{}", value), Span::call_site())),
-            None => field.ident,
+            None => field.ident.clone(),
           };
           let field_label_name = renamed_field_label.unwrap().to_string();
 
@@ -116,7 +116,7 @@ pub fn serialize(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
@@ -138,12 +138,12 @@ pub fn serialize(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
 
-  let add_namespaces: Tokens = namespaces
+  let add_namespaces: TokenStream = namespaces
     .iter()
     .map(|(prefix, namespace)| {
       Some(quote!(
@@ -152,7 +152,7 @@ pub fn serialize(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
@@ -1,10 +1,10 @@
 use attribute::*;
 use field_type::*;
-use quote::Tokens;
+use quote::TokenStreamExt;
 use std::collections::BTreeMap;
 use syn::Ident;
 use syn::DataStruct;
-use proc_macro2::Span;
+use proc_macro2::{TokenStream, Span};
 use std::string::ToString;
 
 pub fn serialize(
@@ -12,8 +12,8 @@ pub fn serialize(
   name: &Ident,
   root: &str,
   namespaces: &BTreeMap<String, String>,
-) -> Tokens {
-  let build_attributes: Tokens = data_struct
+) -> TokenStream {
+  let build_attributes: TokenStream = data_struct
     .fields
     .iter()
     .map(|field| {
@@ -24,9 +24,9 @@ pub fn serialize(
 
       let renamed_label = match field_attrs.rename {
         Some(value) => Some(Ident::new(&format!("{}", value), Span::call_site())),
-        None => field.ident,
+        None => field.ident.clone(),
       };
-      let label = field.ident;
+      let label = &field.ident;
       let label_name = if let Some(prefix) = field_attrs.prefix {
         prefix + ":" + renamed_label.unwrap().to_string().as_ref()
       } else {
@@ -75,12 +75,12 @@ pub fn serialize(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
 
-  let add_namespaces: Tokens = namespaces
+  let add_namespaces: TokenStream = namespaces
     .iter()
     .map(|(prefix, namespace)| {
       Some(quote!(
@@ -89,12 +89,12 @@ pub fn serialize(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
 
-  let struct_inspector: Tokens = data_struct
+  let struct_inspector: TokenStream = data_struct
     .fields
     .iter()
     .map(|field| {
@@ -103,7 +103,7 @@ pub fn serialize(
         return None;
       }
 
-      let label = field.ident;
+      let label = &field.ident;
       if field_attrs.text {
         return Some(quote!(
           let data_event = XmlEvent::characters(&self.#label);
@@ -113,7 +113,7 @@ pub fn serialize(
 
       let renamed_label = match field_attrs.rename {
         Some(value) => Some(Ident::new(&format!("{}", value), Span::call_site())),
-        None => field.ident,
+        None => field.ident.clone(),
       };
 
       let label_name = if let Some(prefix) = field_attrs.prefix {
@@ -220,7 +220,7 @@ pub fn serialize(
     })
     .filter(|x| x.is_some())
     .map(|x| x.unwrap())
-    .fold(Tokens::new(), |mut tokens, token| {
+    .fold(TokenStream::empty(), |mut tokens, token| {
       tokens.append_all(token);
       tokens
     });
@@ -2,12 +2,11 @@ pub mod expand_enum;
 pub mod expand_struct;
 
 use attribute;
-use proc_macro2::Span;
-use quote;
+use proc_macro2::{TokenStream, Span};
 use syn;
 use syn::Ident;
 
-pub fn expand_derive_serialize(ast: &syn::DeriveInput) -> Result<quote::Tokens, String> {
+pub fn expand_derive_serialize(ast: &syn::DeriveInput) -> Result<TokenStream, String> {
   let name = &ast.ident;
   let attrs = &ast.attrs;
   let data = &ast.data;
@@ -33,7 +32,7 @@ pub fn expand_derive_serialize(ast: &syn::DeriveInput) -> Result<quote::Tokens,
 
   let dummy_const = Ident::new(
     &format!("_IMPL_YA_SERIALIZE_FOR_{}", name),
-    Span::def_site(),
+    Span::call_site(),
  );
 
   let generated = quote! {