Clean up Vec<TokenTree> in tests
diff --git a/tests/test_derive_input.rs b/tests/test_derive_input.rs
index b88c28a..16f195b 100644
--- a/tests/test_derive_input.rs
+++ b/tests/test_derive_input.rs
@@ -4,9 +4,11 @@
extern crate proc_macro2;
use syn::*;
-use proc_macro2::{TokenNode, TokenTree, Spacing, Delimiter, Literal, Term};
+use proc_macro2::{TokenNode, TokenTree, TokenStream, Spacing, Delimiter, Literal, Term};
use proc_macro2::Delimiter::{Parenthesis, Brace};
+use std::iter::FromIterator;
+
fn op(c: char) -> TokenTree {
proc_macro2::TokenTree {
span: Default::default(),
@@ -74,13 +76,13 @@
pound_token: Default::default(),
style: AttrStyle::Outer,
path: "derive".into(),
- tts: vec![
+ tts: TokenStream::from_iter(vec![
delimited(Parenthesis, vec![
word("Debug"),
op(','),
word("Clone"),
]),
- ],
+ ]),
is_sugared_doc: false,
}],
generics: Generics::default(),
@@ -180,10 +182,10 @@
pound_token: Default::default(),
style: AttrStyle::Outer,
path: "doc".into(),
- tts: vec![
+ tts: TokenStream::from_iter(vec![
op('='),
lit(Literal::doccomment("/// See the std::result module documentation for details.")),
- ],
+ ]),
is_sugared_doc: true,
},
Attribute {
@@ -191,7 +193,7 @@
pound_token: Default::default(),
style: AttrStyle::Outer,
path: "must_use".into(),
- tts: vec![],
+ tts: TokenStream::empty(),
is_sugared_doc: false,
},
],
@@ -351,7 +353,7 @@
PathSegment::from("identity"),
].into(),
},
- tts: vec![
+ tts: TokenStream::from_iter(vec![
word("fn"),
word("main"),
delimited(Parenthesis, vec![]),
@@ -366,7 +368,7 @@
]),
op(';'),
]),
- ],
+ ]),
is_sugared_doc: false,
}],
generics: Generics::default(),
@@ -404,11 +406,11 @@
PathSegment::from("inert"),
].into(),
},
- tts: vec![
+ tts: TokenStream::from_iter(vec![
op('<'),
word("T"),
op('>'),
- ],
+ ]),
is_sugared_doc: false,
}],
generics: Generics::default(),
@@ -447,7 +449,7 @@
PathSegment::from("self"),
].into(),
},
- tts: vec![],
+ tts: TokenStream::empty(),
is_sugared_doc: false,
}],
generics: Generics::default(),
diff --git a/tests/test_generics.rs b/tests/test_generics.rs
index 632fb86..c30751b 100644
--- a/tests/test_generics.rs
+++ b/tests/test_generics.rs
@@ -9,7 +9,7 @@
extern crate quote;
extern crate proc_macro2;
-use proc_macro2::{Span, Term};
+use proc_macro2::{Span, Term, TokenStream};
mod common;
@@ -38,7 +38,7 @@
pound_token: Default::default(),
style: AttrStyle::Outer,
path: "may_dangle".into(),
- tts: vec![],
+ tts: TokenStream::empty(),
is_sugared_doc: false,
}],
ident: "T".into(),
diff --git a/tests/test_token_trees.rs b/tests/test_token_trees.rs
index cc55f2b..6d8b217 100644
--- a/tests/test_token_trees.rs
+++ b/tests/test_token_trees.rs
@@ -5,7 +5,7 @@
extern crate syn;
extern crate proc_macro2;
-use syn::{Lit, Macro};
+use syn::{Lit, Attribute, AttrStyle};
use proc_macro2::{TokenNode, TokenTree, Spacing, Delimiter, TokenStream, Term};
use proc_macro2::Delimiter::*;
@@ -78,21 +78,21 @@
],
)];
- fn wrap(tts: Vec<TokenTree>) -> Macro {
- Macro {
- path: "tts".into(),
- bang_token: Default::default(),
- tokens: tts,
+ fn wrap(tts: TokenStream) -> Attribute {
+ Attribute {
+            pound_token: Default::default(),
+            style: AttrStyle::Outer,
+ bracket_token: Default::default(),
+ path: "test".into(),
+            tts,
+ is_sugared_doc: false,
}
}
- let result = raw.parse::<TokenStream>().unwrap()
- .into_iter()
- .collect();
- let result = wrap(result);
- let expected = wrap(expected);
+ let result = wrap(raw.parse().unwrap());
+ let expected = wrap(expected.into_iter().collect());
if result != expected {
- panic!("{:#?}\n!=\n{:#?}", result.tokens, expected.tokens);
+ panic!("{:#?}\n!=\n{:#?}", result.tts, expected.tts);
}
}