Replace all uses of Vec<TokenTree>
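
Attribute::tts becomes a proc_macro2::TokenStream, Macro::tokens becomes the
single delimited TokenTree, and the extra-traits SliceTokenTreeHelper is
replaced by a TokenStreamHelper that compares and hashes a TokenStream by
iterating its trees. A rough sketch of how calling code adapts to the new
field types (the helper names below are illustrative only, not part of this
patch):

    extern crate proc_macro2;
    use proc_macro2::{TokenStream, TokenTree};

    // A TokenStream cannot be indexed like a Vec; clone and iterate instead,
    // mirroring what Attribute::meta_item does in attr.rs below.
    fn first_tt(tts: &TokenStream) -> Option<TokenTree> {
        tts.clone().into_iter().next()
    }

    // Building the field from individual trees goes through FromIterator,
    // as in the doc-comment parser: vec![eq(), lit].into_iter().collect().
    fn collect_tts(trees: Vec<TokenTree>) -> TokenStream {
        trees.into_iter().collect()
    }
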
diff --git a/src/attr.rs b/src/attr.rs
index 4171553..875f452 100644
--- a/src/attr.rs
+++ b/src/attr.rs
@@ -3,12 +3,12 @@
 
 use std::iter;
 
-use proc_macro2::{Delimiter, TokenNode, TokenTree, Spacing};
+use proc_macro2::{Delimiter, TokenNode, TokenTree, TokenStream, Spacing};
 
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
 #[cfg(feature = "extra-traits")]
-use mac::SliceTokenTreeHelper;
+use mac::TokenStreamHelper;
 
 ast_struct! {
     /// Doc-comments are promoted to attributes that have `is_sugared_doc` = true
@@ -27,7 +27,7 @@
         ///
         /// E.g. `( Copy )` in `#[derive(Copy)]`
         /// E.g. `x < 5` in `#[crate::precondition x < 5]`
-        pub tts: Vec<TokenTree>,
+        pub tts: TokenStream,
 
         pub is_sugared_doc: bool,
     }
@@ -43,7 +43,7 @@
             && self.pound_token == other.pound_token
             && self.bracket_token == other.bracket_token
             && self.path == other.path
-            && SliceTokenTreeHelper(&self.tts) == SliceTokenTreeHelper(&other.tts)
+            && TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
             && self.is_sugared_doc == other.is_sugared_doc
     }
 }
@@ -57,7 +57,7 @@
         self.pound_token.hash(state);
         self.bracket_token.hash(state);
         self.path.hash(state);
-        SliceTokenTreeHelper(&self.tts).hash(state);
+        TokenStreamHelper(&self.tts).hash(state);
         self.is_sugared_doc.hash(state);
     }
 }
@@ -75,12 +75,14 @@
             return Some(MetaItem::Term(*name));
         }
 
-        if self.tts.len() == 1 {
-            if let TokenNode::Group(Delimiter::Parenthesis, ref ts) = self.tts[0].kind {
+        let tts = self.tts.clone().into_iter().collect::<Vec<_>>();
+
+        if tts.len() == 1 {
+            if let TokenNode::Group(Delimiter::Parenthesis, ref ts) = tts[0].kind {
                 let tokens = ts.clone().into_iter().collect::<Vec<_>>();
                 if let Some(nested_meta_items) = list_of_nested_meta_items_from_tokens(&tokens) {
                     return Some(MetaItem::List(MetaItemList {
-                        paren_token: token::Paren(self.tts[0].span),
+                        paren_token: token::Paren(tts[0].span),
                         ident: *name,
                         nested: nested_meta_items,
                     }));
@@ -88,15 +90,15 @@
             }
         }
 
-        if self.tts.len() == 2 {
-            if let TokenNode::Op('=', Spacing::Alone) = self.tts[0].kind {
-                if let TokenNode::Literal(ref lit) = self.tts[1].kind {
+        if tts.len() == 2 {
+            if let TokenNode::Op('=', Spacing::Alone) = tts[0].kind {
+                if let TokenNode::Literal(ref lit) = tts[1].kind {
                     return Some(MetaItem::NameValue(MetaNameValue {
                         ident: *name,
-                        eq_token: Token![=]([self.tts[0].span]),
+                        eq_token: Token![=]([tts[0].span]),
                         lit: Lit {
                             value: LitKind::Other(lit.clone()),
-                            span: self.tts[1].span,
+                            span: tts[1].span,
                         },
                     }));
                 }
@@ -345,7 +347,7 @@
                 bang: punct!(!) >>
                 path_and_tts: brackets!(tuple!(
                     call!(::Path::parse_mod_style),
-                    call!(mac::parsing::parse_tt_list)
+                    syn!(TokenStream)
                 )) >>
                 ({
                     let ((path, tts), bracket) = path_and_tts;
@@ -369,7 +371,7 @@
                     tts: vec![
                         eq(),
                         lit,
-                    ],
+                    ].into_iter().collect(),
                     is_sugared_doc: true,
                     pound_token: <Token![#]>::default(),
                     bracket_token: token::Bracket::default(),
@@ -382,7 +384,7 @@
                 pound: punct!(#) >>
                 path_and_tts: brackets!(tuple!(
                     call!(::Path::parse_mod_style),
-                    call!(mac::parsing::parse_tt_list)
+                    syn!(TokenStream)
                 )) >>
                 ({
                     let ((path, tts), bracket) = path_and_tts;
@@ -406,7 +408,7 @@
                     tts: vec![
                         eq(),
                         lit,
-                    ],
+                    ].into_iter().collect(),
                     is_sugared_doc: true,
                     pound_token: <Token![#]>::default(),
                     bracket_token: token::Bracket::default(),
@@ -459,7 +461,7 @@
             }
             self.bracket_token.surround(tokens, |tokens| {
                 self.path.to_tokens(tokens);
-                tokens.append_all(&self.tts);
+                tokens.append_all(&self.tts.clone().into_iter().collect::<Vec<_>>());
             });
         }
     }
diff --git a/src/expr.rs b/src/expr.rs
index ba1fc02..b244de2 100644
--- a/src/expr.rs
+++ b/src/expr.rs
@@ -1847,10 +1847,10 @@
             Macro {
                 path: what,
                 bang_token: bang,
-                tokens: vec![proc_macro2::TokenTree {
+                tokens: proc_macro2::TokenTree {
                     span: (data.1).0,
                     kind: TokenNode::Group(Delimiter::Brace, data.0),
-                }],
+                },
             },
             match semi {
                 Some(semi) => MacStmtStyle::Semicolon(semi),
diff --git a/src/item.rs b/src/item.rs
index 9549823..17fbe6c 100644
--- a/src/item.rs
+++ b/src/item.rs
@@ -527,7 +527,7 @@
             mac: Macro {
                 path: what,
                 bang_token: bang,
-                tokens: vec![body],
+                tokens: body,
             },
         })
     ));
@@ -1565,7 +1565,7 @@
             self.mac.path.to_tokens(tokens);
             self.mac.bang_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
-            tokens.append_all(&self.mac.tokens);
+            self.mac.tokens.to_tokens(tokens);
             if !self.mac.is_braced() {
                 <Token![;]>::default().to_tokens(tokens);
             }
diff --git a/src/mac.rs b/src/mac.rs
index b2a6a19..12c4e0d 100644
--- a/src/mac.rs
+++ b/src/mac.rs
@@ -1,6 +1,6 @@
 use super::*;
 
-use proc_macro2::{TokenNode, TokenTree, Delimiter};
+use proc_macro2::{TokenNode, TokenTree, TokenStream, Delimiter};
 
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
@@ -12,7 +12,7 @@
     pub struct Macro #manual_extra_traits {
         pub path: Path,
         pub bang_token: Token![!],
-        pub tokens: Vec<TokenTree>,
+        pub tokens: TokenTree,
     }
 }
 
@@ -24,7 +24,7 @@
     fn eq(&self, other: &Self) -> bool {
         self.path == other.path
             && self.bang_token == other.bang_token
-            && SliceTokenTreeHelper(&self.tokens) == SliceTokenTreeHelper(&other.tokens)
+            && TokenTreeHelper(&self.tokens) == TokenTreeHelper(&other.tokens)
     }
 }
 
@@ -35,16 +35,13 @@
     {
         self.path.hash(state);
         self.bang_token.hash(state);
-        SliceTokenTreeHelper(&self.tokens).hash(state);
+        TokenTreeHelper(&self.tokens).hash(state);
     }
 }
 
 impl Macro {
     pub fn is_braced(&self) -> bool {
-        match self.tokens.last() {
-            Some(t) => is_braced(t),
-            None => false,
-        }
+        is_braced(&self.tokens)
     }
 }
 
@@ -140,16 +137,18 @@
 }
 
 #[cfg(feature = "extra-traits")]
-pub struct SliceTokenTreeHelper<'a>(pub &'a [TokenTree]);
+pub struct TokenStreamHelper<'a>(pub &'a TokenStream);
 
 #[cfg(feature = "extra-traits")]
-impl<'a> PartialEq for SliceTokenTreeHelper<'a> {
+impl<'a> PartialEq for TokenStreamHelper<'a> {
     fn eq(&self, other: &Self) -> bool {
-        if self.0.len() != other.0.len() {
+        let left = self.0.clone().into_iter().collect::<Vec<_>>();
+        let right = other.0.clone().into_iter().collect::<Vec<_>>();
+        if left.len() != right.len() {
             return false;
         }
-        for (a, b) in self.0.iter().zip(other.0) {
-            if TokenTreeHelper(a) != TokenTreeHelper(b) {
+        for (a, b) in left.into_iter().zip(right) {
+            if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
                 return false;
             }
         }
@@ -158,11 +157,12 @@
 }
 
 #[cfg(feature = "extra-traits")]
-impl<'a> Hash for SliceTokenTreeHelper<'a> {
+impl<'a> Hash for TokenStreamHelper<'a> {
     fn hash<H: Hasher>(&self, state: &mut H) {
-        self.0.len().hash(state);
-        for tt in self.0 {
-            TokenTreeHelper(tt).hash(state);
+        let tts = self.0.clone().into_iter().collect::<Vec<_>>();
+        tts.len().hash(state);
+        for tt in tts {
+            TokenTreeHelper(&tt).hash(state);
         }
     }
 }
@@ -184,15 +184,11 @@
             (Macro {
                 path: what,
                 bang_token: bang,
-                tokens: vec![body],
+                tokens: body,
             })
         ));
     }
 
-    pub fn parse_tt_list(input: Cursor) -> PResult<Vec<TokenTree>> {
-        Ok((Cursor::empty(), input.token_stream().into_iter().collect()))
-    }
-
     pub fn parse_tt_delimited(input: Cursor) -> PResult<TokenTree> {
         match input.token_tree() {
             Some((rest, token @ TokenTree { kind: TokenNode::Group(..), .. })) => {
@@ -212,7 +208,7 @@
         fn to_tokens(&self, tokens: &mut Tokens) {
             self.path.to_tokens(tokens);
             self.bang_token.to_tokens(tokens);
-            tokens.append_all(&self.tokens);
+            self.tokens.to_tokens(tokens);
         }
     }
 }