David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 1 | use proc_macro2::{TokenNode, TokenStream, TokenTree, Delimiter}; |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 2 | use cursor::Cursor; |
David Tolnay | 203557a | 2017-12-27 23:59:33 -0500 | [diff] [blame] | 3 | use parse_error; |
| 4 | use synom::PResult; |
David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 5 | use MacroDelimiter; |
| 6 | use token::{Paren, Brace, Bracket}; |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 7 | |
David Tolnay | c43b44e | 2017-12-30 23:55:54 -0500 | [diff] [blame^] | 8 | #[cfg(feature = "extra-traits")] |
| 9 | use std::hash::{Hash, Hasher}; |
| 10 | |
David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 11 | pub fn delimited(input: Cursor) -> PResult<(MacroDelimiter, TokenStream)> { |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 12 | match input.token_tree() { |
| 13 | Some(( |
| 14 | rest, |
David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 15 | TokenTree { |
| 16 | span, |
| 17 | kind: TokenNode::Group(delimiter, tts), |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 18 | }, |
David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 19 | )) => { |
| 20 | let delimiter = match delimiter { |
| 21 | Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)), |
| 22 | Delimiter::Brace => MacroDelimiter::Brace(Brace(span)), |
| 23 | Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)), |
| 24 | Delimiter::None => return parse_error(), |
| 25 | }; |
| 26 | Ok((rest, (delimiter, tts))) |
| 27 | } |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 28 | _ => parse_error(), |
| 29 | } |
| 30 | } |
| 31 | |
#[cfg(feature = "full")]
/// Parse one brace-delimited group `{...}` off the front of `input`.
///
/// Returns a `Brace` token carrying the group's span plus the tokens inside;
/// anything else (including other delimiters) is a parse error.
pub fn braced(input: Cursor) -> PResult<(Brace, TokenStream)> {
    if let Some((rest, tt)) = input.token_tree() {
        let TokenTree { span, kind } = tt;
        if let TokenNode::Group(Delimiter::Brace, tts) = kind {
            return Ok((rest, (Brace(span), tts)));
        }
    }
    parse_error()
}
| 45 | |
#[cfg(feature = "full")]
/// Parse one parenthesized group `(...)` off the front of `input`.
///
/// Returns a `Paren` token carrying the group's span plus the tokens inside;
/// anything else (including other delimiters) is a parse error.
pub fn parenthesized(input: Cursor) -> PResult<(Paren, TokenStream)> {
    if let Some((rest, tt)) = input.token_tree() {
        let TokenTree { span, kind } = tt;
        if let TokenNode::Group(Delimiter::Parenthesis, tts) = kind {
            return Ok((rest, (Paren(span), tts)));
        }
    }
    parse_error()
}
David Tolnay | c43b44e | 2017-12-30 23:55:54 -0500 | [diff] [blame^] | 59 | |
/// Newtype over a borrowed `TokenTree` providing the `PartialEq`/`Hash`
/// impls defined below, since the underlying type does not supply them
/// itself. Spans are deliberately ignored by those impls.
#[cfg(feature = "extra-traits")]
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);
| 62 | |
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenTreeHelper<'a> {
    /// Structural token equality: delimiters, operator chars and spacing,
    /// literal text, and term text must all match; spans are ignored.
    /// Groups are compared recursively, element by element.
    fn eq(&self, other: &Self) -> bool {
        use proc_macro2::Spacing;

        match (&self.0.kind, &other.0.kind) {
            (&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => {
                // Delimiter has no PartialEq of its own; compare variants.
                let same_delim = match (d1, d2) {
                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
                    | (Delimiter::Brace, Delimiter::Brace)
                    | (Delimiter::Bracket, Delimiter::Bracket)
                    | (Delimiter::None, Delimiter::None) => true,
                    _ => false,
                };
                if !same_delim {
                    return false;
                }

                // Walk both streams in lockstep; any length mismatch or
                // unequal pair ends the comparison.
                let mut left = s1.clone().into_iter();
                let mut right = s2.clone().into_iter();
                loop {
                    match (left.next(), right.next()) {
                        (None, None) => return true,
                        (Some(a), Some(b)) => {
                            if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
                                return false;
                            }
                        }
                        _ => return false,
                    }
                }
            }
            (&TokenNode::Op(c1, sp1), &TokenNode::Op(c2, sp2)) => {
                if c1 != c2 {
                    return false;
                }
                // Spacing has no PartialEq of its own; compare variants.
                match (sp1, sp2) {
                    (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                    _ => false,
                }
            }
            // Literals compare by their textual form.
            (&TokenNode::Literal(ref l1), &TokenNode::Literal(ref l2)) => {
                l1.to_string() == l2.to_string()
            }
            (&TokenNode::Term(ref t1), &TokenNode::Term(ref t2)) => t1.as_str() == t2.as_str(),
            _ => false,
        }
    }
}
| 106 | |
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenTreeHelper<'a> {
    /// Hash consistent with the `PartialEq` above: each variant is tagged
    /// with a discriminant byte, then its span-independent payload is fed
    /// to the hasher.
    fn hash<H: Hasher>(&self, h: &mut H) {
        use proc_macro2::Spacing;

        match self.0.kind {
            TokenNode::Group(delim, ref stream) => {
                0u8.hash(h);
                let delim_tag: u8 = match delim {
                    Delimiter::Parenthesis => 0,
                    Delimiter::Brace => 1,
                    Delimiter::Bracket => 2,
                    Delimiter::None => 3,
                };
                delim_tag.hash(h);

                for tt in stream.clone() {
                    TokenTreeHelper(&tt).hash(h);
                }
                0xffu8.hash(h); // terminator w/ a variant we don't normally hash
            }
            TokenNode::Op(ch, spacing) => {
                1u8.hash(h);
                ch.hash(h);
                let spacing_tag: u8 = match spacing {
                    Spacing::Alone => 0,
                    Spacing::Joint => 1,
                };
                spacing_tag.hash(h);
            }
            // Literals and terms hash their textual form, tagged.
            TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
            TokenNode::Term(ref word) => (3u8, word.as_str()).hash(h),
        }
    }
}
| 140 | |
/// Newtype over a borrowed `TokenStream` providing the `PartialEq`/`Hash`
/// impls defined below, delegating per-token work to `TokenTreeHelper`.
#[cfg(feature = "extra-traits")]
pub struct TokenStreamHelper<'a>(pub &'a TokenStream);
| 143 | |
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenStreamHelper<'a> {
    /// Two streams are equal when they have the same length and every
    /// corresponding pair of trees compares equal via `TokenTreeHelper`.
    fn eq(&self, other: &Self) -> bool {
        let lhs = self.0.clone().into_iter().collect::<Vec<_>>();
        let rhs = other.0.clone().into_iter().collect::<Vec<_>>();
        lhs.len() == rhs.len()
            && lhs
                .iter()
                .zip(rhs.iter())
                .all(|(a, b)| TokenTreeHelper(a) == TokenTreeHelper(b))
    }
}
| 160 | |
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenStreamHelper<'a> {
    /// Hash the stream length first (guards against ambiguity between
    /// adjacent streams), then each tree via `TokenTreeHelper`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let trees = self.0.clone().into_iter().collect::<Vec<_>>();
        trees.len().hash(state);
        for tree in &trees {
            TokenTreeHelper(tree).hash(state);
        }
    }
}