David Tolnay | 5553501 | 2018-01-05 16:39:23 -0800 | [diff] [blame] | 1 | // Copyright 2018 Syn Developers |
| 2 | // |
| 3 | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or |
| 4 | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license |
| 5 | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your |
| 6 | // option. This file may not be copied, modified, or distributed |
| 7 | // except according to those terms. |
| 8 | |
David Tolnay | 61037c6 | 2018-01-05 16:21:03 -0800 | [diff] [blame] | 9 | use proc_macro2::{Delimiter, TokenNode, TokenStream, TokenTree}; |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 10 | use cursor::Cursor; |
David Tolnay | 203557a | 2017-12-27 23:59:33 -0500 | [diff] [blame] | 11 | use parse_error; |
| 12 | use synom::PResult; |
David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 13 | use MacroDelimiter; |
David Tolnay | 61037c6 | 2018-01-05 16:21:03 -0800 | [diff] [blame] | 14 | use token::{Brace, Bracket, Paren}; |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 15 | |
David Tolnay | c43b44e | 2017-12-30 23:55:54 -0500 | [diff] [blame] | 16 | #[cfg(feature = "extra-traits")] |
| 17 | use std::hash::{Hash, Hasher}; |
| 18 | |
David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 19 | pub fn delimited(input: Cursor) -> PResult<(MacroDelimiter, TokenStream)> { |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 20 | match input.token_tree() { |
| 21 | Some(( |
David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 22 | TokenTree { |
| 23 | span, |
| 24 | kind: TokenNode::Group(delimiter, tts), |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 25 | }, |
David Tolnay | 6572948 | 2017-12-31 16:14:50 -0500 | [diff] [blame] | 26 | rest, |
David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 27 | )) => { |
| 28 | let delimiter = match delimiter { |
| 29 | Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)), |
| 30 | Delimiter::Brace => MacroDelimiter::Brace(Brace(span)), |
| 31 | Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)), |
| 32 | Delimiter::None => return parse_error(), |
| 33 | }; |
David Tolnay | f4aa6b4 | 2017-12-31 16:40:33 -0500 | [diff] [blame] | 34 | Ok(((delimiter, tts), rest)) |
David Tolnay | ab91951 | 2017-12-30 23:31:51 -0500 | [diff] [blame] | 35 | } |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 36 | _ => parse_error(), |
| 37 | } |
| 38 | } |
| 39 | |
#[cfg(feature = "full")]
/// Parse a brace-delimited group `{...}` from the front of the input,
/// returning a `Brace` token carrying the group's span along with the
/// enclosed token stream. Any other leading token is a parse error.
pub fn braced(input: Cursor) -> PResult<(Brace, TokenStream)> {
    if let Some((
        TokenTree {
            span,
            kind: TokenNode::Group(Delimiter::Brace, tts),
        },
        rest,
    )) = input.token_tree()
    {
        Ok(((Brace(span), tts), rest))
    } else {
        parse_error()
    }
}
| 53 | |
#[cfg(feature = "full")]
/// Parse a parenthesized group `(...)` from the front of the input,
/// returning a `Paren` token carrying the group's span along with the
/// enclosed token stream. Any other leading token is a parse error.
pub fn parenthesized(input: Cursor) -> PResult<(Paren, TokenStream)> {
    if let Some((
        TokenTree {
            span,
            kind: TokenNode::Group(Delimiter::Parenthesis, tts),
        },
        rest,
    )) = input.token_tree()
    {
        Ok(((Paren(span), tts), rest))
    } else {
        parse_error()
    }
}
David Tolnay | c43b44e | 2017-12-30 23:55:54 -0500 | [diff] [blame] | 67 | |
/// Borrowed wrapper around a `TokenTree` that supplies the `PartialEq` and
/// `Hash` implementations defined below (token trees themselves do not
/// provide them in this proc-macro2 version).
#[cfg(feature = "extra-traits")]
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);
| 70 | |
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenTreeHelper<'a> {
    /// Structural equality over token trees: groups must use the same
    /// delimiter and contain pairwise-equal streams; ops must match in both
    /// character and spacing; literals and terms compare by their text.
    /// Spans are deliberately ignored.
    fn eq(&self, other: &Self) -> bool {
        use proc_macro2::Spacing;

        match (&self.0.kind, &other.0.kind) {
            (&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => {
                // Guard: both groups must use the same delimiter variant.
                let same_delim = match (d1, d2) {
                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
                    | (Delimiter::Brace, Delimiter::Brace)
                    | (Delimiter::Bracket, Delimiter::Bracket)
                    | (Delimiter::None, Delimiter::None) => true,
                    _ => false,
                };
                if !same_delim {
                    return false;
                }

                // Walk both streams in lockstep; they are equal only if they
                // end together with every pair of trees comparing equal.
                let mut left = s1.clone().into_iter();
                let mut right = s2.clone().into_iter();
                loop {
                    match (left.next(), right.next()) {
                        (None, None) => return true,
                        (Some(a), Some(b)) => {
                            if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
                                return false;
                            }
                        }
                        // One stream ran out before the other.
                        _ => return false,
                    }
                }
            }
            (&TokenNode::Op(c1, k1), &TokenNode::Op(c2, k2)) => {
                let same_spacing = match (k1, k2) {
                    (Spacing::Alone, Spacing::Alone)
                    | (Spacing::Joint, Spacing::Joint) => true,
                    _ => false,
                };
                c1 == c2 && same_spacing
            }
            (&TokenNode::Literal(ref a), &TokenNode::Literal(ref b)) => {
                a.to_string() == b.to_string()
            }
            (&TokenNode::Term(ref a), &TokenNode::Term(ref b)) => a.as_str() == b.as_str(),
            // Different token kinds are never equal.
            _ => false,
        }
    }
}
| 114 | |
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenTreeHelper<'a> {
    /// Hash a token tree consistently with the `PartialEq` impl above.
    /// Each variant feeds a distinguishing tag byte first so different
    /// token kinds cannot collide structurally; spans are ignored.
    fn hash<H: Hasher>(&self, h: &mut H) {
        use proc_macro2::Spacing;

        match self.0.kind {
            TokenNode::Group(delim, ref stream) => {
                0u8.hash(h);
                let delim_tag: u8 = match delim {
                    Delimiter::Parenthesis => 0,
                    Delimiter::Brace => 1,
                    Delimiter::Bracket => 2,
                    Delimiter::None => 3,
                };
                delim_tag.hash(h);

                for tree in stream.clone() {
                    TokenTreeHelper(&tree).hash(h);
                }
                // Terminator byte outside the 0-3 tag range used above, so
                // nested streams of different lengths hash differently.
                0xffu8.hash(h);
            }
            TokenNode::Op(ch, spacing) => {
                1u8.hash(h);
                ch.hash(h);
                let spacing_tag: u8 = match spacing {
                    Spacing::Alone => 0,
                    Spacing::Joint => 1,
                };
                spacing_tag.hash(h);
            }
            TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
            TokenNode::Term(ref term) => (3u8, term.as_str()).hash(h),
        }
    }
}
| 148 | |
/// Borrowed wrapper around a `TokenStream` that supplies the `PartialEq`
/// and `Hash` implementations defined below, delegating per-tree work to
/// `TokenTreeHelper`.
#[cfg(feature = "extra-traits")]
pub struct TokenStreamHelper<'a>(pub &'a TokenStream);
| 151 | |
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenStreamHelper<'a> {
    /// Two streams are equal when they have the same length and every pair
    /// of corresponding token trees compares equal via `TokenTreeHelper`.
    fn eq(&self, other: &Self) -> bool {
        let lhs = self.0.clone().into_iter().collect::<Vec<_>>();
        let rhs = other.0.clone().into_iter().collect::<Vec<_>>();
        lhs.len() == rhs.len()
            && lhs.iter()
                .zip(&rhs)
                .all(|(a, b)| TokenTreeHelper(a) == TokenTreeHelper(b))
    }
}
| 168 | |
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenStreamHelper<'a> {
    /// Hash the stream length first, then each tree in order (via
    /// `TokenTreeHelper`), so streams that are prefixes of one another
    /// still hash differently.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let trees = self.0.clone().into_iter().collect::<Vec<_>>();
        trees.len().hash(state);
        for tree in &trees {
            TokenTreeHelper(tree).hash(state);
        }
    }
}