David Tolnay | 5553501 | 2018-01-05 16:39:23 -0800 | [diff] [blame] | 1 | // Copyright 2018 Syn Developers |
| 2 | // |
| 3 | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or |
| 4 | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license |
| 5 | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your |
| 6 | // option. This file may not be copied, modified, or distributed |
| 7 | // except according to those terms. |
| 8 | |
David Tolnay | cc54371 | 2018-01-08 11:29:54 -0800 | [diff] [blame] | 9 | #[cfg(feature = "parsing")] |
David Tolnay | dfc886b | 2018-01-06 08:03:09 -0800 | [diff] [blame] | 10 | use buffer::Cursor; |
David Tolnay | cc54371 | 2018-01-08 11:29:54 -0800 | [diff] [blame] | 11 | #[cfg(feature = "parsing")] |
David Tolnay | 203557a | 2017-12-27 23:59:33 -0500 | [diff] [blame] | 12 | use synom::PResult; |
David Tolnay | cc54371 | 2018-01-08 11:29:54 -0800 | [diff] [blame] | 13 | #[cfg(feature = "parsing")] |
David Tolnay | 61037c6 | 2018-01-05 16:21:03 -0800 | [diff] [blame] | 14 | use token::{Brace, Bracket, Paren}; |
David Tolnay | cc54371 | 2018-01-08 11:29:54 -0800 | [diff] [blame] | 15 | #[cfg(feature = "parsing")] |
| 16 | use {parse_error, MacroDelimiter}; |
David Tolnay | e082403 | 2017-12-27 15:25:56 -0500 | [diff] [blame] | 17 | |
David Tolnay | c43b44e | 2017-12-30 23:55:54 -0500 | [diff] [blame] | 18 | #[cfg(feature = "extra-traits")] |
| 19 | use std::hash::{Hash, Hasher}; |
| 20 | |
David Tolnay | cc54371 | 2018-01-08 11:29:54 -0800 | [diff] [blame] | 21 | #[cfg(any(feature = "parsing", feature = "extra-traits"))] |
| 22 | use proc_macro2::{Delimiter, TokenNode, TokenStream, TokenTree}; |
| 23 | |
#[cfg(feature = "parsing")]
/// Parse a single delimited group — `( … )`, `{ … }`, or `[ … ]` — off the
/// front of the cursor, returning syn's delimiter token (carrying the group's
/// span) together with the enclosed token stream and the remaining input.
///
/// Fails on any non-group token and on `Delimiter::None` groups, which have
/// no textual delimiter to represent.
pub fn delimited(input: Cursor) -> PResult<(MacroDelimiter, TokenStream)> {
    if let Some((
        TokenTree {
            span,
            kind: TokenNode::Group(delimiter, tts),
        },
        rest,
    )) = input.token_tree()
    {
        // Translate the proc-macro2 delimiter into syn's span-bearing tokens.
        let delimiter = match delimiter {
            Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
            Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
            Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
            Delimiter::None => return parse_error(),
        };
        Ok(((delimiter, tts), rest))
    } else {
        parse_error()
    }
}
| 45 | |
#[cfg(all(feature = "full", feature = "parsing"))]
/// Parse exactly one brace-delimited group `{ … }` off the cursor, returning
/// the `Brace` token (with the group's span), the enclosed tokens, and the
/// rest of the input. Any other token is a parse error.
pub fn braced(input: Cursor) -> PResult<(Brace, TokenStream)> {
    match input.token_tree() {
        Some((tt, rest)) => match tt.kind {
            TokenNode::Group(Delimiter::Brace, tts) => Ok(((Brace(tt.span), tts), rest)),
            _ => parse_error(),
        },
        None => parse_error(),
    }
}
| 59 | |
#[cfg(all(feature = "full", feature = "parsing"))]
/// Parse exactly one parenthesized group `( … )` off the cursor, returning
/// the `Paren` token (with the group's span), the enclosed tokens, and the
/// rest of the input. Any other token is a parse error.
pub fn parenthesized(input: Cursor) -> PResult<(Paren, TokenStream)> {
    match input.token_tree() {
        Some((tt, rest)) => match tt.kind {
            TokenNode::Group(Delimiter::Parenthesis, tts) => Ok(((Paren(tt.span), tts), rest)),
            _ => parse_error(),
        },
        None => parse_error(),
    }
}
David Tolnay | c43b44e | 2017-12-30 23:55:54 -0500 | [diff] [blame] | 73 | |
/// Newtype over a borrowed `TokenTree` that supplies `PartialEq` and `Hash`
/// implementations (see the impls below); both compare/hash the tree
/// structurally and ignore spans.
#[cfg(feature = "extra-traits")]
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);
| 76 | |
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenTreeHelper<'a> {
    /// Structural equality of two token trees. Spans are ignored; groups
    /// compare delimiter plus contents recursively, ops compare character and
    /// spacing, literals compare by their textual form, terms by their string.
    fn eq(&self, other: &Self) -> bool {
        use proc_macro2::Spacing;

        match (&self.0.kind, &other.0.kind) {
            (&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => {
                // Delimiters must agree before the streams are walked.
                let delims_match = match (d1, d2) {
                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
                    | (Delimiter::Brace, Delimiter::Brace)
                    | (Delimiter::Bracket, Delimiter::Bracket)
                    | (Delimiter::None, Delimiter::None) => true,
                    _ => false,
                };
                if !delims_match {
                    return false;
                }

                // Walk both streams in lockstep; they must agree tree-by-tree
                // and end on the same step.
                let mut left = s1.clone().into_iter();
                let mut right = s2.clone().into_iter();
                loop {
                    match (left.next(), right.next()) {
                        (None, None) => return true,
                        (Some(a), Some(b)) => {
                            if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
                                return false;
                            }
                        }
                        // One stream is longer than the other.
                        _ => return false,
                    }
                }
            }
            (&TokenNode::Op(o1, k1), &TokenNode::Op(o2, k2)) => {
                if o1 != o2 {
                    return false;
                }
                match (k1, k2) {
                    (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                    _ => false,
                }
            }
            (&TokenNode::Literal(ref l1), &TokenNode::Literal(ref l2)) => {
                // Literals are compared by display form rather than identity.
                l1.to_string() == l2.to_string()
            }
            (&TokenNode::Term(ref t1), &TokenNode::Term(ref t2)) => t1.as_str() == t2.as_str(),
            // Different node kinds are never equal.
            _ => false,
        }
    }
}
| 120 | |
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenTreeHelper<'a> {
    /// Structural hash matching the `PartialEq` impl above: spans are ignored.
    /// Each node kind contributes a distinct variant tag (0..=3) so different
    /// kinds never collide trivially.
    fn hash<H: Hasher>(&self, h: &mut H) {
        use proc_macro2::Spacing;

        match self.0.kind {
            TokenNode::Group(delim, ref stream) => {
                0u8.hash(h);
                // Encode the delimiter as a small tag.
                let delim_tag: u8 = match delim {
                    Delimiter::Parenthesis => 0,
                    Delimiter::Brace => 1,
                    Delimiter::Bracket => 2,
                    Delimiter::None => 3,
                };
                delim_tag.hash(h);

                // Hash the contents recursively.
                for tree in stream.clone() {
                    TokenTreeHelper(&tree).hash(h);
                }
                // Terminator byte distinct from every variant tag above, so a
                // group boundary can't be confused with a sibling token.
                0xffu8.hash(h);
            }
            TokenNode::Op(op, kind) => {
                1u8.hash(h);
                op.hash(h);
                let spacing_tag: u8 = match kind {
                    Spacing::Alone => 0,
                    Spacing::Joint => 1,
                };
                spacing_tag.hash(h);
            }
            // Literals and terms hash their textual form, tagged by kind.
            TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
            TokenNode::Term(ref word) => (3u8, word.as_str()).hash(h),
        }
    }
}
| 154 | |
/// Newtype over a borrowed `TokenStream` that supplies `PartialEq` and `Hash`
/// by delegating element-wise to `TokenTreeHelper` (see the impls below).
#[cfg(feature = "extra-traits")]
pub struct TokenStreamHelper<'a>(pub &'a TokenStream);
| 157 | |
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenStreamHelper<'a> {
    /// Element-wise structural equality: same length and every pair of token
    /// trees equal under `TokenTreeHelper` (spans ignored).
    fn eq(&self, other: &Self) -> bool {
        // TokenStream offers no length or indexing, so both sides are
        // materialized before comparison.
        let lhs = self.0.clone().into_iter().collect::<Vec<_>>();
        let rhs = other.0.clone().into_iter().collect::<Vec<_>>();
        lhs.len() == rhs.len()
            && lhs
                .iter()
                .zip(&rhs)
                .all(|(a, b)| TokenTreeHelper(a) == TokenTreeHelper(b))
    }
}
| 174 | |
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenStreamHelper<'a> {
    /// Hash consistent with the `PartialEq` impl above: the stream length
    /// followed by each tree's structural hash.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let trees = self.0.clone().into_iter().collect::<Vec<_>>();
        // Length prefix prevents concatenation ambiguity between streams.
        trees.len().hash(state);
        for tree in &trees {
            TokenTreeHelper(tree).hash(state);
        }
    }
}