// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

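//! Helpers for comparing and hashing `proc_macro2` token trees and token
//! streams by structure alone. Spans are ignored entirely, so two streams
//! parsed from identical source text compare equal and hash alike.
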
use std::hash::{Hash, Hasher};

use proc_macro2::{Delimiter, TokenStream, TokenTree};

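/// Wrapper that adds `PartialEq` and `Hash` to a borrowed
/// `proc_macro2::TokenTree`, which provides neither on its own.
///
/// Two trees are considered equal when their structure matches: groups must
/// share a delimiter and contain pairwise-equal tokens, punctuation must match
/// in character and spacing, literals are compared by their string
/// representation, and identifiers by name. Spans never participate.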
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);

impl<'a> PartialEq for TokenTreeHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        use proc_macro2::Spacing;

        match (self.0, other.0) {
            (&TokenTree::Group(ref g1), &TokenTree::Group(ref g2)) => {
                // Both groups must use the same delimiter...
                match (g1.delimiter(), g2.delimiter()) {
                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
                    | (Delimiter::Brace, Delimiter::Brace)
                    | (Delimiter::Bracket, Delimiter::Bracket)
                    | (Delimiter::None, Delimiter::None) => {}
                    _ => return false,
                }

                // ...and contain the same tokens in the same order.
                let s1 = g1.stream().into_iter();
                let mut s2 = g2.stream().into_iter();

                for item1 in s1 {
                    let item2 = match s2.next() {
                        Some(item) => item,
                        None => return false,
                    };
                    if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
                        return false;
                    }
                }
                s2.next().is_none()
            }
            (&TokenTree::Punct(ref o1), &TokenTree::Punct(ref o2)) => {
                o1.as_char() == o2.as_char()
                    && match (o1.spacing(), o2.spacing()) {
                        (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                        _ => false,
                    }
            }
            (&TokenTree::Literal(ref l1), &TokenTree::Literal(ref l2)) => {
                l1.to_string() == l2.to_string()
            }
            (&TokenTree::Ident(ref s1), &TokenTree::Ident(ref s2)) => s1 == s2,
            _ => false,
        }
    }
}

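// Hashing mirrors the equality above: each variant is prefixed with a small
// discriminant tag so that structurally different tokens cannot collide, and
// group contents end with a 0xff terminator so that nested streams of
// different lengths hash differently.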
impl<'a> Hash for TokenTreeHelper<'a> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        use proc_macro2::Spacing;

        match *self.0 {
            TokenTree::Group(ref g) => {
                0u8.hash(h);
                match g.delimiter() {
                    Delimiter::Parenthesis => 0u8.hash(h),
                    Delimiter::Brace => 1u8.hash(h),
                    Delimiter::Bracket => 2u8.hash(h),
                    Delimiter::None => 3u8.hash(h),
                }

                for item in g.stream() {
                    TokenTreeHelper(&item).hash(h);
                }
                0xffu8.hash(h); // terminator with a variant we don't normally hash
            }
            TokenTree::Punct(ref op) => {
                1u8.hash(h);
                op.as_char().hash(h);
                match op.spacing() {
                    Spacing::Alone => 0u8.hash(h),
                    Spacing::Joint => 1u8.hash(h),
                }
            }
            TokenTree::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
            TokenTree::Ident(ref word) => (3u8, word).hash(h),
        }
    }
}

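/// Wrapper that adds `PartialEq` and `Hash` to a borrowed
/// `proc_macro2::TokenStream` by delegating to `TokenTreeHelper` for each
/// token. Hashing is length-prefixed so that streams of different lengths
/// hash differently even when one is a prefix of the other.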
pub struct TokenStreamHelper<'a>(pub &'a TokenStream);

impl<'a> PartialEq for TokenStreamHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        let left = self.0.clone().into_iter().collect::<Vec<_>>();
        let right = other.0.clone().into_iter().collect::<Vec<_>>();
        if left.len() != right.len() {
            return false;
        }
        for (a, b) in left.into_iter().zip(right) {
            if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
                return false;
            }
        }
        true
    }
}

impl<'a> Hash for TokenStreamHelper<'a> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let tts = self.0.clone().into_iter().collect::<Vec<_>>();
        tts.len().hash(state);
        for tt in tts {
            TokenTreeHelper(&tt).hash(state);
        }
    }
}
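
// A minimal illustrative sketch of how the helpers above might be exercised.
// This hypothetical test module is not part of the original file; it assumes
// the standard-library `DefaultHasher` and `proc_macro2`'s `FromStr` impl for
// `TokenStream`.
#[cfg(test)]
mod tt_helper_tests {
    use super::TokenStreamHelper;
    use proc_macro2::TokenStream;
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    fn hash_of<T: Hash>(value: &T) -> u64 {
        let mut hasher = DefaultHasher::new();
        value.hash(&mut hasher);
        hasher.finish()
    }

    #[test]
    fn structurally_equal_streams_compare_and_hash_alike() {
        // The two streams are parsed independently and therefore carry
        // different spans, but the helpers ignore spans entirely.
        let a: TokenStream = "a + (b * c)".parse().unwrap();
        let b: TokenStream = "a + (b * c)".parse().unwrap();
        assert!(TokenStreamHelper(&a) == TokenStreamHelper(&b));
        assert_eq!(hash_of(&TokenStreamHelper(&a)), hash_of(&TokenStreamHelper(&b)));

        // A stream with a different group delimiter is not equal.
        let c: TokenStream = "a + [b * c]".parse().unwrap();
        assert!(TokenStreamHelper(&a) != TokenStreamHelper(&c));
    }
}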