// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use proc_macro2::{Delimiter, TokenNode, TokenStream, TokenTree};
use buffer::Cursor;
use parse_error;
use synom::PResult;
use MacroDelimiter;
use token::{Brace, Bracket, Paren};

#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};

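/// Parse any delimited group from the front of the input: a parenthesized,
/// braced, or bracketed token stream. The delimiter token (carrying the
/// group's span) is returned alongside the tokens inside the group; an
/// invisible `Delimiter::None` group is treated as a parse error.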
pub fn delimited(input: Cursor) -> PResult<(MacroDelimiter, TokenStream)> {
    match input.token_tree() {
        Some((
            TokenTree {
                span,
                kind: TokenNode::Group(delimiter, tts),
            },
            rest,
        )) => {
            let delimiter = match delimiter {
                Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
                Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
                Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
                Delimiter::None => return parse_error(),
            };
            Ok(((delimiter, tts), rest))
        }
        _ => parse_error(),
    }
}

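/// Parse a braced group `{ ... }` from the front of the input, returning the
/// brace token (carrying the group's span) and the tokens inside it.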
#[cfg(feature = "full")]
pub fn braced(input: Cursor) -> PResult<(Brace, TokenStream)> {
    match input.token_tree() {
        Some((
            TokenTree {
                span,
                kind: TokenNode::Group(Delimiter::Brace, tts),
            },
            rest,
        )) => Ok(((Brace(span), tts), rest)),
        _ => parse_error(),
    }
}

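/// Parse a parenthesized group `( ... )` from the front of the input,
/// returning the parenthesis token (carrying the group's span) and the tokens
/// inside it.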
#[cfg(feature = "full")]
pub fn parenthesized(input: Cursor) -> PResult<(Paren, TokenStream)> {
    match input.token_tree() {
        Some((
            TokenTree {
                span,
                kind: TokenNode::Group(Delimiter::Parenthesis, tts),
            },
            rest,
        )) => Ok(((Paren(span), tts), rest)),
        _ => parse_error(),
    }
}

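/// Wrapper that lends structural `PartialEq` and `Hash` impls to a borrowed
/// `proc_macro2::TokenTree`, comparing and hashing the token's content rather
/// than its span.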
#[cfg(feature = "extra-traits")]
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);

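// Two token trees are equal if they have the same structure: groups must use
// the same delimiter and contain pairwise-equal trees, ops must agree on both
// the character and its spacing, and literals and terms are compared by their
// string form.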
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenTreeHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        use proc_macro2::Spacing;

        match (&self.0.kind, &other.0.kind) {
            (&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => {
                match (d1, d2) {
                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
                    | (Delimiter::Brace, Delimiter::Brace)
                    | (Delimiter::Bracket, Delimiter::Bracket)
                    | (Delimiter::None, Delimiter::None) => {}
                    _ => return false,
                }

                let s1 = s1.clone().into_iter();
                let mut s2 = s2.clone().into_iter();

                for item1 in s1 {
                    let item2 = match s2.next() {
                        Some(item) => item,
                        None => return false,
                    };
                    if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
                        return false;
                    }
                }
                s2.next().is_none()
            }
            (&TokenNode::Op(o1, k1), &TokenNode::Op(o2, k2)) => {
                o1 == o2 && match (k1, k2) {
                    (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                    _ => false,
                }
            }
            (&TokenNode::Literal(ref l1), &TokenNode::Literal(ref l2)) => {
                l1.to_string() == l2.to_string()
            }
            (&TokenNode::Term(ref s1), &TokenNode::Term(ref s2)) => s1.as_str() == s2.as_str(),
            _ => false,
        }
    }
}

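// Hash a token tree by feeding a discriminant byte for the node kind followed
// by the node's content. Group streams end with an 0xff sentinel byte that no
// other variant emits, so group boundaries affect the hash.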
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenTreeHelper<'a> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        use proc_macro2::Spacing;

        match self.0.kind {
            TokenNode::Group(delim, ref stream) => {
                0u8.hash(h);
                match delim {
                    Delimiter::Parenthesis => 0u8.hash(h),
                    Delimiter::Brace => 1u8.hash(h),
                    Delimiter::Bracket => 2u8.hash(h),
                    Delimiter::None => 3u8.hash(h),
                }

                for item in stream.clone() {
                    TokenTreeHelper(&item).hash(h);
                }
                0xffu8.hash(h); // terminator w/ a variant we don't normally hash
            }
            TokenNode::Op(op, kind) => {
                1u8.hash(h);
                op.hash(h);
                match kind {
                    Spacing::Alone => 0u8.hash(h),
                    Spacing::Joint => 1u8.hash(h),
                }
            }
            TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
            TokenNode::Term(ref word) => (3u8, word.as_str()).hash(h),
        }
    }
}

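/// Wrapper that lends structural `PartialEq` and `Hash` impls to a borrowed
/// `proc_macro2::TokenStream` by delegating to `TokenTreeHelper` for each
/// token tree in the stream.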
#[cfg(feature = "extra-traits")]
pub struct TokenStreamHelper<'a>(pub &'a TokenStream);

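// Two token streams are equal if they have the same length and their token
// trees are pairwise equal.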
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenStreamHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        let left = self.0.clone().into_iter().collect::<Vec<_>>();
        let right = other.0.clone().into_iter().collect::<Vec<_>>();
        if left.len() != right.len() {
            return false;
        }
        for (a, b) in left.into_iter().zip(right) {
            if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
                return false;
            }
        }
        true
    }
}

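// Hash the stream as its length followed by each token tree in order.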
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenStreamHelper<'a> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let tts = self.0.clone().into_iter().collect::<Vec<_>>();
        tts.len().hash(state);
        for tt in tts {
            TokenTreeHelper(&tt).hash(state);
        }
    }
}