use proc_macro2::{TokenNode, TokenStream, TokenTree, Delimiter};
use cursor::Cursor;
use parse_error;
use synom::PResult;
use MacroDelimiter;
use token::{Paren, Brace, Bracket};

#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};

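/// Parse a single token tree group delimited by `(...)`, `{...}`, or `[...]`,
/// returning which delimiter was used (with its span) together with the tokens
/// inside it. Invisible `Delimiter::None` groups are rejected as a parse error.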
pub fn delimited(input: Cursor) -> PResult<(MacroDelimiter, TokenStream)> {
    match input.token_tree() {
        Some((
            rest,
            TokenTree {
                span,
                kind: TokenNode::Group(delimiter, tts),
            },
        )) => {
            let delimiter = match delimiter {
                Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
                Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
                Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
                Delimiter::None => return parse_error(),
            };
            Ok((rest, (delimiter, tts)))
        }
        _ => parse_error(),
    }
}

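/// Parse a braced group `{ ... }`, returning the `Brace` token (carrying the
/// group's span) and the enclosed tokens.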
#[cfg(feature = "full")]
pub fn braced(input: Cursor) -> PResult<(Brace, TokenStream)> {
    match input.token_tree() {
        Some((
            rest,
            TokenTree {
                span,
                kind: TokenNode::Group(Delimiter::Brace, tts),
            },
        )) => Ok((rest, (Brace(span), tts))),
        _ => parse_error(),
    }
}

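/// Parse a parenthesized group `( ... )`, returning the `Paren` token (carrying
/// the group's span) and the enclosed tokens.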
#[cfg(feature = "full")]
pub fn parenthesized(input: Cursor) -> PResult<(Paren, TokenStream)> {
    match input.token_tree() {
        Some((
            rest,
            TokenTree {
                span,
                kind: TokenNode::Group(Delimiter::Parenthesis, tts),
            },
        )) => Ok((rest, (Paren(span), tts))),
        _ => parse_error(),
    }
}

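/// Wrapper lending `PartialEq` and `Hash` to a borrowed `TokenTree`, which does
/// not implement them itself. Both comparison and hashing are structural and
/// ignore spans.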
#[cfg(feature = "extra-traits")]
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);

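// Structural equality on token trees: delimiters must match, group contents are
// compared element by element and recursively, ops must agree in both character
// and spacing, and literals and terms are compared by their textual form.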
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenTreeHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        use proc_macro2::Spacing;

        match (&self.0.kind, &other.0.kind) {
            (&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => {
                match (d1, d2) {
                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
                    | (Delimiter::Brace, Delimiter::Brace)
                    | (Delimiter::Bracket, Delimiter::Bracket)
                    | (Delimiter::None, Delimiter::None) => {}
                    _ => return false,
                }

                let s1 = s1.clone().into_iter();
                let mut s2 = s2.clone().into_iter();

                for item1 in s1 {
                    let item2 = match s2.next() {
                        Some(item) => item,
                        None => return false,
                    };
                    if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
                        return false;
                    }
                }
                s2.next().is_none()
            }
            (&TokenNode::Op(o1, k1), &TokenNode::Op(o2, k2)) => {
                o1 == o2 && match (k1, k2) {
                    (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                    _ => false,
                }
            }
            (&TokenNode::Literal(ref l1), &TokenNode::Literal(ref l2)) => {
                l1.to_string() == l2.to_string()
            }
            (&TokenNode::Term(ref s1), &TokenNode::Term(ref s2)) => s1.as_str() == s2.as_str(),
            _ => false,
        }
    }
}

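// Hashing mirrors the equality above: one discriminant byte per token kind,
// then the delimiter and recursively hashed contents for groups, or the textual
// form for literals and terms, so trees that compare equal also hash equally.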
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenTreeHelper<'a> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        use proc_macro2::Spacing;

        match self.0.kind {
            TokenNode::Group(delim, ref stream) => {
                0u8.hash(h);
                match delim {
                    Delimiter::Parenthesis => 0u8.hash(h),
                    Delimiter::Brace => 1u8.hash(h),
                    Delimiter::Bracket => 2u8.hash(h),
                    Delimiter::None => 3u8.hash(h),
                }

                for item in stream.clone() {
                    TokenTreeHelper(&item).hash(h);
                }
                0xffu8.hash(h); // terminator w/ a variant we don't normally hash
            }
            TokenNode::Op(op, kind) => {
                1u8.hash(h);
                op.hash(h);
                match kind {
                    Spacing::Alone => 0u8.hash(h),
                    Spacing::Joint => 1u8.hash(h),
                }
            }
            TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
            TokenNode::Term(ref word) => (3u8, word.as_str()).hash(h),
        }
    }
}

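/// Wrapper lending `PartialEq` and `Hash` to a borrowed `TokenStream`,
/// delegating to `TokenTreeHelper` for each token tree in the stream.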
#[cfg(feature = "extra-traits")]
pub struct TokenStreamHelper<'a>(pub &'a TokenStream);

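// Two streams are equal when they have the same length and their token trees
// are pairwise equal under `TokenTreeHelper`.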
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenStreamHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        let left = self.0.clone().into_iter().collect::<Vec<_>>();
        let right = other.0.clone().into_iter().collect::<Vec<_>>();
        if left.len() != right.len() {
            return false;
        }
        for (a, b) in left.into_iter().zip(right) {
            if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
                return false;
            }
        }
        true
    }
}

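// Hash the stream length followed by each token tree, consistent with the
// pairwise definition of equality above.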
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenStreamHelper<'a> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let tts = self.0.clone().into_iter().collect::<Vec<_>>();
        tts.len().hash(state);
        for tt in tts {
            TokenTreeHelper(&tt).hash(state);
        }
    }
}
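
// Illustrative sketch, not part of the upstream source: the two strings below
// lex to the same tokens and differ only in whitespace (hence spans), so the
// helpers treat them as equal. Assumes proc-macro2's `FromStr` impl for
// `TokenStream` and that parsing happens outside a procedural macro context.
#[cfg(all(test, feature = "extra-traits"))]
mod tests {
    use super::TokenStreamHelper;
    use proc_macro2::TokenStream;

    // Hypothetical helper for this sketch; avoids requiring `Debug` on the
    // lexer's error type.
    fn lex(s: &str) -> TokenStream {
        s.parse().ok().expect("failed to lex token stream")
    }

    #[test]
    fn equality_ignores_spans() {
        let a = lex("a + (b * c)");
        let b = lex("a  +  ( b  *  c )");
        assert!(TokenStreamHelper(&a) == TokenStreamHelper(&b));
    }
}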