// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

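// Internal helpers: parsing of delimited token groups (under the `parsing`
// feature) and structural equality / hashing of proc-macro2 token trees and
// token streams (under the `extra-traits` feature).
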
#[cfg(feature = "parsing")]
use buffer::Cursor;
#[cfg(feature = "parsing")]
use synom::PResult;
#[cfg(feature = "parsing")]
use token::{Brace, Bracket, Paren};
#[cfg(feature = "parsing")]
use {parse_error, MacroDelimiter};

#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};

#[cfg(any(feature = "parsing", feature = "extra-traits"))]
use proc_macro2::{Delimiter, TokenNode, TokenStream, TokenTree};

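/// Parses any delimited group (`(...)`, `{...}`, or `[...]`) from the front of
/// the input, returning the delimiter tokens and the enclosed token stream.
/// A group with `Delimiter::None` is rejected as a parse error.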
#[cfg(feature = "parsing")]
pub fn delimited(input: Cursor) -> PResult<(MacroDelimiter, TokenStream)> {
    match input.token_tree() {
        Some((
            TokenTree {
                span,
                kind: TokenNode::Group(delimiter, tts),
            },
            rest,
        )) => {
            let delimiter = match delimiter {
                Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
                Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
                Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
                Delimiter::None => return parse_error(),
            };
            Ok(((delimiter, tts), rest))
        }
        _ => parse_error(),
    }
}

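/// Parses a brace-delimited group `{...}` from the front of the input,
/// returning the brace token and the enclosed token stream.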
#[cfg(all(feature = "full", feature = "parsing"))]
pub fn braced(input: Cursor) -> PResult<(Brace, TokenStream)> {
    match input.token_tree() {
        Some((
            TokenTree {
                span,
                kind: TokenNode::Group(Delimiter::Brace, tts),
            },
            rest,
        )) => Ok(((Brace(span), tts), rest)),
        _ => parse_error(),
    }
}

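/// Parses a parenthesized group `(...)` from the front of the input,
/// returning the paren token and the enclosed token stream.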
#[cfg(all(feature = "full", feature = "parsing"))]
pub fn parenthesized(input: Cursor) -> PResult<(Paren, TokenStream)> {
    match input.token_tree() {
        Some((
            TokenTree {
                span,
                kind: TokenNode::Group(Delimiter::Parenthesis, tts),
            },
            rest,
        )) => Ok(((Paren(span), tts), rest)),
        _ => parse_error(),
    }
}

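/// Wrapper that provides `PartialEq` and `Hash` for a borrowed proc-macro2
/// `TokenTree`, comparing and hashing token trees structurally (by delimiter,
/// spacing, and string content) while ignoring spans. Intended for the
/// `extra-traits` impls of types that store raw token streams.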
#[cfg(feature = "extra-traits")]
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);

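// Structural equality: groups must have the same delimiter and recursively
// equal contents; ops must match in character and spacing; literals and terms
// are compared by their string representations.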
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenTreeHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        use proc_macro2::Spacing;

        match (&self.0.kind, &other.0.kind) {
            (&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => {
                match (d1, d2) {
                    (Delimiter::Parenthesis, Delimiter::Parenthesis)
                    | (Delimiter::Brace, Delimiter::Brace)
                    | (Delimiter::Bracket, Delimiter::Bracket)
                    | (Delimiter::None, Delimiter::None) => {}
                    _ => return false,
                }

                let s1 = s1.clone().into_iter();
                let mut s2 = s2.clone().into_iter();

                for item1 in s1 {
                    let item2 = match s2.next() {
                        Some(item) => item,
                        None => return false,
                    };
                    if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
                        return false;
                    }
                }
                s2.next().is_none()
            }
            (&TokenNode::Op(o1, k1), &TokenNode::Op(o2, k2)) => {
                o1 == o2 && match (k1, k2) {
                    (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                    _ => false,
                }
            }
            (&TokenNode::Literal(ref l1), &TokenNode::Literal(ref l2)) => {
                l1.to_string() == l2.to_string()
            }
            (&TokenNode::Term(ref s1), &TokenNode::Term(ref s2)) => s1.as_str() == s2.as_str(),
            _ => false,
        }
    }
}

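// Hashing mirrors the equality above: a discriminant byte identifies the kind
// of token tree, followed by its contents, with a 0xff terminator closing each
// group so that nested streams of different shapes hash differently.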
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenTreeHelper<'a> {
    fn hash<H: Hasher>(&self, h: &mut H) {
        use proc_macro2::Spacing;

        match self.0.kind {
            TokenNode::Group(delim, ref stream) => {
                0u8.hash(h);
                match delim {
                    Delimiter::Parenthesis => 0u8.hash(h),
                    Delimiter::Brace => 1u8.hash(h),
                    Delimiter::Bracket => 2u8.hash(h),
                    Delimiter::None => 3u8.hash(h),
                }

                for item in stream.clone() {
                    TokenTreeHelper(&item).hash(h);
                }
                0xffu8.hash(h); // terminator w/ a variant we don't normally hash
            }
            TokenNode::Op(op, kind) => {
                1u8.hash(h);
                op.hash(h);
                match kind {
                    Spacing::Alone => 0u8.hash(h),
                    Spacing::Joint => 1u8.hash(h),
                }
            }
            TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
            TokenNode::Term(ref word) => (3u8, word.as_str()).hash(h),
        }
    }
}

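/// Wrapper that provides `PartialEq` and `Hash` for a borrowed `TokenStream`
/// by delegating to `TokenTreeHelper` for each token tree in the stream.
///
/// A minimal usage sketch (hypothetical caller, not part of this module):
///
/// ```ignore
/// // `left` and `right` are `proc_macro2::TokenStream`s; spans are ignored.
/// let same = TokenStreamHelper(&left) == TokenStreamHelper(&right);
/// ```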
#[cfg(feature = "extra-traits")]
pub struct TokenStreamHelper<'a>(pub &'a TokenStream);

#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenStreamHelper<'a> {
    fn eq(&self, other: &Self) -> bool {
        let left = self.0.clone().into_iter().collect::<Vec<_>>();
        let right = other.0.clone().into_iter().collect::<Vec<_>>();
        if left.len() != right.len() {
            return false;
        }
        for (a, b) in left.into_iter().zip(right) {
            if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
                return false;
            }
        }
        true
    }
}

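// Hash the number of token trees first, then each tree in order, matching the
// element-wise comparison in the `PartialEq` impl above.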
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenStreamHelper<'a> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        let tts = self.0.clone().into_iter().collect::<Vec<_>>();
        tts.len().hash(state);
        for tt in tts {
            TokenTreeHelper(&tt).hash(state);
        }
    }
}