blob: 27429ee3a4630a90926069cbf52cd88e8067152b [file] [log] [blame]
David Tolnaye5806852017-06-01 12:49:20 -07001extern crate proc_macro2;
2
David Tolnaya13d1422018-03-31 21:27:48 +02003use std::str::{self, FromStr};
Alex Crichton8c030332018-01-16 08:07:36 -08004
David Tolnayb28f38a2018-03-31 22:02:29 +02005use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree};
David Tolnaye5806852017-06-01 12:49:20 -07006
#[test]
fn terms() {
    // A Term constructed from a plain identifier (including keywords and
    // the wildcard `_`) must report the exact spelling back via `as_str`.
    for &name in &["String", "fn", "_"] {
        assert_eq!(Term::new(name, Span::call_site()).as_str(), name);
    }
}
13
#[test]
fn raw_terms() {
    // Raw identifiers (`r#...`) round-trip through Term with the `r#`
    // prefix preserved.
    for &name in &["r#String", "r#fn", "r#_"] {
        assert_eq!(Term::new(name, Span::call_site()).as_str(), name);
    }
}
20
#[test]
fn lifetimes() {
    // Lifetime names (leading `'`) are also representable as Terms and
    // round-trip unchanged.
    for &name in &["'a", "'static", "'_"] {
        assert_eq!(Term::new(name, Span::call_site()).as_str(), name);
    }
}
27
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
fn term_empty() {
    // Constructing a Term from the empty string is a caller bug and panics.
    let _ = Term::new("", Span::call_site());
}
33
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
fn term_number() {
    // Numeric input must be rejected; numbers belong in Literal.
    let _ = Term::new("255", Span::call_site());
}
39
#[test]
#[should_panic(expected = "\"a#\" is not a valid Term")]
fn term_invalid() {
    // `#` is not a valid identifier character, so construction panics.
    let _ = Term::new("a#", Span::call_site());
}
45
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
fn raw_term_empty() {
    // A bare `r#` prefix with no identifier after it counts as empty.
    let _ = Term::new("r#", Span::call_site());
}
51
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
fn raw_term_number() {
    // A number behind `r#` is still a number and is rejected.
    let _ = Term::new("r#255", Span::call_site());
}
57
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Term")]
fn raw_term_invalid() {
    // An invalid character after the raw prefix panics with the full input.
    let _ = Term::new("r#a#", Span::call_site());
}
63
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
fn lifetime_empty() {
    // A lone apostrophe is a lifetime with no name: empty, hence a panic.
    let _ = Term::new("'", Span::call_site());
}
69
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
fn lifetime_number() {
    // A numeric lifetime body is rejected like any other number.
    let _ = Term::new("'255", Span::call_site());
}
75
#[test]
#[should_panic(expected = r#""\'a#" is not a valid Term"#)]
fn lifetime_invalid() {
    // An invalid character inside a lifetime name panics; the message
    // shows the escaped input.
    let _ = Term::new("'a#", Span::call_site());
}
81
#[test]
fn literals() {
    // String literals print with surrounding quotes and escaped contents;
    // unsuffixed floats keep a decimal point.
    let plain = Literal::string("foo");
    assert_eq!(plain.to_string(), "\"foo\"");
    let quote = Literal::string("\"");
    assert_eq!(quote.to_string(), "\"\\\"\"");
    let float = Literal::f32_unsuffixed(10.0);
    assert_eq!(float.to_string(), "10.0");
}
88
#[test]
fn roundtrip() {
    // Printing a parsed stream must be a fixed point: parsing the printed
    // form and printing again yields the identical string.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        // `s` is already a String; the original called `s.to_string()`
        // here, which cloned it for no reason before re-parsing.
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        ",
    );
    roundtrip("'a");
    roundtrip("'static");
    // Underscores are permitted (and ignored) inside unicode escapes.
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
123
#[test]
fn fail() {
    // Each of these inputs is lexically invalid and must not parse into a
    // TokenStream.
    for &p in &[
        "1x",
        "1u80",
        "1f320",
        "' static",
        "'mut",
        "r#1",
        "r#_",
    ] {
        assert!(
            p.parse::<TokenStream>().is_err(),
            "should have failed to parse: {}",
            p
        );
    }
}
Nika Layzellf8d5f212017-12-11 14:07:02 -0500139
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_test() {
    // Walk a parsed stream and compare every token's span (start/end
    // line+column) against an expected list, consuming entries in order.
    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span().start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span().end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                // Recurse into groups so nested tokens are checked too.
                // `stream()` returns an owned TokenStream, so the extra
                // `.clone()` the original made here was redundant; the
                // local `use proc_macro2::TokenTree` was also redundant
                // with the file-level import.
                if let TokenTree::Group(ref g) = i {
                    check_spans_internal(g.stream(), lines);
                }
            }
        }
    }

    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
194
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(feature = "nightly"))]
#[test]
fn default_span() {
    // With no real source behind it, a call-site span collapses to
    // line 1, column 0 at both ends and reports an unreal source file.
    let span = Span::call_site();
    assert_eq!(span.start().line, 1);
    assert_eq!(span.start().column, 0);
    assert_eq!(span.end().line, 1);
    assert_eq!(span.end().column, 0);
    let source_file = span.source_file();
    assert_eq!(source_file.path().to_string(), "<unspecified>");
    assert!(!source_file.is_real());
}
209
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    // Two separate parses get two distinct synthetic source files.
    let tokens1: Vec<_> = "aaa\nbbb".parse::<TokenStream>().unwrap().into_iter().collect();
    let tokens2: Vec<_> = "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect();

    assert!(tokens1[0].span().source_file() != tokens2[0].span().source_file());
    assert_eq!(
        tokens1[0].span().source_file(),
        tokens1[1].span().source_file()
    );

    // Joining spans succeeds only within the same source file.
    let same_file = tokens1[0].span().join(tokens1[1].span());
    let cross_file = tokens1[0].span().join(tokens2[0].span());
    assert!(same_file.is_some());
    assert!(cross_file.is_none());

    // The joined span covers from the start of `aaa` to the end of `bbb`.
    let joined = same_file.unwrap();
    assert_eq!(joined.start().line, 1);
    assert_eq!(joined.start().column, 0);
    assert_eq!(joined.end().line, 2);
    assert_eq!(joined.end().column, 3);

    assert_eq!(joined.source_file(), tokens1[0].span().source_file());
}
Alex Crichton8c030332018-01-16 08:07:36 -0800247
#[test]
fn no_panic() {
    // A malformed byte-literal-ish input that once crashed the lexer; it
    // must surface as a parse error rather than a panic.
    let input = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
    let result = input.parse::<proc_macro2::TokenStream>();
    assert!(result.is_err());
}
253
#[test]
fn tricky_doc_comment() {
    // An empty block comment produces no tokens at all.
    let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);

    // An outer doc comment lowers to `#` followed by a bracketed group
    // containing `doc = " doc"` -- exactly two top-level tokens.
    let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
    match tokens[0] {
        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
        _ => panic!("wrong token {:?}", tokens[0]),
    }
    let mut tokens = match tokens[1] {
        proc_macro2::TokenTree::Group(ref tt) => {
            assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
            tt.stream().into_iter()
        }
        // Bug fix: this arm matches tokens[1], so the diagnostic must
        // print tokens[1]; the original printed tokens[0].
        _ => panic!("wrong token {:?}", tokens[1]),
    };

    // Inside the group: `doc`, `=`, then the string literal.
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Literal(ref tt) => {
            assert_eq!(tt.to_string(), "\" doc\"");
        }
        t => panic!("wrong token {:?}", t),
    }
    assert!(tokens.next().is_none());

    // An inner doc comment (`//!`) lowers to `#`, `!`, and the group:
    // three top-level tokens.
    let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}
295
#[test]
fn raw_identifier() {
    // `r#dyn` must lex as a single raw-identifier Term, prefix included.
    let stream = TokenStream::from_str("r#dyn").unwrap();
    let mut iter = stream.into_iter();
    let first = iter.next().unwrap();
    if let TokenTree::Term(ref raw) = first {
        assert_eq!("r#dyn", raw.as_str());
    } else {
        panic!("wrong token {:?}", first);
    }
    assert!(iter.next().is_none());
}
David Tolnay034205f2018-04-22 16:45:28 -0700305
#[test]
fn test_debug() {
    // `{:#?}` on a TokenStream pretty-prints the whole token tree; pin the
    // exact rendering for a small bracketed expression.
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    // Without the semver-exempt cfg, spans are omitted from Debug output.
    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Term {
                sym: a
            },
            Op {
                op: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
    ";

    // With the semver-exempt cfg, each token additionally shows its span
    // as a byte range into the source.
    #[cfg(procmacro2_semver_exempt)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Term {
                sym: a,
                span: bytes(2..3)
            },
            Op {
                op: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
    ";

    assert_eq!(expected, format!("{:#?}", tts));
}