use std::str::{self, FromStr};

use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};

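// `Ident::new` is expected to reproduce the given name verbatim, whether it is
// an ordinary identifier, a keyword, or the underscore.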
#[test]
fn idents() {
    assert_eq!(
        Ident::new("String", Span::call_site()).to_string(),
        "String"
    );
    assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
    assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
}

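// `Ident::new_raw` should prepend the `r#` prefix to the given name. This test
// only compiles when the semver-exempt API surface is enabled (the
// `procmacro2_semver_exempt` cfg).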
#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_idents() {
    assert_eq!(
        Ident::new_raw("String", Span::call_site()).to_string(),
        "r#String"
    );
    assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
    assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
}

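// The next group of tests exercises `Ident::new`'s input validation: empty
// strings, numbers, malformed identifiers, bad raw identifiers, and bare or
// malformed lifetimes should all panic with the messages asserted below.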
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    Ident::new("", Span::call_site());
}

#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    Ident::new("255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    Ident::new("a#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    Ident::new("r#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    Ident::new("r#255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    Ident::new("r#a#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    Ident::new("'", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    Ident::new("'255", Span::call_site());
}

#[test]
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
    Ident::new("'a#", Span::call_site());
}

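// Literal constructors should produce correctly quoted and escaped token text.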
#[test]
fn literal_string() {
    assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
    assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
    assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
}

#[test]
fn literal_character() {
    assert_eq!(Literal::character('x').to_string(), "'x'");
    assert_eq!(Literal::character('\'').to_string(), "'\\''");
    assert_eq!(Literal::character('"').to_string(), "'\"'");
}

#[test]
fn literal_float() {
    assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
}

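// Parsing a string into a `TokenStream`, printing it, and parsing the result
// again should be a fixed point: the second printout must equal the first.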
#[test]
fn roundtrip() {
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        ",
    );
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}

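// Inputs that are not valid Rust token streams (unsupported literal suffixes,
// a space inside a lifetime, numeric or underscore raw identifiers) must be
// rejected by `str::parse::<TokenStream>()`.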
#[test]
fn fail() {
    fn fail(p: &str) {
        if let Ok(s) = p.parse::<TokenStream>() {
            panic!("should have failed to parse: {}\n{:#?}", p, s);
        }
    }
    fail("1x");
    fail("1u80");
    fail("1f320");
    fail("' static");
    fail("r#1");
    fail("r#_");
}

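// Line/column tracking is only available when the `span_locations` cfg is on;
// the expected (line, column) pairs below are asserted against both the start
// and end of every token, recursing into groups.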
#[cfg(span_locations)]
#[test]
fn span_test() {
    use proc_macro2::TokenTree;

    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span().start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span().end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                match i {
                    TokenTree::Group(ref g) => {
                        check_spans_internal(g.stream().clone(), lines);
                    }
                    _ => {}
                }
            }
        }
    }

    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}

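// With no real source text behind it, a call-site span should report line 1,
// column 0 and an unspecified, non-real source file.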
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    let start = Span::call_site().start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = Span::call_site().end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = Span::call_site().source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}

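// `Span::join` should succeed for spans from the same parse (covering the full
// joined range) and return `None` for spans from different source files.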
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    let source1 = "aaa\nbbb"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();
    let source2 = "ccc\nddd"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();

    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(
        joined1.unwrap().source_file(),
        source1[0].span().source_file()
    );
}

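// Garbage input must produce a parse error, never a panic.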
#[test]
fn no_panic() {
    let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
    assert!(s.parse::<proc_macro2::TokenStream>().is_err());
}

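// Doc comments are surfaced as attribute-shaped tokens: an empty block comment
// yields nothing, `///` expands to a `#` punct plus a bracketed `doc = "..."`
// group, and `//!` additionally produces the inner-attribute `!` token.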
#[test]
fn tricky_doc_comment() {
    let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);

    let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
    match tokens[0] {
        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
        _ => panic!("wrong token {:?}", tokens[0]),
    }
    let mut tokens = match tokens[1] {
        proc_macro2::TokenTree::Group(ref tt) => {
            assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
            tt.stream().into_iter()
        }
        _ => panic!("wrong token {:?}", tokens[0]),
    };

    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Literal(ref tt) => {
            assert_eq!(tt.to_string(), "\" doc\"");
        }
        t => panic!("wrong token {:?}", t),
    }
    assert!(tokens.next().is_none());

    let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}

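// A punct followed immediately by a line comment should report
// `Spacing::Alone`, since the comment separates it from any following token.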
#[test]
fn op_before_comment() {
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Punct(tt) => {
            assert_eq!(tt.as_char(), '~');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}

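// A raw identifier such as `r#dyn` should parse as a single `Ident` token whose
// string form keeps the `r#` prefix.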
#[test]
fn raw_identifier() {
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tts.next().is_none());
}

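// The Debug representation of `Ident` is terse by default and includes the span
// only when the semver-exempt API is enabled.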
#[test]
fn test_debug_ident() {
    let ident = Ident::new("proc_macro", Span::call_site());

    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "Ident(proc_macro)";

    #[cfg(procmacro2_semver_exempt)]
    let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";

    assert_eq!(expected, format!("{:?}", ident));
}

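// The pretty-printed Debug output of a `TokenStream` is compared against fixed
// strings; two variants are kept because older compilers emit `{:#?}` output
// without trailing commas.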
#[test]
fn test_debug_tokenstream() {
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                op: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
    ";

    #[cfg(not(procmacro2_semver_exempt))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                op: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
    ";

    #[cfg(procmacro2_semver_exempt)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
    ";

    #[cfg(procmacro2_semver_exempt)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
    ";

    let actual = format!("{:#?}", tts);
    if actual.ends_with(",\n]") {
        assert_eq!(expected, actual);
    } else {
        assert_eq!(expected_before_trailing_commas, actual);
    }
}

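// `TokenStream::default()` should be an empty stream.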
#[test]
fn default_tokenstream_is_empty() {
    let default_token_stream: TokenStream = Default::default();

    assert!(default_token_stream.is_empty());
}