blob: dde66d4f3f5f1035dc34c0128efb315949fe0c01 [file] [log] [blame]
David Tolnaye5806852017-06-01 12:49:20 -07001extern crate proc_macro2;
2
David Tolnaya13d1422018-03-31 21:27:48 +02003use std::str::{self, FromStr};
Alex Crichton8c030332018-01-16 08:07:36 -08004
David Tolnay3a592ad2018-04-22 21:20:24 -07005use proc_macro2::{Literal, Spacing, Span, Term, TokenStream, TokenTree};
David Tolnaye5806852017-06-01 12:49:20 -07006
#[test]
fn terms() {
    // A plain identifier, a keyword, and the bare underscore are all valid
    // Terms and must print back exactly as given.
    for &name in &["String", "fn", "_"] {
        assert_eq!(Term::new(name, Span::call_site()).as_str(), name);
    }
}
13
#[test]
fn raw_terms() {
    // Raw identifiers keep their `r#` prefix when converted back to a string.
    for &raw in &["r#String", "r#fn", "r#_"] {
        assert_eq!(Term::new(raw, Span::call_site()).as_str(), raw);
    }
}
23
#[test]
fn lifetimes() {
    // Lifetime tokens — ordinary, keyword-like ('static), and placeholder
    // ('_) — are accepted as Terms and round-trip through as_str().
    for &lt in &["'a", "'static", "'_"] {
        assert_eq!(Term::new(lt, Span::call_site()).as_str(), lt);
    }
}
30
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
fn term_empty() {
    // Constructing a Term from the empty string must panic at creation time.
    let span = Span::call_site();
    Term::new("", span);
}
36
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
fn term_number() {
    // A purely numeric string is a literal, not an identifier; Term rejects it.
    let span = Span::call_site();
    Term::new("255", span);
}
42
#[test]
#[should_panic(expected = "\"a#\" is not a valid Term")]
fn term_invalid() {
    // `#` is not a valid identifier character, so this must panic.
    let span = Span::call_site();
    Term::new("a#", span);
}
48
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
fn raw_term_empty() {
    // A bare `r#` prefix with nothing after it counts as an empty Term.
    let span = Span::call_site();
    Term::new("r#", span);
}
54
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
fn raw_term_number() {
    // The raw prefix does not make a number a valid identifier.
    let span = Span::call_site();
    Term::new("r#255", span);
}
60
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Term")]
fn raw_term_invalid() {
    // Invalid identifier characters are rejected even behind the raw prefix.
    let span = Span::call_site();
    Term::new("r#a#", span);
}
66
#[test]
#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
fn lifetime_empty() {
    // A lone apostrophe is a lifetime with an empty name — rejected.
    let span = Span::call_site();
    Term::new("'", span);
}
72
#[test]
#[should_panic(expected = "Term cannot be a number; use Literal instead")]
fn lifetime_number() {
    // A lifetime name must be an identifier, not a number.
    let span = Span::call_site();
    Term::new("'255", span);
}
78
#[test]
#[should_panic(expected = r#""\'a#" is not a valid Term"#)]
fn lifetime_invalid() {
    // Trailing junk after a lifetime name is rejected.
    let span = Span::call_site();
    Term::new("'a#", span);
}
84
#[test]
fn literals() {
    // String literals come back quoted and escaped; an unsuffixed float
    // keeps its decimal point so it still lexes as a float.
    let plain = Literal::string("foo");
    assert_eq!(plain.to_string(), "\"foo\"");
    let quote = Literal::string("\"");
    assert_eq!(quote.to_string(), "\"\\\"\"");
    let float = Literal::f32_unsuffixed(10.0);
    assert_eq!(float.to_string(), "10.0");
}
91
#[test]
fn roundtrip() {
    // Printing a parsed TokenStream must reach a fixed point: parsing the
    // printed form and printing it again yields byte-identical text.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        // `s` is already a String; the previous `s.to_string()` here
        // allocated a needless extra copy before re-parsing.
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
    ",
    );
    roundtrip("'a");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
126
#[test]
fn fail() {
    // Every input here is malformed and must be rejected by the lexer.
    fn fail(p: &str) {
        assert!(
            p.parse::<TokenStream>().is_err(),
            "should have failed to parse: {}",
            p
        );
    }
    fail("1x");
    fail("1u80");
    fail("1f320");
    fail("' static");
    fail("'mut");
    fail("r#1");
    fail("r#_");
}
Nika Layzellf8d5f212017-12-11 14:07:02 -0500142
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_test() {
    use proc_macro2::TokenTree;

    // Parse `p` and check each token's span against the expected
    // (start_line, start_col, end_line, end_col) tuples, in token order.
    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    // Walks the stream depth-first, consuming one expectation per token;
    // a group's delimiter span is checked before the group's contents.
    fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span().start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span().end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                match i {
                    TokenTree::Group(ref g) => {
                        check_spans_internal(g.stream().clone(), lines);
                    }
                    _ => {}
                }
            }
        }
    }

    // The doc comment desugars to `#[doc = "..."]`, so its five tokens all
    // share the span of the whole comment line (columns 0..30).
    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30), // #
            (1, 0, 1, 30), // [ ... ]
            (1, 0, 1, 30), // doc
            (1, 0, 1, 30), // =
            (1, 0, 1, 30), // "This is..."
            (2, 0, 2, 7), // testing
            (2, 8, 2, 11), // 123
            (3, 0, 5, 1), // { ... }
            (4, 2, 4, 9), // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
197
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(feature = "nightly"))]
#[test]
fn default_span() {
    // With no real source text behind it, call_site() collapses to line 1,
    // column 0, backed by an unspecified, non-real source file.
    let span = Span::call_site();
    assert_eq!(span.start().line, 1);
    assert_eq!(span.start().column, 0);
    assert_eq!(span.end().line, 1);
    assert_eq!(span.end().column, 0);
    let source_file = span.source_file();
    assert_eq!(source_file.path().to_string(), "<unspecified>");
    assert!(!source_file.is_real());
}
212
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    // Two separate parses produce tokens in two distinct source files.
    let source1: Vec<_> = "aaa\nbbb".parse::<TokenStream>().unwrap().into_iter().collect();
    let source2: Vec<_> = "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect();

    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    // Joining succeeds within one file and fails across files.
    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    // The joined span covers both tokens: start of `aaa` to end of `bbb`.
    let joined1 = joined1.unwrap();
    assert_eq!(joined1.start().line, 1);
    assert_eq!(joined1.start().column, 0);
    assert_eq!(joined1.end().line, 2);
    assert_eq!(joined1.end().column, 3);

    // And it stays attributed to the original source file.
    assert_eq!(joined1.source_file(), source1[0].span().source_file());
}
Alex Crichton8c030332018-01-16 08:07:36 -0800250
#[test]
fn no_panic() {
    // Regression test: this malformed byte-character input once crashed the
    // lexer; it must surface as an ordinary parse error instead.
    let input = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
    assert!(input.parse::<proc_macro2::TokenStream>().is_err());
}
256
#[test]
fn tricky_doc_comment() {
    // An empty block comment is not a doc comment and yields no tokens.
    let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);

    // An outer doc comment desugars to `#` followed by a bracketed group.
    let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
    match tokens[0] {
        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
        _ => panic!("wrong token {:?}", tokens[0]),
    }
    let mut tokens = match tokens[1] {
        proc_macro2::TokenTree::Group(ref tt) => {
            assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
            tt.stream().into_iter()
        }
        // Bug fix: this arm inspects tokens[1]; the panic previously printed
        // tokens[0], misreporting which token was unexpected.
        _ => panic!("wrong token {:?}", tokens[1]),
    };

    // Inside the brackets: `doc`, `=`, and the string literal " doc".
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Literal(ref tt) => {
            assert_eq!(tt.to_string(), "\" doc\"");
        }
        t => panic!("wrong token {:?}", t),
    }
    assert!(tokens.next().is_none());

    // An inner doc comment (`//!`) produces `#`, `!`, and the group.
    let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}
298
#[test]
fn op_before_comment() {
    // A trailing line comment must not make the preceding op Joint:
    // the comment is not a token the `~` could join with.
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    let first = tts.next().unwrap();
    match first {
        TokenTree::Op(tt) => {
            assert_eq!(tt.op(), '~');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}
310
#[test]
fn raw_identifier() {
    // `r#dyn` must lex as a single raw-identifier Term, prefix included.
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    let token = tts.next().unwrap();
    if let TokenTree::Term(raw) = token {
        assert_eq!("r#dyn", raw.as_str());
    } else {
        panic!("wrong token {:?}", token);
    }
    assert!(tts.next().is_none());
}
David Tolnay034205f2018-04-22 16:45:28 -0700320
#[test]
fn test_debug() {
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    // Without the semver-exempt cfg, Debug output carries no span info.
    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Term {
                sym: a
            },
            Op {
                op: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
    ";

    // With it, every tree additionally reports its byte range in the input
    // ("[a + 1]" — e.g. `a` occupies bytes 2..3).
    #[cfg(procmacro2_semver_exempt)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Term {
                sym: a,
                span: bytes(2..3)
            },
            Op {
                op: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
    ";

    assert_eq!(expected, format!("{:#?}", tts));
}