blob: 23ff04d47dc267ba06de717dfcc0e1f1374d6445 [file] [log] [blame]
David Tolnaye5806852017-06-01 12:49:20 -07001extern crate proc_macro2;
2
David Tolnaya13d1422018-03-31 21:27:48 +02003use std::str::{self, FromStr};
Alex Crichton8c030332018-01-16 08:07:36 -08004
David Tolnaya13d1422018-03-31 21:27:48 +02005use proc_macro2::{Term, Literal, TokenStream, Span, TokenTree};
David Tolnaye5806852017-06-01 12:49:20 -07006
#[test]
fn symbols() {
    // A `Term` must round-trip its identifier text exactly.
    let foo = Term::new("foo", Span::call_site());
    let bar = Term::new("bar", Span::call_site());
    assert_eq!(foo.as_str(), "foo");
    assert_eq!(bar.as_str(), "bar");
}
12
#[test]
fn literals() {
    // String literals print quoted, with embedded quotes escaped.
    let plain = Literal::string("foo");
    assert_eq!(plain.to_string(), "\"foo\"");
    let quote = Literal::string("\"");
    assert_eq!(quote.to_string(), "\"\\\"\"");
    // An unsuffixed float keeps its decimal point so it re-parses as a float.
    let float = Literal::f32_unsuffixed(10.0);
    assert_eq!(float.to_string(), "10.0");
}
19
#[test]
fn roundtrip() {
    // Parse `p`, print the resulting `TokenStream`, re-parse the printed
    // form, and print again: the two printed forms must be identical, i.e.
    // printing must be a fixed point of parse -> print.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        // `s` is already a `String`; the original called `s.to_string()`
        // here, performing a needless extra allocation before parsing.
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip("
        /// a
        wut
    ");
    roundtrip("
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
    ");
    roundtrip("'a");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
56
#[test]
fn fail() {
    // Every input below is invalid Rust token syntax and must be rejected
    // by the parser.
    fn fail(p: &str) {
        let parsed = p.parse::<TokenStream>();
        if parsed.is_ok() {
            panic!("should have failed to parse: {}", p);
        }
    }
    let cases = ["1x", "1u80", "1f320", "' static", "'mut", "r#1", "r#_"];
    for case in &cases {
        fail(case);
    }
}
Nika Layzellf8d5f212017-12-11 14:07:02 -050072
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_test() {
    // NOTE: the original re-imported `TokenTree` here even though it is
    // already imported at the top of the file; the duplicate was removed.

    // Check that each token of `p` carries the expected source span. The
    // expectations are (start_line, start_column, end_line, end_column)
    // tuples, consumed in token order, recursing into groups.
    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    fn check_spans_internal(
        ts: TokenStream,
        lines: &mut &[(usize, usize, usize, usize)],
    ) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span().start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span().end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                match i {
                    TokenTree::Group(ref g) => {
                        // `stream()` already returns an owned `TokenStream`;
                        // the original cloned it a second time for no reason.
                        check_spans_internal(g.stream(), lines);
                    }
                    _ => {}
                }
            }
        }
    }

    check_spans("\
/// This is a document comment
testing 123
{
  testing 234
}", &[
        (1, 0, 1, 30), // the doc comment
        (2, 0, 2, 7),  // `testing`
        (2, 8, 2, 11), // `123`
        (3, 0, 5, 1),  // the braced group
        (4, 2, 4, 9),  // inner `testing`
        (4, 10, 4, 13), // `234`
    ]);
}
123
#[cfg(procmacro2_semver_exempt)]
#[test]
fn default_span() {
    // With the stable fallback implementation, a call-site span reports
    // line 1, column 0 for both ends and an unnamed, non-real source file.
    let start = Span::call_site().start();
    assert_eq!((start.line, start.column), (1, 0));
    let end = Span::call_site().end();
    assert_eq!((end.line, end.column), (1, 0));
    let source_file = Span::call_site().source_file();
    assert_eq!(source_file.path().to_string(), "<unspecified>");
    assert!(!source_file.is_real());
}
138
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    // Two separate parses belong to distinct source files, while tokens
    // from a single parse share one file.
    let source1: Vec<_> = "aaa\nbbb".parse::<TokenStream>().unwrap().into_iter().collect();
    let source2: Vec<_> = "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect();

    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(source1[0].span().source_file(), source1[1].span().source_file());

    // Joining spans only succeeds within a single source file.
    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    // The joined span covers from the start of `aaa` to the end of `bbb`.
    let joined = joined1.unwrap();
    let start = joined.start();
    let end = joined.end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(joined.source_file(), source1[0].span().source_file());
}
Alex Crichton8c030332018-01-16 08:07:36 -0800164
#[test]
fn no_panic() {
    // Regression test: this input is valid UTF-8 but invalid token syntax;
    // parsing must return an error rather than panic.
    let input = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
    let parsed = input.parse::<proc_macro2::TokenStream>();
    assert!(parsed.is_err());
}
170
#[test]
fn tricky_doc_comment() {
    // An empty block comment `/**/` is an ordinary comment, not a doc
    // comment, so it produces no tokens at all.
    let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens: Vec<_> = stream.into_iter().collect();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);

    // A line doc comment becomes exactly one token, a literal.
    let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens: Vec<_> = stream.into_iter().collect();
    assert!(tokens.len() == 1, "not length 1 -- {:?}", tokens);
    if let proc_macro2::TokenTree::Literal(_) = tokens[0] {
        // expected shape
    } else {
        panic!("wrong token {:?}", tokens[0]);
    }
}
185
#[test]
fn raw_identifier() {
    // `r#dyn` must lex as a single raw-identifier term whose text keeps
    // the `r#` prefix, followed by nothing else.
    let stream = TokenStream::from_str("r#dyn").unwrap();
    let mut tokens = stream.into_iter();
    match tokens.next().unwrap() {
        TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tokens.next().is_none());
}
194}