blob: 50322a1cd57c2b66a2d62923a287b6d4881d8811 [file] [log] [blame]
David Tolnaye5806852017-06-01 12:49:20 -07001extern crate proc_macro2;
2
David Tolnaya13d1422018-03-31 21:27:48 +02003use std::str::{self, FromStr};
Alex Crichton8c030332018-01-16 08:07:36 -08004
David Tolnayb28f38a2018-03-31 22:02:29 +02005use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree};
David Tolnaye5806852017-06-01 12:49:20 -07006
#[test]
fn symbols() {
    // `Term::new` + `as_str` must round-trip the identifier text unchanged.
    let foo = Term::new("foo", Span::call_site());
    assert_eq!(foo.as_str(), "foo");
    let bar = Term::new("bar", Span::call_site());
    assert_eq!(bar.as_str(), "bar");
}
12
#[test]
fn literals() {
    // Displaying a string literal adds surrounding quotes and escaping.
    let plain = Literal::string("foo");
    assert_eq!(plain.to_string(), "\"foo\"");
    let quote = Literal::string("\"");
    assert_eq!(quote.to_string(), "\"\\\"\"");
    // An unsuffixed f32 keeps a decimal point so it re-parses as a float.
    let float = Literal::f32_unsuffixed(10.0);
    assert_eq!(float.to_string(), "10.0");
}
19
#[test]
fn roundtrip() {
    // Printing a parsed TokenStream and re-parsing the printed form must be
    // a fixed point: the second printing equals the first.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        // Fix: `s` is already a String; the previous `s.to_string()` made a
        // pointless extra allocation before re-parsing.
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        /// a
        wut
        ",
    );
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        ",
    );
    roundtrip("'a");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
60
#[test]
fn fail() {
    // Each of these inputs is invalid token syntax and must be rejected.
    fn fail(p: &str) {
        assert!(
            p.parse::<TokenStream>().is_err(),
            "should have failed to parse: {}",
            p
        );
    }
    fail("1x");
    fail("1u80");
    fail("1f320");
    fail("' static");
    fail("'mut");
    fail("r#1");
    fail("r#_");
}
Nika Layzellf8d5f212017-12-11 14:07:02 -050076
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_test() {
    // Walk a parsed TokenStream and check each token's start/end line and
    // column against the expected `(sline, scol, eline, ecol)` tuples,
    // consuming the expectation slice as we go (recursing into groups).
    //
    // Fixes: removed a redundant `use proc_macro2::TokenTree;` (already in
    // scope from the top-of-file import) and a redundant `.clone()` on
    // `Group::stream()`, which already returns an owned TokenStream.
    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span().start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span().end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                // Recurse into group interiors so nested tokens are checked too.
                if let TokenTree::Group(ref g) = i {
                    check_spans_internal(g.stream(), lines);
                }
            }
        }
    }

    check_spans(
        "\
/// This is a document comment
testing 123
{
    testing 234
}",
        &[
            (1, 0, 1, 30),
            (2, 0, 2, 7),
            (2, 8, 2, 11),
            (3, 0, 5, 1),
            (4, 2, 4, 9),
            (4, 10, 4, 13),
        ],
    );
}
127
#[cfg(procmacro2_semver_exempt)]
#[test]
fn default_span() {
    // The fallback (non-nightly) implementation reports line 1, column 0
    // for both ends of the call-site span and a placeholder source file.
    let span = Span::call_site();
    let begin = span.start();
    assert_eq!(begin.line, 1);
    assert_eq!(begin.column, 0);
    let finish = span.end();
    assert_eq!(finish.line, 1);
    assert_eq!(finish.column, 0);
    let source_file = span.source_file();
    assert_eq!(source_file.path().to_string(), "<unspecified>");
    assert!(!source_file.is_real());
}
142
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    // Parse a snippet into its individual top-level tokens.
    fn tokens_of(src: &str) -> Vec<TokenTree> {
        src.parse::<TokenStream>().unwrap().into_iter().collect()
    }
    let source1 = tokens_of("aaa\nbbb");
    let source2 = tokens_of("ccc\nddd");

    // Tokens from separate parses live in different source files; tokens
    // from the same parse share one.
    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    // Joining succeeds within a file but not across files.
    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    // The joined span covers from the start of the first token to the end
    // of the second.
    let joined1 = joined1.unwrap();
    let start = joined1.start();
    let end = joined1.end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(joined1.source_file(), source1[0].span().source_file());
}
Alex Crichton8c030332018-01-16 08:07:36 -0800180
#[test]
fn no_panic() {
    // A malformed byte-char input must produce a parse error, never a panic.
    let bytes: &[u8] = b"b\'\xc2\x86 \x00\x00\x00^\"";
    let text = str::from_utf8(bytes).unwrap();
    assert!(text.parse::<proc_macro2::TokenStream>().is_err());
}
186
#[test]
fn tricky_doc_comment() {
    // An empty block comment produces no tokens at all.
    let tokens: Vec<_> = "/**/"
        .parse::<proc_macro2::TokenStream>()
        .unwrap()
        .into_iter()
        .collect();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);

    // A doc comment lexes as exactly one token, and that token is a literal.
    let tokens: Vec<_> = "/// doc"
        .parse::<proc_macro2::TokenStream>()
        .unwrap()
        .into_iter()
        .collect();
    assert!(tokens.len() == 1, "not length 1 -- {:?}", tokens);
    match tokens[0] {
        proc_macro2::TokenTree::Literal(_) => {}
        _ => panic!("wrong token {:?}", tokens[0]),
    }
}
201
#[test]
fn raw_identifier() {
    // `r#dyn` must lex as a single raw-identifier term whose text keeps
    // the `r#` prefix.
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    let first = tts.next().unwrap();
    match first {
        TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    // Nothing should follow the single term.
    assert!(tts.next().is_none());
}