extern crate proc_macro2;

use std::str::{self, FromStr};

use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree};

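// A Term constructed from a string should report back exactly that string.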
#[test]
fn symbols() {
    assert_eq!(Term::new("foo", Span::call_site()).as_str(), "foo");
    assert_eq!(Term::new("bar", Span::call_site()).as_str(), "bar");
}

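// Literal constructors should print as valid Rust literal tokens: strings get
// quoted and escaped, and an unsuffixed float keeps its decimal point.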
#[test]
fn literals() {
    assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
    assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
    assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
}

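// Printing a parsed TokenStream must reach a fixed point: re-parsing the
// printed form and printing it again has to reproduce the same string.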
#[test]
fn roundtrip() {
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        ",
    );
    roundtrip("'a");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}

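// Inputs that are not valid Rust tokens -- bad numeric suffixes, malformed
// lifetimes, and invalid raw identifiers -- must fail to parse.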
#[test]
fn fail() {
    fn fail(p: &str) {
        if p.parse::<TokenStream>().is_ok() {
            panic!("should have failed to parse: {}", p);
        }
    }
    fail("1x");
    fail("1u80");
    fail("1f320");
    fail("' static");
    fail("'mut");
    fail("r#1");
    fail("r#_");
}

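// Spans should track source locations (semver-exempt API). Each expected tuple
// is (start line, start column, end line, end column) for one token, with the
// contents of groups checked recursively.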
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_test() {
    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span().start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span().end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                if let TokenTree::Group(ref g) = i {
                    check_spans_internal(g.stream(), lines);
                }
            }
        }
    }

    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}

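// Without a real macro invocation, call_site() spans collapse to line 1,
// column 0 in the "<unspecified>" source file, which is not real.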
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(feature = "nightly"))]
#[test]
fn default_span() {
    let start = Span::call_site().start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = Span::call_site().end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = Span::call_site().source_file();
    assert_eq!(source_file.path().to_string(), "<unspecified>");
    assert!(!source_file.is_real());
}

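// join() should combine two spans from the same source file and return None
// for spans from different files.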
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    let source1 = "aaa\nbbb"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();
    let source2 = "ccc\nddd"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();

    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(
        joined1.unwrap().source_file(),
        source1[0].span().source_file()
    );
}

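// Malformed input containing NUL bytes must produce a parse error, not a panic.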
#[test]
fn no_panic() {
    let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
    assert!(s.parse::<proc_macro2::TokenStream>().is_err());
}

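// Doc comments surface as their attribute form: an empty block comment yields
// no tokens, `/// doc` yields `#` plus a bracketed `doc = " doc"` group, and
// inner `//!` comments yield three tokens for the `#![...]` form.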
#[test]
fn tricky_doc_comment() {
    let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);

    let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
    match tokens[0] {
        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
        _ => panic!("wrong token {:?}", tokens[0]),
    }
    let mut tokens = match tokens[1] {
        proc_macro2::TokenTree::Group(ref tt) => {
            assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
            tt.stream().into_iter()
        }
        _ => panic!("wrong token {:?}", tokens[1]),
    };

    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Literal(ref tt) => {
            assert_eq!(tt.to_string(), "\" doc\"");
        }
        t => panic!("wrong token {:?}", t),
    }
    assert!(tokens.next().is_none());

    let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}

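// A raw identifier parses as a single Term whose string keeps the `r#` prefix.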
#[test]
fn raw_identifier() {
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tts.next().is_none());
}