blob: 7bd96f0c8e1d040b735a29bec01f7959fd71d188 [file] [log] [blame]
David Tolnaye5806852017-06-01 12:49:20 -07001extern crate proc_macro2;
2
Alex Crichton8c030332018-01-16 08:07:36 -08003use std::str;
4
Alex Crichtonaf5bad42018-03-27 14:45:10 -07005use proc_macro2::{Term, Literal, TokenStream, Span};
David Tolnaye5806852017-06-01 12:49:20 -07006
#[test]
fn symbols() {
    // `Term::new` must preserve the identifier text verbatim, and
    // `as_str` must hand the same text back.
    for &ident in &["foo", "bar"] {
        assert_eq!(Term::new(ident, Span::call_site()).as_str(), ident);
    }
}
12
#[test]
fn literals() {
    // String literals print with surrounding quotes, and embedded quotes
    // are escaped in the printed form.
    let plain = Literal::string("foo");
    assert_eq!(plain.to_string(), "\"foo\"");

    let quoted = Literal::string("\"");
    assert_eq!(quoted.to_string(), "\"\\\"\"");

    // An unsuffixed f32 literal prints with no `f32` suffix.
    let float = Literal::f32_unsuffixed(10.0);
    assert_eq!(float.to_string(), "10.0");
}
19
#[test]
fn roundtrip() {
    /// Parse `p` into a `TokenStream`, print it, re-parse the printed
    /// form, and assert the two printed forms agree — i.e. printing is
    /// a fixed point after one parse/print cycle.
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        // `s` is already a `String`; parse it directly instead of the
        // original's redundant `s.to_string()` round-trip allocation.
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip("
        /// a
        wut
    ");
    roundtrip("
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
    ");
    roundtrip("'a");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
56
#[test]
fn fail() {
    /// Assert that `p` is rejected by the `TokenStream` parser.
    fn fail(p: &str) {
        match p.parse::<TokenStream>() {
            Ok(_) => panic!("should have failed to parse: {}", p),
            Err(_) => {}
        }
    }
    fail("1x");
    fail("1u80");
    fail("1f320");
    fail("' static");
    fail("'mut");
}
Nika Layzellf8d5f212017-12-11 14:07:02 -050070
David Tolnay1ebe3972018-01-02 20:14:20 -080071#[cfg(procmacro2_semver_exempt)]
Nika Layzellf8d5f212017-12-11 14:07:02 -050072#[test]
73fn span_test() {
Alex Crichtonaf5bad42018-03-27 14:45:10 -070074 use proc_macro2::TokenTree;
75
Nika Layzellf8d5f212017-12-11 14:07:02 -050076 fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
Nika Layzellf8d5f212017-12-11 14:07:02 -050077 let ts = p.parse::<TokenStream>().unwrap();
78 check_spans_internal(ts, &mut lines);
79 }
80
81 fn check_spans_internal(
82 ts: TokenStream,
83 lines: &mut &[(usize, usize, usize, usize)],
84 ) {
85 for i in ts {
86 if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
87 *lines = rest;
88
Alex Crichtonaf5bad42018-03-27 14:45:10 -070089 let start = i.span().start();
Nika Layzellf8d5f212017-12-11 14:07:02 -050090 assert_eq!(start.line, sline, "sline did not match for {}", i);
91 assert_eq!(start.column, scol, "scol did not match for {}", i);
92
Alex Crichtonaf5bad42018-03-27 14:45:10 -070093 let end = i.span().end();
Nika Layzellf8d5f212017-12-11 14:07:02 -050094 assert_eq!(end.line, eline, "eline did not match for {}", i);
95 assert_eq!(end.column, ecol, "ecol did not match for {}", i);
96
Alex Crichtonaf5bad42018-03-27 14:45:10 -070097 match i {
98 TokenTree::Group(ref g) => {
99 check_spans_internal(g.stream().clone(), lines);
100 }
Nika Layzellf8d5f212017-12-11 14:07:02 -0500101 _ => {}
102 }
103 }
104 }
105 }
106
107 check_spans("\
108/// This is a document comment
109testing 123
110{
111 testing 234
112}", &[
113 (1, 0, 1, 30),
114 (2, 0, 2, 7),
115 (2, 8, 2, 11),
116 (3, 0, 5, 1),
117 (4, 2, 4, 9),
118 (4, 10, 4, 13),
119]);
120}
121
David Tolnay1ebe3972018-01-02 20:14:20 -0800122#[cfg(procmacro2_semver_exempt)]
David Tolnayd66ecf62018-01-02 20:05:42 -0800123#[cfg(not(feature = "nightly"))]
Nika Layzellf8d5f212017-12-11 14:07:02 -0500124#[test]
125fn default_span() {
126 let start = Span::call_site().start();
127 assert_eq!(start.line, 1);
128 assert_eq!(start.column, 0);
129 let end = Span::call_site().end();
130 assert_eq!(end.line, 1);
131 assert_eq!(end.column, 0);
132 let source_file = Span::call_site().source_file();
Nika Layzellfb783e32017-12-30 14:58:27 -0500133 assert_eq!(source_file.path().to_string(), "<unspecified>");
Nika Layzellf8d5f212017-12-11 14:07:02 -0500134 assert!(!source_file.is_real());
135}
136
David Tolnay1ebe3972018-01-02 20:14:20 -0800137#[cfg(procmacro2_semver_exempt)]
Nika Layzellddea1562017-12-11 14:25:35 -0500138#[test]
139fn span_join() {
140 let source1 =
141 "aaa\nbbb".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>();
142 let source2 =
143 "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>();
144
Alex Crichtonaf5bad42018-03-27 14:45:10 -0700145 assert!(source1[0].span().source_file() != source2[0].span().source_file());
146 assert_eq!(source1[0].span().source_file(), source1[1].span().source_file());
Nika Layzellddea1562017-12-11 14:25:35 -0500147
Alex Crichtonaf5bad42018-03-27 14:45:10 -0700148 let joined1 = source1[0].span().join(source1[1].span());
149 let joined2 = source1[0].span().join(source2[0].span());
Nika Layzellddea1562017-12-11 14:25:35 -0500150 assert!(joined1.is_some());
151 assert!(joined2.is_none());
152
153 let start = joined1.unwrap().start();
154 let end = joined1.unwrap().end();
155 assert_eq!(start.line, 1);
156 assert_eq!(start.column, 0);
157 assert_eq!(end.line, 2);
158 assert_eq!(end.column, 3);
159
Alex Crichtonaf5bad42018-03-27 14:45:10 -0700160 assert_eq!(joined1.unwrap().source_file(), source1[0].span().source_file());
Nika Layzellddea1562017-12-11 14:25:35 -0500161}
Alex Crichton8c030332018-01-16 08:07:36 -0800162
#[test]
fn no_panic() {
    // Regression test: this byte sequence used to make the lexer panic.
    // It must instead come back as a plain parse error.
    let bytes: &[u8] = b"b\'\xc2\x86  \x00\x00\x00^\"";
    let s = str::from_utf8(bytes).unwrap();
    assert!(s.parse::<proc_macro2::TokenStream>().is_err());
}
168
Alex Crichtonf7df57c2018-01-21 21:05:11 -0800169#[test]
170fn tricky_doc_commaent() {
171 let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
172 let tokens = stream.into_iter().collect::<Vec<_>>();
173 assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
Alex Crichtond7904e52018-01-23 11:08:45 -0800174
175 let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
176 let tokens = stream.into_iter().collect::<Vec<_>>();
177 assert!(tokens.len() == 1, "not length 1 -- {:?}", tokens);
Alex Crichtonaf5bad42018-03-27 14:45:10 -0700178 match tokens[0] {
179 proc_macro2::TokenTree::Literal(_) => {}
Alex Crichtond7904e52018-01-23 11:08:45 -0800180 _ => panic!("wrong token {:?}", tokens[0]),
181 }
Alex Crichtonf7df57c2018-01-21 21:05:11 -0800182}
183