use crate::error::Result;
use crate::version;

use indexmap::IndexMap;
use quote::quote;
use syn::parse::Parser;
use syn::{parse_quote, Data, DataStruct, DeriveInput, Ident, Item};
use syn_codegen as types;

use std::collections::BTreeMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;

const SYN_CRATE_ROOT: &str = "../src/lib.rs";
const TOKEN_SRC: &str = "../src/token.rs";
const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"];
const EXTRA_TYPES: &[&str] = &["Lifetime"];

// NOTE: BTreeMap is used here instead of HashMap to have deterministic output.
type ItemLookup = BTreeMap<Ident, AstItem>;
type TokenLookup = BTreeMap<String, String>;

/// Parse the contents of `src` and return a list of AST types.
pub fn parse() -> Result<types::Definitions> {
    let mut item_lookup = BTreeMap::new();
    load_file(SYN_CRATE_ROOT, &[], &mut item_lookup)?;

    let token_lookup = load_token_file(TOKEN_SRC)?;

    let version = version::get()?;

    let types = item_lookup
        .values()
        .map(|item| introspect_item(item, &item_lookup, &token_lookup))
        .collect();

    let tokens = token_lookup
        .into_iter()
        .map(|(name, ty)| (ty, name))
        .collect();

    Ok(types::Definitions {
        version,
        types,
        tokens,
    })
}

/// Data extracted from syn source
#[derive(Clone)]
pub struct AstItem {
    ast: DeriveInput,
    features: Vec<syn::Attribute>,
}

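// Convert a loaded AstItem into its serializable types::Node form, classifying
// its data as an enum, a struct with all-public fields, or a private struct.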
fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
    let features = introspect_features(&item.features);

    match &item.ast.data {
        Data::Enum(ref data) => types::Node {
            ident: item.ast.ident.to_string(),
            features,
            data: types::Data::Enum(introspect_enum(data, items, tokens)),
        },
        Data::Struct(ref data) => types::Node {
            ident: item.ast.ident.to_string(),
            features,
            data: {
                if data.fields.iter().all(|f| is_pub(&f.vis)) {
                    types::Data::Struct(introspect_struct(data, items, tokens))
                } else {
                    types::Data::Private
                }
            },
        },
        Data::Union(..) => panic!("Union not supported"),
    }
}

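// Record each enum variant as its name plus the types of its unnamed fields;
// unit variants contribute an empty field list.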
fn introspect_enum(
    item: &syn::DataEnum,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Variants {
    item.variants
        .iter()
        .map(|variant| {
            let fields = match &variant.fields {
                syn::Fields::Unnamed(fields) => fields
                    .unnamed
                    .iter()
                    .map(|field| introspect_type(&field.ty, items, tokens))
                    .collect(),
                syn::Fields::Unit => vec![],
                _ => panic!("Enum representation not supported"),
            };

            (variant.ident.to_string(), fields)
        })
        .collect()
}

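// Record each named struct field as its name plus its introspected type; unit
// structs produce an empty field map.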
fn introspect_struct(
    item: &syn::DataStruct,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Fields {
    match &item.fields {
        syn::Fields::Named(fields) => fields
            .named
            .iter()
            .map(|field| {
                (
                    field.ident.as_ref().unwrap().to_string(),
                    introspect_type(&field.ty, items, tokens),
                )
            })
            .collect(),
        syn::Fields::Unit => IndexMap::new(),
        _ => panic!("Struct representation not supported"),
    }
}

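// Translate a Rust type appearing in the syn source into the schema's
// types::Type, recursing through Option, Punctuated, Vec, Box, tuples, and
// Token! macros.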
fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type {
    match item {
        syn::Type::Path(syn::TypePath {
            qself: None,
            ref path,
        }) => {
            let last = path.segments.last().unwrap().into_value();
            let string = last.ident.to_string();

            match string.as_str() {
                "Option" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Option(Box::new(nested))
                }
                "Punctuated" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    let punct = match introspect_type(last_arg(&last.arguments), items, tokens) {
                        types::Type::Token(s) => s,
                        _ => panic!(),
                    };

                    types::Type::Punctuated(types::Punctuated {
                        element: Box::new(nested),
                        punct,
                    })
                }
                "Vec" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Vec(Box::new(nested))
                }
                "Box" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Box(Box::new(nested))
                }
                "Brace" | "Bracket" | "Paren" | "Group" => types::Type::Group(string),
                "TokenStream" | "Literal" | "Ident" | "Span" => types::Type::Ext(string),
                "String" | "u32" | "usize" | "bool" => types::Type::Std(string),
                _ => {
                    if items.get(&last.ident).is_some() {
                        types::Type::Syn(string)
                    } else {
                        unimplemented!("{}", string);
                    }
                }
            }
        }
        syn::Type::Tuple(syn::TypeTuple { ref elems, .. }) => {
            let tys = elems
                .iter()
                .map(|ty| introspect_type(&ty, items, tokens))
                .collect();
            types::Type::Tuple(tys)
        }
        syn::Type::Macro(syn::TypeMacro { ref mac })
            if mac.path.segments.last().unwrap().into_value().ident == "Token" =>
        {
            let content = mac.tts.to_string();
            let ty = tokens.get(&content).unwrap().to_string();

            types::Type::Token(ty)
        }
        _ => panic!("{}", quote!(#item).to_string()),
    }
}

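// Combine the #[cfg(...)] attributes on an item into a single feature set,
// keeping the most restrictive set and asserting that the sets are nested.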
fn introspect_features(attrs: &[syn::Attribute]) -> types::Features {
    let mut ret = types::Features::default();

    for attr in attrs {
        if !attr.path.is_ident("cfg") {
            continue;
        }

        let features = parsing::parse_features.parse2(attr.tts.clone()).unwrap();

        if ret.any.is_empty() {
            ret = features;
        } else if ret.any.len() < features.any.len() {
            assert!(ret.any.iter().all(|f| features.any.contains(f)));
        } else {
            assert!(features.any.iter().all(|f| ret.any.contains(f)));
            ret = features;
        }
    }

    ret
}

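// Only fields with an explicit `pub` visibility count as public.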
fn is_pub(vis: &syn::Visibility) -> bool {
    match vis {
        syn::Visibility::Public(_) => true,
        _ => false,
    }
}

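// Extract the first (respectively last) generic type argument of an
// angle-bracketed path, e.g. the `T` in `Option<T>`.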
fn first_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .first()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

fn last_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .last()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

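// Parsers for the macro invocations that declare syn's AST: ast_struct!,
// ast_enum!, ast_enum_of_structs!, the Token! table, and #[cfg] feature lists.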
mod parsing {
    use super::{AstItem, TokenLookup};

    use proc_macro2::TokenStream;
    use quote::quote;
    use syn;
    use syn::parse::{Parse, ParseStream, Result};
    use syn::*;
    use syn_codegen as types;

    use std::collections::{BTreeMap, BTreeSet};

    fn peek_tag(input: ParseStream, tag: &str) -> bool {
        let ahead = input.fork();
        ahead.parse::<Token![#]>().is_ok()
            && ahead
                .parse::<Ident>()
                .map(|ident| ident == tag)
                .unwrap_or(false)
    }

    // Parses #full - returns #[cfg(feature = "full")] if it is present, and
    // nothing otherwise.
    fn full(input: ParseStream) -> Vec<syn::Attribute> {
        if peek_tag(input, "full") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            vec![parse_quote!(#[cfg(feature = "full")])]
        } else {
            vec![]
        }
    }

    fn skip_manual_extra_traits(input: ParseStream) {
        if peek_tag(input, "manual_extra_traits") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
        }
    }

    // Parses a simple AstStruct without the `pub struct` prefix.
    fn ast_struct_inner(input: ParseStream) -> Result<AstItem> {
        let ident: Ident = input.parse()?;
        let features = full(input);
        skip_manual_extra_traits(input);
        let rest: TokenStream = input.parse()?;
        Ok(AstItem {
            ast: syn::parse2(quote! {
                pub struct #ident #rest
            })?,
            features,
        })
    }

    // ast_struct! parsing
    pub struct AstStruct(pub(super) Vec<AstItem>);
    impl Parse for AstStruct {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![struct]>()?;
            let res = input.call(ast_struct_inner)?;
            Ok(AstStruct(vec![res]))
        }
    }

    fn no_visit(input: ParseStream) -> bool {
        if peek_tag(input, "no_visit") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            true
        } else {
            false
        }
    }

    // ast_enum! parsing
    pub struct AstEnum(pub Vec<AstItem>);
    impl Parse for AstEnum {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;
            let no_visit = no_visit(input);
            let rest: TokenStream = input.parse()?;
            Ok(AstEnum(if no_visit {
                vec![]
            } else {
                vec![AstItem {
                    ast: syn::parse2(quote! {
                        pub enum #ident #rest
                    })?,
                    features: vec![],
                }]
            }))
        }
    }

    // A single variant of an ast_enum_of_structs!
    struct EosVariant {
        name: Ident,
        member: Option<Path>,
        inner: Option<AstItem>,
    }
    fn eos_variant(input: ParseStream) -> Result<EosVariant> {
        input.call(Attribute::parse_outer)?;
        input.parse::<Token![pub]>()?;
        let variant: Ident = input.parse()?;
        let (member, inner) = if input.peek(token::Paren) {
            let content;
            parenthesized!(content in input);
            if content.fork().call(ast_struct_inner).is_ok() {
                let item = content.call(ast_struct_inner)?;
                (Some(Path::from(item.ast.ident.clone())), Some(item))
            } else {
                let path: Path = content.parse()?;
                (Some(path), None)
            }
        } else {
            (None, None)
        };
        input.parse::<Token![,]>()?;
        Ok(EosVariant {
            name: variant,
            member,
            inner,
        })
    }

    // ast_enum_of_structs! parsing
    pub struct AstEnumOfStructs(pub Vec<AstItem>);
    impl Parse for AstEnumOfStructs {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;

            let content;
            braced!(content in input);
            let mut variants = Vec::new();
            while !content.is_empty() {
                variants.push(content.call(eos_variant)?);
            }

            if let Some(ident) = input.parse::<Option<Ident>>()? {
                assert_eq!(ident, "do_not_generate_to_tokens");
            }

            let enum_item = {
                let variants = variants.iter().map(|v| {
                    let name = v.name.clone();
                    match v.member {
                        Some(ref member) => quote!(#name(#member)),
                        None => quote!(#name),
                    }
                });
                parse_quote! {
                    pub enum #ident {
                        #(#variants),*
                    }
                }
            };
            let mut items = vec![AstItem {
                ast: enum_item,
                features: vec![],
            }];
            items.extend(variants.into_iter().filter_map(|v| v.inner));
            Ok(AstEnumOfStructs(items))
        }
    }

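    // Token! macro parsing: maps each token's display text (the parenthesized
    // matcher) to the name of the token type it expands to.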
    pub struct TokenMacro(pub TokenLookup);
    impl Parse for TokenMacro {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut tokens = BTreeMap::new();
            while !input.is_empty() {
                let content;
                parenthesized!(content in input);
                let token = content.parse::<TokenStream>()?.to_string();
                input.parse::<Token![=]>()?;
                input.parse::<Token![>]>()?;
                let content;
                braced!(content in input);
                input.parse::<Token![;]>()?;
                content.parse::<token::Dollar>()?;
                let path: Path = content.parse()?;
                let ty = path.segments.last().unwrap().into_value().ident.to_string();
                tokens.insert(token, ty.to_string());
            }
            Ok(TokenMacro(tokens))
        }
    }

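    // Parses a single `feature = "name"` pair and returns the feature name.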
    fn parse_feature(input: ParseStream) -> Result<String> {
        let i: syn::Ident = input.parse()?;
        assert_eq!(i, "feature");

        input.parse::<Token![=]>()?;
        let s = input.parse::<syn::LitStr>()?;

        Ok(s.value())
    }

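    // Parses the argument of a #[cfg(...)] attribute, accepting either
    // `feature = "..."` or `any(feature = "...", ...)`.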
    pub fn parse_features(input: ParseStream) -> Result<types::Features> {
        let mut features = BTreeSet::new();

        let level_1;
        parenthesized!(level_1 in input);

        let i: syn::Ident = level_1.fork().parse()?;

        if i == "any" {
            level_1.parse::<syn::Ident>()?;

            let level_2;
            parenthesized!(level_2 in level_1);

            while !level_2.is_empty() {
                features.insert(parse_feature(&level_2)?);

                if !level_2.is_empty() {
                    level_2.parse::<Token![,]>()?;
                }
            }
        } else if i == "feature" {
            features.insert(parse_feature(&level_1)?);
            assert!(level_1.is_empty());
        } else {
            panic!("{:?}", i);
        }

        assert!(input.is_empty());

        Ok(types::Features { any: features })
    }
}

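// Extend the inherited feature attributes with any #[cfg(...)] attributes
// found on this item.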
fn get_features(attrs: &[syn::Attribute], base: &[syn::Attribute]) -> Vec<syn::Attribute> {
    let mut ret = base.to_owned();

    for attr in attrs {
        if attr.path.is_ident("cfg") {
            ret.push(attr.clone());
        }
    }

    ret
}

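// Recursively load a source file and its non-inline submodules, registering in
// `lookup` every AST item declared through the ast_struct!, ast_enum!, and
// ast_enum_of_structs! macros.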
fn load_file<P: AsRef<Path>>(
    name: P,
    features: &[syn::Attribute],
    lookup: &mut ItemLookup,
) -> Result<()> {
    let name = name.as_ref();
    let parent = name.parent().expect("no parent path");

    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;

    // Parse the file
    let file = syn::parse_file(&src)?;

    // Collect all of the interesting AstItems declared in this file or submodules.
    'items: for item in file.items {
        match item {
            Item::Mod(item) => {
                // Don't inspect inline modules.
                if item.content.is_some() {
                    continue;
                }

                // We don't want to try to load the generated rust files and
                // parse them, so we ignore them here.
                for name in IGNORED_MODS {
                    if item.ident == name {
                        continue 'items;
                    }
                }

                // Look up any #[cfg()] attributes on the module and add them to
                // the feature set.
                //
                // The derive module is weird because it is built with either
                // `full` or `derive` but exported only under `derive`.
                let features = if item.ident == "derive" {
                    vec![parse_quote!(#[cfg(feature = "derive")])]
                } else {
                    get_features(&item.attrs, features)
                };

                // Look up the submodule file, and recursively parse it.
                // XXX: Only handles same-directory .rs file submodules.
                let path = parent.join(&format!("{}.rs", item.ident));
                load_file(path, &features, lookup)?;
            }
            Item::Macro(item) => {
                // Look up any #[cfg()] attributes directly on the macro
                // invocation, and add them to the feature set.
                let features = get_features(&item.attrs, features);

                // Try to parse the AstItem declaration out of the item.
                let tts = &item.mac.tts;
                let found = if item.mac.path.is_ident("ast_struct") {
                    syn::parse2::<parsing::AstStruct>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum") {
                    syn::parse2::<parsing::AstEnum>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum_of_structs") {
                    syn::parse2::<parsing::AstEnumOfStructs>(quote!(#tts))?.0
                } else {
                    continue;
                };

                // Record our features on the parsed AstItems.
                for mut item in found {
                    item.features.extend(features.clone());
                    lookup.insert(item.ast.ident.clone(), item);
                }
            }
            Item::Struct(item) => {
                let ident = item.ident;
                if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
                    lookup.insert(
                        ident.clone(),
                        AstItem {
                            ast: DeriveInput {
                                ident,
                                vis: item.vis,
                                attrs: item.attrs,
                                generics: item.generics,
                                data: Data::Struct(DataStruct {
                                    fields: item.fields,
                                    struct_token: item.struct_token,
                                    semi_token: item.semi_token,
                                }),
                            },
                            features: features.to_owned(),
                        },
                    );
                }
            }
            _ => {}
        }
    }
    Ok(())
}

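// Extract the token lookup table from the macro named Token in src/token.rs.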
fn load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup> {
    let name = name.as_ref();
    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;
    let file = syn::parse_file(&src)?;
    for item in file.items {
        match item {
            Item::Macro(item) => {
                match item.ident {
                    Some(ref i) if i == "Token" => {}
                    _ => continue,
                }
                let tts = &item.mac.tts;
                let tokens = syn::parse2::<parsing::TokenMacro>(quote!(#tts))?.0;
                return Ok(tokens);
            }
            _ => {}
        }
    }

    panic!("failed to parse Token macro")
}