use crate::types;

use indexmap::IndexMap;
use syn::{Data, DataStruct, DeriveInput, Ident, Item};

use std::collections::BTreeMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;

const SYN_CRATE_ROOT: &str = "../src/lib.rs";
const TOKEN_SRC: &str = "../src/token.rs";
const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"];
const EXTRA_TYPES: &[&str] = &["Lifetime"];

// NOTE: BTreeMap is used here instead of HashMap to have deterministic output.
type ItemLookup = BTreeMap<Ident, AstItem>;
type TokenLookup = BTreeMap<String, String>;

/// Parse the contents of the syn `src/` directory and return a list of AST types.
pub fn parse() -> types::Definitions {
    let mut item_lookup = BTreeMap::new();
    load_file(SYN_CRATE_ROOT, &[], &mut item_lookup).unwrap();

    let token_lookup = load_token_file(TOKEN_SRC).unwrap();

    let types = item_lookup
        .values()
        .map(|item| introspect_item(item, &item_lookup, &token_lookup))
        .collect();

    let tokens = token_lookup
        .into_iter()
        .map(|(name, ty)| (ty, name))
        .collect();

    types::Definitions { types, tokens }
}

/// Data extracted from syn source
#[derive(Clone)]
pub struct AstItem {
    ast: DeriveInput,
    features: Vec<syn::Attribute>,
}

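// Converts a parsed `AstItem` into its serializable `types::Node` description.
// Structs whose fields are not all public are recorded as `types::Data::Private`.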
fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
    let features = introspect_features(&item.features);

    match &item.ast.data {
        Data::Enum(ref data) => types::Node {
            ident: item.ast.ident.to_string(),
            features,
            data: types::Data::Enum(introspect_enum(data, items, tokens)),
        },
        Data::Struct(ref data) => types::Node {
            ident: item.ast.ident.to_string(),
            features,
            data: {
                if data.fields.iter().all(|f| is_pub(&f.vis)) {
                    types::Data::Struct(introspect_struct(data, items, tokens))
                } else {
                    types::Data::Private
                }
            },
        },
        Data::Union(..) => panic!("Union not supported"),
    }
}

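// Describes each variant of an enum as the list of types it carries; unit
// variants map to an empty list.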
fn introspect_enum(
    item: &syn::DataEnum,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Variants {
    item.variants
        .iter()
        .map(|variant| {
            let fields = match &variant.fields {
                syn::Fields::Unnamed(fields) => fields
                    .unnamed
                    .iter()
                    .map(|field| introspect_type(&field.ty, items, tokens))
                    .collect(),
                syn::Fields::Unit => vec![],
                _ => panic!("Enum representation not supported"),
            };

            (variant.ident.to_string(), fields)
        })
        .collect()
}

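// Maps each named field of a struct to its introspected type, preserving
// declaration order.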
fn introspect_struct(
    item: &syn::DataStruct,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Fields {
    match &item.fields {
        syn::Fields::Named(fields) => fields
            .named
            .iter()
            .map(|field| {
                (
                    field.ident.as_ref().unwrap().to_string(),
                    introspect_type(&field.ty, items, tokens),
                )
            })
            .collect(),
        syn::Fields::Unit => IndexMap::new(),
        _ => panic!("Struct representation not supported"),
    }
}

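// Translates a type as written in the syn source into a `types::Type`,
// recursing through Option, Punctuated, Vec, Box, tuples, and Token! macros.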
fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type {
    match item {
        syn::Type::Path(syn::TypePath {
            qself: None,
            ref path,
        }) => {
            let last = path.segments.last().unwrap().into_value();
            let string = last.ident.to_string();

            match string.as_str() {
                "Option" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Option(Box::new(nested))
                }
                "Punctuated" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    let punct = match introspect_type(last_arg(&last.arguments), items, tokens) {
                        types::Type::Token(s) => s,
                        _ => panic!(),
                    };

                    types::Type::Punctuated(types::Punctuated {
                        element: Box::new(nested),
                        punct,
                    })
                }
                "Vec" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Vec(Box::new(nested))
                }
                "Box" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Box(Box::new(nested))
                }
                "Brace" | "Bracket" | "Paren" | "Group" => types::Type::Group(string),
                "TokenStream" | "Literal" | "Ident" | "Span" => types::Type::Ext(string),
                "String" | "u32" | "usize" | "bool" => types::Type::Std(string),
                _ => {
                    if items.get(&last.ident).is_some() {
                        types::Type::Syn(string)
                    } else {
                        unimplemented!("{}", string);
                    }
                }
            }
        }
        syn::Type::Tuple(syn::TypeTuple { ref elems, .. }) => {
            let tys = elems
                .iter()
                .map(|ty| introspect_type(&ty, items, tokens))
                .collect();
            types::Type::Tuple(tys)
        }
        syn::Type::Macro(syn::TypeMacro { ref mac })
            if mac.path.segments.last().unwrap().into_value().ident == "Token" =>
        {
            let content = mac.tts.to_string();
            let ty = tokens.get(&content).unwrap().to_string();

            types::Type::Token(ty)
        }
        _ => panic!("{}", quote!(#item).to_string()),
    }
}

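// Reduces an item's `#[cfg(...)]` attributes to a single feature set, keeping
// the smaller of any two sets and asserting that one contains the other.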
fn introspect_features(attrs: &[syn::Attribute]) -> types::Features {
    let mut ret = types::Features::default();

    for attr in attrs {
        if !attr.path.is_ident("cfg") {
            continue;
        }

        let features: types::Features = syn::parse2(attr.tts.clone()).unwrap();

        if ret.any.is_empty() {
            ret = features;
        } else if ret.any.len() < features.any.len() {
            assert!(ret.any.iter().all(|f| features.any.contains(f)));
        } else {
            assert!(features.any.iter().all(|f| ret.any.contains(f)));
            ret = features;
        }
    }

    ret
}

fn is_pub(vis: &syn::Visibility) -> bool {
    match vis {
        syn::Visibility::Public(_) => true,
        _ => false,
    }
}

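// Returns the first generic type argument of a path segment, e.g. `T` in `Option<T>`.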
fn first_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .first()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

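// Returns the last generic type argument of a path segment, e.g. `P` in `Punctuated<T, P>`.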
fn last_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .last()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

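// Parsers for the `ast_struct!`, `ast_enum!`, `ast_enum_of_structs!`, and
// `Token!` macro invocations that declare syn's AST types.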
mod parsing {
    use super::{AstItem, TokenLookup};
    use crate::types;

    use proc_macro2::TokenStream;
    use syn;
    use syn::parse::{Parse, ParseStream, Result};
    use syn::*;

    use std::collections::{BTreeMap, BTreeSet};

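    // Returns true if the next tokens in the stream are `#` followed by the
    // given tag identifier, without consuming them.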
    fn peek_tag(input: ParseStream, tag: &str) -> bool {
        let ahead = input.fork();
        ahead.parse::<Token![#]>().is_ok()
            && ahead
                .parse::<Ident>()
                .map(|ident| ident == tag)
                .unwrap_or(false)
    }

    // Parses #full - returns #[cfg(feature = "full")] if it is present, and
    // nothing otherwise.
    fn full(input: ParseStream) -> Vec<syn::Attribute> {
        if peek_tag(input, "full") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            vec![parse_quote!(#[cfg(feature = "full")])]
        } else {
            vec![]
        }
    }

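    // Consumes a #manual_extra_traits tag if present; the tag does not affect
    // the extracted definitions.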
    fn skip_manual_extra_traits(input: ParseStream) {
        if peek_tag(input, "manual_extra_traits") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
        }
    }

    // Parses a simple AstStruct without the `pub struct` prefix.
    fn ast_struct_inner(input: ParseStream) -> Result<AstItem> {
        let ident: Ident = input.parse()?;
        let features = full(input);
        skip_manual_extra_traits(input);
        let rest: TokenStream = input.parse()?;
        Ok(AstItem {
            ast: syn::parse2(quote! {
                pub struct #ident #rest
            })?,
            features,
        })
    }

    // ast_struct! parsing
    pub struct AstStruct(pub(super) Vec<AstItem>);
    impl Parse for AstStruct {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![struct]>()?;
            let res = input.call(ast_struct_inner)?;
            Ok(AstStruct(vec![res]))
        }
    }

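    // Parses #no_visit - returns true if present, in which case the enum is
    // omitted from the generated definitions.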
    fn no_visit(input: ParseStream) -> bool {
        if peek_tag(input, "no_visit") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            true
        } else {
            false
        }
    }

    // ast_enum! parsing
    pub struct AstEnum(pub Vec<AstItem>);
    impl Parse for AstEnum {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;
            let no_visit = no_visit(input);
            let rest: TokenStream = input.parse()?;
            Ok(AstEnum(if no_visit {
                vec![]
            } else {
                vec![AstItem {
                    ast: syn::parse2(quote! {
                        pub enum #ident #rest
                    })?,
                    features: vec![],
                }]
            }))
        }
    }

    // A single variant of an ast_enum_of_structs!
    struct EosVariant {
        name: Ident,
        member: Option<Path>,
        inner: Option<AstItem>,
    }
    fn eos_variant(input: ParseStream) -> Result<EosVariant> {
        input.call(Attribute::parse_outer)?;
        input.parse::<Token![pub]>()?;
        let variant: Ident = input.parse()?;
        let (member, inner) = if input.peek(token::Paren) {
            let content;
            parenthesized!(content in input);
            if content.fork().call(ast_struct_inner).is_ok() {
                let item = content.call(ast_struct_inner)?;
                (Some(Path::from(item.ast.ident.clone())), Some(item))
            } else {
                let path: Path = content.parse()?;
                (Some(path), None)
            }
        } else {
            (None, None)
        };
        input.parse::<Token![,]>()?;
        Ok(EosVariant {
            name: variant,
            member,
            inner,
        })
    }

    // ast_enum_of_structs! parsing
    pub struct AstEnumOfStructs(pub Vec<AstItem>);
    impl Parse for AstEnumOfStructs {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;

            let content;
            braced!(content in input);
            let mut variants = Vec::new();
            while !content.is_empty() {
                variants.push(content.call(eos_variant)?);
            }

            if let Some(ident) = input.parse::<Option<Ident>>()? {
                assert_eq!(ident, "do_not_generate_to_tokens");
            }

            let enum_item = {
                let variants = variants.iter().map(|v| {
                    let name = v.name.clone();
                    match v.member {
                        Some(ref member) => quote!(#name(#member)),
                        None => quote!(#name),
                    }
                });
                parse_quote! {
                    pub enum #ident {
                        #(#variants),*
                    }
                }
            };
            let mut items = vec![AstItem {
                ast: enum_item,
                features: vec![],
            }];
            items.extend(variants.into_iter().filter_map(|v| v.inner));
            Ok(AstEnumOfStructs(items))
        }
    }

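    // Token! macro parsing: maps the tokens of each rule's left-hand side (as
    // a string) to the name of the corresponding type in `syn::token`.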
    pub struct TokenMacro(pub TokenLookup);
    impl Parse for TokenMacro {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut tokens = BTreeMap::new();
            while !input.is_empty() {
                let content;
                parenthesized!(content in input);
                let token = content.parse::<TokenStream>()?.to_string();
                input.parse::<Token![=]>()?;
                input.parse::<Token![>]>()?;
                let content;
                braced!(content in input);
                input.parse::<Token![;]>()?;
                content.parse::<token::Dollar>()?;
                let path: Path = content.parse()?;
                let ty = path.segments.last().unwrap().into_value().ident.to_string();
                tokens.insert(token, ty.to_string());
            }
            Ok(TokenMacro(tokens))
        }
    }

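    // Parses a single `feature = "..."` pair and returns the feature name.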
    fn parse_feature(input: ParseStream) -> Result<String> {
        let i: syn::Ident = input.parse()?;
        assert_eq!(i, "feature");

        input.parse::<Token![=]>()?;
        let s = input.parse::<syn::LitStr>()?;

        Ok(s.value())
    }

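    // Parses the argument tokens of a `#[cfg(...)]` attribute: either a single
    // `(feature = "...")` or `(any(feature = "...", ...))`.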
    impl Parse for types::Features {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut features = BTreeSet::new();

            let level_1;
            parenthesized!(level_1 in input);

            let i: syn::Ident = level_1.fork().parse()?;

            if i == "any" {
                level_1.parse::<syn::Ident>()?;

                let level_2;
                parenthesized!(level_2 in level_1);

                while !level_2.is_empty() {
                    features.insert(parse_feature(&level_2)?);

                    if !level_2.is_empty() {
                        level_2.parse::<Token![,]>()?;
                    }
                }
            } else if i == "feature" {
                features.insert(parse_feature(&level_1)?);
                assert!(level_1.is_empty());
            } else {
                panic!("{:?}", i);
            }

            assert!(input.is_empty());

            Ok(types::Features { any: features })
        }
    }
}

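// Combines the inherited `base` feature attributes with any `#[cfg(...)]`
// attributes on the current item.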
fn get_features(attrs: &[syn::Attribute], base: &[syn::Attribute]) -> Vec<syn::Attribute> {
    let mut ret = base.to_owned();

    for attr in attrs {
        if attr.path.is_ident("cfg") {
            ret.push(attr.clone());
        }
    }

    ret
}

type Error = Box<::std::error::Error>;

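// Reads a syn source file, recursing into its non-inline submodules, and
// records every AST type declared through the ast_* macros (plus the
// hand-written EXTRA_TYPES structs) in `lookup`.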
fn load_file<P: AsRef<Path>>(
    name: P,
    features: &[syn::Attribute],
    lookup: &mut ItemLookup,
) -> Result<(), Error> {
    let name = name.as_ref();
    let parent = name.parent().ok_or("no parent path")?;

    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;

    // Parse the file
    let file = syn::parse_file(&src)?;

    // Collect all of the interesting AstItems declared in this file or submodules.
    'items: for item in file.items {
        match item {
            Item::Mod(item) => {
                // Don't inspect inline modules.
                if item.content.is_some() {
                    continue;
                }

                // We don't want to try to load the generated rust files and
                // parse them, so we ignore them here.
                for name in IGNORED_MODS {
                    if item.ident == name {
                        continue 'items;
                    }
                }

                // Look up any #[cfg()] attributes on the module and add them to
                // the feature set.
                //
                // The derive module is weird because it is built with either
                // `full` or `derive` but exported only under `derive`.
                let features = if item.ident == "derive" {
                    vec![parse_quote!(#[cfg(feature = "derive")])]
                } else {
                    get_features(&item.attrs, features)
                };

                // Look up the submodule file, and recursively parse it.
                // XXX: Only handles same-directory .rs file submodules.
                let path = parent.join(&format!("{}.rs", item.ident));
                load_file(path, &features, lookup)?;
            }
            Item::Macro(item) => {
                // Look up any #[cfg()] attributes directly on the macro
                // invocation, and add them to the feature set.
                let features = get_features(&item.attrs, features);

                // Try to parse the AstItem declaration out of the item.
                let tts = &item.mac.tts;
                let found = if item.mac.path.is_ident("ast_struct") {
                    syn::parse2::<parsing::AstStruct>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum") {
                    syn::parse2::<parsing::AstEnum>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum_of_structs") {
                    syn::parse2::<parsing::AstEnumOfStructs>(quote!(#tts))?.0
                } else {
                    continue;
                };

                // Record our features on the parsed AstItems.
                for mut item in found {
                    item.features.extend(features.clone());
                    lookup.insert(item.ast.ident.clone(), item);
                }
            }
            Item::Struct(item) => {
                let ident = item.ident;
                if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
                    lookup.insert(
                        ident.clone(),
                        AstItem {
                            ast: DeriveInput {
                                ident,
                                vis: item.vis,
                                attrs: item.attrs,
                                generics: item.generics,
                                data: Data::Struct(DataStruct {
                                    fields: item.fields,
                                    struct_token: item.struct_token,
                                    semi_token: item.semi_token,
                                }),
                            },
                            features: features.to_owned(),
                        },
                    );
                }
            }
            _ => {}
        }
    }
    Ok(())
}

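// Finds the `Token` macro_rules definition in src/token.rs and builds the
// token lookup table from its rules.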
fn load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup, Error> {
    let name = name.as_ref();
    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;
    let file = syn::parse_file(&src)?;
    for item in file.items {
        match item {
            Item::Macro(item) => {
                match item.ident {
                    Some(ref i) if i == "Token" => {}
                    _ => continue,
                }
                let tts = &item.mac.tts;
                let tokens = syn::parse2::<parsing::TokenMacro>(quote!(#tts))?.0;
                return Ok(tokens);
            }
            _ => {}
        }
    }

    Err("failed to parse Token macro".into())
}