use crate::types;

use indexmap::IndexMap;
use syn::{Data, DataStruct, DeriveInput, Ident, Item};

use std::collections::BTreeMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;

const SYN_CRATE_ROOT: &str = "../src/lib.rs";
const TOKEN_SRC: &str = "../src/token.rs";
const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"];
const EXTRA_TYPES: &[&str] = &["Lifetime"];

// NOTE: BTreeMap is used here instead of HashMap to have deterministic output.
type ItemLookup = BTreeMap<Ident, AstItem>;
type TokenLookup = BTreeMap<String, String>;

/// Parse the contents of `src` and return a list of AST types.
pub fn parse() -> types::Definitions {
    let mut item_lookup = BTreeMap::new();
    load_file(SYN_CRATE_ROOT, &[], &mut item_lookup).unwrap();

    let token_lookup = load_token_file(TOKEN_SRC).unwrap();

    let types = item_lookup
        .values()
        .map(|item| introspect_item(item, &item_lookup, &token_lookup))
        .collect();

    let tokens = token_lookup
        .into_iter()
        .map(|(name, ty)| (ty, name))
        .collect();

    types::Definitions { types, tokens }
}

/// Data extracted from syn source
#[derive(Clone)]
pub struct AstItem {
    ast: DeriveInput,
    features: Vec<syn::Attribute>,
}

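/// Convert a parsed `AstItem` into a serializable `types::Node`, classifying
/// it as an enum, a struct (private if any field is non-public), and rejecting
/// unions.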
fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
    let features = introspect_features(&item.features);

    match &item.ast.data {
        Data::Enum(ref data) => types::Node {
            ident: item.ast.ident.to_string(),
            features,
            data: types::Data::Enum(introspect_enum(data, items, tokens)),
        },
        Data::Struct(ref data) => types::Node {
            ident: item.ast.ident.to_string(),
            features,
            data: {
                if data.fields.iter().all(|f| is_pub(&f.vis)) {
                    types::Data::Struct(introspect_struct(data, items, tokens))
                } else {
                    types::Data::Private
                }
            },
        },
        Data::Union(..) => panic!("Union not supported"),
    }
}

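/// Describe each variant of an enum as a list of the types it contains.
/// Unit variants produce an empty list; only unnamed (tuple) fields are
/// supported.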
fn introspect_enum(
    item: &syn::DataEnum,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Variants {
    item.variants
        .iter()
        .map(|variant| {
            let fields = match &variant.fields {
                syn::Fields::Unnamed(fields) => fields
                    .unnamed
                    .iter()
                    .map(|field| introspect_type(&field.ty, items, tokens))
                    .collect(),
                syn::Fields::Unit => vec![],
                _ => panic!("Enum representation not supported"),
            };

            (variant.ident.to_string(), fields)
        })
        .collect()
}

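/// Describe the named fields of a struct as a map from field name to type.
/// Unit structs produce an empty map; tuple structs are not supported.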
fn introspect_struct(
    item: &syn::DataStruct,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Fields {
    match &item.fields {
        syn::Fields::Named(fields) => fields
            .named
            .iter()
            .map(|field| {
                (
                    field.ident.as_ref().unwrap().to_string(),
                    introspect_type(&field.ty, items, tokens),
                )
            })
            .collect(),
        syn::Fields::Unit => IndexMap::new(),
        _ => panic!("Struct representation not supported"),
    }
}

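/// Translate a Rust type appearing in the syn source into the corresponding
/// `types::Type`, recognizing `Option`, `Punctuated`, `Vec`, `Box`, delimiter
/// tokens, proc-macro2 types, std types, `Token![..]` macros, tuples, and
/// references to other syn AST types.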
fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type {
    match item {
        syn::Type::Path(syn::TypePath {
            qself: None,
            ref path,
        }) => {
            let last = path.segments.last().unwrap().into_value();

            match &last.ident.to_string()[..] {
                "Option" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Option(Box::new(nested))
                }
                "Punctuated" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    let punct = match introspect_type(last_arg(&last.arguments), items, tokens) {
                        types::Type::Token(s) => s,
                        _ => panic!(),
                    };

                    types::Type::Punctuated(types::Punctuated {
                        element: Box::new(nested),
                        punct,
                    })
                }
                "Vec" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Vec(Box::new(nested))
                }
                "Box" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Box(Box::new(nested))
                }
                "Brace" | "Bracket" | "Paren" | "Group" => {
                    types::Type::Group(last.ident.to_string())
                }
                "TokenStream" | "Literal" | "Ident" | "Span" => {
                    types::Type::Ext(last.ident.to_string())
                }
                "String" | "u32" | "usize" | "bool" => types::Type::Std(last.ident.to_string()),
                _ => {
                    if items.get(&last.ident).is_some() {
                        types::Type::Syn(last.ident.to_string())
                    } else {
                        unimplemented!("{}", last.ident.to_string());
                    }
                }
            }
        }
        syn::Type::Tuple(syn::TypeTuple { ref elems, .. }) => {
            let tys = elems
                .iter()
                .map(|ty| introspect_type(&ty, items, tokens))
                .collect();
            types::Type::Tuple(tys)
        }
        syn::Type::Macro(syn::TypeMacro { ref mac })
            if mac.path.segments.last().unwrap().into_value().ident == "Token" =>
        {
            let content = mac.tts.to_string();
            let ty = tokens.get(&content).unwrap().to_string();

            types::Type::Token(ty)
        }
        _ => panic!("{}", quote!(#item).to_string()),
    }
}

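/// Fold the `#[cfg(...)]` attributes attached to an item into a single
/// feature set, keeping the narrower of any two sets and asserting that one
/// is a subset of the other.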
fn introspect_features(attrs: &[syn::Attribute]) -> types::Features {
    let mut ret = types::Features::default();

    for attr in attrs {
        if !attr.path.is_ident("cfg") {
            continue;
        }

        let features: types::Features = syn::parse2(attr.tts.clone()).unwrap();

        if ret.any.is_empty() {
            ret = features;
        } else if ret.any.len() < features.any.len() {
            assert!(ret.any.iter().all(|f| features.any.contains(f)));
        } else {
            assert!(features.any.iter().all(|f| ret.any.contains(f)));
            ret = features;
        }
    }

    ret
}

fn is_pub(vis: &syn::Visibility) -> bool {
    match vis {
        syn::Visibility::Public(_) => true,
        _ => false,
    }
}

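/// Extract the first generic type argument, e.g. the `T` out of `Vec<T>`.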
fn first_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .first()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

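/// Extract the last generic type argument, e.g. the `P` out of `Punctuated<T, P>`.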
fn last_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .last()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

mod parsing {
    use super::{AstItem, TokenLookup};
    use crate::types;

    use proc_macro2::TokenStream;
    use syn;
    use syn::parse::{Parse, ParseStream, Result};
    use syn::*;

    use std::collections::{BTreeMap, BTreeSet};

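    // Checks whether the next tokens are `#` followed by the given tag
    // identifier, without consuming any input.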
    fn peek_tag(input: ParseStream, tag: &str) -> bool {
        let ahead = input.fork();
        ahead.parse::<Token![#]>().is_ok()
            && ahead
                .parse::<Ident>()
                .map(|ident| ident == tag)
                .unwrap_or(false)
    }

    // Parses #full - returns #[cfg(feature = "full")] if it is present, and
    // nothing otherwise.
    fn full(input: ParseStream) -> Vec<syn::Attribute> {
        if peek_tag(input, "full") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            vec![parse_quote!(#[cfg(feature = "full")])]
        } else {
            vec![]
        }
    }

    fn skip_manual_extra_traits(input: ParseStream) {
        if peek_tag(input, "manual_extra_traits") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
        }
    }

    // Parses a simple AstStruct without the `pub struct` prefix.
    fn ast_struct_inner(input: ParseStream) -> Result<AstItem> {
        let ident: Ident = input.parse()?;
        let features = full(input);
        skip_manual_extra_traits(input);
        let rest: TokenStream = input.parse()?;
        Ok(AstItem {
            ast: syn::parse2(quote! {
                pub struct #ident #rest
            })?,
            features,
        })
    }

    // ast_struct! parsing
    pub struct AstStruct(pub(super) Vec<AstItem>);
    impl Parse for AstStruct {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![struct]>()?;
            let res = input.call(ast_struct_inner)?;
            Ok(AstStruct(vec![res]))
        }
    }

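    // Parses #no_visit - returns true if it is present, and false otherwise.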
    fn no_visit(input: ParseStream) -> bool {
        if peek_tag(input, "no_visit") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            true
        } else {
            false
        }
    }

    // ast_enum! parsing
    pub struct AstEnum(pub Vec<AstItem>);
    impl Parse for AstEnum {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;
            let no_visit = no_visit(input);
            let rest: TokenStream = input.parse()?;
            Ok(AstEnum(if no_visit {
                vec![]
            } else {
                vec![AstItem {
                    ast: syn::parse2(quote! {
                        pub enum #ident #rest
                    })?,
                    features: vec![],
                }]
            }))
        }
    }

    // A single variant of an ast_enum_of_structs!
    struct EosVariant {
        name: Ident,
        member: Option<Path>,
        inner: Option<AstItem>,
    }
    fn eos_variant(input: ParseStream) -> Result<EosVariant> {
        input.call(Attribute::parse_outer)?;
        input.parse::<Token![pub]>()?;
        let variant: Ident = input.parse()?;
        let (member, inner) = if input.peek(token::Paren) {
            let content;
            parenthesized!(content in input);
            if content.fork().call(ast_struct_inner).is_ok() {
                let item = content.call(ast_struct_inner)?;
                (Some(Path::from(item.ast.ident.clone())), Some(item))
            } else {
                let path: Path = content.parse()?;
                (Some(path), None)
            }
        } else {
            (None, None)
        };
        input.parse::<Token![,]>()?;
        Ok(EosVariant {
            name: variant,
            member,
            inner,
        })
    }

    // ast_enum_of_structs! parsing
    pub struct AstEnumOfStructs(pub Vec<AstItem>);
    impl Parse for AstEnumOfStructs {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;

            let content;
            braced!(content in input);
            let mut variants = Vec::new();
            while !content.is_empty() {
                variants.push(content.call(eos_variant)?);
            }

            if let Some(ident) = input.parse::<Option<Ident>>()? {
                assert_eq!(ident, "do_not_generate_to_tokens");
            }

            let enum_item = {
                let variants = variants.iter().map(|v| {
                    let name = v.name.clone();
                    match v.member {
                        Some(ref member) => quote!(#name(#member)),
                        None => quote!(#name),
                    }
                });
                parse_quote! {
                    pub enum #ident {
                        #(#variants),*
                    }
                }
            };
            let mut items = vec![AstItem {
                ast: enum_item,
                features: vec![],
            }];
            items.extend(variants.into_iter().filter_map(|v| v.inner));
            Ok(AstEnumOfStructs(items))
        }
    }

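    // Parses the contents of the Token! macro definition in token.rs. Each arm
    // is expected to be of the form `( <token> ) => { $ <path> };`, and the
    // resulting lookup maps the token's source text to the final path segment.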
    pub struct TokenMacro(pub TokenLookup);
    impl Parse for TokenMacro {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut tokens = BTreeMap::new();
            while !input.is_empty() {
                let content;
                parenthesized!(content in input);
                let token = content.parse::<TokenStream>()?.to_string();
                input.parse::<Token![=]>()?;
                input.parse::<Token![>]>()?;
                let content;
                braced!(content in input);
                input.parse::<Token![;]>()?;
                content.parse::<token::Dollar>()?;
                let path: Path = content.parse()?;
                let ty = path.segments.last().unwrap().into_value().ident.to_string();
                tokens.insert(token, ty.to_string());
            }
            Ok(TokenMacro(tokens))
        }
    }

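    // Parses a single `feature = "..."` pair and returns the feature name.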
    fn parse_feature(input: ParseStream) -> Result<String> {
        let i: syn::Ident = input.parse()?;
        assert_eq!(i, "feature");

        input.parse::<Token![=]>()?;
        let s = input.parse::<syn::LitStr>()?;

        Ok(s.value())
    }

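    // Parses the argument of a #[cfg(...)] attribute: either a single
    // `feature = "..."` or `any(feature = "...", ...)`.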
    impl Parse for types::Features {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut features = BTreeSet::new();

            let level_1;
            parenthesized!(level_1 in input);

            let i: syn::Ident = level_1.fork().parse()?;

            if i == "any" {
                level_1.parse::<syn::Ident>()?;

                let level_2;
                parenthesized!(level_2 in level_1);

                while !level_2.is_empty() {
                    features.insert(parse_feature(&level_2)?);

                    if !level_2.is_empty() {
                        level_2.parse::<Token![,]>()?;
                    }
                }
            } else if i == "feature" {
                features.insert(parse_feature(&level_1)?);
                assert!(level_1.is_empty());
            } else {
                panic!("{:?}", i);
            }

            assert!(input.is_empty());

            Ok(types::Features { any: features })
        }
    }
}

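/// Combine a base feature set with any additional #[cfg(...)] attributes
/// found on the given item.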
fn get_features(attrs: &[syn::Attribute], base: &[syn::Attribute]) -> Vec<syn::Attribute> {
    let mut ret = base.to_owned();

    for attr in attrs {
        if attr.path.is_ident("cfg") {
            ret.push(attr.clone());
        }
    }

    ret
}

type Error = Box<::std::error::Error>;

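/// Read and parse a syn source file, recording every AST type declared via
/// the ast_struct!, ast_enum!, and ast_enum_of_structs! macros (plus the
/// EXTRA_TYPES structs) into `lookup`, recursing into submodules.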
fn load_file<P: AsRef<Path>>(
    name: P,
    features: &[syn::Attribute],
    lookup: &mut ItemLookup,
) -> Result<(), Error> {
    let name = name.as_ref();
    let parent = name.parent().ok_or("no parent path")?;

    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;

    // Parse the file
    let file = syn::parse_file(&src)?;

    // Collect all of the interesting AstItems declared in this file or submodules.
    'items: for item in file.items {
        match item {
            Item::Mod(item) => {
                // Don't inspect inline modules.
                if item.content.is_some() {
                    continue;
                }

                // We don't want to try to load the generated rust files and
                // parse them, so we ignore them here.
                for name in IGNORED_MODS {
                    if item.ident == name {
                        continue 'items;
                    }
                }

                // Look up any #[cfg()] attributes on the module and add them to
                // the feature set.
                //
                // The derive module is weird because it is built with either
                // `full` or `derive` but exported only under `derive`.
                let features = if item.ident == "derive" {
                    vec![parse_quote!(#[cfg(feature = "derive")])]
                } else {
                    get_features(&item.attrs, features)
                };

                // Look up the submodule file, and recursively parse it.
                // XXX: Only handles same-directory .rs file submodules.
                let path = parent.join(&format!("{}.rs", item.ident));
                load_file(path, &features, lookup)?;
            }
            Item::Macro(item) => {
                // Look up any #[cfg()] attributes directly on the macro
                // invocation, and add them to the feature set.
                let features = get_features(&item.attrs, features);

                // Try to parse the AstItem declaration out of the item.
                let tts = &item.mac.tts;
                let found = if item.mac.path.is_ident("ast_struct") {
                    syn::parse2::<parsing::AstStruct>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum") {
                    syn::parse2::<parsing::AstEnum>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum_of_structs") {
                    syn::parse2::<parsing::AstEnumOfStructs>(quote!(#tts))?.0
                } else {
                    continue;
                };

                // Record our features on the parsed AstItems.
                for mut item in found {
                    item.features.extend(features.clone());
                    lookup.insert(item.ast.ident.clone(), item);
                }
            }
            Item::Struct(item) => {
                let ident = item.ident;
                if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
                    lookup.insert(
                        ident.clone(),
                        AstItem {
                            ast: DeriveInput {
                                ident,
                                vis: item.vis,
                                attrs: item.attrs,
                                generics: item.generics,
                                data: Data::Struct(DataStruct {
                                    fields: item.fields,
                                    struct_token: item.struct_token,
                                    semi_token: item.semi_token,
                                }),
                            },
                            features: features.to_owned(),
                        },
                    );
                }
            }
            _ => {}
        }
    }
    Ok(())
}

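/// Parse token.rs and return the lookup table built from its Token! macro
/// definition.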
fn load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup, Error> {
    let name = name.as_ref();
    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;
    let file = syn::parse_file(&src)?;
    for item in file.items {
        match item {
            Item::Macro(item) => {
                match item.ident {
                    Some(ref i) if i == "Token" => {}
                    _ => continue,
                }
                let tts = &item.mac.tts;
                let tokens = syn::parse2::<parsing::TokenMacro>(quote!(#tts))?.0;
                return Ok(tokens);
            }
            _ => {}
        }
    }

    Err("failed to parse Token macro".into())
}