use crate::types;

use indexmap::IndexMap;
use syn::{Data, DataStruct, DeriveInput, Ident, Item};

use std::collections::BTreeMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;

const SYN_CRATE_ROOT: &str = "../src/lib.rs";
const TOKEN_SRC: &str = "../src/token.rs";
const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"];
const EXTRA_TYPES: &[&str] = &["Lifetime"];

// NOTE: BTreeMap is used here instead of HashMap to have deterministic output.
type ItemLookup = BTreeMap<Ident, AstItem>;
type TokenLookup = BTreeMap<String, String>;

/// Parse the syn crate's source files and return definitions of its AST types and tokens.
pub fn parse() -> types::Definitions {
    let mut item_lookup = BTreeMap::new();
    load_file(SYN_CRATE_ROOT, &[], &mut item_lookup).unwrap();

    let token_lookup = load_token_file(TOKEN_SRC).unwrap();

    let types = item_lookup
        .values()
        .map(|item| introspect_item(item, &item_lookup, &token_lookup))
        .collect();

    let tokens = token_lookup
        .into_iter()
        .map(|(name, ty)| (ty, name))
        .collect();

    types::Definitions { types, tokens }
}

/// Data extracted from syn source
#[derive(Clone)]
pub struct AstItem {
    ast: DeriveInput,
    features: Vec<syn::Attribute>,
}

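// Convert a macro-defined AstItem into the Node representation used by the
// code generators, dispatching on whether it is a struct or an enum.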
fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
    let features = introspect_features(&item.features);

    match &item.ast.data {
        Data::Enum(ref data) => types::Node::Enum(introspect_enum(
            &item.ast.ident,
            features,
            data,
            items,
            tokens,
        )),
        Data::Struct(ref data) => types::Node::Struct(introspect_struct(
            &item.ast.ident,
            features,
            data,
            items,
            tokens,
        )),
        Data::Union(..) => panic!("Union not supported"),
    }
}

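// Record each enum variant along with the types of its unnamed fields.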
fn introspect_enum(
    ident: &Ident,
    features: types::Features,
    item: &syn::DataEnum,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Enum {
    let variants = item
        .variants
        .iter()
        .map(|variant| {
            let fields = match &variant.fields {
                syn::Fields::Unnamed(fields) => fields
                    .unnamed
                    .iter()
                    .map(|field| introspect_type(&field.ty, items, tokens))
                    .collect(),
                syn::Fields::Unit => vec![],
                _ => panic!("Enum representation not supported"),
            };

            types::Variant::new(variant.ident.to_string(), fields)
        })
        .collect();

    types::Enum::new(ident.to_string(), features, variants)
}

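// Record the struct's named public fields and their types. Structs with any
// non-public field are treated as opaque and get an empty field map.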
fn introspect_struct(
    ident: &Ident,
    features: types::Features,
    item: &syn::DataStruct,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Struct {
    let all_fields_pub = item.fields.iter().all(|field| is_pub(&field.vis));
    if !all_fields_pub {
        return types::Struct::new(ident.to_string(), features, IndexMap::new());
    }

    let fields = match &item.fields {
        syn::Fields::Named(fields) => fields
            .named
            .iter()
            .map(|field| {
                (
                    field.ident.as_ref().unwrap().to_string(),
                    introspect_type(&field.ty, items, tokens),
                )
            })
            .collect(),
        syn::Fields::Unit => IndexMap::new(),
        _ => panic!("Struct representation not supported"),
    };

    types::Struct::new(ident.to_string(), features, fields)
}

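// Translate a type as written in the syn source into the codegen type model:
// Option, Punctuated, Vec, Box and tuples are handled structurally; delimiter
// types, proc-macro2 types, std types, Token! macros and other syn AST types
// each map to their own variant.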
fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type {
    match item {
        syn::Type::Path(syn::TypePath {
            qself: None,
            ref path,
        }) => {
            let last = path.segments.last().unwrap().into_value();

            match &last.ident.to_string()[..] {
                "Option" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Option(Box::new(nested))
                }
                "Punctuated" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    let punct = match introspect_type(last_arg(&last.arguments), items, tokens) {
                        types::Type::Token(s) => s,
                        _ => panic!(),
                    };

                    types::Type::Punctuated(types::Punctuated::new(nested, punct))
                }
                "Vec" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Vec(Box::new(nested))
                }
                "Box" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Box(Box::new(nested))
                }
                "Brace" | "Bracket" | "Paren" | "Group" => {
                    types::Type::Group(last.ident.to_string())
                }
                "TokenStream" | "Literal" | "Ident" | "Span" => {
                    types::Type::Ext(last.ident.to_string())
                }
                "String" | "u32" | "usize" | "bool" => types::Type::Std(last.ident.to_string()),
                _ => {
                    if items.get(&last.ident).is_some() {
                        types::Type::Syn(last.ident.to_string())
                    } else {
                        unimplemented!("{}", last.ident.to_string());
                    }
                }
            }
        }
        syn::Type::Tuple(syn::TypeTuple { ref elems, .. }) => {
            let tys = elems
                .iter()
                .map(|ty| introspect_type(&ty, items, tokens))
                .collect();
            types::Type::Tuple(tys)
        }
        syn::Type::Macro(syn::TypeMacro { ref mac })
            if mac.path.segments.last().unwrap().into_value().ident == "Token" =>
        {
            let content = mac.tts.to_string();
            let ty = tokens.get(&content).unwrap().to_string();

            types::Type::Token(ty)
        }
        _ => panic!("{}", quote!(#item).to_string()),
    }
}

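// Fold the features named in a list of #[cfg(...)] attributes into a single
// Features set.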
fn introspect_features(attrs: &[syn::Attribute]) -> types::Features {
    let mut ret = types::Features::default();

    for attr in attrs {
        if !attr.path.is_ident("cfg") {
            continue;
        }

        let features: types::Features = syn::parse2(attr.tts.clone()).unwrap();
        ret.join(&features);
    }

    ret
}

fn is_pub(vis: &syn::Visibility) -> bool {
    match vis {
        syn::Visibility::Public(_) => true,
        _ => false,
    }
}

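// First generic type argument of a path segment, e.g. the `T` in `Vec<T>`.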
fn first_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .first()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

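// Last generic type argument of a path segment, e.g. the `P` in `Punctuated<T, P>`.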
fn last_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .last()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

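// Parsers for the bodies of syn's ast_struct!, ast_enum!, ast_enum_of_structs!
// and Token! macro invocations.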
mod parsing {
    use super::{AstItem, TokenLookup};
    use crate::types;

    use proc_macro2::TokenStream;
    use syn;
    use syn::parse::{Parse, ParseStream, Result};
    use syn::*;

    use std::collections::BTreeMap;

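    // Returns true if the next tokens are `#` followed by the given tag
    // identifier, without consuming anything from `input`.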
    fn peek_tag(input: ParseStream, tag: &str) -> bool {
        let ahead = input.fork();
        ahead.parse::<Token![#]>().is_ok()
            && ahead
                .parse::<Ident>()
                .map(|ident| ident == tag)
                .unwrap_or(false)
    }

    // Parses #full - returns #[cfg(feature = "full")] if it is present, and
    // nothing otherwise.
    fn full(input: ParseStream) -> Vec<syn::Attribute> {
        if peek_tag(input, "full") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            vec![parse_quote!(#[cfg(feature = "full")])]
        } else {
            vec![]
        }
    }

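    // Consumes and discards an optional #manual_extra_traits tag.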
    fn skip_manual_extra_traits(input: ParseStream) {
        if peek_tag(input, "manual_extra_traits") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
        }
    }

    // Parses a simple AstStruct without the `pub struct` prefix.
    fn ast_struct_inner(input: ParseStream) -> Result<AstItem> {
        let ident: Ident = input.parse()?;
        let features = full(input);
        skip_manual_extra_traits(input);
        let rest: TokenStream = input.parse()?;
        Ok(AstItem {
            ast: syn::parse2(quote! {
                pub struct #ident #rest
            })?,
            features,
        })
    }

    // ast_struct! parsing
    pub struct AstStruct(pub(super) Vec<AstItem>);
    impl Parse for AstStruct {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![struct]>()?;
            let res = input.call(ast_struct_inner)?;
            Ok(AstStruct(vec![res]))
        }
    }

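    // Consumes an optional #no_visit tag and reports whether it was present.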
    fn no_visit(input: ParseStream) -> bool {
        if peek_tag(input, "no_visit") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            true
        } else {
            false
        }
    }

    // ast_enum! parsing
    pub struct AstEnum(pub Vec<AstItem>);
    impl Parse for AstEnum {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;
            let no_visit = no_visit(input);
            let rest: TokenStream = input.parse()?;
            Ok(AstEnum(if no_visit {
                vec![]
            } else {
                vec![AstItem {
                    ast: syn::parse2(quote! {
                        pub enum #ident #rest
                    })?,
                    features: vec![],
                }]
            }))
        }
    }

    // A single variant of an ast_enum_of_structs!
    struct EosVariant {
        name: Ident,
        member: Option<Path>,
        inner: Option<AstItem>,
    }
    fn eos_variant(input: ParseStream) -> Result<EosVariant> {
        input.call(Attribute::parse_outer)?;
        input.parse::<Token![pub]>()?;
        let variant: Ident = input.parse()?;
        let (member, inner) = if input.peek(token::Paren) {
            let content;
            parenthesized!(content in input);
            if content.fork().call(ast_struct_inner).is_ok() {
                let item = content.call(ast_struct_inner)?;
                (Some(Path::from(item.ast.ident.clone())), Some(item))
            } else {
                let path: Path = content.parse()?;
                (Some(path), None)
            }
        } else {
            (None, None)
        };
        input.parse::<Token![,]>()?;
        Ok(EosVariant {
            name: variant,
            member,
            inner,
        })
    }

    // ast_enum_of_structs! parsing
    pub struct AstEnumOfStructs(pub Vec<AstItem>);
    impl Parse for AstEnumOfStructs {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;

            let content;
            braced!(content in input);
            let mut variants = Vec::new();
            while !content.is_empty() {
                variants.push(content.call(eos_variant)?);
            }

            if let Some(ident) = input.parse::<Option<Ident>>()? {
                assert_eq!(ident, "do_not_generate_to_tokens");
            }

            let enum_item = {
                let variants = variants.iter().map(|v| {
                    let name = v.name.clone();
                    match v.member {
                        Some(ref member) => quote!(#name(#member)),
                        None => quote!(#name),
                    }
                });
                parse_quote! {
                    pub enum #ident {
                        #(#variants),*
                    }
                }
            };
            let mut items = vec![AstItem {
                ast: enum_item,
                features: vec![],
            }];
            items.extend(variants.into_iter().filter_map(|v| v.inner));
            Ok(AstEnumOfStructs(items))
        }
    }

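    // Token! macro parsing: maps each token's string representation to the
    // name of the corresponding token type.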
    pub struct TokenMacro(pub TokenLookup);
    impl Parse for TokenMacro {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut tokens = BTreeMap::new();
            while !input.is_empty() {
                let content;
                parenthesized!(content in input);
                let token = content.parse::<TokenStream>()?.to_string();
                input.parse::<Token![=]>()?;
                input.parse::<Token![>]>()?;
                let content;
                braced!(content in input);
                input.parse::<Token![;]>()?;
                content.parse::<token::Dollar>()?;
                let path: Path = content.parse()?;
                let ty = path.segments.last().unwrap().into_value().ident.to_string();
                tokens.insert(token, ty.to_string());
            }
            Ok(TokenMacro(tokens))
        }
    }

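    // Parses a single `feature = "..."` pair and returns the feature name.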
    fn parse_feature(input: ParseStream) -> Result<String> {
        let i: syn::Ident = input.parse()?;
        assert_eq!(i, "feature");

        input.parse::<Token![=]>()?;
        let s = input.parse::<syn::LitStr>()?;

        Ok(s.value())
    }

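    // Parses the argument of a #[cfg(...)] attribute: either a single
    // `feature = "..."` or an `any(...)` list of features.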
    impl Parse for types::Features {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut features = vec![];

            let level_1;
            parenthesized!(level_1 in input);

            let i: syn::Ident = level_1.fork().parse()?;

            if i == "any" {
                level_1.parse::<syn::Ident>()?;

                let level_2;
                parenthesized!(level_2 in level_1);

                while !level_2.is_empty() {
                    features.push(parse_feature(&level_2)?);

                    if !level_2.is_empty() {
                        level_2.parse::<Token![,]>()?;
                    }
                }
            } else if i == "feature" {
                features.push(parse_feature(&level_1)?);
                assert!(level_1.is_empty());
            } else {
                panic!("{:?}", i);
            }

            assert!(input.is_empty());

            Ok(types::Features::new(features))
        }
    }
}

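// Combine the #[cfg(...)] attributes found on an item with the set inherited
// from its enclosing modules.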
fn get_features(attrs: &[syn::Attribute], base: &[syn::Attribute]) -> Vec<syn::Attribute> {
    let mut ret = base.to_owned();

    for attr in attrs {
        if attr.path.is_ident("cfg") {
            ret.push(attr.clone());
        }
    }

    ret
}

type Error = Box<dyn std::error::Error>;

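// Recursively load a syn source file and the out-of-line submodules it
// declares, collecting every AstItem defined by the ast_* macros into `lookup`.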
fn load_file<P: AsRef<Path>>(
    name: P,
    features: &[syn::Attribute],
    lookup: &mut ItemLookup,
) -> Result<(), Error> {
    let name = name.as_ref();
    let parent = name.parent().ok_or("no parent path")?;

    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;

    // Parse the file
    let file = syn::parse_file(&src)?;

    // Collect all of the interesting AstItems declared in this file or submodules.
    'items: for item in file.items {
        match item {
            Item::Mod(item) => {
                // Don't inspect inline modules.
                if item.content.is_some() {
                    continue;
                }

                // We don't want to try to load the generated rust files and
                // parse them, so we ignore them here.
                for name in IGNORED_MODS {
                    if item.ident == name {
                        continue 'items;
                    }
                }

                // Look up any #[cfg()] attributes on the module and add them to
                // the feature set.
                //
                // The derive module is weird because it is built with either
                // `full` or `derive` but exported only under `derive`.
                let features = if item.ident == "derive" {
                    vec![parse_quote!(#[cfg(feature = "derive")])]
                } else {
                    get_features(&item.attrs, features)
                };

                // Look up the submodule file, and recursively parse it.
                // XXX: Only handles same-directory .rs file submodules.
                let path = parent.join(&format!("{}.rs", item.ident));
                load_file(path, &features, lookup)?;
            }
            Item::Macro(item) => {
                // Look up any #[cfg()] attributes directly on the macro
                // invocation, and add them to the feature set.
                let features = get_features(&item.attrs, features);

                // Try to parse the AstItem declaration out of the item.
                let tts = &item.mac.tts;
                let found = if item.mac.path.is_ident("ast_struct") {
                    syn::parse2::<parsing::AstStruct>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum") {
                    syn::parse2::<parsing::AstEnum>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum_of_structs") {
                    syn::parse2::<parsing::AstEnumOfStructs>(quote!(#tts))?.0
                } else {
                    continue;
                };

                // Record our features on the parsed AstItems.
                for mut item in found {
                    item.features.extend(features.clone());
                    lookup.insert(item.ast.ident.clone(), item);
                }
            }
            Item::Struct(item) => {
                let ident = item.ident;
                if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
                    lookup.insert(
                        ident.clone(),
                        AstItem {
                            ast: DeriveInput {
                                ident,
                                vis: item.vis,
                                attrs: item.attrs,
                                generics: item.generics,
                                data: Data::Struct(DataStruct {
                                    fields: item.fields,
                                    struct_token: item.struct_token,
                                    semi_token: item.semi_token,
                                }),
                            },
                            features: features.to_owned(),
                        },
                    );
                }
            }
            _ => {}
        }
    }
    Ok(())
}

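// Load the token.rs source and extract the token lookup table from its Token!
// macro definition.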
fn load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup, Error> {
    let name = name.as_ref();
    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;
    let file = syn::parse_file(&src)?;
    for item in file.items {
        match item {
            Item::Macro(item) => {
                match item.ident {
                    Some(ref i) if i == "Token" => {}
                    _ => continue,
                }
                let tts = &item.mac.tts;
                let tokens = syn::parse2::<parsing::TokenMacro>(quote!(#tts))?.0;
                return Ok(tokens);
            }
            _ => {}
        }
    }

    Err("failed to parse Token macro".into())
}