use crate::{types, version};

use indexmap::IndexMap;
use quote::quote;
use syn::{parse_quote, Data, DataStruct, DeriveInput, Ident, Item};

use std::collections::BTreeMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;

const SYN_CRATE_ROOT: &str = "../src/lib.rs";
const TOKEN_SRC: &str = "../src/token.rs";
const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"];
const EXTRA_TYPES: &[&str] = &["Lifetime"];

// NOTE: BTreeMap is used here instead of HashMap to have deterministic output.
type ItemLookup = BTreeMap<Ident, AstItem>;
type TokenLookup = BTreeMap<String, String>;

/// Parse the contents of syn's `src` directory and return the collected AST
/// type definitions.
pub fn parse() -> types::Definitions {
    let mut item_lookup = BTreeMap::new();
    load_file(SYN_CRATE_ROOT, &[], &mut item_lookup).unwrap();

    let token_lookup = load_token_file(TOKEN_SRC).unwrap();

    let version = version::get();

    let types = item_lookup
        .values()
        .map(|item| introspect_item(item, &item_lookup, &token_lookup))
        .collect();

    let tokens = token_lookup
        .into_iter()
        .map(|(name, ty)| (ty, name))
        .collect();

    types::Definitions {
        version,
        types,
        tokens,
    }
}

/// An AST type definition extracted from the syn source, together with the
/// `cfg` feature attributes that gate it.
#[derive(Clone)]
pub struct AstItem {
    ast: DeriveInput,
    features: Vec<syn::Attribute>,
}

fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
    let features = introspect_features(&item.features);

    match &item.ast.data {
        Data::Enum(ref data) => types::Node {
            ident: item.ast.ident.to_string(),
            features,
            data: types::Data::Enum(introspect_enum(data, items, tokens)),
        },
        Data::Struct(ref data) => types::Node {
            ident: item.ast.ident.to_string(),
            features,
            data: {
                if data.fields.iter().all(|f| is_pub(&f.vis)) {
                    types::Data::Struct(introspect_struct(data, items, tokens))
                } else {
                    types::Data::Private
                }
            },
        },
        Data::Union(..) => panic!("Union not supported"),
    }
}

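// Converts a syn enum definition into the variant list used by the type
// definitions, mapping each variant to its name and introspected field types.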
fn introspect_enum(
    item: &syn::DataEnum,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Variants {
    item.variants
        .iter()
        .map(|variant| {
            let fields = match &variant.fields {
                syn::Fields::Unnamed(fields) => fields
                    .unnamed
                    .iter()
                    .map(|field| introspect_type(&field.ty, items, tokens))
                    .collect(),
                syn::Fields::Unit => vec![],
                _ => panic!("Enum representation not supported"),
            };

            (variant.ident.to_string(), fields)
        })
        .collect()
}

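// Converts a syn struct definition into an ordered map from field name to
// introspected field type; unit structs produce an empty map.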
fn introspect_struct(
    item: &syn::DataStruct,
    items: &ItemLookup,
    tokens: &TokenLookup,
) -> types::Fields {
    match &item.fields {
        syn::Fields::Named(fields) => fields
            .named
            .iter()
            .map(|field| {
                (
                    field.ident.as_ref().unwrap().to_string(),
                    introspect_type(&field.ty, items, tokens),
                )
            })
            .collect(),
        syn::Fields::Unit => IndexMap::new(),
        _ => panic!("Struct representation not supported"),
    }
}

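// Maps a Rust type appearing in the syn source to the corresponding
// types::Type. For example, a field declared as `Punctuated<Expr, Token![,]>`
// becomes a Punctuated node whose element is the syn type `Expr` and whose
// punct is the token type name registered for "," in token.rs (illustrative).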
fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type {
    match item {
        syn::Type::Path(syn::TypePath {
            qself: None,
            ref path,
        }) => {
            let last = path.segments.last().unwrap().into_value();
            let string = last.ident.to_string();

            match string.as_str() {
                "Option" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Option(Box::new(nested))
                }
                "Punctuated" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    let punct = match introspect_type(last_arg(&last.arguments), items, tokens) {
                        types::Type::Token(s) => s,
                        _ => panic!("Punctuated punct must be a token type"),
                    };

                    types::Type::Punctuated(types::Punctuated {
                        element: Box::new(nested),
                        punct,
                    })
                }
                "Vec" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Vec(Box::new(nested))
                }
                "Box" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Box(Box::new(nested))
                }
                "Brace" | "Bracket" | "Paren" | "Group" => types::Type::Group(string),
                "TokenStream" | "Literal" | "Ident" | "Span" => types::Type::Ext(string),
                "String" | "u32" | "usize" | "bool" => types::Type::Std(string),
                _ => {
                    if items.get(&last.ident).is_some() {
                        types::Type::Syn(string)
                    } else {
                        unimplemented!("{}", string);
                    }
                }
            }
        }
        syn::Type::Tuple(syn::TypeTuple { ref elems, .. }) => {
            let tys = elems
                .iter()
                .map(|ty| introspect_type(&ty, items, tokens))
                .collect();
            types::Type::Tuple(tys)
        }
        syn::Type::Macro(syn::TypeMacro { ref mac })
            if mac.path.segments.last().unwrap().into_value().ident == "Token" =>
        {
            let content = mac.tts.to_string();
            let ty = tokens.get(&content).unwrap().to_string();

            types::Type::Token(ty)
        }
        _ => panic!("{}", quote!(#item).to_string()),
    }
}

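// Folds the `cfg` attributes attached to an item into a single feature set.
// When several cfg attributes are present, the most restrictive (smallest)
// set wins, and each pair of sets is asserted to be in a subset relation;
// e.g. merging #[cfg(any(feature = "derive", feature = "full"))] with
// #[cfg(feature = "full")] keeps just "full" (illustrative).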
fn introspect_features(attrs: &[syn::Attribute]) -> types::Features {
    let mut ret = types::Features::default();

    for attr in attrs {
        if !attr.path.is_ident("cfg") {
            continue;
        }

        let features: types::Features = syn::parse2(attr.tts.clone()).unwrap();

        if ret.any.is_empty() {
            ret = features;
        } else if ret.any.len() < features.any.len() {
            assert!(ret.any.iter().all(|f| features.any.contains(f)));
        } else {
            assert!(features.any.iter().all(|f| ret.any.contains(f)));
            ret = features;
        }
    }

    ret
}

fn is_pub(vis: &syn::Visibility) -> bool {
    match vis {
        syn::Visibility::Public(_) => true,
        _ => false,
    }
}

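// first_arg and last_arg extract the first and last generic type argument of
// a path segment, e.g. `Expr` and `Token![,]` out of
// `Punctuated<Expr, Token![,]>` (illustrative).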
fn first_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .first()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

fn last_arg(params: &syn::PathArguments) -> &syn::Type {
    let data = match *params {
        syn::PathArguments::AngleBracketed(ref data) => data,
        _ => panic!("Expected at least 1 type argument here"),
    };

    match **data
        .args
        .last()
        .expect("Expected at least 1 type argument here")
        .value()
    {
        syn::GenericArgument::Type(ref ty) => ty,
        _ => panic!("Expected at least 1 type argument here"),
    }
}

mod parsing {
    use super::{AstItem, TokenLookup};
    use crate::types;

    use proc_macro2::TokenStream;
    use quote::quote;
    use syn::parse::{Parse, ParseStream, Result};
    use syn::*;

    use std::collections::{BTreeMap, BTreeSet};

    fn peek_tag(input: ParseStream, tag: &str) -> bool {
        let ahead = input.fork();
        ahead.parse::<Token![#]>().is_ok()
            && ahead
                .parse::<Ident>()
                .map(|ident| ident == tag)
                .unwrap_or(false)
    }

    // Parses `#full` - returns #[cfg(feature = "full")] if it is present, and
    // nothing otherwise.
    fn full(input: ParseStream) -> Vec<syn::Attribute> {
        if peek_tag(input, "full") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            vec![parse_quote!(#[cfg(feature = "full")])]
        } else {
            vec![]
        }
    }

    fn skip_manual_extra_traits(input: ParseStream) {
        if peek_tag(input, "manual_extra_traits") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
        }
    }

    // Parses a simple AstStruct without the `pub struct` prefix.
    fn ast_struct_inner(input: ParseStream) -> Result<AstItem> {
        let ident: Ident = input.parse()?;
        let features = full(input);
        skip_manual_extra_traits(input);
        let rest: TokenStream = input.parse()?;
        Ok(AstItem {
            ast: syn::parse2(quote! {
                pub struct #ident #rest
            })?,
            features,
        })
    }

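    // For reference, an ast_struct! invocation in the syn source has roughly
    // this shape (illustrative, not the literal source); AstStruct below
    // parses the contents of such an invocation:
    //
    //     ast_struct! {
    //         pub struct ExprArray #full {
    //             pub attrs: Vec<Attribute>,
    //             pub bracket_token: token::Bracket,
    //             pub elems: Punctuated<Expr, Token![,]>,
    //         }
    //     }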
    // ast_struct! parsing
    pub struct AstStruct(pub(super) Vec<AstItem>);
    impl Parse for AstStruct {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![struct]>()?;
            let res = input.call(ast_struct_inner)?;
            Ok(AstStruct(vec![res]))
        }
    }

    fn no_visit(input: ParseStream) -> bool {
        if peek_tag(input, "no_visit") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            true
        } else {
            false
        }
    }

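    // For reference, an ast_enum! invocation looks roughly like this
    // (illustrative, not the literal source):
    //
    //     ast_enum! {
    //         pub enum AttrStyle {
    //             Outer,
    //             Inner(Token![!]),
    //         }
    //     }
    //
    // An optional #no_visit tag after the enum name drops the enum from the
    // parsed output entirely.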
    // ast_enum! parsing
    pub struct AstEnum(pub Vec<AstItem>);
    impl Parse for AstEnum {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;
            let no_visit = no_visit(input);
            let rest: TokenStream = input.parse()?;
            Ok(AstEnum(if no_visit {
                vec![]
            } else {
                vec![AstItem {
                    ast: syn::parse2(quote! {
                        pub enum #ident #rest
                    })?,
                    features: vec![],
                }]
            }))
        }
    }

    // A single variant of an ast_enum_of_structs!
    struct EosVariant {
        name: Ident,
        member: Option<Path>,
        inner: Option<AstItem>,
    }
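    // Parses one variant of an ast_enum_of_structs! invocation. A variant is
    // either a plain path, e.g. `pub Verbatim(ExprVerbatim),`, or an inline
    // struct definition whose body is parsed through ast_struct_inner
    // (illustrative names).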
    fn eos_variant(input: ParseStream) -> Result<EosVariant> {
        input.call(Attribute::parse_outer)?;
        input.parse::<Token![pub]>()?;
        let variant: Ident = input.parse()?;
        let (member, inner) = if input.peek(token::Paren) {
            let content;
            parenthesized!(content in input);
            if content.fork().call(ast_struct_inner).is_ok() {
                let item = content.call(ast_struct_inner)?;
                (Some(Path::from(item.ast.ident.clone())), Some(item))
            } else {
                let path: Path = content.parse()?;
                (Some(path), None)
            }
        } else {
            (None, None)
        };
        input.parse::<Token![,]>()?;
        Ok(EosVariant {
            name: variant,
            member,
            inner,
        })
    }

    // ast_enum_of_structs! parsing
    pub struct AstEnumOfStructs(pub Vec<AstItem>);
    impl Parse for AstEnumOfStructs {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;

            let content;
            braced!(content in input);
            let mut variants = Vec::new();
            while !content.is_empty() {
                variants.push(content.call(eos_variant)?);
            }

            if let Some(ident) = input.parse::<Option<Ident>>()? {
                assert_eq!(ident, "do_not_generate_to_tokens");
            }

            let enum_item = {
                let variants = variants.iter().map(|v| {
                    let name = v.name.clone();
                    match v.member {
                        Some(ref member) => quote!(#name(#member)),
                        None => quote!(#name),
                    }
                });
                parse_quote! {
                    pub enum #ident {
                        #(#variants),*
                    }
                }
            };
            let mut items = vec![AstItem {
                ast: enum_item,
                features: vec![],
            }];
            items.extend(variants.into_iter().filter_map(|v| v.inner));
            Ok(AstEnumOfStructs(items))
        }
    }

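    // The Token! macro in src/token.rs is made up of rules of roughly this
    // shape (illustrative, not the literal source):
    //
    //     (+) => { $crate::token::Add };
    //
    // TokenMacro walks those rules and builds a lookup from a token's string
    // representation ("+") to the name of its type ("Add").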
    pub struct TokenMacro(pub TokenLookup);
    impl Parse for TokenMacro {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut tokens = BTreeMap::new();
            while !input.is_empty() {
                let content;
                parenthesized!(content in input);
                let token = content.parse::<TokenStream>()?.to_string();
                input.parse::<Token![=]>()?;
                input.parse::<Token![>]>()?;
                let content;
                braced!(content in input);
                input.parse::<Token![;]>()?;
                content.parse::<token::Dollar>()?;
                let path: Path = content.parse()?;
                let ty = path.segments.last().unwrap().into_value().ident.to_string();
                tokens.insert(token, ty);
            }
            Ok(TokenMacro(tokens))
        }
    }

    fn parse_feature(input: ParseStream) -> Result<String> {
        let i: syn::Ident = input.parse()?;
        assert_eq!(i, "feature");

        input.parse::<Token![=]>()?;
        let s = input.parse::<syn::LitStr>()?;

        Ok(s.value())
    }

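    // A cfg attribute body is either a single `feature = "..."` pair or an
    // any(...) of such pairs, e.g. (any(feature = "derive", feature = "full"))
    // (illustrative).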
    impl Parse for types::Features {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut features = BTreeSet::new();

            let level_1;
            parenthesized!(level_1 in input);

            let i: syn::Ident = level_1.fork().parse()?;

            if i == "any" {
                level_1.parse::<syn::Ident>()?;

                let level_2;
                parenthesized!(level_2 in level_1);

                while !level_2.is_empty() {
                    features.insert(parse_feature(&level_2)?);

                    if !level_2.is_empty() {
                        level_2.parse::<Token![,]>()?;
                    }
                }
            } else if i == "feature" {
                features.insert(parse_feature(&level_1)?);
                assert!(level_1.is_empty());
            } else {
                panic!("{:?}", i);
            }

            assert!(input.is_empty());

            Ok(types::Features { any: features })
        }
    }
}

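// Collects the cfg attributes of an item on top of those inherited from the
// enclosing module.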
fn get_features(attrs: &[syn::Attribute], base: &[syn::Attribute]) -> Vec<syn::Attribute> {
    let mut ret = base.to_owned();

    for attr in attrs {
        if attr.path.is_ident("cfg") {
            ret.push(attr.clone());
        }
    }

    ret
}

type Error = Box<dyn std::error::Error>;

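/// Parse a source file and collect the AstItems it declares into `lookup`,
/// recursing into (non-inline) submodules found along the way.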
fn load_file<P: AsRef<Path>>(
    name: P,
    features: &[syn::Attribute],
    lookup: &mut ItemLookup,
) -> Result<(), Error> {
    let name = name.as_ref();
    let parent = name.parent().ok_or("no parent path")?;

    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;

    // Parse the file
    let file = syn::parse_file(&src)?;

    // Collect all of the interesting AstItems declared in this file or submodules.
    'items: for item in file.items {
        match item {
            Item::Mod(item) => {
                // Don't inspect inline modules.
                if item.content.is_some() {
                    continue;
                }

                // We don't want to try to load the generated rust files and
                // parse them, so we ignore them here.
                for name in IGNORED_MODS {
                    if item.ident == name {
                        continue 'items;
                    }
                }

                // Look up any #[cfg()] attributes on the module and add them
                // to the feature set.
                //
                // The derive module is weird because it is built with either
                // `full` or `derive` but exported only under `derive`.
                let features = if item.ident == "derive" {
                    vec![parse_quote!(#[cfg(feature = "derive")])]
                } else {
                    get_features(&item.attrs, features)
                };

                // Look up the submodule file, and recursively parse it.
                // XXX: Only handles same-directory .rs file submodules.
                let path = parent.join(&format!("{}.rs", item.ident));
                load_file(path, &features, lookup)?;
            }
            Item::Macro(item) => {
                // Look up any #[cfg()] attributes directly on the macro
                // invocation, and add them to the feature set.
                let features = get_features(&item.attrs, features);

                // Try to parse the AstItem declaration out of the item.
                let tts = &item.mac.tts;
                let found = if item.mac.path.is_ident("ast_struct") {
                    syn::parse2::<parsing::AstStruct>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum") {
                    syn::parse2::<parsing::AstEnum>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum_of_structs") {
                    syn::parse2::<parsing::AstEnumOfStructs>(quote!(#tts))?.0
                } else {
                    continue;
                };

                // Record our features on the parsed AstItems.
                for mut item in found {
                    item.features.extend(features.clone());
                    lookup.insert(item.ast.ident.clone(), item);
                }
            }
            Item::Struct(item) => {
                let ident = item.ident;
                if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
                    lookup.insert(
                        ident.clone(),
                        AstItem {
                            ast: DeriveInput {
                                ident,
                                vis: item.vis,
                                attrs: item.attrs,
                                generics: item.generics,
                                data: Data::Struct(DataStruct {
                                    fields: item.fields,
                                    struct_token: item.struct_token,
                                    semi_token: item.semi_token,
                                }),
                            },
                            features: features.to_owned(),
                        },
                    );
                }
            }
            _ => {}
        }
    }
    Ok(())
}

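/// Parse the Token! definition macro in src/token.rs and return the lookup
/// from token string to token type name.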
fn load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup, Error> {
    let name = name.as_ref();
    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;
    let file = syn::parse_file(&src)?;
    for item in file.items {
        match item {
            Item::Macro(item) => {
                match item.ident {
                    Some(ref i) if i == "Token" => {}
                    _ => continue,
                }
                let tts = &item.mac.tts;
                let tokens = syn::parse2::<parsing::TokenMacro>(quote!(#tts))?.0;
                return Ok(tokens);
            }
            _ => {}
        }
    }

    Err("failed to parse Token macro".into())
}