blob: c6335b7039ebe3d6c581942b60e55d477113f576 [file] [log] [blame]
David Tolnay950cc122019-05-07 14:21:13 -07001use crate::version;
Carl Lerche058ff472019-02-13 16:23:52 -08002
David Tolnay14d463e2019-02-15 14:23:51 -08003use indexmap::IndexMap;
David Tolnay397bd0b2019-02-15 20:51:10 -08004use quote::quote;
David Tolnay950cc122019-05-07 14:21:13 -07005use syn::parse::Parser;
David Tolnay397bd0b2019-02-15 20:51:10 -08006use syn::{parse_quote, Data, DataStruct, DeriveInput, Ident, Item};
David Tolnay950cc122019-05-07 14:21:13 -07007use syn_codegen as types;
Carl Lerche058ff472019-02-13 16:23:52 -08008
9use std::collections::BTreeMap;
10use std::fs::File;
11use std::io::Read;
12use std::path::Path;
13
// Entry point of the syn crate's source tree, relative to the codegen crate.
const SYN_CRATE_ROOT: &str = "../src/lib.rs";
// File containing the Token! macro table mapping token syntax to type names.
const TOKEN_SRC: &str = "../src/token.rs";
// Submodules containing generated code; they must not be introspected.
const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"];
// Plain struct declarations (not ast_* macro invocations) that should still
// be picked up by load_file.
const EXTRA_TYPES: &[&str] = &["Lifetime"];

// NOTE: BTreeMap is used here instead of HashMap to have deterministic output.
type ItemLookup = BTreeMap<Ident, AstItem>;
// Maps a token's source syntax (as written inside `Token![...]`) to the name
// of its type as declared in token.rs.
type TokenLookup = BTreeMap<String, String>;
22
23/// Parse the contents of `src` and return a list of AST types.
David Tolnayf9bb8ff2019-02-15 13:10:14 -080024pub fn parse() -> types::Definitions {
Carl Lerche058ff472019-02-13 16:23:52 -080025 let mut item_lookup = BTreeMap::new();
26 load_file(SYN_CRATE_ROOT, &[], &mut item_lookup).unwrap();
27
28 let token_lookup = load_token_file(TOKEN_SRC).unwrap();
29
David Tolnay10227122019-02-15 20:53:45 -080030 let version = version::get();
31
David Tolnayf9bb8ff2019-02-15 13:10:14 -080032 let types = item_lookup
Carl Lerche058ff472019-02-13 16:23:52 -080033 .values()
34 .map(|item| introspect_item(item, &item_lookup, &token_lookup))
David Tolnayf9bb8ff2019-02-15 13:10:14 -080035 .collect();
36
David Tolnay47fe7402019-02-15 14:35:25 -080037 let tokens = token_lookup
38 .into_iter()
39 .map(|(name, ty)| (ty, name))
40 .collect();
David Tolnayf9bb8ff2019-02-15 13:10:14 -080041
David Tolnay10227122019-02-15 20:53:45 -080042 types::Definitions {
43 version,
44 types,
45 tokens,
46 }
Carl Lerche058ff472019-02-13 16:23:52 -080047}
48
/// Data extracted from syn source
#[derive(Clone)]
pub struct AstItem {
    // The type's declaration, normalized into DeriveInput form.
    ast: DeriveInput,
    // #[cfg(...)] attributes governing when this type is compiled.
    features: Vec<syn::Attribute>,
}
55
David Tolnayf9bb8ff2019-02-15 13:10:14 -080056fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
Carl Lerche058ff472019-02-13 16:23:52 -080057 let features = introspect_features(&item.features);
58
59 match &item.ast.data {
David Tolnayc2be7b22019-02-15 18:48:31 -080060 Data::Enum(ref data) => types::Node {
61 ident: item.ast.ident.to_string(),
Carl Lerche058ff472019-02-13 16:23:52 -080062 features,
David Tolnayc2be7b22019-02-15 18:48:31 -080063 data: types::Data::Enum(introspect_enum(data, items, tokens)),
64 },
65 Data::Struct(ref data) => types::Node {
66 ident: item.ast.ident.to_string(),
Carl Lerche058ff472019-02-13 16:23:52 -080067 features,
David Tolnayc2be7b22019-02-15 18:48:31 -080068 data: {
69 if data.fields.iter().all(|f| is_pub(&f.vis)) {
70 types::Data::Struct(introspect_struct(data, items, tokens))
71 } else {
72 types::Data::Private
73 }
74 },
75 },
Carl Lerche058ff472019-02-13 16:23:52 -080076 Data::Union(..) => panic!("Union not supported"),
77 }
78}
79
80fn introspect_enum(
Carl Lerche058ff472019-02-13 16:23:52 -080081 item: &syn::DataEnum,
82 items: &ItemLookup,
83 tokens: &TokenLookup,
David Tolnay75c5a172019-02-15 20:35:41 -080084) -> types::Variants {
David Tolnayc2be7b22019-02-15 18:48:31 -080085 item.variants
Carl Lerche058ff472019-02-13 16:23:52 -080086 .iter()
87 .map(|variant| {
88 let fields = match &variant.fields {
89 syn::Fields::Unnamed(fields) => fields
90 .unnamed
91 .iter()
92 .map(|field| introspect_type(&field.ty, items, tokens))
93 .collect(),
94 syn::Fields::Unit => vec![],
95 _ => panic!("Enum representation not supported"),
96 };
97
David Tolnay75c5a172019-02-15 20:35:41 -080098 (variant.ident.to_string(), fields)
Carl Lerche058ff472019-02-13 16:23:52 -080099 })
David Tolnayc2be7b22019-02-15 18:48:31 -0800100 .collect()
Carl Lerche058ff472019-02-13 16:23:52 -0800101}
102
103fn introspect_struct(
Carl Lerche058ff472019-02-13 16:23:52 -0800104 item: &syn::DataStruct,
105 items: &ItemLookup,
106 tokens: &TokenLookup,
David Tolnay75c5a172019-02-15 20:35:41 -0800107) -> types::Fields {
David Tolnayc2be7b22019-02-15 18:48:31 -0800108 match &item.fields {
Carl Lerche058ff472019-02-13 16:23:52 -0800109 syn::Fields::Named(fields) => fields
110 .named
111 .iter()
112 .map(|field| {
David Tolnay14d463e2019-02-15 14:23:51 -0800113 (
Carl Lerche058ff472019-02-13 16:23:52 -0800114 field.ident.as_ref().unwrap().to_string(),
115 introspect_type(&field.ty, items, tokens),
116 )
117 })
118 .collect(),
David Tolnay14d463e2019-02-15 14:23:51 -0800119 syn::Fields::Unit => IndexMap::new(),
Carl Lerche058ff472019-02-13 16:23:52 -0800120 _ => panic!("Struct representation not supported"),
David Tolnayc2be7b22019-02-15 18:48:31 -0800121 }
Carl Lerche058ff472019-02-13 16:23:52 -0800122}
123
/// Translate a Rust type as written in syn's source into the serializable
/// type description used by syn-codegen.
fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type {
    match item {
        syn::Type::Path(syn::TypePath {
            qself: None,
            ref path,
        }) => {
            // Only the final path segment matters, e.g. `punctuated::Punctuated`.
            let last = path.segments.last().unwrap().into_value();
            let string = last.ident.to_string();

            match string.as_str() {
                "Option" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Option(Box::new(nested))
                }
                "Punctuated" => {
                    // Punctuated<T, P>: T is the element type; P must resolve
                    // to a known token type.
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    let punct = match introspect_type(last_arg(&last.arguments), items, tokens) {
                        types::Type::Token(s) => s,
                        _ => panic!(),
                    };

                    types::Type::Punctuated(types::Punctuated {
                        element: Box::new(nested),
                        punct,
                    })
                }
                "Vec" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Vec(Box::new(nested))
                }
                "Box" => {
                    let nested = introspect_type(first_arg(&last.arguments), items, tokens);
                    types::Type::Box(Box::new(nested))
                }
                // Delimiter token types.
                "Brace" | "Bracket" | "Paren" | "Group" => types::Type::Group(string),
                // Types defined outside syn (proc-macro2).
                "TokenStream" | "Literal" | "Ident" | "Span" => types::Type::Ext(string),
                // Primitive / standard library types.
                "String" | "u32" | "usize" | "bool" => types::Type::Std(string),
                _ => {
                    // Anything else must be a syn type collected by load_file.
                    if items.get(&last.ident).is_some() {
                        types::Type::Syn(string)
                    } else {
                        unimplemented!("{}", string);
                    }
                }
            }
        }
        syn::Type::Tuple(syn::TypeTuple { ref elems, .. }) => {
            let tys = elems
                .iter()
                .map(|ty| introspect_type(&ty, items, tokens))
                .collect();
            types::Type::Tuple(tys)
        }
        // `Token![...]` invocations: look up the token's type name in the
        // table built from token.rs.
        syn::Type::Macro(syn::TypeMacro { ref mac })
            if mac.path.segments.last().unwrap().into_value().ident == "Token" =>
        {
            let content = mac.tts.to_string();
            let ty = tokens.get(&content).unwrap().to_string();

            types::Type::Token(ty)
        }
        _ => panic!("{}", quote!(#item).to_string()),
    }
}
188
/// Fold the `#[cfg(...)]` attributes of an item into a single feature set.
///
/// When several cfg attributes are present, the narrowest `any(...)` list
/// wins, and each list is asserted to be a subset of the wider ones.
fn introspect_features(attrs: &[syn::Attribute]) -> types::Features {
    let mut ret = types::Features::default();

    for attr in attrs {
        if !attr.path.is_ident("cfg") {
            continue;
        }

        let features = parsing::parse_features.parse2(attr.tts.clone()).unwrap();

        if ret.any.is_empty() {
            // First cfg attribute seen.
            ret = features;
        } else if ret.any.len() < features.any.len() {
            // Current set is narrower; it must be contained in the new one.
            assert!(ret.any.iter().all(|f| features.any.contains(f)));
        } else {
            // New set is narrower (or equal); keep it after checking containment.
            assert!(features.any.iter().all(|f| ret.any.contains(f)));
            ret = features;
        }
    }

    ret
}
211
212fn is_pub(vis: &syn::Visibility) -> bool {
213 match vis {
214 syn::Visibility::Public(_) => true,
215 _ => false,
216 }
217}
218
219fn first_arg(params: &syn::PathArguments) -> &syn::Type {
220 let data = match *params {
221 syn::PathArguments::AngleBracketed(ref data) => data,
222 _ => panic!("Expected at least 1 type argument here"),
223 };
224
225 match **data
226 .args
227 .first()
228 .expect("Expected at least 1 type argument here")
229 .value()
230 {
231 syn::GenericArgument::Type(ref ty) => ty,
232 _ => panic!("Expected at least 1 type argument here"),
233 }
234}
235
236fn last_arg(params: &syn::PathArguments) -> &syn::Type {
237 let data = match *params {
238 syn::PathArguments::AngleBracketed(ref data) => data,
239 _ => panic!("Expected at least 1 type argument here"),
240 };
241
242 match **data
243 .args
244 .last()
245 .expect("Expected at least 1 type argument here")
246 .value()
247 {
248 syn::GenericArgument::Type(ref ty) => ty,
249 _ => panic!("Expected at least 1 type argument here"),
250 }
251}
252
// Parsers for the ast_struct!/ast_enum!/ast_enum_of_structs! macro bodies and
// for the Token! table and #[cfg(...)] attribute arguments.
mod parsing {
    use super::{AstItem, TokenLookup};

    use proc_macro2::TokenStream;
    use quote::quote;
    use syn;
    use syn::parse::{Parse, ParseStream, Result};
    use syn::*;
    use syn_codegen as types;

    use std::collections::{BTreeMap, BTreeSet};

    // Returns true if the next tokens are `#tag`, without consuming anything.
    fn peek_tag(input: ParseStream, tag: &str) -> bool {
        let ahead = input.fork();
        ahead.parse::<Token![#]>().is_ok()
            && ahead
                .parse::<Ident>()
                .map(|ident| ident == tag)
                .unwrap_or(false)
    }

    // Parses #full - returns #[cfg(feature = "full")] if it is present, and
    // nothing otherwise.
    fn full(input: ParseStream) -> Vec<syn::Attribute> {
        if peek_tag(input, "full") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            vec![parse_quote!(#[cfg(feature = "full")])]
        } else {
            vec![]
        }
    }

    // Consumes a #manual_extra_traits tag if present; it carries no data.
    fn skip_manual_extra_traits(input: ParseStream) {
        if peek_tag(input, "manual_extra_traits") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
        }
    }

    // Parses a simple AstStruct without the `pub struct` prefix.
    fn ast_struct_inner(input: ParseStream) -> Result<AstItem> {
        let ident: Ident = input.parse()?;
        let features = full(input);
        skip_manual_extra_traits(input);
        let rest: TokenStream = input.parse()?;
        Ok(AstItem {
            // Re-wrap the remaining tokens as a normal struct declaration so
            // syn can parse it into a DeriveInput.
            ast: syn::parse2(quote! {
                pub struct #ident #rest
            })?,
            features,
        })
    }

    // ast_struct! parsing
    pub struct AstStruct(pub(super) Vec<AstItem>);
    impl Parse for AstStruct {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![struct]>()?;
            let res = input.call(ast_struct_inner)?;
            Ok(AstStruct(vec![res]))
        }
    }

    // Consumes a #no_visit tag, reporting whether it was present.
    fn no_visit(input: ParseStream) -> bool {
        if peek_tag(input, "no_visit") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            true
        } else {
            false
        }
    }

    // ast_enum! parsing
    pub struct AstEnum(pub Vec<AstItem>);
    impl Parse for AstEnum {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;
            let no_visit = no_visit(input);
            let rest: TokenStream = input.parse()?;
            // Enums tagged #no_visit are excluded from the output entirely.
            Ok(AstEnum(if no_visit {
                vec![]
            } else {
                vec![AstItem {
                    ast: syn::parse2(quote! {
                        pub enum #ident #rest
                    })?,
                    features: vec![],
                }]
            }))
        }
    }

    // A single variant of an ast_enum_of_structs!
    struct EosVariant {
        name: Ident,
        member: Option<Path>,
        inner: Option<AstItem>,
    }
    fn eos_variant(input: ParseStream) -> Result<EosVariant> {
        input.call(Attribute::parse_outer)?;
        input.parse::<Token![pub]>()?;
        let variant: Ident = input.parse()?;
        let (member, inner) = if input.peek(token::Paren) {
            let content;
            parenthesized!(content in input);
            if content.fork().call(ast_struct_inner).is_ok() {
                // The variant declares its struct inline.
                let item = content.call(ast_struct_inner)?;
                (Some(Path::from(item.ast.ident.clone())), Some(item))
            } else {
                // The variant refers to a type declared elsewhere.
                let path: Path = content.parse()?;
                (Some(path), None)
            }
        } else {
            (None, None)
        };
        input.parse::<Token![,]>()?;
        Ok(EosVariant {
            name: variant,
            member,
            inner,
        })
    }

    // ast_enum_of_structs! parsing
    pub struct AstEnumOfStructs(pub Vec<AstItem>);
    impl Parse for AstEnumOfStructs {
        fn parse(input: ParseStream) -> Result<Self> {
            input.call(Attribute::parse_outer)?;
            input.parse::<Token![pub]>()?;
            input.parse::<Token![enum]>()?;
            let ident: Ident = input.parse()?;

            let content;
            braced!(content in input);
            let mut variants = Vec::new();
            while !content.is_empty() {
                variants.push(content.call(eos_variant)?);
            }

            // Optional trailing `do_not_generate_to_tokens` marker.
            if let Some(ident) = input.parse::<Option<Ident>>()? {
                assert_eq!(ident, "do_not_generate_to_tokens");
            }

            // Synthesize the enum declaration itself from the variant list.
            let enum_item = {
                let variants = variants.iter().map(|v| {
                    let name = v.name.clone();
                    match v.member {
                        Some(ref member) => quote!(#name(#member)),
                        None => quote!(#name),
                    }
                });
                parse_quote! {
                    pub enum #ident {
                        #(#variants),*
                    }
                }
            };
            let mut items = vec![AstItem {
                ast: enum_item,
                features: vec![],
            }];
            // Inline struct declarations become separate items as well.
            items.extend(variants.into_iter().filter_map(|v| v.inner));
            Ok(AstEnumOfStructs(items))
        }
    }

    // Token! macro table parsing. Each entry has the shape
    //   (<syntax>) => { $crate::path::to::Type };
    pub struct TokenMacro(pub TokenLookup);
    impl Parse for TokenMacro {
        fn parse(input: ParseStream) -> Result<Self> {
            let mut tokens = BTreeMap::new();
            while !input.is_empty() {
                let content;
                parenthesized!(content in input);
                let token = content.parse::<TokenStream>()?.to_string();
                input.parse::<Token![=]>()?;
                input.parse::<Token![>]>()?;
                let content;
                braced!(content in input);
                input.parse::<Token![;]>()?;
                content.parse::<token::Dollar>()?;
                let path: Path = content.parse()?;
                // The final path segment names the token's type.
                let ty = path.segments.last().unwrap().into_value().ident.to_string();
                tokens.insert(token, ty.to_string());
            }
            Ok(TokenMacro(tokens))
        }
    }

    // Parses a single `feature = "..."` clause, returning the feature name.
    fn parse_feature(input: ParseStream) -> Result<String> {
        let i: syn::Ident = input.parse()?;
        assert_eq!(i, "feature");

        input.parse::<Token![=]>()?;
        let s = input.parse::<syn::LitStr>()?;

        Ok(s.value())
    }

    // Parses the argument tokens of a #[cfg(...)] attribute: either
    // `(feature = "...")` or `(any(feature = "...", ...))`.
    pub fn parse_features(input: ParseStream) -> Result<types::Features> {
        let mut features = BTreeSet::new();

        let level_1;
        parenthesized!(level_1 in input);

        // Fork to inspect the leading identifier without consuming it.
        let i: syn::Ident = level_1.fork().parse()?;

        if i == "any" {
            level_1.parse::<syn::Ident>()?;

            let level_2;
            parenthesized!(level_2 in level_1);

            while !level_2.is_empty() {
                features.insert(parse_feature(&level_2)?);

                if !level_2.is_empty() {
                    level_2.parse::<Token![,]>()?;
                }
            }
        } else if i == "feature" {
            features.insert(parse_feature(&level_1)?);
            assert!(level_1.is_empty());
        } else {
            panic!("{:?}", i);
        }

        assert!(input.is_empty());

        Ok(types::Features { any: features })
    }
}
491
492fn get_features(attrs: &[syn::Attribute], base: &[syn::Attribute]) -> Vec<syn::Attribute> {
493 let mut ret = base.to_owned();
494
495 for attr in attrs {
496 if attr.path.is_ident("cfg") {
497 ret.push(attr.clone());
498 }
499 }
500
501 ret
502}
503
/// Convenience alias for any boxed error. `dyn` makes the trait object
/// explicit; the bare-trait-object form warns under the 2018 edition this
/// file already uses (`use crate::...`).
type Error = Box<dyn std::error::Error>;
505
/// Parse `name` and, recursively, every non-inline submodule reachable from
/// it, recording all AST type declarations into `lookup`.
///
/// `features` is the set of #[cfg(...)] attributes inherited from enclosing
/// modules; nested modules extend it with their own cfg attributes.
fn load_file<P: AsRef<Path>>(
    name: P,
    features: &[syn::Attribute],
    lookup: &mut ItemLookup,
) -> Result<(), Error> {
    let name = name.as_ref();
    let parent = name.parent().ok_or("no parent path")?;

    let mut f = File::open(name)?;
    let mut src = String::new();
    f.read_to_string(&mut src)?;

    // Parse the file
    let file = syn::parse_file(&src)?;

    // Collect all of the interesting AstItems declared in this file or submodules.
    'items: for item in file.items {
        match item {
            Item::Mod(item) => {
                // Don't inspect inline modules.
                if item.content.is_some() {
                    continue;
                }

                // We don't want to try to load the generated rust files and
                // parse them, so we ignore them here.
                for name in IGNORED_MODS {
                    if item.ident == name {
                        continue 'items;
                    }
                }

                // Lookup any #[cfg()] attributes on the module and add them to
                // the feature set.
                //
                // The derive module is weird because it is built with either
                // `full` or `derive` but exported only under `derive`.
                let features = if item.ident == "derive" {
                    vec![parse_quote!(#[cfg(feature = "derive")])]
                } else {
                    get_features(&item.attrs, features)
                };

                // Look up the submodule file, and recursively parse it.
                // XXX: Only handles same-directory .rs file submodules.
                let path = parent.join(&format!("{}.rs", item.ident));
                load_file(path, &features, lookup)?;
            }
            Item::Macro(item) => {
                // Look up any #[cfg()] attributes directly on the macro
                // invocation, and add them to the feature set.
                let features = get_features(&item.attrs, features);

                // Try to parse the AstItem declaration out of the item.
                let tts = &item.mac.tts;
                let found = if item.mac.path.is_ident("ast_struct") {
                    syn::parse2::<parsing::AstStruct>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum") {
                    syn::parse2::<parsing::AstEnum>(quote!(#tts))?.0
                } else if item.mac.path.is_ident("ast_enum_of_structs") {
                    syn::parse2::<parsing::AstEnumOfStructs>(quote!(#tts))?.0
                } else {
                    continue;
                };

                // Record our features on the parsed AstItems.
                for mut item in found {
                    item.features.extend(features.clone());
                    lookup.insert(item.ast.ident.clone(), item);
                }
            }
            Item::Struct(item) => {
                // A handful of plain struct declarations (EXTRA_TYPES) are
                // recorded too, normalized into DeriveInput form.
                let ident = item.ident;
                if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
                    lookup.insert(
                        ident.clone(),
                        AstItem {
                            ast: DeriveInput {
                                ident,
                                vis: item.vis,
                                attrs: item.attrs,
                                generics: item.generics,
                                data: Data::Struct(DataStruct {
                                    fields: item.fields,
                                    struct_token: item.struct_token,
                                    semi_token: item.semi_token,
                                }),
                            },
                            features: features.to_owned(),
                        },
                    );
                }
            }
            _ => {}
        }
    }
    Ok(())
}
604
605fn load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup, Error> {
606 let name = name.as_ref();
607 let mut f = File::open(name)?;
608 let mut src = String::new();
609 f.read_to_string(&mut src)?;
610 let file = syn::parse_file(&src)?;
611 for item in file.items {
612 match item {
613 Item::Macro(item) => {
614 match item.ident {
615 Some(ref i) if i == "Token" => {}
616 _ => continue,
617 }
618 let tts = &item.mac.tts;
619 let tokens = syn::parse2::<parsing::TokenMacro>(quote!(#tts))?.0;
620 return Ok(tokens);
621 }
622 _ => {}
623 }
624 }
625
626 Err("failed to parse Token macro".into())
627}