Include token types in the JSON file

Rename `types::TypeDef` to `types::Node` and introduce a `types::Definitions` struct that bundles the parsed node definitions with a token lookup. `parse()` now returns `Definitions`, and the JSON generator serializes the token map (token type name to its textual form, e.g. `AndAnd` -> `&&`) into syn.json under a new top-level `tokens` key, next to the existing `types` array.
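Not part of this patch: a minimal sketch of how a downstream tool might read the new `tokens` section out of syn.json. The field names mirror the `Introspect` struct in codegen/src/json.rs; the `Deserialize` struct, dependencies, and file path below are assumptions for illustration only.

    // Hypothetical consumer of syn.json (illustration only, not from this repo).
    use std::collections::BTreeMap;
    use std::fs;

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct Introspect {
        version: String,
        // Node definitions are left untyped in this sketch.
        types: Vec<serde_json::Value>,
        // Token type name -> textual representation, e.g. "AndAnd" -> "&&".
        tokens: BTreeMap<String, String>,
    }

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let json = fs::read_to_string("syn.json")?;
        let introspect: Introspect = serde_json::from_str(&json)?;
        println!(
            "syn {}: {} node types, {} token types",
            introspect.version,
            introspect.types.len(),
            introspect.tokens.len()
        );
        // Look up the textual form of a token by its type name.
        if let Some(repr) = introspect.tokens.get("AndAnd") {
            println!("The `AndAnd` token type is spelled `{}`", repr);
        }
        Ok(())
    }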
diff --git a/codegen/src/gen.rs b/codegen/src/gen.rs
index c954961..1186ff9 100644
--- a/codegen/src/gen.rs
+++ b/codegen/src/gen.rs
@@ -116,7 +116,7 @@
fn box_visit(
elem: &types::Type,
features: &types::Features,
- types: &[types::TypeDef],
+ types: &[types::Node],
kind: Kind,
name: &Operand,
) -> Option<TokenStream> {
@@ -133,7 +133,7 @@
fn vec_visit(
elem: &types::Type,
features: &types::Features,
- types: &[types::TypeDef],
+ types: &[types::Node],
kind: Kind,
name: &Operand,
) -> Option<TokenStream> {
@@ -171,7 +171,7 @@
fn punctuated_visit(
elem: &types::Type,
features: &types::Features,
- types: &[types::TypeDef],
+ types: &[types::Node],
kind: Kind,
name: &Operand,
) -> Option<TokenStream> {
@@ -211,7 +211,7 @@
fn option_visit(
elem: &types::Type,
features: &types::Features,
- types: &[types::TypeDef],
+ types: &[types::Node],
kind: Kind,
name: &Operand,
) -> Option<TokenStream> {
@@ -241,7 +241,7 @@
fn tuple_visit(
elems: &[types::Type],
features: &types::Features,
- types: &[types::TypeDef],
+ types: &[types::Node],
kind: Kind,
name: &Operand,
) -> Option<TokenStream> {
@@ -333,7 +333,7 @@
fn visit(
ty: &types::Type,
features: &types::Features,
- types: &[types::TypeDef],
+ types: &[types::Node],
kind: Kind,
name: &Operand,
) -> Option<TokenStream> {
@@ -391,7 +391,7 @@
}
}
- pub fn generate(state: &mut State, s: &types::TypeDef, types: &[types::TypeDef]) {
+ pub fn generate(state: &mut State, s: &types::Node, types: &[types::Node]) {
let features = visit_features(s.features());
let under_name = under_name(s.ident());
let ty = Ident::new(s.ident(), Span::call_site());
@@ -404,7 +404,7 @@
let mut fold_impl = TokenStream::new();
match s {
- types::TypeDef::Enum(ref e) => {
+ types::Node::Enum(ref e) => {
let mut visit_variants = TokenStream::new();
let mut visit_mut_variants = TokenStream::new();
let mut fold_variants = TokenStream::new();
@@ -508,7 +508,7 @@
}
});
}
- types::TypeDef::Struct(ref v) => {
+ types::Node::Struct(ref v) => {
let mut fold_fields = TokenStream::new();
for field in v.fields() {
@@ -555,7 +555,7 @@
}
let mut include_fold_impl = true;
- if let types::TypeDef::Struct(ref data) = s {
+ if let types::Node::Struct(ref data) = s {
if !data.all_fields_pub() {
include_fold_impl = false;
}
@@ -631,10 +631,10 @@
.unwrap();
}
-pub fn generate(types: &[types::TypeDef]) {
+pub fn generate(defs: &types::Definitions) {
let mut state = codegen::State::default();
- for s in types {
- codegen::generate(&mut state, s, types);
+ for s in &defs.types {
+ codegen::generate(&mut state, s, &defs.types);
}
let full_macro = quote! {
diff --git a/codegen/src/json.rs b/codegen/src/json.rs
index e1b641d..3bcf7b2 100644
--- a/codegen/src/json.rs
+++ b/codegen/src/json.rs
@@ -1,10 +1,11 @@
use crate::types;
+use std::collections::BTreeMap;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
-pub fn generate(types: &[types::TypeDef]) {
+pub fn generate(defs: &types::Definitions) {
let codegen_root = Path::new(env!("CARGO_MANIFEST_DIR"));
let mut f = File::open(codegen_root.join("../Cargo.toml")).unwrap();
@@ -17,7 +18,8 @@
serde_json::to_writer_pretty(f, &Introspect {
version: &manifest.package.version,
- types,
+ types: &defs.types,
+ tokens: &defs.tokens,
}).unwrap();
}
@@ -35,5 +37,6 @@
struct Introspect<'a> {
/// The `syn` version used to generate the introspection file
version: &'a str,
- types: &'a [types::TypeDef],
+ types: &'a [types::Node],
+ tokens: &'a BTreeMap<String, String>,
}
diff --git a/codegen/src/parse.rs b/codegen/src/parse.rs
index 25b8ac1..cd43d60 100644
--- a/codegen/src/parse.rs
+++ b/codegen/src/parse.rs
@@ -19,7 +19,7 @@
type TokenLookup = BTreeMap<String, String>;
/// Parse the contents of `src` and return a list of AST types.
-pub fn parse() -> Vec<types::TypeDef> {
+pub fn parse() -> types::Definitions {
let mut item_lookup = BTreeMap::new();
load_file(SYN_CRATE_ROOT, &[], &mut item_lookup).unwrap();
@@ -49,10 +49,14 @@
);
}
- item_lookup
+ let types = item_lookup
.values()
.map(|item| introspect_item(item, &item_lookup, &token_lookup))
- .collect()
+ .collect();
+
+ let tokens = token_lookup.into_iter().map(|(name, ty)| (ty, name)).collect();
+
+ types::Definitions { types, tokens }
}
/// Data extracted from syn source
@@ -62,18 +66,18 @@
features: Vec<syn::Attribute>,
}
-fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::TypeDef {
+fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
let features = introspect_features(&item.features);
match &item.ast.data {
- Data::Enum(ref data) => types::TypeDef::Enum(introspect_enum(
+ Data::Enum(ref data) => types::Node::Enum(introspect_enum(
&item.ast.ident,
features,
data,
items,
tokens,
)),
- Data::Struct(ref data) => types::TypeDef::Struct(introspect_struct(
+ Data::Struct(ref data) => types::Node::Struct(introspect_struct(
&item.ast.ident,
features,
data,
diff --git a/codegen/src/types.rs b/codegen/src/types.rs
index 679b72e..1eea214 100644
--- a/codegen/src/types.rs
+++ b/codegen/src/types.rs
@@ -1,8 +1,14 @@
+use std::collections::BTreeMap;
use std::ops;
+pub struct Definitions {
+ pub types: Vec<Node>,
+ pub tokens: BTreeMap<String, String>,
+}
+
#[derive(Debug, Serialize)]
#[serde(tag = "node", rename_all = "lowercase")]
-pub enum TypeDef {
+pub enum Node {
Struct(Struct),
Enum(Enum),
}
@@ -80,18 +86,18 @@
any: Vec<String>,
}
-impl TypeDef {
+impl Node {
pub fn ident(&self) -> &str {
match self {
- TypeDef::Struct(i) => &i.ident,
- TypeDef::Enum(i) => &i.ident,
+ Node::Struct(i) => &i.ident,
+ Node::Enum(i) => &i.ident,
}
}
pub fn features(&self) -> &Features {
match self {
- TypeDef::Struct(i) => &i.features,
- TypeDef::Enum(i) => &i.features,
+ Node::Struct(i) => &i.features,
+ Node::Enum(i) => &i.features,
}
}
}
diff --git a/syn.json b/syn.json
index 597cc1f..80dcb73 100644
--- a/syn.json
+++ b/syn.json
@@ -9490,5 +9490,105 @@
}
]
}
- ]
+ ],
+ "tokens": {
+ "Abstract": "abstract",
+ "Add": "+",
+ "AddEq": "+=",
+ "And": "&",
+ "AndAnd": "&&",
+ "AndEq": "&=",
+ "As": "as",
+ "Async": "async",
+ "At": "@",
+ "Auto": "auto",
+ "Bang": "!",
+ "Become": "become",
+ "Box": "box",
+ "Break": "break",
+ "Caret": "^",
+ "CaretEq": "^=",
+ "Colon": ":",
+ "Colon2": "::",
+ "Comma": ",",
+ "Const": "const",
+ "Continue": "continue",
+ "Crate": "crate",
+ "Default": "default",
+ "Div": "/",
+ "DivEq": "/=",
+ "Do": "do",
+ "Dot": ".",
+ "Dot2": "..",
+ "Dot3": "...",
+ "DotDotEq": "..=",
+ "Dyn": "dyn",
+ "Else": "else",
+ "Enum": "enum",
+ "Eq": "=",
+ "EqEq": "==",
+ "Existential": "existential",
+ "Extern": "extern",
+ "FatArrow": "=>",
+ "Final": "final",
+ "Fn": "fn",
+ "For": "for",
+ "Ge": ">=",
+ "Gt": ">",
+ "If": "if",
+ "Impl": "impl",
+ "In": "in",
+ "LArrow": "<-",
+ "Le": "<=",
+ "Let": "let",
+ "Loop": "loop",
+ "Lt": "<",
+ "Macro": "macro",
+ "Match": "match",
+ "Mod": "mod",
+ "Move": "move",
+ "MulEq": "*=",
+ "Mut": "mut",
+ "Ne": "!=",
+ "Or": "|",
+ "OrEq": "|=",
+ "OrOr": "||",
+ "Override": "override",
+ "Pound": "#",
+ "Priv": "priv",
+ "Pub": "pub",
+ "Question": "?",
+ "RArrow": "->",
+ "Ref": "ref",
+ "Rem": "%",
+ "RemEq": "%=",
+ "Return": "return",
+ "SelfType": "Self",
+ "SelfValue": "self",
+ "Semi": ";",
+ "Shl": "<<",
+ "ShlEq": "<<=",
+ "Shr": ">>",
+ "ShrEq": ">>=",
+ "Star": "*",
+ "Static": "static",
+ "Struct": "struct",
+ "Sub": "-",
+ "SubEq": "-=",
+ "Super": "super",
+ "Tilde": "~",
+ "Trait": "trait",
+ "Try": "try",
+ "Type": "type",
+ "Typeof": "typeof",
+ "Underscore": "_",
+ "Union": "union",
+ "Unsafe": "unsafe",
+ "Unsized": "unsized",
+ "Use": "use",
+ "Virtual": "virtual",
+ "Where": "where",
+ "While": "while",
+ "Yield": "yield"
+ }
}
\ No newline at end of file