Merge 57d8f7ad6fdfe6d23fff130c16d52f0bcf4c7602 on remote branch

Change-Id: Ic1aaf2b7794595104a1609fd6584c35f86776ac3
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..ccfa35c
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,59 @@
+// This file is generated by cargo2android.py --run --host-first-multilib.
+// Do not modify this file as changes will be overridden on upgrade.
+
+
+
+package {
+    default_applicable_licenses: [
+        "external_rust_crates_pest_generator_license",
+    ],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+//
+// large-scale-change included anything that looked like it might be a license
+// text as a license_text. e.g. LICENSE, NOTICE, COPYING etc.
+//
+// Please consider removing redundant or irrelevant files from 'license_text:'.
+// See: http://go/android-license-faq
+license {
+    name: "external_rust_crates_pest_generator_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-MIT",
+    ],
+    license_text: [
+        "LICENSE-APACHE",
+        "LICENSE-MIT",
+    ],
+}
+
+rust_library_host {
+    name: "libpest_generator",
+    crate_name: "pest_generator",
+    cargo_env_compat: true,
+    cargo_pkg_version: "2.1.3",
+    srcs: ["src/lib.rs"],
+    edition: "2015",
+    rustlibs: [
+        "libpest",
+        "libpest_meta",
+        "libproc_macro2",
+        "libquote",
+        "libsyn",
+    ],
+    compile_multilib: "first",
+}
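+
+// Note (added for illustration, not emitted by cargo2android.py): a Soong module
+// that uses this crate would pull it in through its own `rustlibs` list, e.g. a
+// hypothetical host tool:
+//     rust_binary_host { name: "my_tool", rustlibs: ["libpest_generator"], ... }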
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..1ef45f9
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,46 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "pest_generator"
+version = "2.1.3"
+authors = ["Dragoș Tiselice <dragostiselice@gmail.com>"]
+description = "pest code generator"
+homepage = "https://pest-parser.github.io/"
+documentation = "https://docs.rs/pest"
+readme = "_README.md"
+keywords = ["pest", "generator"]
+categories = ["parsing"]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/pest-parser/pest"
+[dependencies.pest]
+version = "2.1.0"
+
+[dependencies.pest_meta]
+version = "2.1.0"
+
+[dependencies.proc-macro2]
+version = "1.0"
+
+[dependencies.quote]
+version = "1.0"
+
+[dependencies.syn]
+version = "1.0"
+[badges.codecov]
+repository = "pest-parser/pest"
+
+[badges.maintenance]
+status = "actively-developed"
+
+[badges.travis-ci]
+repository = "pest-parser/pest"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
new file mode 100644
index 0000000..a70cb5f
--- /dev/null
+++ b/Cargo.toml.orig
@@ -0,0 +1,24 @@
+[package]
+name = "pest_generator"
+description = "pest code generator"
+version = "2.1.3"
+authors = ["Dragoș Tiselice <dragostiselice@gmail.com>"]
+homepage = "https://pest-parser.github.io/"
+repository = "https://github.com/pest-parser/pest"
+documentation = "https://docs.rs/pest"
+keywords = ["pest", "generator"]
+categories = ["parsing"]
+license = "MIT/Apache-2.0"
+readme = "_README.md"
+
+[dependencies]
+pest = { path = "../pest", version = "2.1.0" }
+pest_meta = { path = "../meta", version = "2.1.0" }
+proc-macro2 = "1.0"
+quote = "1.0"
+syn = "1.0"
+
+[badges]
+codecov = { repository = "pest-parser/pest" }
+maintenance = { status = "actively-developed" }
+travis-ci = { repository = "pest-parser/pest" }
diff --git a/LICENSE b/LICENSE
new file mode 120000
index 0000000..6b579aa
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1 @@
+LICENSE-APACHE
\ No newline at end of file
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644
index 0000000..16fe87b
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+	http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/LICENSE-MIT b/LICENSE-MIT
new file mode 100644
index 0000000..31aa793
--- /dev/null
+++ b/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..66a82b1
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,20 @@
+name: "pest_generator"
+description: "pest code generator"
+third_party {
+  url {
+    type: HOMEPAGE
+    value: "https://crates.io/crates/pest_generator"
+  }
+  url {
+    type: ARCHIVE
+    value: "https://static.crates.io/crates/pest_generator/pest_generator-2.1.3.crate"
+  }
+  version: "2.1.3"
+  # Dual-licensed, using the least restrictive per go/thirdpartylicenses#same.
+  license_type: NOTICE
+  last_upgrade_date {
+    year: 2022
+    month: 1
+    day: 27
+  }
+}
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_APACHE2
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..45dc4dd
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1 @@
+include platform/prebuilts/rust:master:/OWNERS
diff --git a/_README.md b/_README.md
new file mode 100644
index 0000000..8e55f29
--- /dev/null
+++ b/_README.md
@@ -0,0 +1,169 @@
+<p align="center">
+  <img src="https://raw.github.com/pest-parser/pest/master/pest-logo.svg?sanitize=true" width="80%"/>
+</p>
+
+# pest. The Elegant Parser
+
+[![Join the chat at https://gitter.im/dragostis/pest](https://badges.gitter.im/dragostis/pest.svg)](https://gitter.im/dragostis/pest?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+[![Book](https://img.shields.io/badge/book-WIP-4d76ae.svg)](https://pest-parser.github.io/book)
+[![Docs](https://docs.rs/pest/badge.svg)](https://docs.rs/pest)
+
+[![Build Status](https://travis-ci.org/pest-parser/pest.svg?branch=master)](https://travis-ci.org/pest-parser/pest)
+[![codecov](https://codecov.io/gh/pest-parser/pest/branch/master/graph/badge.svg)](https://codecov.io/gh/pest-parser/pest)
+[![Fuzzit Status](https://app.fuzzit.dev/badge?org_id=pest-parser)](https://app.fuzzit.dev/orgs/pest-parser/dashboard)
+[![Crates.io](https://img.shields.io/crates/d/pest.svg)](https://crates.io/crates/pest)
+[![Crates.io](https://img.shields.io/crates/v/pest.svg)](https://crates.io/crates/pest)
+
+pest is a general purpose parser written in Rust with a focus on accessibility,
+correctness, and performance. It uses parsing expression grammars
+(or [PEG]) as input, which are similar in spirit to regular expressions, but
+which offer the enhanced expressivity needed to parse complex languages.
+
+[PEG]: https://en.wikipedia.org/wiki/Parsing_expression_grammar
+
+## Getting started
+
+The recommended way to start parsing with pest is to read the official [book].
+
+Other helpful resources:
+
+* API reference on [docs.rs]
+* play with grammars and share them on our [fiddle]
+* leave feedback, ask questions, or greet us on [Gitter]
+
+[book]: https://pest-parser.github.io/book
+[docs.rs]: https://docs.rs/pest
+[fiddle]: https://pest-parser.github.io/#editor
+[Gitter]: https://gitter.im/dragostis/pest
+
+## Example
+
+The following is an example of a grammar for a list of alpha-numeric identifiers
+where the first identifier does not start with a digit:
+
+```
+alpha = { 'a'..'z' | 'A'..'Z' }
+digit = { '0'..'9' }
+
+ident = { (alpha | digit)+ }
+
+ident_list = _{ !digit ~ ident ~ (" " ~ ident)+ }
+          // ^
+          // ident_list rule is silent which means it produces no tokens
+```
+
+Grammars are saved in separate .pest files which are never mixed with procedural
+code. This results in an always up-to-date formalization of a language that is
+easy to read and maintain.
+
+## Meaningful error reporting
+
+Based on the grammar definition, the parser also includes automatic error
+reporting. For the example above, the input `"123"` will result in:
+
+```
+thread 'main' panicked at ' --> 1:1
+  |
+1 | 123
+  | ^---
+  |
+  = unexpected digit', src/main.rs:12
+```
+while `"ab *"` will result in:
+```
+thread 'main' panicked at ' --> 1:1
+  |
+1 | ab *
+  |    ^---
+  |
+  = expected ident', src/main.rs:12
+```
+
+## Pairs API
+
+The grammar can be used to derive a `Parser` implementation automatically.
+Parsing returns an iterator of nested token pairs:
+
+```rust
+extern crate pest;
+#[macro_use]
+extern crate pest_derive;
+
+use pest::Parser;
+
+#[derive(Parser)]
+#[grammar = "ident.pest"]
+struct IdentParser;
+
+fn main() {
+    let pairs = IdentParser::parse(Rule::ident_list, "a1 b2").unwrap_or_else(|e| panic!("{}", e));
+
+    // Because ident_list is silent, the iterator will contain idents
+    for pair in pairs {
+        // A pair is a combination of the rule which matched and a span of input
+        println!("Rule:    {:?}", pair.as_rule());
+        println!("Span:    {:?}", pair.as_span());
+        println!("Text:    {}", pair.as_str());
+
+        // A pair can be converted to an iterator of the tokens which make it up:
+        for inner_pair in pair.into_inner() {
+            match inner_pair.as_rule() {
+                Rule::alpha => println!("Letter:  {}", inner_pair.as_str()),
+                Rule::digit => println!("Digit:   {}", inner_pair.as_str()),
+                _ => unreachable!()
+            };
+        }
+    }
+}
+```
+
+This produces the following output:
+```
+Rule:    ident
+Span:    Span { start: 0, end: 2 }
+Text:    a1
+Letter:  a
+Digit:   1
+Rule:    ident
+Span:    Span { start: 3, end: 5 }
+Text:    b2
+Letter:  b
+Digit:   2
+```
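+
+Since `parse` returns a `Result`, callers can also handle failures without
+panicking. A minimal sketch, reusing the `IdentParser` derived above (printing
+the error reproduces the annotated report shown earlier):
+
+```rust
+fn try_parse(input: &str) {
+    match IdentParser::parse(Rule::ident_list, input) {
+        // On success we get the same iterator of nested pairs as above.
+        Ok(pairs) => println!("parsed {} top-level pairs", pairs.count()),
+        // On failure, the error's Display impl renders the friendly snippet.
+        Err(e) => eprintln!("{}", e),
+    }
+}
+```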
+
+## Other features
+
+* Precedence climbing
+* Input handling
+* Custom errors
+* Runs on stable Rust
+
+## Projects using pest
+
+* [pest_meta](https://github.com/pest-parser/pest/blob/master/meta/src/grammar.pest) (bootstrapped)
+* [AshPaper](https://github.com/shnewto/ashpaper)
+* [brain](https://github.com/brain-lang/brain)
+* [Chelone](https://github.com/Aaronepower/chelone)
+* [comrak](https://github.com/kivikakk/comrak)
+* [elastic-rs](https://github.com/cch123/elastic-rs)
+* [graphql-parser](https://github.com/Keats/graphql-parser)
+* [handlebars-rust](https://github.com/sunng87/handlebars-rust)
+* [hexdino](https://github.com/Luz/hexdino)
+* [Huia](https://gitlab.com/jimsy/huia/)
+* [jql](https://github.com/yamafaktory/jql)
+* [json5-rs](https://github.com/callum-oakley/json5-rs)
+* [mt940](https://github.com/svenstaro/mt940-rs)
+* [py_literal](https://github.com/jturner314/py_literal)
+* [rouler](https://github.com/jarcane/rouler)
+* [RuSh](https://github.com/lwandrebeck/RuSh)
+* [rs_pbrt](https://github.com/wahn/rs_pbrt)
+* [stache](https://github.com/dgraham/stache)
+* [tera](https://github.com/Keats/tera)
+* [ui_gen](https://github.com/emoon/ui_gen)
+* [ukhasnet-parser](https://github.com/adamgreig/ukhasnet-parser)
+* [ZoKrates](https://github.com/ZoKrates/ZoKrates)
+
+## Special thanks
+
+A special round of applause goes to Prof. Marius Minea for his guidance, and to
+all pest contributors, some of whom are none other than my friends.
diff --git a/src/generator.rs b/src/generator.rs
new file mode 100644
index 0000000..bed56f3
--- /dev/null
+++ b/src/generator.rs
@@ -0,0 +1,1002 @@
+// pest. The Elegant Parser
+// Copyright (c) 2018 Dragoș Tiselice
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
+use std::path::PathBuf;
+
+use proc_macro2::{Span, TokenStream};
+use quote::{ToTokens, TokenStreamExt};
+use syn::{self, Generics, Ident};
+
+use pest_meta::ast::*;
+use pest_meta::optimizer::*;
+use pest_meta::UNICODE_PROPERTY_NAMES;
+
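+// Assembles the complete output of `#[derive(Parser)]`: an optional
+// `include_str!` constant (so Cargo re-runs the macro when the grammar file
+// changes), the `Rule` enum, and a `::pest::Parser` impl containing one
+// generated function per optimized rule plus any referenced builtin rules.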
+#[allow(clippy::needless_pass_by_value)]
+pub fn generate(
+    name: Ident,
+    generics: &Generics,
+    path: Option<PathBuf>,
+    rules: Vec<OptimizedRule>,
+    defaults: Vec<&str>,
+    include_grammar: bool,
+) -> TokenStream {
+    let uses_eoi = defaults.iter().any(|name| *name == "EOI");
+
+    let builtins = generate_builtin_rules();
+    let include_fix = if include_grammar {
+        match path {
+            Some(ref path) => generate_include(&name, path.to_str().expect("non-Unicode path")),
+            None => quote!(),
+        }
+    } else {
+        quote!()
+    };
+    let rule_enum = generate_enum(&rules, uses_eoi);
+    let patterns = generate_patterns(&rules, uses_eoi);
+    let skip = generate_skip(&rules);
+
+    let mut rules: Vec<_> = rules.into_iter().map(generate_rule).collect();
+    rules.extend(builtins.into_iter().filter_map(|(builtin, tokens)| {
+        if defaults.contains(&builtin) {
+            Some(tokens)
+        } else {
+            None
+        }
+    }));
+
+    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
+
+    let parser_impl = quote! {
+        #[allow(clippy::all)]
+        impl #impl_generics ::pest::Parser<Rule> for #name #ty_generics #where_clause {
+            fn parse<'i>(
+                rule: Rule,
+                input: &'i str
+            ) -> ::std::result::Result<
+                ::pest::iterators::Pairs<'i, Rule>,
+                ::pest::error::Error<Rule>
+            > {
+                mod rules {
+                    pub mod hidden {
+                        use super::super::Rule;
+                        #skip
+                    }
+
+                    pub mod visible {
+                        use super::super::Rule;
+                        #( #rules )*
+                    }
+
+                    pub use self::visible::*;
+                }
+
+                ::pest::state(input, |state| {
+                    match rule {
+                        #patterns
+                    }
+                })
+            }
+        }
+    };
+
+    quote! {
+        #include_fix
+        #rule_enum
+        #parser_impl
+    }
+}
+
+// Note: All builtin rules should be validated as pest builtins in meta/src/validator.rs.
+// Some should also be keywords.
+fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
+    let mut builtins = Vec::new();
+
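+    // `insert_builtin!` / `insert_public_builtin!` are defined in the crate's
+    // src/macros.rs (not shown here); each pushes a `(rule_name, TokenStream)`
+    // pair whose tokens define an inline parsing function wrapping the given
+    // state expression.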
+    insert_builtin!(builtins, ANY, state.skip(1));
+    insert_public_builtin!(
+        builtins,
+        EOI,
+        state.rule(Rule::EOI, |state| state.end_of_input())
+    );
+    insert_builtin!(builtins, SOI, state.start_of_input());
+    insert_builtin!(builtins, PEEK, state.stack_peek());
+    insert_builtin!(builtins, PEEK_ALL, state.stack_match_peek());
+    insert_builtin!(builtins, POP, state.stack_pop());
+    insert_builtin!(builtins, POP_ALL, state.stack_match_pop());
+    insert_builtin!(builtins, DROP, state.stack_drop());
+
+    insert_builtin!(builtins, ASCII_DIGIT, state.match_range('0'..'9'));
+    insert_builtin!(builtins, ASCII_NONZERO_DIGIT, state.match_range('1'..'9'));
+    insert_builtin!(builtins, ASCII_BIN_DIGIT, state.match_range('0'..'1'));
+    insert_builtin!(builtins, ASCII_OCT_DIGIT, state.match_range('0'..'7'));
+    insert_builtin!(
+        builtins,
+        ASCII_HEX_DIGIT,
+        state
+            .match_range('0'..'9')
+            .or_else(|state| state.match_range('a'..'f'))
+            .or_else(|state| state.match_range('A'..'F'))
+    );
+    insert_builtin!(builtins, ASCII_ALPHA_LOWER, state.match_range('a'..'z'));
+    insert_builtin!(builtins, ASCII_ALPHA_UPPER, state.match_range('A'..'Z'));
+    insert_builtin!(
+        builtins,
+        ASCII_ALPHA,
+        state
+            .match_range('a'..'z')
+            .or_else(|state| state.match_range('A'..'Z'))
+    );
+    insert_builtin!(
+        builtins,
+        ASCII_ALPHANUMERIC,
+        state
+            .match_range('a'..'z')
+            .or_else(|state| state.match_range('A'..'Z'))
+            .or_else(|state| state.match_range('0'..'9'))
+    );
+    insert_builtin!(builtins, ASCII, state.match_range('\x00'..'\x7f'));
+    insert_builtin!(
+        builtins,
+        NEWLINE,
+        state
+            .match_string("\n")
+            .or_else(|state| state.match_string("\r\n"))
+            .or_else(|state| state.match_string("\r"))
+    );
+
+    for property in UNICODE_PROPERTY_NAMES {
+        let property_ident: Ident = syn::parse_str(property).unwrap();
+        // insert manually for #property substitution
+        builtins.push((property, quote! {
+            #[inline]
+            #[allow(dead_code, non_snake_case, unused_variables)]
+            fn #property_ident(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                state.match_char_by(::pest::unicode::#property_ident)
+            }
+        }));
+    }
+    builtins
+}
+
+// Needed because Cargo doesn't watch for changes in grammars.
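+// The emitted item looks roughly like this (for a hypothetical parser named
+// `MyParser` whose grammar resolves to the given absolute path):
+//     #[allow(non_upper_case_globals)]
+//     const _PEST_GRAMMAR_MyParser: &'static str = include_str!("/path/to/src/grammar.pest");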
+fn generate_include(name: &Ident, path: &str) -> TokenStream {
+    let const_name = Ident::new(&format!("_PEST_GRAMMAR_{}", name), Span::call_site());
+    // Need to make this relative to the current directory since the path to the file
+    // is derived from the CARGO_MANIFEST_DIR environment variable
+    let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
+    current_dir.push(path);
+    let relative_path = current_dir.to_str().expect("path contains invalid unicode");
+    quote! {
+        #[allow(non_upper_case_globals)]
+        const #const_name: &'static str = include_str!(#relative_path);
+    }
+}
+
+fn generate_enum(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
+    let rules = rules
+        .iter()
+        .map(|rule| Ident::new(rule.name.as_str(), Span::call_site()));
+    if uses_eoi {
+        quote! {
+            #[allow(dead_code, non_camel_case_types)]
+            #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+            pub enum Rule {
+                EOI,
+                #( #rules ),*
+            }
+        }
+    } else {
+        quote! {
+            #[allow(dead_code, non_camel_case_types)]
+            #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+            pub enum Rule {
+                #( #rules ),*
+            }
+        }
+    }
+}
+
+fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
+    let mut rules: Vec<TokenStream> = rules
+        .iter()
+        .map(|rule| {
+            let rule = Ident::new(rule.name.as_str(), Span::call_site());
+            quote! {
+                Rule::#rule => rules::#rule(state)
+            }
+        })
+        .collect();
+
+    if uses_eoi {
+        rules.push(quote! {
+            Rule::EOI => rules::EOI(state)
+        });
+    }
+
+    quote! {
+        #( #rules ),*
+    }
+}
+
+fn generate_rule(rule: OptimizedRule) -> TokenStream {
+    let name = Ident::new(&rule.name, Span::call_site());
+    let expr = if rule.ty == RuleType::Atomic || rule.ty == RuleType::CompoundAtomic {
+        generate_expr_atomic(rule.expr)
+    } else if name == "WHITESPACE" || name == "COMMENT" {
+        let atomic = generate_expr_atomic(rule.expr);
+
+        quote! {
+            state.atomic(::pest::Atomicity::Atomic, |state| {
+                #atomic
+            })
+        }
+    } else {
+        generate_expr(rule.expr)
+    };
+
+    match rule.ty {
+        RuleType::Normal => quote! {
+            #[inline]
+            #[allow(non_snake_case, unused_variables)]
+            pub fn #name(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                state.rule(Rule::#name, |state| {
+                    #expr
+                })
+            }
+        },
+        RuleType::Silent => quote! {
+            #[inline]
+            #[allow(non_snake_case, unused_variables)]
+            pub fn #name(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                #expr
+            }
+        },
+        RuleType::Atomic => quote! {
+            #[inline]
+            #[allow(non_snake_case, unused_variables)]
+            pub fn #name(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                state.rule(Rule::#name, |state| {
+                    state.atomic(::pest::Atomicity::Atomic, |state| {
+                        #expr
+                    })
+                })
+            }
+        },
+        RuleType::CompoundAtomic => quote! {
+            #[inline]
+            #[allow(non_snake_case, unused_variables)]
+            pub fn #name(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                state.atomic(::pest::Atomicity::CompoundAtomic, |state| {
+                    state.rule(Rule::#name, |state| {
+                        #expr
+                    })
+                })
+            }
+        },
+        RuleType::NonAtomic => quote! {
+            #[inline]
+            #[allow(non_snake_case, unused_variables)]
+            pub fn #name(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                state.atomic(::pest::Atomicity::NonAtomic, |state| {
+                    state.rule(Rule::#name, |state| {
+                        #expr
+                    })
+                })
+            }
+        },
+    }
+}
+
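+// The hidden `skip` rule implements implicit whitespace/comment handling: it is
+// a no-op unless the grammar defines WHITESPACE and/or COMMENT, and even then it
+// only consumes input while the parser state is NonAtomic.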
+fn generate_skip(rules: &[OptimizedRule]) -> TokenStream {
+    let whitespace = rules.iter().any(|rule| rule.name == "WHITESPACE");
+    let comment = rules.iter().any(|rule| rule.name == "COMMENT");
+
+    match (whitespace, comment) {
+        (false, false) => generate_rule!(skip, Ok(state)),
+        (true, false) => generate_rule!(
+            skip,
+            if state.atomicity() == ::pest::Atomicity::NonAtomic {
+                state.repeat(|state| super::visible::WHITESPACE(state))
+            } else {
+                Ok(state)
+            }
+        ),
+        (false, true) => generate_rule!(
+            skip,
+            if state.atomicity() == ::pest::Atomicity::NonAtomic {
+                state.repeat(|state| super::visible::COMMENT(state))
+            } else {
+                Ok(state)
+            }
+        ),
+        (true, true) => generate_rule!(
+            skip,
+            if state.atomicity() == ::pest::Atomicity::NonAtomic {
+                state.sequence(|state| {
+                    state
+                        .repeat(|state| super::visible::WHITESPACE(state))
+                        .and_then(|state| {
+                            state.repeat(|state| {
+                                state.sequence(|state| {
+                                    super::visible::COMMENT(state).and_then(|state| {
+                                        state.repeat(|state| super::visible::WHITESPACE(state))
+                                    })
+                                })
+                            })
+                        })
+                })
+            } else {
+                Ok(state)
+            }
+        ),
+    }
+}
+
+fn generate_expr(expr: OptimizedExpr) -> TokenStream {
+    match expr {
+        OptimizedExpr::Str(string) => {
+            quote! {
+                state.match_string(#string)
+            }
+        }
+        OptimizedExpr::Insens(string) => {
+            quote! {
+                state.match_insensitive(#string)
+            }
+        }
+        OptimizedExpr::Range(start, end) => {
+            let start = start.chars().next().unwrap();
+            let end = end.chars().next().unwrap();
+
+            quote! {
+                state.match_range(#start..#end)
+            }
+        }
+        OptimizedExpr::Ident(ident) => {
+            let ident = Ident::new(&ident, Span::call_site());
+            quote! { self::#ident(state) }
+        }
+        OptimizedExpr::PeekSlice(start, end_) => {
+            let end = QuoteOption(end_);
+            quote! {
+                state.stack_match_peek_slice(#start, #end, ::pest::MatchDir::BottomToTop)
+            }
+        }
+        OptimizedExpr::PosPred(expr) => {
+            let expr = generate_expr(*expr);
+
+            quote! {
+                state.lookahead(true, |state| {
+                    #expr
+                })
+            }
+        }
+        OptimizedExpr::NegPred(expr) => {
+            let expr = generate_expr(*expr);
+
+            quote! {
+                state.lookahead(false, |state| {
+                    #expr
+                })
+            }
+        }
+        OptimizedExpr::Seq(lhs, rhs) => {
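+            // `Seq` nodes are right-nested, so flatten them into `head` plus a
+            // `tail` vector; the generated sequence interleaves a call to
+            // `super::hidden::skip` between every pair of sub-expressions.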
+            let head = generate_expr(*lhs);
+            let mut tail = vec![];
+            let mut current = *rhs;
+
+            while let OptimizedExpr::Seq(lhs, rhs) = current {
+                tail.push(generate_expr(*lhs));
+                current = *rhs;
+            }
+            tail.push(generate_expr(current));
+
+            quote! {
+                state.sequence(|state| {
+                    #head
+                    #(
+                        .and_then(|state| {
+                            super::hidden::skip(state)
+                        }).and_then(|state| {
+                            #tail
+                        })
+                    )*
+                })
+            }
+        }
+        OptimizedExpr::Choice(lhs, rhs) => {
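+            // Same flattening as `Seq`, but alternatives are chained with
+            // `.or_else` and no implicit skip is inserted between them.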
+            let head = generate_expr(*lhs);
+            let mut tail = vec![];
+            let mut current = *rhs;
+
+            while let OptimizedExpr::Choice(lhs, rhs) = current {
+                tail.push(generate_expr(*lhs));
+                current = *rhs;
+            }
+            tail.push(generate_expr(current));
+
+            quote! {
+                #head
+                #(
+                    .or_else(|state| {
+                        #tail
+                    })
+                )*
+            }
+        }
+        OptimizedExpr::Opt(expr) => {
+            let expr = generate_expr(*expr);
+
+            quote! {
+                state.optional(|state| {
+                    #expr
+                })
+            }
+        }
+        OptimizedExpr::Rep(expr) => {
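+            // `e*` is generated as the equivalent of `(e ~ (skip ~ e)*)?`: the
+            // whole repetition is optional, and implicit skipping is allowed
+            // between iterations but not before the first match.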
+            let expr = generate_expr(*expr);
+
+            quote! {
+                state.sequence(|state| {
+                    state.optional(|state| {
+                        #expr.and_then(|state| {
+                            state.repeat(|state| {
+                                state.sequence(|state| {
+                                    super::hidden::skip(
+                                        state
+                                    ).and_then(|state| {
+                                        #expr
+                                    })
+                                })
+                            })
+                        })
+                    })
+                })
+            }
+        }
+        OptimizedExpr::Skip(strings) => {
+            quote! {
+                let strings = [#(#strings),*];
+
+                state.skip_until(&strings)
+            }
+        }
+        OptimizedExpr::Push(expr) => {
+            let expr = generate_expr(*expr);
+
+            quote! {
+                state.stack_push(|state| #expr)
+            }
+        }
+        OptimizedExpr::RestoreOnErr(expr) => {
+            let expr = generate_expr(*expr);
+
+            quote! {
+                state.restore_on_err(|state| #expr)
+            }
+        }
+    }
+}
+
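+// Mirror of `generate_expr` for atomic rules: the same expression shapes are
+// produced, but no `super::hidden::skip` calls are inserted, since atomic rules
+// disable implicit whitespace and comments.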
+fn generate_expr_atomic(expr: OptimizedExpr) -> TokenStream {
+    match expr {
+        OptimizedExpr::Str(string) => {
+            quote! {
+                state.match_string(#string)
+            }
+        }
+        OptimizedExpr::Insens(string) => {
+            quote! {
+                state.match_insensitive(#string)
+            }
+        }
+        OptimizedExpr::Range(start, end) => {
+            let start = start.chars().next().unwrap();
+            let end = end.chars().next().unwrap();
+
+            quote! {
+                state.match_range(#start..#end)
+            }
+        }
+        OptimizedExpr::Ident(ident) => {
+            let ident = Ident::new(&ident, Span::call_site());
+            quote! { self::#ident(state) }
+        }
+        OptimizedExpr::PeekSlice(start, end_) => {
+            let end = QuoteOption(end_);
+            quote! {
+                state.stack_match_peek_slice(#start, #end, ::pest::MatchDir::BottomToTop)
+            }
+        }
+        OptimizedExpr::PosPred(expr) => {
+            let expr = generate_expr_atomic(*expr);
+
+            quote! {
+                state.lookahead(true, |state| {
+                    #expr
+                })
+            }
+        }
+        OptimizedExpr::NegPred(expr) => {
+            let expr = generate_expr_atomic(*expr);
+
+            quote! {
+                state.lookahead(false, |state| {
+                    #expr
+                })
+            }
+        }
+        OptimizedExpr::Seq(lhs, rhs) => {
+            let head = generate_expr_atomic(*lhs);
+            let mut tail = vec![];
+            let mut current = *rhs;
+
+            while let OptimizedExpr::Seq(lhs, rhs) = current {
+                tail.push(generate_expr_atomic(*lhs));
+                current = *rhs;
+            }
+            tail.push(generate_expr_atomic(current));
+
+            quote! {
+                state.sequence(|state| {
+                    #head
+                    #(
+                        .and_then(|state| {
+                            #tail
+                        })
+                    )*
+                })
+            }
+        }
+        OptimizedExpr::Choice(lhs, rhs) => {
+            let head = generate_expr_atomic(*lhs);
+            let mut tail = vec![];
+            let mut current = *rhs;
+
+            while let OptimizedExpr::Choice(lhs, rhs) = current {
+                tail.push(generate_expr_atomic(*lhs));
+                current = *rhs;
+            }
+            tail.push(generate_expr_atomic(current));
+
+            quote! {
+                #head
+                #(
+                    .or_else(|state| {
+                        #tail
+                    })
+                )*
+            }
+        }
+        OptimizedExpr::Opt(expr) => {
+            let expr = generate_expr_atomic(*expr);
+
+            quote! {
+                state.optional(|state| {
+                    #expr
+                })
+            }
+        }
+        OptimizedExpr::Rep(expr) => {
+            let expr = generate_expr_atomic(*expr);
+
+            quote! {
+                state.repeat(|state| {
+                    #expr
+                })
+            }
+        }
+        OptimizedExpr::Skip(strings) => {
+            quote! {
+                let strings = [#(#strings),*];
+
+                state.skip_until(&strings)
+            }
+        }
+        OptimizedExpr::Push(expr) => {
+            let expr = generate_expr_atomic(*expr);
+
+            quote! {
+                state.stack_push(|state| #expr)
+            }
+        }
+        OptimizedExpr::RestoreOnErr(expr) => {
+            let expr = generate_expr_atomic(*expr);
+
+            quote! {
+                state.restore_on_err(|state| #expr)
+            }
+        }
+    }
+}
+
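+// Lets an `Option<T>` be interpolated into `quote!` as a fully qualified
+// `::std::option::Option` value; used for the optional end bound of
+// `OptimizedExpr::PeekSlice`.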
+struct QuoteOption<T>(Option<T>);
+
+impl<T: ToTokens> ToTokens for QuoteOption<T> {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
+        tokens.append_all(match self.0 {
+            Some(ref t) => quote! { ::std::option::Option::Some(#t) },
+            None => quote! { ::std::option::Option::None },
+        });
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn rule_enum_simple() {
+        let rules = vec![OptimizedRule {
+            name: "f".to_owned(),
+            ty: RuleType::Normal,
+            expr: OptimizedExpr::Ident("g".to_owned()),
+        }];
+
+        assert_eq!(
+            generate_enum(&rules, false).to_string(),
+            quote! {
+                #[allow(dead_code, non_camel_case_types)]
+                #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+                pub enum Rule {
+                    f
+                }
+            }
+            .to_string()
+        );
+    }
+
+    #[test]
+    fn sequence() {
+        let expr = OptimizedExpr::Seq(
+            Box::new(OptimizedExpr::Str("a".to_owned())),
+            Box::new(OptimizedExpr::Seq(
+                Box::new(OptimizedExpr::Str("b".to_owned())),
+                Box::new(OptimizedExpr::Seq(
+                    Box::new(OptimizedExpr::Str("c".to_owned())),
+                    Box::new(OptimizedExpr::Str("d".to_owned())),
+                )),
+            )),
+        );
+
+        assert_eq!(
+            generate_expr(expr).to_string(),
+            quote! {
+                state.sequence(|state| {
+                    state.match_string("a").and_then(|state| {
+                        super::hidden::skip(state)
+                    }).and_then(|state| {
+                        state.match_string("b")
+                    }).and_then(|state| {
+                        super::hidden::skip(state)
+                    }).and_then(|state| {
+                        state.match_string("c")
+                    }).and_then(|state| {
+                        super::hidden::skip(state)
+                    }).and_then(|state| {
+                        state.match_string("d")
+                    })
+                })
+            }
+            .to_string()
+        );
+    }
+
+    #[test]
+    fn sequence_atomic() {
+        let expr = OptimizedExpr::Seq(
+            Box::new(OptimizedExpr::Str("a".to_owned())),
+            Box::new(OptimizedExpr::Seq(
+                Box::new(OptimizedExpr::Str("b".to_owned())),
+                Box::new(OptimizedExpr::Seq(
+                    Box::new(OptimizedExpr::Str("c".to_owned())),
+                    Box::new(OptimizedExpr::Str("d".to_owned())),
+                )),
+            )),
+        );
+
+        assert_eq!(
+            generate_expr_atomic(expr).to_string(),
+            quote! {
+                state.sequence(|state| {
+                    state.match_string("a").and_then(|state| {
+                        state.match_string("b")
+                    }).and_then(|state| {
+                        state.match_string("c")
+                    }).and_then(|state| {
+                        state.match_string("d")
+                    })
+                })
+            }
+            .to_string()
+        );
+    }
+
+    #[test]
+    fn choice() {
+        let expr = OptimizedExpr::Choice(
+            Box::new(OptimizedExpr::Str("a".to_owned())),
+            Box::new(OptimizedExpr::Choice(
+                Box::new(OptimizedExpr::Str("b".to_owned())),
+                Box::new(OptimizedExpr::Choice(
+                    Box::new(OptimizedExpr::Str("c".to_owned())),
+                    Box::new(OptimizedExpr::Str("d".to_owned())),
+                )),
+            )),
+        );
+
+        assert_eq!(
+            generate_expr(expr).to_string(),
+            quote! {
+                state.match_string("a").or_else(|state| {
+                    state.match_string("b")
+                }).or_else(|state| {
+                    state.match_string("c")
+                }).or_else(|state| {
+                    state.match_string("d")
+                })
+            }
+            .to_string()
+        );
+    }
+
+    #[test]
+    fn choice_atomic() {
+        let expr = OptimizedExpr::Choice(
+            Box::new(OptimizedExpr::Str("a".to_owned())),
+            Box::new(OptimizedExpr::Choice(
+                Box::new(OptimizedExpr::Str("b".to_owned())),
+                Box::new(OptimizedExpr::Choice(
+                    Box::new(OptimizedExpr::Str("c".to_owned())),
+                    Box::new(OptimizedExpr::Str("d".to_owned())),
+                )),
+            )),
+        );
+
+        assert_eq!(
+            generate_expr_atomic(expr).to_string(),
+            quote! {
+                state.match_string("a").or_else(|state| {
+                    state.match_string("b")
+                }).or_else(|state| {
+                    state.match_string("c")
+                }).or_else(|state| {
+                    state.match_string("d")
+                })
+            }
+            .to_string()
+        );
+    }
+
+    #[test]
+    fn skip() {
+        let expr = OptimizedExpr::Skip(vec!["a".to_owned(), "b".to_owned()]);
+
+        assert_eq!(
+            generate_expr_atomic(expr).to_string(),
+            quote! {
+                let strings = ["a", "b"];
+
+                state.skip_until(&strings)
+            }
+            .to_string()
+        );
+    }
+
+    #[test]
+    fn expr_complex() {
+        let expr = OptimizedExpr::Choice(
+            Box::new(OptimizedExpr::Ident("a".to_owned())),
+            Box::new(OptimizedExpr::Seq(
+                Box::new(OptimizedExpr::Range("a".to_owned(), "b".to_owned())),
+                Box::new(OptimizedExpr::Seq(
+                    Box::new(OptimizedExpr::NegPred(Box::new(OptimizedExpr::Rep(
+                        Box::new(OptimizedExpr::Insens("b".to_owned())),
+                    )))),
+                    Box::new(OptimizedExpr::PosPred(Box::new(OptimizedExpr::Opt(
+                        Box::new(OptimizedExpr::Rep(Box::new(OptimizedExpr::Choice(
+                            Box::new(OptimizedExpr::Str("c".to_owned())),
+                            Box::new(OptimizedExpr::Str("d".to_owned())),
+                        )))),
+                    )))),
+                )),
+            )),
+        );
+
+        let sequence = quote! {
+            state.sequence(|state| {
+                super::hidden::skip(state).and_then(
+                    |state| {
+                        state.match_insensitive("b")
+                    }
+                )
+            })
+        };
+        let repeat = quote! {
+            state.repeat(|state| {
+                state.sequence(|state| {
+                    super::hidden::skip(state).and_then(|state| {
+                        state.match_string("c")
+                            .or_else(|state| {
+                                state.match_string("d")
+                            })
+                     })
+                })
+            })
+        };
+        assert_eq!(
+            generate_expr(expr).to_string(),
+            quote! {
+                self::a(state).or_else(|state| {
+                    state.sequence(|state| {
+                        state.match_range('a'..'b').and_then(|state| {
+                            super::hidden::skip(state)
+                        }).and_then(|state| {
+                            state.lookahead(false, |state| {
+                                state.sequence(|state| {
+                                    state.optional(|state| {
+                                        state.match_insensitive(
+                                            "b"
+                                        ).and_then(|state| {
+                                            state.repeat(|state| {
+                                                #sequence
+                                            })
+                                        })
+                                    })
+                                })
+                            })
+                        }).and_then(|state| {
+                            super::hidden::skip(state)
+                        }).and_then(|state| {
+                            state.lookahead(true, |state| {
+                                state.optional(|state| {
+                                    state.sequence(|state| {
+                                        state.optional(|state| {
+                                            state.match_string("c")
+                                            .or_else(|state| {
+                                                state.match_string("d")
+                                            }).and_then(|state| {
+                                                #repeat
+                                            })
+                                        })
+                                    })
+                                })
+                            })
+                        })
+                    })
+                })
+            }
+            .to_string()
+        );
+    }
+
+    #[test]
+    fn expr_complex_atomic() {
+        let expr = OptimizedExpr::Choice(
+            Box::new(OptimizedExpr::Ident("a".to_owned())),
+            Box::new(OptimizedExpr::Seq(
+                Box::new(OptimizedExpr::Range("a".to_owned(), "b".to_owned())),
+                Box::new(OptimizedExpr::Seq(
+                    Box::new(OptimizedExpr::NegPred(Box::new(OptimizedExpr::Rep(
+                        Box::new(OptimizedExpr::Insens("b".to_owned())),
+                    )))),
+                    Box::new(OptimizedExpr::PosPred(Box::new(OptimizedExpr::Opt(
+                        Box::new(OptimizedExpr::Rep(Box::new(OptimizedExpr::Choice(
+                            Box::new(OptimizedExpr::Str("c".to_owned())),
+                            Box::new(OptimizedExpr::Str("d".to_owned())),
+                        )))),
+                    )))),
+                )),
+            )),
+        );
+
+        assert_eq!(
+            generate_expr_atomic(expr).to_string(),
+            quote! {
+                self::a(state).or_else(|state| {
+                    state.sequence(|state| {
+                        state.match_range('a'..'b').and_then(|state| {
+                            state.lookahead(false, |state| {
+                                state.repeat(|state| {
+                                    state.match_insensitive("b")
+                                })
+                            })
+                        }).and_then(|state| {
+                            state.lookahead(true, |state| {
+                                state.optional(|state| {
+                                    state.repeat(|state| {
+                                        state.match_string("c")
+                                           .or_else(|state| {
+                                            state.match_string("d")
+                                        })
+                                    })
+                                })
+                            })
+                        })
+                    })
+                })
+            }
+            .to_string()
+        );
+    }
+
+    #[test]
+    fn generate_complete() {
+        let name = Ident::new("MyParser", Span::call_site());
+        let generics = Generics::default();
+        let rules = vec![OptimizedRule {
+            name: "a".to_owned(),
+            ty: RuleType::Silent,
+            expr: OptimizedExpr::Str("b".to_owned()),
+        }];
+        let defaults = vec!["ANY"];
+        let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
+        current_dir.push("test.pest");
+        let test_path = current_dir.to_str().expect("path contains invalid unicode");
+        assert_eq!(
+            generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, true).to_string(),
+            quote! {
+                #[allow(non_upper_case_globals)]
+                const _PEST_GRAMMAR_MyParser: &'static str = include_str!(#test_path);
+
+                #[allow(dead_code, non_camel_case_types)]
+                #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+                pub enum Rule {
+                    a
+                }
+
+                #[allow(clippy::all)]
+                impl ::pest::Parser<Rule> for MyParser {
+                    fn parse<'i>(
+                        rule: Rule,
+                        input: &'i str
+                    ) -> ::std::result::Result<
+                        ::pest::iterators::Pairs<'i, Rule>,
+                        ::pest::error::Error<Rule>
+                    > {
+                        mod rules {
+                            pub mod hidden {
+                                use super::super::Rule;
+
+                                #[inline]
+                                #[allow(dead_code, non_snake_case, unused_variables)]
+                                pub fn skip(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                                    Ok(state)
+                                }
+                            }
+
+                            pub mod visible {
+                                use super::super::Rule;
+
+                                #[inline]
+                                #[allow(non_snake_case, unused_variables)]
+                                pub fn a(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                                    state.match_string("b")
+                                }
+
+                                #[inline]
+                                #[allow(dead_code, non_snake_case, unused_variables)]
+                                pub fn ANY(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                                    state.skip(1)
+                                }
+                            }
+
+                            pub use self::visible::*;
+                        }
+
+                        ::pest::state(input, |state| {
+                            match rule {
+                                Rule::a => rules::a(state)
+                            }
+                        })
+                    }
+                }
+            }.to_string()
+        );
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..27b4d81
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,208 @@
+// pest. The Elegant Parser
+// Copyright (c) 2018 Dragoș Tiselice
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
+#![doc(html_root_url = "https://docs.rs/pest_derive")]
+#![recursion_limit = "256"]
+
+extern crate pest;
+extern crate pest_meta;
+
+extern crate proc_macro;
+extern crate proc_macro2;
+#[macro_use]
+extern crate quote;
+extern crate syn;
+
+use std::env;
+use std::fs::File;
+use std::io::{self, Read};
+use std::path::Path;
+
+use proc_macro2::TokenStream;
+use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};
+
+#[macro_use]
+mod macros;
+mod generator;
+
+use pest_meta::parser::{self, Rule};
+use pest_meta::{optimizer, unwrap_or_report, validator};
+
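+// Entry point used by the derive front end (`pest_derive`): it parses the derive
+// input, loads the grammar named in `#[grammar = "..."]` (relative to the crate's
+// `src/` directory) or given via `#[grammar_inline = "..."]`, validates and
+// optimizes it, and emits the `Parser` implementation. A typical caller looks
+// roughly like the sketch below; the wrapper itself lives in `pest_derive`, not in
+// this crate:
+//
+//     #[proc_macro_derive(Parser, attributes(grammar, grammar_inline))]
+//     pub fn derive_parser(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
+//         pest_generator::derive_parser(input.into(), true).into()
+//     }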
+pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
+    let ast: DeriveInput = syn::parse2(input).unwrap();
+    let (name, generics, content) = parse_derive(ast);
+
+    let (data, path) = match content {
+        GrammarSource::File(ref path) => {
+            let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
+            let path = Path::new(&root).join("src/").join(&path);
+            let file_name = match path.file_name() {
+                Some(file_name) => file_name,
+                None => panic!("grammar attribute should point to a file"),
+            };
+
+            let data = match read_file(&path) {
+                Ok(data) => data,
+                Err(error) => panic!("error opening {:?}: {}", file_name, error),
+            };
+            (data, Some(path.clone()))
+        }
+        GrammarSource::Inline(content) => (content, None),
+    };
+
+    let pairs = match parser::parse(Rule::grammar_rules, &data) {
+        Ok(pairs) => pairs,
+        Err(error) => panic!(
+            "error parsing\n{}",
+            error.renamed_rules(|rule| match *rule {
+                Rule::grammar_rule => "rule".to_owned(),
+                Rule::_push => "PUSH".to_owned(),
+                Rule::assignment_operator => "`=`".to_owned(),
+                Rule::silent_modifier => "`_`".to_owned(),
+                Rule::atomic_modifier => "`@`".to_owned(),
+                Rule::compound_atomic_modifier => "`$`".to_owned(),
+                Rule::non_atomic_modifier => "`!`".to_owned(),
+                Rule::opening_brace => "`{`".to_owned(),
+                Rule::closing_brace => "`}`".to_owned(),
+                Rule::opening_brack => "`[`".to_owned(),
+                Rule::closing_brack => "`]`".to_owned(),
+                Rule::opening_paren => "`(`".to_owned(),
+                Rule::positive_predicate_operator => "`&`".to_owned(),
+                Rule::negative_predicate_operator => "`!`".to_owned(),
+                Rule::sequence_operator => "`~`".to_owned(),
+                Rule::choice_operator => "`|`".to_owned(),
+                Rule::optional_operator => "`?`".to_owned(),
+                Rule::repeat_operator => "`*`".to_owned(),
+                Rule::repeat_once_operator => "`+`".to_owned(),
+                Rule::comma => "`,`".to_owned(),
+                Rule::closing_paren => "`)`".to_owned(),
+                Rule::quote => "`\"`".to_owned(),
+                Rule::insensitive_string => "`^`".to_owned(),
+                Rule::range_operator => "`..`".to_owned(),
+                Rule::single_quote => "`'`".to_owned(),
+                other_rule => format!("{:?}", other_rule),
+            })
+        ),
+    };
+
+    let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
+    let ast = unwrap_or_report(parser::consume_rules(pairs));
+    let optimized = optimizer::optimize(ast);
+
+    generator::generate(name, &generics, path, optimized, defaults, include_grammar)
+}
+
+fn read_file<P: AsRef<Path>>(path: P) -> io::Result<String> {
+    let mut file = File::open(path.as_ref())?;
+    let mut string = String::new();
+    file.read_to_string(&mut string)?;
+    Ok(string)
+}
+
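+// Where the grammar text comes from: a path taken from `#[grammar = "..."]`
+// (resolved relative to the crate's `src/` directory) or the grammar itself taken
+// from `#[grammar_inline = "..."]`.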
+#[derive(Debug, PartialEq)]
+enum GrammarSource {
+    File(String),
+    Inline(String),
+}
+
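+// Extracts the parser's name, its generics, and the grammar source from the derive
+// input, panicking unless exactly one `grammar`/`grammar_inline` attribute is given.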
+fn parse_derive(ast: DeriveInput) -> (Ident, Generics, GrammarSource) {
+    let name = ast.ident;
+    let generics = ast.generics;
+
+    let grammar: Vec<&Attribute> = ast
+        .attrs
+        .iter()
+        .filter(|attr| match attr.parse_meta() {
+            Ok(Meta::NameValue(name_value)) => {
+                name_value.path.is_ident("grammar") || name_value.path.is_ident("grammar_inline")
+            }
+            _ => false,
+        })
+        .collect();
+
+    let argument = match grammar.len() {
+        0 => panic!("a grammar file needs to be provided with the #[grammar = \"PATH\"] or #[grammar_inline = \"GRAMMAR CONTENTS\"] attribute"),
+        1 => get_attribute(grammar[0]),
+        _ => panic!("only 1 grammar file can be provided"),
+    };
+
+    (name, generics, argument)
+}
+
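+// Converts the single `grammar`/`grammar_inline` attribute into a `GrammarSource`.
+// For example (values are illustrative):
+//     #[grammar = "my_grammar.pest"]         => GrammarSource::File("my_grammar.pest")
+//     #[grammar_inline = "rule = { \"a\" }"] => GrammarSource::Inline("rule = { \"a\" }")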
+fn get_attribute(attr: &Attribute) -> GrammarSource {
+    match attr.parse_meta() {
+        Ok(Meta::NameValue(name_value)) => match name_value.lit {
+            Lit::Str(string) => {
+                if name_value.path.is_ident("grammar") {
+                    GrammarSource::File(string.value())
+                } else {
+                    GrammarSource::Inline(string.value())
+                }
+            }
+            _ => panic!("grammar attribute must be a string"),
+        },
+        _ => panic!("grammar attribute must be of the form `grammar = \"...\"`"),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::parse_derive;
+    use super::GrammarSource;
+    use syn;
+
+    #[test]
+    fn derive_inline_file() {
+        let definition = "
+            #[other_attr]
+            #[grammar_inline = \"GRAMMAR\"]
+            pub struct MyParser<'a, T>;
+        ";
+        let ast = syn::parse_str(definition).unwrap();
+        let (_, _, filename) = parse_derive(ast);
+        assert_eq!(filename, GrammarSource::Inline("GRAMMAR".to_string()));
+    }
+
+    #[test]
+    fn derive_ok() {
+        let definition = "
+            #[other_attr]
+            #[grammar = \"myfile.pest\"]
+            pub struct MyParser<'a, T>;
+        ";
+        let ast = syn::parse_str(definition).unwrap();
+        let (_, _, filename) = parse_derive(ast);
+        assert_eq!(filename, GrammarSource::File("myfile.pest".to_string()));
+    }
+
+    #[test]
+    #[should_panic(expected = "only 1 grammar file can be provided")]
+    fn derive_multiple_grammars() {
+        let definition = "
+            #[other_attr]
+            #[grammar = \"myfile1.pest\"]
+            #[grammar = \"myfile2.pest\"]
+            pub struct MyParser<'a, T>;
+        ";
+        let ast = syn::parse_str(definition).unwrap();
+        parse_derive(ast);
+    }
+
+    #[test]
+    #[should_panic(expected = "grammar attribute must be a string")]
+    fn derive_wrong_arg() {
+        let definition = "
+            #[other_attr]
+            #[grammar = 1]
+            pub struct MyParser<'a, T>;
+        ";
+        let ast = syn::parse_str(definition).unwrap();
+        parse_derive(ast);
+    }
+}
diff --git a/src/macros.rs b/src/macros.rs
new file mode 100644
index 0000000..9d02725
--- /dev/null
+++ b/src/macros.rs
@@ -0,0 +1,44 @@
+// pest. The Elegant Parser
+// Copyright (c) 2018 Dragoș Tiselice
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
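+// Helper macros used by the generator when building its table of built-in rules:
+// the `insert_*` macros push a `(rule_name, token_stream)` pair onto the given
+// vector, and the `generate_*_rule!` macros produce the token stream for a single
+// inlined parser function whose body is the supplied pattern expression.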
+macro_rules! insert_builtin {
+    ($builtin: expr, $name: ident, $pattern: expr) => {
+        $builtin.push((stringify!($name), generate_rule!($name, $pattern)));
+    };
+}
+
+macro_rules! insert_public_builtin {
+    ($builtin: expr, $name: ident, $pattern: expr) => {
+        $builtin.push((stringify!($name), generate_public_rule!($name, $pattern)));
+    };
+}
+
+macro_rules! generate_rule {
+    ($name: ident, $pattern: expr) => {
+        quote! {
+            #[inline]
+            #[allow(dead_code, non_snake_case, unused_variables)]
+            pub fn $name(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                $pattern
+            }
+        }
+    }
+}
+
+macro_rules! generate_public_rule {
+    ($name: ident, $pattern: expr) => {
+        quote! {
+            #[inline]
+            #[allow(dead_code, non_snake_case, unused_variables)]
+            pub fn $name(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
+                $pattern
+            }
+        }
+    }
+}
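+
+// Usage sketch (the `builtins` vector name is illustrative; the actual call sites
+// are in generator.rs): a call such as
+//
+//     insert_builtin!(builtins, ANY, state.skip(1));
+//
+// pushes ("ANY", tokens) where the tokens expand to the same `pub fn ANY(...)`
+// built-in exercised by the generator's `generate_complete` test.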