[rust][pest] Move Pest to version 2.1

Bump pest from 2.0.2 to 2.1.0 and pest_derive from 2.0.1 to 2.1.0;
pest_generator (2.0.0 -> 2.1.0) and pest_meta (2.0.3 -> 2.1.0) come along
transitively. pest_generator now builds against syn 0.15.23, so the
duplicate syn 0.14.9 entry drops out of Cargo.lock, and the vendored pest
sources under rustc_deps/vendor are refreshed to match the new checksums.
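
Upstream 2.1 changes visible in this diff include: the iterators now
borrow the input as &str instead of &[u8], error rendering shows trailing
line breaks as ␍/␊, Pairs::peek() and MatchDir are new, and grammars can
be inlined with #[grammar_inline]. A minimal, hypothetical sketch of that
new surface (the parser, grammar, and rule name below are illustrative
only, not code from this repository):

    // Hypothetical downstream usage, sketching pest 2.1 features picked
    // up by this bump.
    use pest::Parser;
    use pest_derive::Parser;

    #[derive(Parser)]
    // `grammar_inline` embeds the grammar in the attribute instead of a
    // separate .pest file; this one-rule grammar matches ASCII digits.
    #[grammar_inline = "field = { ASCII_DIGIT+ }"]
    struct InlineParser;

    fn main() {
        let pairs = InlineParser::parse(Rule::field, "42").expect("parse failed");
        // peek() (new in 2.1) looks at the first pair without advancing
        // the iterator.
        if let Some(first) = pairs.peek() {
            println!("rule: {:?}, text: {}", first.as_rule(), first.as_str());
        }
    }
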
Change-Id: I8b3c7c3207df6895e4621b6b3b62ddb267a453c6
diff --git a/rustc_deps/Cargo.lock b/rustc_deps/Cargo.lock
index 0c0d14c..17e5d92 100644
--- a/rustc_deps/Cargo.lock
+++ b/rustc_deps/Cargo.lock
@@ -522,8 +522,8 @@
  "num 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "num-bigint 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_derive 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pin-utils 0.1.0-alpha.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1058,7 +1058,7 @@
 
 [[package]]
 name = "pest"
-version = "2.0.2"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "ucd-trie 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1066,32 +1066,32 @@
 
 [[package]]
 name = "pest_derive"
-version = "2.0.1"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_generator 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "pest_generator"
-version = "2.0.0"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_meta 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_meta 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.23 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "pest_meta"
-version = "2.0.3"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "sha-1 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -1519,16 +1519,6 @@
 
 [[package]]
 name = "syn"
-version = "0.14.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "syn"
 version = "0.15.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
@@ -2067,10 +2057,10 @@
 "checksum parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f0802bff09003b291ba756dc7e79313e51cc31667e94afbe847def490424cde5"
 "checksum parking_lot_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad7f7e6ebdc79edff6fdcb87a55b620174f7a989e3eb31b65231f4af57f00b8c"
 "checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831"
-"checksum pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a677051ad923732bb5c70f2d45f8985a96e3eee2e2bff86697e3b11b0c3fcfde"
-"checksum pest_derive 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b76f477146419bc539a63f4ef40e902166cb43b3e51cecc71d9136fd12c567e7"
-"checksum pest_generator 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ebee4e9680be4fd162e6f3394ae4192a6b60b1e4d17d845e631f0c68d1a3386"
-"checksum pest_meta 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1f6d5f6f0e6082578c86af197d780dc38328e3f768cec06aac9bc46d714e8221"
+"checksum pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "54f0c72a98d8ab3c99560bfd16df8059cc10e1f9a8e83e6e3b97718dd766e9c3"
+"checksum pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0"
+"checksum pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "63120576c4efd69615b5537d3d052257328a4ca82876771d6944424ccfd9f646"
+"checksum pest_meta 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5a3492a4ed208ffc247adcdcc7ba2a95be3104f58877d0d02f0df39bf3efb5e"
 "checksum phf 0.7.23 (registry+https://github.com/rust-lang/crates.io-index)" = "cec29da322b242f4c3098852c77a0ca261c9c01b806cae85a5572a1eb94db9a6"
 "checksum phf_codegen 0.7.23 (registry+https://github.com/rust-lang/crates.io-index)" = "7d187f00cd98d5afbcd8898f6cf181743a449162aeb329dcd2f3849009e605ad"
 "checksum phf_generator 0.7.23 (registry+https://github.com/rust-lang/crates.io-index)" = "03dc191feb9b08b0dc1330d6549b795b9d81aec19efe6b4a45aec8d4caee0c4b"
@@ -2121,7 +2111,6 @@
 "checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
 "checksum structopt 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "d77af7242f18c40fd19cb270985930f239ee1646cfb482050bbae9da1d18743b"
 "checksum structopt-derive 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "17ff01fe96de9d16e7372ae5f19dd7ece2c703b51043c3db9ea27f9e393ea311"
-"checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741"
 "checksum syn 0.15.23 (registry+https://github.com/rust-lang/crates.io-index)" = "9545a6a093a3f0bd59adb472700acc08cad3776f860f16a897dfce8c88721cbc"
 "checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
 "checksum syntect 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dc8a6f0db88d4afc340522c20d260411e746b2225b257c6b238a75de9d7cec78"
diff --git a/rustc_deps/Cargo.toml b/rustc_deps/Cargo.toml
index 002442f..be17778 100644
--- a/rustc_deps/Cargo.toml
+++ b/rustc_deps/Cargo.toml
@@ -40,8 +40,8 @@
 parking_lot = "0.6"
 pin-utils = "=0.1.0-alpha.3"
 pretty_assertions = "0.5.1"
-pest = "2.0.2"
-pest_derive = "2.0.1"
+pest = "2.1"
+pest_derive = "2.1"
 proptest = "0.8.7"
 rand = "0.5"
 regex = "1.0.6"
diff --git a/rustc_deps/vendor/pest/.cargo-checksum.json b/rustc_deps/vendor/pest/.cargo-checksum.json
index a2d79f0..9bc15a5 100644
--- a/rustc_deps/vendor/pest/.cargo-checksum.json
+++ b/rustc_deps/vendor/pest/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"9a77a89c6f839315b0070ef4ab2b8510c739a7456118be9f7411215e75797b8f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"526859a2308dd9fe981850ed07ecef229e7f8ec5da57426cf430438f2d5f54b1","examples/parens.rs":"373247058e0ab6a50bbaf66a5c51f3559f153eaa75921a9be0786765b1917eb4","src/error.rs":"40b19810d9ec5f6c4a56c1507a56e2ac671910c3398b81a38e3fe047dab8f04b","src/iterators/flat_pairs.rs":"0c60a16936a14b7ae5a355a66a39abd82c15f1c4b08f6a2af6f008f370eb425b","src/iterators/mod.rs":"dfb9cc6d6c0c96f7365ec69500128e3547c0b15dd4e85a5d36e125649244d57a","src/iterators/pair.rs":"869c20b6c4c8babc1ff3cad10caa5613c403d0ab46519f4e83f0ff0c4afc54bd","src/iterators/pairs.rs":"e8111bbfdcb7f7108a52ede7551d91490450babe8ca73a72728e7b8e8fb38e0c","src/iterators/queueable_token.rs":"6a445c644da06d58c657760cf98999e09c20fe867430c24bd569a96cd2573cd1","src/iterators/tokens.rs":"dddf3df7cb4753e8e319c4985f27da0138471c862bed734b23a68741e0f10865","src/lib.rs":"4baa6f48d5424e9315d563c49c7dd826405e076b3f36c23ab5a1d140123afcf2","src/macros.rs":"511513ae550a9d5fdcb7b0e3fbd45913d787d75ef978cd74e7f63a5d19d764cc","src/parser.rs":"9f80ba60d25c672995c152d2369329ccd504b0810250a3a67809562e67e65302","src/parser_state.rs":"18dae048b9f609a363ab578d68f6c30106f0ed518ee0dc5316253368faa4e6f2","src/position.rs":"72fdabc522387e53186bc0531e236c2d1fb14ca091ec81c9ce74504836b92d9f","src/prec_climber.rs":"3d9e97e3bf213642fb1907fa31c51d16200e5fe70a7439a18271179c9266fca5","src/span.rs":"54f08157021815ae1b4c960c87ba7540380b6c4c5db0ddf54328bdc2ede74dcd","src/stack.rs":"ec8eb4c379d6876bde2a3f1c7faad5c9d2bca16765520cfbbe196d4bf37b5cdb","src/token.rs":"971b9a03b43233c4fde2965509bff3ebf9375a7d7c34b00260bfeb790131d1cd","src/unicode/binary.rs":"1dcc20ae3922e92fa753849086f8290bebd6d3a1440c2f3cb3482d2192136323","src/unicode/category.rs":"94923b31f349503958e32def13c0b682e52c08b528dd1e0853b55b17627e4ad6","src/unicode/mod.rs":"4f711e5b9b9068fcb2f61866c87366d7070c4f7ab3223587d83accb8d2aa41c6","tests/calculator.rs":"c45365c361cb4852c98065bf21f8f46fbd365206a4656ee87ee406efe64f8dd6","tests/json.rs":"118e24ff557797c3e0aa787cf2c6ea39b772d046a5c3d9c873d9fb7e156def9c"},"package":"a677051ad923732bb5c70f2d45f8985a96e3eee2e2bff86697e3b11b0c3fcfde"}
\ No newline at end of file
+{"files":{"Cargo.toml":"bd6dffcabc4ed78c3374426f3447254dda3082a1b80defe8bac3e2bfbc6c2545","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"7c31c505c267ac538b70deaa9ad95ef0cc68720c46b4d3276df8f3a278d233e2","examples/parens.rs":"24b83b7fc014813aa089fd05c3ee36926e215ce74a96b07b2c4033dad4ba15b2","src/error.rs":"91a1cc3dad0700a7a14ffa3a259c1c4c3164c9a853c4a7ae8a9c801d4da12b3d","src/iterators/flat_pairs.rs":"893e9c3beb5d80e129c3cd654c4a97c15996b077b0ecc02d3ad07136a6a260a5","src/iterators/mod.rs":"dfb9cc6d6c0c96f7365ec69500128e3547c0b15dd4e85a5d36e125649244d57a","src/iterators/pair.rs":"5e3e432a26f297add52a0c7b49c64ceaf1be69d6b9ba292fb4d2dda99ec2f538","src/iterators/pairs.rs":"0877c888387ed13781a3c0015e46aa181ca8bb5dd2544e8d31f4c386f7765c77","src/iterators/queueable_token.rs":"b6c4fd1d43437ea7c6cab45717c1fb66f71fc91aaf3ed9cf9f5686fd8ccfa8f9","src/iterators/tokens.rs":"4deea56d66805afe36afb229b9355b43db051b21836af4a88d1febcc4f762ff6","src/lib.rs":"ca887bfcd076a0e5528125521b793faae6c91ec6ea3cf3386dca14defdc60f78","src/macros.rs":"576dc3b66de8d17da494c4a353474fd9bb5ebcde9ffb0d5e9ce7e950ac5b5e7e","src/parser.rs":"2cb858fa80115bf65ff6ceedf3ad73b33fd112bab2a3db4f97be84d6aa4482b5","src/parser_state.rs":"c0e6eecfea21b27540c5de0b3e45b8a1ed79ec92370392c2133ccc9faddb287e","src/position.rs":"1b28abf0c8cfefc0ed305c4072b27c4803946c2e5e0bc1d0a4d20ff70e9eb810","src/prec_climber.rs":"e56e8ef7e8e019b5277f164d44acda2d6b993979cc65457e37463204a462be1c","src/span.rs":"cc6ab623ea4c496ddac79d9f179937f47da7c08b514f917f1a3b4e16ca701f52","src/stack.rs":"67d58c3641e97bbdb54839061dca2bda19317833ce075926fd0e390cea0b13dc","src/token.rs":"4ee741dd833c06f8ae826226fb1e5eaa671732d1c68b6b79dea55856ab8b5a0a","src/unicode/binary.rs":"1dcc20ae3922e92fa753849086f8290bebd6d3a1440c2f3cb3482d2192136323","src/unicode/category.rs":"94923b31f349503958e32def13c0b682e52c08b528dd1e0853b55b17627e4ad6","src/unicode/mod.rs":"e1f7ac5b393ecf6be15a423f72fc153d88c80cfd05842ef2119568bd094baaf8","tests/calculator.rs":"5787fa07560f3187d0a83042336c8613ca90d71186aea88b79ef2a2ccf06f7ab","tests/json.rs":"430913278a7ed16587ac2fcdab2c68329a96cd5d2d2b2eff01245852792e1af1"},"package":"54f0c72a98d8ab3c99560bfd16df8059cc10e1f9a8e83e6e3b97718dd766e9c3"}
\ No newline at end of file
diff --git a/rustc_deps/vendor/pest/Cargo.toml b/rustc_deps/vendor/pest/Cargo.toml
index 8d33fdc..9f1dc14 100644
--- a/rustc_deps/vendor/pest/Cargo.toml
+++ b/rustc_deps/vendor/pest/Cargo.toml
@@ -12,7 +12,7 @@
 
 [package]
 name = "pest"
-version = "2.0.2"
+version = "2.1.0"
 authors = ["Dragoș Tiselice <dragostiselice@gmail.com>"]
 description = "The Elegant Parser"
 homepage = "https://pest-parser.github.io/"
diff --git a/rustc_deps/vendor/pest/_README.md b/rustc_deps/vendor/pest/_README.md
index 8e56956..2c94a72 100644
--- a/rustc_deps/vendor/pest/_README.md
+++ b/rustc_deps/vendor/pest/_README.md
@@ -150,6 +150,8 @@
 * [handlebars-rust](https://github.com/sunng87/handlebars-rust)
 * [hexdino](https://github.com/Luz/hexdino)
 * [Huia](https://gitlab.com/jimsy/huia/)
+* [json5-rs](https://github.com/callum-oakley/json5-rs)
+* [mt940](https://github.com/svenstaro/mt940-rs)
 * [py_literal](https://github.com/jturner314/py_literal)
 * [rouler](https://github.com/jarcane/rouler)
 * [RuSh](https://github.com/lwandrebeck/RuSh)
diff --git a/rustc_deps/vendor/pest/examples/parens.rs b/rustc_deps/vendor/pest/examples/parens.rs
index d6bc158..2720f63 100644
--- a/rustc_deps/vendor/pest/examples/parens.rs
+++ b/rustc_deps/vendor/pest/examples/parens.rs
@@ -2,16 +2,16 @@
 
 use std::io::{self, Write};
 
-use pest::{state, ParseResult, Parser, ParserState};
 use pest::error::Error;
 use pest::iterators::Pairs;
+use pest::{state, ParseResult, Parser, ParserState};
 
 #[allow(dead_code, non_camel_case_types)]
 #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
 enum Rule {
     expr,
     paren,
-    paren_end
+    paren_end,
 }
 
 struct ParenParser;
@@ -42,7 +42,7 @@
         state(input, |state| match rule {
             Rule::expr => expr(state),
             Rule::paren => paren(state),
-            _ => unreachable!()
+            _ => unreachable!(),
         })
     }
 }
@@ -69,7 +69,7 @@
 
         match ParenParser::parse(Rule::expr, &line) {
             Ok(pairs) => println!("{:?}", expr(pairs)),
-            Err(e) => println!("\n{}", e)
+            Err(e) => println!("\n{}", e),
         };
     }
 }
diff --git a/rustc_deps/vendor/pest/src/error.rs b/rustc_deps/vendor/pest/src/error.rs
index 7253793..81cedd1 100644
--- a/rustc_deps/vendor/pest/src/error.rs
+++ b/rustc_deps/vendor/pest/src/error.rs
@@ -14,9 +14,9 @@
 use std::fmt;
 use std::mem;
 
-use RuleType;
 use position::Position;
 use span::Span;
+use RuleType;
 
 /// Parse-related error type.
 #[derive(Clone, Debug, Eq, Hash, PartialEq)]
@@ -40,13 +40,13 @@
         /// Positive attempts
         positives: Vec<R>,
         /// Negative attempts
-        negatives: Vec<R>
+        negatives: Vec<R>,
     },
     /// Custom error with a message
     CustomError {
         /// Short explanation
-        message: String
-    }
+        message: String,
+    },
 }
 
 /// Where an `Error` has occurred.
@@ -55,7 +55,7 @@
     /// `Error` was created by `Error::new_from_pos`
     Pos(usize),
     /// `Error` was created by `Error::new_from_span`
-    Span((usize, usize))
+    Span((usize, usize)),
 }
 
 /// Line/column where an `Error` has occurred.
@@ -64,7 +64,7 @@
     /// Line/column pair if `Error` was created by `Error::new_from_pos`
     Pos((usize, usize)),
     /// Line/column pairs if `Error` was created by `Error::new_from_span`
-    Span((usize, usize), (usize, usize))
+    Span((usize, usize), (usize, usize)),
 }
 
 impl<R: RuleType> Error<R> {
@@ -94,15 +94,15 @@
     ///
     /// println!("{}", error);
     /// ```
-    #[cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
+    #[allow(clippy::needless_pass_by_value)]
     pub fn new_from_pos(variant: ErrorVariant<R>, pos: Position) -> Error<R> {
         Error {
             variant,
             location: InputLocation::Pos(pos.pos()),
             path: None,
-            line: pos.line_of().to_owned(),
+            line: visualize_whitespace(pos.line_of()),
             continued_line: None,
-            line_col: LineColLocation::Pos(pos.line_col())
+            line_col: LineColLocation::Pos(pos.line_col()),
         }
     }
 
@@ -134,21 +134,30 @@
     ///
     /// println!("{}", error);
     /// ```
-    #[cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
+    #[allow(clippy::needless_pass_by_value)]
     pub fn new_from_span(variant: ErrorVariant<R>, span: Span) -> Error<R> {
-        let continued_line = if span.start_pos().line_col().0 != span.end_pos().line_col().0 {
-            Some(span.end_pos().line_of().to_owned())
-        } else {
-            None
+        let end = span.end_pos();
+
+        let mut end_line_col = end.line_col();
+        // end position is after a \n, so we want to point to the visual lf symbol
+        if end_line_col.1 == 1 {
+            let mut visual_end = end.clone();
+            visual_end.skip_back(1);
+            let lc = visual_end.line_col();
+            end_line_col = (lc.0, lc.1 + 1);
         };
 
+        let mut line_iter = span.lines();
+        let start_line = visualize_whitespace(line_iter.next().unwrap_or(""));
+        let continued_line = line_iter.last().map(visualize_whitespace);
+
         Error {
             variant,
-            location: InputLocation::Span((span.start(), span.end())),
+            location: InputLocation::Span((span.start(), end.pos())),
             path: None,
-            line: span.start_pos().line_of().to_owned(),
+            line: start_line,
             continued_line,
-            line_col: LineColLocation::Span(span.start_pos().line_col(), span.end_pos().line_col())
+            line_col: LineColLocation::Span(span.start_pos().line_col(), end_line_col),
         }
     }
 
@@ -219,17 +228,17 @@
     /// ```
     pub fn renamed_rules<F>(mut self, f: F) -> Error<R>
     where
-        F: FnMut(&R) -> String
+        F: FnMut(&R) -> String,
     {
         let variant = match self.variant {
             ErrorVariant::ParsingError {
                 positives,
-                negatives
+                negatives,
             } => {
                 let message = Error::parsing_error_message(&positives, &negatives, f);
                 ErrorVariant::CustomError { message }
             }
-            variant => variant
+            variant => variant,
         };
 
         self.variant = variant;
@@ -240,14 +249,14 @@
     fn start(&self) -> (usize, usize) {
         match self.line_col {
             LineColLocation::Pos(line_col) => line_col,
-            LineColLocation::Span(start_line_col, _) => start_line_col
+            LineColLocation::Span(start_line_col, _) => start_line_col,
         }
     }
 
     fn spacing(&self) -> String {
         let line = match self.line_col {
             LineColLocation::Pos((line, _)) => line,
-            LineColLocation::Span((start_line, _), (end_line, _)) => cmp::max(start_line, end_line)
+            LineColLocation::Span((start_line, _), (end_line, _)) => cmp::max(start_line, end_line),
         };
 
         let line_str_len = format!("{}", line).len();
@@ -275,7 +284,7 @@
 
                 Some(end)
             }
-            _ => None
+            _ => None,
         };
         let offset = start - 1;
 
@@ -304,17 +313,15 @@
         match self.variant {
             ErrorVariant::ParsingError {
                 ref positives,
-                ref negatives
+                ref negatives,
             } => Error::parsing_error_message(positives, negatives, |r| format!("{:?}", r)),
-            ErrorVariant::CustomError { ref message } => {
-                message.clone()
-            }
+            ErrorVariant::CustomError { ref message } => message.clone(),
         }
     }
 
     fn parsing_error_message<F>(positives: &[R], negatives: &[R], mut f: F) -> String
     where
-        F: FnMut(&R) -> String
+        F: FnMut(&R) -> String,
     {
         match (negatives.is_empty(), positives.is_empty()) {
             (false, false) => format!(
@@ -324,13 +331,13 @@
             ),
             (false, true) => format!("unexpected {}", Error::enumerate(negatives, &mut f)),
             (true, false) => format!("expected {}", Error::enumerate(positives, &mut f)),
-            (true, true) => "unknown parsing error".to_owned()
+            (true, true) => "unknown parsing error".to_owned(),
         }
     }
 
     fn enumerate<F>(rules: &[R], f: &mut F) -> String
     where
-        F: FnMut(&R) -> String
+        F: FnMut(&R) -> String,
     {
         match rules.len() {
             1 => f(&rules[0]),
@@ -349,7 +356,11 @@
 
     pub(crate) fn format(&self) -> String {
         let spacing = self.spacing();
-        let path = self.path.as_ref().map(|path| format!("{}:", path)).unwrap_or_default();
+        let path = self
+            .path
+            .as_ref()
+            .map(|path| format!("{}:", path))
+            .unwrap_or_default();
 
         let pair = (self.line_col.clone(), &self.continued_line);
         if let (LineColLocation::Span(_, end), &Some(ref continued_line)) = pair {
@@ -426,26 +437,30 @@
     fn description(&self) -> &str {
         match self.variant {
             ErrorVariant::ParsingError { .. } => "parsing error",
-            ErrorVariant::CustomError { ref message } => message
+            ErrorVariant::CustomError { ref message } => message,
         }
     }
 }
 
+fn visualize_whitespace(input: &str) -> String {
+    input.to_owned().replace('\r', "␍").replace('\n', "␊")
+}
+
 #[cfg(test)]
 mod tests {
-    use super::*;
     use super::super::position;
+    use super::*;
 
     #[test]
     fn display_parsing_error_mixed() {
-        let input = b"ab\ncd\nef";
-        let pos = unsafe { position::new(input, 4) };
+        let input = "ab\ncd\nef";
+        let pos = position::Position::new(input, 4).unwrap();
         let error: Error<u32> = Error::new_from_pos(
             ErrorVariant::ParsingError {
                 positives: vec![1, 2, 3],
-                negatives: vec![4, 5, 6]
+                negatives: vec![4, 5, 6],
             },
-            pos
+            pos,
         );
 
         assert_eq!(
@@ -453,24 +468,25 @@
             vec![
                 " --> 2:2",
                 "  |",
-                "2 | cd",
+                "2 | cd␊",
                 "  |  ^---",
                 "  |",
                 "  = unexpected 4, 5, or 6; expected 1, 2, or 3",
-            ].join("\n")
+            ]
+            .join("\n")
         );
     }
 
     #[test]
     fn display_parsing_error_positives() {
-        let input = b"ab\ncd\nef";
-        let pos = unsafe { position::new(input, 4) };
+        let input = "ab\ncd\nef";
+        let pos = position::Position::new(input, 4).unwrap();
         let error: Error<u32> = Error::new_from_pos(
             ErrorVariant::ParsingError {
                 positives: vec![1, 2],
-                negatives: vec![]
+                negatives: vec![],
             },
-            pos
+            pos,
         );
 
         assert_eq!(
@@ -478,24 +494,25 @@
             vec![
                 " --> 2:2",
                 "  |",
-                "2 | cd",
+                "2 | cd␊",
                 "  |  ^---",
                 "  |",
                 "  = expected 1 or 2",
-            ].join("\n")
+            ]
+            .join("\n")
         );
     }
 
     #[test]
     fn display_parsing_error_negatives() {
-        let input = b"ab\ncd\nef";
-        let pos = unsafe { position::new(input, 4) };
+        let input = "ab\ncd\nef";
+        let pos = position::Position::new(input, 4).unwrap();
         let error: Error<u32> = Error::new_from_pos(
             ErrorVariant::ParsingError {
                 positives: vec![],
-                negatives: vec![4, 5, 6]
+                negatives: vec![4, 5, 6],
             },
-            pos
+            pos,
         );
 
         assert_eq!(
@@ -503,24 +520,25 @@
             vec![
                 " --> 2:2",
                 "  |",
-                "2 | cd",
+                "2 | cd␊",
                 "  |  ^---",
                 "  |",
                 "  = unexpected 4, 5, or 6",
-            ].join("\n")
+            ]
+            .join("\n")
         );
     }
 
     #[test]
     fn display_parsing_error_unknown() {
-        let input = b"ab\ncd\nef";
-        let pos = unsafe { position::new(input, 4) };
+        let input = "ab\ncd\nef";
+        let pos = position::Position::new(input, 4).unwrap();
         let error: Error<u32> = Error::new_from_pos(
             ErrorVariant::ParsingError {
                 positives: vec![],
-                negatives: vec![]
+                negatives: vec![],
             },
-            pos
+            pos,
         );
 
         assert_eq!(
@@ -528,23 +546,24 @@
             vec![
                 " --> 2:2",
                 "  |",
-                "2 | cd",
+                "2 | cd␊",
                 "  |  ^---",
                 "  |",
                 "  = unknown parsing error",
-            ].join("\n")
+            ]
+            .join("\n")
         );
     }
 
     #[test]
     fn display_custom_pos() {
-        let input = b"ab\ncd\nef";
-        let pos = unsafe { position::new(input, 4) };
+        let input = "ab\ncd\nef";
+        let pos = position::Position::new(input, 4).unwrap();
         let error: Error<u32> = Error::new_from_pos(
             ErrorVariant::CustomError {
-                message: "error: big one".to_owned()
+                message: "error: big one".to_owned(),
             },
-            pos
+            pos,
         );
 
         assert_eq!(
@@ -552,24 +571,25 @@
             vec![
                 " --> 2:2",
                 "  |",
-                "2 | cd",
+                "2 | cd␊",
                 "  |  ^---",
                 "  |",
                 "  = error: big one",
-            ].join("\n")
+            ]
+            .join("\n")
         );
     }
 
     #[test]
     fn display_custom_span_two_lines() {
-        let input = b"ab\ncd\nefgh";
-        let start = unsafe { position::new(input, 4) };
-        let end = unsafe { position::new(input, 9) };
+        let input = "ab\ncd\nefgh";
+        let start = position::Position::new(input, 4).unwrap();
+        let end = position::Position::new(input, 9).unwrap();
         let error: Error<u32> = Error::new_from_span(
             ErrorVariant::CustomError {
-                message: "error: big one".to_owned()
+                message: "error: big one".to_owned(),
             },
-            start.span(&end)
+            start.span(&end),
         );
 
         assert_eq!(
@@ -577,25 +597,26 @@
             vec![
                 " --> 2:2",
                 "  |",
-                "2 | cd",
+                "2 | cd␊",
                 "3 | efgh",
                 "  |  ^^",
                 "  |",
                 "  = error: big one",
-            ].join("\n")
+            ]
+            .join("\n")
         );
     }
 
     #[test]
     fn display_custom_span_three_lines() {
-        let input = b"ab\ncd\nefgh";
-        let start = unsafe { position::new(input, 1) };
-        let end = unsafe { position::new(input, 9) };
+        let input = "ab\ncd\nefgh";
+        let start = position::Position::new(input, 1).unwrap();
+        let end = position::Position::new(input, 9).unwrap();
         let error: Error<u32> = Error::new_from_span(
             ErrorVariant::CustomError {
-                message: "error: big one".to_owned()
+                message: "error: big one".to_owned(),
             },
-            start.span(&end)
+            start.span(&end),
         );
 
         assert_eq!(
@@ -603,26 +624,27 @@
             vec![
                 " --> 1:2",
                 "  |",
-                "1 | ab",
+                "1 | ab␊",
                 "  | ...",
                 "3 | efgh",
                 "  |  ^^",
                 "  |",
                 "  = error: big one",
-            ].join("\n")
+            ]
+            .join("\n")
         );
     }
 
     #[test]
     fn display_custom_span_two_lines_inverted_cols() {
-        let input = b"abcdef\ngh";
-        let start = unsafe { position::new(input, 5) };
-        let end = unsafe { position::new(input, 8) };
+        let input = "abcdef\ngh";
+        let start = position::Position::new(input, 5).unwrap();
+        let end = position::Position::new(input, 8).unwrap();
         let error: Error<u32> = Error::new_from_span(
             ErrorVariant::CustomError {
-                message: "error: big one".to_owned()
+                message: "error: big one".to_owned(),
             },
-            start.span(&end)
+            start.span(&end),
         );
 
         assert_eq!(
@@ -630,62 +652,125 @@
             vec![
                 " --> 1:6",
                 "  |",
-                "1 | abcdef",
+                "1 | abcdef␊",
                 "2 | gh",
                 "  | ^----^",
                 "  |",
                 "  = error: big one",
-            ].join("\n")
+            ]
+            .join("\n")
+        );
+    }
+
+    #[test]
+    fn display_custom_span_end_after_newline() {
+        let input = "abcdef\n";
+        let start = position::Position::new(input, 0).unwrap();
+        let end = position::Position::new(input, 7).unwrap();
+        assert!(start.at_start());
+        assert!(end.at_end());
+
+        let error: Error<u32> = Error::new_from_span(
+            ErrorVariant::CustomError {
+                message: "error: big one".to_owned(),
+            },
+            start.span(&end),
+        );
+
+        assert_eq!(
+            format!("{}", error),
+            vec![
+                " --> 1:1",
+                "  |",
+                "1 | abcdef␊",
+                "  | ^-----^",
+                "  |",
+                "  = error: big one",
+            ]
+            .join("\n")
+        );
+    }
+
+    #[test]
+    fn display_custom_span_empty() {
+        let input = "";
+        let start = position::Position::new(input, 0).unwrap();
+        let end = position::Position::new(input, 0).unwrap();
+        assert!(start.at_start());
+        assert!(end.at_end());
+
+        let error: Error<u32> = Error::new_from_span(
+            ErrorVariant::CustomError {
+                message: "error: empty".to_owned(),
+            },
+            start.span(&end),
+        );
+
+        assert_eq!(
+            format!("{}", error),
+            vec![
+                " --> 1:1",
+                "  |",
+                "1 | ",
+                "  | ^",
+                "  |",
+                "  = error: empty",
+            ]
+            .join("\n")
         );
     }
 
     #[test]
     fn mapped_parsing_error() {
-        let input = b"ab\ncd\nef";
-        let pos = unsafe { position::new(input, 4) };
+        let input = "ab\ncd\nef";
+        let pos = position::Position::new(input, 4).unwrap();
         let error: Error<u32> = Error::new_from_pos(
             ErrorVariant::ParsingError {
                 positives: vec![1, 2, 3],
-                negatives: vec![4, 5, 6]
+                negatives: vec![4, 5, 6],
             },
-            pos
-        ).renamed_rules(|n| format!("{}", n + 1));
+            pos,
+        )
+        .renamed_rules(|n| format!("{}", n + 1));
 
         assert_eq!(
             format!("{}", error),
             vec![
                 " --> 2:2",
                 "  |",
-                "2 | cd",
+                "2 | cd␊",
                 "  |  ^---",
                 "  |",
                 "  = unexpected 5, 6, or 7; expected 2, 3, or 4",
-            ].join("\n")
+            ]
+            .join("\n")
         );
     }
 
     #[test]
     fn error_with_path() {
-        let input = b"ab\ncd\nef";
-        let pos = unsafe { position::new(input, 4) };
+        let input = "ab\ncd\nef";
+        let pos = position::Position::new(input, 4).unwrap();
         let error: Error<u32> = Error::new_from_pos(
             ErrorVariant::ParsingError {
                 positives: vec![1, 2, 3],
-                negatives: vec![4, 5, 6]
+                negatives: vec![4, 5, 6],
             },
-            pos
-        ).with_path("file.rs");
+            pos,
+        )
+        .with_path("file.rs");
 
         assert_eq!(
             format!("{}", error),
             vec![
                 " --> file.rs:2:2",
                 "  |",
-                "2 | cd",
+                "2 | cd␊",
                 "  |  ^---",
                 "  |",
                 "  = unexpected 4, 5, or 6; expected 1, 2, or 3",
-            ].join("\n")
+            ]
+            .join("\n")
         );
     }
 }
diff --git a/rustc_deps/vendor/pest/src/iterators/flat_pairs.rs b/rustc_deps/vendor/pest/src/iterators/flat_pairs.rs
index ef9f81e..89ce829 100644
--- a/rustc_deps/vendor/pest/src/iterators/flat_pairs.rs
+++ b/rustc_deps/vendor/pest/src/iterators/flat_pairs.rs
@@ -21,22 +21,22 @@
 /// [`Pairs::flatten`]: struct.Pairs.html#method.flatten
 pub struct FlatPairs<'i, R> {
     queue: Rc<Vec<QueueableToken<R>>>,
-    input: &'i [u8],
+    input: &'i str,
     start: usize,
-    end: usize
+    end: usize,
 }
 
 pub fn new<R: RuleType>(
     queue: Rc<Vec<QueueableToken<R>>>,
-    input: &[u8],
+    input: &str,
     start: usize,
-    end: usize
+    end: usize,
 ) -> FlatPairs<R> {
     FlatPairs {
         queue,
         input,
         start,
-        end
+        end,
     }
 }
 
@@ -87,7 +87,7 @@
     fn is_start(&self, index: usize) -> bool {
         match self.queue[index] {
             QueueableToken::Start { .. } => true,
-            QueueableToken::End { .. } => false
+            QueueableToken::End { .. } => false,
         }
     }
 }
@@ -136,15 +136,15 @@
             queue: Rc::clone(&self.queue),
             input: self.input,
             start: self.start,
-            end: self.end
+            end: self.end,
         }
     }
 }
 
 #[cfg(test)]
 mod tests {
-    use super::super::super::Parser;
     use super::super::super::macros::tests::*;
+    use super::super::super::Parser;
 
     #[test]
     fn iter_for_flat_pairs() {
diff --git a/rustc_deps/vendor/pest/src/iterators/pair.rs b/rustc_deps/vendor/pest/src/iterators/pair.rs
index a1d5adc..554f70e 100644
--- a/rustc_deps/vendor/pest/src/iterators/pair.rs
+++ b/rustc_deps/vendor/pest/src/iterators/pair.rs
@@ -16,8 +16,8 @@
 use super::pairs::{self, Pairs};
 use super::queueable_token::QueueableToken;
 use super::tokens::{self, Tokens};
-use RuleType;
 use span::{self, Span};
+use RuleType;
 
 /// A matching pair of [`Token`]s and everything between them.
 ///
@@ -29,16 +29,34 @@
 /// [`Token`]: ../enum.Token.html
 #[derive(Clone)]
 pub struct Pair<'i, R> {
+    /// # Safety
+    ///
+    /// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
     queue: Rc<Vec<QueueableToken<R>>>,
-    input: &'i [u8],
-    start: usize
+    input: &'i str,
+    /// Token index into `queue`.
+    start: usize,
 }
 
-pub fn new<R: RuleType>(queue: Rc<Vec<QueueableToken<R>>>, input: &[u8], start: usize) -> Pair<R> {
+// TODO(safety): QueueableTokens must be valid indices into input.
+pub fn new<R: RuleType>(queue: Rc<Vec<QueueableToken<R>>>, input: &str, start: usize) -> Pair<R> {
+    if cfg!(debug_assertions) {
+        for tok in queue.iter() {
+            match *tok {
+                QueueableToken::Start { input_pos, .. } | QueueableToken::End { input_pos, .. } => {
+                    assert!(
+                        input.get(input_pos..).is_some(),
+                        "💥 UNSAFE `Pair` CREATED 💥"
+                    )
+                }
+            }
+        }
+    }
+
     Pair {
         queue,
         input,
-        start
+        start,
     }
 }
 
@@ -68,7 +86,7 @@
     pub fn as_rule(&self) -> R {
         match self.queue[self.pair()] {
             QueueableToken::End { rule, .. } => rule,
-            _ => unreachable!()
+            _ => unreachable!(),
         }
     }
 
@@ -99,7 +117,7 @@
         let end = self.pos(self.pair());
 
         // Generated positions always come from Positions and are UTF-8 borders.
-        unsafe { str::from_utf8_unchecked(&self.input[start..end]) }
+        &self.input[start..end]
     }
 
     /// Returns the `Span` defined by the `Pair`, consuming it.
@@ -156,7 +174,7 @@
         let end = self.pos(self.pair());
 
         // Generated positions always come from Positions and are UTF-8 borders.
-        unsafe { span::new(self.input, start, end) }
+        unsafe { span::Span::new_unchecked(self.input, start, end) }
     }
 
     /// Returns the inner `Pairs` between the `Pair`, consuming it.
@@ -221,7 +239,7 @@
             QueueableToken::Start {
                 end_token_index, ..
             } => end_token_index,
-            _ => unreachable!()
+            _ => unreachable!(),
         }
     }
 
@@ -271,7 +289,8 @@
 
 impl<'i, R: PartialEq> PartialEq for Pair<'i, R> {
     fn eq(&self, other: &Pair<'i, R>) -> bool {
-        Rc::ptr_eq(&self.queue, &other.queue) && ptr::eq(self.input, other.input)
+        Rc::ptr_eq(&self.queue, &other.queue)
+            && ptr::eq(self.input, other.input)
             && self.start == other.start
     }
 }
@@ -281,7 +300,7 @@
 impl<'i, R: Hash> Hash for Pair<'i, R> {
     fn hash<H: Hasher>(&self, state: &mut H) {
         (&*self.queue as *const Vec<QueueableToken<R>>).hash(state);
-        (self.input as *const [u8]).hash(state);
+        (self.input as *const str).hash(state);
         self.start.hash(state);
     }
 }
diff --git a/rustc_deps/vendor/pest/src/iterators/pairs.rs b/rustc_deps/vendor/pest/src/iterators/pairs.rs
index 19925f2..51077f0 100644
--- a/rustc_deps/vendor/pest/src/iterators/pairs.rs
+++ b/rustc_deps/vendor/pest/src/iterators/pairs.rs
@@ -27,22 +27,22 @@
 #[derive(Clone)]
 pub struct Pairs<'i, R> {
     queue: Rc<Vec<QueueableToken<R>>>,
-    input: &'i [u8],
+    input: &'i str,
     start: usize,
-    end: usize
+    end: usize,
 }
 
 pub fn new<R: RuleType>(
     queue: Rc<Vec<QueueableToken<R>>>,
-    input: &[u8],
+    input: &str,
     start: usize,
-    end: usize
+    end: usize,
 ) -> Pairs<R> {
     Pairs {
         queue,
         input,
         start,
-        end
+        end,
     }
 }
 
@@ -78,7 +78,7 @@
         let end = self.pos(self.end - 1);
 
         // Generated positions always come from Positions and are UTF-8 borders.
-        unsafe { str::from_utf8_unchecked(&self.input[start..end]) }
+        &self.input[start..end]
     }
 
     /// Captures inner token `Pair`s and concatenates resulting `&str`s. This does not capture
@@ -107,7 +107,8 @@
     /// ```
     #[inline]
     pub fn concat(&self) -> String {
-        self.clone().fold(String::new(), |string, pair| string + pair.as_str())
+        self.clone()
+            .fold(String::new(), |string, pair| string + pair.as_str())
     }
 
     /// Flattens the `Pairs`.
@@ -167,12 +168,22 @@
         tokens::new(self.queue, self.input, self.start, self.end)
     }
 
+    /// Peek at the first inner `Pair` without changing the position of this iterator.
+    #[inline]
+    pub fn peek(&self) -> Option<Pair<'i, R>> {
+        if self.start < self.end {
+            Some(pair::new(Rc::clone(&self.queue), self.input, self.start))
+        } else {
+            None
+        }
+    }
+
     fn pair(&self) -> usize {
         match self.queue[self.start] {
             QueueableToken::Start {
                 end_token_index, ..
             } => end_token_index,
-            _ => unreachable!()
+            _ => unreachable!(),
         }
     }
 
@@ -181,7 +192,7 @@
             QueueableToken::End {
                 start_token_index, ..
             } => start_token_index,
-            _ => unreachable!()
+            _ => unreachable!(),
         }
     }
 
@@ -198,14 +209,8 @@
     type Item = Pair<'i, R>;
 
     fn next(&mut self) -> Option<Self::Item> {
-        if self.start >= self.end {
-            return None;
-        }
-
-        let pair = pair::new(Rc::clone(&self.queue), self.input, self.start);
-
+        let pair = self.peek()?;
         self.start = self.pair() + 1;
-
         Some(pair)
     }
 }
@@ -245,8 +250,10 @@
 
 impl<'i, R: PartialEq> PartialEq for Pairs<'i, R> {
     fn eq(&self, other: &Pairs<'i, R>) -> bool {
-        Rc::ptr_eq(&self.queue, &other.queue) && ptr::eq(self.input, other.input)
-            && self.start == other.start && self.end == other.end
+        Rc::ptr_eq(&self.queue, &other.queue)
+            && ptr::eq(self.input, other.input)
+            && self.start == other.start
+            && self.end == other.end
     }
 }
 
@@ -255,7 +262,7 @@
 impl<'i, R: Hash> Hash for Pairs<'i, R> {
     fn hash<H: Hasher>(&self, state: &mut H) {
         (&*self.queue as *const Vec<QueueableToken<R>>).hash(state);
-        (self.input as *const [u8]).hash(state);
+        (self.input as *const str).hash(state);
         self.start.hash(state);
         self.end.hash(state);
     }
@@ -263,8 +270,8 @@
 
 #[cfg(test)]
 mod tests {
-    use super::super::super::Parser;
     use super::super::super::macros::tests::*;
+    use super::super::super::Parser;
 
     #[test]
     fn as_str() {
@@ -284,6 +291,7 @@
     fn pairs_debug() {
         let pairs = AbcParser::parse(Rule::a, "abcde").unwrap();
 
+        #[rustfmt::skip]
         assert_eq!(
             format!("{:?}", pairs),
             "[\
@@ -292,7 +300,7 @@
                 ] }, \
                 Pair { rule: c, span: Span { str: \"e\", start: 4, end: 5 }, inner: [] }\
             ]"
-        .to_owned()
+            .to_owned()
         );
     }
 
diff --git a/rustc_deps/vendor/pest/src/iterators/queueable_token.rs b/rustc_deps/vendor/pest/src/iterators/queueable_token.rs
index ced57d1..7d56749 100644
--- a/rustc_deps/vendor/pest/src/iterators/queueable_token.rs
+++ b/rustc_deps/vendor/pest/src/iterators/queueable_token.rs
@@ -17,11 +17,11 @@
 pub enum QueueableToken<R> {
     Start {
         end_token_index: usize,
-        input_pos: usize
+        input_pos: usize,
     },
     End {
         start_token_index: usize,
         rule: R,
-        input_pos: usize
-    }
+        input_pos: usize,
+    },
 }
diff --git a/rustc_deps/vendor/pest/src/iterators/tokens.rs b/rustc_deps/vendor/pest/src/iterators/tokens.rs
index e76273d..59b75c5 100644
--- a/rustc_deps/vendor/pest/src/iterators/tokens.rs
+++ b/rustc_deps/vendor/pest/src/iterators/tokens.rs
@@ -9,11 +9,12 @@
 
 use std::fmt;
 use std::rc::Rc;
+use std::str;
 
 use super::queueable_token::QueueableToken;
-use RuleType;
 use position;
 use token::Token;
+use RuleType;
 
 /// An iterator over [`Token`]s. It is created by [`Pair::tokens`] and [`Pairs::tokens`].
 ///
@@ -22,23 +23,40 @@
 /// [`Pairs::tokens`]: struct.Pairs.html#method.tokens
 #[derive(Clone)]
 pub struct Tokens<'i, R> {
+    /// # Safety:
+    ///
+    /// All `QueueableToken`s' `input_pos` must be valid character boundary indices into `input`.
     queue: Rc<Vec<QueueableToken<R>>>,
-    input: &'i [u8],
+    input: &'i str,
     start: usize,
-    end: usize
+    end: usize,
 }
 
+// TODO(safety): QueueableTokens must be valid indices into input.
 pub fn new<R: RuleType>(
     queue: Rc<Vec<QueueableToken<R>>>,
-    input: &[u8],
+    input: &str,
     start: usize,
-    end: usize
+    end: usize,
 ) -> Tokens<R> {
+    if cfg!(debug_assertions) {
+        for tok in queue.iter() {
+            match *tok {
+                QueueableToken::Start { input_pos, .. } | QueueableToken::End { input_pos, .. } => {
+                    assert!(
+                        input.get(input_pos..).is_some(),
+                        "💥 UNSAFE `Tokens` CREATED 💥"
+                    )
+                }
+            }
+        }
+    }
+
     Tokens {
         queue,
         input,
         start,
-        end
+        end,
     }
 }
 
@@ -47,17 +65,17 @@
         match self.queue[index] {
             QueueableToken::Start {
                 end_token_index,
-                input_pos
+                input_pos,
             } => {
                 let rule = match self.queue[end_token_index] {
                     QueueableToken::End { rule, .. } => rule,
-                    _ => unreachable!()
+                    _ => unreachable!(),
                 };
 
                 Token::Start {
                     rule,
                     // QueueableTokens are safely created.
-                    pos: unsafe { position::new(self.input, input_pos) }
+                    pos: unsafe { position::Position::new_unchecked(self.input, input_pos) },
                 }
             }
             QueueableToken::End {
@@ -66,7 +84,7 @@
                 Token::End {
                     rule,
                     // QueueableTokens are safely created.
-                    pos: unsafe { position::new(self.input, input_pos) }
+                    pos: unsafe { position::Position::new_unchecked(self.input, input_pos) },
                 }
             }
         }
@@ -111,9 +129,9 @@
 
 #[cfg(test)]
 mod tests {
-    use super::Token;
-    use super::super::super::Parser;
     use super::super::super::macros::tests::*;
+    use super::super::super::Parser;
+    use super::Token;
 
     #[test]
     fn double_ended_iter_for_tokens() {
diff --git a/rustc_deps/vendor/pest/src/lib.rs b/rustc_deps/vendor/pest/src/lib.rs
index fea3a6e..8f2de1b 100644
--- a/rustc_deps/vendor/pest/src/lib.rs
+++ b/rustc_deps/vendor/pest/src/lib.rs
@@ -25,7 +25,7 @@
 //! * API reference on [docs.rs]
 //! * play with grammars and share them on our [fiddle]
 //! * leave feedback, ask questions, or greet us on [Gitter]
-//! 
+//!
 //! [book]: https://pest-parser.github.io/book
 //! [docs.rs]: https://docs.rs/pest
 //! [fiddle]: https://pest-parser.github.io/#editor
@@ -54,6 +54,10 @@
 //!
 //! The syntax of `.pest` files is documented in the [`pest_derive` crate].
 //!
+//! ## Inline grammars
+//!
+//! Grammars can also be inlined by using the `#[grammar_inline = "..."]` attribute.
+//!
 //! [`Parser`]: trait.Parser.html
 //! [`pest_derive` crate]: https://docs.rs/pest_derive/
 
@@ -62,9 +66,9 @@
 extern crate ucd_trie;
 
 pub use parser::Parser;
-pub use parser_state::{state, Atomicity, Lookahead, ParseResult, ParserState};
+pub use parser_state::{state, Atomicity, Lookahead, MatchDir, ParseResult, ParserState};
 pub use position::Position;
-pub use span::Span;
+pub use span::{Lines, Span};
 use std::fmt::Debug;
 use std::hash::Hash;
 pub use token::Token;
diff --git a/rustc_deps/vendor/pest/src/macros.rs b/rustc_deps/vendor/pest/src/macros.rs
index 6f8d696..01f4106 100644
--- a/rustc_deps/vendor/pest/src/macros.rs
+++ b/rustc_deps/vendor/pest/src/macros.rs
@@ -304,7 +304,6 @@
 macro_rules! fails_with {
     ( parser: $parser:ident, input: $string:expr, rule: $rules:tt :: $rule:tt,
       positives: $positives:expr, negatives: $negatives:expr, pos: $pos:expr ) => {
-
         #[allow(unused_mut)]
         {
             use $crate::Parser;
@@ -312,16 +311,19 @@
             let error = $parser::parse($rules::$rule, $string).unwrap_err();
 
             match error.variant {
-                $crate::error::ErrorVariant::ParsingError { positives, negatives } => {
+                $crate::error::ErrorVariant::ParsingError {
+                    positives,
+                    negatives,
+                } => {
                     assert_eq!(positives, $positives);
                     assert_eq!(negatives, $negatives);
                 }
-                _ => unreachable!()
+                _ => unreachable!(),
             };
 
             match error.location {
                 $crate::error::InputLocation::Pos(pos) => assert_eq!(pos, $pos),
-                _ => unreachable!()
+                _ => unreachable!(),
             }
         }
     };
@@ -329,16 +331,16 @@
 
 #[cfg(test)]
 pub mod tests {
-    use super::super::{state, Parser};
     use super::super::error::Error;
     use super::super::iterators::Pairs;
+    use super::super::{state, Parser};
 
     #[allow(non_camel_case_types)]
     #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
     pub enum Rule {
         a,
         b,
-        c
+        c,
     }
 
     pub struct AbcParser;
diff --git a/rustc_deps/vendor/pest/src/parser.rs b/rustc_deps/vendor/pest/src/parser.rs
index 20fd4e7..a8792d1 100644
--- a/rustc_deps/vendor/pest/src/parser.rs
+++ b/rustc_deps/vendor/pest/src/parser.rs
@@ -7,9 +7,9 @@
 // option. All files in the project carrying such notice may not be copied,
 // modified, or distributed except according to those terms.
 
-use RuleType;
 use error::Error;
 use iterators::Pairs;
+use RuleType;
 
 /// A trait with a single method that parses strings.
 pub trait Parser<R: RuleType> {
diff --git a/rustc_deps/vendor/pest/src/parser_state.rs b/rustc_deps/vendor/pest/src/parser_state.rs
index f37bf86..541604d 100644
--- a/rustc_deps/vendor/pest/src/parser_state.rs
+++ b/rustc_deps/vendor/pest/src/parser_state.rs
@@ -10,12 +10,12 @@
 use std::ops::Range;
 use std::rc::Rc;
 
-use RuleType;
 use error::{Error, ErrorVariant};
 use iterators::{pairs, QueueableToken};
 use position::{self, Position};
 use span::Span;
 use stack::Stack;
+use RuleType;
 
 /// The current lookahead status of a [`ParserState`].
 ///
@@ -24,7 +24,7 @@
 pub enum Lookahead {
     Positive,
     Negative,
-    None
+    None,
 }
 
 /// The current atomicity of a [`ParserState`].
@@ -34,12 +34,19 @@
 pub enum Atomicity {
     Atomic,
     CompoundAtomic,
-    NonAtomic
+    NonAtomic,
 }
 
 /// Type alias to simplify specifying the return value of chained closures.
 pub type ParseResult<S> = Result<S, S>;
 
+/// Match direction for the stack. Used in `PEEK[a..b]`/`stack_match_peek_slice`.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum MatchDir {
+    BottomToTop,
+    TopToBottom,
+}
+
 /// The complete state of a [`Parser`].
 ///
 /// [`Parser`]: trait.Parser.html
@@ -52,7 +59,7 @@
     neg_attempts: Vec<R>,
     attempt_pos: usize,
     atomicity: Atomicity,
-    stack: Stack<Span<'i>>
+    stack: Stack<Span<'i>>,
 }
 
 /// Creates a `ParserState` from a `&str`, supplying it to a closure `f`.
@@ -66,14 +73,14 @@
 /// ```
 pub fn state<'i, R: RuleType, F>(input: &'i str, f: F) -> Result<pairs::Pairs<'i, R>, Error<R>>
 where
-    F: FnOnce(Box<ParserState<'i, R>>) -> ParseResult<Box<ParserState<'i, R>>>
+    F: FnOnce(Box<ParserState<'i, R>>) -> ParseResult<Box<ParserState<'i, R>>>,
 {
     let state = ParserState::new(input);
 
     match f(state) {
         Ok(state) => {
             let len = state.queue.len();
-            Ok(pairs::new(Rc::new(state.queue), input.as_bytes(), 0, len))
+            Ok(pairs::new(Rc::new(state.queue), input, 0, len))
         }
         Err(mut state) => {
             state.pos_attempts.sort();
@@ -84,9 +91,10 @@
             Err(Error::new_from_pos(
                 ErrorVariant::ParsingError {
                     positives: state.pos_attempts.clone(),
-                    negatives: state.neg_attempts.clone()
+                    negatives: state.neg_attempts.clone(),
                 },
-                unsafe { position::new(input.as_bytes(), state.attempt_pos) }
+                // TODO(performance): Guarantee state.attempt_pos is a valid position
+                position::Position::new(input, state.attempt_pos).unwrap(),
             ))
         }
     }
@@ -103,6 +111,7 @@
     /// let input = "";
     /// let state: Box<pest::ParserState<&str>> = pest::ParserState::new(input);
     /// ```
+    #[allow(clippy::new_ret_no_self)]
     pub fn new(input: &'i str) -> Box<Self> {
         Box::new(ParserState {
             position: Position::from_start(input),
@@ -112,7 +121,7 @@
             neg_attempts: vec![],
             attempt_pos: 0,
             atomicity: Atomicity::NonAtomic,
-            stack: Stack::new()
+            stack: Stack::new(),
         })
     }
 
@@ -182,7 +191,7 @@
     #[inline]
     pub fn rule<F>(mut self: Box<Self>, rule: R, f: F) -> ParseResult<Box<Self>>
     where
-        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>
+        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
     {
         let actual_pos = self.position.pos();
         let index = self.queue.len();
@@ -198,11 +207,11 @@
             // Pair's position will only be known after running the closure.
             self.queue.push(QueueableToken::Start {
                 end_token_index: 0,
-                input_pos: actual_pos
+                input_pos: actual_pos,
             });
         }
 
-        let attempts = self.pos_attempts.len() + self.neg_attempts.len();
+        let attempts = self.attempts_at(actual_pos);
 
         let result = f(self);
 
@@ -214,7 +223,7 @@
                         actual_pos,
                         pos_attempts_index,
                         neg_attempts_index,
-                        attempts
+                        attempts,
                     );
                 }
 
@@ -229,7 +238,7 @@
                             ref mut end_token_index,
                             ..
                         } => *end_token_index = new_index,
-                        _ => unreachable!()
+                        _ => unreachable!(),
                     };
 
                     let new_pos = new_state.position.pos();
@@ -237,7 +246,7 @@
                     new_state.queue.push(QueueableToken::End {
                         start_token_index: index,
                         rule,
-                        input_pos: new_pos
+                        input_pos: new_pos,
                     });
                 }
 
@@ -250,7 +259,7 @@
                         actual_pos,
                         pos_attempts_index,
                         neg_attempts_index,
-                        attempts
+                        attempts,
                     );
                 }
 
@@ -265,13 +274,21 @@
         }
     }
 
+    fn attempts_at(&self, pos: usize) -> usize {
+        if self.attempt_pos == pos {
+            self.pos_attempts.len() + self.neg_attempts.len()
+        } else {
+            0
+        }
+    }
+
     fn track(
         &mut self,
         rule: R,
         pos: usize,
         pos_attempts_index: usize,
         neg_attempts_index: usize,
-        prev_attempts: usize
+        prev_attempts: usize,
     ) {
         if self.atomicity == Atomicity::Atomic {
             return;
@@ -280,7 +297,7 @@
         // If nested rules made no progress, there is no use to report them; it's only useful to
         // track the current rule, the exception being when only one attempt has been made during
         // the children rules.
-        let curr_attempts = self.pos_attempts.len() + self.neg_attempts.len();
+        let curr_attempts = self.attempts_at(pos);
         if curr_attempts > prev_attempts && curr_attempts - prev_attempts == 1 {
             return;
         }
@@ -342,7 +359,7 @@
     #[inline]
     pub fn sequence<F>(self: Box<Self>, f: F) -> ParseResult<Box<Self>>
     where
-        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>
+        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
     {
         let token_index = self.queue.len();
         let initial_pos = self.position.clone();
@@ -391,14 +408,14 @@
     #[inline]
     pub fn repeat<F>(self: Box<Self>, mut f: F) -> ParseResult<Box<Self>>
     where
-        F: FnMut(Box<Self>) -> ParseResult<Box<Self>>
+        F: FnMut(Box<Self>) -> ParseResult<Box<Self>>,
     {
         let mut result = f(self);
 
         loop {
             match result {
                 Ok(state) => result = f(state),
-                Err(state) => return Ok(state)
+                Err(state) => return Ok(state),
             };
         }
     }
@@ -432,10 +449,10 @@
     #[inline]
     pub fn optional<F>(self: Box<Self>, f: F) -> ParseResult<Box<Self>>
     where
-        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>
+        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
     {
         match f(self) {
-            Ok(state) | Err(state) => Ok(state)
+            Ok(state) | Err(state) => Ok(state),
         }
     }
 
@@ -465,7 +482,8 @@
     /// ```
     #[inline]
     pub fn match_char_by<F>(mut self: Box<Self>, f: F) -> ParseResult<Box<Self>>
-    where F: FnOnce(char) -> bool
+    where
+        F: FnOnce(char) -> bool,
     {
         if self.position.match_char_by(f) {
             Ok(self)
@@ -708,19 +726,19 @@
     #[inline]
     pub fn lookahead<F>(mut self: Box<Self>, is_positive: bool, f: F) -> ParseResult<Box<Self>>
     where
-        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>
+        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
     {
         let initial_lookahead = self.lookahead;
 
         self.lookahead = if is_positive {
             match initial_lookahead {
                 Lookahead::None | Lookahead::Positive => Lookahead::Positive,
-                Lookahead::Negative => Lookahead::Negative
+                Lookahead::Negative => Lookahead::Negative,
             }
         } else {
             match initial_lookahead {
                 Lookahead::None | Lookahead::Positive => Lookahead::Negative,
-                Lookahead::Negative => Lookahead::Positive
+                Lookahead::Negative => Lookahead::Positive,
             }
         };
 
@@ -746,7 +764,7 @@
         } else {
             match result_state {
                 Ok(state) => Err(state),
-                Err(state) => Ok(state)
+                Err(state) => Ok(state),
             }
         }
     }
@@ -775,7 +793,7 @@
     #[inline]
     pub fn atomic<F>(mut self: Box<Self>, atomicity: Atomicity, f: F) -> ParseResult<Box<Self>>
     where
-        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>
+        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
     {
         let initial_atomicity = self.atomicity;
         let should_toggle = self.atomicity != atomicity;
@@ -823,7 +841,7 @@
     #[inline]
     pub fn stack_push<F>(self: Box<Self>, f: F) -> ParseResult<Box<Self>>
     where
-        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>
+        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
     {
         let start = self.position.clone();
 
@@ -835,7 +853,7 @@
                 state.stack.push(start.span(&end));
                 Ok(state)
             }
-            Err(state) => Err(state)
+            Err(state) => Err(state),
         }
     }
 
@@ -860,7 +878,8 @@
     /// ```
     #[inline]
     pub fn stack_peek(self: Box<Self>) -> ParseResult<Box<Self>> {
-        let string = self.stack
+        let string = self
+            .stack
             .peek()
             .expect("peek was called on empty stack")
             .as_str();
@@ -888,13 +907,71 @@
     /// ```
     #[inline]
     pub fn stack_pop(mut self: Box<Self>) -> ParseResult<Box<Self>> {
-        let string = self.stack
+        let string = self
+            .stack
             .pop()
             .expect("pop was called on empty stack")
             .as_str();
         self.match_string(string)
     }
 
+    /// Matches part of the state of the stack.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use pest::{self, MatchDir};
+    /// # #[allow(non_camel_case_types)]
+    /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+    /// enum Rule {}
+    ///
+    /// let input = "abcd cd cb";
+    /// let mut state: Box<pest::ParserState<Rule>> = pest::ParserState::new(input);
+    /// let mut result = state
+    ///     .stack_push(|state| state.match_string("a"))
+    ///     .and_then(|state| state.stack_push(|state| state.match_string("b")))
+    ///     .and_then(|state| state.stack_push(|state| state.match_string("c")))
+    ///     .and_then(|state| state.stack_push(|state| state.match_string("d")))
+    ///     .and_then(|state| state.match_string(" "))
+    ///     .and_then(|state| state.stack_match_peek_slice(2, None, MatchDir::BottomToTop))
+    ///     .and_then(|state| state.match_string(" "))
+    ///     .and_then(|state| state.stack_match_peek_slice(1, Some(-1), MatchDir::TopToBottom));
+    /// assert!(result.is_ok());
+    /// assert_eq!(result.unwrap().position().pos(), 10);
+    /// ```
+    #[inline]
+    pub fn stack_match_peek_slice(
+        mut self: Box<Self>,
+        start: i32,
+        end: Option<i32>,
+        match_dir: MatchDir,
+    ) -> ParseResult<Box<Self>> {
+        let range = match constrain_idxs(start, end, self.stack.len()) {
+            Some(r) => r,
+            None => return Err(self),
+        };
+        // succeed without consuming input if an empty slice is requested
+        if range.end <= range.start {
+            return Ok(self);
+        }
+
+        let mut position = self.position.clone();
+        let result = {
+            let mut iter_b2t = self.stack[range].iter();
+            let matcher = |span: &Span| position.match_string(span.as_str());
+            match match_dir {
+                MatchDir::BottomToTop => iter_b2t.all(matcher),
+                MatchDir::TopToBottom => iter_b2t.rev().all(matcher),
+            }
+        };
+        if result {
+            self.position = position;
+            Ok(self)
+        } else {
+            Err(self)
+        }
+    }
+
     /// Matches the full state of the stack.
     ///
     /// # Examples
@@ -905,27 +982,18 @@
     /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
     /// enum Rule {}
     ///
-    /// let input = "aaaa";
+    /// let input = "abba";
     /// let mut state: Box<pest::ParserState<Rule>> = pest::ParserState::new(input);
-    /// let mut result = state.stack_push(|state| state.match_string("a")).and_then(|state| {
-    ///     state.stack_push(|state| state.match_string("a"))
-    /// }).and_then(|state| state.stack_match_peek());
+    /// let mut result = state
+    ///     .stack_push(|state| state.match_string("a"))
+    ///     .and_then(|state| { state.stack_push(|state| state.match_string("b")) })
+    ///     .and_then(|state| state.stack_match_peek());
     /// assert!(result.is_ok());
     /// assert_eq!(result.unwrap().position().pos(), 4);
     /// ```
     #[inline]
-    pub fn stack_match_peek(mut self: Box<Self>) -> ParseResult<Box<Self>> {
-        let mut position = self.position.clone();
-        let result = self.stack.iter().all(|span| {
-            position.match_string(span.as_str())
-        });
-
-        if result {
-            self.position = position;
-            Ok(self)
-        } else {
-            Err(self)
-        }
+    pub fn stack_match_peek(self: Box<Self>) -> ParseResult<Box<Self>> {
+        self.stack_match_peek_slice(0, None, MatchDir::TopToBottom)
     }
 
     /// Matches the full state of the stack. This method will clear the stack as it evaluates.
@@ -950,8 +1018,7 @@
     pub fn stack_match_pop(mut self: Box<Self>) -> ParseResult<Box<Self>> {
         let mut position = self.position.clone();
         let mut result = true;
-        while self.stack.peek().is_some() {
-            let span = self.stack.pop().unwrap();
+        while let Some(span) = self.stack.pop() {
             result = position.match_string(span.as_str());
             if !result {
                 break;
@@ -989,7 +1056,7 @@
     pub fn stack_drop(mut self: Box<Self>) -> ParseResult<Box<Self>> {
         match self.stack.pop() {
             Some(_) => Ok(self),
-            None => Err(self)
+            None => Err(self),
         }
     }
 
@@ -1019,11 +1086,11 @@
     #[inline]
     pub fn restore_on_err<F>(self: Box<Self>, f: F) -> ParseResult<Box<Self>>
     where
-        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>
+        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
     {
         match f(self.checkpoint()) {
             Ok(state) => Ok(state),
-            Err(state) => Err(state.restore())
+            Err(state) => Err(state.restore()),
         }
     }
 
@@ -1041,3 +1108,45 @@
         self
     }
 }
+
+fn constrain_idxs(start: i32, end: Option<i32>, len: usize) -> Option<Range<usize>> {
+    let start_norm = normalize_index(start, len)?;
+    let end_norm = end.map_or(Some(len), |e| normalize_index(e, len))?;
+    Some(start_norm..end_norm)
+}
+
+/// Normalizes the index using its sequence’s length.
+/// Returns `None` if the normalized index is out of bounds.
+fn normalize_index(i: i32, len: usize) -> Option<usize> {
+    if i > len as i32 {
+        None
+    } else if i >= 0 {
+        Some(i as usize)
+    } else {
+        let real_i = len as i32 + i;
+        if real_i >= 0 {
+            Some(real_i as usize)
+        } else {
+            None
+        }
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn normalize_index_pos() {
+        assert_eq!(normalize_index(4, 6), Some(4));
+        assert_eq!(normalize_index(5, 5), Some(5));
+        assert_eq!(normalize_index(6, 3), None);
+    }
+
+    #[test]
+    fn normalize_index_neg() {
+        assert_eq!(normalize_index(-4, 6), Some(2));
+        assert_eq!(normalize_index(-5, 5), Some(0));
+        assert_eq!(normalize_index(-6, 3), None);
+    }
+}
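
The new `stack_match_peek_slice`, `constrain_idxs`, and `normalize_index` added above are what back the grammar-level `PEEK[start..end]` feature. The following is a minimal editorial sketch (not part of the vendored patch; the empty `Rule` enum and the `main` wrapper are placeholders) showing the two edge cases handled by the early returns: an empty requested slice succeeds without consuming input, while a start index past the stack length makes normalization fail and the call return `Err`.

extern crate pest;

use pest::{MatchDir, ParserState};

// Illustrative rule type only; any Copy + Debug + Eq + Hash + Ord type works.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
enum Rule {}

fn main() {
    // Push "a" and "b" so the stack holds ["a", "b"] ("b" on top); position is at 2.
    let state: Box<ParserState<Rule>> = ParserState::new("ab")
        .stack_push(|s| s.match_string("a"))
        .and_then(|s| s.stack_push(|s| s.match_string("b")))
        .unwrap();

    // An empty requested slice (end <= start after normalization) succeeds
    // without consuming any input, per the early return in stack_match_peek_slice.
    let state = state
        .stack_match_peek_slice(2, Some(1), MatchDir::BottomToTop)
        .unwrap();
    assert_eq!(state.position().pos(), 2);

    // A start index past the stack length makes normalize_index return None,
    // so the whole call fails and hands the state back via Err.
    assert!(state
        .stack_match_peek_slice(3, None, MatchDir::BottomToTop)
        .is_err());
}
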
diff --git a/rustc_deps/vendor/pest/src/position.rs b/rustc_deps/vendor/pest/src/position.rs
index 905a5ee..76e3383 100644
--- a/rustc_deps/vendor/pest/src/position.rs
+++ b/rustc_deps/vendor/pest/src/position.rs
@@ -17,16 +17,43 @@
 use span;
 
 /// A cursor position in a `&str` which provides useful methods to manually parse that string.
+#[derive(Clone)]
 pub struct Position<'i> {
-    input: &'i [u8],
-    pos: usize
-}
-
-pub unsafe fn new(input: &[u8], pos: usize) -> Position {
-    Position { input, pos }
+    input: &'i str,
+    /// # Safety:
+    ///
+    /// `pos` must fall on a valid codepoint boundary of `input` (i.e. indexing `input[pos..]` must not panic).
+    pos: usize,
 }
 
 impl<'i> Position<'i> {
+    /// Create a new `Position` without checking invariants. (Checked with `debug_assertions`.)
+    ///
+    /// # Safety:
+    ///
+    /// `pos` must fall on a valid codepoint boundary of `input` (i.e. indexing `input[pos..]` must not panic).
+    pub(crate) unsafe fn new_unchecked(input: &str, pos: usize) -> Position {
+        debug_assert!(input.get(pos..).is_some());
+        Position { input, pos }
+    }
+
+    /// Attempts to create a new `Position` at the given position. If the specified position is
+    /// an invalid index, or is not a valid UTF-8 boundary, then `None` is
+    /// returned.
+    ///
+    /// # Examples
+    /// ```
+    /// # use pest::Position;
+    /// let cheart = '💖';
+    /// let heart = "💖";
+    /// assert_eq!(Position::new(heart, 1), None);
+    /// assert_ne!(Position::new(heart, cheart.len_utf8()), None);
+    /// ```
+    #[allow(clippy::new_ret_no_self)]
+    pub fn new(input: &str, pos: usize) -> Option<Position> {
+        input.get(pos..).map(|_| Position { input, pos })
+    }
+
     /// Creates a `Position` at the start of a `&str`.
     ///
     /// # Examples
@@ -39,7 +66,7 @@
     #[inline]
     pub fn from_start(input: &'i str) -> Position<'i> {
         // Position 0 is always safe because it's always a valid UTF-8 border.
-        unsafe { new(input.as_bytes(), 0) }
+        Position { input, pos: 0 }
     }
 
     /// Returns the byte position of this `Position` as a `usize`.
@@ -77,10 +104,13 @@
     /// ```
     #[inline]
     pub fn span(&self, other: &Position<'i>) -> span::Span<'i> {
-        if ptr::eq(self.input, other.input) {
-            // Position's pos is always a UTF-8 border.
-            unsafe { span::new(self.input, self.pos, other.pos) }
+        if ptr::eq(self.input, other.input)
+        /* && self.input.get(self.pos..other.pos).is_some() */
+        {
+            // This is safe because the pos field of a Position should always be a valid str index.
+            unsafe { span::Span::new_unchecked(self.input, self.pos, other.pos) }
         } else {
+            // TODO: maybe a panic if self.pos < other.pos
             panic!("span created from positions from different inputs")
         }
     }
@@ -109,7 +139,7 @@
 
         let mut pos = self.pos;
         // Position's pos is always a UTF-8 border.
-        let slice = unsafe { str::from_utf8_unchecked(&self.input[..pos]) };
+        let slice = &self.input[..pos];
         let mut chars = slice.chars().peekable();
 
         let mut line_col = (1, 1);
@@ -140,7 +170,7 @@
                     pos -= c.len_utf8();
                     line_col = (line_col.0, line_col.1 + 1);
                 }
-                None => unreachable!()
+                None => unreachable!(),
             }
         }
 
@@ -164,59 +194,48 @@
     /// assert_eq!(result.unwrap().position().line_of(), "a");
     /// ```
     #[inline]
-    pub fn line_of(&self) -> &str {
+    pub fn line_of(&self) -> &'i str {
         if self.pos > self.input.len() {
             panic!("position out of bounds");
-        }
-
-        let start = if self.pos == 0 {
-            0
-        } else {
-            // Position's pos is always a UTF-8 border.
-            let start = unsafe { str::from_utf8_unchecked(self.input) }
-                .char_indices()
-                .rev()
-                .skip_while(|&(i, _)| i >= self.pos)
-                .find(|&(_, c)| c == '\n');
-            match start {
-                Some((i, _)) => i + 1,
-                None => 0
-            }
         };
+        // Safe since start and end can only be valid UTF-8 borders.
+        &self.input[self.find_line_start()..self.find_line_end()]
+    }
 
-        let end = if self.input.is_empty() {
+    pub(crate) fn find_line_start(&self) -> usize {
+        if self.input.is_empty() {
+            return 0;
+        };
+        // Position's pos is always a UTF-8 border.
+        let start = self
+            .input
+            .char_indices()
+            .rev()
+            .skip_while(|&(i, _)| i >= self.pos)
+            .find(|&(_, c)| c == '\n');
+        match start {
+            Some((i, _)) => i + 1,
+            None => 0,
+        }
+    }
+
+    pub(crate) fn find_line_end(&self) -> usize {
+        if self.input.is_empty() {
             0
         } else if self.pos == self.input.len() - 1 {
-            let mut end = self.input.len();
-
-            if end > 0 && self.input[end - 1] == b'\n' {
-                end -= 1;
-            }
-            if end > 0 && self.input[end - 1] == b'\r' {
-                end -= 1;
-            }
-
-            end
+            self.input.len()
         } else {
             // Position's pos is always a UTF-8 border.
-            let end = unsafe { str::from_utf8_unchecked(self.input) }
+            let end = self
+                .input
                 .char_indices()
                 .skip_while(|&(i, _)| i < self.pos)
                 .find(|&(_, c)| c == '\n');
-            let mut end = match end {
-                Some((i, _)) => i,
-                None => self.input.len()
-            };
-
-            if end > 0 && self.input[end - 1] == b'\r' {
-                end -= 1;
+            match end {
+                Some((i, _)) => i + 1,
+                None => self.input.len(),
             }
-
-            end
-        };
-
-        // Safe since start and end can only be valid UTF-8 borders.
-        unsafe { str::from_utf8_unchecked(&self.input[start..end]) }
+        }
     }
 
     /// Returns `true` when the `Position` points to the start of the input `&str`.
@@ -238,8 +257,7 @@
         let skipped = {
             let mut len = 0;
             // Position's pos is always a UTF-8 border.
-            let mut chars = unsafe { str::from_utf8_unchecked(&self.input[self.pos..]) }.chars();
-
+            let mut chars = (&self.input[self.pos..]).chars();
             for _ in 0..n {
                 if let Some(c) = chars.next() {
                     len += c.len_utf8();
@@ -247,7 +265,6 @@
                     return false;
                 }
             }
-
             len
         };
 
@@ -255,15 +272,42 @@
         true
     }
 
+    /// Goes back `n` `char`s from the `Position` and returns `true` if the skip was possible or `false`
+    /// otherwise. If the return value is `false`, `pos` will not be updated.
+    #[inline]
+    pub(crate) fn skip_back(&mut self, n: usize) -> bool {
+        let skipped = {
+            let mut len = 0;
+            // Position's pos is always a UTF-8 border.
+            let mut chars = (&self.input[..self.pos]).chars().rev();
+            for _ in 0..n {
+                if let Some(c) = chars.next() {
+                    len += c.len_utf8();
+                } else {
+                    return false;
+                }
+            }
+            len
+        };
+
+        self.pos -= skipped;
+        true
+    }
+
     /// Skips until one of the given `strings` is found. If none of the `strings` can be found,
     /// this function will return `false` but its `pos` will *still* be updated.
     #[inline]
     pub(crate) fn skip_until(&mut self, strings: &[&str]) -> bool {
         for from in self.pos..self.input.len() {
-            for slice in strings.iter().map(|s| s.as_bytes()) {
-                let to = from + slice.len();
+            let bytes = if let Some(string) = self.input.get(from..) {
+                string.as_bytes()
+            } else {
+                continue;
+            };
 
-                if to <= self.input.len() && slice == &self.input[from..to] {
+            for slice in strings.iter() {
+                let to = slice.len();
+                if Some(slice.as_bytes()) == bytes.get(0..to) {
                     self.pos = from;
                     return true;
                 }
@@ -278,11 +322,10 @@
     /// was made. If no match was made, returns `false` and `pos` will not be updated.
     #[inline]
     pub(crate) fn match_char_by<F>(&mut self, f: F) -> bool
-    where F: FnOnce(char) -> bool
+    where
+        F: FnOnce(char) -> bool,
     {
-        // Guaranteed UTF-8
-        let s = unsafe { str::from_utf8_unchecked(&self.input[self.pos..]) };
-        if let Some(c) = s.chars().next() {
+        if let Some(c) = (&self.input[self.pos..]).chars().next() {
             if f(c) {
                 self.pos += c.len_utf8();
                 true
@@ -298,18 +341,10 @@
     /// otherwise. If no match was made, `pos` will not be updated.
     #[inline]
     pub(crate) fn match_string(&mut self, string: &str) -> bool {
-        let matched = {
-            let to = self.pos + string.len();
+        let to = self.pos + string.len();
 
-            if to <= self.input.len() {
-                string.as_bytes() == &self.input[self.pos..to]
-            } else {
-                false
-            }
-        };
-
-        if matched {
-            self.pos += string.len();
+        if Some(string.as_bytes()) == self.input.as_bytes().get(self.pos..to) {
+            self.pos = to;
             true
         } else {
             false
@@ -321,12 +356,8 @@
     #[inline]
     pub(crate) fn match_insensitive(&mut self, string: &str) -> bool {
         let matched = {
-            // Matching is safe since, even if the string does not fall on UTF-8 borders, that
-            // particular slice is only used for comparison which will be handled correctly.
-            let slice = unsafe { str::from_utf8_unchecked(&self.input[self.pos..]) };
-
-            if slice.is_char_boundary(string.len()) {
-                let slice = unsafe { slice.get_unchecked(0..string.len()) };
+            let slice = &self.input[self.pos..];
+            if let Some(slice) = slice.get(0..string.len()) {
                 slice.eq_ignore_ascii_case(string)
             } else {
                 false
@@ -345,28 +376,14 @@
     /// otherwise. If no match was made, `pos` will not be updated.
     #[inline]
     pub(crate) fn match_range(&mut self, range: Range<char>) -> bool {
-        let len = {
-            // Cannot actually cause undefined behavior.
-            let slice = unsafe { str::from_utf8_unchecked(&self.input[self.pos..]) };
-
-            if let Some(c) = slice.chars().next() {
-                if range.start <= c && c <= range.end {
-                    Some(c.len_utf8())
-                } else {
-                    None
-                }
-            } else {
-                None
+        if let Some(c) = (&self.input[self.pos..]).chars().next() {
+            if range.start <= c && c <= range.end {
+                self.pos += c.len_utf8();
+                return true;
             }
-        };
-
-        match len {
-            Some(len) => {
-                self.pos += len;
-                true
-            }
-            None => false
         }
+
+        false
     }
 }
 
@@ -376,13 +393,6 @@
     }
 }
 
-impl<'i> Clone for Position<'i> {
-    fn clone(&self) -> Position<'i> {
-        // Cloning a safe position is safe.
-        unsafe { new(self.input, self.pos) }
-    }
-}
-
 impl<'i> PartialEq for Position<'i> {
     fn eq(&self, other: &Position<'i>) -> bool {
         ptr::eq(self.input, other.input) && self.pos == other.pos
@@ -410,7 +420,7 @@
 
 impl<'i> Hash for Position<'i> {
     fn hash<H: Hasher>(&self, state: &mut H) {
-        (self.input as *const [u8]).hash(state);
+        (self.input as *const str).hash(state);
         self.pos.hash(state);
     }
 }
@@ -423,74 +433,74 @@
 
     #[test]
     fn empty() {
-        let input = b"";
-        assert_eq!(unsafe { new(input, 0) }.match_string(""), true);
-        assert_eq!(!unsafe { new(input, 0) }.match_string("a"), true);
+        let input = "";
+        assert_eq!(Position::new(input, 0).unwrap().match_string(""), true);
+        assert_eq!(!Position::new(input, 0).unwrap().match_string("a"), true);
     }
 
     #[test]
     fn parts() {
-        let input = b"asdasdf";
+        let input = "asdasdf";
 
-        assert_eq!(unsafe { new(input, 0) }.match_string("asd"), true);
-        assert_eq!(unsafe { new(input, 3) }.match_string("asdf"), true);
+        assert_eq!(Position::new(input, 0).unwrap().match_string("asd"), true);
+        assert_eq!(Position::new(input, 3).unwrap().match_string("asdf"), true);
     }
 
     #[test]
     fn line_col() {
-        let input = "a\rb\nc\r\nd嗨".as_bytes();
+        let input = "a\rb\nc\r\nd嗨";
 
-        assert_eq!(unsafe { new(input, 0) }.line_col(), (1, 1));
-        assert_eq!(unsafe { new(input, 1) }.line_col(), (1, 2));
-        assert_eq!(unsafe { new(input, 2) }.line_col(), (1, 3));
-        assert_eq!(unsafe { new(input, 3) }.line_col(), (1, 4));
-        assert_eq!(unsafe { new(input, 4) }.line_col(), (2, 1));
-        assert_eq!(unsafe { new(input, 5) }.line_col(), (2, 2));
-        assert_eq!(unsafe { new(input, 6) }.line_col(), (2, 3));
-        assert_eq!(unsafe { new(input, 7) }.line_col(), (3, 1));
-        assert_eq!(unsafe { new(input, 8) }.line_col(), (3, 2));
-        assert_eq!(unsafe { new(input, 11) }.line_col(), (3, 3));
+        assert_eq!(Position::new(input, 0).unwrap().line_col(), (1, 1));
+        assert_eq!(Position::new(input, 1).unwrap().line_col(), (1, 2));
+        assert_eq!(Position::new(input, 2).unwrap().line_col(), (1, 3));
+        assert_eq!(Position::new(input, 3).unwrap().line_col(), (1, 4));
+        assert_eq!(Position::new(input, 4).unwrap().line_col(), (2, 1));
+        assert_eq!(Position::new(input, 5).unwrap().line_col(), (2, 2));
+        assert_eq!(Position::new(input, 6).unwrap().line_col(), (2, 3));
+        assert_eq!(Position::new(input, 7).unwrap().line_col(), (3, 1));
+        assert_eq!(Position::new(input, 8).unwrap().line_col(), (3, 2));
+        assert_eq!(Position::new(input, 11).unwrap().line_col(), (3, 3));
     }
 
     #[test]
     fn line_of() {
-        let input = "a\rb\nc\r\nd嗨".as_bytes();
+        let input = "a\rb\nc\r\nd嗨";
 
-        assert_eq!(unsafe { new(input, 0) }.line_of(), "a\rb");
-        assert_eq!(unsafe { new(input, 1) }.line_of(), "a\rb");
-        assert_eq!(unsafe { new(input, 2) }.line_of(), "a\rb");
-        assert_eq!(unsafe { new(input, 3) }.line_of(), "a\rb");
-        assert_eq!(unsafe { new(input, 4) }.line_of(), "c");
-        assert_eq!(unsafe { new(input, 5) }.line_of(), "c");
-        assert_eq!(unsafe { new(input, 6) }.line_of(), "c");
-        assert_eq!(unsafe { new(input, 7) }.line_of(), "d嗨");
-        assert_eq!(unsafe { new(input, 8) }.line_of(), "d嗨");
-        assert_eq!(unsafe { new(input, 11) }.line_of(), "d嗨");
+        assert_eq!(Position::new(input, 0).unwrap().line_of(), "a\rb\n");
+        assert_eq!(Position::new(input, 1).unwrap().line_of(), "a\rb\n");
+        assert_eq!(Position::new(input, 2).unwrap().line_of(), "a\rb\n");
+        assert_eq!(Position::new(input, 3).unwrap().line_of(), "a\rb\n");
+        assert_eq!(Position::new(input, 4).unwrap().line_of(), "c\r\n");
+        assert_eq!(Position::new(input, 5).unwrap().line_of(), "c\r\n");
+        assert_eq!(Position::new(input, 6).unwrap().line_of(), "c\r\n");
+        assert_eq!(Position::new(input, 7).unwrap().line_of(), "d嗨");
+        assert_eq!(Position::new(input, 8).unwrap().line_of(), "d嗨");
+        assert_eq!(Position::new(input, 11).unwrap().line_of(), "d嗨");
     }
 
     #[test]
     fn line_of_empty() {
-        let input = b"";
+        let input = "";
 
-        assert_eq!(unsafe { new(input, 0) }.line_of(), "");
+        assert_eq!(Position::new(input, 0).unwrap().line_of(), "");
     }
 
     #[test]
     fn line_of_new_line() {
-        let input = b"\n";
+        let input = "\n";
 
-        assert_eq!(unsafe { new(input, 0) }.line_of(), "");
+        assert_eq!(Position::new(input, 0).unwrap().line_of(), "\n");
     }
 
     #[test]
     fn line_of_between_new_line() {
-        let input = b"\n\n";
+        let input = "\n\n";
 
-        assert_eq!(unsafe { new(input, 1) }.line_of(), "");
+        assert_eq!(Position::new(input, 1).unwrap().line_of(), "\n");
     }
 
-    fn measure_skip(input: &[u8], pos: usize, n: usize) -> Option<usize> {
-        let mut p = unsafe { new(input, pos) };
+    fn measure_skip(input: &str, pos: usize, n: usize) -> Option<usize> {
+        let mut p = Position::new(input, pos).unwrap();
         if p.skip(n) {
             Some(p.pos - pos)
         } else {
@@ -500,7 +510,7 @@
 
     #[test]
     fn skip_empty() {
-        let input = b"";
+        let input = "";
 
         assert_eq!(measure_skip(input, 0, 0), Some(0));
         assert_eq!(measure_skip(input, 0, 1), None);
@@ -508,7 +518,7 @@
 
     #[test]
     fn skip() {
-        let input = "d嗨".as_bytes();
+        let input = "d嗨";
 
         assert_eq!(measure_skip(input, 0, 0), Some(0));
         assert_eq!(measure_skip(input, 0, 1), Some(1));
@@ -543,21 +553,36 @@
 
     #[test]
     fn match_range() {
-        let input = b"b";
+        let input = "b";
 
-        assert_eq!(unsafe { new(input, 0) }.match_range('a'..'c'), true);
-        assert_eq!(unsafe { new(input, 0) }.match_range('b'..'b'), true);
-        assert_eq!(!unsafe { new(input, 0) }.match_range('a'..'a'), true);
-        assert_eq!(!unsafe { new(input, 0) }.match_range('c'..'c'), true);
-        assert_eq!(unsafe { new(input, 0) }.match_range('a'..'嗨'), true);
+        assert_eq!(Position::new(input, 0).unwrap().match_range('a'..'c'), true);
+        assert_eq!(Position::new(input, 0).unwrap().match_range('b'..'b'), true);
+        assert_eq!(
+            !Position::new(input, 0).unwrap().match_range('a'..'a'),
+            true
+        );
+        assert_eq!(
+            !Position::new(input, 0).unwrap().match_range('c'..'c'),
+            true
+        );
+        assert_eq!(
+            Position::new(input, 0).unwrap().match_range('a'..'嗨'),
+            true
+        );
     }
 
     #[test]
     fn match_insensitive() {
-        let input = b"AsdASdF";
+        let input = "AsdASdF";
 
-        assert_eq!(unsafe { new(input, 0) }.match_insensitive("asd"), true);
-        assert_eq!(unsafe { new(input, 3) }.match_insensitive("asdf"), true);
+        assert_eq!(
+            Position::new(input, 0).unwrap().match_insensitive("asd"),
+            true
+        );
+        assert_eq!(
+            Position::new(input, 3).unwrap().match_insensitive("asdf"),
+            true
+        );
     }
 
     #[test]
diff --git a/rustc_deps/vendor/pest/src/prec_climber.rs b/rustc_deps/vendor/pest/src/prec_climber.rs
index 3be2c46..d8bf0a0 100644
--- a/rustc_deps/vendor/pest/src/prec_climber.rs
+++ b/rustc_deps/vendor/pest/src/prec_climber.rs
@@ -13,8 +13,8 @@
 use std::iter::Peekable;
 use std::ops::BitOr;
 
-use RuleType;
 use iterators::Pair;
+use RuleType;
 
 /// Associativity of an [`Operator`].
 ///
@@ -24,7 +24,7 @@
     /// Left `Operator` associativity
     Left,
     /// Right `Operator` associativity
-    Right
+    Right,
 }
 
 /// Infix operator used in [`PrecClimber`].
@@ -34,7 +34,7 @@
 pub struct Operator<R: RuleType> {
     rule: R,
     assoc: Assoc,
-    next: Option<Box<Operator<R>>>
+    next: Option<Box<Operator<R>>>,
 }
 
 impl<R: RuleType> Operator<R> {
@@ -57,7 +57,7 @@
         Operator {
             rule,
             assoc,
-            next: None
+            next: None,
         }
     }
 }
@@ -87,7 +87,7 @@
 /// [`Pairs`]: ../iterators/struct.Pairs.html
 #[derive(Debug)]
 pub struct PrecClimber<R: RuleType> {
-    ops: HashMap<R, (u32, Assoc)>
+    ops: HashMap<R, (u32, Assoc)>,
 }
 
 impl<R: RuleType> PrecClimber<R> {
@@ -116,7 +116,8 @@
     /// ]);
     /// ```
     pub fn new(ops: Vec<Operator<R>>) -> PrecClimber<R> {
-        let ops = ops.into_iter()
+        let ops = ops
+            .into_iter()
             .zip(1..)
             .fold(HashMap::new(), |mut map, (op, prec)| {
                 let mut next = Some(op);
@@ -126,7 +127,7 @@
                         Operator {
                             rule,
                             assoc,
-                            next: op_next
+                            next: op_next,
                         } => {
                             map.insert(rule, (prec, assoc));
                             next = op_next.map(|op| *op);
@@ -172,12 +173,12 @@
     where
         P: Iterator<Item = Pair<'i, R>>,
         F: FnMut(Pair<'i, R>) -> T,
-        G: FnMut(T, Pair<'i, R>, T) -> T
+        G: FnMut(T, Pair<'i, R>, T) -> T,
     {
         let lhs = primary(
             pairs
                 .next()
-                .expect("precedence climbing requires a non-empty Pairs")
+                .expect("precedence climbing requires a non-empty Pairs"),
         );
         self.climb_rec(lhs, 0, &mut pairs.peekable(), &mut primary, &mut infix)
     }
@@ -188,12 +189,12 @@
         min_prec: u32,
         pairs: &mut Peekable<P>,
         primary: &mut F,
-        infix: &mut G
+        infix: &mut G,
     ) -> T
     where
         P: Iterator<Item = Pair<'i, R>>,
         F: FnMut(Pair<'i, R>) -> T,
-        G: FnMut(T, Pair<'i, R>, T) -> T
+        G: FnMut(T, Pair<'i, R>, T) -> T,
     {
         while pairs.peek().is_some() {
             let rule = pairs.peek().unwrap().as_rule();
@@ -202,7 +203,7 @@
                     let op = pairs.next().unwrap();
                     let mut rhs = primary(pairs.next().expect(
                         "infix operator must be followed by \
-                         a primary expression"
+                         a primary expression",
                     ));
 
                     while pairs.peek().is_some() {
diff --git a/rustc_deps/vendor/pest/src/span.rs b/rustc_deps/vendor/pest/src/span.rs
index 6befd36..f8beffe 100644
--- a/rustc_deps/vendor/pest/src/span.rs
+++ b/rustc_deps/vendor/pest/src/span.rs
@@ -18,18 +18,50 @@
 ///
 /// [two `Position`s]: struct.Position.html#method.span
 /// [`Pair`]: ../iterators/struct.Pair.html#method.span
+#[derive(Clone)]
 pub struct Span<'i> {
-    input: &'i [u8],
+    input: &'i str,
+    /// # Safety
+    ///
+    /// Must be a valid character boundary index into `input`.
     start: usize,
-    end: usize
-}
-
-#[inline]
-pub unsafe fn new(input: &[u8], start: usize, end: usize) -> Span {
-    Span { input, start, end }
+    /// # Safety
+    ///
+    /// Must be a valid character boundary index into `input`.
+    end: usize,
 }
 
 impl<'i> Span<'i> {
+    /// Create a new `Span` without checking invariants. (Checked with `debug_assertions`.)
+    ///
+    /// # Safety
+    ///
+    /// `input[start..end]` must be a valid subslice; that is, said indexing should not panic.
+    pub(crate) unsafe fn new_unchecked(input: &str, start: usize, end: usize) -> Span {
+        debug_assert!(input.get(start..end).is_some());
+        Span { input, start, end }
+    }
+
+    /// Attempts to create a new span. Will return `None` if `input[start..end]` is an invalid index
+    /// into `input`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use pest::Span;
+    /// let input = "Hello!";
+    /// assert_eq!(None, Span::new(input, 100, 0));
+    /// assert!(Span::new(input, 0, input.len()).is_some());
+    /// ```
+    #[allow(clippy::new_ret_no_self)]
+    pub fn new(input: &str, start: usize, end: usize) -> Option<Span> {
+        if input.get(start..end).is_some() {
+            Some(Span { input, start, end })
+        } else {
+            None
+        }
+    }
+
     /// Returns the `Span`'s start byte position as a `usize`.
     ///
     /// # Examples
@@ -82,7 +114,7 @@
     #[inline]
     pub fn start_pos(&self) -> position::Position<'i> {
         // Span's start position is always a UTF-8 border.
-        unsafe { position::new(self.input, self.start) }
+        unsafe { position::Position::new_unchecked(self.input, self.start) }
     }
 
     /// Returns the `Span`'s end `Position`.
@@ -101,7 +133,7 @@
     #[inline]
     pub fn end_pos(&self) -> position::Position<'i> {
         // Span's end position is always a UTF-8 border.
-        unsafe { position::new(self.input, self.end) }
+        unsafe { position::Position::new_unchecked(self.input, self.end) }
     }
 
     /// Splits the `Span` into a pair of `Position`s.
@@ -120,8 +152,8 @@
     #[inline]
     pub fn split(self) -> (position::Position<'i>, position::Position<'i>) {
         // Span's start and end positions are always a UTF-8 borders.
-        let pos1 = unsafe { position::new(self.input, self.start) };
-        let pos2 = unsafe { position::new(self.input, self.end) };
+        let pos1 = unsafe { position::Position::new_unchecked(self.input, self.start) };
+        let pos2 = unsafe { position::Position::new_unchecked(self.input, self.end) };
 
         (pos1, pos2)
     }
@@ -146,7 +178,32 @@
     #[inline]
     pub fn as_str(&self) -> &'i str {
         // Span's start and end positions are always a UTF-8 borders.
-        unsafe { str::from_utf8_unchecked(&self.input[self.start..self.end]) }
+        &self.input[self.start..self.end]
+    }
+
+    /// Iterates over all lines (partially) covered by this span.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # use pest;
+    /// # #[allow(non_camel_case_types)]
+    /// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
+    /// enum Rule {}
+    ///
+    /// let input = "a\nb\nc";
+    /// let mut state: Box<pest::ParserState<Rule>> = pest::ParserState::new(input).skip(2).unwrap();
+    /// let start_pos = state.position().clone();
+    /// state = state.match_string("b\nc").unwrap();
+    /// let span = start_pos.span(&state.position().clone());
+    /// assert_eq!(span.lines().collect::<Vec<_>>(), vec!["b\n", "c"]);
+    /// ```
+    #[inline]
+    pub fn lines(&self) -> Lines {
+        Lines {
+            span: self,
+            pos: self.start,
+        }
     }
 }
 
@@ -160,12 +217,6 @@
     }
 }
 
-impl<'i> Clone for Span<'i> {
-    fn clone(&self) -> Span<'i> {
-        unsafe { new(self.input, self.start, self.end) }
-    }
-}
-
 impl<'i> PartialEq for Span<'i> {
     fn eq(&self, other: &Span<'i>) -> bool {
         ptr::eq(self.input, other.input) && self.start == other.start && self.end == other.end
@@ -176,12 +227,38 @@
 
 impl<'i> Hash for Span<'i> {
     fn hash<H: Hasher>(&self, state: &mut H) {
-        (self.input as *const [u8]).hash(state);
+        (self.input as *const str).hash(state);
         self.start.hash(state);
         self.end.hash(state);
     }
 }
 
+/// Line iterator for Spans, created by [`Span::lines()`].
+///
+/// Iterates all lines that are at least partially covered by the span.
+///
+/// [`Span::lines()`]: struct.Span.html#method.lines
+pub struct Lines<'i> {
+    span: &'i Span<'i>,
+    pos: usize,
+}
+
+impl<'i> Iterator for Lines<'i> {
+    type Item = &'i str;
+    fn next(&mut self) -> Option<&'i str> {
+        if self.pos > self.span.end {
+            return None;
+        }
+        let pos = position::Position::new(self.span.input, self.pos)?;
+        if pos.at_end() {
+            return None;
+        }
+        let line = pos.line_of();
+        self.pos = pos.find_line_end();
+        Some(line)
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -198,4 +275,27 @@
 
         assert_eq!(span.split(), (start, end));
     }
+
+    #[test]
+    fn lines_mid() {
+        let input = "abc\ndef\nghi";
+        let span = Span::new(input, 1, 7).unwrap();
+        let lines: Vec<_> = span.lines().collect();
+        println!("{:?}", lines);
+        assert_eq!(lines.len(), 2);
+        assert_eq!(lines[0], "abc\n".to_owned());
+        assert_eq!(lines[1], "def\n".to_owned());
+    }
+
+    #[test]
+    fn lines_eof() {
+        let input = "abc\ndef\nghi";
+        let span = Span::new(input, 5, 11).unwrap();
+        assert!(span.end_pos().at_end());
+        let lines: Vec<_> = span.lines().collect();
+        println!("{:?}", lines);
+        assert_eq!(lines.len(), 2);
+        assert_eq!(lines[0], "def\n".to_owned());
+        assert_eq!(lines[1], "ghi".to_owned());
+    }
 }
diff --git a/rustc_deps/vendor/pest/src/stack.rs b/rustc_deps/vendor/pest/src/stack.rs
index e4010be..c19e18a 100644
--- a/rustc_deps/vendor/pest/src/stack.rs
+++ b/rustc_deps/vendor/pest/src/stack.rs
@@ -7,13 +7,15 @@
 // option. All files in the project carrying such notice may not be copied,
 // modified, or distributed except according to those terms.
 
+use std::ops::{Index, Range};
+
 /// Implementation of a `Stack` which maintains an log of `StackOp`s in order to rewind the stack
 /// to a previous state.
 #[derive(Debug)]
 pub struct Stack<T: Clone> {
     ops: Vec<StackOp<T>>,
     cache: Vec<T>,
-    snapshots: Vec<usize>
+    snapshots: Vec<usize>,
 }
 
 impl<T: Clone> Stack<T> {
@@ -22,7 +24,7 @@
         Stack {
             ops: vec![],
             cache: vec![],
-            snapshots: vec![]
+            snapshots: vec![],
         }
     }
 
@@ -52,18 +54,14 @@
         popped
     }
 
-    /// Returns an iterator to the current state of the cache in fifo order.
-    pub fn iter(&self) -> impl Iterator<Item = &T> {
-        self.cache.iter().rev()
+    /// Returns the size of the stack
+    pub fn len(&self) -> usize {
+        self.cache.len()
     }
 
     /// Takes a snapshot of the current `Stack`.
     pub fn snapshot(&mut self) {
-        let ops_index = self.ops.len();
-
-        if ops_index > self.most_recent_snap() {
-            self.snapshots.push(ops_index);
-        }
+        self.snapshots.push(self.ops.len());
     }
 
     /// Rewinds the `Stack` to the most recent `snapshot()`. If no `snapshot()` has been taken, this
@@ -73,7 +71,7 @@
             Some(ops_index) => {
                 self.rewind_to(ops_index);
                 self.ops.truncate(ops_index);
-            },
+            }
             None => {
                 self.cache.clear();
                 self.ops.clear();
@@ -95,16 +93,20 @@
             }
         }
     }
+}
 
-    fn most_recent_snap(&self) -> usize {
-        *self.snapshots.last().unwrap_or(&0)
+impl<T: Clone> Index<Range<usize>> for Stack<T> {
+    type Output = [T];
+
+    fn index(&self, range: Range<usize>) -> &[T] {
+        self.cache.index(range)
     }
 }
 
 #[derive(Debug)]
 enum StackOp<T> {
     Push(T),
-    Pop(T)
+    Pop(T),
 }
 
 #[cfg(test)]
@@ -125,14 +127,17 @@
     }
 
     #[test]
-    fn iter() {
+    fn snapshot_twice() {
         let mut stack = Stack::new();
 
         stack.push(0);
-        stack.push(1);
-        stack.push(2);
 
-        assert_eq!(stack.iter().collect::<Vec<&i32>>(), vec![&2, &1, &0]);
+        stack.snapshot();
+        stack.snapshot();
+        stack.restore();
+        stack.restore();
+
+        assert_eq!(stack[0..stack.len()], [0]);
     }
 
     #[test]
diff --git a/rustc_deps/vendor/pest/src/token.rs b/rustc_deps/vendor/pest/src/token.rs
index a88d15c..fbe544a 100644
--- a/rustc_deps/vendor/pest/src/token.rs
+++ b/rustc_deps/vendor/pest/src/token.rs
@@ -15,5 +15,5 @@
     /// The starting `Position` of a matched `Rule`
     Start { rule: R, pos: Position<'i> },
     /// The ending `Position` of a matched `Rule`
-    End { rule: R, pos: Position<'i> }
+    End { rule: R, pos: Position<'i> },
 }
diff --git a/rustc_deps/vendor/pest/src/unicode/mod.rs b/rustc_deps/vendor/pest/src/unicode/mod.rs
index b4a3397..b24f95c 100644
--- a/rustc_deps/vendor/pest/src/unicode/mod.rs
+++ b/rustc_deps/vendor/pest/src/unicode/mod.rs
@@ -3,7 +3,7 @@
 //! We rely on dead code elimination to remove the tables that aren't needed.
 
 #![allow(bad_style)]
-#![cfg_attr(feature = "cargo-clippy", allow(clippy))]
+#![allow(clippy::all)]
 
 macro_rules! char_property_functions {
     {$(
diff --git a/rustc_deps/vendor/pest/tests/calculator.rs b/rustc_deps/vendor/pest/tests/calculator.rs
index 471b8a5..b78ee0d 100644
--- a/rustc_deps/vendor/pest/tests/calculator.rs
+++ b/rustc_deps/vendor/pest/tests/calculator.rs
@@ -10,10 +10,10 @@
 #[macro_use]
 extern crate pest;
 
-use pest::{state, ParseResult, Parser, ParserState};
 use pest::error::Error;
 use pest::iterators::{Pair, Pairs};
 use pest::prec_climber::{Assoc, Operator, PrecClimber};
+use pest::{state, ParseResult, Parser, ParserState};
 
 #[allow(dead_code, non_camel_case_types)]
 #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
@@ -26,7 +26,7 @@
     times,
     divide,
     modulus,
-    power
+    power,
 }
 
 struct CalculatorParser;
@@ -104,7 +104,7 @@
 
         state(input, |state| match rule {
             Rule::expression => expression(state),
-            _ => unreachable!()
+            _ => unreachable!(),
         })
     }
 }
@@ -118,13 +118,13 @@
         Rule::divide => lhs / rhs,
         Rule::modulus => lhs % rhs,
         Rule::power => lhs.pow(rhs as u32),
-        _ => unreachable!()
+        _ => unreachable!(),
     };
 
     match pair.as_rule() {
         Rule::expression => climber.climb(pair.into_inner(), primary, infix),
         Rule::number => pair.as_str().parse().unwrap(),
-        _ => unreachable!()
+        _ => unreachable!(),
     }
 }
 
@@ -190,7 +190,8 @@
 fn prec_climb() {
     let climber = PrecClimber::new(vec![
         Operator::new(Rule::plus, Assoc::Left) | Operator::new(Rule::minus, Assoc::Left),
-        Operator::new(Rule::times, Assoc::Left) | Operator::new(Rule::divide, Assoc::Left)
+        Operator::new(Rule::times, Assoc::Left)
+            | Operator::new(Rule::divide, Assoc::Left)
             | Operator::new(Rule::modulus, Assoc::Left),
         Operator::new(Rule::power, Assoc::Right),
     ]);
diff --git a/rustc_deps/vendor/pest/tests/json.rs b/rustc_deps/vendor/pest/tests/json.rs
index cdd5fb8..8490625 100644
--- a/rustc_deps/vendor/pest/tests/json.rs
+++ b/rustc_deps/vendor/pest/tests/json.rs
@@ -12,9 +12,9 @@
 
 use std::collections::HashMap;
 
-use pest::{state, ParseResult, Parser, ParserState, Span};
 use pest::error::Error;
 use pest::iterators::{Pair, Pairs};
+use pest::{state, ParseResult, Parser, ParserState, Span};
 
 #[allow(dead_code, non_camel_case_types)]
 #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
@@ -32,7 +32,7 @@
     int,
     exp,
     bool,
-    null
+    null,
 }
 
 struct JsonParser;
@@ -61,7 +61,8 @@
                             })
                         })
                         .and_then(|s| s.match_string("}"))
-                }).or_else(|s| {
+                })
+                .or_else(|s| {
                     s.sequence(|s| {
                         s.match_string("{")
                             .and_then(|s| skip(s))
@@ -101,7 +102,8 @@
                             })
                         })
                         .and_then(|s| s.match_string("]"))
-                }).or_else(|s| {
+                })
+                .or_else(|s| {
                     s.sequence(|s| {
                         s.match_string("[")
                             .and_then(|s| skip(s))
@@ -131,7 +133,8 @@
                                 s.sequence(|s| {
                                     s.lookahead(false, |s| {
                                         s.match_string("\"").or_else(|s| s.match_string("\\"))
-                                    }).and_then(|s| s.skip(1))
+                                    })
+                                    .and_then(|s| s.skip(1))
                                 })
                             })
                         })
@@ -245,7 +248,7 @@
             Rule::int => int(state),
             Rule::exp => exp(state),
             Rule::bool => bool(state),
-            Rule::null => null(state)
+            Rule::null => null(state),
         })
     }
 }
@@ -257,7 +260,7 @@
     Number(f64),
     String(Span<'i>),
     Array(Vec<Json<'i>>),
-    Object(HashMap<Span<'i>, Json<'i>>)
+    Object(HashMap<Span<'i>, Json<'i>>),
 }
 
 fn consume(pair: Pair<Rule>) -> Json {
@@ -269,7 +272,7 @@
             Rule::bool => match pair.as_str() {
                 "false" => Json::Bool(false),
                 "true" => Json::Bool(true),
-                _ => unreachable!()
+                _ => unreachable!(),
             },
             Rule::number => Json::Number(pair.as_str().parse().unwrap()),
             Rule::string => Json::String(pair.as_span()),
@@ -286,7 +289,7 @@
 
                 Json::Object(pairs.collect())
             }
-            _ => unreachable!()
+            _ => unreachable!(),
         }
     }
 
@@ -445,7 +448,7 @@
         JsonParser::parse(Rule::json, input)
             .unwrap()
             .next()
-            .unwrap()
+            .unwrap(),
     );
 
     match ast {
diff --git a/rustc_deps/vendor/pest_derive/.cargo-checksum.json b/rustc_deps/vendor/pest_derive/.cargo-checksum.json
index abe0937..0f48460 100644
--- a/rustc_deps/vendor/pest_derive/.cargo-checksum.json
+++ b/rustc_deps/vendor/pest_derive/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"c71809a75322703724358adc929460fbab89bf991c0542f67588cfc2f02a2013","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"f3f63bceae9c5806e755e9cea772a97874d54211723a0f542f1cec9d514bc12a","src/lib.rs":"ce1ff8751db4407c416707812bcdcbf47060bab8b50e7d543931d7f35742c790","tests/grammar.pest":"a491d7f627b45b4aa9ea0e5597bac8764634a94c97a9436de0a3bbd9610c975b","tests/grammar.rs":"b83a80b19bacd93ab7ff201942786d7e87f70945b3d3646a64c89e57904e7fe9","tests/lists.pest":"96d90ad5eb7b14648fa8720f0a48e680327080b07251a4dd74e1a023625e9c1a","tests/lists.rs":"5cc0c79494685d5056ab6164bc8893e26d29810f5c5a20d895f27c1695bab31c","tests/reporting.pest":"f5bc8405ea117b76338e0003e359731a8aaf1a36672f31a6a639a4e67a65e331","tests/reporting.rs":"d1658fd9466278f80769384f4377056274bd06fff586c2422b4fe42d39967e4b"},"package":"b76f477146419bc539a63f4ef40e902166cb43b3e51cecc71d9136fd12c567e7"}
\ No newline at end of file
+{"files":{"Cargo.toml":"3616e31e313d646dd0372ba0116b596a4c9dfc21cda88625ef00001ab13b0d68","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"7c31c505c267ac538b70deaa9ad95ef0cc68720c46b4d3276df8f3a278d233e2","src/lib.rs":"f343c474ce5ff5520907bd69dd52b965b4aec42750fe3bf7eaf2b5075b08cfd6","tests/grammar.pest":"3a2212c28dd560ba3169e0e1470394ae706eb9336360ecdc0f1d9934af9fc223","tests/grammar.rs":"75261bc1ba2817d59e35fa2a7c07a661f9f2200a0144cb768837d37d3c370aa0","tests/grammar_inline.rs":"675c55697df7b8d011cef7182f2b05d52b4e73d405c869b9ea361bff5f17b63a","tests/lists.pest":"96d90ad5eb7b14648fa8720f0a48e680327080b07251a4dd74e1a023625e9c1a","tests/lists.rs":"5cc0c79494685d5056ab6164bc8893e26d29810f5c5a20d895f27c1695bab31c","tests/reporting.pest":"f5bc8405ea117b76338e0003e359731a8aaf1a36672f31a6a639a4e67a65e331","tests/reporting.rs":"d1658fd9466278f80769384f4377056274bd06fff586c2422b4fe42d39967e4b"},"package":"833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0"}
\ No newline at end of file
diff --git a/rustc_deps/vendor/pest_derive/Cargo.toml b/rustc_deps/vendor/pest_derive/Cargo.toml
index 4eeccd1..47dfec4 100644
--- a/rustc_deps/vendor/pest_derive/Cargo.toml
+++ b/rustc_deps/vendor/pest_derive/Cargo.toml
@@ -12,7 +12,7 @@
 
 [package]
 name = "pest_derive"
-version = "2.0.1"
+version = "2.1.0"
 authors = ["Dragoș Tiselice <dragostiselice@gmail.com>"]
 description = "pest's derive macro"
 homepage = "https://pest-parser.github.io/"
@@ -27,10 +27,10 @@
 name = "pest_derive"
 proc-macro = true
 [dependencies.pest]
-version = "2.0"
+version = "2.1.0"
 
 [dependencies.pest_generator]
-version = "2.0"
+version = "2.1.0"
 [badges.codecov]
 repository = "pest-parser/pest"
 
diff --git a/rustc_deps/vendor/pest_derive/_README.md b/rustc_deps/vendor/pest_derive/_README.md
index b754292..2c94a72 100644
--- a/rustc_deps/vendor/pest_derive/_README.md
+++ b/rustc_deps/vendor/pest_derive/_README.md
@@ -148,7 +148,10 @@
 * [comrak](https://github.com/kivikakk/comrak)
 * [graphql-parser](https://github.com/Keats/graphql-parser)
 * [handlebars-rust](https://github.com/sunng87/handlebars-rust)
+* [hexdino](https://github.com/Luz/hexdino)
 * [Huia](https://gitlab.com/jimsy/huia/)
+* [json5-rs](https://github.com/callum-oakley/json5-rs)
+* [mt940](https://github.com/svenstaro/mt940-rs)
 * [py_literal](https://github.com/jturner314/py_literal)
 * [rouler](https://github.com/jarcane/rouler)
 * [RuSh](https://github.com/lwandrebeck/RuSh)
diff --git a/rustc_deps/vendor/pest_derive/src/lib.rs b/rustc_deps/vendor/pest_derive/src/lib.rs
index 0cc1185..f60d5d9 100644
--- a/rustc_deps/vendor/pest_derive/src/lib.rs
+++ b/rustc_deps/vendor/pest_derive/src/lib.rs
@@ -43,6 +43,10 @@
 //! struct MyParser;
 //! ```
 //!
+//! ## Inline grammars
+//!
+//! Grammars can also be inlined by using the `#[grammar_inline = "..."]` attribute.
+//!
 //! ## Grammar
 //!
 //! A grammar is a series of rules separated by whitespace, possibly containing comments.
@@ -190,6 +194,7 @@
 //! * `POP` - pops a string from the stack and matches it
 //! * `POP_ALL` - pops the entire state of the stack and matches it
 //! * `PEEK` - peeks a string from the stack and matches it
+//! * `PEEK[a..b]` - peeks part of the stack and matches it
 //! * `PEEK_ALL` - peeks the entire state of the stack and matches it
 //! * `DROP` - drops the top of the stack (fails to match if the stack is empty)
 //!
@@ -222,8 +227,8 @@
 //! `PUSH(e)` simply pushes the captured string of the expression `e` down a stack. This stack can
 //! then later be used to match grammar based on its content with `POP` and `PEEK`.
 //!
-//! `PEEK` always matches the string at the top of stack. So, if the stack contains `["a", "b"]`,
-//! the this grammar:
+//! `PEEK` always matches the string at the top of stack. So, if the stack contains `["b", "a"]`
+//! (`"a"` being on top), this grammar:
 //!
 //! ```ignore
 //! a = { PEEK }
@@ -236,13 +241,33 @@
 //! ```
 //!
 //! `POP` works the same way with the exception that it pops the string off of the stack if the
-//! the match worked. With the stack from above, if `POP` matches `"a"`, the stack will be mutated
+//! match worked. With the stack from above, if `POP` matches `"a"`, the stack will be mutated
 //! to `["b"]`.
 //!
 //! `DROP` makes it possible to remove the string at the top of the stack
 //! without matching it. If the stack is nonempty, `DROP` drops the top of the
 //! stack. If the stack is empty, then `DROP` fails to match.
 //!
+//! ### Advanced peeking
+//!
+//! `PEEK[start..end]` and `PEEK_ALL` allow peeking deeper into the stack. The syntax works exactly
+//! like Rust’s exclusive slice syntax. Additionally, negative indices can be used to indicate an
+//! offset from the top. If the end lies before or at the start, the expression matches (as does
+//! a `PEEK_ALL` on an empty stack). With the stack `["c", "b", "a"]` (`"a"` on top):
+//!
+//! ```ignore
+//! fill = PUSH("c") ~ PUSH("b") ~ PUSH("a")
+//! v = { PEEK_ALL } = { "a" ~ "b" ~ "c" }  // top to bottom
+//! w = { PEEK[..] } = { "c" ~ "b" ~ "a" }  // bottom to top
+//! x = { PEEK[1..2] } = { PEEK[1..-1] } = { "b" }
+//! y = { PEEK[..-2] } = { PEEK[0..1] } = { "c" }
+//! z = { PEEK[1..] } = { PEEK[-2..3] } = { "b" ~ "a" }
+//! n = { PEEK[2..-2] } = { PEEK[2..1] } = { "" }
+//! ```
+//!
+//! For historical reasons, `PEEK_ALL` matches from top to bottom, while `PEEK[start..end]` matches
+//! from bottom to top. There is currently no syntax to match a slice of the stack top to bottom.
+//!
 //! ## `Rule`
 //!
 //! All rules defined or used in the grammar populate a generated `enum` called `Rule`. This
@@ -271,7 +296,7 @@
 
 use proc_macro::TokenStream;
 
-#[proc_macro_derive(Parser, attributes(grammar))]
+#[proc_macro_derive(Parser, attributes(grammar, grammar_inline))]
 pub fn derive_parser(input: TokenStream) -> TokenStream {
     pest_generator::derive_parser(input.into(), true).into()
 }
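
The `PEEK[start..end]` documentation and the new `#[grammar_inline]` attribute above can be exercised together in one small test. The sketch below is illustrative only and not part of this change: the `InlineParser` name, the `slice` rule, and the test name are invented for the example, but it mirrors the `peek_slice_23` rule and the `grammar_inline.rs` test added elsewhere in this change.

#[macro_use]
extern crate pest;
#[macro_use]
extern crate pest_derive;

// Hypothetical parser: push five digits, then re-match stack entries 1..-2
// (the 2nd and 3rd pushed) from bottom to top with PEEK[1..-2].
#[derive(Parser)]
#[grammar_inline = "slice = { PUSH('0'..'9') ~ PUSH('0'..'9') ~ PUSH('0'..'9') ~ PUSH('0'..'9') ~ PUSH('0'..'9') ~ PEEK[1..-2] }"]
struct InlineParser;

#[test]
fn peek_slice_inline() {
    parses_to! {
        parser: InlineParser,
        input: "0123412", // pushes "0".."4"; PEEK[1..-2] then re-matches "12"
        rule: Rule::slice,
        tokens: [
            slice(0, 7)
        ]
    };
}
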
diff --git a/rustc_deps/vendor/pest_derive/tests/grammar.pest b/rustc_deps/vendor/pest_derive/tests/grammar.pest
index 630f2c1..43a7b8c 100644
--- a/rustc_deps/vendor/pest_derive/tests/grammar.pest
+++ b/rustc_deps/vendor/pest_derive/tests/grammar.pest
@@ -38,10 +38,13 @@
 repeat_mutate_stack = { (PUSH('a'..'c') ~ ",")* ~ POP ~ POP ~ POP }
 peek_ = { PUSH(range) ~ PUSH(range) ~ PEEK ~ PEEK }
 peek_all = { PUSH(range) ~ PUSH(range) ~ PEEK_ALL }
+peek_slice_23 = { PUSH(range) ~ PUSH(range) ~ PUSH(range) ~ PUSH(range) ~ PUSH(range) ~ PEEK[1..-2] }
 pop_ = { PUSH(range) ~ PUSH(range) ~ POP ~ POP }
 pop_all = { PUSH(range) ~ PUSH(range) ~ POP_ALL }
 pop_fail = { PUSH(range) ~ !POP ~ range ~ POP }
-checkpoint_restore = ${ PUSH("") ~ (PUSH("a") ~ "b" ~ POP | POP ~ "a") ~ EOI }
+checkpoint_restore = ${
+		PUSH("") ~ (PUSH("a") ~ "b" ~ POP | DROP ~ "b" | POP ~ "a") ~ EOI
+}
 ascii_digits = { ASCII_DIGIT+ }
 ascii_nonzero_digits = { ASCII_NONZERO_DIGIT+ }
 ascii_bin_digits = { ASCII_BIN_DIGIT+ }
@@ -58,3 +61,23 @@
 
 WHITESPACE = _{ " " }
 COMMENT = _{ "$"+ }
+
+// Line comment
+
+/* 1-line multiline comment */
+
+/*
+	N-line multiline comment
+*/
+
+/*
+	// Line comment inside multiline
+
+	/*
+		(Multiline inside) multiline
+	*/
+
+	Invalid segment of grammar below (repeated rule)
+
+	WHITESPACE = _{ "hi" }
+*/
diff --git a/rustc_deps/vendor/pest_derive/tests/grammar.rs b/rustc_deps/vendor/pest_derive/tests/grammar.rs
index c218dcf..799beb7 100644
--- a/rustc_deps/vendor/pest_derive/tests/grammar.rs
+++ b/rustc_deps/vendor/pest_derive/tests/grammar.rs
@@ -677,7 +677,6 @@
     };
 }
 
-
 #[test]
 fn peek() {
     parses_to! {
@@ -709,6 +708,24 @@
 }
 
 #[test]
+fn peek_slice_23() {
+    parses_to! {
+        parser: GrammarParser,
+        input: "0123412",
+        rule: Rule::peek_slice_23,
+        tokens: [
+            peek_slice_23(0, 7, [
+                range(0, 1),
+                range(1, 2),
+                range(2, 3),
+                range(3, 4),
+                range(4, 5),
+            ])
+        ]
+    };
+}
+
+#[test]
 fn pop() {
     parses_to! {
         parser: GrammarParser,
diff --git a/rustc_deps/vendor/pest_derive/tests/grammar_inline.rs b/rustc_deps/vendor/pest_derive/tests/grammar_inline.rs
new file mode 100644
index 0000000..2cc730a
--- /dev/null
+++ b/rustc_deps/vendor/pest_derive/tests/grammar_inline.rs
@@ -0,0 +1,26 @@
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
+#[macro_use]
+extern crate pest;
+#[macro_use]
+extern crate pest_derive;
+
+#[derive(Parser)]
+#[grammar_inline = "string = { \"abc\" }"]
+struct GrammarParser;
+
+#[test]
+fn inline_string() {
+    parses_to! {
+        parser: GrammarParser,
+        input: "abc",
+        rule: Rule::string,
+        tokens: [
+            string(0, 3)
+        ]
+    };
+}
diff --git a/rustc_deps/vendor/pest_generator/.cargo-checksum.json b/rustc_deps/vendor/pest_generator/.cargo-checksum.json
index aca897a..d09f8d2 100644
--- a/rustc_deps/vendor/pest_generator/.cargo-checksum.json
+++ b/rustc_deps/vendor/pest_generator/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"9d64f4f328f650fe60e0b23d86ca6c0542a18554dd903acc8627b24ad4a30abc","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"f3f63bceae9c5806e755e9cea772a97874d54211723a0f542f1cec9d514bc12a","src/generator.rs":"260cd5beeb8336612d07a727e7412268083131870f902dccdfd56e6107d95b59","src/lib.rs":"48599b275bc86d79e4727e8cc03f3c0fd60de948aebd906de876613b8f8033fa","src/macros.rs":"5b90beebf6de58b15d3f9f7b7c41a503030943a4b63f696c696ae6e00d4f58a3"},"package":"3ebee4e9680be4fd162e6f3394ae4192a6b60b1e4d17d845e631f0c68d1a3386"}
\ No newline at end of file
+{"files":{"Cargo.toml":"514802079f606e981012920ee69e958190de7628f492095a98301143d09f85b3","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"7c31c505c267ac538b70deaa9ad95ef0cc68720c46b4d3276df8f3a278d233e2","src/generator.rs":"7f8a91470d881c3e4a49cbb0d9f00eec23e741ae18f172466e7e3264aabb372e","src/lib.rs":"3fff930457b0de1630b330cb30ad8db8665b44e055dd688fb64fc471dff8c076","src/macros.rs":"5b90beebf6de58b15d3f9f7b7c41a503030943a4b63f696c696ae6e00d4f58a3"},"package":"63120576c4efd69615b5537d3d052257328a4ca82876771d6944424ccfd9f646"}
\ No newline at end of file
diff --git a/rustc_deps/vendor/pest_generator/Cargo.toml b/rustc_deps/vendor/pest_generator/Cargo.toml
index 0d1f7df..dfdb96f 100644
--- a/rustc_deps/vendor/pest_generator/Cargo.toml
+++ b/rustc_deps/vendor/pest_generator/Cargo.toml
@@ -12,7 +12,7 @@
 
 [package]
 name = "pest_generator"
-version = "2.0.0"
+version = "2.1.0"
 authors = ["Dragoș Tiselice <dragostiselice@gmail.com>"]
 description = "pest code generator"
 homepage = "https://pest-parser.github.io/"
@@ -23,10 +23,10 @@
 license = "MIT/Apache-2.0"
 repository = "https://github.com/pest-parser/pest"
 [dependencies.pest]
-version = "2.0"
+version = "2.1.0"
 
 [dependencies.pest_meta]
-version = "2.0"
+version = "2.1.0"
 
 [dependencies.proc-macro2]
 version = "0.4.4"
@@ -35,7 +35,7 @@
 version = "0.6.3"
 
 [dependencies.syn]
-version = "0.14.1"
+version = "0.15"
 [badges.codecov]
 repository = "pest-parser/pest"
 
diff --git a/rustc_deps/vendor/pest_generator/_README.md b/rustc_deps/vendor/pest_generator/_README.md
index b754292..2c94a72 100644
--- a/rustc_deps/vendor/pest_generator/_README.md
+++ b/rustc_deps/vendor/pest_generator/_README.md
@@ -148,7 +148,10 @@
 * [comrak](https://github.com/kivikakk/comrak)
 * [graphql-parser](https://github.com/Keats/graphql-parser)
 * [handlebars-rust](https://github.com/sunng87/handlebars-rust)
+* [hexdino](https://github.com/Luz/hexdino)
 * [Huia](https://gitlab.com/jimsy/huia/)
+* [json5-rs](https://github.com/callum-oakley/json5-rs)
+* [mt940](https://github.com/svenstaro/mt940-rs)
 * [py_literal](https://github.com/jturner314/py_literal)
 * [rouler](https://github.com/jarcane/rouler)
 * [RuSh](https://github.com/lwandrebeck/RuSh)
diff --git a/rustc_deps/vendor/pest_generator/src/generator.rs b/rustc_deps/vendor/pest_generator/src/generator.rs
index f360b5c..24ad277 100644
--- a/rustc_deps/vendor/pest_generator/src/generator.rs
+++ b/rustc_deps/vendor/pest_generator/src/generator.rs
@@ -8,28 +8,33 @@
 // modified, or distributed except according to those terms.
 
 use std::collections::HashMap;
-use std::path::Path;
+use std::path::PathBuf;
 
 use proc_macro2::{Span, TokenStream};
+use quote::{ToTokens, TokenStreamExt};
 use syn::{self, Generics, Ident};
 
 use pest_meta::ast::*;
 use pest_meta::optimizer::*;
 use pest_meta::UNICODE_PROPERTY_NAMES;
 
+#[allow(clippy::needless_pass_by_value)]
 pub fn generate(
     name: Ident,
     generics: &Generics,
-    path: &Path,
+    path: Option<PathBuf>,
     rules: Vec<OptimizedRule>,
     defaults: Vec<&str>,
-    include_grammar: bool
+    include_grammar: bool,
 ) -> TokenStream {
     let uses_eoi = defaults.iter().any(|name| *name == "EOI");
 
     let builtins = generate_builtin_rules();
     let include_fix = if include_grammar {
-        generate_include(&name, &path.to_str().expect("non-Unicode path"))
+        match path {
+            Some(ref path) => generate_include(&name, path.to_str().expect("non-Unicode path")),
+            None => quote!(),
+        }
     } else {
         quote!()
     };
@@ -37,16 +42,13 @@
     let patterns = generate_patterns(&rules, uses_eoi);
     let skip = generate_skip(&rules);
 
-    let mut rules: Vec<_> = rules.into_iter().map(|rule| generate_rule(rule)).collect();
-    rules.extend(
-        defaults
-            .into_iter()
-            .map(|name| builtins.get(name).unwrap().clone())
-    );
+    let mut rules: Vec<_> = rules.into_iter().map(generate_rule).collect();
+    rules.extend(defaults.into_iter().map(|name| builtins[name].clone()));
 
     let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
 
     let parser_impl = quote! {
+        #[allow(clippy::all)]
         impl #impl_generics ::pest::Parser<Rule> for #name #ty_generics #where_clause {
             fn parse<'i>(
                 rule: Rule,
@@ -110,7 +112,8 @@
     insert_builtin!(
         builtins,
         ASCII_HEX_DIGIT,
-        state.match_range('0'..'9')
+        state
+            .match_range('0'..'9')
             .or_else(|state| state.match_range('a'..'f'))
             .or_else(|state| state.match_range('A'..'F'))
     );
@@ -119,13 +122,15 @@
     insert_builtin!(
         builtins,
         ASCII_ALPHA,
-        state.match_range('a'..'z')
+        state
+            .match_range('a'..'z')
             .or_else(|state| state.match_range('A'..'Z'))
     );
     insert_builtin!(
         builtins,
         ASCII_ALPHANUMERIC,
-        state.match_range('a'..'z')
+        state
+            .match_range('a'..'z')
             .or_else(|state| state.match_range('A'..'Z'))
             .or_else(|state| state.match_range('0'..'9'))
     );
@@ -133,7 +138,8 @@
     insert_builtin!(
         builtins,
         NEWLINE,
-        state.match_string("\n")
+        state
+            .match_string("\n")
             .or_else(|state| state.match_string("\r\n"))
             .or_else(|state| state.match_string("\r"))
     );
@@ -147,24 +153,25 @@
             fn #property_ident(state: Box<::pest::ParserState<Rule>>) -> ::pest::ParseResult<Box<::pest::ParserState<Rule>>> {
                 state.match_char_by(::pest::unicode::#property_ident)
             }
-        }.into());
+        });
     }
 
     builtins
 }
 
-// Needed because Cargo doesn't watch for changes in grammers.
+// Needed because Cargo doesn't watch for changes in grammars.
 fn generate_include(name: &Ident, path: &str) -> TokenStream {
     let const_name = Ident::new(&format!("_PEST_GRAMMAR_{}", name), Span::call_site());
     quote! {
         #[allow(non_upper_case_globals)]
-        #[cfg(debug_assertions)]
         const #const_name: &'static str = include_str!(#path);
     }
 }
 
-fn generate_enum(rules: &Vec<OptimizedRule>, uses_eoi: bool) -> TokenStream {
-    let rules = rules.iter().map(|rule| Ident::new(rule.name.as_str(), Span::call_site()));
+fn generate_enum(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
+    let rules = rules
+        .iter()
+        .map(|rule| Ident::new(rule.name.as_str(), Span::call_site()));
     if uses_eoi {
         quote! {
             #[allow(dead_code, non_camel_case_types)]
@@ -185,7 +192,7 @@
     }
 }
 
-fn generate_patterns(rules: &Vec<OptimizedRule>, uses_eoi: bool) -> TokenStream {
+fn generate_patterns(rules: &[OptimizedRule], uses_eoi: bool) -> TokenStream {
     let mut rules: Vec<TokenStream> = rules
         .iter()
         .map(|rule| {
@@ -209,20 +216,18 @@
 
 fn generate_rule(rule: OptimizedRule) -> TokenStream {
     let name = Ident::new(&rule.name, Span::call_site());
-    let expr = if { rule.ty == RuleType::Atomic || rule.ty == RuleType::CompoundAtomic } {
+    let expr = if rule.ty == RuleType::Atomic || rule.ty == RuleType::CompoundAtomic {
         generate_expr_atomic(rule.expr)
-    } else {
-        if name == "WHITESPACE" || name == "COMMENT" {
-            let atomic = generate_expr_atomic(rule.expr);
+    } else if name == "WHITESPACE" || name == "COMMENT" {
+        let atomic = generate_expr_atomic(rule.expr);
 
-            quote! {
-                state.atomic(::pest::Atomicity::Atomic, |state| {
-                    #atomic
-                })
-            }
-        } else {
-            generate_expr(rule.expr)
+        quote! {
+            state.atomic(::pest::Atomicity::Atomic, |state| {
+                #atomic
+            })
         }
+    } else {
+        generate_expr(rule.expr)
     };
 
     match rule.ty {
@@ -274,11 +279,11 @@
                     })
                 })
             }
-        }
+        },
     }
 }
 
-fn generate_skip(rules: &Vec<OptimizedRule>) -> TokenStream {
+fn generate_skip(rules: &[OptimizedRule]) -> TokenStream {
     let whitespace = rules.iter().any(|rule| rule.name == "WHITESPACE");
     let comment = rules.iter().any(|rule| rule.name == "COMMENT");
 
@@ -287,9 +292,7 @@
         (true, false) => generate_rule!(
             skip,
             if state.atomicity() == ::pest::Atomicity::NonAtomic {
-                state.repeat(|state| {
-                    super::visible::WHITESPACE(state)
-                })
+                state.repeat(|state| super::visible::WHITESPACE(state))
             } else {
                 Ok(state)
             }
@@ -297,9 +300,7 @@
         (false, true) => generate_rule!(
             skip,
             if state.atomicity() == ::pest::Atomicity::NonAtomic {
-                state.repeat(|state| {
-                    super::visible::COMMENT(state)
-                })
+                state.repeat(|state| super::visible::COMMENT(state))
             } else {
                 Ok(state)
             }
@@ -308,24 +309,22 @@
             skip,
             if state.atomicity() == ::pest::Atomicity::NonAtomic {
                 state.sequence(|state| {
-                    state.repeat(|state| {
-                        super::visible::WHITESPACE(state)
-                    }).and_then(|state| {
-                        state.repeat(|state| {
-                            state.sequence(|state| {
-                                super::visible::COMMENT(state).and_then(|state| {
-                                    state.repeat(|state| {
-                                        super::visible::WHITESPACE(state)
+                    state
+                        .repeat(|state| super::visible::WHITESPACE(state))
+                        .and_then(|state| {
+                            state.repeat(|state| {
+                                state.sequence(|state| {
+                                    super::visible::COMMENT(state).and_then(|state| {
+                                        state.repeat(|state| super::visible::WHITESPACE(state))
                                     })
                                 })
                             })
                         })
-                    })
                 })
             } else {
                 Ok(state)
             }
-        )
+        ),
     }
 }
 
@@ -353,6 +352,12 @@
             let ident = Ident::new(&ident, Span::call_site());
             quote! { self::#ident(state) }
         }
+        OptimizedExpr::PeekSlice(start, end_) => {
+            let end = QuoteOption(end_);
+            quote! {
+                state.stack_match_peek_slice(#start, #end, ::pest::MatchDir::BottomToTop)
+            }
+        }
         OptimizedExpr::PosPred(expr) => {
             let expr = generate_expr(*expr);
 
@@ -493,6 +498,12 @@
             let ident = Ident::new(&ident, Span::call_site());
             quote! { self::#ident(state) }
         }
+        OptimizedExpr::PeekSlice(start, end_) => {
+            let end = QuoteOption(end_);
+            quote! {
+                state.stack_match_peek_slice(#start, #end, ::pest::MatchDir::BottomToTop)
+            }
+        }
         OptimizedExpr::PosPred(expr) => {
             let expr = generate_expr_atomic(*expr);
 
@@ -595,19 +606,28 @@
     }
 }
 
+struct QuoteOption<T>(Option<T>);
+
+impl<T: ToTokens> ToTokens for QuoteOption<T> {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
+        tokens.append_all(match self.0 {
+            Some(ref t) => quote! { ::std::option::Option::Some(#t) },
+            None => quote! { ::std::option::Option::None },
+        });
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
 
     #[test]
     fn rule_enum_simple() {
-        let rules = vec![
-            OptimizedRule {
-                name: "f".to_owned(),
-                ty: RuleType::Normal,
-                expr: OptimizedExpr::Ident("g".to_owned())
-            },
-        ];
+        let rules = vec![OptimizedRule {
+            name: "f".to_owned(),
+            ty: RuleType::Normal,
+            expr: OptimizedExpr::Ident("g".to_owned()),
+        }];
 
         assert_eq!(
             generate_enum(&rules, false).to_string(),
@@ -617,7 +637,8 @@
                 pub enum Rule {
                     f
                 }
-            }.to_string()
+            }
+            .to_string()
         );
     }
 
@@ -629,9 +650,9 @@
                 Box::new(OptimizedExpr::Str("b".to_owned())),
                 Box::new(OptimizedExpr::Seq(
                     Box::new(OptimizedExpr::Str("c".to_owned())),
-                    Box::new(OptimizedExpr::Str("d".to_owned()))
-                ))
-            ))
+                    Box::new(OptimizedExpr::Str("d".to_owned())),
+                )),
+            )),
         );
 
         assert_eq!(
@@ -652,7 +673,8 @@
                         state.match_string("d")
                     })
                 })
-            }.to_string()
+            }
+            .to_string()
         );
     }
 
@@ -664,9 +686,9 @@
                 Box::new(OptimizedExpr::Str("b".to_owned())),
                 Box::new(OptimizedExpr::Seq(
                     Box::new(OptimizedExpr::Str("c".to_owned())),
-                    Box::new(OptimizedExpr::Str("d".to_owned()))
-                ))
-            ))
+                    Box::new(OptimizedExpr::Str("d".to_owned())),
+                )),
+            )),
         );
 
         assert_eq!(
@@ -681,7 +703,8 @@
                         state.match_string("d")
                     })
                 })
-            }.to_string()
+            }
+            .to_string()
         );
     }
 
@@ -693,9 +716,9 @@
                 Box::new(OptimizedExpr::Str("b".to_owned())),
                 Box::new(OptimizedExpr::Choice(
                     Box::new(OptimizedExpr::Str("c".to_owned())),
-                    Box::new(OptimizedExpr::Str("d".to_owned()))
-                ))
-            ))
+                    Box::new(OptimizedExpr::Str("d".to_owned())),
+                )),
+            )),
         );
 
         assert_eq!(
@@ -708,7 +731,8 @@
                 }).or_else(|state| {
                     state.match_string("d")
                 })
-            }.to_string()
+            }
+            .to_string()
         );
     }
 
@@ -720,9 +744,9 @@
                 Box::new(OptimizedExpr::Str("b".to_owned())),
                 Box::new(OptimizedExpr::Choice(
                     Box::new(OptimizedExpr::Str("c".to_owned())),
-                    Box::new(OptimizedExpr::Str("d".to_owned()))
-                ))
-            ))
+                    Box::new(OptimizedExpr::Str("d".to_owned())),
+                )),
+            )),
         );
 
         assert_eq!(
@@ -735,7 +759,8 @@
                 }).or_else(|state| {
                     state.match_string("d")
                 })
-            }.to_string()
+            }
+            .to_string()
         );
     }
 
@@ -749,7 +774,8 @@
                 let strings = ["a", "b"];
 
                 state.skip_until(&strings)
-            }.to_string()
+            }
+            .to_string()
         );
     }
 
@@ -761,16 +787,16 @@
                 Box::new(OptimizedExpr::Range("a".to_owned(), "b".to_owned())),
                 Box::new(OptimizedExpr::Seq(
                     Box::new(OptimizedExpr::NegPred(Box::new(OptimizedExpr::Rep(
-                        Box::new(OptimizedExpr::Insens("b".to_owned()))
+                        Box::new(OptimizedExpr::Insens("b".to_owned())),
                     )))),
                     Box::new(OptimizedExpr::PosPred(Box::new(OptimizedExpr::Opt(
                         Box::new(OptimizedExpr::Rep(Box::new(OptimizedExpr::Choice(
                             Box::new(OptimizedExpr::Str("c".to_owned())),
-                            Box::new(OptimizedExpr::Str("d".to_owned()))
-                        ))))
-                    ))))
-                ))
-            ))
+                            Box::new(OptimizedExpr::Str("d".to_owned())),
+                        )))),
+                    )))),
+                )),
+            )),
         );
 
         let sequence = quote! {
@@ -835,7 +861,8 @@
                         })
                     })
                 })
-            }.to_string()
+            }
+            .to_string()
         );
     }
 
@@ -847,16 +874,16 @@
                 Box::new(OptimizedExpr::Range("a".to_owned(), "b".to_owned())),
                 Box::new(OptimizedExpr::Seq(
                     Box::new(OptimizedExpr::NegPred(Box::new(OptimizedExpr::Rep(
-                        Box::new(OptimizedExpr::Insens("b".to_owned()))
+                        Box::new(OptimizedExpr::Insens("b".to_owned())),
                     )))),
                     Box::new(OptimizedExpr::PosPred(Box::new(OptimizedExpr::Opt(
                         Box::new(OptimizedExpr::Rep(Box::new(OptimizedExpr::Choice(
                             Box::new(OptimizedExpr::Str("c".to_owned())),
-                            Box::new(OptimizedExpr::Str("d".to_owned()))
-                        ))))
-                    ))))
-                ))
-            ))
+                            Box::new(OptimizedExpr::Str("d".to_owned())),
+                        )))),
+                    )))),
+                )),
+            )),
         );
 
         assert_eq!(
@@ -884,7 +911,8 @@
                         })
                     })
                 })
-            }.to_string()
+            }
+            .to_string()
         );
     }
 
@@ -892,20 +920,17 @@
     fn generate_complete() {
         let name = Ident::new("MyParser", Span::call_site());
         let generics = Generics::default();
-        let rules = vec![
-            OptimizedRule {
-                name: "a".to_owned(),
-                ty: RuleType::Silent,
-                expr: OptimizedExpr::Str("b".to_owned())
-            },
-        ];
+        let rules = vec![OptimizedRule {
+            name: "a".to_owned(),
+            ty: RuleType::Silent,
+            expr: OptimizedExpr::Str("b".to_owned()),
+        }];
         let defaults = vec!["ANY"];
 
         assert_eq!(
-            generate(name, &generics, Path::new("test.pest"), rules, defaults, true).to_string(),
+            generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, true).to_string(),
             quote! {
                 #[allow(non_upper_case_globals)]
-                #[cfg(debug_assertions)]
                 const _PEST_GRAMMAR_MyParser: &'static str = include_str!("test.pest");
 
                 #[allow(dead_code, non_camel_case_types)]
@@ -914,6 +939,7 @@
                     a
                 }
 
+                #[allow(clippy::all)]
                 impl ::pest::Parser<Rule> for MyParser {
                     fn parse<'i>(
                         rule: Rule,
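
For reference, the `QuoteOption` helper introduced above is what lets the two `OptimizedExpr::PeekSlice` arms splice an `Option<i32>` end bound into the generated call. Here is a standalone sketch of that rendering, outside the generator (it assumes the proc-macro2 and quote crates as direct dependencies and is not part of the vendored code):

```rust
use proc_macro2::TokenStream;
use quote::{quote, ToTokens, TokenStreamExt};

// Same shape as the helper in generator.rs: render Some(x)/None as full paths
// so the generated code does not depend on what the user has in scope.
struct QuoteOption<T>(Option<T>);

impl<T: ToTokens> ToTokens for QuoteOption<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.append_all(match self.0 {
            Some(ref t) => quote! { ::std::option::Option::Some(#t) },
            None => quote! { ::std::option::Option::None },
        });
    }
}

fn main() {
    // Mirrors the PeekSlice arm: a grammar `PEEK[1..-2]` arrives here as
    // start = 1, end = Some(-2) and becomes a stack_match_peek_slice call.
    let (start, end) = (1i32, QuoteOption(Some(-2i32)));
    let call = quote! {
        state.stack_match_peek_slice(#start, #end, ::pest::MatchDir::BottomToTop)
    };
    println!("{}", call);
}
```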
diff --git a/rustc_deps/vendor/pest_generator/src/lib.rs b/rustc_deps/vendor/pest_generator/src/lib.rs
index 9658fe8..becafa2 100644
--- a/rustc_deps/vendor/pest_generator/src/lib.rs
+++ b/rustc_deps/vendor/pest_generator/src/lib.rs
@@ -31,30 +31,35 @@
 mod macros;
 mod generator;
 
-use pest_meta::{optimizer, unwrap_or_report, validator};
 use pest_meta::parser::{self, Rule};
+use pest_meta::{optimizer, unwrap_or_report, validator};
 
 pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
     let ast: DeriveInput = syn::parse2(input).unwrap();
-    let (name, generics, path) = parse_derive(ast);
+    let (name, generics, content) = parse_derive(ast);
 
-    let root = env::var("CARGO_MANIFEST_DIR").unwrap_or(".".into());
-    let path = Path::new(&root).join("src/").join(&path);
-    let file_name = match path.file_name() {
-        Some(file_name) => file_name,
-        None => panic!("grammar attribute should point to a file")
-    };
+    let (data, path) = match content {
+        GrammarSource::File(ref path) => {
+            let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
+            let path = Path::new(&root).join("src/").join(&path);
+            let file_name = match path.file_name() {
+                Some(file_name) => file_name,
+                None => panic!("grammar attribute should point to a file"),
+            };
 
-    let data = match read_file(&path) {
-        Ok(data) => data,
-        Err(error) => panic!("error opening {:?}: {}", file_name, error)
+            let data = match read_file(&path) {
+                Ok(data) => data,
+                Err(error) => panic!("error opening {:?}: {}", file_name, error),
+            };
+            (data, Some(path.clone()))
+        }
+        GrammarSource::Inline(content) => (content, None),
     };
 
     let pairs = match parser::parse(Rule::grammar_rules, &data) {
         Ok(pairs) => pairs,
         Err(error) => panic!(
-            "error parsing {:?}\n\n{}",
-            file_name,
+            "error parsing \n{}",
             error.renamed_rules(|rule| match *rule {
                 Rule::grammar_rule => "rule".to_owned(),
                 Rule::_push => "PUSH".to_owned(),
@@ -65,6 +70,8 @@
                 Rule::non_atomic_modifier => "`!`".to_owned(),
                 Rule::opening_brace => "`{`".to_owned(),
                 Rule::closing_brace => "`}`".to_owned(),
+                Rule::opening_brack => "`[`".to_owned(),
+                Rule::closing_brack => "`]`".to_owned(),
                 Rule::opening_paren => "`(`".to_owned(),
                 Rule::positive_predicate_operator => "`&`".to_owned(),
                 Rule::negative_predicate_operator => "`!`".to_owned(),
@@ -79,18 +86,16 @@
                 Rule::insensitive_string => "`^`".to_owned(),
                 Rule::range_operator => "`..`".to_owned(),
                 Rule::single_quote => "`'`".to_owned(),
-                other_rule => format!("{:?}", other_rule)
+                other_rule => format!("{:?}", other_rule),
             })
-        )
+        ),
     };
 
     let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
     let ast = unwrap_or_report(parser::consume_rules(pairs));
     let optimized = optimizer::optimize(ast);
-    let generated =
-        generator::generate(name, &generics, &path, optimized, defaults, include_grammar);
 
-    generated.into()
+    generator::generate(name, &generics, path, optimized, defaults, include_grammar)
 }
 
 fn read_file<P: AsRef<Path>>(path: P) -> io::Result<String> {
@@ -100,43 +105,71 @@
     Ok(string)
 }
 
-fn parse_derive(ast: DeriveInput) -> (Ident, Generics, String) {
+#[derive(Debug, PartialEq)]
+enum GrammarSource {
+    File(String),
+    Inline(String),
+}
+
+fn parse_derive(ast: DeriveInput) -> (Ident, Generics, GrammarSource) {
     let name = ast.ident;
     let generics = ast.generics;
 
-    let grammar: Vec<&Attribute> = ast.attrs
+    let grammar: Vec<&Attribute> = ast
+        .attrs
         .iter()
         .filter(|attr| match attr.interpret_meta() {
-            Some(Meta::NameValue(name_value)) => name_value.ident.to_string() == "grammar",
-            _ => false
+            Some(Meta::NameValue(name_value)) => {
+                (name_value.ident == "grammar" || name_value.ident == "grammar_inline")
+            }
+            _ => false,
         })
         .collect();
 
-    let filename = match grammar.len() {
-        0 => panic!("a grammar file needs to be provided with the #[grammar = \"...\"] attribute"),
-        1 => get_filename(grammar[0]),
-        _ => panic!("only 1 grammar file can be provided")
+    let argument = match grammar.len() {
+        0 => panic!("a grammar file needs to be provided with the #[grammar = \"PATH\"] or #[grammar_inline = \"GRAMMAR CONTENTS\"] attribute"),
+        1 => get_attribute(grammar[0]),
+        _ => panic!("only 1 grammar file can be provided"),
     };
 
-    (name, generics, filename)
+    (name, generics, argument)
 }
 
-fn get_filename(attr: &Attribute) -> String {
+fn get_attribute(attr: &Attribute) -> GrammarSource {
     match attr.interpret_meta() {
         Some(Meta::NameValue(name_value)) => match name_value.lit {
-            Lit::Str(filename) => filename.value(),
-            _ => panic!("grammar attribute must be a string")
+            Lit::Str(string) => {
+                if name_value.ident == "grammar" {
+                    GrammarSource::File(string.value())
+                } else {
+                    GrammarSource::Inline(string.value())
+                }
+            }
+            _ => panic!("grammar attribute must be a string"),
         },
-        _ => panic!("grammar attribute must be of the form `grammar = \"...\"`")
+        _ => panic!("grammar attribute must be of the form `grammar = \"...\"`"),
     }
 }
 
 #[cfg(test)]
 mod tests {
     use super::parse_derive;
+    use super::GrammarSource;
     use syn;
 
     #[test]
+    fn derive_inline_file() {
+        let definition = "
+            #[other_attr]
+            #[grammar_inline = \"GRAMMAR\"]
+            pub struct MyParser<'a, T>;
+        ";
+        let ast = syn::parse_str(definition).unwrap();
+        let (_, _, filename) = parse_derive(ast);
+        assert_eq!(filename, GrammarSource::Inline("GRAMMAR".to_string()));
+    }
+
+    #[test]
     fn derive_ok() {
         let definition = "
             #[other_attr]
@@ -145,7 +178,7 @@
         ";
         let ast = syn::parse_str(definition).unwrap();
         let (_, _, filename) = parse_derive(ast);
-        assert_eq!(filename, "myfile.pest");
+        assert_eq!(filename, GrammarSource::File("myfile.pest".to_string()));
     }
 
     #[test]
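
With the `GrammarSource` enum in place, the derive accepts either attribute form. A hypothetical side-by-side sketch follows (module and parser names are illustrative, and the file-backed variant only compiles if `src/ident.pest` actually exists):

```rust
#[macro_use]
extern crate pest_derive;
extern crate pest;

// File-backed grammar: parse_derive yields GrammarSource::File("ident.pest");
// the path is resolved relative to CARGO_MANIFEST_DIR/src/ and an include_str!
// constant is still emitted so Cargo rebuilds when the file changes.
mod file_backed {
    #[derive(Parser)]
    #[grammar = "ident.pest"]
    pub struct IdentParser;
}

// Inline grammar: parse_derive yields GrammarSource::Inline(..); no include_str!
// constant is emitted because there is no file for Cargo to watch.
mod inline {
    #[derive(Parser)]
    #[grammar_inline = "string = { \"abc\" }"]
    pub struct InlineParser;
}

fn main() {}
```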
diff --git a/rustc_deps/vendor/pest_meta/.cargo-checksum.json b/rustc_deps/vendor/pest_meta/.cargo-checksum.json
index 271359e..a86f4fb 100644
--- a/rustc_deps/vendor/pest_meta/.cargo-checksum.json
+++ b/rustc_deps/vendor/pest_meta/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"783b1be5f465b261d0383e1c91f1467d4d80700987c5792f3efbc31f3345ea44","src/ast.rs":"d265224cb8fb2ee6ff854d37d1b5ae9f30b27bccdf239f4f157aeebb541ffcf8","src/grammar.pest":"9e84ba084e7fdb52792bcf6f138047e58dfdce224ce7aa20a70d986e5441558b","src/grammar.rs":"3391e8997d98df591c838d68535694c75af143aa1f4c78b2140aba547c659258","src/lib.rs":"3b052dec93ff87359a72c357999ccd7afb963554f2779e8d97d6cf6be4532364","src/optimizer/concatenator.rs":"1039d336a30fe1ae4688e295aab770c33f8c60345df7330fed7fafed3467f387","src/optimizer/factorizer.rs":"297857fc5613efad11fe158d6327edecd65b0992c8163585db8bb3a87927dbe0","src/optimizer/mod.rs":"67835a49e3117ab6ee7218de60dd6bfecb6b4d6b18bc26a9c8a87a13e8e88729","src/optimizer/restorer.rs":"1ec5a3acb5498173e0e44c5dd8604379145afe9696ff75e260e0094a65fdcca4","src/optimizer/rotater.rs":"388843ab92617a01c6a875a18017b3392551c32b86f51d95e978b62c2c40a67d","src/optimizer/skipper.rs":"4e6d6660fa6118f7219bc0437c8dd0df22bc2961c8dc7171591a1a21b58ddc11","src/optimizer/unroller.rs":"de7d67d38a1b0a58119c2f720f0177613db7d57364a00b1301ed6bc90b6ac933","src/parser.rs":"8cabdf763b382d87aa78f6f9933a0ab531725e4bbe4c35b3103c93484aa90c2d","src/validator.rs":"24d5f066dd5e2cac3796439b3479cd5972f13144e65b5396c7347a853f833819"},"package":"1f6d5f6f0e6082578c86af197d780dc38328e3f768cec06aac9bc46d714e8221"}
\ No newline at end of file
+{"files":{"Cargo.toml":"8756f43b632c8aadf6cdb6e0701e136f3997ca5ed5486b5da45bc8fb90bff518","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","_README.md":"7c31c505c267ac538b70deaa9ad95ef0cc68720c46b4d3276df8f3a278d233e2","src/ast.rs":"b2d0b054d66525b6c9ceed7a9b24cabcdd1074b525b230fc99a1a31019010f58","src/grammar.pest":"fac55bde3f3b59058293d220fec96ef033e61cb24fc032cad21d76aa6f963bc1","src/grammar.rs":"3e49f17a8c526ef1cd5493b77aa88d7133fdb35fc9a291e16080730783c01b89","src/lib.rs":"b248cf6bc5d4a8537f682c23f8d2c8910eb764ee5f1cc02b2c344b0b920943da","src/optimizer/concatenator.rs":"94c2a79d39ebeddbc413d30f2c20d0b50ae2c0e2e86bff90a029b70aa028e2fc","src/optimizer/factorizer.rs":"84df67d6e3f1b507b3e970cb8fba1ab9e2254c3ba778c4a0cf100452b36f1e08","src/optimizer/mod.rs":"69f25916112cbfa4b3d2225f8af463a03ebe706ae757d9c97be523c5601d7b3c","src/optimizer/restorer.rs":"182561b972752ec961fa45d0ca705a9852462010003e4e1ab646e5cd486bedd1","src/optimizer/rotater.rs":"e59f6c3e9197dc3cb4f21edf0c5ffb1a2638a81e4ce9b0373a2d2147a12a3bd5","src/optimizer/skipper.rs":"ab52c7a44a35aefcd1546da5e2084af642c472f07565dcb1be1bb3e2b5fc5a41","src/optimizer/unroller.rs":"99b4940906b0eb443a8b0bf6499130098d59a8141f2693ec50a653cb21c534d5","src/parser.rs":"43c16be0f3dfde85e46509c674c788c989b873f0a3181d2d4a2f8c4cb4e194f0","src/validator.rs":"d1475f319991079fcfe0e0633bc8ca30aa90e77cc6f4739799174120d37ffb5b"},"package":"f5a3492a4ed208ffc247adcdcc7ba2a95be3104f58877d0d02f0df39bf3efb5e"}
\ No newline at end of file
diff --git a/rustc_deps/vendor/pest_meta/Cargo.toml b/rustc_deps/vendor/pest_meta/Cargo.toml
index 6203db1..7991e2c 100644
--- a/rustc_deps/vendor/pest_meta/Cargo.toml
+++ b/rustc_deps/vendor/pest_meta/Cargo.toml
@@ -12,10 +12,10 @@
 
 [package]
 name = "pest_meta"
-version = "2.0.3"
+version = "2.1.0"
 authors = ["Dragoș Tiselice <dragostiselice@gmail.com>"]
 exclude = ["src/grammar.pest"]
-include = ["Cargo.toml", "src/**/*", "src/grammar.rs"]
+include = ["Cargo.toml", "src/**/*", "src/grammar.rs", "_README.md", "LICENSE-*"]
 description = "pest meta language parser and validator"
 homepage = "https://pest-parser.github.io/"
 documentation = "https://docs.rs/pest"
@@ -28,7 +28,7 @@
 version = "1.0"
 
 [dependencies.pest]
-version = "2.0"
+version = "2.1.0"
 [build-dependencies.sha-1]
 version = "0.7.0"
 [badges.codecov]
diff --git a/rustc_deps/vendor/pest_meta/LICENSE b/rustc_deps/vendor/pest_meta/LICENSE
deleted file mode 100644
index e470cf5..0000000
--- a/rustc_deps/vendor/pest_meta/LICENSE
+++ /dev/null
@@ -1,229 +0,0 @@
-https://raw.githubusercontent.com/pest-parser/pest/master/LICENSE-APACHE:
-
-                              Apache License
-                        Version 2.0, January 2004
-                     http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-   "License" shall mean the terms and conditions for use, reproduction,
-   and distribution as defined by Sections 1 through 9 of this document.
-
-   "Licensor" shall mean the copyright owner or entity authorized by
-   the copyright owner that is granting the License.
-
-   "Legal Entity" shall mean the union of the acting entity and all
-   other entities that control, are controlled by, or are under common
-   control with that entity. For the purposes of this definition,
-   "control" means (i) the power, direct or indirect, to cause the
-   direction or management of such entity, whether by contract or
-   otherwise, or (ii) ownership of fifty percent (50%) or more of the
-   outstanding shares, or (iii) beneficial ownership of such entity.
-
-   "You" (or "Your") shall mean an individual or Legal Entity
-   exercising permissions granted by this License.
-
-   "Source" form shall mean the preferred form for making modifications,
-   including but not limited to software source code, documentation
-   source, and configuration files.
-
-   "Object" form shall mean any form resulting from mechanical
-   transformation or translation of a Source form, including but
-   not limited to compiled object code, generated documentation,
-   and conversions to other media types.
-
-   "Work" shall mean the work of authorship, whether in Source or
-   Object form, made available under the License, as indicated by a
-   copyright notice that is included in or attached to the work
-   (an example is provided in the Appendix below).
-
-   "Derivative Works" shall mean any work, whether in Source or Object
-   form, that is based on (or derived from) the Work and for which the
-   editorial revisions, annotations, elaborations, or other modifications
-   represent, as a whole, an original work of authorship. For the purposes
-   of this License, Derivative Works shall not include works that remain
-   separable from, or merely link (or bind by name) to the interfaces of,
-   the Work and Derivative Works thereof.
-
-   "Contribution" shall mean any work of authorship, including
-   the original version of the Work and any modifications or additions
-   to that Work or Derivative Works thereof, that is intentionally
-   submitted to Licensor for inclusion in the Work by the copyright owner
-   or by an individual or Legal Entity authorized to submit on behalf of
-   the copyright owner. For the purposes of this definition, "submitted"
-   means any form of electronic, verbal, or written communication sent
-   to the Licensor or its representatives, including but not limited to
-   communication on electronic mailing lists, source code control systems,
-   and issue tracking systems that are managed by, or on behalf of, the
-   Licensor for the purpose of discussing and improving the Work, but
-   excluding communication that is conspicuously marked or otherwise
-   designated in writing by the copyright owner as "Not a Contribution."
-
-   "Contributor" shall mean Licensor and any individual or Legal Entity
-   on behalf of whom a Contribution has been received by Licensor and
-   subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
-   this License, each Contributor hereby grants to You a perpetual,
-   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-   copyright license to reproduce, prepare Derivative Works of,
-   publicly display, publicly perform, sublicense, and distribute the
-   Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
-   this License, each Contributor hereby grants to You a perpetual,
-   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-   (except as stated in this section) patent license to make, have made,
-   use, offer to sell, sell, import, and otherwise transfer the Work,
-   where such license applies only to those patent claims licensable
-   by such Contributor that are necessarily infringed by their
-   Contribution(s) alone or by combination of their Contribution(s)
-   with the Work to which such Contribution(s) was submitted. If You
-   institute patent litigation against any entity (including a
-   cross-claim or counterclaim in a lawsuit) alleging that the Work
-   or a Contribution incorporated within the Work constitutes direct
-   or contributory patent infringement, then any patent licenses
-   granted to You under this License for that Work shall terminate
-   as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
-   Work or Derivative Works thereof in any medium, with or without
-   modifications, and in Source or Object form, provided that You
-   meet the following conditions:
-
-   (a) You must give any other recipients of the Work or
-       Derivative Works a copy of this License; and
-
-   (b) You must cause any modified files to carry prominent notices
-       stating that You changed the files; and
-
-   (c) You must retain, in the Source form of any Derivative Works
-       that You distribute, all copyright, patent, trademark, and
-       attribution notices from the Source form of the Work,
-       excluding those notices that do not pertain to any part of
-       the Derivative Works; and
-
-   (d) If the Work includes a "NOTICE" text file as part of its
-       distribution, then any Derivative Works that You distribute must
-       include a readable copy of the attribution notices contained
-       within such NOTICE file, excluding those notices that do not
-       pertain to any part of the Derivative Works, in at least one
-       of the following places: within a NOTICE text file distributed
-       as part of the Derivative Works; within the Source form or
-       documentation, if provided along with the Derivative Works; or,
-       within a display generated by the Derivative Works, if and
-       wherever such third-party notices normally appear. The contents
-       of the NOTICE file are for informational purposes only and
-       do not modify the License. You may add Your own attribution
-       notices within Derivative Works that You distribute, alongside
-       or as an addendum to the NOTICE text from the Work, provided
-       that such additional attribution notices cannot be construed
-       as modifying the License.
-
-   You may add Your own copyright statement to Your modifications and
-   may provide additional or different license terms and conditions
-   for use, reproduction, or distribution of Your modifications, or
-   for any such Derivative Works as a whole, provided Your use,
-   reproduction, and distribution of the Work otherwise complies with
-   the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
-   any Contribution intentionally submitted for inclusion in the Work
-   by You to the Licensor shall be under the terms and conditions of
-   this License, without any additional terms or conditions.
-   Notwithstanding the above, nothing herein shall supersede or modify
-   the terms of any separate license agreement you may have executed
-   with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
-   names, trademarks, service marks, or product names of the Licensor,
-   except as required for reasonable and customary use in describing the
-   origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
-   agreed to in writing, Licensor provides the Work (and each
-   Contributor provides its Contributions) on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-   implied, including, without limitation, any warranties or conditions
-   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-   PARTICULAR PURPOSE. You are solely responsible for determining the
-   appropriateness of using or redistributing the Work and assume any
-   risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
-   whether in tort (including negligence), contract, or otherwise,
-   unless required by applicable law (such as deliberate and grossly
-   negligent acts) or agreed to in writing, shall any Contributor be
-   liable to You for damages, including any direct, indirect, special,
-   incidental, or consequential damages of any character arising as a
-   result of this License or out of the use or inability to use the
-   Work (including but not limited to damages for loss of goodwill,
-   work stoppage, computer failure or malfunction, or any and all
-   other commercial damages or losses), even if such Contributor
-   has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
-   the Work or Derivative Works thereof, You may choose to offer,
-   and charge a fee for, acceptance of support, warranty, indemnity,
-   or other liability obligations and/or rights consistent with this
-   License. However, in accepting such obligations, You may act only
-   on Your own behalf and on Your sole responsibility, not on behalf
-   of any other Contributor, and only if You agree to indemnify,
-   defend, and hold each Contributor harmless for any liability
-   incurred by, or claims asserted against, such Contributor by reason
-   of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
-   To apply the Apache License to your work, attach the following
-   boilerplate notice, with the fields enclosed by brackets "[]"
-   replaced with your own identifying information. (Don't include
-   the brackets!)  The text should be enclosed in the appropriate
-   comment syntax for the file format. We also recommend that a
-   file or class name and description of purpose be included on the
-   same "printed page" as the copyright notice for easier
-   identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-	http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-========================================
-https://raw.githubusercontent.com/pest-parser/pest/master/LICENSE-MIT:
-
-Permission is hereby granted, free of charge, to any
-person obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the
-Software without restriction, including without
-limitation the rights to use, copy, modify, merge,
-publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice
-shall be included in all copies or substantial portions
-of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
-ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
-TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
-PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
-IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-DEALINGS IN THE SOFTWARE.
diff --git a/rustc_deps/vendor/syn-0.14.9/LICENSE-APACHE b/rustc_deps/vendor/pest_meta/LICENSE-APACHE
similarity index 100%
rename from rustc_deps/vendor/syn-0.14.9/LICENSE-APACHE
rename to rustc_deps/vendor/pest_meta/LICENSE-APACHE
diff --git a/rustc_deps/vendor/syn-0.14.9/LICENSE-MIT b/rustc_deps/vendor/pest_meta/LICENSE-MIT
similarity index 96%
rename from rustc_deps/vendor/syn-0.14.9/LICENSE-MIT
rename to rustc_deps/vendor/pest_meta/LICENSE-MIT
index 5767dea..31aa793 100644
--- a/rustc_deps/vendor/syn-0.14.9/LICENSE-MIT
+++ b/rustc_deps/vendor/pest_meta/LICENSE-MIT
@@ -1,5 +1,3 @@
-Copyright (c) 2018 Syn Developers
-
 Permission is hereby granted, free of charge, to any
 person obtaining a copy of this software and associated
 documentation files (the "Software"), to deal in the
diff --git a/rustc_deps/vendor/pest_meta/_README.md b/rustc_deps/vendor/pest_meta/_README.md
new file mode 100644
index 0000000..2c94a72
--- /dev/null
+++ b/rustc_deps/vendor/pest_meta/_README.md
@@ -0,0 +1,167 @@
+<p align="center">
+  <img src="https://raw.github.com/pest-parser/pest/master/pest-logo.svg?sanitize=true" width="80%"/>
+</p>
+
+# pest. The Elegant Parser
+
+[![Join the chat at https://gitter.im/dragostis/pest](https://badges.gitter.im/dragostis/pest.svg)](https://gitter.im/dragostis/pest?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+[![Book](https://img.shields.io/badge/book-WIP-4d76ae.svg)](https://pest-parser.github.io/book)
+[![Docs](https://docs.rs/pest/badge.svg)](https://docs.rs/pest)
+
+[![Build Status](https://travis-ci.org/pest-parser/pest.svg?branch=master)](https://travis-ci.org/pest-parser/pest)
+[![codecov](https://codecov.io/gh/pest-parser/pest/branch/master/graph/badge.svg)](https://codecov.io/gh/pest-parser/pest)
+[![Crates.io](https://img.shields.io/crates/d/pest.svg)](https://crates.io/crates/pest)
+[![Crates.io](https://img.shields.io/crates/v/pest.svg)](https://crates.io/crates/pest)
+
+pest is a general purpose parser written in Rust with a focus on accessibility,
+correctness, and performance. It uses parsing expression grammars
+(or [PEG]) as input, which are similar in spirit to regular expressions, but
+which offer the enhanced expressivity needed to parse complex languages.
+
+[PEG]: https://en.wikipedia.org/wiki/Parsing_expression_grammar
+
+## Getting started
+
+The recommended way to start parsing with pest is to read the official [book].
+
+Other helpful resources:
+
+* API reference on [docs.rs]
+* play with grammars and share them on our [fiddle]
+* leave feedback, ask questions, or greet us on [Gitter]
+
+[book]: https://pest-parser.github.io/book
+[docs.rs]: https://docs.rs/pest
+[fiddle]: https://pest-parser.github.io/#editor
+[Gitter]: https://gitter.im/dragostis/pest
+
+## Example
+
+The following is an example of a grammar for a list of alpha-numeric identifiers
+where the first identifier does not start with a digit:
+
+```rust
+alpha = { 'a'..'z' | 'A'..'Z' }
+digit = { '0'..'9' }
+
+ident = { (alpha | digit)+ }
+
+ident_list = _{ !digit ~ ident ~ (" " ~ ident)+ }
+          // ^
+          // ident_list rule is silent which means it produces no tokens
+```
+
+Grammars are saved in separate .pest files which are never mixed with procedural
+code. This results in an always up-to-date formalization of a language that is
+easy to read and maintain.
+
+## Meaningful error reporting
+
+Based on the grammar definition, the parser also includes automatic error
+reporting. For the example above, the input `"123"` will result in:
+
+```
+thread 'main' panicked at ' --> 1:1
+  |
+1 | 123
+  | ^---
+  |
+  = unexpected digit', src/main.rs:12
+```
+while `"ab *"` will result in:
+```
+thread 'main' panicked at ' --> 1:1
+  |
+1 | ab *
+  |    ^---
+  |
+  = expected ident', src/main.rs:12
+```
+
+## Pairs API
+
+The grammar can be used to derive a `Parser` implementation automatically.
+Parsing returns an iterator of nested token pairs:
+
+```rust
+extern crate pest;
+#[macro_use]
+extern crate pest_derive;
+
+use pest::Parser;
+
+#[derive(Parser)]
+#[grammar = "ident.pest"]
+struct IdentParser;
+
+fn main() {
+    let pairs = IdentParser::parse(Rule::ident_list, "a1 b2").unwrap_or_else(|e| panic!("{}", e));
+
+    // Because ident_list is silent, the iterator will contain idents
+    for pair in pairs {
+
+        let span = pair.clone().into_span();
+        // A pair is a combination of the rule which matched and a span of input
+        println!("Rule:    {:?}", pair.as_rule());
+        println!("Span:    {:?}", span);
+        println!("Text:    {}", span.as_str());
+
+        // A pair can be converted to an iterator of the tokens which make it up:
+        for inner_pair in pair.into_inner() {
+            let inner_span = inner_pair.clone().into_span();
+            match inner_pair.as_rule() {
+                Rule::alpha => println!("Letter:  {}", inner_span.as_str()),
+                Rule::digit => println!("Digit:   {}", inner_span.as_str()),
+                _ => unreachable!()
+            };
+        }
+    }
+}
+```
+
+This produces the following output:
+```
+Rule:    ident
+Span:    Span { start: 0, end: 2 }
+Text:    a1
+Letter:  a
+Digit:   1
+Rule:    ident
+Span:    Span { start: 3, end: 5 }
+Text:    b2
+Letter:  b
+Digit:   2
+```
+
+## Other features
+
+* Precedence climbing
+* Input handling
+* Custom errors
+* Runs on stable Rust
+
+## Projects using pest
+
+* [pest_meta](https://github.com/pest-parser/pest/blob/master/meta/src/grammar.pest) (bootstrapped)
+* [brain](https://github.com/brain-lang/brain)
+* [Chelone](https://github.com/Aaronepower/chelone)
+* [comrak](https://github.com/kivikakk/comrak)
+* [graphql-parser](https://github.com/Keats/graphql-parser)
+* [handlebars-rust](https://github.com/sunng87/handlebars-rust)
+* [hexdino](https://github.com/Luz/hexdino)
+* [Huia](https://gitlab.com/jimsy/huia/)
+* [json5-rs](https://github.com/callum-oakley/json5-rs)
+* [mt940](https://github.com/svenstaro/mt940-rs)
+* [py_literal](https://github.com/jturner314/py_literal)
+* [rouler](https://github.com/jarcane/rouler)
+* [RuSh](https://github.com/lwandrebeck/RuSh)
+* [rs_pbrt](https://github.com/wahn/rs_pbrt)
+* [stache](https://github.com/dgraham/stache)
+* [tera](https://github.com/Keats/tera)
+* [ui_gen](https://github.com/emoon/ui_gen)
+* [ukhasnet-parser](https://github.com/adamgreig/ukhasnet-parser)
+
+## Special thanks
+
+A special round of applause goes to prof. Marius Minea for his guidance and all
+pest contributors, some of which being none other than my friends.
diff --git a/rustc_deps/vendor/pest_meta/src/ast.rs b/rustc_deps/vendor/pest_meta/src/ast.rs
index 151b550..b80c596 100644
--- a/rustc_deps/vendor/pest_meta/src/ast.rs
+++ b/rustc_deps/vendor/pest_meta/src/ast.rs
@@ -11,7 +11,7 @@
 pub struct Rule {
     pub name: String,
     pub ty: RuleType,
-    pub expr: Expr
+    pub expr: Expr,
 }
 
 #[derive(Clone, Copy, Debug, Eq, PartialEq)]
@@ -20,7 +20,7 @@
     Silent,
     Atomic,
     CompoundAtomic,
-    NonAtomic
+    NonAtomic,
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
@@ -33,6 +33,8 @@
     Range(String, String),
     /// Matches the rule with the given name, e.g. `a`
     Ident(String),
+    /// Matches a custom part of the stack, e.g. `PEEK[..]`
+    PeekSlice(i32, Option<i32>),
     /// Positive lookahead; matches expression without making progress, e.g. `&e`
     PosPred(Box<Expr>),
     /// Negative lookahead; matches if expression doesn't match, without making progress, e.g. `!e`
@@ -58,7 +60,7 @@
     /// Continues to match expressions until one of the strings in the `Vec` is found
     Skip(Vec<String>),
     /// Matches an expression and pushes it to the stack, e.g. `push(e)`
-    Push(Box<Expr>)
+    Push(Box<Expr>),
 }
 
 impl Expr {
@@ -68,11 +70,11 @@
 
     pub fn map_top_down<F>(self, mut f: F) -> Expr
     where
-        F: FnMut(Expr) -> Expr
+        F: FnMut(Expr) -> Expr,
     {
         fn map_internal<F>(expr: Expr, f: &mut F) -> Expr
         where
-            F: FnMut(Expr) -> Expr
+            F: FnMut(Expr) -> Expr,
         {
             let expr = f(expr);
 
@@ -128,7 +130,7 @@
                     let mapped = Box::new(map_internal(*expr, f));
                     Expr::Push(mapped)
                 }
-                expr => expr
+                expr => expr,
             }
         }
 
@@ -137,11 +139,11 @@
 
     pub fn map_bottom_up<F>(self, mut f: F) -> Expr
     where
-        F: FnMut(Expr) -> Expr
+        F: FnMut(Expr) -> Expr,
     {
         fn map_internal<F>(expr: Expr, f: &mut F) -> Expr
         where
-            F: FnMut(Expr) -> Expr
+            F: FnMut(Expr) -> Expr,
         {
             let mapped = match expr {
                 Expr::PosPred(expr) => {
@@ -195,7 +197,7 @@
                     let mapped = Box::new(map_internal(*expr, f));
                     Expr::Push(mapped)
                 }
-                expr => expr
+                expr => expr,
             };
 
             f(mapped)
@@ -208,7 +210,7 @@
 pub struct ExprTopDownIterator {
     current: Option<Expr>,
     next: Option<Expr>,
-    right_branches: Vec<Expr>
+    right_branches: Vec<Expr>,
 }
 
 impl ExprTopDownIterator {
@@ -216,7 +218,7 @@
         let mut iter = ExprTopDownIterator {
             current: None,
             next: None,
-            right_branches: vec![]
+            right_branches: vec![],
         };
         iter.iterate_expr(expr.clone());
         iter
@@ -276,7 +278,7 @@
     fn top_down_iterator() {
         let expr = Expr::Choice(
             Box::new(Expr::Str(String::from("a"))),
-            Box::new(Expr::Str(String::from("b")))
+            Box::new(Expr::Str(String::from("b"))),
         );
         let mut top_down = expr.clone().iter_top_down();
         assert_eq!(top_down.next(), Some(expr));
@@ -290,17 +292,17 @@
         let expr = Expr::Choice(
             Box::new(Expr::Seq(
                 Box::new(Expr::Ident("a".to_owned())),
-                Box::new(Expr::Str("b".to_owned()))
+                Box::new(Expr::Str("b".to_owned())),
             )),
             Box::new(Expr::PosPred(Box::new(Expr::NegPred(Box::new(Expr::Rep(
                 Box::new(Expr::RepOnce(Box::new(Expr::Opt(Box::new(Expr::Choice(
                     Box::new(Expr::Insens("c".to_owned())),
                     Box::new(Expr::Push(Box::new(Expr::Range(
                         "'d'".to_owned(),
-                        "'e'".to_owned()
-                    ))))
-                ))))))
-            ))))))
+                        "'e'".to_owned(),
+                    )))),
+                )))))),
+            )))))),
         );
 
         assert_eq!(
diff --git a/rustc_deps/vendor/pest_meta/src/grammar.pest b/rustc_deps/vendor/pest_meta/src/grammar.pest
index bf5da55..0d03ba8 100644
--- a/rustc_deps/vendor/pest_meta/src/grammar.pest
+++ b/rustc_deps/vendor/pest_meta/src/grammar.pest
@@ -19,6 +19,8 @@
 closing_brace       = { "}" }
 opening_paren       = { "(" }
 closing_paren       = { ")" }
+opening_brack       = { "[" }
+closing_brack       = { "]" }
 
 modifier = _{
     silent_modifier |
@@ -35,7 +37,7 @@
 expression =  { term ~ (infix_operator ~ term)* }
 term       =  { prefix_operator* ~ node ~ postfix_operator* }
 node       = _{ opening_paren ~ expression ~ closing_paren | terminal }
-terminal   = _{ _push | identifier | string | insensitive_string | range }
+terminal   = _{ _push | peek_slice | identifier | string | insensitive_string | range }
 
 prefix_operator  = _{ positive_predicate_operator | negative_predicate_operator }
 infix_operator   = _{ sequence_operator | choice_operator }
@@ -63,10 +65,12 @@
 repeat_min_max = { opening_brace ~ number ~ comma ~ number ~ closing_brace }
 
 number = @{ '0'..'9'+ }
+integer = @{ number | "-" ~ "0"* ~ '1'..'9' ~ number? }
 
 comma = { "," }
 
 _push = { "PUSH" ~ opening_paren ~ expression ~ closing_paren }
+peek_slice = { "PEEK" ~ opening_brack ~ integer? ~ range_operator ~ integer? ~ closing_brack }
 
 identifier = @{ !"PUSH" ~ ("_" | alpha) ~ ("_" | alpha_num)* }
 alpha      = _{ 'a'..'z' | 'A'..'Z' }
@@ -90,4 +94,5 @@
 
 newline    = _{ "\n" | "\r\n" }
 WHITESPACE = _{ " " | "\t" | newline }
-COMMENT    = _{ "//" ~ (!newline ~ ANY)* }
+block_comment = _{ "/*" ~ (block_comment | !"*/" ~ ANY)* ~ "*/" }
+COMMENT    = _{ block_comment | ("//" ~ (!newline ~ ANY)*) }
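
Taken together, the new brackets, the signed `integer` rule, and the `block_comment` rule above extend what the meta-grammar will accept. A small, hypothetical smoke test against pest_meta's public parser entry point (the same `parser::parse(Rule::grammar_rules, ..)` call that pest_generator uses above) might look like this:

```rust
extern crate pest_meta;

use pest_meta::parser::{self, Rule};

fn main() {
    // Both the nested block comment and the PEEK slice with a negative bound
    // are new surface syntax in 2.1.
    let grammar = r#"
        /* block /* nested */ comment */
        a = { PUSH("x") ~ PUSH("y") ~ PEEK[0..-1] }
    "#;
    assert!(parser::parse(Rule::grammar_rules, grammar).is_ok());
}
```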
diff --git a/rustc_deps/vendor/pest_meta/src/grammar.rs b/rustc_deps/vendor/pest_meta/src/grammar.rs
index 23658d0..6deefe5 100644
--- a/rustc_deps/vendor/pest_meta/src/grammar.rs
+++ b/rustc_deps/vendor/pest_meta/src/grammar.rs
@@ -1,2 +1,2 @@
 pub struct PestParser;
-# [ allow ( dead_code , non_camel_case_types ) ] # [ derive ( Clone , Copy , Debug , Eq , Hash , Ord , PartialEq , PartialOrd ) ] pub enum Rule { EOI , grammar_rules , grammar_rule , assignment_operator , opening_brace , closing_brace , opening_paren , closing_paren , modifier , silent_modifier , atomic_modifier , compound_atomic_modifier , non_atomic_modifier , expression , term , node , terminal , prefix_operator , infix_operator , postfix_operator , positive_predicate_operator , negative_predicate_operator , sequence_operator , choice_operator , optional_operator , repeat_operator , repeat_once_operator , repeat_exact , repeat_min , repeat_max , repeat_min_max , number , comma , _push , identifier , alpha , alpha_num , string , insensitive_string , range , character , inner_str , inner_chr , escape , code , unicode , hex_digit , quote , single_quote , range_operator , newline , WHITESPACE , COMMENT } impl :: pest :: Parser < Rule > for PestParser { fn parse < 'i > ( rule : Rule , input : & 'i str ) -> :: std :: result :: Result < :: pest :: iterators :: Pairs < 'i , Rule > , :: pest :: error :: Error < Rule > > { mod rules { pub mod hidden { use super :: super :: Rule ; # [ inline ] # [ allow ( dead_code , non_snake_case , unused_variables ) ] pub fn skip ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { if state . atomicity ( ) == :: pest :: Atomicity :: NonAtomic { state . sequence ( | state | { state . repeat ( | state | { super :: visible :: WHITESPACE ( state ) } ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: visible :: COMMENT ( state ) . and_then ( | state | { state . repeat ( | state | { super :: visible :: WHITESPACE ( state ) } ) } ) } ) } ) } ) } ) } else { Ok ( state ) } } } pub mod visible { use super :: super :: Rule ; # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn grammar_rules ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . sequence ( | state | { self :: SOI ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . sequence ( | state | { self :: grammar_rule ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . sequence ( | state | { state . optional ( | state | { self :: grammar_rule ( state ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: hidden :: skip ( state ) . and_then ( | state | { self :: grammar_rule ( state ) } ) } ) } ) } ) } ) } ) } ) } ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: EOI ( state ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn grammar_rule ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: grammar_rule , | state | { state . sequence ( | state | { self :: identifier ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: assignment_operator ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . optional ( | state | { self :: modifier ( state ) } ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: opening_brace ( state ) } ) . 
and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: expression ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn assignment_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: assignment_operator , | state | { state . match_string ( "=" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn opening_brace ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: opening_brace , | state | { state . match_string ( "{" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn closing_brace ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: closing_brace , | state | { state . match_string ( "}" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn opening_paren ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: opening_paren , | state | { state . match_string ( "(" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn closing_paren ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: closing_paren , | state | { state . match_string ( ")" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: silent_modifier ( state ) . or_else ( | state | { self :: atomic_modifier ( state ) } ) . or_else ( | state | { self :: compound_atomic_modifier ( state ) } ) . or_else ( | state | { self :: non_atomic_modifier ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn silent_modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: silent_modifier , | state | { state . match_string ( "_" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn atomic_modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: atomic_modifier , | state | { state . match_string ( "@" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn compound_atomic_modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: compound_atomic_modifier , | state | { state . match_string ( "$" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn non_atomic_modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: non_atomic_modifier , | state | { state . match_string ( "!" 
) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn expression ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: expression , | state | { state . sequence ( | state | { self :: term ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . sequence ( | state | { state . optional ( | state | { state . sequence ( | state | { self :: infix_operator ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: term ( state ) } ) } ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: hidden :: skip ( state ) . and_then ( | state | { state . sequence ( | state | { self :: infix_operator ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: term ( state ) } ) } ) } ) } ) } ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn term ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: term , | state | { state . sequence ( | state | { state . sequence ( | state | { state . optional ( | state | { self :: prefix_operator ( state ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: hidden :: skip ( state ) . and_then ( | state | { self :: prefix_operator ( state ) } ) } ) } ) } ) } ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: node ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . sequence ( | state | { state . optional ( | state | { self :: postfix_operator ( state ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: hidden :: skip ( state ) . and_then ( | state | { self :: postfix_operator ( state ) } ) } ) } ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn node ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . sequence ( | state | { self :: opening_paren ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: expression ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_paren ( state ) } ) } ) . or_else ( | state | { self :: terminal ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn terminal ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: _push ( state ) . or_else ( | state | { self :: identifier ( state ) } ) . or_else ( | state | { self :: string ( state ) } ) . or_else ( | state | { self :: insensitive_string ( state ) } ) . or_else ( | state | { self :: range ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn prefix_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: positive_predicate_operator ( state ) . 
or_else ( | state | { self :: negative_predicate_operator ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn infix_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: sequence_operator ( state ) . or_else ( | state | { self :: choice_operator ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn postfix_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: optional_operator ( state ) . or_else ( | state | { self :: repeat_operator ( state ) } ) . or_else ( | state | { self :: repeat_once_operator ( state ) } ) . or_else ( | state | { self :: repeat_exact ( state ) } ) . or_else ( | state | { self :: repeat_min ( state ) } ) . or_else ( | state | { self :: repeat_max ( state ) } ) . or_else ( | state | { self :: repeat_min_max ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn positive_predicate_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: positive_predicate_operator , | state | { state . match_string ( "&" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn negative_predicate_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: negative_predicate_operator , | state | { state . match_string ( "!" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn sequence_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: sequence_operator , | state | { state . match_string ( "~" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn choice_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: choice_operator , | state | { state . match_string ( "|" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn optional_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: optional_operator , | state | { state . match_string ( "?" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_operator , | state | { state . match_string ( "*" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_once_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_once_operator , | state | { state . match_string ( "+" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_exact ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_exact , | state | { state . sequence ( | state | { self :: opening_brace ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . 
and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_min ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_min , | state | { state . sequence ( | state | { self :: opening_brace ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: comma ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_max ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_max , | state | { state . sequence ( | state | { self :: opening_brace ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: comma ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_min_max ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_min_max , | state | { state . sequence ( | state | { self :: opening_brace ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: comma ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn number ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: number , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . match_range ( '0' .. '9' ) . and_then ( | state | { state . repeat ( | state | { state . match_range ( '0' .. '9' ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn comma ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: comma , | state | { state . match_string ( "," ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn _push ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: _push , | state | { state . sequence ( | state | { state . match_string ( "PUSH" ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: opening_paren ( state ) } ) . 
and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: expression ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_paren ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn identifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: identifier , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . lookahead ( false , | state | { state . match_string ( "PUSH" ) } ) . and_then ( | state | { state . match_string ( "_" ) . or_else ( | state | { self :: alpha ( state ) } ) } ) . and_then ( | state | { state . repeat ( | state | { state . match_string ( "_" ) . or_else ( | state | { self :: alpha_num ( state ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn alpha ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . match_range ( 'a' .. 'z' ) . or_else ( | state | { state . match_range ( 'A' .. 'Z' ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn alpha_num ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: alpha ( state ) . or_else ( | state | { state . match_range ( '0' .. '9' ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn string ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . atomic ( :: pest :: Atomicity :: CompoundAtomic , | state | { state . rule ( Rule :: string , | state | { state . sequence ( | state | { self :: quote ( state ) . and_then ( | state | { self :: inner_str ( state ) } ) . and_then ( | state | { self :: quote ( state ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn insensitive_string ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: insensitive_string , | state | { state . sequence ( | state | { state . match_string ( "^" ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: string ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn range ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: range , | state | { state . sequence ( | state | { self :: character ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: range_operator ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: character ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn character ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . atomic ( :: pest :: Atomicity :: CompoundAtomic , | state | { state . rule ( Rule :: character , | state | { state . sequence ( | state | { self :: single_quote ( state ) . and_then ( | state | { self :: inner_chr ( state ) } ) . 
and_then ( | state | { self :: single_quote ( state ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn inner_str ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: inner_str , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { let strings = [ "\"" , "\\" ] ; state . skip_until ( & strings ) . and_then ( | state | { state . optional ( | state | { state . sequence ( | state | { self :: escape ( state ) . and_then ( | state | { self :: inner_str ( state ) } ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn inner_chr ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: inner_chr , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { self :: escape ( state ) . or_else ( | state | { self :: ANY ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn escape ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: escape , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . match_string ( "\\" ) . and_then ( | state | { state . match_string ( "\"" ) . or_else ( | state | { state . match_string ( "\\" ) } ) . or_else ( | state | { state . match_string ( "r" ) } ) . or_else ( | state | { state . match_string ( "n" ) } ) . or_else ( | state | { state . match_string ( "t" ) } ) . or_else ( | state | { state . match_string ( "0" ) } ) . or_else ( | state | { state . match_string ( "\'" ) } ) . or_else ( | state | { self :: code ( state ) } ) . or_else ( | state | { self :: unicode ( state ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn code ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: code , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . match_string ( "x" ) . and_then ( | state | { self :: hex_digit ( state ) } ) . and_then ( | state | { self :: hex_digit ( state ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn unicode ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: unicode , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . match_string ( "u" ) . and_then ( | state | { self :: opening_brace ( state ) } ) . and_then ( | state | { state . sequence ( | state | { self :: hex_digit ( state ) . and_then ( | state | { self :: hex_digit ( state ) } ) . and_then ( | state | { state . optional ( | state | { self :: hex_digit ( state ) } ) } ) . and_then ( | state | { state . optional ( | state | { self :: hex_digit ( state ) } ) } ) . and_then ( | state | { state . optional ( | state | { self :: hex_digit ( state ) } ) } ) . and_then ( | state | { state . optional ( | state | { self :: hex_digit ( state ) } ) } ) } ) } ) . 
and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn hex_digit ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: hex_digit , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . match_range ( '0' .. '9' ) . or_else ( | state | { state . match_range ( 'a' .. 'f' ) } ) . or_else ( | state | { state . match_range ( 'A' .. 'F' ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn quote ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: quote , | state | { state . match_string ( "\"" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn single_quote ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: single_quote , | state | { state . match_string ( "\'" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn range_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: range_operator , | state | { state . match_string ( ".." ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn newline ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . match_string ( "\n" ) . or_else ( | state | { state . match_string ( "\r\n" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn WHITESPACE ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . match_string ( " " ) . or_else ( | state | { state . match_string ( "\t" ) } ) . or_else ( | state | { self :: newline ( state ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn COMMENT ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . match_string ( "//" ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { state . lookahead ( false , | state | { self :: newline ( state ) } ) . and_then ( | state | { self :: ANY ( state ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( dead_code , non_snake_case , unused_variables ) ] pub fn ANY ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . skip ( 1 ) } # [ inline ] # [ allow ( dead_code , non_snake_case , unused_variables ) ] pub fn SOI ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . start_of_input ( ) } # [ inline ] # [ allow ( dead_code , non_snake_case , unused_variables ) ] pub fn EOI ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: EOI , | state | state . 
end_of_input ( ) ) } } pub use self :: visible :: * ; } :: pest :: state ( input , | state | { match rule { Rule :: grammar_rules => rules :: grammar_rules ( state ) , Rule :: grammar_rule => rules :: grammar_rule ( state ) , Rule :: assignment_operator => rules :: assignment_operator ( state ) , Rule :: opening_brace => rules :: opening_brace ( state ) , Rule :: closing_brace => rules :: closing_brace ( state ) , Rule :: opening_paren => rules :: opening_paren ( state ) , Rule :: closing_paren => rules :: closing_paren ( state ) , Rule :: modifier => rules :: modifier ( state ) , Rule :: silent_modifier => rules :: silent_modifier ( state ) , Rule :: atomic_modifier => rules :: atomic_modifier ( state ) , Rule :: compound_atomic_modifier => rules :: compound_atomic_modifier ( state ) , Rule :: non_atomic_modifier => rules :: non_atomic_modifier ( state ) , Rule :: expression => rules :: expression ( state ) , Rule :: term => rules :: term ( state ) , Rule :: node => rules :: node ( state ) , Rule :: terminal => rules :: terminal ( state ) , Rule :: prefix_operator => rules :: prefix_operator ( state ) , Rule :: infix_operator => rules :: infix_operator ( state ) , Rule :: postfix_operator => rules :: postfix_operator ( state ) , Rule :: positive_predicate_operator => rules :: positive_predicate_operator ( state ) , Rule :: negative_predicate_operator => rules :: negative_predicate_operator ( state ) , Rule :: sequence_operator => rules :: sequence_operator ( state ) , Rule :: choice_operator => rules :: choice_operator ( state ) , Rule :: optional_operator => rules :: optional_operator ( state ) , Rule :: repeat_operator => rules :: repeat_operator ( state ) , Rule :: repeat_once_operator => rules :: repeat_once_operator ( state ) , Rule :: repeat_exact => rules :: repeat_exact ( state ) , Rule :: repeat_min => rules :: repeat_min ( state ) , Rule :: repeat_max => rules :: repeat_max ( state ) , Rule :: repeat_min_max => rules :: repeat_min_max ( state ) , Rule :: number => rules :: number ( state ) , Rule :: comma => rules :: comma ( state ) , Rule :: _push => rules :: _push ( state ) , Rule :: identifier => rules :: identifier ( state ) , Rule :: alpha => rules :: alpha ( state ) , Rule :: alpha_num => rules :: alpha_num ( state ) , Rule :: string => rules :: string ( state ) , Rule :: insensitive_string => rules :: insensitive_string ( state ) , Rule :: range => rules :: range ( state ) , Rule :: character => rules :: character ( state ) , Rule :: inner_str => rules :: inner_str ( state ) , Rule :: inner_chr => rules :: inner_chr ( state ) , Rule :: escape => rules :: escape ( state ) , Rule :: code => rules :: code ( state ) , Rule :: unicode => rules :: unicode ( state ) , Rule :: hex_digit => rules :: hex_digit ( state ) , Rule :: quote => rules :: quote ( state ) , Rule :: single_quote => rules :: single_quote ( state ) , Rule :: range_operator => rules :: range_operator ( state ) , Rule :: newline => rules :: newline ( state ) , Rule :: WHITESPACE => rules :: WHITESPACE ( state ) , Rule :: COMMENT => rules :: COMMENT ( state ) , Rule :: EOI => rules :: EOI ( state ) } } ) } }
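For reference, the meta-grammar parser regenerated below by pest_derive 2.1 accepts the grammar syntax added in pest 2.1: bracketed PEEK slices (with optional signed integer bounds, see the new peek_slice, opening_brack, closing_brack and integer rules) and nested /* ... */ block comments (see block_comment). The following is a minimal sketch of driving it, assuming the generated PestParser and Rule items from this file are in scope; the grammar string is a made-up illustration, not part of this change.

    use pest::Parser;

    fn main() {
        // Hypothetical grammar text exercising the new 2.1 syntax; only an
        // illustration of what the regenerated meta-grammar parser accepts.
        let grammar = r#"
            /* block comments inside grammars are new in pest 2.1 */
            balanced = { PUSH("(") ~ inner ~ ")" }
            inner    = { PEEK[..] }  // signed bounds such as PEEK[-2..] also parse
        "#;

        // Parse the grammar text with the generated parser; grammar_rules is the
        // top-level rule, matching the signature generated above.
        match PestParser::parse(Rule::grammar_rules, grammar) {
            Ok(pairs) => println!("parsed {} top-level pairs", pairs.count()),
            Err(e) => eprintln!("grammar error:\n{}", e),
        }
    }
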
+# [ allow ( dead_code , non_camel_case_types ) ] # [ derive ( Clone , Copy , Debug , Eq , Hash , Ord , PartialEq , PartialOrd ) ] pub enum Rule { EOI , grammar_rules , grammar_rule , assignment_operator , opening_brace , closing_brace , opening_paren , closing_paren , opening_brack , closing_brack , modifier , silent_modifier , atomic_modifier , compound_atomic_modifier , non_atomic_modifier , expression , term , node , terminal , prefix_operator , infix_operator , postfix_operator , positive_predicate_operator , negative_predicate_operator , sequence_operator , choice_operator , optional_operator , repeat_operator , repeat_once_operator , repeat_exact , repeat_min , repeat_max , repeat_min_max , number , integer , comma , _push , peek_slice , identifier , alpha , alpha_num , string , insensitive_string , range , character , inner_str , inner_chr , escape , code , unicode , hex_digit , quote , single_quote , range_operator , newline , WHITESPACE , block_comment , COMMENT } impl :: pest :: Parser < Rule > for PestParser { fn parse < 'i > ( rule : Rule , input : & 'i str ) -> :: std :: result :: Result < :: pest :: iterators :: Pairs < 'i , Rule > , :: pest :: error :: Error < Rule > > { mod rules { pub mod hidden { use super :: super :: Rule ; # [ inline ] # [ allow ( dead_code , non_snake_case , unused_variables ) ] pub fn skip ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { if state . atomicity ( ) == :: pest :: Atomicity :: NonAtomic { state . sequence ( | state | { state . repeat ( | state | { super :: visible :: WHITESPACE ( state ) } ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: visible :: COMMENT ( state ) . and_then ( | state | { state . repeat ( | state | { super :: visible :: WHITESPACE ( state ) } ) } ) } ) } ) } ) } ) } else { Ok ( state ) } } } pub mod visible { use super :: super :: Rule ; # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn grammar_rules ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . sequence ( | state | { self :: SOI ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . sequence ( | state | { self :: grammar_rule ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . sequence ( | state | { state . optional ( | state | { self :: grammar_rule ( state ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: hidden :: skip ( state ) . and_then ( | state | { self :: grammar_rule ( state ) } ) } ) } ) } ) } ) } ) } ) } ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: EOI ( state ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn grammar_rule ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: grammar_rule , | state | { state . sequence ( | state | { self :: identifier ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: assignment_operator ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . optional ( | state | { self :: modifier ( state ) } ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . 
and_then ( | state | { self :: opening_brace ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: expression ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn assignment_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: assignment_operator , | state | { state . match_string ( "=" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn opening_brace ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: opening_brace , | state | { state . match_string ( "{" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn closing_brace ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: closing_brace , | state | { state . match_string ( "}" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn opening_paren ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: opening_paren , | state | { state . match_string ( "(" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn closing_paren ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: closing_paren , | state | { state . match_string ( ")" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn opening_brack ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: opening_brack , | state | { state . match_string ( "[" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn closing_brack ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: closing_brack , | state | { state . match_string ( "]" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: silent_modifier ( state ) . or_else ( | state | { self :: atomic_modifier ( state ) } ) . or_else ( | state | { self :: compound_atomic_modifier ( state ) } ) . or_else ( | state | { self :: non_atomic_modifier ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn silent_modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: silent_modifier , | state | { state . match_string ( "_" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn atomic_modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: atomic_modifier , | state | { state . 
match_string ( "@" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn compound_atomic_modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: compound_atomic_modifier , | state | { state . match_string ( "$" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn non_atomic_modifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: non_atomic_modifier , | state | { state . match_string ( "!" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn expression ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: expression , | state | { state . sequence ( | state | { self :: term ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . sequence ( | state | { state . optional ( | state | { state . sequence ( | state | { self :: infix_operator ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: term ( state ) } ) } ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: hidden :: skip ( state ) . and_then ( | state | { state . sequence ( | state | { self :: infix_operator ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: term ( state ) } ) } ) } ) } ) } ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn term ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: term , | state | { state . sequence ( | state | { state . sequence ( | state | { state . optional ( | state | { self :: prefix_operator ( state ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: hidden :: skip ( state ) . and_then ( | state | { self :: prefix_operator ( state ) } ) } ) } ) } ) } ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: node ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . sequence ( | state | { state . optional ( | state | { self :: postfix_operator ( state ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: hidden :: skip ( state ) . and_then ( | state | { self :: postfix_operator ( state ) } ) } ) } ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn node ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . sequence ( | state | { self :: opening_paren ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: expression ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_paren ( state ) } ) } ) . or_else ( | state | { self :: terminal ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn terminal ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: _push ( state ) . 
or_else ( | state | { self :: peek_slice ( state ) } ) . or_else ( | state | { self :: identifier ( state ) } ) . or_else ( | state | { self :: string ( state ) } ) . or_else ( | state | { self :: insensitive_string ( state ) } ) . or_else ( | state | { self :: range ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn prefix_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: positive_predicate_operator ( state ) . or_else ( | state | { self :: negative_predicate_operator ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn infix_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: sequence_operator ( state ) . or_else ( | state | { self :: choice_operator ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn postfix_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: optional_operator ( state ) . or_else ( | state | { self :: repeat_operator ( state ) } ) . or_else ( | state | { self :: repeat_once_operator ( state ) } ) . or_else ( | state | { self :: repeat_exact ( state ) } ) . or_else ( | state | { self :: repeat_min ( state ) } ) . or_else ( | state | { self :: repeat_max ( state ) } ) . or_else ( | state | { self :: repeat_min_max ( state ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn positive_predicate_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: positive_predicate_operator , | state | { state . match_string ( "&" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn negative_predicate_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: negative_predicate_operator , | state | { state . match_string ( "!" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn sequence_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: sequence_operator , | state | { state . match_string ( "~" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn choice_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: choice_operator , | state | { state . match_string ( "|" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn optional_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: optional_operator , | state | { state . match_string ( "?" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_operator , | state | { state . 
match_string ( "*" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_once_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_once_operator , | state | { state . match_string ( "+" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_exact ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_exact , | state | { state . sequence ( | state | { self :: opening_brace ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_min ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_min , | state | { state . sequence ( | state | { self :: opening_brace ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: comma ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_max ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_max , | state | { state . sequence ( | state | { self :: opening_brace ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: comma ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn repeat_min_max ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: repeat_min_max , | state | { state . sequence ( | state | { self :: opening_brace ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: comma ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: number ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn number ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: number , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . match_range ( '0' .. '9' ) . and_then ( | state | { state . repeat ( | state | { state . match_range ( '0' .. 
'9' ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn integer ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: integer , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { self :: number ( state ) . or_else ( | state | { state . sequence ( | state | { state . match_string ( "-" ) . and_then ( | state | { state . repeat ( | state | { state . match_string ( "0" ) } ) } ) . and_then ( | state | { state . match_range ( '1' .. '9' ) } ) . and_then ( | state | { state . optional ( | state | { self :: number ( state ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn comma ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: comma , | state | { state . match_string ( "," ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn _push ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: _push , | state | { state . sequence ( | state | { state . match_string ( "PUSH" ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: opening_paren ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: expression ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_paren ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn peek_slice ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: peek_slice , | state | { state . sequence ( | state | { state . match_string ( "PEEK" ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: opening_brack ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . optional ( | state | { self :: integer ( state ) } ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: range_operator ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . optional ( | state | { self :: integer ( state ) } ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: closing_brack ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn identifier ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: identifier , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . lookahead ( false , | state | { state . match_string ( "PUSH" ) } ) . and_then ( | state | { state . match_string ( "_" ) . or_else ( | state | { self :: alpha ( state ) } ) } ) . and_then ( | state | { state . repeat ( | state | { state . match_string ( "_" ) . 
or_else ( | state | { self :: alpha_num ( state ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn alpha ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . match_range ( 'a' .. 'z' ) . or_else ( | state | { state . match_range ( 'A' .. 'Z' ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn alpha_num ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { self :: alpha ( state ) . or_else ( | state | { state . match_range ( '0' .. '9' ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn string ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . atomic ( :: pest :: Atomicity :: CompoundAtomic , | state | { state . rule ( Rule :: string , | state | { state . sequence ( | state | { self :: quote ( state ) . and_then ( | state | { self :: inner_str ( state ) } ) . and_then ( | state | { self :: quote ( state ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn insensitive_string ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: insensitive_string , | state | { state . sequence ( | state | { state . match_string ( "^" ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: string ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn range ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: range , | state | { state . sequence ( | state | { self :: character ( state ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: range_operator ( state ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: character ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn character ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . atomic ( :: pest :: Atomicity :: CompoundAtomic , | state | { state . rule ( Rule :: character , | state | { state . sequence ( | state | { self :: single_quote ( state ) . and_then ( | state | { self :: inner_chr ( state ) } ) . and_then ( | state | { self :: single_quote ( state ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn inner_str ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: inner_str , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { let strings = [ "\"" , "\\" ] ; state . skip_until ( & strings ) . and_then ( | state | { state . optional ( | state | { state . sequence ( | state | { self :: escape ( state ) . and_then ( | state | { self :: inner_str ( state ) } ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn inner_chr ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: inner_chr , | state | { state . 
atomic ( :: pest :: Atomicity :: Atomic , | state | { self :: escape ( state ) . or_else ( | state | { self :: ANY ( state ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn escape ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: escape , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . match_string ( "\\" ) . and_then ( | state | { state . match_string ( "\"" ) . or_else ( | state | { state . match_string ( "\\" ) } ) . or_else ( | state | { state . match_string ( "r" ) } ) . or_else ( | state | { state . match_string ( "n" ) } ) . or_else ( | state | { state . match_string ( "t" ) } ) . or_else ( | state | { state . match_string ( "0" ) } ) . or_else ( | state | { state . match_string ( "\'" ) } ) . or_else ( | state | { self :: code ( state ) } ) . or_else ( | state | { self :: unicode ( state ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn code ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: code , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . match_string ( "x" ) . and_then ( | state | { self :: hex_digit ( state ) } ) . and_then ( | state | { self :: hex_digit ( state ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn unicode ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: unicode , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . sequence ( | state | { state . match_string ( "u" ) . and_then ( | state | { self :: opening_brace ( state ) } ) . and_then ( | state | { state . sequence ( | state | { self :: hex_digit ( state ) . and_then ( | state | { self :: hex_digit ( state ) } ) . and_then ( | state | { state . optional ( | state | { self :: hex_digit ( state ) } ) } ) . and_then ( | state | { state . optional ( | state | { self :: hex_digit ( state ) } ) } ) . and_then ( | state | { state . optional ( | state | { self :: hex_digit ( state ) } ) } ) . and_then ( | state | { state . optional ( | state | { self :: hex_digit ( state ) } ) } ) } ) } ) . and_then ( | state | { self :: closing_brace ( state ) } ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn hex_digit ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: hex_digit , | state | { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . match_range ( '0' .. '9' ) . or_else ( | state | { state . match_range ( 'a' .. 'f' ) } ) . or_else ( | state | { state . match_range ( 'A' .. 'F' ) } ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn quote ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: quote , | state | { state . match_string ( "\"" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn single_quote ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . 
rule ( Rule :: single_quote , | state | { state . match_string ( "\'" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn range_operator ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: range_operator , | state | { state . match_string ( ".." ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn newline ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . match_string ( "\n" ) . or_else ( | state | { state . match_string ( "\r\n" ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn WHITESPACE ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { state . match_string ( " " ) . or_else ( | state | { state . match_string ( "\t" ) } ) . or_else ( | state | { self :: newline ( state ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn block_comment ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . sequence ( | state | { state . match_string ( "/*" ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . sequence ( | state | { state . optional ( | state | { self :: block_comment ( state ) . or_else ( | state | { state . sequence ( | state | { state . lookahead ( false , | state | { state . match_string ( "*/" ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: ANY ( state ) } ) } ) } ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { super :: hidden :: skip ( state ) . and_then ( | state | { self :: block_comment ( state ) . or_else ( | state | { state . sequence ( | state | { state . lookahead ( false , | state | { state . match_string ( "*/" ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { self :: ANY ( state ) } ) } ) } ) } ) } ) } ) } ) } ) } ) } ) . and_then ( | state | { super :: hidden :: skip ( state ) } ) . and_then ( | state | { state . match_string ( "*/" ) } ) } ) } # [ inline ] # [ allow ( non_snake_case , unused_variables ) ] pub fn COMMENT ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . atomic ( :: pest :: Atomicity :: Atomic , | state | { self :: block_comment ( state ) . or_else ( | state | { state . sequence ( | state | { state . match_string ( "//" ) . and_then ( | state | { state . repeat ( | state | { state . sequence ( | state | { state . lookahead ( false , | state | { self :: newline ( state ) } ) . and_then ( | state | { self :: ANY ( state ) } ) } ) } ) } ) } ) } ) } ) } # [ inline ] # [ allow ( dead_code , non_snake_case , unused_variables ) ] pub fn SOI ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . start_of_input ( ) } # [ inline ] # [ allow ( dead_code , non_snake_case , unused_variables ) ] pub fn EOI ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . rule ( Rule :: EOI , | state | state . 
end_of_input ( ) ) } # [ inline ] # [ allow ( dead_code , non_snake_case , unused_variables ) ] pub fn ANY ( state : Box < :: pest :: ParserState < Rule >> ) -> :: pest :: ParseResult < Box < :: pest :: ParserState < Rule >> > { state . skip ( 1 ) } } pub use self :: visible :: * ; } :: pest :: state ( input , | state | { match rule { Rule :: grammar_rules => rules :: grammar_rules ( state ) , Rule :: grammar_rule => rules :: grammar_rule ( state ) , Rule :: assignment_operator => rules :: assignment_operator ( state ) , Rule :: opening_brace => rules :: opening_brace ( state ) , Rule :: closing_brace => rules :: closing_brace ( state ) , Rule :: opening_paren => rules :: opening_paren ( state ) , Rule :: closing_paren => rules :: closing_paren ( state ) , Rule :: opening_brack => rules :: opening_brack ( state ) , Rule :: closing_brack => rules :: closing_brack ( state ) , Rule :: modifier => rules :: modifier ( state ) , Rule :: silent_modifier => rules :: silent_modifier ( state ) , Rule :: atomic_modifier => rules :: atomic_modifier ( state ) , Rule :: compound_atomic_modifier => rules :: compound_atomic_modifier ( state ) , Rule :: non_atomic_modifier => rules :: non_atomic_modifier ( state ) , Rule :: expression => rules :: expression ( state ) , Rule :: term => rules :: term ( state ) , Rule :: node => rules :: node ( state ) , Rule :: terminal => rules :: terminal ( state ) , Rule :: prefix_operator => rules :: prefix_operator ( state ) , Rule :: infix_operator => rules :: infix_operator ( state ) , Rule :: postfix_operator => rules :: postfix_operator ( state ) , Rule :: positive_predicate_operator => rules :: positive_predicate_operator ( state ) , Rule :: negative_predicate_operator => rules :: negative_predicate_operator ( state ) , Rule :: sequence_operator => rules :: sequence_operator ( state ) , Rule :: choice_operator => rules :: choice_operator ( state ) , Rule :: optional_operator => rules :: optional_operator ( state ) , Rule :: repeat_operator => rules :: repeat_operator ( state ) , Rule :: repeat_once_operator => rules :: repeat_once_operator ( state ) , Rule :: repeat_exact => rules :: repeat_exact ( state ) , Rule :: repeat_min => rules :: repeat_min ( state ) , Rule :: repeat_max => rules :: repeat_max ( state ) , Rule :: repeat_min_max => rules :: repeat_min_max ( state ) , Rule :: number => rules :: number ( state ) , Rule :: integer => rules :: integer ( state ) , Rule :: comma => rules :: comma ( state ) , Rule :: _push => rules :: _push ( state ) , Rule :: peek_slice => rules :: peek_slice ( state ) , Rule :: identifier => rules :: identifier ( state ) , Rule :: alpha => rules :: alpha ( state ) , Rule :: alpha_num => rules :: alpha_num ( state ) , Rule :: string => rules :: string ( state ) , Rule :: insensitive_string => rules :: insensitive_string ( state ) , Rule :: range => rules :: range ( state ) , Rule :: character => rules :: character ( state ) , Rule :: inner_str => rules :: inner_str ( state ) , Rule :: inner_chr => rules :: inner_chr ( state ) , Rule :: escape => rules :: escape ( state ) , Rule :: code => rules :: code ( state ) , Rule :: unicode => rules :: unicode ( state ) , Rule :: hex_digit => rules :: hex_digit ( state ) , Rule :: quote => rules :: quote ( state ) , Rule :: single_quote => rules :: single_quote ( state ) , Rule :: range_operator => rules :: range_operator ( state ) , Rule :: newline => rules :: newline ( state ) , Rule :: WHITESPACE => rules :: WHITESPACE ( state ) , Rule :: block_comment => rules :: block_comment ( state ) 
, Rule :: COMMENT => rules :: COMMENT ( state ) , Rule :: EOI => rules :: EOI ( state ) } } ) } }
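For orientation only, and not part of the vendored patch: the generated grammar above backs pest_meta's parser module, and a downstream crate such as pest_generator drives it roughly as sketched below. The grammar string is made up; parser::parse, parser::consume_rules, optimizer::optimize and unwrap_or_report are the public entry points whose signatures appear elsewhere in this diff.

```rust
// Illustrative sketch only, not code from this change.
extern crate pest_meta;

use pest_meta::parser::{self, Rule};
use pest_meta::{optimizer, unwrap_or_report};

fn main() {
    // A one-rule grammar written in pest's meta-language.
    let grammar = "ident = @{ ('a'..'z')+ }";

    // Rule::grammar_rules is the entry rule dispatched by the generated code above.
    let pairs = parser::parse(Rule::grammar_rules, grammar)
        .unwrap_or_else(|e| panic!("grammar is not well-formed:\n{}", e));

    // Build and validate the AST, then run the optimizer passes touched below.
    let ast = unwrap_or_report(parser::consume_rules(pairs));
    let optimized = optimizer::optimize(ast);

    println!("{:#?}", optimized);
}
```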
diff --git a/rustc_deps/vendor/pest_meta/src/lib.rs b/rustc_deps/vendor/pest_meta/src/lib.rs
index e3adb11..1f9c5bc 100644
--- a/rustc_deps/vendor/pest_meta/src/lib.rs
+++ b/rustc_deps/vendor/pest_meta/src/lib.rs
@@ -7,6 +7,8 @@
 // option. All files in the project carrying such notice may not be copied,
 // modified, or distributed except according to those terms.
 
+#![allow(clippy::range_plus_one)]
+
 extern crate maplit;
 #[cfg(test)]
 #[macro_use]
@@ -16,15 +18,15 @@
 
 use std::fmt::Display;
 
-pub mod parser;
 pub mod ast;
 pub mod optimizer;
+pub mod parser;
 pub mod validator;
 
 pub fn unwrap_or_report<T, E>(result: Result<T, E>) -> T
 where
     E: IntoIterator,
-    E::Item: Display
+    E::Item: Display,
 {
     result.unwrap_or_else(|e| {
         panic!(
@@ -39,23 +41,94 @@
 
 #[doc(hidden)]
 pub static UNICODE_PROPERTY_NAMES: &[&str] = &[
-    /* BINARY */ "ALPHABETIC", "BIDI_CONTROL", "CASE_IGNORABLE", "CASED",
-    "CHANGES_WHEN_CASEFOLDED", "CHANGES_WHEN_CASEMAPPED", "CHANGES_WHEN_LOWERCASED",
-    "CHANGES_WHEN_TITLECASED", "CHANGES_WHEN_UPPERCASED", "DASH", "DEFAULT_IGNORABLE_CODE_POINT",
-    "DEPRECATED", "DIACRITIC", "EXTENDER", "GRAPHEME_BASE", "GRAPHEME_EXTEND", "GRAPHEME_LINK",
-    "HEX_DIGIT", "HYPHEN", "IDS_BINARY_OPERATOR", "IDS_TRINARY_OPERATOR", "ID_CONTINUE", "ID_START",
-    "IDEOGRAPHIC", "JOIN_CONTROL", "LOGICAL_ORDER_EXCEPTION", "LOWERCASE", "MATH",
-    "NONCHARACTER_CODE_POINT", "OTHER_ALPHABETIC", "OTHER_DEFAULT_IGNORABLE_CODE_POINT",
-    "OTHER_GRAPHEME_EXTEND", "OTHER_ID_CONTINUE", "OTHER_ID_START", "OTHER_LOWERCASE", "OTHER_MATH",
-    "OTHER_UPPERCASE", "PATTERN_SYNTAX", "PATTERN_WHITE_SPACE", "PREPENDED_CONCATENATION_MARK",
-    "QUOTATION_MARK", "RADICAL", "REGIONAL_INDICATOR", "SENTENCE_TERMINAL", "SOFT_DOTTED",
-    "TERMINAL_PUNCTUATION", "UNIFIED_IDEOGRAPH", "UPPERCASE", "VARIATION_SELECTOR", "WHITE_SPACE",
-    "XID_CONTINUE", "XID_START", /* CATEGORY */ "CASED_LETTER", "CLOSE_PUNCTUATION",
-    "CONNECTOR_PUNCTUATION", "CONTROL", "CURRENCY_SYMBOL", "DASH_PUNCTUATION", "DECIMAL_NUMBER",
-    "ENCLOSING_MARK", "FINAL_PUNCTUATION", "FORMAT", "INITIAL_PUNCTUATION", "LETTER",
-    "LETTER_NUMBER", "LINE_SEPARATOR", "LOWERCASE_LETTER", "MARK", "MATH_SYMBOL", "MODIFIER_LETTER",
-    "MODIFIER_SYMBOL", "NONSPACING_MARK", "NUMBER", "OPEN_PUNCTUATION", "OTHER", "OTHER_LETTER",
-    "OTHER_NUMBER", "OTHER_PUNCTUATION", "OTHER_SYMBOL", "PARAGRAPH_SEPARATOR", "PRIVATE_USE",
-    "PUNCTUATION", "SEPARATOR", "SPACE_SEPARATOR", "SPACING_MARK", "SURROGATE", "SYMBOL",
-    "TITLECASE_LETTER", "UNASSIGNED", "UPPERCASE_LETTER",
+    /* BINARY */ "ALPHABETIC",
+    "BIDI_CONTROL",
+    "CASE_IGNORABLE",
+    "CASED",
+    "CHANGES_WHEN_CASEFOLDED",
+    "CHANGES_WHEN_CASEMAPPED",
+    "CHANGES_WHEN_LOWERCASED",
+    "CHANGES_WHEN_TITLECASED",
+    "CHANGES_WHEN_UPPERCASED",
+    "DASH",
+    "DEFAULT_IGNORABLE_CODE_POINT",
+    "DEPRECATED",
+    "DIACRITIC",
+    "EXTENDER",
+    "GRAPHEME_BASE",
+    "GRAPHEME_EXTEND",
+    "GRAPHEME_LINK",
+    "HEX_DIGIT",
+    "HYPHEN",
+    "IDS_BINARY_OPERATOR",
+    "IDS_TRINARY_OPERATOR",
+    "ID_CONTINUE",
+    "ID_START",
+    "IDEOGRAPHIC",
+    "JOIN_CONTROL",
+    "LOGICAL_ORDER_EXCEPTION",
+    "LOWERCASE",
+    "MATH",
+    "NONCHARACTER_CODE_POINT",
+    "OTHER_ALPHABETIC",
+    "OTHER_DEFAULT_IGNORABLE_CODE_POINT",
+    "OTHER_GRAPHEME_EXTEND",
+    "OTHER_ID_CONTINUE",
+    "OTHER_ID_START",
+    "OTHER_LOWERCASE",
+    "OTHER_MATH",
+    "OTHER_UPPERCASE",
+    "PATTERN_SYNTAX",
+    "PATTERN_WHITE_SPACE",
+    "PREPENDED_CONCATENATION_MARK",
+    "QUOTATION_MARK",
+    "RADICAL",
+    "REGIONAL_INDICATOR",
+    "SENTENCE_TERMINAL",
+    "SOFT_DOTTED",
+    "TERMINAL_PUNCTUATION",
+    "UNIFIED_IDEOGRAPH",
+    "UPPERCASE",
+    "VARIATION_SELECTOR",
+    "WHITE_SPACE",
+    "XID_CONTINUE",
+    "XID_START",
+    /* CATEGORY */ "CASED_LETTER",
+    "CLOSE_PUNCTUATION",
+    "CONNECTOR_PUNCTUATION",
+    "CONTROL",
+    "CURRENCY_SYMBOL",
+    "DASH_PUNCTUATION",
+    "DECIMAL_NUMBER",
+    "ENCLOSING_MARK",
+    "FINAL_PUNCTUATION",
+    "FORMAT",
+    "INITIAL_PUNCTUATION",
+    "LETTER",
+    "LETTER_NUMBER",
+    "LINE_SEPARATOR",
+    "LOWERCASE_LETTER",
+    "MARK",
+    "MATH_SYMBOL",
+    "MODIFIER_LETTER",
+    "MODIFIER_SYMBOL",
+    "NONSPACING_MARK",
+    "NUMBER",
+    "OPEN_PUNCTUATION",
+    "OTHER",
+    "OTHER_LETTER",
+    "OTHER_NUMBER",
+    "OTHER_PUNCTUATION",
+    "OTHER_SYMBOL",
+    "PARAGRAPH_SEPARATOR",
+    "PRIVATE_USE",
+    "PUNCTUATION",
+    "SEPARATOR",
+    "SPACE_SEPARATOR",
+    "SPACING_MARK",
+    "SURROGATE",
+    "SYMBOL",
+    "TITLECASE_LETTER",
+    "UNASSIGNED",
+    "UPPERCASE_LETTER",
 ];
diff --git a/rustc_deps/vendor/pest_meta/src/optimizer/concatenator.rs b/rustc_deps/vendor/pest_meta/src/optimizer/concatenator.rs
index 7df7fa9..e0aab7b 100644
--- a/rustc_deps/vendor/pest_meta/src/optimizer/concatenator.rs
+++ b/rustc_deps/vendor/pest_meta/src/optimizer/concatenator.rs
@@ -21,14 +21,14 @@
                         Expr::Seq(lhs, rhs) => match (*lhs, *rhs) {
                             (Expr::Str(lhs), Expr::Str(rhs)) => Expr::Str(lhs + &rhs),
                             (Expr::Insens(lhs), Expr::Insens(rhs)) => Expr::Insens(lhs + &rhs),
-                            (lhs, rhs) => Expr::Seq(Box::new(lhs), Box::new(rhs))
+                            (lhs, rhs) => Expr::Seq(Box::new(lhs), Box::new(rhs)),
                         },
-                        expr => expr
+                        expr => expr,
                     }
                 } else {
                     expr
                 }
-            })
-        }
+            }),
+        },
     }
 }
diff --git a/rustc_deps/vendor/pest_meta/src/optimizer/factorizer.rs b/rustc_deps/vendor/pest_meta/src/optimizer/factorizer.rs
index 259e2c2..236289e 100644
--- a/rustc_deps/vendor/pest_meta/src/optimizer/factorizer.rs
+++ b/rustc_deps/vendor/pest_meta/src/optimizer/factorizer.rs
@@ -24,15 +24,15 @@
                             } else {
                                 Expr::Choice(
                                     Box::new(Expr::Seq(l1, r1)),
-                                    Box::new(Expr::Seq(l2, r2))
+                                    Box::new(Expr::Seq(l2, r2)),
                                 )
                             }
                         }
-                        (lhs, rhs) => Expr::Choice(Box::new(lhs), Box::new(rhs))
+                        (lhs, rhs) => Expr::Choice(Box::new(lhs), Box::new(rhs)),
                     },
-                    expr => expr
+                    expr => expr,
                 }
-            })
-        }
+            }),
+        },
     }
 }
diff --git a/rustc_deps/vendor/pest_meta/src/optimizer/mod.rs b/rustc_deps/vendor/pest_meta/src/optimizer/mod.rs
index 4aafe8a..7013c43 100644
--- a/rustc_deps/vendor/pest_meta/src/optimizer/mod.rs
+++ b/rustc_deps/vendor/pest_meta/src/optimizer/mod.rs
@@ -10,6 +10,14 @@
 use ast::*;
 use std::collections::HashMap;
 
+#[cfg(test)]
+macro_rules! box_tree {
+    ( $node:ident( $(                      $child:ident( $($args:tt)* )     ),+ ) ) => (
+      $node      ( $( Box::new( box_tree!( $child      ( $($args   )* ) ) ) ),+ )
+    );
+    ($expr:expr) => ($expr);
+}
+
 mod concatenator;
 mod factorizer;
 mod restorer;
@@ -42,6 +50,7 @@
             Expr::Insens(string) => OptimizedExpr::Insens(string),
             Expr::Range(start, end) => OptimizedExpr::Range(start, end),
             Expr::Ident(ident) => OptimizedExpr::Ident(ident),
+            Expr::PeekSlice(start, end) => OptimizedExpr::PeekSlice(start, end),
             Expr::PosPred(expr) => OptimizedExpr::PosPred(Box::new(to_optimized(*expr))),
             Expr::NegPred(expr) => OptimizedExpr::NegPred(Box::new(to_optimized(*expr))),
             Expr::Seq(lhs, rhs) => {
@@ -54,26 +63,33 @@
             Expr::Rep(expr) => OptimizedExpr::Rep(Box::new(to_optimized(*expr))),
             Expr::Skip(strings) => OptimizedExpr::Skip(strings),
             Expr::Push(expr) => OptimizedExpr::Push(Box::new(to_optimized(*expr))),
-            _ => unreachable!("No valid transformation to OptimizedRule")
+            Expr::RepOnce(_)
+            | Expr::RepExact(..)
+            | Expr::RepMin(..)
+            | Expr::RepMax(..)
+            | Expr::RepMinMax(..) => unreachable!("No valid transformation to OptimizedRule"),
         }
     }
 
     OptimizedRule {
         name: rule.name,
         ty: rule.ty,
-        expr: to_optimized(rule.expr)
+        expr: to_optimized(rule.expr),
     }
 }
 
-fn to_hash_map(rules: & Vec<OptimizedRule>) -> HashMap<String, OptimizedExpr> {
-    rules.iter().map(|r| (r.name.clone(), r.expr.clone())).collect()
+fn to_hash_map(rules: &[OptimizedRule]) -> HashMap<String, OptimizedExpr> {
+    rules
+        .iter()
+        .map(|r| (r.name.clone(), r.expr.clone()))
+        .collect()
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct OptimizedRule {
     pub name: String,
     pub ty: RuleType,
-    pub expr: OptimizedExpr
+    pub expr: OptimizedExpr,
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
@@ -82,6 +98,7 @@
     Insens(String),
     Range(String, String),
     Ident(String),
+    PeekSlice(i32, Option<i32>),
     PosPred(Box<OptimizedExpr>),
     NegPred(Box<OptimizedExpr>),
     Seq(Box<OptimizedExpr>, Box<OptimizedExpr>),
@@ -90,7 +107,7 @@
     Rep(Box<OptimizedExpr>),
     Skip(Vec<String>),
     Push(Box<OptimizedExpr>),
-    RestoreOnErr(Box<OptimizedExpr>)
+    RestoreOnErr(Box<OptimizedExpr>),
 }
 
 impl OptimizedExpr {
@@ -100,11 +117,11 @@
 
     pub fn map_top_down<F>(self, mut f: F) -> OptimizedExpr
     where
-        F: FnMut(OptimizedExpr) -> OptimizedExpr
+        F: FnMut(OptimizedExpr) -> OptimizedExpr,
     {
         fn map_internal<F>(expr: OptimizedExpr, f: &mut F) -> OptimizedExpr
         where
-            F: FnMut(OptimizedExpr) -> OptimizedExpr
+            F: FnMut(OptimizedExpr) -> OptimizedExpr,
         {
             let expr = f(expr);
 
@@ -140,7 +157,7 @@
                     let mapped = Box::new(map_internal(*expr, f));
                     OptimizedExpr::Push(mapped)
                 }
-                expr => expr
+                expr => expr,
             }
         }
 
@@ -149,11 +166,11 @@
 
     pub fn map_bottom_up<F>(self, mut f: F) -> OptimizedExpr
     where
-        F: FnMut(OptimizedExpr) -> OptimizedExpr
+        F: FnMut(OptimizedExpr) -> OptimizedExpr,
     {
         fn map_internal<F>(expr: OptimizedExpr, f: &mut F) -> OptimizedExpr
         where
-            F: FnMut(OptimizedExpr) -> OptimizedExpr
+            F: FnMut(OptimizedExpr) -> OptimizedExpr,
         {
             let mapped = match expr {
                 OptimizedExpr::PosPred(expr) => {
@@ -187,7 +204,7 @@
                     let mapped = Box::new(map_internal(*expr, f));
                     OptimizedExpr::Push(mapped)
                 }
-                expr => expr
+                expr => expr,
             };
 
             f(mapped)
@@ -200,7 +217,7 @@
 pub struct OptimizedExprTopDownIterator {
     current: Option<OptimizedExpr>,
     next: Option<OptimizedExpr>,
-    right_branches: Vec<OptimizedExpr>
+    right_branches: Vec<OptimizedExpr>,
 }
 
 impl OptimizedExprTopDownIterator {
@@ -208,7 +225,7 @@
         let mut iter = OptimizedExprTopDownIterator {
             current: None,
             next: None,
-            right_branches: vec![]
+            right_branches: vec![],
         };
         iter.iterate_expr(expr.clone());
         iter
@@ -261,289 +278,232 @@
 
     #[test]
     fn rotate() {
-        let rules = vec![
-            Rule {
+        let rules = {
+            use ast::Expr::*;
+            vec![Rule {
                 name: "rule".to_owned(),
                 ty: RuleType::Normal,
-                expr: Expr::Choice(
-                    Box::new(Expr::Choice(
-                        Box::new(Expr::Choice(
-                            Box::new(Expr::Str("a".to_owned())),
-                            Box::new(Expr::Str("b".to_owned()))
-                        )),
-                        Box::new(Expr::Str("c".to_owned()))
-                    )),
-                    Box::new(Expr::Str("d".to_owned()))
-                )
-            },
-        ];
-        let rotated = vec![
-            OptimizedRule {
+                expr: box_tree!(Choice(
+                    Choice(
+                        Choice(Str(String::from("a")), Str(String::from("b"))),
+                        Str(String::from("c"))
+                    ),
+                    Str(String::from("d"))
+                )),
+            }]
+        };
+        let rotated = {
+            use optimizer::OptimizedExpr::*;
+            vec![OptimizedRule {
                 name: "rule".to_owned(),
                 ty: RuleType::Normal,
-                expr: OptimizedExpr::Choice(
-                    Box::new(OptimizedExpr::Str("a".to_owned())),
-                    Box::new(OptimizedExpr::Choice(
-                        Box::new(OptimizedExpr::Str("b".to_owned())),
-                        Box::new(OptimizedExpr::Choice(
-                            Box::new(OptimizedExpr::Str("c".to_owned())),
-                            Box::new(OptimizedExpr::Str("d".to_owned()))
-                        ))
-                    ))
-                )
-            },
-        ];
+                expr: box_tree!(Choice(
+                    Str(String::from("a")),
+                    Choice(
+                        Str(String::from("b")),
+                        Choice(Str(String::from("c")), Str(String::from("d")))
+                    )
+                )),
+            }]
+        };
 
         assert_eq!(optimize(rules), rotated);
     }
 
     #[test]
     fn skip() {
-        let rules = vec![
-            Rule {
+        let rules = {
+            use ast::Expr::*;
+            vec![Rule {
                 name: "rule".to_owned(),
                 ty: RuleType::Atomic,
-                expr: Expr::Rep(Box::new(Expr::Seq(
-                    Box::new(Expr::NegPred(Box::new(Expr::Choice(
-                        Box::new(Expr::Str("a".to_owned())),
-                        Box::new(Expr::Str("b".to_owned()))
-                    )))),
-                    Box::new(Expr::Ident("ANY".to_owned()))
-                )))
-            },
-        ];
-        let skipped = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Atomic,
-                expr: OptimizedExpr::Skip(vec!["a".to_owned(), "b".to_owned()])
-            },
-        ];
+                expr: box_tree!(Rep(Seq(
+                    NegPred(Choice(Str(String::from("a")), Str(String::from("b")))),
+                    Ident(String::from("ANY"))
+                ))),
+            }]
+        };
+        let skipped = vec![OptimizedRule {
+            name: "rule".to_owned(),
+            ty: RuleType::Atomic,
+            expr: OptimizedExpr::Skip(vec![String::from("a"), String::from("b")]),
+        }];
 
         assert_eq!(optimize(rules), skipped);
     }
 
     #[test]
     fn concat_strings() {
-        let rules = vec![
-            Rule {
+        let rules = {
+            use ast::Expr::*;
+            vec![Rule {
                 name: "rule".to_owned(),
                 ty: RuleType::Atomic,
-                expr: Expr::Seq(
-                    Box::new(Expr::Seq(
-                        Box::new(Expr::Str("a".to_owned())),
-                        Box::new(Expr::Str("b".to_owned()))
-                    )),
-                    Box::new(Expr::Seq(
-                        Box::new(Expr::Str("c".to_owned())),
-                        Box::new(Expr::Str("d".to_owned()))
-                    ))
-                )
-            },
-        ];
-        let concatenated = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Atomic,
-                expr: OptimizedExpr::Str("abcd".to_owned())
-            },
-        ];
+                expr: box_tree!(Seq(
+                    Seq(Str(String::from("a")), Str(String::from("b"))),
+                    Seq(Str(String::from("c")), Str(String::from("d")))
+                )),
+            }]
+        };
+        let concatenated = vec![OptimizedRule {
+            name: "rule".to_owned(),
+            ty: RuleType::Atomic,
+            expr: OptimizedExpr::Str(String::from("abcd")),
+        }];
 
         assert_eq!(optimize(rules), concatenated);
     }
 
     #[test]
     fn unroll_loop_exact() {
-        let rules = vec![
-            Rule {
+        let rules = vec![Rule {
+            name: "rule".to_owned(),
+            ty: RuleType::Atomic,
+            expr: Expr::RepExact(Box::new(Expr::Ident(String::from("a"))), 3),
+        }];
+        let unrolled = {
+            use optimizer::OptimizedExpr::*;
+            vec![OptimizedRule {
                 name: "rule".to_owned(),
                 ty: RuleType::Atomic,
-                expr: Expr::RepExact(Box::new(Expr::Ident("a".to_owned())), 3)
-            },
-        ];
-        let unrolled = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Atomic,
-                expr: OptimizedExpr::Seq(
-                    Box::new(OptimizedExpr::Ident("a".to_owned())),
-                    Box::new(OptimizedExpr::Seq(
-                        Box::new(OptimizedExpr::Ident("a".to_owned())),
-                        Box::new(OptimizedExpr::Ident("a".to_owned()))
-                    ))
-                )
-            },
-        ];
+                expr: box_tree!(Seq(
+                    Ident(String::from("a")),
+                    Seq(Ident(String::from("a")), Ident(String::from("a")))
+                )),
+            }]
+        };
 
         assert_eq!(optimize(rules), unrolled);
     }
 
     #[test]
     fn unroll_loop_max() {
-        let rules = vec![
-            Rule {
+        let rules = vec![Rule {
+            name: "rule".to_owned(),
+            ty: RuleType::Atomic,
+            expr: Expr::RepMax(Box::new(Expr::Str("a".to_owned())), 3),
+        }];
+        let unrolled = {
+            use optimizer::OptimizedExpr::*;
+            vec![OptimizedRule {
                 name: "rule".to_owned(),
                 ty: RuleType::Atomic,
-                expr: Expr::RepMax(Box::new(Expr::Str("a".to_owned())), 3)
-            },
-        ];
-        let unrolled = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Atomic,
-                expr: OptimizedExpr::Seq(
-                    Box::new(OptimizedExpr::Opt(Box::new(OptimizedExpr::Str(
-                        "a".to_owned()
-                    )))),
-                    Box::new(OptimizedExpr::Seq(
-                        Box::new(OptimizedExpr::Opt(Box::new(OptimizedExpr::Str(
-                            "a".to_owned()
-                        )))),
-                        Box::new(OptimizedExpr::Opt(Box::new(OptimizedExpr::Str(
-                            "a".to_owned()
-                        ))))
-                    ))
-                )
-            },
-        ];
+                expr: box_tree!(Seq(
+                    Opt(Str(String::from("a"))),
+                    Seq(Opt(Str(String::from("a"))), Opt(Str(String::from("a"))))
+                )),
+            }]
+        };
 
         assert_eq!(optimize(rules), unrolled);
     }
 
     #[test]
     fn unroll_loop_min() {
-        let rules = vec![
-            Rule {
+        let rules = vec![Rule {
+            name: "rule".to_owned(),
+            ty: RuleType::Atomic,
+            expr: Expr::RepMin(Box::new(Expr::Str("a".to_owned())), 2),
+        }];
+        let unrolled = {
+            use optimizer::OptimizedExpr::*;
+            vec![OptimizedRule {
                 name: "rule".to_owned(),
                 ty: RuleType::Atomic,
-                expr: Expr::RepMin(Box::new(Expr::Str("a".to_owned())), 2)
-            },
-        ];
-        let unrolled = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Atomic,
-                expr: OptimizedExpr::Seq(
-                    Box::new(OptimizedExpr::Str("a".to_owned())),
-                    Box::new(OptimizedExpr::Seq(
-                        Box::new(OptimizedExpr::Str("a".to_owned())),
-                        Box::new(OptimizedExpr::Rep(Box::new(OptimizedExpr::Str(
-                            "a".to_owned()
-                        ))))
-                    ))
-                )
-            },
-        ];
+                expr: box_tree!(Seq(
+                    Str(String::from("a")),
+                    Seq(Str(String::from("a")), Rep(Str(String::from("a"))))
+                )),
+            }]
+        };
 
         assert_eq!(optimize(rules), unrolled);
     }
 
     #[test]
     fn unroll_loop_min_max() {
-        let rules = vec![
-            Rule {
+        let rules = vec![Rule {
+            name: "rule".to_owned(),
+            ty: RuleType::Atomic,
+            expr: Expr::RepMinMax(Box::new(Expr::Str("a".to_owned())), 2, 3),
+        }];
+        let unrolled = {
+            use optimizer::OptimizedExpr::*;
+            vec![OptimizedRule {
                 name: "rule".to_owned(),
                 ty: RuleType::Atomic,
-                expr: Expr::RepMinMax(Box::new(Expr::Str("a".to_owned())), 2, 3)
-            },
-        ];
-        let unrolled = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Atomic,
-                expr: OptimizedExpr::Seq(
-                    /* TODO possible room for improvement here:
-                     * if the sequences were rolled out in the opposite
-                     * order, we could further optimize the strings
-                     * in cases like this.
-                    Box::new(Expr::Str("aa".to_owned())),
-                    Box::new(Expr::Opt(
-                        Box::new(Expr::Str("a".to_owned()))
-                    ))
-                    */
-                    Box::new(OptimizedExpr::Str("a".to_owned())),
-                    Box::new(OptimizedExpr::Seq(
-                        Box::new(OptimizedExpr::Str("a".to_owned())),
-                        Box::new(OptimizedExpr::Opt(Box::new(OptimizedExpr::Str(
-                            "a".to_owned()
-                        ))))
-                    ))
-                )
-            },
-        ];
+                /* TODO possible room for improvement here:
+                 * if the sequences were rolled out in the opposite
+                 * order, we could further optimize the strings
+                 * in cases like this.
+                Str(String::from(("aa")),
+                Opt(Str(String::from("a")))
+                */
+                expr: box_tree!(Seq(
+                    Str(String::from("a")),
+                    Seq(Str(String::from("a")), Opt(Str(String::from("a"))))
+                )),
+            }]
+        };
 
         assert_eq!(optimize(rules), unrolled);
     }
 
     #[test]
     fn concat_insensitive_strings() {
-        let rules = vec![
-            Rule {
+        let rules = {
+            use ast::Expr::*;
+            vec![Rule {
                 name: "rule".to_owned(),
                 ty: RuleType::Atomic,
-                expr: Expr::Seq(
-                    Box::new(Expr::Seq(
-                        Box::new(Expr::Insens("a".to_owned())),
-                        Box::new(Expr::Insens("b".to_owned()))
-                    )),
-                    Box::new(Expr::Seq(
-                        Box::new(Expr::Insens("c".to_owned())),
-                        Box::new(Expr::Insens("d".to_owned()))
-                    ))
-                )
-            },
-        ];
-        let concatenated = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Atomic,
-                expr: OptimizedExpr::Insens("abcd".to_owned())
-            },
-        ];
+                expr: box_tree!(Seq(
+                    Seq(Insens(String::from("a")), Insens(String::from("b"))),
+                    Seq(Insens(String::from("c")), Insens(String::from("d")))
+                )),
+            }]
+        };
+        let concatenated = vec![OptimizedRule {
+            name: "rule".to_owned(),
+            ty: RuleType::Atomic,
+            expr: OptimizedExpr::Insens(String::from("abcd")),
+        }];
 
         assert_eq!(optimize(rules), concatenated);
     }
 
     #[test]
     fn long_common_sequence() {
-        let rules = vec![
-            Rule {
+        let rules = {
+            use ast::Expr::*;
+            vec![Rule {
                 name: "rule".to_owned(),
                 ty: RuleType::Silent,
-                expr: Expr::Choice(
-                    Box::new(Expr::Seq(
-                        Box::new(Expr::Ident("a".to_owned())),
-                        Box::new(Expr::Seq(
-                            Box::new(Expr::Ident("b".to_owned())),
-                            Box::new(Expr::Ident("c".to_owned()))
-                        ))
-                    )),
-                    Box::new(Expr::Seq(
-                        Box::new(Expr::Seq(
-                            Box::new(Expr::Ident("a".to_owned())),
-                            Box::new(Expr::Ident("b".to_owned()))
-                        )),
-                        Box::new(Expr::Ident("d".to_owned()))
-                    ))
-                )
-            },
-        ];
-        let optimized = vec![
-            OptimizedRule {
+                expr: box_tree!(Choice(
+                    Seq(
+                        Ident(String::from("a")),
+                        Seq(Ident(String::from("b")), Ident(String::from("c")))
+                    ),
+                    Seq(
+                        Seq(Ident(String::from("a")), Ident(String::from("b"))),
+                        Ident(String::from("d"))
+                    )
+                )),
+            }]
+        };
+        let optimized = {
+            use optimizer::OptimizedExpr::*;
+            vec![OptimizedRule {
                 name: "rule".to_owned(),
                 ty: RuleType::Silent,
-                expr: OptimizedExpr::Seq(
-                    Box::new(OptimizedExpr::Ident("a".to_owned())),
-                    Box::new(OptimizedExpr::Seq(
-                        Box::new(OptimizedExpr::Ident("b".to_owned())),
-                        Box::new(OptimizedExpr::Choice(
-                            Box::new(OptimizedExpr::Ident("c".to_owned())),
-                            Box::new(OptimizedExpr::Ident("d".to_owned()))
-                        ))
-                    ))
-                )
-            },
-        ];
+                expr: box_tree!(Seq(
+                    Ident(String::from("a")),
+                    Seq(
+                        Ident(String::from("b")),
+                        Choice(Ident(String::from("c")), Ident(String::from("d")))
+                    )
+                )),
+            }]
+        };
 
         assert_eq!(optimize(rules), optimized);
     }
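The tests above now build expression trees with the test-only box_tree! macro added at the top of this file instead of nesting Box::new calls by hand. Below is a self-contained sketch of what the macro does; the Expr enum here is a stand-in for ast::Expr, not the real type.

```rust
// Stand-in enum and a copy of the macro's shape, for illustration only.
#[derive(Debug, PartialEq)]
enum Expr {
    Str(String),
    Choice(Box<Expr>, Box<Expr>),
}

macro_rules! box_tree {
    ( $node:ident( $( $child:ident( $($args:tt)* ) ),+ ) ) => (
        $node( $( Box::new( box_tree!( $child( $($args)* ) ) ) ),+ )
    );
    ($expr:expr) => ($expr);
}

fn main() {
    use Expr::*;

    // The macro inserts the Box::new wrappers that the old tests spelled out by hand.
    let via_macro = box_tree!(Choice(Str(String::from("a")), Str(String::from("b"))));
    let by_hand = Choice(
        Box::new(Str(String::from("a"))),
        Box::new(Str(String::from("b"))),
    );

    assert_eq!(via_macro, by_hand);
}
```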
diff --git a/rustc_deps/vendor/pest_meta/src/optimizer/restorer.rs b/rustc_deps/vendor/pest_meta/src/optimizer/restorer.rs
index 3ea36f1..34a710e 100644
--- a/rustc_deps/vendor/pest_meta/src/optimizer/restorer.rs
+++ b/rustc_deps/vendor/pest_meta/src/optimizer/restorer.rs
@@ -12,7 +12,7 @@
 
 pub fn restore_on_err(
     rule: OptimizedRule,
-    rules: &HashMap<String, OptimizedExpr>
+    rules: &HashMap<String, OptimizedExpr>,
 ) -> OptimizedRule {
     match rule {
         OptimizedRule { name, ty, expr } => {
@@ -25,7 +25,7 @@
 
 fn wrap_branching_exprs(
     expr: OptimizedExpr,
-    rules: &HashMap<String, OptimizedExpr>
+    rules: &HashMap<String, OptimizedExpr>,
 ) -> OptimizedExpr {
     match expr {
         OptimizedExpr::Opt(expr) => {
@@ -55,42 +55,41 @@
                 OptimizedExpr::Rep(expr)
             }
         }
-        _ => expr
+        _ => expr,
     }
 }
 
 fn child_modifies_state(
     expr: &OptimizedExpr,
     rules: &HashMap<String, OptimizedExpr>,
-    cache: &mut HashMap<String, Option<bool>>
+    cache: &mut HashMap<String, Option<bool>>,
 ) -> bool {
     expr.iter_top_down().any(|expr| match expr {
         OptimizedExpr::Push(_) => true,
+        OptimizedExpr::Ident(ref name) if name == "DROP" => true,
         OptimizedExpr::Ident(ref name) if name == "POP" => true,
-        OptimizedExpr::Ident(ref name) => {
-            match cache.get(name).map(|result| *result) {
-                Some(option) => match option {
-                    Some(cached) => cached,
-                    None => {
-                        cache.insert(name.to_owned(), Some(false));
-                        false
-                    }
-                }
+        OptimizedExpr::Ident(ref name) => match cache.get(name).cloned() {
+            Some(option) => match option {
+                Some(cached) => cached,
                 None => {
-                    cache.insert(name.to_owned(), None);
-
-                    let result = match rules.get(name) {
-                        Some(expr) => child_modifies_state(expr, rules, cache),
-                        None => false
-                    };
-
-                    cache.insert(name.to_owned(), Some(result));
-
-                    result
+                    cache.insert(name.to_owned(), Some(false));
+                    false
                 }
+            },
+            None => {
+                cache.insert(name.to_owned(), None);
+
+                let result = match rules.get(name) {
+                    Some(expr) => child_modifies_state(expr, rules, cache),
+                    None => false,
+                };
+
+                cache.insert(name.to_owned(), Some(result));
+
+                result
             }
-        }
-        _ => false
+        },
+        _ => false,
     })
 }
 
@@ -101,13 +100,11 @@
 
     #[test]
     fn restore_no_stack_children() {
-        let rules = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Normal,
-                expr: Opt(Box::new(Str(String::from("a"))))
-            },
-        ];
+        let rules = vec![OptimizedRule {
+            name: "rule".to_owned(),
+            ty: RuleType::Normal,
+            expr: box_tree!(Opt(Str("a".to_string()))),
+        }];
 
         assert_eq!(
             restore_on_err(rules[0].clone(), &to_hash_map(&rules)),
@@ -117,49 +114,44 @@
 
     #[test]
     fn restore_with_child_stack_ops() {
-        let rules = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Normal,
-                expr: Rep(Box::new(Push(Box::new(Str(String::from("a"))))))
-            },
-        ];
+        let rules = vec![OptimizedRule {
+            name: "rule".to_owned(),
+            ty: RuleType::Normal,
+            expr: box_tree!(Rep(Push(Str("a".to_string())))),
+        }];
 
         let restored = OptimizedRule {
             name: "rule".to_owned(),
             ty: RuleType::Normal,
-            expr: Rep(Box::new(RestoreOnErr(Box::new(Push(Box::new(Str(
-                String::from("a")
-            )))))))
+            expr: box_tree!(Rep(RestoreOnErr(Push(Str("a".to_string()))))),
         };
 
-        assert_eq!(restore_on_err(rules[0].clone(), &to_hash_map(&rules)), restored);
+        assert_eq!(
+            restore_on_err(rules[0].clone(), &to_hash_map(&rules)),
+            restored
+        );
     }
 
     #[test]
     fn restore_choice_branch_with_and_branch_without() {
-        let rules = vec![
-            OptimizedRule {
-                name: "rule".to_owned(),
-                ty: RuleType::Normal,
-                expr: Choice(
-                    Box::new(Push(Box::new(Str(String::from("a"))))),
-                    Box::new(Str(String::from("a")))
-                )
-            },
-        ];
+        let rules = vec![OptimizedRule {
+            name: "rule".to_owned(),
+            ty: RuleType::Normal,
+            expr: box_tree!(Choice(Push(Str("a".to_string())), Str("a".to_string()))),
+        }];
 
         let restored = OptimizedRule {
             name: "rule".to_owned(),
             ty: RuleType::Normal,
-            expr: Choice(
-                Box::new(RestoreOnErr(Box::new(Push(Box::new(Str(String::from(
-                    "a"
-                ))))))),
-                Box::new(Str(String::from("a")))
-            )
+            expr: box_tree!(Choice(
+                RestoreOnErr(Push(Str("a".to_string()))),
+                Str("a".to_string())
+            )),
         };
 
-        assert_eq!(restore_on_err(rules[0].clone(), &to_hash_map(&rules)), restored);
+        assert_eq!(
+            restore_on_err(rules[0].clone(), &to_hash_map(&rules)),
+            restored
+        );
     }
 }
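One behavioral consequence of the restorer change above: identifiers named DROP (new in pest 2.1) now count as stack-modifying, so repetitions and other branching expressions containing them get wrapped in RestoreOnErr. An illustrative check through the public optimize() entry point, not taken from the crate's test suite:

```rust
// Illustrative sketch only, not code from this change.
extern crate pest_meta;

use pest_meta::ast::{Expr, Rule, RuleType};
use pest_meta::optimizer::{optimize, OptimizedExpr};

fn main() {
    // A repetition whose body pops the stack via the DROP builtin.
    let rules = vec![Rule {
        name: "rule".to_owned(),
        ty: RuleType::Normal,
        expr: Expr::Rep(Box::new(Expr::Ident("DROP".to_owned()))),
    }];

    // With DROP treated as state-modifying, the repeated body is wrapped in RestoreOnErr.
    assert_eq!(
        optimize(rules)[0].expr,
        OptimizedExpr::Rep(Box::new(OptimizedExpr::RestoreOnErr(Box::new(
            OptimizedExpr::Ident("DROP".to_owned())
        ))))
    );
}
```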
diff --git a/rustc_deps/vendor/pest_meta/src/optimizer/rotater.rs b/rustc_deps/vendor/pest_meta/src/optimizer/rotater.rs
index 0f6ae4d..3588738 100644
--- a/rustc_deps/vendor/pest_meta/src/optimizer/rotater.rs
+++ b/rustc_deps/vendor/pest_meta/src/optimizer/rotater.rs
@@ -19,7 +19,7 @@
                     Expr::Seq(ll, lr) => {
                         rotate_internal(Expr::Seq(ll, Box::new(Expr::Seq(lr, rhs))))
                     }
-                    lhs => Expr::Seq(Box::new(lhs), rhs)
+                    lhs => Expr::Seq(Box::new(lhs), rhs),
                 }
             }
             Expr::Choice(lhs, rhs) => {
@@ -28,10 +28,10 @@
                     Expr::Choice(ll, lr) => {
                         rotate_internal(Expr::Choice(ll, Box::new(Expr::Choice(lr, rhs))))
                     }
-                    lhs => Expr::Choice(Box::new(lhs), rhs)
+                    lhs => Expr::Choice(Box::new(lhs), rhs),
                 }
             }
-            expr => expr
+            expr => expr,
         }
     }
 
@@ -39,7 +39,7 @@
         Rule { name, ty, expr } => Rule {
             name,
             ty,
-            expr: expr.map_top_down(rotate_internal)
-        }
+            expr: expr.map_top_down(rotate_internal),
+        },
     }
 }
diff --git a/rustc_deps/vendor/pest_meta/src/optimizer/skipper.rs b/rustc_deps/vendor/pest_meta/src/optimizer/skipper.rs
index 48c0f6b..f55edc0 100644
--- a/rustc_deps/vendor/pest_meta/src/optimizer/skipper.rs
+++ b/rustc_deps/vendor/pest_meta/src/optimizer/skipper.rs
@@ -24,7 +24,7 @@
                 choices.push(string);
                 Some(Expr::Skip(choices))
             }
-            _ => None
+            _ => None,
         }
     }
 
@@ -35,28 +35,23 @@
             expr: if ty == RuleType::Atomic {
                 expr.map_top_down(|expr| {
                     // TODO: Use box syntax when it gets stabilized.
-                    match expr.clone() {
-                        Expr::Rep(expr) => match *expr.clone() {
-                            Expr::Seq(lhs, rhs) => match (*lhs, *rhs) {
-                                (Expr::NegPred(expr), Expr::Ident(ident)) => {
-                                    if ident == "ANY" {
-                                        if let Some(expr) = populate_choices(*expr, vec![]) {
-                                            return expr;
-                                        }
+                    if let Expr::Rep(expr) = expr.clone() {
+                        if let Expr::Seq(lhs, rhs) = *expr.clone() {
+                            if let (Expr::NegPred(expr), Expr::Ident(ident)) = (*lhs, *rhs) {
+                                if ident == "ANY" {
+                                    if let Some(expr) = populate_choices(*expr, vec![]) {
+                                        return expr;
                                     }
                                 }
-                                _ => ()
-                            },
-                            _ => ()
-                        },
-                        _ => ()
+                            }
+                        }
                     };
 
                     expr
                 })
             } else {
                 expr
-            }
-        }
+            },
+        },
     }
 }
diff --git a/rustc_deps/vendor/pest_meta/src/optimizer/unroller.rs b/rustc_deps/vendor/pest_meta/src/optimizer/unroller.rs
index 2af052c..fff1733 100644
--- a/rustc_deps/vendor/pest_meta/src/optimizer/unroller.rs
+++ b/rustc_deps/vendor/pest_meta/src/optimizer/unroller.rs
@@ -21,7 +21,7 @@
                     .rev()
                     .fold(None, |rep, expr| match rep {
                         None => Some(expr),
-                        Some(rep) => Some(Expr::Seq(Box::new(expr), Box::new(rep)))
+                        Some(rep) => Some(Expr::Seq(Box::new(expr), Box::new(rep))),
                     })
                     .unwrap(),
                 Expr::RepMin(expr, min) => (1..min + 2)
@@ -35,7 +35,7 @@
                     .rev()
                     .fold(None, |rep, expr| match rep {
                         None => Some(expr),
-                        Some(rep) => Some(Expr::Seq(Box::new(expr), Box::new(rep)))
+                        Some(rep) => Some(Expr::Seq(Box::new(expr), Box::new(rep))),
                     })
                     .unwrap(),
                 Expr::RepMax(expr, max) => (1..max + 1)
@@ -43,7 +43,7 @@
                     .rev()
                     .fold(None, |rep, expr| match rep {
                         None => Some(expr),
-                        Some(rep) => Some(Expr::Seq(Box::new(expr), Box::new(rep)))
+                        Some(rep) => Some(Expr::Seq(Box::new(expr), Box::new(rep))),
                     })
                     .unwrap(),
                 Expr::RepMinMax(expr, min, max) => (1..max + 1)
@@ -57,11 +57,11 @@
                     .rev()
                     .fold(None, |rep, expr| match rep {
                         None => Some(expr),
-                        Some(rep) => Some(Expr::Seq(Box::new(expr), Box::new(rep)))
+                        Some(rep) => Some(Expr::Seq(Box::new(expr), Box::new(rep))),
                     })
                     .unwrap(),
-                expr => expr
-            })
-        }
+                expr => expr,
+            }),
+        },
     }
 }
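The parser.rs changes below add pest 2.1's stack-slice syntax (PEEK[start..end]) as a new PeekSlice node, matching the OptimizedExpr::PeekSlice arm added earlier in this diff; the optimizer passes carry it through unchanged. A small illustrative check, again via the public API only:

```rust
// Illustrative sketch only, not code from this change.
extern crate pest_meta;

use pest_meta::ast::{Expr, Rule, RuleType};
use pest_meta::optimizer::{optimize, OptimizedExpr};

fn main() {
    // Roughly PEEK[1..-1] in grammar syntax: slice bounds 1 and Some(-1).
    let rules = vec![Rule {
        name: "rule".to_owned(),
        ty: RuleType::Normal,
        expr: Expr::PeekSlice(1, Some(-1)),
    }];

    // None of the optimizer passes rewrite a PeekSlice node.
    assert_eq!(optimize(rules)[0].expr, OptimizedExpr::PeekSlice(1, Some(-1)));
}
```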
diff --git a/rustc_deps/vendor/pest_meta/src/parser.rs b/rustc_deps/vendor/pest_meta/src/parser.rs
index 841b171..5726079 100644
--- a/rustc_deps/vendor/pest_meta/src/parser.rs
+++ b/rustc_deps/vendor/pest_meta/src/parser.rs
@@ -10,17 +10,17 @@
 use std::char;
 use std::iter::Peekable;
 
+use pest::error::{Error, ErrorVariant};
 use pest::iterators::{Pair, Pairs};
 use pest::prec_climber::{Assoc, Operator, PrecClimber};
-use pest::{Span, Parser};
-use pest::error::{Error, ErrorVariant};
+use pest::{Parser, Span};
 
 use ast::{Expr, Rule as AstRule, RuleType};
 use validator;
 
 include!("grammar.rs");
 
-pub fn parse<'i>(rule: Rule, data: &'i str) -> Result<Pairs<Rule>, Error<Rule>> {
+pub fn parse(rule: Rule, data: &str) -> Result<Pairs<Rule>, Error<Rule>> {
     PestParser::parse(rule, data)
 }
 
@@ -29,23 +29,23 @@
     pub name: String,
     pub span: Span<'i>,
     pub ty: RuleType,
-    pub node: ParserNode<'i>
+    pub node: ParserNode<'i>,
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct ParserNode<'i> {
     pub expr: ParserExpr<'i>,
-    pub span: Span<'i>
+    pub span: Span<'i>,
 }
 
 impl<'i> ParserNode<'i> {
     pub fn filter_map_top_down<F, T>(self, mut f: F) -> Vec<T>
     where
-        F: FnMut(ParserNode<'i>) -> Option<T>
+        F: FnMut(ParserNode<'i>) -> Option<T>,
     {
         pub fn filter_internal<'i, F, T>(node: ParserNode<'i>, f: &mut F, result: &mut Vec<T>)
         where
-            F: FnMut(ParserNode<'i>) -> Option<T>
+            F: FnMut(ParserNode<'i>) -> Option<T>,
         {
             if let Some(value) = f(node.clone()) {
                 result.push(value);
@@ -91,7 +91,7 @@
                 ParserExpr::Push(node) => {
                     filter_internal(*node, f, result);
                 }
-                _ => ()
+                _ => (),
             }
         }
 
@@ -109,6 +109,7 @@
     Insens(String),
     Range(String, String),
     Ident(String),
+    PeekSlice(i32, Option<i32>),
     PosPred(Box<ParserNode<'i>>),
     NegPred(Box<ParserNode<'i>>),
     Seq(Box<ParserNode<'i>>, Box<ParserNode<'i>>),
@@ -120,10 +121,10 @@
     RepMin(Box<ParserNode<'i>>, u32),
     RepMax(Box<ParserNode<'i>>, u32),
     RepMinMax(Box<ParserNode<'i>>, u32, u32),
-    Push(Box<ParserNode<'i>>)
+    Push(Box<ParserNode<'i>>),
 }
 
-fn convert_rule<'i>(rule: ParserRule<'i>) -> AstRule {
+fn convert_rule(rule: ParserRule) -> AstRule {
     match rule {
         ParserRule { name, ty, node, .. } => {
             let expr = convert_node(node);
@@ -133,21 +134,22 @@
     }
 }
 
-fn convert_node<'i>(node: ParserNode<'i>) -> Expr {
+fn convert_node(node: ParserNode) -> Expr {
     match node.expr {
         ParserExpr::Str(string) => Expr::Str(string),
         ParserExpr::Insens(string) => Expr::Insens(string),
         ParserExpr::Range(start, end) => Expr::Range(start, end),
         ParserExpr::Ident(ident) => Expr::Ident(ident),
+        ParserExpr::PeekSlice(start, end) => Expr::PeekSlice(start, end),
         ParserExpr::PosPred(node) => Expr::PosPred(Box::new(convert_node(*node))),
         ParserExpr::NegPred(node) => Expr::NegPred(Box::new(convert_node(*node))),
         ParserExpr::Seq(node1, node2) => Expr::Seq(
             Box::new(convert_node(*node1)),
-            Box::new(convert_node(*node2))
+            Box::new(convert_node(*node2)),
         ),
         ParserExpr::Choice(node1, node2) => Expr::Choice(
             Box::new(convert_node(*node1)),
-            Box::new(convert_node(*node2))
+            Box::new(convert_node(*node2)),
         ),
         ParserExpr::Opt(node) => Expr::Opt(Box::new(convert_node(*node))),
         ParserExpr::Rep(node) => Expr::Rep(Box::new(convert_node(*node))),
@@ -158,22 +160,22 @@
         ParserExpr::RepMinMax(node, min, max) => {
             Expr::RepMinMax(Box::new(convert_node(*node)), min, max)
         }
-        ParserExpr::Push(node) => Expr::Push(Box::new(convert_node(*node)))
+        ParserExpr::Push(node) => Expr::Push(Box::new(convert_node(*node))),
     }
 }
 
-pub fn consume_rules<'i>(pairs: Pairs<'i, Rule>) -> Result<Vec<AstRule>, Vec<Error<Rule>>> {
+pub fn consume_rules(pairs: Pairs<Rule>) -> Result<Vec<AstRule>, Vec<Error<Rule>>> {
     let rules = consume_rules_with_spans(pairs)?;
     let errors = validator::validate_ast(&rules);
-    if errors.len() == 0 {
-        Ok(rules.into_iter().map(|rule| convert_rule(rule)).collect())
+    if errors.is_empty() {
+        Ok(rules.into_iter().map(convert_rule).collect())
     } else {
         Err(errors)
     }
 }
 
 fn consume_rules_with_spans<'i>(
-    pairs: Pairs<'i, Rule>
+    pairs: Pairs<'i, Rule>,
 ) -> Result<Vec<ParserRule<'i>>, Vec<Error<Rule>>> {
     let climber = PrecClimber::new(vec![
         Operator::new(Rule::choice_operator, Assoc::Left),
@@ -196,7 +198,7 @@
                     Rule::atomic_modifier => RuleType::Atomic,
                     Rule::compound_atomic_modifier => RuleType::CompoundAtomic,
                     Rule::non_atomic_modifier => RuleType::NonAtomic,
-                    _ => unreachable!()
+                    _ => unreachable!(),
                 }
             } else {
                 RuleType::Normal
@@ -210,7 +212,7 @@
                 name,
                 span,
                 ty,
-                node
+                node,
             })
         })
         .collect()
@@ -218,11 +220,11 @@
 
 fn consume_expr<'i>(
     pairs: Peekable<Pairs<'i, Rule>>,
-    climber: &PrecClimber<Rule>
+    climber: &PrecClimber<Rule>,
 ) -> Result<ParserNode<'i>, Vec<Error<Rule>>> {
     fn unaries<'i>(
         mut pairs: Peekable<Pairs<'i, Rule>>,
-        climber: &PrecClimber<Rule>
+        climber: &PrecClimber<Rule>,
     ) -> Result<ParserNode<'i>, Vec<Error<Rule>>> {
         let pair = pairs.next().unwrap();
 
@@ -233,7 +235,7 @@
 
                 ParserNode {
                     expr: node.expr,
-                    span: pair.into_span().start_pos().span(&end)
+                    span: pair.into_span().start_pos().span(&end),
                 }
             }
             Rule::positive_predicate_operator => {
@@ -242,7 +244,7 @@
 
                 ParserNode {
                     expr: ParserExpr::PosPred(Box::new(node)),
-                    span: pair.into_span().start_pos().span(&end)
+                    span: pair.into_span().start_pos().span(&end),
                 }
             }
             Rule::negative_predicate_operator => {
@@ -251,7 +253,7 @@
 
                 ParserNode {
                     expr: ParserExpr::NegPred(Box::new(node)),
-                    span: pair.into_span().start_pos().span(&end)
+                    span: pair.into_span().start_pos().span(&end),
                 }
             }
             other_rule => {
@@ -268,25 +270,51 @@
 
                         ParserNode {
                             expr: ParserExpr::Push(Box::new(node)),
-                            span: start.span(&end)
+                            span: start.span(&end),
+                        }
+                    }
+                    Rule::peek_slice => {
+                        let mut pairs = pair.clone().into_inner();
+                        pairs.next().unwrap(); // opening_brack
+                        let pair_start = pairs.next().unwrap(); // .. or integer
+                        let start: i32 = match pair_start.as_rule() {
+                            Rule::range_operator => 0,
+                            Rule::integer => {
+                                pairs.next().unwrap(); // ..
+                                pair_start.as_str().parse().unwrap()
+                            }
+                            _ => unreachable!(),
+                        };
+                        let pair_end = pairs.next().unwrap(); // integer or }
+                        let end: Option<i32> = match pair_end.as_rule() {
+                            Rule::closing_brack => None,
+                            Rule::integer => {
+                                pairs.next().unwrap(); // }
+                                Some(pair_end.as_str().parse().unwrap())
+                            }
+                            _ => unreachable!(),
+                        };
+                        ParserNode {
+                            expr: ParserExpr::PeekSlice(start, end),
+                            span: pair.into_span(),
                         }
                     }
                     Rule::identifier => ParserNode {
                         expr: ParserExpr::Ident(pair.as_str().to_owned()),
-                        span: pair.clone().into_span()
+                        span: pair.clone().into_span(),
                     },
                     Rule::string => {
                         let string = unescape(pair.as_str()).expect("incorrect string literal");
                         ParserNode {
                             expr: ParserExpr::Str(string[1..string.len() - 1].to_owned()),
-                            span: pair.clone().into_span()
+                            span: pair.clone().into_span(),
                         }
                     }
                     Rule::insensitive_string => {
                         let string = unescape(pair.as_str()).expect("incorrect string literal");
                         ParserNode {
                             expr: ParserExpr::Insens(string[2..string.len() - 1].to_owned()),
-                            span: pair.clone().into_span()
+                            span: pair.clone().into_span(),
                         }
                     }
                     Rule::range => {
@@ -302,12 +330,12 @@
                         ParserNode {
                             expr: ParserExpr::Range(
                                 start[1..start.len() - 1].to_owned(),
-                                end[1..end.len() - 1].to_owned()
+                                end[1..end.len() - 1].to_owned(),
                             ),
-                            span: start_pos.span(&end_pos)
+                            span: start_pos.span(&end_pos),
                         }
                     }
-                    _ => unreachable!()
+                    _ => unreachable!(),
                 };
 
                 pairs.fold(
@@ -320,21 +348,21 @@
                                 let start = node.span.start_pos();
                                 ParserNode {
                                     expr: ParserExpr::Opt(Box::new(node)),
-                                    span: start.span(&pair.into_span().end_pos())
+                                    span: start.span(&pair.into_span().end_pos()),
                                 }
                             }
                             Rule::repeat_operator => {
                                 let start = node.span.start_pos();
                                 ParserNode {
                                     expr: ParserExpr::Rep(Box::new(node)),
-                                    span: start.span(&pair.into_span().end_pos())
+                                    span: start.span(&pair.into_span().end_pos()),
                                 }
                             }
                             Rule::repeat_once_operator => {
                                 let start = node.span.start_pos();
                                 ParserNode {
                                     expr: ParserExpr::RepOnce(Box::new(node)),
-                                    span: start.span(&pair.into_span().end_pos())
+                                    span: start.span(&pair.into_span().end_pos()),
                                 }
                             }
                             Rule::repeat_exact => {
@@ -348,18 +376,18 @@
                                 } else {
                                     return Err(vec![Error::new_from_span(
                                         ErrorVariant::CustomError {
-                                            message: "number cannot overflow u32".to_owned()
+                                            message: "number cannot overflow u32".to_owned(),
                                         },
-                                        number.into_span()
+                                        number.into_span(),
                                     )]);
                                 };
 
                                 if num == 0 {
                                     let error: Error<Rule> = Error::new_from_span(
                                         ErrorVariant::CustomError {
-                                            message: "cannot repeat 0 times".to_owned()
+                                            message: "cannot repeat 0 times".to_owned(),
                                         },
-                                        number.into_span()
+                                        number.into_span(),
                                     );
 
                                     return Err(vec![error]);
@@ -368,7 +396,7 @@
                                 let start = node.span.start_pos();
                                 ParserNode {
                                     expr: ParserExpr::RepExact(Box::new(node), num),
-                                    span: start.span(&pair.into_span().end_pos())
+                                    span: start.span(&pair.into_span().end_pos()),
                                 }
                             }
                             Rule::repeat_min => {
@@ -382,16 +410,16 @@
                                 } else {
                                     return Err(vec![Error::new_from_span(
                                         ErrorVariant::CustomError {
-                                            message: "number cannot overflow u32".to_owned()
+                                            message: "number cannot overflow u32".to_owned(),
                                         },
-                                        min_number.into_span()
+                                        min_number.into_span(),
                                     )]);
                                 };
 
                                 let start = node.span.start_pos();
                                 ParserNode {
                                     expr: ParserExpr::RepMin(Box::new(node), min),
-                                    span: start.span(&pair.into_span().end_pos())
+                                    span: start.span(&pair.into_span().end_pos()),
                                 }
                             }
                             Rule::repeat_max => {
@@ -406,18 +434,18 @@
                                 } else {
                                     return Err(vec![Error::new_from_span(
                                         ErrorVariant::CustomError {
-                                            message: "number cannot overflow u32".to_owned()
+                                            message: "number cannot overflow u32".to_owned(),
                                         },
-                                        max_number.into_span()
+                                        max_number.into_span(),
                                     )]);
                                 };
 
                                 if max == 0 {
                                     let error: Error<Rule> = Error::new_from_span(
                                         ErrorVariant::CustomError {
-                                            message: "cannot repeat 0 times".to_owned()
+                                            message: "cannot repeat 0 times".to_owned(),
                                         },
-                                        max_number.into_span()
+                                        max_number.into_span(),
                                     );
 
                                     return Err(vec![error]);
@@ -426,7 +454,7 @@
                                 let start = node.span.start_pos();
                                 ParserNode {
                                     expr: ParserExpr::RepMax(Box::new(node), max),
-                                    span: start.span(&pair.into_span().end_pos())
+                                    span: start.span(&pair.into_span().end_pos()),
                                 }
                             }
                             Rule::repeat_min_max => {
@@ -440,9 +468,9 @@
                                 } else {
                                     return Err(vec![Error::new_from_span(
                                         ErrorVariant::CustomError {
-                                            message: "number cannot overflow u32".to_owned()
+                                            message: "number cannot overflow u32".to_owned(),
                                         },
-                                        min_number.into_span()
+                                        min_number.into_span(),
                                     )]);
                                 };
 
@@ -454,18 +482,18 @@
                                 } else {
                                     return Err(vec![Error::new_from_span(
                                         ErrorVariant::CustomError {
-                                            message: "number cannot overflow u32".to_owned()
+                                            message: "number cannot overflow u32".to_owned(),
                                         },
-                                        max_number.into_span()
+                                        max_number.into_span(),
                                     )]);
                                 };
 
                                 if max == 0 {
                                     let error: Error<Rule> = Error::new_from_span(
                                         ErrorVariant::CustomError {
-                                            message: "cannot repeat 0 times".to_owned()
+                                            message: "cannot repeat 0 times".to_owned(),
                                         },
-                                        max_number.into_span()
+                                        max_number.into_span(),
                                     );
 
                                     return Err(vec![error]);
@@ -474,7 +502,7 @@
                                 let start = node.span.start_pos();
                                 ParserNode {
                                     expr: ParserExpr::RepMinMax(Box::new(node), min, max),
-                                    span: start.span(&pair.into_span().end_pos())
+                                    span: start.span(&pair.into_span().end_pos()),
                                 }
                             }
                             Rule::closing_paren => {
@@ -482,14 +510,14 @@
 
                                 ParserNode {
                                     expr: node.expr,
-                                    span: start.span(&pair.into_span().end_pos())
+                                    span: start.span(&pair.into_span().end_pos()),
                                 }
                             }
-                            _ => unreachable!()
+                            _ => unreachable!(),
                         };
 
                         Ok(node)
-                    }
+                    },
                 )?
             }
         };
@@ -500,8 +528,7 @@
     let term = |pair: Pair<'i, Rule>| unaries(pair.into_inner().peekable(), climber);
     let infix = |lhs: Result<ParserNode<'i>, Vec<Error<Rule>>>,
                  op: Pair<'i, Rule>,
-                 rhs: Result<ParserNode<'i>, Vec<Error<Rule>>>| match op.as_rule(
-    ) {
+                 rhs: Result<ParserNode<'i>, Vec<Error<Rule>>>| match op.as_rule() {
         Rule::sequence_operator => {
             let lhs = lhs?;
             let rhs = rhs?;
@@ -511,7 +538,7 @@
 
             Ok(ParserNode {
                 expr: ParserExpr::Seq(Box::new(lhs), Box::new(rhs)),
-                span: start.span(&end)
+                span: start.span(&end),
             })
         }
         Rule::choice_operator => {
@@ -523,10 +550,10 @@
 
             Ok(ParserNode {
                 expr: ParserExpr::Choice(Box::new(lhs), Box::new(rhs)),
-                span: start.span(&end)
+                span: start.span(&end),
             })
         }
-        _ => unreachable!()
+        _ => unreachable!(),
     };
 
     climber.climb(pairs, term, infix)
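The `term`/`infix` closures handed to `climber.climb` build the expression tree left-associatively: each `~` produces a `Seq` node and each `|` a `Choice` node whose span runs from the left operand's start to the right operand's end. A rough standalone sketch of that folding step, with simplified types instead of pest_meta's `ParserNode`/`ParserExpr`, no spans, and ignoring the operator precedence that the real `PrecClimber` adds:

```rust
// Simplified stand-ins for the AST nodes; not pest_meta's types.
#[derive(Debug, PartialEq)]
enum Expr {
    Ident(String),
    Seq(Box<Expr>, Box<Expr>),
    Choice(Box<Expr>, Box<Expr>),
}

#[derive(Clone, Copy)]
enum Op {
    Sequence, // `~`
    Choice,   // `|`
}

// Left-associative fold: (((first op x) op y) op z) ...
fn climb(first: Expr, rest: Vec<(Op, Expr)>) -> Expr {
    rest.into_iter().fold(first, |lhs, (op, rhs)| match op {
        Op::Sequence => Expr::Seq(Box::new(lhs), Box::new(rhs)),
        Op::Choice => Expr::Choice(Box::new(lhs), Box::new(rhs)),
    })
}

fn main() {
    // "a ~ b ~ c" folds to Seq(Seq(a, b), c).
    let ast = climb(
        Expr::Ident("a".into()),
        vec![
            (Op::Sequence, Expr::Ident("b".into())),
            (Op::Sequence, Expr::Ident("c".into())),
        ],
    );
    assert_eq!(
        ast,
        Expr::Seq(
            Box::new(Expr::Seq(
                Box::new(Expr::Ident("a".into())),
                Box::new(Expr::Ident("b".into())),
            )),
            Box::new(Expr::Ident("c".into())),
        )
    );
}
```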
@@ -580,18 +607,18 @@
 
                     result.push(char::from_u32(value)?);
                 }
-                _ => return None
+                _ => return None,
             },
             Some(c) => result.push(c),
-            None => return Some(result)
+            None => return Some(result),
         };
     }
 }
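`unescape` walks the string character by character and, for `\u{...}` escapes, converts the accumulated code point with `char::from_u32`, bailing out with `None` (via `?`) when the value is not a valid scalar. For reference, a quick standalone illustration of how `from_u32` behaves (plain Rust, not pest_meta code):

```rust
fn main() {
    // Ordinary code points convert to Some(char)...
    assert_eq!(std::char::from_u32(0x55), Some('U'));
    assert_eq!(std::char::from_u32(0x111), Some('đ'));
    // ...but surrogates and out-of-range values give None, which the
    // unescape loop propagates as an overall failure via `?`.
    assert_eq!(std::char::from_u32(0xD800), None);
    assert_eq!(std::char::from_u32(0x0011_0000), None);
}
```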
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use super::super::unwrap_or_report;
+    use super::*;
 
     #[test]
     fn rules() {
@@ -801,6 +828,74 @@
     }
 
     #[test]
+    fn peek_slice_all() {
+        parses_to! {
+            parser: PestParser,
+            input: "PEEK[..]",
+            rule: Rule::peek_slice,
+            tokens: [
+                peek_slice(0, 8, [
+                    opening_brack(4, 5),
+                    range_operator(5, 7),
+                    closing_brack(7, 8)
+                ])
+            ]
+        };
+    }
+
+    #[test]
+    fn peek_slice_start() {
+        parses_to! {
+            parser: PestParser,
+            input: "PEEK[1..]",
+            rule: Rule::peek_slice,
+            tokens: [
+                peek_slice(0, 9, [
+                    opening_brack(4, 5),
+                    integer(5, 6),
+                    range_operator(6, 8),
+                    closing_brack(8, 9)
+                ])
+            ]
+        };
+    }
+
+    #[test]
+    fn peek_slice_end() {
+        parses_to! {
+            parser: PestParser,
+            input: "PEEK[ ..-1]",
+            rule: Rule::peek_slice,
+            tokens: [
+                peek_slice(0, 11, [
+                    opening_brack(4, 5),
+                    range_operator(6, 8),
+                    integer(8, 10),
+                    closing_brack(10, 11)
+                ])
+            ]
+        };
+    }
+
+    #[test]
+    fn peek_slice_start_end() {
+        parses_to! {
+            parser: PestParser,
+            input: "PEEK[-5..10]",
+            rule: Rule::peek_slice,
+            tokens: [
+                peek_slice(0, 12, [
+                    opening_brack(4, 5),
+                    integer(5, 7),
+                    range_operator(7, 9),
+                    integer(9, 11),
+                    closing_brack(11, 12)
+                ])
+            ]
+        };
+    }
+
+    #[test]
     fn identifier() {
         parses_to! {
             parser: PestParser,
@@ -978,7 +1073,7 @@
             parser: PestParser,
             input: "a = {}",
             rule: Rule::grammar_rules,
-            positives: vec![Rule::expression],
+            positives: vec![Rule::term],
             negatives: vec![],
             pos: 5
         };
@@ -990,16 +1085,7 @@
             parser: PestParser,
             input: "a = { b ~ }",
             rule: Rule::grammar_rules,
-            positives: vec![
-                Rule::opening_paren,
-                Rule::positive_predicate_operator,
-                Rule::negative_predicate_operator,
-                Rule::_push,
-                Rule::identifier,
-                Rule::insensitive_string,
-                Rule::quote,
-                Rule::single_quote
-            ],
+            positives: vec![Rule::term],
             negatives: vec![],
             pos: 10
         };
@@ -1056,6 +1142,7 @@
                 Rule::positive_predicate_operator,
                 Rule::negative_predicate_operator,
                 Rule::_push,
+                Rule::peek_slice,
                 Rule::identifier,
                 Rule::insensitive_string,
                 Rule::quote,
@@ -1084,7 +1171,7 @@
             parser: PestParser,
             input: "a = { ^ }",
             rule: Rule::grammar_rules,
-            positives: vec![Rule::string],
+            positives: vec![Rule::quote],
             negatives: vec![],
             pos: 8
         };
@@ -1145,36 +1232,52 @@
 
         assert_eq!(
             ast,
-            vec![
-                AstRule {
-                    name: "rule".to_owned(),
-                    ty: RuleType::Silent,
-                    expr: Expr::Choice(
+            vec![AstRule {
+                name: "rule".to_owned(),
+                ty: RuleType::Silent,
+                expr: Expr::Choice(
+                    Box::new(Expr::Seq(
                         Box::new(Expr::Seq(
                             Box::new(Expr::Seq(
-                                Box::new(Expr::Seq(
-                                    Box::new(Expr::RepExact(
-                                        Box::new(Expr::Ident("a".to_owned())),
-                                        1
-                                    )),
-                                    Box::new(Expr::RepMin(Box::new(Expr::Str("a".to_owned())), 3))
-                                )),
-                                Box::new(Expr::RepMax(Box::new(Expr::Ident("b".to_owned())), 2))
+                                Box::new(Expr::RepExact(Box::new(Expr::Ident("a".to_owned())), 1)),
+                                Box::new(Expr::RepMin(Box::new(Expr::Str("a".to_owned())), 3))
                             )),
-                            Box::new(Expr::RepMinMax(Box::new(Expr::Str("b".to_owned())), 1, 2))
+                            Box::new(Expr::RepMax(Box::new(Expr::Ident("b".to_owned())), 2))
                         )),
-                        Box::new(Expr::NegPred(Box::new(Expr::Rep(Box::new(Expr::Opt(
-                            Box::new(Expr::Choice(
-                                Box::new(Expr::Insens("c".to_owned())),
-                                Box::new(Expr::Push(Box::new(Expr::Range(
-                                    "d".to_owned(),
-                                    "e".to_owned()
-                                ))))
-                            ))
-                        ))))))
-                    )
-                },
-            ]
+                        Box::new(Expr::RepMinMax(Box::new(Expr::Str("b".to_owned())), 1, 2))
+                    )),
+                    Box::new(Expr::NegPred(Box::new(Expr::Rep(Box::new(Expr::Opt(
+                        Box::new(Expr::Choice(
+                            Box::new(Expr::Insens("c".to_owned())),
+                            Box::new(Expr::Push(Box::new(Expr::Range(
+                                "d".to_owned(),
+                                "e".to_owned()
+                            ))))
+                        ))
+                    ))))))
+                )
+            },]
+        );
+    }
+
+    #[test]
+    fn ast_peek_slice() {
+        let input = "rule = _{ PEEK[-04..] ~ PEEK[..3] }";
+
+        let pairs = PestParser::parse(Rule::grammar_rules, input).unwrap();
+        let ast = consume_rules_with_spans(pairs).unwrap();
+        let ast: Vec<_> = ast.into_iter().map(|rule| convert_rule(rule)).collect();
+
+        assert_eq!(
+            ast,
+            vec![AstRule {
+                name: "rule".to_owned(),
+                ty: RuleType::Silent,
+                expr: Expr::Seq(
+                    Box::new(Expr::PeekSlice(-4, None)),
+                    Box::new(Expr::PeekSlice(0, Some(3))),
+                )
+            }],
         );
     }
 
@@ -1290,7 +1393,6 @@
         unwrap_or_report(consume_rules_with_spans(pairs));
     }
 
-
     #[test]
     fn unescape_all() {
         let string = r"a\nb\x55c\u{111}d";
diff --git a/rustc_deps/vendor/pest_meta/src/validator.rs b/rustc_deps/vendor/pest_meta/src/validator.rs
index af13902..403ff43 100644
--- a/rustc_deps/vendor/pest_meta/src/validator.rs
+++ b/rustc_deps/vendor/pest_meta/src/validator.rs
@@ -16,6 +16,7 @@
 use parser::{ParserExpr, ParserNode, ParserRule, Rule};
 use UNICODE_PROPERTY_NAMES;
 
+#[allow(clippy::needless_pass_by_value)]
 pub fn validate_pairs<'i>(pairs: Pairs<'i, Rule>) -> Result<Vec<&'i str>, Vec<Error<Rule>>> {
     let mut rust_keywords = HashSet::new();
     rust_keywords.insert("abstract");
@@ -129,7 +130,7 @@
     errors.extend(validate_already_defined(&definitions));
     errors.extend(validate_undefined(&definitions, &called_rules, &builtins));
 
-    if errors.len() > 0 {
+    if !errors.is_empty() {
         return Err(errors);
     }
 
@@ -138,12 +139,13 @@
 
     let defaults = called_rules.difference(&definitions);
 
-    Ok(defaults.into_iter().map(|string| *string).collect())
+    Ok(defaults.cloned().collect())
 }
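`called_rules.difference(&definitions)` yields an iterator of `&&str`, so the old `.map(|string| *string)` was only a manual dereference; `.cloned()` (clippy's suggestion) does the same thing more directly. A quick standalone equivalent:

```rust
use std::collections::HashSet;

fn main() {
    let called: HashSet<&str> = ["a", "b", "soi"].iter().cloned().collect();
    let defined: HashSet<&str> = ["a", "b"].iter().cloned().collect();

    // difference() iterates over &&str; cloned() turns that back into &str.
    let defaults: Vec<&str> = called.difference(&defined).cloned().collect();
    assert_eq!(defaults, vec!["soi"]);
}
```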
 
+#[allow(clippy::implicit_hasher, clippy::ptr_arg)]
 pub fn validate_rust_keywords<'i>(
     definitions: &Vec<Span<'i>>,
-    rust_keywords: &HashSet<&str>
+    rust_keywords: &HashSet<&str>,
 ) -> Vec<Error<Rule>> {
     let mut errors = vec![];
 
@@ -153,9 +155,9 @@
         if rust_keywords.contains(name) {
             errors.push(Error::new_from_span(
                 ErrorVariant::CustomError {
-                    message: format!("{} is a rust keyword", name)
+                    message: format!("{} is a rust keyword", name),
                 },
-                definition.clone()
+                definition.clone(),
             ))
         }
     }
@@ -163,9 +165,10 @@
     errors
 }
 
+#[allow(clippy::implicit_hasher, clippy::ptr_arg)]
 pub fn validate_pest_keywords<'i>(
     definitions: &Vec<Span<'i>>,
-    pest_keywords: &HashSet<&str>
+    pest_keywords: &HashSet<&str>,
 ) -> Vec<Error<Rule>> {
     let mut errors = vec![];
 
@@ -175,9 +178,9 @@
         if pest_keywords.contains(name) {
             errors.push(Error::new_from_span(
                 ErrorVariant::CustomError {
-                    message: format!("{} is a pest keyword", name)
+                    message: format!("{} is a pest keyword", name),
                 },
-                definition.clone()
+                definition.clone(),
             ))
         }
     }
@@ -185,6 +188,7 @@
     errors
 }
 
+#[allow(clippy::ptr_arg)]
 pub fn validate_already_defined<'i>(definitions: &Vec<Span<'i>>) -> Vec<Error<Rule>> {
     let mut errors = vec![];
     let mut defined = HashSet::new();
@@ -195,9 +199,9 @@
         if defined.contains(&name) {
             errors.push(Error::new_from_span(
                 ErrorVariant::CustomError {
-                    message: format!("rule {} already defined", name)
+                    message: format!("rule {} already defined", name),
                 },
-                definition.clone()
+                definition.clone(),
             ))
         } else {
             defined.insert(name);
@@ -207,10 +211,11 @@
     errors
 }
 
+#[allow(clippy::implicit_hasher, clippy::ptr_arg)]
 pub fn validate_undefined<'i>(
     definitions: &Vec<Span<'i>>,
     called_rules: &Vec<Span<'i>>,
-    builtins: &HashSet<&str>
+    builtins: &HashSet<&str>,
 ) -> Vec<Error<Rule>> {
     let mut errors = vec![];
     let definitions: HashSet<_> = definitions.iter().map(|span| span.as_str()).collect();
@@ -221,9 +226,9 @@
         if !definitions.contains(name) && !builtins.contains(name) {
             errors.push(Error::new_from_span(
                 ErrorVariant::CustomError {
-                    message: format!("rule {} is undefined", name)
+                    message: format!("rule {} is undefined", name),
                 },
-                rule.clone()
+                rule.clone(),
             ))
         }
     }
@@ -231,6 +236,7 @@
     errors
 }
 
+#[allow(clippy::ptr_arg)]
 pub fn validate_ast<'a, 'i: 'a>(rules: &'a Vec<ParserRule<'i>>) -> Vec<Error<Rule>> {
     let mut errors = vec![];
 
@@ -239,9 +245,9 @@
     errors.extend(validate_whitespace_comment(rules));
     errors.extend(validate_left_recursion(rules));
 
-    errors.sort_by_key(|error| match &error.location {
-        InputLocation::Span(span) => span.clone(),
-        _ => unreachable!()
+    errors.sort_by_key(|error| match error.location {
+        InputLocation::Span(span) => span,
+        _ => unreachable!(),
     });
 
     errors
@@ -250,7 +256,7 @@
 fn is_non_progressing<'i>(
     expr: &ParserExpr<'i>,
     rules: &HashMap<String, &ParserNode<'i>>,
-    trace: &mut Vec<String>
+    trace: &mut Vec<String>,
 ) -> bool {
     match *expr {
         ParserExpr::Str(ref string) => string == "",
@@ -281,14 +287,14 @@
             is_non_progressing(&lhs.expr, rules, trace)
                 || is_non_progressing(&rhs.expr, rules, trace)
         }
-        _ => false
+        _ => false,
     }
 }
 
 fn is_non_failing<'i>(
     expr: &ParserExpr<'i>,
     rules: &HashMap<String, &ParserNode<'i>>,
-    trace: &mut Vec<String>
+    trace: &mut Vec<String>,
 ) -> bool {
     match *expr {
         ParserExpr::Str(ref string) => string == "",
@@ -313,15 +319,15 @@
         ParserExpr::Choice(ref lhs, ref rhs) => {
             is_non_failing(&lhs.expr, rules, trace) || is_non_failing(&rhs.expr, rules, trace)
         }
-        _ => false
+        _ => false,
     }
 }
 
-fn validate_repetition<'a, 'i: 'a>(rules: &'a Vec<ParserRule<'i>>) -> Vec<Error<Rule>> {
+fn validate_repetition<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> Vec<Error<Rule>> {
     let mut result = vec![];
     let map = to_hash_map(rules);
 
-    for rule in rules.into_iter() {
+    for rule in rules {
         let mut errors = rule.node
             .clone()
             .filter_map_top_down(|node| match node.expr {
@@ -361,11 +367,11 @@
     result
 }
 
-fn validate_choices<'a, 'i: 'a>(rules: &'a Vec<ParserRule<'i>>) -> Vec<Error<Rule>> {
+fn validate_choices<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> Vec<Error<Rule>> {
     let mut result = vec![];
     let map = to_hash_map(rules);
 
-    for rule in rules.into_iter() {
+    for rule in rules {
         let mut errors = rule
             .node
             .clone()
@@ -373,7 +379,7 @@
                 ParserExpr::Choice(ref lhs, _) => {
                     let node = match lhs.expr {
                         ParserExpr::Choice(_, ref rhs) => rhs,
-                        _ => lhs
+                        _ => lhs,
                     };
 
                     if is_non_failing(&node.expr, &map, &mut vec![]) {
@@ -381,15 +387,15 @@
                             ErrorVariant::CustomError {
                                 message:
                                     "expression cannot fail; following choices cannot be reached"
-                                        .to_owned()
+                                        .to_owned(),
                             },
-                            node.span.clone()
+                            node.span.clone(),
                         ))
                     } else {
                         None
                     }
                 }
-                _ => None
+                _ => None,
             });
 
         result.append(&mut errors);
@@ -398,11 +404,11 @@
     result
 }
 
-fn validate_whitespace_comment<'a, 'i: 'a>(rules: &'a Vec<ParserRule<'i>>) -> Vec<Error<Rule>> {
+fn validate_whitespace_comment<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> Vec<Error<Rule>> {
     let map = to_hash_map(rules);
 
     rules
-        .into_iter()
+        .iter()
         .filter_map(|rule| {
             if rule.name == "whitespace" || rule.name == "comment" {
                 if is_non_failing(&rule.node.expr, &map, &mut vec![]) {
@@ -411,9 +417,9 @@
                             message: format!(
                                 "{} cannot fail and will repeat infinitely",
                                 &rule.name
-                            )
+                            ),
                         },
-                        rule.node.span.clone()
+                        rule.node.span.clone(),
                     ))
                 } else if is_non_progressing(&rule.node.expr, &map, &mut vec![]) {
                     Some(Error::new_from_span(
@@ -421,9 +427,9 @@
                             message: format!(
                                 "{} is non-progressing and will repeat infinitely",
                                 &rule.name
-                            )
+                            ),
                         },
-                        rule.node.span.clone()
+                        rule.node.span.clone(),
                     ))
                 } else {
                     None
@@ -435,19 +441,20 @@
         .collect()
 }
 
-fn validate_left_recursion<'a, 'i: 'a>(rules: &'a Vec<ParserRule<'i>>) -> Vec<Error<Rule>> {
+fn validate_left_recursion<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> Vec<Error<Rule>> {
     left_recursion(to_hash_map(rules))
 }
 
-fn to_hash_map<'a, 'i: 'a>(rules: &'a Vec<ParserRule<'i>>) -> HashMap<String, &'a ParserNode<'i>> {
+fn to_hash_map<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> HashMap<String, &'a ParserNode<'i>> {
     rules.iter().map(|r| (r.name.clone(), &r.node)).collect()
 }
 
+#[allow(clippy::needless_pass_by_value)]
 fn left_recursion<'a, 'i: 'a>(rules: HashMap<String, &'a ParserNode<'i>>) -> Vec<Error<Rule>> {
     fn check_expr<'a, 'i: 'a>(
         node: &'a ParserNode<'i>,
         rules: &'a HashMap<String, &ParserNode<'i>>,
-        trace: &mut Vec<String>
+        trace: &mut Vec<String>,
     ) -> Option<Error<Rule>> {
         match node.expr.clone() {
             ParserExpr::Ident(other) => {
@@ -492,7 +499,7 @@
                 }
             }
             ParserExpr::Choice(ref lhs, ref rhs) => {
-                check_expr(&lhs, rules, trace).or(check_expr(&rhs, rules, trace))
+                check_expr(&lhs, rules, trace).or_else(|| check_expr(&rhs, rules, trace))
             }
             ParserExpr::Rep(ref node) => check_expr(&node, rules, trace),
             ParserExpr::RepOnce(ref node) => check_expr(&node, rules, trace),
@@ -500,7 +507,7 @@
             ParserExpr::PosPred(ref node) => check_expr(&node, rules, trace),
             ParserExpr::NegPred(ref node) => check_expr(&node, rules, trace),
             ParserExpr::Push(ref node) => check_expr(&node, rules, trace),
-            _ => None
+            _ => None,
         }
     }
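One behavioural nuance in the `Choice` arm above: `Option::or(other)` evaluates its argument eagerly, so the old `check_expr(&lhs, ...).or(check_expr(&rhs, ...))` always recursed into the right-hand side even when the left side had already produced an error; `.or_else(|| ...)` only runs the closure when the receiver is `None`. A minimal illustration with a hypothetical helper:

```rust
fn expensive(calls: &mut u32) -> Option<&'static str> {
    *calls += 1;
    Some("rhs")
}

fn main() {
    let mut eager = 0;
    let mut lazy = 0;

    // `or` evaluates its argument before deciding it is not needed.
    let _ = Some("lhs").or(expensive(&mut eager));
    // `or_else` skips the closure entirely when the receiver is Some.
    let _ = Some("lhs").or_else(|| expensive(&mut lazy));

    assert_eq!(eager, 1);
    assert_eq!(lazy, 0);
}
```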
 
@@ -519,9 +526,9 @@
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use super::super::parser::{consume_rules, PestParser};
     use super::super::unwrap_or_report;
+    use super::*;
     use pest::Parser;
 
     #[test]
@@ -536,7 +543,7 @@
     fn rust_keyword() {
         let input = "let = { \"a\" }";
         unwrap_or_report(validate_pairs(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -552,7 +559,7 @@
     fn pest_keyword() {
         let input = "ANY = { \"a\" }";
         unwrap_or_report(validate_pairs(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -568,7 +575,7 @@
     fn already_defined() {
         let input = "a = { \"a\" } a = { \"a\" }";
         unwrap_or_report(validate_pairs(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -584,7 +591,7 @@
     fn undefined() {
         let input = "a = { b }";
         unwrap_or_report(validate_pairs(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -592,7 +599,7 @@
     fn valid_recursion() {
         let input = "a = { \"\" ~ \"a\"? ~ \"a\"* ~ (\"a\" | \"b\") ~ a }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -608,7 +615,7 @@
     fn non_failing_whitespace() {
         let input = "whitespace = { \"\" }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -624,7 +631,7 @@
     fn non_progressing_comment() {
         let input = "comment = { soi }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -640,7 +647,7 @@
     fn non_failing_repetition() {
         let input = "a = { (\"\")* }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -656,7 +663,7 @@
     fn indirect_non_failing_repetition() {
         let input = "a = { \"\" } b = { a* }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -672,7 +679,7 @@
     fn deep_non_failing_repetition() {
         let input = "a = { \"a\" ~ (\"b\" ~ (\"\")*) }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -688,7 +695,7 @@
     fn non_progressing_repetition() {
         let input = "a = { (\"\" ~ &\"a\" ~ !\"a\" ~ (soi | eoi))* }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -704,7 +711,7 @@
     fn indirect_non_progressing_repetition() {
         let input = "a = { !\"a\" } b = { a* }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -720,7 +727,7 @@
     fn simple_left_recursion() {
         let input = "a = { a }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -743,7 +750,7 @@
     fn indirect_left_recursion() {
         let input = "a = { b } b = { a }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -759,7 +766,7 @@
     fn non_failing_left_recursion() {
         let input = "a = { \"\" ~ \"a\"? ~ \"a\"* ~ (\"a\" | \"\") ~ a }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -775,7 +782,7 @@
     fn non_primary_choice_left_recursion() {
         let input = "a = { \"a\" | a }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -791,7 +798,7 @@
     fn lhs_non_failing_choice() {
         let input = "a = { \"a\"* | \"a\" | \"b\" }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -807,7 +814,7 @@
     fn lhs_non_failing_choice_middle() {
         let input = "a = { \"a\" | \"a\"* | \"b\" }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -830,7 +837,7 @@
     fn lhs_non_failing_nested_choices() {
         let input = "a = { b | \"a\" } b = { \"b\"* | \"c\" }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 
@@ -838,7 +845,7 @@
     fn skip_can_be_defined() {
         let input = "skip = { \"\" }";
         unwrap_or_report(consume_rules(
-            PestParser::parse(Rule::grammar_rules, input).unwrap()
+            PestParser::parse(Rule::grammar_rules, input).unwrap(),
         ));
     }
 }
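Most of the remaining validator changes are clippy cleanups: the private helpers now take `&[ParserRule]` instead of `&Vec<ParserRule>` so callers can pass any slice, `len() > 0` becomes `is_empty()`, and the public `validate_*` functions that keep their `&Vec<Span>` parameters get `#[allow(clippy::ptr_arg, ...)]` instead. A small standalone sketch of the slice-parameter change (names here are illustrative, not pest_meta's):

```rust
// Taking &[T] instead of &Vec<T> lets callers pass vectors, arrays, or
// sub-slices without an extra allocation; &Vec<T> only adds indirection.
fn count_empty(rules: &[&str]) -> usize {
    rules.iter().filter(|name| name.is_empty()).count()
}

fn main() {
    let rules = vec!["a", "", "b"];
    // A &Vec<&str> coerces to &[&str] automatically at the call site.
    assert_eq!(count_empty(&rules), 1);
    assert_eq!(count_empty(&rules[..2]), 1);

    // Clippy's len_zero lint: prefer is_empty() over `len() > 0`.
    assert!(!rules.is_empty());
}
```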
diff --git a/rustc_deps/vendor/syn-0.14.9/.cargo-checksum.json b/rustc_deps/vendor/syn-0.14.9/.cargo-checksum.json
deleted file mode 100644
index 63c7195..0000000
--- a/rustc_deps/vendor/syn-0.14.9/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.toml":"dbbc708f123c27e504bb4d7a2511cc46f8c7d64ffab74eae60acdc13f728c979","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"f033c371492a4769d377a8bf1a139adc7bf94ea00595b867a3e234eeab994c8c","README.md":"43062927a3e318771d5c2218e5066147b10cdd51cdeb240b06352506cfb7a72c","src/attr.rs":"4796108202ed2c89a2144039a145f20037465dd3cb9a8f8c712709082b889a5a","src/buffer.rs":"f600f012eaa26d412a3389032e2a78d319f7f1bbfe33ebf2b9d762274aad796b","src/data.rs":"5fbed19b7984502c9451cf446fd86b6489b9f6c52015421bb78c07e4ba294f03","src/derive.rs":"42a52cd1dea50473f3160c6515139bcaaf57c99962517eb2ac57abb04cd03688","src/error.rs":"aa215e0a8a98d85b970a9003565746be9fcdeb24a594b322e591d0ce74ed310b","src/expr.rs":"d0be324be2e4a4ba0e44173c8c4e0188c9ffff474f8cea5fab76e957e4b1e6cf","src/file.rs":"b1ae5b48a1937c475d57de8630d0a99326021e22b107a428342cf8b824330fbc","src/gen/fold.rs":"18795bc0a9a766dd16e78b725bc48c6cef901d12dc3452a8fde0d4f89ce227a3","src/gen/visit.rs":"eb9ff30320305576d0dd9a29c917f8d80e498e9da5f0f6b9f9067c3d0940752f","src/gen/visit_mut.rs":"e9d5ccebfd8eb9f29df68fec8beb694435a91106e48e0256c44a09787b58377d","src/gen_helper.rs":"d128fbd24fadfc5634976bdb9188c649f9905718c9c987a2839c3e6134b155a2","src/generics.rs":"0daf222a841f1965a25703855bbe17401b3edb0eb512ee96840f32940801551b","src/item.rs":"25e471c7c95ae6f6671f0a3413b526906e61fb8d345c7b2413b4edb0528fea24","src/lib.rs":"c733f5ea29df3abaeffc6762650feaaa9fc7060ab01277cb9f226d0970e1407d","src/lifetime.rs":"383cd564a452540f043858c636f19ed5b316a8c2752d83e53aacb5730307a754","src/lit.rs":"7ccdd679878c324ad47b0c29e76eb8d9e5d9b00e53cd3767060563bf7048eee9","src/mac.rs":"07c5aab9af440087315d58edeea733dda80bef0240463bf4d324c1ba1e2a272c","src/macros.rs":"c0bb8312f7ec422cb2c86ad2c20a609eb150fdcc38c74712e68214a4d6161994","src/op.rs":"7f20c79ac0ccc8cb129b550a305d8c57e58fdc2dd241f5fc1d1634450c26c32b","src/parse_quote.rs":"a3fbe5ad2cbeb967fbb8677f925431baa5303445a128c5284254b453284f21ab","src/parsers.rs":"ce324ac0882771b7ba24b110e55fbc2874be19cb7eb93fef7abcfb848d9c3a74","src/path.rs":"102ca22184ecfbfb21f9d82b49d0bc2d0d06fb792895a025dee1a435f9efee6e","src/punctuated.rs":"047cd8095cce72ed0ddae097a556c567ed0ca3294b5eac29a661669fd32a6748","src/spanned.rs":"70e8c7d953e76b1ce5330c73d7c0103c02a9b56e8dd8299c621fe0c91c615e23","src/synom.rs":"98b7fc3899099c5a0a6e659090c56a278237f9471f69f746404474eb5222e41a","src/token.rs":"20b25a6d572225f7e718a13b6e753b138ab9b8d840d31b6bfce5ed764eaaff8c","src/tt.rs":"e0541709fc8038a58156ef590f4449969a752aa96d7722beb41c097a83db5d21","src/ty.rs":"6888b6001c6f5908571684e90c73f43302d9e3a4186971caa8bbd72220d6c611","src/verbatim.rs":"40476ed104ae4cd16517c220becfc008b8ff0a0727d3be6001374d993b3f4b25"},"package":"261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741"}
\ No newline at end of file
diff --git a/rustc_deps/vendor/syn-0.14.9/Cargo.toml b/rustc_deps/vendor/syn-0.14.9/Cargo.toml
deleted file mode 100644
index a63b481..0000000
--- a/rustc_deps/vendor/syn-0.14.9/Cargo.toml
+++ /dev/null
@@ -1,64 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g. crates.io) dependencies
-#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
-
-[package]
-name = "syn"
-version = "0.14.9"
-authors = ["David Tolnay <dtolnay@gmail.com>"]
-include = ["/Cargo.toml", "/src/**/*.rs", "/README.md", "/LICENSE-APACHE", "/LICENSE-MIT"]
-description = "Nom parser for Rust source code"
-documentation = "https://docs.rs/syn"
-readme = "README.md"
-categories = ["development-tools::procedural-macro-helpers"]
-license = "MIT/Apache-2.0"
-repository = "https://github.com/dtolnay/syn"
-[package.metadata.docs.rs]
-all-features = true
-
-[package.metadata.playground]
-all-features = true
-
-[[example]]
-name = "dump-syntax"
-path = "examples/dump-syntax/main.rs"
-required-features = ["full", "parsing", "extra-traits"]
-[dependencies.proc-macro2]
-version = "0.4.4"
-default-features = false
-
-[dependencies.quote]
-version = "0.6"
-optional = true
-default-features = false
-
-[dependencies.unicode-xid]
-version = "0.1"
-[dev-dependencies.rayon]
-version = "1.0"
-
-[dev-dependencies.walkdir]
-version = "2.1"
-
-[features]
-clone-impls = []
-default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"]
-derive = []
-extra-traits = []
-fold = []
-full = []
-parsing = []
-printing = ["quote"]
-proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
-visit = []
-visit-mut = []
-[badges.travis-ci]
-repository = "dtolnay/syn"
diff --git a/rustc_deps/vendor/syn-0.14.9/README.md b/rustc_deps/vendor/syn-0.14.9/README.md
deleted file mode 100644
index e7a51ab..0000000
--- a/rustc_deps/vendor/syn-0.14.9/README.md
+++ /dev/null
@@ -1,282 +0,0 @@
-Nom parser for Rust source code
-===============================
-
-[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
-[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/0.14/syn/)
-[![Rustc Version 1.15+](https://img.shields.io/badge/rustc-1.15+-lightgray.svg)](https://blog.rust-lang.org/2017/02/02/Rust-1.15.html)
-
-Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
-of Rust source code.
-
-Currently this library is geared toward the [custom derive] use case but
-contains some APIs that may be useful for Rust procedural macros more generally.
-
-[custom derive]: https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md
-
-- **Data structures** — Syn provides a complete syntax tree that can represent
-  any valid Rust source code. The syntax tree is rooted at [`syn::File`] which
-  represents a full source file, but there are other entry points that may be
-  useful to procedural macros including [`syn::Item`], [`syn::Expr`] and
-  [`syn::Type`].
-
-- **Custom derives** — Of particular interest to custom derives is
-  [`syn::DeriveInput`] which is any of the three legal input items to a derive
-  macro. An example below shows using this type in a library that can derive
-  implementations of a trait of your own.
-
-- **Parser combinators** — Parsing in Syn is built on a suite of public parser
-  combinator macros that you can use for parsing any token-based syntax you
-  dream up within a `functionlike!(...)` procedural macro. Every syntax tree
-  node defined by Syn is individually parsable and may be used as a building
-  block for custom syntaxes, or you may do it all yourself working from the most
-  primitive tokens.
-
-- **Location information** — Every token parsed by Syn is associated with a
-  `Span` that tracks line and column information back to the source of that
-  token. These spans allow a procedural macro to display detailed error messages
-  pointing to all the right places in the user's code. There is an example of
-  this below.
-
-- **Feature flags** — Functionality is aggressively feature gated so your
-  procedural macros enable only what they need, and do not pay in compile time
-  for all the rest.
-
-[`syn::File`]: https://docs.rs/syn/0.14/syn/struct.File.html
-[`syn::Item`]: https://docs.rs/syn/0.14/syn/enum.Item.html
-[`syn::Expr`]: https://docs.rs/syn/0.14/syn/enum.Expr.html
-[`syn::Type`]: https://docs.rs/syn/0.14/syn/enum.Type.html
-[`syn::DeriveInput`]: https://docs.rs/syn/0.14/syn/struct.DeriveInput.html
-
-If you get stuck with anything involving procedural macros in Rust I am happy to
-provide help even if the issue is not related to Syn. Please file a ticket in
-this repo.
-
-*Version requirement: Syn supports any compiler version back to Rust's very
-first support for procedural macros in Rust 1.15.0. Some features especially
-around error reporting are only available in newer compilers or on the nightly
-channel.*
-
-[*Release notes*](https://github.com/dtolnay/syn/releases)
-
-## Example of a custom derive
-
-The canonical custom derive using Syn looks like this. We write an ordinary Rust
-function tagged with a `proc_macro_derive` attribute and the name of the trait
-we are deriving. Any time that derive appears in the user's code, the Rust
-compiler passes their data structure as tokens into our macro. We get to execute
-arbitrary Rust code to figure out what to do with those tokens, then hand some
-tokens back to the compiler to compile into the user's crate.
-
-[`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
-
-```toml
-[dependencies]
-syn = "0.14"
-quote = "0.6"
-
-[lib]
-proc-macro = true
-```
-
-```rust
-extern crate proc_macro;
-extern crate syn;
-
-#[macro_use]
-extern crate quote;
-
-use proc_macro::TokenStream;
-use syn::DeriveInput;
-
-#[proc_macro_derive(MyMacro)]
-pub fn my_macro(input: TokenStream) -> TokenStream {
-    // Parse the input tokens into a syntax tree
-    let input: DeriveInput = syn::parse(input).unwrap();
-
-    // Build the output, possibly using quasi-quotation
-    let expanded = quote! {
-        // ...
-    };
-
-    // Hand the output tokens back to the compiler
-    expanded.into()
-}
-```
-
-The [`heapsize`] example directory shows a complete working Macros 1.1
-implementation of a custom derive. It works on any Rust compiler \>=1.15.0. The
-example derives a `HeapSize` trait which computes an estimate of the amount of
-heap memory owned by a value.
-
-[`heapsize`]: examples/heapsize
-
-```rust
-pub trait HeapSize {
-    /// Total number of bytes of heap memory owned by `self`.
-    fn heap_size_of_children(&self) -> usize;
-}
-```
-
-The custom derive allows users to write `#[derive(HeapSize)]` on data structures
-in their program.
-
-```rust
-#[derive(HeapSize)]
-struct Demo<'a, T: ?Sized> {
-    a: Box<T>,
-    b: u8,
-    c: &'a str,
-    d: String,
-}
-```
-
-## Spans and error reporting
-
-The [`heapsize2`] example directory is an extension of the `heapsize` example
-that demonstrates some of the hygiene and error reporting properties of Macros
-2.0. This example currently requires a nightly Rust compiler \>=1.24.0-nightly
-but we are working to stabilize all of the APIs involved.
-
-[`heapsize2`]: examples/heapsize2
-
-The token-based procedural macro API provides great control over where the
-compiler's error messages are displayed in user code. Consider the error the
-user sees if one of their field types does not implement `HeapSize`.
-
-```rust
-#[derive(HeapSize)]
-struct Broken {
-    ok: String,
-    bad: std::thread::Thread,
-}
-```
-
-In the Macros 1.1 string-based procedural macro world, the resulting error would
-point unhelpfully to the invocation of the derive macro and not to the actual
-problematic field.
-
-```
-error[E0599]: no method named `heap_size_of_children` found for type `std::thread::Thread` in the current scope
- --> src/main.rs:4:10
-  |
-4 | #[derive(HeapSize)]
-  |          ^^^^^^^^
-```
-
-By tracking span information all the way through the expansion of a procedural
-macro as shown in the `heapsize2` example, token-based macros in Syn are able to
-trigger errors that directly pinpoint the source of the problem.
-
-```
-error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
- --> src/main.rs:7:5
-  |
-7 |     bad: std::thread::Thread,
-  |     ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread`
-```
-
-## Parsing a custom syntax using combinators
-
-The [`lazy-static`] example directory shows the implementation of a
-`functionlike!(...)` procedural macro in which the input tokens are parsed using
-[`nom`]-style parser combinators.
-
-[`lazy-static`]: examples/lazy-static
-[`nom`]: https://github.com/Geal/nom
-
-The example reimplements the popular `lazy_static` crate from crates.io as a
-procedural macro.
-
-```
-lazy_static! {
-    static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
-}
-```
-
-The implementation shows how to trigger custom warnings and error messages on
-the macro input.
-
-```
-warning: come on, pick a more creative name
-  --> src/main.rs:10:16
-   |
-10 |     static ref FOO: String = "lazy_static".to_owned();
-   |                ^^^
-```
-
-## Debugging
-
-When developing a procedural macro it can be helpful to look at what the
-generated code looks like. Use `cargo rustc -- -Zunstable-options
---pretty=expanded` or the [`cargo expand`] subcommand.
-
-[`cargo expand`]: https://github.com/dtolnay/cargo-expand
-
-To show the expanded code for some crate that uses your procedural macro, run
-`cargo expand` from that crate. To show the expanded code for one of your own
-test cases, run `cargo expand --test the_test_case` where the last argument is
-the name of the test file without the `.rs` extension.
-
-This write-up by Brandon W Maister discusses debugging in more detail:
-[Debugging Rust's new Custom Derive system][debugging].
-
-[debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
-
-## Optional features
-
-Syn puts a lot of functionality behind optional features in order to optimize
-compile time for the most common use cases. The following features are
-available.
-
-- **`derive`** *(enabled by default)* — Data structures for representing the
-  possible input to a custom derive, including structs and enums and types.
-- **`full`** — Data structures for representing the syntax tree of all valid
-  Rust source code, including items and expressions.
-- **`parsing`** *(enabled by default)* — Ability to parse input tokens into a
-  syntax tree node of a chosen type.
-- **`printing`** *(enabled by default)* — Ability to print a syntax tree node as
-  tokens of Rust source code.
-- **`visit`** — Trait for traversing a syntax tree.
-- **`visit-mut`** — Trait for traversing and mutating in place a syntax tree.
-- **`fold`** — Trait for transforming an owned syntax tree.
-- **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
-  types.
-- **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
-  types.
-- **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic
-  library libproc_macro from rustc toolchain.
-
-## Nightly features
-
-By default Syn uses the [`proc-macro2`] crate to emulate the nightly compiler's
-procedural macro API in a stable way that works all the way back to Rust 1.15.0.
-This shim makes it possible to write code without regard for whether the current
-compiler version supports the features we use.
-
-[`proc-macro2`]: https://github.com/alexcrichton/proc-macro2
-
-On a nightly compiler, to eliminate the stable shim and use the compiler's
-`proc-macro` directly, add `proc-macro2` to your Cargo.toml and set its
-`"nightly"` feature which bypasses the stable shim.
-
-```toml
-[dependencies]
-syn = "0.14"
-proc-macro2 = { version = "0.4", features = ["nightly"] }
-```
-
-## License
-
-Licensed under either of
-
- * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
- * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
-
-at your option.
-
-### Contribution
-
-Unless you explicitly state otherwise, any contribution intentionally submitted
-for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
-be dual licensed as above, without any additional terms or conditions.
diff --git a/rustc_deps/vendor/syn-0.14.9/src/attr.rs b/rustc_deps/vendor/syn-0.14.9/src/attr.rs
deleted file mode 100644
index d818b4f..0000000
--- a/rustc_deps/vendor/syn-0.14.9/src/attr.rs
+++ /dev/null
@@ -1,554 +0,0 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::*;
-use punctuated::Punctuated;
-
-use std::iter;
-
-use proc_macro2::{Delimiter, Spacing, TokenStream, TokenTree};
-
-#[cfg(feature = "extra-traits")]
-use std::hash::{Hash, Hasher};
-#[cfg(feature = "extra-traits")]
-use tt::TokenStreamHelper;
-
-ast_struct! {
-    /// An attribute like `#[repr(transparent)]`.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    ///
-    /// # Syntax
-    ///
-    /// Rust has six types of attributes.
-    ///
-    /// - Outer attributes like `#[repr(transparent)]`. These appear outside or
-    ///   in front of the item they describe.
-    /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside
-    ///   of the item they describe, usually a module.
-    /// - Outer doc comments like `/// # Example`.
-    /// - Inner doc comments like `//! Please file an issue`.
-    /// - Outer block comments `/** # Example */`.
-    /// - Inner block comments `/*! Please file an issue */`.
-    ///
-    /// The `style` field of type `AttrStyle` distinguishes whether an attribute
-    /// is outer or inner. Doc comments and block comments are promoted to
-    /// attributes that have `is_sugared_doc` set to true, as this is how they
-    /// are processed by the compiler and by `macro_rules!` macros.
-    ///
-    /// The `path` field gives the possibly colon-delimited path against which
-    /// the attribute is resolved. It is equal to `"doc"` for desugared doc
-    /// comments. The `tts` field contains the rest of the attribute body as
-    /// tokens.
-    ///
-    /// ```text
-    /// #[derive(Copy)]      #[crate::precondition x < 5]
-    ///   ^^^^^^~~~~~~         ^^^^^^^^^^^^^^^^^^^ ~~~~~
-    ///    path  tts                   path         tts
-    /// ```
-    ///
-    /// Use the [`interpret_meta`] method to try parsing the tokens of an
-    /// attribute into the structured representation that is used by convention
-    /// across most Rust libraries.
-    ///
-    /// [`interpret_meta`]: #method.interpret_meta
-    pub struct Attribute #manual_extra_traits {
-        pub pound_token: Token![#],
-        pub style: AttrStyle,
-        pub bracket_token: token::Bracket,
-        pub path: Path,
-        pub tts: TokenStream,
-        pub is_sugared_doc: bool,
-    }
-}
-
-#[cfg(feature = "extra-traits")]
-impl Eq for Attribute {}
-
-#[cfg(feature = "extra-traits")]
-impl PartialEq for Attribute {
-    fn eq(&self, other: &Self) -> bool {
-        self.style == other.style
-            && self.pound_token == other.pound_token
-            && self.bracket_token == other.bracket_token
-            && self.path == other.path
-            && TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
-            && self.is_sugared_doc == other.is_sugared_doc
-    }
-}
-
-#[cfg(feature = "extra-traits")]
-impl Hash for Attribute {
-    fn hash<H>(&self, state: &mut H)
-    where
-        H: Hasher,
-    {
-        self.style.hash(state);
-        self.pound_token.hash(state);
-        self.bracket_token.hash(state);
-        self.path.hash(state);
-        TokenStreamHelper(&self.tts).hash(state);
-        self.is_sugared_doc.hash(state);
-    }
-}
-
-impl Attribute {
-    /// Parses the tokens after the path as a [`Meta`](enum.Meta.html) if
-    /// possible.
-    pub fn interpret_meta(&self) -> Option<Meta> {
-        let name = if self.path.segments.len() == 1 {
-            &self.path.segments.first().unwrap().value().ident
-        } else {
-            return None;
-        };
-
-        if self.tts.is_empty() {
-            return Some(Meta::Word(name.clone()));
-        }
-
-        let tts = self.tts.clone().into_iter().collect::<Vec<_>>();
-
-        if tts.len() == 1 {
-            if let Some(meta) = Attribute::extract_meta_list(name.clone(), &tts[0]) {
-                return Some(meta);
-            }
-        }
-
-        if tts.len() == 2 {
-            if let Some(meta) = Attribute::extract_name_value(name.clone(), &tts[0], &tts[1]) {
-                return Some(meta);
-            }
-        }
-
-        None
-    }
-
-    fn extract_meta_list(ident: Ident, tt: &TokenTree) -> Option<Meta> {
-        let g = match *tt {
-            TokenTree::Group(ref g) => g,
-            _ => return None,
-        };
-        if g.delimiter() != Delimiter::Parenthesis {
-            return None;
-        }
-        let tokens = g.stream().clone().into_iter().collect::<Vec<_>>();
-        let nested = match list_of_nested_meta_items_from_tokens(&tokens) {
-            Some(n) => n,
-            None => return None,
-        };
-        Some(Meta::List(MetaList {
-            paren_token: token::Paren(g.span()),
-            ident: ident,
-            nested: nested,
-        }))
-    }
-
-    fn extract_name_value(ident: Ident, a: &TokenTree, b: &TokenTree) -> Option<Meta> {
-        let a = match *a {
-            TokenTree::Punct(ref o) => o,
-            _ => return None,
-        };
-        if a.spacing() != Spacing::Alone {
-            return None;
-        }
-        if a.as_char() != '=' {
-            return None;
-        }
-
-        match *b {
-            TokenTree::Literal(ref l) if !l.to_string().starts_with('/') => {
-                Some(Meta::NameValue(MetaNameValue {
-                    ident: ident,
-                    eq_token: Token![=]([a.span()]),
-                    lit: Lit::new(l.clone()),
-                }))
-            }
-            TokenTree::Ident(ref v) => match &v.to_string()[..] {
-                v @ "true" | v @ "false" => Some(Meta::NameValue(MetaNameValue {
-                    ident: ident,
-                    eq_token: Token![=]([a.span()]),
-                    lit: Lit::Bool(LitBool {
-                        value: v == "true",
-                        span: b.span(),
-                    }),
-                })),
-                _ => None,
-            },
-            _ => None,
-        }
-    }
-}
-
-fn nested_meta_item_from_tokens(tts: &[TokenTree]) -> Option<(NestedMeta, &[TokenTree])> {
-    assert!(!tts.is_empty());
-
-    match tts[0] {
-        TokenTree::Literal(ref lit) => {
-            if lit.to_string().starts_with('/') {
-                None
-            } else {
-                let lit = Lit::new(lit.clone());
-                Some((NestedMeta::Literal(lit), &tts[1..]))
-            }
-        }
-
-        TokenTree::Ident(ref ident) => {
-            if tts.len() >= 3 {
-                if let Some(meta) = Attribute::extract_name_value(ident.clone(), &tts[1], &tts[2]) {
-                    return Some((NestedMeta::Meta(meta), &tts[3..]));
-                }
-            }
-
-            if tts.len() >= 2 {
-                if let Some(meta) = Attribute::extract_meta_list(ident.clone(), &tts[1]) {
-                    return Some((NestedMeta::Meta(meta), &tts[2..]));
-                }
-            }
-
-            Some((Meta::Word(ident.clone()).into(), &tts[1..]))
-        }
-
-        _ => None,
-    }
-}
-
-fn list_of_nested_meta_items_from_tokens(
-    mut tts: &[TokenTree],
-) -> Option<Punctuated<NestedMeta, Token![,]>> {
-    let mut nested_meta_items = Punctuated::new();
-    let mut first = true;
-
-    while !tts.is_empty() {
-        let prev_comma = if first {
-            first = false;
-            None
-        } else if let TokenTree::Punct(ref op) = tts[0] {
-            if op.spacing() != Spacing::Alone {
-                return None;
-            }
-            if op.as_char() != ',' {
-                return None;
-            }
-            let tok = Token![,]([op.span()]);
-            tts = &tts[1..];
-            if tts.is_empty() {
-                break;
-            }
-            Some(tok)
-        } else {
-            return None;
-        };
-        let (nested, rest) = match nested_meta_item_from_tokens(tts) {
-            Some(pair) => pair,
-            None => return None,
-        };
-        if let Some(comma) = prev_comma {
-            nested_meta_items.push_punct(comma);
-        }
-        nested_meta_items.push_value(nested);
-        tts = rest;
-    }
-
-    Some(nested_meta_items)
-}
-
-ast_enum! {
-    /// Distinguishes between attributes that decorate an item and attributes
-    /// that are contained within an item.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    ///
-    /// # Outer attributes
-    ///
-    /// - `#[repr(transparent)]`
-    /// - `/// # Example`
-    /// - `/** Please file an issue */`
-    ///
-    /// # Inner attributes
-    ///
-    /// - `#![feature(proc_macro)]`
-    /// - `//! # Example`
-    /// - `/*! Please file an issue */`
-    #[cfg_attr(feature = "clone-impls", derive(Copy))]
-    pub enum AttrStyle {
-        Outer,
-        Inner(Token![!]),
-    }
-}
-
-ast_enum_of_structs! {
-    /// Content of a compile-time structured attribute.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    ///
-    /// ## Word
-    ///
-    /// A meta word is like the `test` in `#[test]`.
-    ///
-    /// ## List
-    ///
-    /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`.
-    ///
-    /// ## NameValue
-    ///
-    /// A name-value meta is like the `path = "..."` in `#[path =
-    /// "sys/windows.rs"]`.
-    ///
-    /// # Syntax tree enum
-    ///
-    /// This type is a [syntax tree enum].
-    ///
-    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
-    pub enum Meta {
-        pub Word(Ident),
-        /// A structured list within an attribute, like `derive(Copy, Clone)`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub List(MetaList {
-            pub ident: Ident,
-            pub paren_token: token::Paren,
-            pub nested: Punctuated<NestedMeta, Token![,]>,
-        }),
-        /// A name-value pair within an attribute, like `feature = "nightly"`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub NameValue(MetaNameValue {
-            pub ident: Ident,
-            pub eq_token: Token![=],
-            pub lit: Lit,
-        }),
-    }
-}
-
-impl Meta {
-    /// Returns the identifier that begins this structured meta item.
-    ///
-    /// For example this would return the `test` in `#[test]`, the `derive` in
-    /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
-    pub fn name(&self) -> Ident {
-        match *self {
-            Meta::Word(ref meta) => meta.clone(),
-            Meta::List(ref meta) => meta.ident.clone(),
-            Meta::NameValue(ref meta) => meta.ident.clone(),
-        }
-    }
-}
-
-ast_enum_of_structs! {
-    /// Element of a compile-time attribute list.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    pub enum NestedMeta {
-        /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
-        /// would be a nested `Meta::Word`.
-        pub Meta(Meta),
-
-        /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
-        pub Literal(Lit),
-    }
-}
-
-pub trait FilterAttrs<'a> {
-    type Ret: Iterator<Item = &'a Attribute>;
-
-    fn outer(self) -> Self::Ret;
-    fn inner(self) -> Self::Ret;
-}
-
-impl<'a, T> FilterAttrs<'a> for T
-where
-    T: IntoIterator<Item = &'a Attribute>,
-{
-    type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
-
-    fn outer(self) -> Self::Ret {
-        fn is_outer(attr: &&Attribute) -> bool {
-            match attr.style {
-                AttrStyle::Outer => true,
-                _ => false,
-            }
-        }
-        self.into_iter().filter(is_outer)
-    }
-
-    fn inner(self) -> Self::Ret {
-        fn is_inner(attr: &&Attribute) -> bool {
-            match attr.style {
-                AttrStyle::Inner(_) => true,
-                _ => false,
-            }
-        }
-        self.into_iter().filter(is_inner)
-    }
-}
-
-#[cfg(feature = "parsing")]
-pub mod parsing {
-    use super::*;
-    use buffer::Cursor;
-    use parse_error;
-    use proc_macro2::{Literal, Punct, Spacing, Span, TokenTree};
-    use synom::PResult;
-
-    fn eq(span: Span) -> TokenTree {
-        let mut op = Punct::new('=', Spacing::Alone);
-        op.set_span(span);
-        op.into()
-    }
-
-    impl Attribute {
-        named!(pub parse_inner -> Self, alt!(
-            do_parse!(
-                pound: punct!(#) >>
-                bang: punct!(!) >>
-                path_and_tts: brackets!(tuple!(
-                    call!(Path::parse_mod_style),
-                    syn!(TokenStream),
-                )) >>
-                ({
-                    let (bracket, (path, tts)) = path_and_tts;
-
-                    Attribute {
-                        style: AttrStyle::Inner(bang),
-                        path: path,
-                        tts: tts,
-                        is_sugared_doc: false,
-                        pound_token: pound,
-                        bracket_token: bracket,
-                    }
-                })
-            )
-            |
-            map!(
-                call!(lit_doc_comment, Comment::Inner),
-                |lit| {
-                    let span = lit.span();
-                    Attribute {
-                        style: AttrStyle::Inner(<Token![!]>::new(span)),
-                        path: Ident::new("doc", span).into(),
-                        tts: vec![
-                            eq(span),
-                            lit,
-                        ].into_iter().collect(),
-                        is_sugared_doc: true,
-                        pound_token: <Token![#]>::new(span),
-                        bracket_token: token::Bracket(span),
-                    }
-                }
-            )
-        ));
-
-        named!(pub parse_outer -> Self, alt!(
-            do_parse!(
-                pound: punct!(#) >>
-                path_and_tts: brackets!(tuple!(
-                    call!(Path::parse_mod_style),
-                    syn!(TokenStream),
-                )) >>
-                ({
-                    let (bracket, (path, tts)) = path_and_tts;
-
-                    Attribute {
-                        style: AttrStyle::Outer,
-                        path: path,
-                        tts: tts,
-                        is_sugared_doc: false,
-                        pound_token: pound,
-                        bracket_token: bracket,
-                    }
-                })
-            )
-            |
-            map!(
-                call!(lit_doc_comment, Comment::Outer),
-                |lit| {
-                    let span = lit.span();
-                    Attribute {
-                        style: AttrStyle::Outer,
-                        path: Ident::new("doc", span).into(),
-                        tts: vec![
-                            eq(span),
-                            lit,
-                        ].into_iter().collect(),
-                        is_sugared_doc: true,
-                        pound_token: <Token![#]>::new(span),
-                        bracket_token: token::Bracket(span),
-                    }
-                }
-            )
-        ));
-    }
-
-    enum Comment {
-        Inner,
-        Outer,
-    }
-
-    fn lit_doc_comment(input: Cursor, style: Comment) -> PResult<TokenTree> {
-        match input.literal() {
-            Some((lit, rest)) => {
-                let string = lit.to_string();
-                let ok = match style {
-                    Comment::Inner => string.starts_with("//!") || string.starts_with("/*!"),
-                    Comment::Outer => string.starts_with("///") || string.starts_with("/**"),
-                };
-                if ok {
-                    let mut new = Literal::string(&string);
-                    new.set_span(lit.span());
-                    Ok((new.into(), rest))
-                } else {
-                    parse_error()
-                }
-            }
-            _ => parse_error(),
-        }
-    }
-}
-
-#[cfg(feature = "printing")]
-mod printing {
-    use super::*;
-    use proc_macro2::TokenStream;
-    use quote::ToTokens;
-
-    impl ToTokens for Attribute {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.pound_token.to_tokens(tokens);
-            if let AttrStyle::Inner(ref b) = self.style {
-                b.to_tokens(tokens);
-            }
-            self.bracket_token.surround(tokens, |tokens| {
-                self.path.to_tokens(tokens);
-                self.tts.to_tokens(tokens);
-            });
-        }
-    }
-
-    impl ToTokens for MetaList {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.ident.to_tokens(tokens);
-            self.paren_token.surround(tokens, |tokens| {
-                self.nested.to_tokens(tokens);
-            })
-        }
-    }
-
-    impl ToTokens for MetaNameValue {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.ident.to_tokens(tokens);
-            self.eq_token.to_tokens(tokens);
-            self.lit.to_tokens(tokens);
-        }
-    }
-}
diff --git a/rustc_deps/vendor/syn-0.14.9/src/buffer.rs b/rustc_deps/vendor/syn-0.14.9/src/buffer.rs
deleted file mode 100644
index 499c4f1..0000000
--- a/rustc_deps/vendor/syn-0.14.9/src/buffer.rs
+++ /dev/null
@@ -1,469 +0,0 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! A stably addressed token buffer supporting efficient traversal based on a
-//! cheaply copyable cursor.
-//!
-//! The [`Synom`] trait is implemented for syntax tree types that can be parsed
-//! from one of these token cursors.
-//!
-//! [`Synom`]: ../synom/trait.Synom.html
-//!
-//! *This module is available if Syn is built with the `"parsing"` feature.*
-//!
-//! # Example
-//!
-//! This example shows a basic token parser for parsing a token stream without
-//! using Syn's parser combinator macros.
-//!
-//! ```
-//! #![feature(proc_macro_diagnostic)]
-//!
-//! extern crate syn;
-//! extern crate proc_macro;
-//!
-//! #[macro_use]
-//! extern crate quote;
-//!
-//! use syn::{token, ExprTuple};
-//! use syn::buffer::{Cursor, TokenBuffer};
-//! use syn::spanned::Spanned;
-//! use syn::synom::Synom;
-//! use proc_macro::{Diagnostic, Span, TokenStream};
-//!
-//! /// A basic token parser for parsing a token stream without using Syn's
-//! /// parser combinator macros.
-//! pub struct Parser<'a> {
-//!     cursor: Cursor<'a>,
-//! }
-//!
-//! impl<'a> Parser<'a> {
-//!     pub fn new(cursor: Cursor<'a>) -> Self {
-//!         Parser { cursor }
-//!     }
-//!
-//!     pub fn current_span(&self) -> Span {
-//!         self.cursor.span().unstable()
-//!     }
-//!
-//!     pub fn parse<T: Synom>(&mut self) -> Result<T, Diagnostic> {
-//!         let (val, rest) = T::parse(self.cursor)
-//!             .map_err(|e| match T::description() {
-//!                 Some(desc) => {
-//!                     self.current_span().error(format!("{}: expected {}", e, desc))
-//!                 }
-//!                 None => {
-//!                     self.current_span().error(e.to_string())
-//!                 }
-//!             })?;
-//!
-//!         self.cursor = rest;
-//!         Ok(val)
-//!     }
-//!
-//!     pub fn expect_eof(&mut self) -> Result<(), Diagnostic> {
-//!         if !self.cursor.eof() {
-//!             return Err(self.current_span().error("trailing characters; expected eof"));
-//!         }
-//!
-//!         Ok(())
-//!     }
-//! }
-//!
-//! fn eval(input: TokenStream) -> Result<TokenStream, Diagnostic> {
-//!     let buffer = TokenBuffer::new(input);
-//!     let mut parser = Parser::new(buffer.begin());
-//!
-//!     // Parse some syntax tree types out of the input tokens. In this case we
-//!     // expect something like:
-//!     //
-//!     //     (a, b, c) = (1, 2, 3)
-//!     let a = parser.parse::<ExprTuple>()?;
-//!     parser.parse::<token::Eq>()?;
-//!     let b = parser.parse::<ExprTuple>()?;
-//!     parser.expect_eof()?;
-//!
-//!     // Perform some validation and report errors.
-//!     let (a_len, b_len) = (a.elems.len(), b.elems.len());
-//!     if a_len != b_len {
-//!         let diag = b.span().unstable()
-//!             .error(format!("expected {} element(s), got {}", a_len, b_len))
-//!             .span_note(a.span().unstable(), "because of this");
-//!
-//!         return Err(diag);
-//!     }
-//!
-//!     // Build the output tokens.
-//!     let out = quote! {
-//!         println!("All good! Received two tuples of size {}", #a_len);
-//!     };
-//!
-//!     Ok(out.into())
-//! }
-//! #
-//! # extern crate proc_macro2;
-//! #
-//! # // This method exists on proc_macro2::Span but is behind the "nightly"
-//! # // feature.
-//! # trait ToUnstableSpan {
-//! #     fn unstable(&self) -> Span;
-//! # }
-//! #
-//! # impl ToUnstableSpan for proc_macro2::Span {
-//! #     fn unstable(&self) -> Span {
-//! #         unimplemented!()
-//! #     }
-//! # }
-//! #
-//! # fn main() {}
-//! ```
-
-// This module is heavily commented as it contains the only unsafe code in Syn,
-// and caution should be used when editing it. The public-facing interface is
-// 100% safe but the implementation is fragile internally.
-
-#[cfg(all(
-    not(all(target_arch = "wasm32", target_os = "unknown")),
-    feature = "proc-macro"
-))]
-use proc_macro as pm;
-use proc_macro2::{Delimiter, Ident, Literal, Span, TokenStream};
-use proc_macro2::{Group, Punct, TokenTree};
-
-use std::marker::PhantomData;
-use std::ptr;
-
-#[cfg(synom_verbose_trace)]
-use std::fmt::{self, Debug};
-
-/// Internal type which is used instead of `TokenTree` to represent a token tree
-/// within a `TokenBuffer`.
-enum Entry {
-    // Mimicking types from proc-macro.
-    Group(Span, Delimiter, TokenBuffer),
-    Ident(Ident),
-    Punct(Punct),
-    Literal(Literal),
-    // End entries contain a raw pointer to the entry from the containing
-    // token tree, or null if this is the outermost level.
-    End(*const Entry),
-}
-
-/// A buffer that can be efficiently traversed multiple times, unlike
-/// `TokenStream` which requires a deep copy in order to traverse more than
-/// once.
-///
-/// See the [module documentation] for an example of `TokenBuffer` in action.
-///
-/// [module documentation]: index.html
-///
-/// *This type is available if Syn is built with the `"parsing"` feature.*
-pub struct TokenBuffer {
-    // NOTE: Do not derive clone on this - there are raw pointers inside which
-    // will be messed up. Moving the `TokenBuffer` itself is safe as the actual
-    // backing slices won't be moved.
-    data: Box<[Entry]>,
-}
-
-impl TokenBuffer {
-    // NOTE: DO NOT MUTATE THE `Vec` RETURNED FROM THIS FUNCTION ONCE IT
-    // RETURNS, THE ADDRESS OF ITS BACKING MEMORY MUST REMAIN STABLE.
-    fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer {
-        // Build up the entries list, recording the locations of any Groups
-        // in the list to be processed later.
-        let mut entries = Vec::new();
-        let mut seqs = Vec::new();
-        for tt in stream {
-            match tt {
-                TokenTree::Ident(sym) => {
-                    entries.push(Entry::Ident(sym));
-                }
-                TokenTree::Punct(op) => {
-                    entries.push(Entry::Punct(op));
-                }
-                TokenTree::Literal(l) => {
-                    entries.push(Entry::Literal(l));
-                }
-                TokenTree::Group(g) => {
-                    // Record the index of the interesting entry, and store an
-                    // `End(null)` there temporarially.
-                    seqs.push((entries.len(), g.span(), g.delimiter(), g.stream().clone()));
-                    entries.push(Entry::End(ptr::null()));
-                }
-            }
-        }
-        // Add an `End` entry to the end with a reference to the enclosing token
-        // stream which was passed in.
-        entries.push(Entry::End(up));
-
-        // NOTE: This is done to ensure that we don't accidentally modify the
-        // length of the backing buffer. The backing buffer must remain at a
-        // constant address after this point, as we are going to store a raw
-        // pointer into it.
-        let mut entries = entries.into_boxed_slice();
-        for (idx, span, delim, seq_stream) in seqs {
-            // We know that this index refers to one of the temporary
-            // `End(null)` entries, and we know that the last entry is
-            // `End(up)`, so the next index is also valid.
-            let seq_up = &entries[idx + 1] as *const Entry;
-
-            // The end entry stored at the end of this Entry::Group should
-            // point to the Entry which follows the Group in the list.
-            let inner = Self::inner_new(seq_stream, seq_up);
-            entries[idx] = Entry::Group(span, delim, inner);
-        }
-
-        TokenBuffer { data: entries }
-    }
-
-    /// Creates a `TokenBuffer` containing all the tokens from the input
-    /// `TokenStream`.
-    ///
-    /// *This method is available if Syn is built with both the `"parsing"` and
-    /// `"proc-macro"` features.*
-    #[cfg(all(
-        not(all(target_arch = "wasm32", target_os = "unknown")),
-        feature = "proc-macro"
-    ))]
-    pub fn new(stream: pm::TokenStream) -> TokenBuffer {
-        Self::new2(stream.into())
-    }
-
-    /// Creates a `TokenBuffer` containing all the tokens from the input
-    /// `TokenStream`.
-    pub fn new2(stream: TokenStream) -> TokenBuffer {
-        Self::inner_new(stream, ptr::null())
-    }
-
-    /// Creates a cursor referencing the first token in the buffer and able to
-    /// traverse until the end of the buffer.
-    pub fn begin(&self) -> Cursor {
-        unsafe { Cursor::create(&self.data[0], &self.data[self.data.len() - 1]) }
-    }
-}
-
-/// A cheaply copyable cursor into a `TokenBuffer`.
-///
-/// This cursor holds a shared reference into the immutable data which is used
-/// internally to represent a `TokenStream`, and can be efficiently manipulated
-/// and copied around.
-///
-/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
-/// object and get a cursor to its first token with `begin()`.
-///
-/// Two cursors are equal if they have the same location in the same input
-/// stream, and have the same scope.
-///
-/// See the [module documentation] for an example of a `Cursor` in action.
-///
-/// [module documentation]: index.html
-///
-/// *This type is available if Syn is built with the `"parsing"` feature.*
-#[derive(Copy, Clone, Eq, PartialEq)]
-pub struct Cursor<'a> {
-    /// The current entry which the `Cursor` is pointing at.
-    ptr: *const Entry,
-    /// This is the only `Entry::End(..)` object which this cursor is allowed to
-    /// point at. All other `End` objects are skipped over in `Cursor::create`.
-    scope: *const Entry,
-    /// This uses the &'a reference which guarantees that these pointers are
-    /// still valid.
-    marker: PhantomData<&'a Entry>,
-}
-
-impl<'a> Cursor<'a> {
-    /// Creates a cursor referencing a static empty TokenStream.
-    pub fn empty() -> Self {
-        // It's safe in this situation for us to put an `Entry` object in global
-        // storage, despite it not actually being safe to send across threads
-        // (`Ident` is a reference into a thread-local table). This is because
-        // this entry never includes a `Ident` object.
-        //
-        // This wrapper struct allows us to break the rules and put a `Sync`
-        // object in global storage.
-        struct UnsafeSyncEntry(Entry);
-        unsafe impl Sync for UnsafeSyncEntry {}
-        static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry));
-
-        Cursor {
-            ptr: &EMPTY_ENTRY.0,
-            scope: &EMPTY_ENTRY.0,
-            marker: PhantomData,
-        }
-    }
-
-    /// This create method intelligently exits non-explicitly-entered
-    /// `None`-delimited scopes when the cursor reaches the end of them,
-    /// allowing for them to be treated transparently.
-    unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self {
-        // NOTE: If we're looking at a `End(..)`, we want to advance the cursor
-        // past it, unless `ptr == scope`, which means that we're at the edge of
-        // our cursor's scope. We should only have `ptr != scope` at the exit
-        // from None-delimited groups entered with `ignore_none`.
-        while let Entry::End(exit) = *ptr {
-            if ptr == scope {
-                break;
-            }
-            ptr = exit;
-        }
-
-        Cursor {
-            ptr: ptr,
-            scope: scope,
-            marker: PhantomData,
-        }
-    }
-
-    /// Get the current entry.
-    fn entry(self) -> &'a Entry {
-        unsafe { &*self.ptr }
-    }
-
-    /// Bump the cursor to point at the next token after the current one. This
-    /// is undefined behavior if the cursor is currently looking at an
-    /// `Entry::End`.
-    unsafe fn bump(self) -> Cursor<'a> {
-        Cursor::create(self.ptr.offset(1), self.scope)
-    }
-
-    /// If the cursor is looking at a `None`-delimited group, move it to look at
-    /// the first token inside instead. If the group is empty, this will move
-    /// the cursor past the `None`-delimited group.
-    ///
-    /// WARNING: This mutates its argument.
-    fn ignore_none(&mut self) {
-        if let Entry::Group(_, Delimiter::None, ref buf) = *self.entry() {
-            // NOTE: We call `Cursor::create` here to make sure that situations
-            // where we should immediately exit the span after entering it are
-            // handled correctly.
-            unsafe {
-                *self = Cursor::create(&buf.data[0], self.scope);
-            }
-        }
-    }
-
-    /// Checks whether the cursor is currently pointing at the end of its valid
-    /// scope.
-    #[inline]
-    pub fn eof(self) -> bool {
-        // We're at eof if we're at the end of our scope.
-        self.ptr == self.scope
-    }
-
-    /// If the cursor is pointing at a `Group` with the given delimiter, returns
-    /// a cursor into that group and one pointing to the next `TokenTree`.
-    pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> {
-        // If we're not trying to enter a none-delimited group, we want to
-        // ignore them. We have to make sure to _not_ ignore them when we want
-        // to enter them, of course. For obvious reasons.
-        if delim != Delimiter::None {
-            self.ignore_none();
-        }
-
-        if let Entry::Group(span, group_delim, ref buf) = *self.entry() {
-            if group_delim == delim {
-                return Some((buf.begin(), span, unsafe { self.bump() }));
-            }
-        }
-
-        None
-    }
-
-    /// If the cursor is pointing at a `Ident`, returns it along with a cursor
-    /// pointing at the next `TokenTree`.
-    pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
-        self.ignore_none();
-        match *self.entry() {
-            Entry::Ident(ref ident) => Some((ident.clone(), unsafe { self.bump() })),
-            _ => None,
-        }
-    }
-
-    /// If the cursor is pointing at an `Punct`, returns it along with a cursor
-    /// pointing at the next `TokenTree`.
-    pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> {
-        self.ignore_none();
-        match *self.entry() {
-            Entry::Punct(ref op) => Some((op.clone(), unsafe { self.bump() })),
-            _ => None,
-        }
-    }
-
-    /// If the cursor is pointing at a `Literal`, return it along with a cursor
-    /// pointing at the next `TokenTree`.
-    pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> {
-        self.ignore_none();
-        match *self.entry() {
-            Entry::Literal(ref lit) => Some((lit.clone(), unsafe { self.bump() })),
-            _ => None,
-        }
-    }
-
-    /// Copies all remaining tokens visible from this cursor into a
-    /// `TokenStream`.
-    pub fn token_stream(self) -> TokenStream {
-        let mut tts = Vec::new();
-        let mut cursor = self;
-        while let Some((tt, rest)) = cursor.token_tree() {
-            tts.push(tt);
-            cursor = rest;
-        }
-        tts.into_iter().collect()
-    }
-
-    /// If the cursor is pointing at a `TokenTree`, returns it along with a
-    /// cursor pointing at the next `TokenTree`.
-    ///
-    /// Returns `None` if the cursor has reached the end of its stream.
-    ///
-    /// This method does not treat `None`-delimited groups as transparent, and
-    /// will return a `Group(None, ..)` if the cursor is looking at one.
-    pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> {
-        let tree = match *self.entry() {
-            Entry::Group(span, delim, ref buf) => {
-                let stream = buf.begin().token_stream();
-                let mut g = Group::new(delim, stream);
-                g.set_span(span);
-                TokenTree::from(g)
-            }
-            Entry::Literal(ref lit) => lit.clone().into(),
-            Entry::Ident(ref ident) => ident.clone().into(),
-            Entry::Punct(ref op) => op.clone().into(),
-            Entry::End(..) => {
-                return None;
-            }
-        };
-
-        Some((tree, unsafe { self.bump() }))
-    }
-
-    /// Returns the `Span` of the current token, or `Span::call_site()` if this
-    /// cursor points to eof.
-    pub fn span(self) -> Span {
-        match *self.entry() {
-            Entry::Group(span, ..) => span,
-            Entry::Literal(ref l) => l.span(),
-            Entry::Ident(ref t) => t.span(),
-            Entry::Punct(ref o) => o.span(),
-            Entry::End(..) => Span::call_site(),
-        }
-    }
-}
-
-// We do a custom implementation for `Debug` as the default implementation is
-// pretty useless.
-#[cfg(synom_verbose_trace)]
-impl<'a> Debug for Cursor<'a> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        // Print what the cursor is currently looking at.
-        // This will look like Cursor("some remaining tokens here")
-        f.debug_tuple("Cursor")
-            .field(&self.token_stream().to_string())
-            .finish()
-    }
-}
diff --git a/rustc_deps/vendor/syn-0.14.9/src/data.rs b/rustc_deps/vendor/syn-0.14.9/src/data.rs
deleted file mode 100644
index 8ffc8a4..0000000
--- a/rustc_deps/vendor/syn-0.14.9/src/data.rs
+++ /dev/null
@@ -1,415 +0,0 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::*;
-use punctuated::Punctuated;
-
-ast_struct! {
-    /// An enum variant.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    pub struct Variant {
-        /// Attributes tagged on the variant.
-        pub attrs: Vec<Attribute>,
-
-        /// Name of the variant.
-        pub ident: Ident,
-
-        /// Content stored in the variant.
-        pub fields: Fields,
-
-        /// Explicit discriminant: `Variant = 1`
-        pub discriminant: Option<(Token![=], Expr)>,
-    }
-}
-
-ast_enum_of_structs! {
-    /// Data stored within an enum variant or struct.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    ///
-    /// # Syntax tree enum
-    ///
-    /// This type is a [syntax tree enum].
-    ///
-    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
-    pub enum Fields {
-        /// Named fields of a struct or struct variant such as `Point { x: f64,
-        /// y: f64 }`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Named(FieldsNamed {
-            pub brace_token: token::Brace,
-            pub named: Punctuated<Field, Token![,]>,
-        }),
-
-        /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Unnamed(FieldsUnnamed {
-            pub paren_token: token::Paren,
-            pub unnamed: Punctuated<Field, Token![,]>,
-        }),
-
-        /// Unit struct or unit variant such as `None`.
-        pub Unit,
-    }
-}
-
-impl Fields {
-    /// Get an iterator over the borrowed [`Field`] items in this object. This
-    /// iterator can be used to iterate over a named or unnamed struct or
-    /// variant's fields uniformly.
-    ///
-    /// [`Field`]: struct.Field.html
-    pub fn iter(&self) -> punctuated::Iter<Field> {
-        match *self {
-            Fields::Unit => punctuated::Iter::private_empty(),
-            Fields::Named(ref f) => f.named.iter(),
-            Fields::Unnamed(ref f) => f.unnamed.iter(),
-        }
-    }
-
-    /// Get an iterator over the mutably borrowed [`Field`] items in this
-    /// object. This iterator can be used to iterate over a named or unnamed
-    /// struct or variant's fields uniformly.
-    ///
-    /// [`Field`]: struct.Field.html
-    pub fn iter_mut(&mut self) -> punctuated::IterMut<Field> {
-        match *self {
-            Fields::Unit => punctuated::IterMut::private_empty(),
-            Fields::Named(ref mut f) => f.named.iter_mut(),
-            Fields::Unnamed(ref mut f) => f.unnamed.iter_mut(),
-        }
-    }
-}
-
-impl<'a> IntoIterator for &'a Fields {
-    type Item = &'a Field;
-    type IntoIter = punctuated::Iter<'a, Field>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.iter()
-    }
-}
-
-impl<'a> IntoIterator for &'a mut Fields {
-    type Item = &'a mut Field;
-    type IntoIter = punctuated::IterMut<'a, Field>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.iter_mut()
-    }
-}
-
-ast_struct! {
-    /// A field of a struct or enum variant.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    pub struct Field {
-        /// Attributes tagged on the field.
-        pub attrs: Vec<Attribute>,
-
-        /// Visibility of the field.
-        pub vis: Visibility,
-
-        /// Name of the field, if any.
-        ///
-        /// Fields of tuple structs have no names.
-        pub ident: Option<Ident>,
-
-        pub colon_token: Option<Token![:]>,
-
-        /// Type of the field.
-        pub ty: Type,
-    }
-}
-
-ast_enum_of_structs! {
-    /// The visibility level of an item: inherited or `pub` or
-    /// `pub(restricted)`.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    ///
-    /// # Syntax tree enum
-    ///
-    /// This type is a [syntax tree enum].
-    ///
-    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
-    pub enum Visibility {
-        /// A public visibility level: `pub`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Public(VisPublic {
-            pub pub_token: Token![pub],
-        }),
-
-        /// A crate-level visibility: `crate`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Crate(VisCrate {
-            pub crate_token: Token![crate],
-        }),
-
-        /// A visibility level restricted to some path: `pub(self)` or
-        /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Restricted(VisRestricted {
-            pub pub_token: Token![pub],
-            pub paren_token: token::Paren,
-            pub in_token: Option<Token![in]>,
-            pub path: Box<Path>,
-        }),
-
-        /// An inherited visibility, which usually means private.
-        pub Inherited,
-    }
-}
-
-#[cfg(feature = "parsing")]
-pub mod parsing {
-    use super::*;
-
-    use synom::Synom;
-
-    impl Synom for Variant {
-        named!(parse -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            id: syn!(Ident) >>
-            fields: alt!(
-                syn!(FieldsNamed) => { Fields::Named }
-                |
-                syn!(FieldsUnnamed) => { Fields::Unnamed }
-                |
-                epsilon!() => { |_| Fields::Unit }
-            ) >>
-            disr: option!(tuple!(punct!(=), syn!(Expr))) >>
-            (Variant {
-                ident: id,
-                attrs: attrs,
-                fields: fields,
-                discriminant: disr,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("enum variant")
-        }
-    }
-
-    impl Synom for FieldsNamed {
-        named!(parse -> Self, map!(
-            braces!(call!(Punctuated::parse_terminated_with, Field::parse_named)),
-            |(brace, fields)| FieldsNamed {
-                brace_token: brace,
-                named: fields,
-            }
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("named fields in a struct or struct variant")
-        }
-    }
-
-    impl Synom for FieldsUnnamed {
-        named!(parse -> Self, map!(
-            parens!(call!(Punctuated::parse_terminated_with, Field::parse_unnamed)),
-            |(paren, fields)| FieldsUnnamed {
-                paren_token: paren,
-                unnamed: fields,
-            }
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("unnamed fields in a tuple struct or tuple variant")
-        }
-    }
-
-    impl Field {
-        named!(pub parse_named -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            vis: syn!(Visibility) >>
-            id: syn!(Ident) >>
-            colon: punct!(:) >>
-            ty: syn!(Type) >>
-            (Field {
-                ident: Some(id),
-                vis: vis,
-                attrs: attrs,
-                ty: ty,
-                colon_token: Some(colon),
-            })
-        ));
-
-        named!(pub parse_unnamed -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            vis: syn!(Visibility) >>
-            ty: syn!(Type) >>
-            (Field {
-                ident: None,
-                colon_token: None,
-                vis: vis,
-                attrs: attrs,
-                ty: ty,
-            })
-        ));
-    }
-
-    impl Synom for Visibility {
-        named!(parse -> Self, alt!(
-            do_parse!(
-                pub_token: keyword!(pub) >>
-                other: parens!(keyword!(crate)) >>
-                (Visibility::Restricted(VisRestricted {
-                    pub_token: pub_token,
-                    paren_token: other.0,
-                    in_token: None,
-                    path: Box::new(other.1.into()),
-                }))
-            )
-            |
-            do_parse!(
-                crate_token: keyword!(crate) >>
-                not!(punct!(::)) >>
-                (Visibility::Crate(VisCrate {
-                    crate_token: crate_token,
-                }))
-            )
-            |
-            do_parse!(
-                pub_token: keyword!(pub) >>
-                other: parens!(keyword!(self)) >>
-                (Visibility::Restricted(VisRestricted {
-                    pub_token: pub_token,
-                    paren_token: other.0,
-                    in_token: None,
-                    path: Box::new(other.1.into()),
-                }))
-            )
-            |
-            do_parse!(
-                pub_token: keyword!(pub) >>
-                other: parens!(keyword!(super)) >>
-                (Visibility::Restricted(VisRestricted {
-                    pub_token: pub_token,
-                    paren_token: other.0,
-                    in_token: None,
-                    path: Box::new(other.1.into()),
-                }))
-            )
-            |
-            do_parse!(
-                pub_token: keyword!(pub) >>
-                other: parens!(do_parse!(
-                    in_tok: keyword!(in) >>
-                    restricted: call!(Path::parse_mod_style) >>
-                    (in_tok, restricted)
-                )) >>
-                (Visibility::Restricted(VisRestricted {
-                    pub_token: pub_token,
-                    paren_token: other.0,
-                    in_token: Some((other.1).0),
-                    path: Box::new((other.1).1),
-                }))
-            )
-            |
-            keyword!(pub) => { |tok| {
-                Visibility::Public(VisPublic {
-                    pub_token: tok,
-                })
-            } }
-            |
-            epsilon!() => { |_| Visibility::Inherited }
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("visibility qualifier such as `pub`")
-        }
-    }
-}
-
-#[cfg(feature = "printing")]
-mod printing {
-    use super::*;
-    use proc_macro2::TokenStream;
-    use quote::{ToTokens, TokenStreamExt};
-
-    impl ToTokens for Variant {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            tokens.append_all(&self.attrs);
-            self.ident.to_tokens(tokens);
-            self.fields.to_tokens(tokens);
-            if let Some((ref eq_token, ref disc)) = self.discriminant {
-                eq_token.to_tokens(tokens);
-                disc.to_tokens(tokens);
-            }
-        }
-    }
-
-    impl ToTokens for FieldsNamed {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.brace_token.surround(tokens, |tokens| {
-                self.named.to_tokens(tokens);
-            });
-        }
-    }
-
-    impl ToTokens for FieldsUnnamed {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.paren_token.surround(tokens, |tokens| {
-                self.unnamed.to_tokens(tokens);
-            });
-        }
-    }
-
-    impl ToTokens for Field {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            tokens.append_all(&self.attrs);
-            self.vis.to_tokens(tokens);
-            if let Some(ref ident) = self.ident {
-                ident.to_tokens(tokens);
-                TokensOrDefault(&self.colon_token).to_tokens(tokens);
-            }
-            self.ty.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for VisPublic {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.pub_token.to_tokens(tokens)
-        }
-    }
-
-    impl ToTokens for VisCrate {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.crate_token.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for VisRestricted {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.pub_token.to_tokens(tokens);
-            self.paren_token.surround(tokens, |tokens| {
-                // XXX: If we have a path which is not "self" or "super" or
-                // "crate", automatically add the "in" token.
-                self.in_token.to_tokens(tokens);
-                self.path.to_tokens(tokens);
-            });
-        }
-    }
-}
diff --git a/rustc_deps/vendor/syn-0.14.9/src/derive.rs b/rustc_deps/vendor/syn-0.14.9/src/derive.rs
deleted file mode 100644
index ecef776..0000000
--- a/rustc_deps/vendor/syn-0.14.9/src/derive.rs
+++ /dev/null
@@ -1,242 +0,0 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::*;
-use punctuated::Punctuated;
-
-ast_struct! {
-    /// Data structure sent to a `proc_macro_derive` macro.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` feature.*
-    pub struct DeriveInput {
-        /// Attributes tagged on the whole struct or enum.
-        pub attrs: Vec<Attribute>,
-
-        /// Visibility of the struct or enum.
-        pub vis: Visibility,
-
-        /// Name of the struct or enum.
-        pub ident: Ident,
-
-        /// Generics required to complete the definition.
-        pub generics: Generics,
-
-        /// Data within the struct or enum.
-        pub data: Data,
-    }
-}
-
-ast_enum_of_structs! {
-    /// The storage of a struct, enum or union data structure.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` feature.*
-    ///
-    /// # Syntax tree enum
-    ///
-    /// This type is a [syntax tree enum].
-    ///
-    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
-    pub enum Data {
-        /// A struct input to a `proc_macro_derive` macro.
-        ///
-        /// *This type is available if Syn is built with the `"derive"`
-        /// feature.*
-        pub Struct(DataStruct {
-            pub struct_token: Token![struct],
-            pub fields: Fields,
-            pub semi_token: Option<Token![;]>,
-        }),
-
-        /// An enum input to a `proc_macro_derive` macro.
-        ///
-        /// *This type is available if Syn is built with the `"derive"`
-        /// feature.*
-        pub Enum(DataEnum {
-            pub enum_token: Token![enum],
-            pub brace_token: token::Brace,
-            pub variants: Punctuated<Variant, Token![,]>,
-        }),
-
-        /// A tagged union input to a `proc_macro_derive` macro.
-        ///
-        /// *This type is available if Syn is built with the `"derive"`
-        /// feature.*
-        pub Union(DataUnion {
-            pub union_token: Token![union],
-            pub fields: FieldsNamed,
-        }),
-    }
-
-    do_not_generate_to_tokens
-}
-
-#[cfg(feature = "parsing")]
-pub mod parsing {
-    use super::*;
-
-    use synom::Synom;
-
-    enum DeriveInputKind {
-        Struct(Token![struct]),
-        Enum(Token![enum]),
-        Union(Token![union]),
-    }
-
-    impl Synom for DeriveInputKind {
-        named!(parse -> Self, alt!(
-            keyword!(struct) => { DeriveInputKind::Struct }
-            |
-            keyword!(enum) => { DeriveInputKind::Enum }
-            |
-            keyword!(union) => { DeriveInputKind::Union }
-        ));
-    }
-
-    impl Synom for DeriveInput {
-        named!(parse -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            vis: syn!(Visibility) >>
-            which: syn!(DeriveInputKind) >>
-            id: syn!(Ident) >>
-            generics: syn!(Generics) >>
-            item: switch!(value!(which),
-                DeriveInputKind::Struct(s) => map!(data_struct, move |(wh, fields, semi)| DeriveInput {
-                    ident: id,
-                    vis: vis,
-                    attrs: attrs,
-                    generics: Generics {
-                        where_clause: wh,
-                        ..generics
-                    },
-                    data: Data::Struct(DataStruct {
-                        struct_token: s,
-                        fields: fields,
-                        semi_token: semi,
-                    }),
-                })
-                |
-                DeriveInputKind::Enum(e) => map!(data_enum, move |(wh, brace, variants)| DeriveInput {
-                    ident: id,
-                    vis: vis,
-                    attrs: attrs,
-                    generics: Generics {
-                        where_clause: wh,
-                        ..generics
-                    },
-                    data: Data::Enum(DataEnum {
-                        variants: variants,
-                        brace_token: brace,
-                        enum_token: e,
-                    }),
-                })
-                |
-                DeriveInputKind::Union(u) => map!(data_union, move |(wh, fields)| DeriveInput {
-                    ident: id,
-                    vis: vis,
-                    attrs: attrs,
-                    generics: Generics {
-                        where_clause: wh,
-                        ..generics
-                    },
-                    data: Data::Union(DataUnion {
-                        union_token: u,
-                        fields: fields,
-                    }),
-                })
-            ) >>
-            (item)
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("derive input")
-        }
-    }
-
-    named!(data_struct -> (Option<WhereClause>, Fields, Option<Token![;]>), alt!(
-        do_parse!(
-            wh: option!(syn!(WhereClause)) >>
-            fields: syn!(FieldsNamed) >>
-            (wh, Fields::Named(fields), None)
-        )
-        |
-        do_parse!(
-            fields: syn!(FieldsUnnamed) >>
-            wh: option!(syn!(WhereClause)) >>
-            semi: punct!(;) >>
-            (wh, Fields::Unnamed(fields), Some(semi))
-        )
-        |
-        do_parse!(
-            wh: option!(syn!(WhereClause)) >>
-            semi: punct!(;) >>
-            (wh, Fields::Unit, Some(semi))
-        )
-    ));
-
-    named!(data_enum -> (Option<WhereClause>, token::Brace, Punctuated<Variant, Token![,]>), do_parse!(
-        wh: option!(syn!(WhereClause)) >>
-        data: braces!(Punctuated::parse_terminated) >>
-        (wh, data.0, data.1)
-    ));
-
-    named!(data_union -> (Option<WhereClause>, FieldsNamed), tuple!(
-        option!(syn!(WhereClause)),
-        syn!(FieldsNamed),
-    ));
-}
-
-#[cfg(feature = "printing")]
-mod printing {
-    use super::*;
-    use attr::FilterAttrs;
-    use proc_macro2::TokenStream;
-    use quote::ToTokens;
-
-    impl ToTokens for DeriveInput {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            for attr in self.attrs.outer() {
-                attr.to_tokens(tokens);
-            }
-            self.vis.to_tokens(tokens);
-            match self.data {
-                Data::Struct(ref d) => d.struct_token.to_tokens(tokens),
-                Data::Enum(ref d) => d.enum_token.to_tokens(tokens),
-                Data::Union(ref d) => d.union_token.to_tokens(tokens),
-            }
-            self.ident.to_tokens(tokens);
-            self.generics.to_tokens(tokens);
-            match self.data {
-                Data::Struct(ref data) => match data.fields {
-                    Fields::Named(ref fields) => {
-                        self.generics.where_clause.to_tokens(tokens);
-                        fields.to_tokens(tokens);
-                    }
-                    Fields::Unnamed(ref fields) => {
-                        fields.to_tokens(tokens);
-                        self.generics.where_clause.to_tokens(tokens);
-                        TokensOrDefault(&data.semi_token).to_tokens(tokens);
-                    }
-                    Fields::Unit => {
-                        self.generics.where_clause.to_tokens(tokens);
-                        TokensOrDefault(&data.semi_token).to_tokens(tokens);
-                    }
-                },
-                Data::Enum(ref data) => {
-                    self.generics.where_clause.to_tokens(tokens);
-                    data.brace_token.surround(tokens, |tokens| {
-                        data.variants.to_tokens(tokens);
-                    });
-                }
-                Data::Union(ref data) => {
-                    self.generics.where_clause.to_tokens(tokens);
-                    data.fields.to_tokens(tokens);
-                }
-            }
-        }
-    }
-}
diff --git a/rustc_deps/vendor/syn-0.14.9/src/error.rs b/rustc_deps/vendor/syn-0.14.9/src/error.rs
deleted file mode 100644
index 6673aa3..0000000
--- a/rustc_deps/vendor/syn-0.14.9/src/error.rs
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use buffer::Cursor;
-use std::error::Error;
-use std::fmt::{self, Display};
-
-/// The result of a `Synom` parser.
-///
-/// Refer to the [module documentation] for details about parsing in Syn.
-///
-/// [module documentation]: index.html
-///
-/// *This type is available if Syn is built with the `"parsing"` feature.*
-pub type PResult<'a, O> = Result<(O, Cursor<'a>), ParseError>;
-
-/// An error with a default error message.
-///
-/// NOTE: We should provide better error messages in the future.
-pub fn parse_error<'a, O>() -> PResult<'a, O> {
-    Err(ParseError(None))
-}
-
-/// Error returned when a `Synom` parser cannot parse the input tokens.
-///
-/// Refer to the [module documentation] for details about parsing in Syn.
-///
-/// [module documentation]: index.html
-///
-/// *This type is available if Syn is built with the `"parsing"` feature.*
-#[derive(Debug)]
-pub struct ParseError(Option<String>);
-
-impl Error for ParseError {
-    fn description(&self) -> &str {
-        match self.0 {
-            Some(ref desc) => desc,
-            None => "failed to parse",
-        }
-    }
-}
-
-impl Display for ParseError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        Display::fmt(self.description(), f)
-    }
-}
-
-impl ParseError {
-    // For syn use only. Not public API.
-    #[doc(hidden)]
-    pub fn new<T: Into<String>>(msg: T) -> Self {
-        ParseError(Some(msg.into()))
-    }
-}
diff --git a/rustc_deps/vendor/syn-0.14.9/src/expr.rs b/rustc_deps/vendor/syn-0.14.9/src/expr.rs
deleted file mode 100644
index 979de71..0000000
--- a/rustc_deps/vendor/syn-0.14.9/src/expr.rs
+++ /dev/null
@@ -1,3836 +0,0 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::*;
-use proc_macro2::{Span, TokenStream};
-use punctuated::Punctuated;
-#[cfg(feature = "extra-traits")]
-use std::hash::{Hash, Hasher};
-#[cfg(feature = "full")]
-use std::mem;
-#[cfg(feature = "extra-traits")]
-use tt::TokenStreamHelper;
-
-ast_enum_of_structs! {
-    /// A Rust expression.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    ///
-    /// # Syntax tree enums
-    ///
-    /// This type is a syntax tree enum. In Syn this and other syntax tree enums
-    /// are designed to be traversed using the following rebinding idiom.
-    ///
-    /// ```
-    /// # use syn::Expr;
-    /// #
-    /// # fn example(expr: Expr) {
-    /// # const IGNORE: &str = stringify! {
-    /// let expr: Expr = /* ... */;
-    /// # };
-    /// match expr {
-    ///     Expr::MethodCall(expr) => {
-    ///         /* ... */
-    ///     }
-    ///     Expr::Cast(expr) => {
-    ///         /* ... */
-    ///     }
-    ///     Expr::IfLet(expr) => {
-    ///         /* ... */
-    ///     }
-    ///     /* ... */
-    ///     # _ => {}
-    /// }
-    /// # }
-    /// ```
-    ///
-    /// We begin with a variable `expr` of type `Expr` that has no fields
-    /// (because it is an enum), and by matching on it and rebinding a variable
-    /// with the same name `expr` we effectively imbue our variable with all of
-    /// the data fields provided by the variant that it turned out to be. So for
-    /// example above if we ended up in the `MethodCall` case then we get to use
-    /// `expr.receiver`, `expr.args` etc; if we ended up in the `IfLet` case we
-    /// get to use `expr.pat`, `expr.then_branch`, `expr.else_branch`.
-    ///
-    /// The pattern is similar if the input expression is borrowed:
-    ///
-    /// ```
-    /// # use syn::Expr;
-    /// #
-    /// # fn example(expr: &Expr) {
-    /// match *expr {
-    ///     Expr::MethodCall(ref expr) => {
-    /// #   }
-    /// #   _ => {}
-    /// # }
-    /// # }
-    /// ```
-    ///
-    /// This approach avoids repeating the variant names twice on every line.
-    ///
-    /// ```
-    /// # use syn::{Expr, ExprMethodCall};
-    /// #
-    /// # fn example(expr: Expr) {
-    /// # match expr {
-    /// Expr::MethodCall(ExprMethodCall { method, args, .. }) => { // repetitive
-    /// # }
-    /// # _ => {}
-    /// # }
-    /// # }
-    /// ```
-    ///
-    /// In general, the name to which a syntax tree enum variant is bound should
-    /// be a suitable name for the complete syntax tree enum type.
-    ///
-    /// ```
-    /// # use syn::{Expr, ExprField};
-    /// #
-    /// # fn example(discriminant: &ExprField) {
-    /// // Binding is called `base` which is the name I would use if I were
-    /// // assigning `*discriminant.base` without an `if let`.
-    /// if let Expr::Tuple(ref base) = *discriminant.base {
-    /// # }
-    /// # }
-    /// ```
-    ///
-    /// A sign that you may not be choosing the right variable names is if you
-    /// see names getting repeated in your code, like accessing
-    /// `receiver.receiver` or `pat.pat` or `cond.cond`.
-    pub enum Expr {
-        /// A box expression: `box f`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Box(ExprBox #full {
-            pub attrs: Vec<Attribute>,
-            pub box_token: Token![box],
-            pub expr: Box<Expr>,
-        }),
-
-        /// A placement expression: `place <- value`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub InPlace(ExprInPlace #full {
-            pub attrs: Vec<Attribute>,
-            pub place: Box<Expr>,
-            pub arrow_token: Token![<-],
-            pub value: Box<Expr>,
-        }),
-
-        /// A slice literal expression: `[a, b, c, d]`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Array(ExprArray #full {
-            pub attrs: Vec<Attribute>,
-            pub bracket_token: token::Bracket,
-            pub elems: Punctuated<Expr, Token![,]>,
-        }),
-
-        /// A function call expression: `invoke(a, b)`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Call(ExprCall {
-            pub attrs: Vec<Attribute>,
-            pub func: Box<Expr>,
-            pub paren_token: token::Paren,
-            pub args: Punctuated<Expr, Token![,]>,
-        }),
-
-        /// A method call expression: `x.foo::<T>(a, b)`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub MethodCall(ExprMethodCall #full {
-            pub attrs: Vec<Attribute>,
-            pub receiver: Box<Expr>,
-            pub dot_token: Token![.],
-            pub method: Ident,
-            pub turbofish: Option<MethodTurbofish>,
-            pub paren_token: token::Paren,
-            pub args: Punctuated<Expr, Token![,]>,
-        }),
-
-        /// A tuple expression: `(a, b, c, d)`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Tuple(ExprTuple #full {
-            pub attrs: Vec<Attribute>,
-            pub paren_token: token::Paren,
-            pub elems: Punctuated<Expr, Token![,]>,
-        }),
-
-        /// A binary operation: `a + b`, `a * b`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Binary(ExprBinary {
-            pub attrs: Vec<Attribute>,
-            pub left: Box<Expr>,
-            pub op: BinOp,
-            pub right: Box<Expr>,
-        }),
-
-        /// A unary operation: `!x`, `*x`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Unary(ExprUnary {
-            pub attrs: Vec<Attribute>,
-            pub op: UnOp,
-            pub expr: Box<Expr>,
-        }),
-
-        /// A literal in place of an expression: `1`, `"foo"`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Lit(ExprLit {
-            pub attrs: Vec<Attribute>,
-            pub lit: Lit,
-        }),
-
-        /// A cast expression: `foo as f64`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Cast(ExprCast {
-            pub attrs: Vec<Attribute>,
-            pub expr: Box<Expr>,
-            pub as_token: Token![as],
-            pub ty: Box<Type>,
-        }),
-
-        /// A type ascription expression: `foo: f64`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Type(ExprType #full {
-            pub attrs: Vec<Attribute>,
-            pub expr: Box<Expr>,
-            pub colon_token: Token![:],
-            pub ty: Box<Type>,
-        }),
-
-        /// An `if` expression with an optional `else` block: `if expr { ... }
-        /// else { ... }`.
-        ///
-        /// The `else` branch expression may only be an `If`, `IfLet`, or
-        /// `Block` expression, not any of the other types of expression.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub If(ExprIf #full {
-            pub attrs: Vec<Attribute>,
-            pub if_token: Token![if],
-            pub cond: Box<Expr>,
-            pub then_branch: Block,
-            pub else_branch: Option<(Token![else], Box<Expr>)>,
-        }),
-
-        /// An `if let` expression with an optional `else` block: `if let pat =
-        /// expr { ... } else { ... }`.
-        ///
-        /// The `else` branch expression may only be an `If`, `IfLet`, or
-        /// `Block` expression, not any of the other types of expression.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub IfLet(ExprIfLet #full {
-            pub attrs: Vec<Attribute>,
-            pub if_token: Token![if],
-            pub let_token: Token![let],
-            pub pats: Punctuated<Pat, Token![|]>,
-            pub eq_token: Token![=],
-            pub expr: Box<Expr>,
-            pub then_branch: Block,
-            pub else_branch: Option<(Token![else], Box<Expr>)>,
-        }),
-
-        /// A while loop: `while expr { ... }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub While(ExprWhile #full {
-            pub attrs: Vec<Attribute>,
-            pub label: Option<Label>,
-            pub while_token: Token![while],
-            pub cond: Box<Expr>,
-            pub body: Block,
-        }),
-
-        /// A while-let loop: `while let pat = expr { ... }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub WhileLet(ExprWhileLet #full {
-            pub attrs: Vec<Attribute>,
-            pub label: Option<Label>,
-            pub while_token: Token![while],
-            pub let_token: Token![let],
-            pub pats: Punctuated<Pat, Token![|]>,
-            pub eq_token: Token![=],
-            pub expr: Box<Expr>,
-            pub body: Block,
-        }),
-
-        /// A for loop: `for pat in expr { ... }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub ForLoop(ExprForLoop #full {
-            pub attrs: Vec<Attribute>,
-            pub label: Option<Label>,
-            pub for_token: Token![for],
-            pub pat: Box<Pat>,
-            pub in_token: Token![in],
-            pub expr: Box<Expr>,
-            pub body: Block,
-        }),
-
-        /// Conditionless loop: `loop { ... }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Loop(ExprLoop #full {
-            pub attrs: Vec<Attribute>,
-            pub label: Option<Label>,
-            pub loop_token: Token![loop],
-            pub body: Block,
-        }),
-
-        /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Match(ExprMatch #full {
-            pub attrs: Vec<Attribute>,
-            pub match_token: Token![match],
-            pub expr: Box<Expr>,
-            pub brace_token: token::Brace,
-            pub arms: Vec<Arm>,
-        }),
-
-        /// A closure expression: `|a, b| a + b`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Closure(ExprClosure #full {
-            pub attrs: Vec<Attribute>,
-            pub movability: Option<Token![static]>,
-            pub capture: Option<Token![move]>,
-            pub or1_token: Token![|],
-            pub inputs: Punctuated<FnArg, Token![,]>,
-            pub or2_token: Token![|],
-            pub output: ReturnType,
-            pub body: Box<Expr>,
-        }),
-
-        /// An unsafe block: `unsafe { ... }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Unsafe(ExprUnsafe #full {
-            pub attrs: Vec<Attribute>,
-            pub unsafe_token: Token![unsafe],
-            pub block: Block,
-        }),
-
-        /// A blocked scope: `{ ... }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Block(ExprBlock #full {
-            pub attrs: Vec<Attribute>,
-            pub block: Block,
-        }),
-
-        /// An assignment expression: `a = compute()`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Assign(ExprAssign #full {
-            pub attrs: Vec<Attribute>,
-            pub left: Box<Expr>,
-            pub eq_token: Token![=],
-            pub right: Box<Expr>,
-        }),
-
-        /// A compound assignment expression: `counter += 1`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub AssignOp(ExprAssignOp #full {
-            pub attrs: Vec<Attribute>,
-            pub left: Box<Expr>,
-            pub op: BinOp,
-            pub right: Box<Expr>,
-        }),
-
-        /// Access of a named struct field (`obj.k`) or unnamed tuple struct
-        /// field (`obj.0`).
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Field(ExprField {
-            pub attrs: Vec<Attribute>,
-            pub base: Box<Expr>,
-            pub dot_token: Token![.],
-            pub member: Member,
-        }),
-
-        /// A square bracketed indexing expression: `vector[2]`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Index(ExprIndex {
-            pub attrs: Vec<Attribute>,
-            pub expr: Box<Expr>,
-            pub bracket_token: token::Bracket,
-            pub index: Box<Expr>,
-        }),
-
-        /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Range(ExprRange #full {
-            pub attrs: Vec<Attribute>,
-            pub from: Option<Box<Expr>>,
-            pub limits: RangeLimits,
-            pub to: Option<Box<Expr>>,
-        }),
-
-        /// A path like `std::mem::replace` possibly containing generic
-        /// parameters and a qualified self-type.
-        ///
-        /// A plain identifier like `x` is a path of length 1.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Path(ExprPath {
-            pub attrs: Vec<Attribute>,
-            pub qself: Option<QSelf>,
-            pub path: Path,
-        }),
-
-        /// A referencing operation: `&a` or `&mut a`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Reference(ExprReference #full {
-            pub attrs: Vec<Attribute>,
-            pub and_token: Token![&],
-            pub mutability: Option<Token![mut]>,
-            pub expr: Box<Expr>,
-        }),
-
-        /// A `break`, with an optional label to break and an optional
-        /// expression.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Break(ExprBreak #full {
-            pub attrs: Vec<Attribute>,
-            pub break_token: Token![break],
-            pub label: Option<Lifetime>,
-            pub expr: Option<Box<Expr>>,
-        }),
-
-        /// A `continue`, with an optional label.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Continue(ExprContinue #full {
-            pub attrs: Vec<Attribute>,
-            pub continue_token: Token![continue],
-            pub label: Option<Lifetime>,
-        }),
-
-        /// A `return`, with an optional value to be returned.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Return(ExprReturn #full {
-            pub attrs: Vec<Attribute>,
-            pub return_token: Token![return],
-            pub expr: Option<Box<Expr>>,
-        }),
-
-        /// A macro invocation expression: `format!("{}", q)`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Macro(ExprMacro #full {
-            pub attrs: Vec<Attribute>,
-            pub mac: Macro,
-        }),
-
-        /// A struct literal expression: `Point { x: 1, y: 1 }`.
-        ///
-        /// The `rest` provides the value of the remaining fields as in `S { a:
-        /// 1, b: 1, ..rest }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Struct(ExprStruct #full {
-            pub attrs: Vec<Attribute>,
-            pub path: Path,
-            pub brace_token: token::Brace,
-            pub fields: Punctuated<FieldValue, Token![,]>,
-            pub dot2_token: Option<Token![..]>,
-            pub rest: Option<Box<Expr>>,
-        }),
-
-        /// An array literal constructed from one repeated element: `[0u8; N]`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Repeat(ExprRepeat #full {
-            pub attrs: Vec<Attribute>,
-            pub bracket_token: token::Bracket,
-            pub expr: Box<Expr>,
-            pub semi_token: Token![;],
-            pub len: Box<Expr>,
-        }),
-
-        /// A parenthesized expression: `(a + b)`.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Paren(ExprParen {
-            pub attrs: Vec<Attribute>,
-            pub paren_token: token::Paren,
-            pub expr: Box<Expr>,
-        }),
-
-        /// An expression contained within invisible delimiters.
-        ///
-        /// This variant is important for faithfully representing the precedence
-        /// of expressions and is related to `None`-delimited spans in a
-        /// `TokenStream`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Group(ExprGroup #full {
-            pub attrs: Vec<Attribute>,
-            pub group_token: token::Group,
-            pub expr: Box<Expr>,
-        }),
-
-        /// A try-expression: `expr?`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Try(ExprTry #full {
-            pub attrs: Vec<Attribute>,
-            pub expr: Box<Expr>,
-            pub question_token: Token![?],
-        }),
-
-        /// A catch expression: `do catch { ... }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Catch(ExprCatch #full {
-            pub attrs: Vec<Attribute>,
-            pub do_token: Token![do],
-            pub catch_token: Token![catch],
-            pub block: Block,
-        }),
-
-        /// A yield expression: `yield expr`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Yield(ExprYield #full {
-            pub attrs: Vec<Attribute>,
-            pub yield_token: Token![yield],
-            pub expr: Option<Box<Expr>>,
-        }),
-
-        /// Tokens in expression position not interpreted by Syn.
-        ///
-        /// *This type is available if Syn is built with the `"derive"` or
-        /// `"full"` feature.*
-        pub Verbatim(ExprVerbatim #manual_extra_traits {
-            pub tts: TokenStream,
-        }),
-    }
-}
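A minimal usage sketch of this enum, assuming syn as a dependency with its default features and the public `syn::parse_str` helper (the input string is illustrative):

    use syn::{BinOp, Expr, ExprBinary};

    fn main() {
        // `*` binds tighter than `+`, so the root of the tree is the addition.
        let expr: Expr = syn::parse_str("1 + 2 * 3").unwrap();
        match expr {
            Expr::Binary(ExprBinary { op: BinOp::Add(_), .. }) => println!("addition"),
            _ => unreachable!(),
        }
    }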
-
-#[cfg(feature = "extra-traits")]
-impl Eq for ExprVerbatim {}
-
-#[cfg(feature = "extra-traits")]
-impl PartialEq for ExprVerbatim {
-    fn eq(&self, other: &Self) -> bool {
-        TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
-    }
-}
-
-#[cfg(feature = "extra-traits")]
-impl Hash for ExprVerbatim {
-    fn hash<H>(&self, state: &mut H)
-    where
-        H: Hasher,
-    {
-        TokenStreamHelper(&self.tts).hash(state);
-    }
-}
-
-impl Expr {
-    // Not public API.
-    #[doc(hidden)]
-    #[cfg(feature = "full")]
-    pub fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
-        match *self {
-            Expr::Box(ExprBox { ref mut attrs, .. })
-            | Expr::InPlace(ExprInPlace { ref mut attrs, .. })
-            | Expr::Array(ExprArray { ref mut attrs, .. })
-            | Expr::Call(ExprCall { ref mut attrs, .. })
-            | Expr::MethodCall(ExprMethodCall { ref mut attrs, .. })
-            | Expr::Tuple(ExprTuple { ref mut attrs, .. })
-            | Expr::Binary(ExprBinary { ref mut attrs, .. })
-            | Expr::Unary(ExprUnary { ref mut attrs, .. })
-            | Expr::Lit(ExprLit { ref mut attrs, .. })
-            | Expr::Cast(ExprCast { ref mut attrs, .. })
-            | Expr::Type(ExprType { ref mut attrs, .. })
-            | Expr::If(ExprIf { ref mut attrs, .. })
-            | Expr::IfLet(ExprIfLet { ref mut attrs, .. })
-            | Expr::While(ExprWhile { ref mut attrs, .. })
-            | Expr::WhileLet(ExprWhileLet { ref mut attrs, .. })
-            | Expr::ForLoop(ExprForLoop { ref mut attrs, .. })
-            | Expr::Loop(ExprLoop { ref mut attrs, .. })
-            | Expr::Match(ExprMatch { ref mut attrs, .. })
-            | Expr::Closure(ExprClosure { ref mut attrs, .. })
-            | Expr::Unsafe(ExprUnsafe { ref mut attrs, .. })
-            | Expr::Block(ExprBlock { ref mut attrs, .. })
-            | Expr::Assign(ExprAssign { ref mut attrs, .. })
-            | Expr::AssignOp(ExprAssignOp { ref mut attrs, .. })
-            | Expr::Field(ExprField { ref mut attrs, .. })
-            | Expr::Index(ExprIndex { ref mut attrs, .. })
-            | Expr::Range(ExprRange { ref mut attrs, .. })
-            | Expr::Path(ExprPath { ref mut attrs, .. })
-            | Expr::Reference(ExprReference { ref mut attrs, .. })
-            | Expr::Break(ExprBreak { ref mut attrs, .. })
-            | Expr::Continue(ExprContinue { ref mut attrs, .. })
-            | Expr::Return(ExprReturn { ref mut attrs, .. })
-            | Expr::Macro(ExprMacro { ref mut attrs, .. })
-            | Expr::Struct(ExprStruct { ref mut attrs, .. })
-            | Expr::Repeat(ExprRepeat { ref mut attrs, .. })
-            | Expr::Paren(ExprParen { ref mut attrs, .. })
-            | Expr::Group(ExprGroup { ref mut attrs, .. })
-            | Expr::Try(ExprTry { ref mut attrs, .. })
-            | Expr::Catch(ExprCatch { ref mut attrs, .. })
-            | Expr::Yield(ExprYield { ref mut attrs, .. }) => mem::replace(attrs, new),
-            Expr::Verbatim(_) => {
-                // TODO
-                Vec::new()
-            }
-        }
-    }
-}
-
-ast_enum! {
-    /// A struct or tuple struct field accessed in a struct literal or field
-    /// expression.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    pub enum Member {
-        /// A named field like `self.x`.
-        Named(Ident),
-        /// An unnamed field like `self.0`.
-        Unnamed(Index),
-    }
-}
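A short sketch of the distinction, assuming `syn::parse_str` and default features:

    use syn::{Expr, Member};

    fn main() {
        let expr: Expr = syn::parse_str("tuple.0").unwrap();
        if let Expr::Field(field) = expr {
            match field.member {
                Member::Unnamed(index) => assert_eq!(index.index, 0), // `tuple.0`
                Member::Named(_) => unreachable!(),                   // would be e.g. `s.x`
            }
        }
    }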
-
-ast_struct! {
-    /// The index of an unnamed tuple struct field.
-    ///
-    /// *This type is available if Syn is built with the `"derive"` or `"full"`
-    /// feature.*
-    pub struct Index #manual_extra_traits {
-        pub index: u32,
-        pub span: Span,
-    }
-}
-
-impl From<usize> for Index {
-    fn from(index: usize) -> Index {
-        assert!(index < u32::max_value() as usize);
-        Index {
-            index: index as u32,
-            span: Span::call_site(),
-        }
-    }
-}
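A one-line sketch of the conversion above (the span defaults to the call site):

    use syn::Index;

    fn main() {
        let idx = Index::from(2usize);
        assert_eq!(idx.index, 2u32);
    }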
-
-#[cfg(feature = "extra-traits")]
-impl Eq for Index {}
-
-#[cfg(feature = "extra-traits")]
-impl PartialEq for Index {
-    fn eq(&self, other: &Self) -> bool {
-        self.index == other.index
-    }
-}
-
-#[cfg(feature = "extra-traits")]
-impl Hash for Index {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        self.index.hash(state);
-    }
-}
-
-#[cfg(feature = "full")]
-ast_struct! {
-    /// The `::<>` explicit type parameters passed to a method call:
-    /// `parse::<u64>()`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub struct MethodTurbofish {
-        pub colon2_token: Token![::],
-        pub lt_token: Token![<],
-        pub args: Punctuated<GenericMethodArgument, Token![,]>,
-        pub gt_token: Token![>],
-    }
-}
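A sketch of where the turbofish shows up after parsing, assuming the "full" feature and `syn::parse_str`:

    use syn::Expr;

    fn main() {
        let call: Expr = syn::parse_str("input.parse::<u64>()").unwrap();
        if let Expr::MethodCall(m) = call {
            assert_eq!(m.method.to_string(), "parse");
            assert!(m.turbofish.is_some()); // the `::<u64>` part
        }
    }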
-
-#[cfg(feature = "full")]
-ast_enum! {
-    /// An individual generic argument to a method, like `T`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub enum GenericMethodArgument {
-        /// A type argument.
-        Type(Type),
-        /// A const expression. Must be inside of a block.
-        ///
-        /// NOTE: Identity expressions are represented as Type arguments, as
-        /// they are indistinguishable syntactically.
-        Const(Expr),
-    }
-}
-
-#[cfg(feature = "full")]
-ast_struct! {
-    /// A field-value pair in a struct literal.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub struct FieldValue {
-        /// Attributes tagged on the field.
-        pub attrs: Vec<Attribute>,
-
-        /// Name or index of the field.
-        pub member: Member,
-
-        /// The colon in `Struct { x: x }`. If written in shorthand like
-        /// `Struct { x }`, there is no colon.
-        pub colon_token: Option<Token![:]>,
-
-        /// Value of the field.
-        pub expr: Expr,
-    }
-}
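A sketch of the shorthand rule, assuming the "full" feature:

    use syn::Expr;

    fn main() {
        let lit: Expr = syn::parse_str("Point { x, y: 0 }").unwrap();
        if let Expr::Struct(s) = lit {
            let fields: Vec<_> = s.fields.iter().collect();
            assert!(fields[0].colon_token.is_none()); // shorthand `x`
            assert!(fields[1].colon_token.is_some()); // explicit `y: 0`
        }
    }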
-
-#[cfg(feature = "full")]
-ast_struct! {
-    /// A lifetime labeling a `for`, `while`, or `loop`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub struct Label {
-        pub name: Lifetime,
-        pub colon_token: Token![:],
-    }
-}
-
-#[cfg(feature = "full")]
-ast_struct! {
-    /// A braced block containing Rust statements.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub struct Block {
-        pub brace_token: token::Brace,
-        /// Statements in a block
-        pub stmts: Vec<Stmt>,
-    }
-}
-
-#[cfg(feature = "full")]
-ast_enum! {
-    /// A statement, usually ending in a semicolon.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub enum Stmt {
-        /// A local (let) binding.
-        Local(Local),
-
-        /// An item definition.
-        Item(Item),
-
-        /// Expr without trailing semicolon.
-        Expr(Expr),
-
-        /// Expression with trailing semicolon.
-        Semi(Expr, Token![;]),
-    }
-}
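A sketch of the trailing-semicolon distinction, seen through a parsed block expression and assuming the "full" feature:

    use syn::{Expr, Stmt};

    fn main() {
        let block: Expr = syn::parse_str("{ let x = 1; x }").unwrap();
        if let Expr::Block(b) = block {
            match (&b.block.stmts[0], &b.block.stmts[1]) {
                // `let x = 1;` is a Local; the tail `x` has no semicolon.
                (&Stmt::Local(_), &Stmt::Expr(_)) => {}
                _ => unreachable!(),
            }
        }
    }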
-
-#[cfg(feature = "full")]
-ast_struct! {
-    /// A local `let` binding: `let x: u64 = s.parse()?`.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub struct Local {
-        pub attrs: Vec<Attribute>,
-        pub let_token: Token![let],
-        pub pats: Punctuated<Pat, Token![|]>,
-        pub ty: Option<(Token![:], Box<Type>)>,
-        pub init: Option<(Token![=], Box<Expr>)>,
-        pub semi_token: Token![;],
-    }
-}
-
-#[cfg(feature = "full")]
-ast_enum_of_structs! {
-    /// A pattern in a local binding, function signature, match expression, or
-    /// various other places.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    ///
-    /// # Syntax tree enum
-    ///
-    /// This type is a [syntax tree enum].
-    ///
-    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
-    // Clippy false positive
-    // https://github.com/Manishearth/rust-clippy/issues/1241
-    #[cfg_attr(feature = "cargo-clippy", allow(enum_variant_names))]
-    pub enum Pat {
-        /// A pattern that matches any value: `_`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Wild(PatWild {
-            pub underscore_token: Token![_],
-        }),
-
-        /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Ident(PatIdent {
-            pub by_ref: Option<Token![ref]>,
-            pub mutability: Option<Token![mut]>,
-            pub ident: Ident,
-            pub subpat: Option<(Token![@], Box<Pat>)>,
-        }),
-
-        /// A struct or struct variant pattern: `Variant { x, y, .. }`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Struct(PatStruct {
-            pub path: Path,
-            pub brace_token: token::Brace,
-            pub fields: Punctuated<FieldPat, Token![,]>,
-            pub dot2_token: Option<Token![..]>,
-        }),
-
-        /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub TupleStruct(PatTupleStruct {
-            pub path: Path,
-            pub pat: PatTuple,
-        }),
-
-        /// A path pattern like `Color::Red`, optionally qualified with a
-        /// self-type.
-        ///
-        /// Unqualified path patterns can legally refer to variants, structs,
-        /// constants or associated constants. Qualified path patterns like
-        /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
-        /// associated constants.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Path(PatPath {
-            pub qself: Option<QSelf>,
-            pub path: Path,
-        }),
-
-        /// A tuple pattern: `(a, b)`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Tuple(PatTuple {
-            pub paren_token: token::Paren,
-            pub front: Punctuated<Pat, Token![,]>,
-            pub dot2_token: Option<Token![..]>,
-            pub comma_token: Option<Token![,]>,
-            pub back: Punctuated<Pat, Token![,]>,
-        }),
-
-        /// A box pattern: `box v`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Box(PatBox {
-            pub box_token: Token![box],
-            pub pat: Box<Pat>,
-        }),
-
-        /// A reference pattern: `&mut (first, second)`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Ref(PatRef {
-            pub and_token: Token![&],
-            pub mutability: Option<Token![mut]>,
-            pub pat: Box<Pat>,
-        }),
-
-        /// A literal pattern: `0`.
-        ///
-        /// This holds an `Expr` rather than a `Lit` because negative numbers
-        /// are represented as an `Expr::Unary`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Lit(PatLit {
-            pub expr: Box<Expr>,
-        }),
-
-        /// A range pattern: `1..=2`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Range(PatRange {
-            pub lo: Box<Expr>,
-            pub limits: RangeLimits,
-            pub hi: Box<Expr>,
-        }),
-
-        /// A dynamically sized slice pattern: `[a, b, i.., y, z]`.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Slice(PatSlice {
-            pub bracket_token: token::Bracket,
-            pub front: Punctuated<Pat, Token![,]>,
-            pub middle: Option<Box<Pat>>,
-            pub dot2_token: Option<Token![..]>,
-            pub comma_token: Option<Token![,]>,
-            pub back: Punctuated<Pat, Token![,]>,
-        }),
-
-        /// A macro in expression position.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Macro(PatMacro {
-            pub mac: Macro,
-        }),
-
-        /// Tokens in pattern position not interpreted by Syn.
-        ///
-        /// *This type is available if Syn is built with the `"full"` feature.*
-        pub Verbatim(PatVerbatim #manual_extra_traits {
-            pub tts: TokenStream,
-        }),
-    }
-}
-
-#[cfg(all(feature = "full", feature = "extra-traits"))]
-impl Eq for PatVerbatim {}
-
-#[cfg(all(feature = "full", feature = "extra-traits"))]
-impl PartialEq for PatVerbatim {
-    fn eq(&self, other: &Self) -> bool {
-        TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
-    }
-}
-
-#[cfg(all(feature = "full", feature = "extra-traits"))]
-impl Hash for PatVerbatim {
-    fn hash<H>(&self, state: &mut H)
-    where
-        H: Hasher,
-    {
-        TokenStreamHelper(&self.tts).hash(state);
-    }
-}
-
-#[cfg(feature = "full")]
-ast_struct! {
-    /// One arm of a `match` expression: `0...10 => { return true; }`.
-    ///
-    /// As in:
-    ///
-    /// ```rust
-    /// # fn f() -> bool {
-    /// #     let n = 0;
-    /// match n {
-    ///     0...10 => {
-    ///         return true;
-    ///     }
-    ///     // ...
-    ///     # _ => {}
-    /// }
-    /// #   false
-    /// # }
-    /// ```
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub struct Arm {
-        pub attrs: Vec<Attribute>,
-        pub leading_vert: Option<Token![|]>,
-        pub pats: Punctuated<Pat, Token![|]>,
-        pub guard: Option<(Token![if], Box<Expr>)>,
-        pub fat_arrow_token: Token![=>],
-        pub body: Box<Expr>,
-        pub comma: Option<Token![,]>,
-    }
-}
-
-#[cfg(feature = "full")]
-ast_enum! {
-    /// Limit types of a range, inclusive or exclusive.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    #[cfg_attr(feature = "clone-impls", derive(Copy))]
-    pub enum RangeLimits {
-        /// Inclusive at the beginning, exclusive at the end.
-        HalfOpen(Token![..]),
-        /// Inclusive at the beginning and end.
-        Closed(Token![..=]),
-    }
-}
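A sketch of the two limit kinds, assuming the "full" feature:

    use syn::{Expr, RangeLimits};

    fn main() {
        let half_open: Expr = syn::parse_str("1..5").unwrap();
        let closed: Expr = syn::parse_str("1..=5").unwrap();
        if let (Expr::Range(a), Expr::Range(b)) = (half_open, closed) {
            match a.limits { RangeLimits::HalfOpen(_) => {}, _ => unreachable!() }
            match b.limits { RangeLimits::Closed(_) => {}, _ => unreachable!() }
        }
    }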
-
-#[cfg(feature = "full")]
-ast_struct! {
-    /// A single field in a struct pattern.
-    ///
-    /// Patterns like the fields of `Foo { x, ref y, ref mut z }` are treated
-    /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    pub struct FieldPat {
-        pub attrs: Vec<Attribute>,
-        pub member: Member,
-        pub colon_token: Option<Token![:]>,
-        pub pat: Box<Pat>,
-    }
-}
-
-#[cfg(any(feature = "parsing", feature = "printing"))]
-#[cfg(feature = "full")]
-fn arm_expr_requires_comma(expr: &Expr) -> bool {
-    // see https://github.com/rust-lang/rust/blob/eb8f2586e
-    //                       /src/libsyntax/parse/classify.rs#L17-L37
-    match *expr {
-        Expr::Unsafe(..)
-        | Expr::Block(..)
-        | Expr::If(..)
-        | Expr::IfLet(..)
-        | Expr::Match(..)
-        | Expr::While(..)
-        | Expr::WhileLet(..)
-        | Expr::Loop(..)
-        | Expr::ForLoop(..)
-        | Expr::Catch(..) => false,
-        _ => true,
-    }
-}
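A sketch of the comma rule this helper implements, assuming the "full" feature:

    use syn::Expr;

    fn main() {
        // The block-bodied arm `0 => {}` needs no comma before the next arm.
        let m: Expr = syn::parse_str("match n { 0 => {} _ => n }").unwrap();
        if let Expr::Match(m) = m {
            assert_eq!(m.arms.len(), 2);
            assert!(m.arms[0].comma.is_none());
        }
    }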
-
-#[cfg(feature = "parsing")]
-pub mod parsing {
-    use super::*;
-    use path::parsing::qpath;
-    #[cfg(feature = "full")]
-    use path::parsing::ty_no_eq_after;
-
-    use buffer::Cursor;
-    #[cfg(feature = "full")]
-    use parse_error;
-    #[cfg(feature = "full")]
-    use proc_macro2::TokenStream;
-    use synom::PResult;
-    use synom::Synom;
-
-    // When we're parsing expressions which occur before blocks, like in an if
-    // statement's condition, we cannot parse a struct literal.
-    //
-    // Struct literals are ambiguous in certain positions
-    // https://github.com/rust-lang/rfcs/pull/92
-    macro_rules! ambiguous_expr {
-        ($i:expr, $allow_struct:ident) => {
-            ambiguous_expr($i, $allow_struct, true)
-        };
-    }
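A sketch of the ambiguity being avoided (RFC 92): in `if x == S { a: 1 } { body() }` the `{` after `S` could begin either the struct literal `S { a: 1 }` or the `if` body itself, so, like rustc, this parser rejects struct literals in condition position unless they are parenthesized. For example:

    struct S { a: i32 }

    fn main() {
        let x = S { a: 1 };
        // Parenthesizing the literal keeps the condition unambiguous.
        if x.a == (S { a: 1 }).a {
            println!("equal");
        }
    }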
-
-    // When we are parsing an optional suffix expression, we cannot allow blocks
-    // if structs are not allowed.
-    //
-    // Example:
-    //
-    //     if break {} {}
-    //
-    // is ambiguous between:
-    //
-    //     if (break {}) {}
-    //     if (break) {} {}
-    #[cfg(feature = "full")]
-    macro_rules! opt_ambiguous_expr {
-        ($i:expr, $allow_struct:ident) => {
-            option!($i, call!(ambiguous_expr, $allow_struct, $allow_struct))
-        };
-    }
-
-    impl Synom for Expr {
-        named!(parse -> Self, ambiguous_expr!(true));
-
-        fn description() -> Option<&'static str> {
-            Some("expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    named!(expr_no_struct -> Expr, ambiguous_expr!(false));
-
-    // Parse an arbitrary expression.
-    #[cfg(feature = "full")]
-    fn ambiguous_expr(i: Cursor, allow_struct: bool, allow_block: bool) -> PResult<Expr> {
-        call!(i, assign_expr, allow_struct, allow_block)
-    }
-
-    #[cfg(not(feature = "full"))]
-    fn ambiguous_expr(i: Cursor, allow_struct: bool, allow_block: bool) -> PResult<Expr> {
-        // NOTE: We intentionally skip assign_expr, placement_expr, and
-        // range_expr, as they are not parsed in non-full mode.
-        call!(i, or_expr, allow_struct, allow_block)
-    }
-
-    // Parse a left-associative binary operator.
-    macro_rules! binop {
-        (
-            $name: ident,
-            $next: ident,
-            $submac: ident!( $($args:tt)* )
-        ) => {
-            named!($name(allow_struct: bool, allow_block: bool) -> Expr, do_parse!(
-                mut e: call!($next, allow_struct, allow_block) >>
-                many0!(do_parse!(
-                    op: $submac!($($args)*) >>
-                    rhs: call!($next, allow_struct, true) >>
-                    ({
-                        e = ExprBinary {
-                            attrs: Vec::new(),
-                            left: Box::new(e.into()),
-                            op: op,
-                            right: Box::new(rhs.into()),
-                        }.into();
-                    })
-                )) >>
-                (e)
-            ));
-        }
-    }
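A sketch of the left-leaning tree this fold produces, assuming `syn::parse_str` and default features:

    use syn::Expr;

    fn main() {
        let e: Expr = syn::parse_str("a - b - c").unwrap();
        if let Expr::Binary(outer) = e {
            // `a - b - c` parses as `(a - b) - c`: the left operand is itself binary.
            match *outer.left {
                Expr::Binary(_) => {}
                _ => unreachable!(),
            }
        }
    }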
-
-    // <placement> = <placement> ..
-    // <placement> += <placement> ..
-    // <placement> -= <placement> ..
-    // <placement> *= <placement> ..
-    // <placement> /= <placement> ..
-    // <placement> %= <placement> ..
-    // <placement> ^= <placement> ..
-    // <placement> &= <placement> ..
-    // <placement> |= <placement> ..
-    // <placement> <<= <placement> ..
-    // <placement> >>= <placement> ..
-    //
-    // NOTE: This operator is right-associative.
-    #[cfg(feature = "full")]
-    named!(assign_expr(allow_struct: bool, allow_block: bool) -> Expr, do_parse!(
-        mut e: call!(placement_expr, allow_struct, allow_block) >>
-        alt!(
-            do_parse!(
-                eq: punct!(=) >>
-                // Recurse into self to parse right-associative operator.
-                rhs: call!(assign_expr, allow_struct, true) >>
-                ({
-                    e = ExprAssign {
-                        attrs: Vec::new(),
-                        left: Box::new(e),
-                        eq_token: eq,
-                        right: Box::new(rhs),
-                    }.into();
-                })
-            )
-            |
-            do_parse!(
-                op: call!(BinOp::parse_assign_op) >>
-                // Recurse into self to parse right-associative operator.
-                rhs: call!(assign_expr, allow_struct, true) >>
-                ({
-                    e = ExprAssignOp {
-                        attrs: Vec::new(),
-                        left: Box::new(e),
-                        op: op,
-                        right: Box::new(rhs),
-                    }.into();
-                })
-            )
-            |
-            epsilon!()
-        ) >>
-        (e)
-    ));
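Because the parser recurses into itself on the right-hand side, assignment nests to the right. A sketch, assuming the "full" feature:

    use syn::Expr;

    fn main() {
        let e: Expr = syn::parse_str("a = b = c").unwrap();
        if let Expr::Assign(outer) = e {
            match *outer.right {
                Expr::Assign(_) => {} // the nested `b = c`
                _ => unreachable!(),
            }
        }
    }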
-
-    // <range> <- <range> ..
-    //
-    // NOTE: The `in place { expr }` version of this syntax is parsed in
-    // `atom_expr`, not here.
-    //
-    // NOTE: This operator is right-associative.
-    #[cfg(feature = "full")]
-    named!(placement_expr(allow_struct: bool, allow_block: bool) -> Expr, do_parse!(
-        mut e: call!(range_expr, allow_struct, allow_block) >>
-        alt!(
-            do_parse!(
-                arrow: punct!(<-) >>
-                // Recurse into self to parse right-associative operator.
-                rhs: call!(placement_expr, allow_struct, true) >>
-                ({
-                    e = ExprInPlace {
-                        attrs: Vec::new(),
-                        // op: BinOp::Place(larrow),
-                        place: Box::new(e),
-                        arrow_token: arrow,
-                        value: Box::new(rhs),
-                    }.into();
-                })
-            )
-            |
-            epsilon!()
-        ) >>
-        (e)
-    ));
-
-    // <or> ... <or> ..
-    // <or> .. <or> ..
-    // <or> ..
-    //
-    // NOTE: This is currently parsed oddly - I'm not sure what the exact
-    // rules for parsing these expressions are, but this is not correct.
-    // For example, `a .. b .. c` is not a legal expression. It should not
-    // be parsed as either `(a .. b) .. c` or `a .. (b .. c)` apparently.
-    //
-    // NOTE: The form of ranges which don't include a preceding expression are
-    // parsed by `atom_expr`, rather than by this function.
-    #[cfg(feature = "full")]
-    named!(range_expr(allow_struct: bool, allow_block: bool) -> Expr, do_parse!(
-        mut e: call!(or_expr, allow_struct, allow_block) >>
-        many0!(do_parse!(
-            limits: syn!(RangeLimits) >>
-            // We don't want to allow blocks here if we don't allow structs. See
-            // the reasoning for `opt_ambiguous_expr!` above.
-            hi: option!(call!(or_expr, allow_struct, allow_struct)) >>
-            ({
-                e = ExprRange {
-                    attrs: Vec::new(),
-                    from: Some(Box::new(e)),
-                    limits: limits,
-                    to: hi.map(|e| Box::new(e)),
-                }.into();
-            })
-        )) >>
-        (e)
-    ));
-
-    // <and> || <and> ...
-    binop!(or_expr, and_expr, map!(punct!(||), BinOp::Or));
-
-    // <compare> && <compare> ...
-    binop!(and_expr, compare_expr, map!(punct!(&&), BinOp::And));
-
-    // <bitor> == <bitor> ...
-    // <bitor> != <bitor> ...
-    // <bitor> >= <bitor> ...
-    // <bitor> <= <bitor> ...
-    // <bitor> > <bitor> ...
-    // <bitor> < <bitor> ...
-    //
-    // NOTE: This operator appears to be parsed as left-associative, but errors
-    // if it is used in a non-associative manner.
-    binop!(
-        compare_expr,
-        bitor_expr,
-        alt!(
-        punct!(==) => { BinOp::Eq }
-        |
-        punct!(!=) => { BinOp::Ne }
-        |
-        // must be above Lt
-        punct!(<=) => { BinOp::Le }
-        |
-        // must be above Gt
-        punct!(>=) => { BinOp::Ge }
-        |
-        do_parse!(
-            // Make sure that we don't eat the < part of a <- operator
-            not!(punct!(<-)) >>
-            t: punct!(<) >>
-            (BinOp::Lt(t))
-        )
-        |
-        punct!(>) => { BinOp::Gt }
-    )
-    );
-
-    // <bitxor> | <bitxor> ...
-    binop!(
-        bitor_expr,
-        bitxor_expr,
-        do_parse!(not!(punct!(||)) >> not!(punct!(|=)) >> t: punct!(|) >> (BinOp::BitOr(t)))
-    );
-
-    // <bitand> ^ <bitand> ...
-    binop!(
-        bitxor_expr,
-        bitand_expr,
-        do_parse!(
-            // NOTE: Make sure we aren't looking at ^=.
-            not!(punct!(^=)) >> t: punct!(^) >> (BinOp::BitXor(t))
-        )
-    );
-
-    // <shift> & <shift> ...
-    binop!(
-        bitand_expr,
-        shift_expr,
-        do_parse!(
-            // NOTE: Make sure we aren't looking at && or &=.
-            not!(punct!(&&)) >> not!(punct!(&=)) >> t: punct!(&) >> (BinOp::BitAnd(t))
-        )
-    );
-
-    // <arith> << <arith> ...
-    // <arith> >> <arith> ...
-    binop!(
-        shift_expr,
-        arith_expr,
-        alt!(
-        punct!(<<) => { BinOp::Shl }
-        |
-        punct!(>>) => { BinOp::Shr }
-    )
-    );
-
-    // <term> + <term> ...
-    // <term> - <term> ...
-    binop!(
-        arith_expr,
-        term_expr,
-        alt!(
-        punct!(+) => { BinOp::Add }
-        |
-        punct!(-) => { BinOp::Sub }
-    )
-    );
-
-    // <cast> * <cast> ...
-    // <cast> / <cast> ...
-    // <cast> % <cast> ...
-    binop!(
-        term_expr,
-        cast_expr,
-        alt!(
-        punct!(*) => { BinOp::Mul }
-        |
-        punct!(/) => { BinOp::Div }
-        |
-        punct!(%) => { BinOp::Rem }
-    )
-    );
-
-    // <unary> as <ty>
-    // <unary> : <ty>
-    #[cfg(feature = "full")]
-    named!(cast_expr(allow_struct: bool, allow_block: bool) -> Expr, do_parse!(
-        mut e: call!(unary_expr, allow_struct, allow_block) >>
-        many0!(alt!(
-            do_parse!(
-                as_: keyword!(as) >>
-                // We can't accept `A + B` in cast expressions, as it's
-                // ambiguous with the + expression.
-                ty: call!(Type::without_plus) >>
-                ({
-                    e = ExprCast {
-                        attrs: Vec::new(),
-                        expr: Box::new(e),
-                        as_token: as_,
-                        ty: Box::new(ty),
-                    }.into();
-                })
-            )
-            |
-            do_parse!(
-                colon: punct!(:) >>
-                // We can't accept `A + B` in cast expressions, as it's
-                // ambiguous with the + expression.
-                ty: call!(Type::without_plus) >>
-                ({
-                    e = ExprType {
-                        attrs: Vec::new(),
-                        expr: Box::new(e),
-                        colon_token: colon,
-                        ty: Box::new(ty),
-                    }.into();
-                })
-            )
-        )) >>
-        (e)
-    ));
-
-    // <unary> as <ty>
-    #[cfg(not(feature = "full"))]
-    named!(cast_expr(allow_struct: bool, allow_block: bool) -> Expr, do_parse!(
-        mut e: call!(unary_expr, allow_struct, allow_block) >>
-        many0!(do_parse!(
-            as_: keyword!(as) >>
-            // We can't accept `A + B` in cast expressions, as it's
-            // ambiguous with the + expression.
-            ty: call!(Type::without_plus) >>
-            ({
-                e = ExprCast {
-                    attrs: Vec::new(),
-                    expr: Box::new(e),
-                    as_token: as_,
-                    ty: Box::new(ty),
-                }.into();
-            })
-        )) >>
-        (e)
-    ));
-
-    // <UnOp> <trailer>
-    // & <trailer>
-    // &mut <trailer>
-    // box <trailer>
-    #[cfg(feature = "full")]
-    named!(unary_expr(allow_struct: bool, allow_block: bool) -> Expr, alt!(
-        do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            op: syn!(UnOp) >>
-            expr: call!(unary_expr, allow_struct, true) >>
-            (ExprUnary {
-                attrs: attrs,
-                op: op,
-                expr: Box::new(expr),
-            }.into())
-        )
-        |
-        do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            and: punct!(&) >>
-            mutability: option!(keyword!(mut)) >>
-            expr: call!(unary_expr, allow_struct, true) >>
-            (ExprReference {
-                attrs: attrs,
-                and_token: and,
-                mutability: mutability,
-                expr: Box::new(expr),
-            }.into())
-        )
-        |
-        do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            box_: keyword!(box) >>
-            expr: call!(unary_expr, allow_struct, true) >>
-            (ExprBox {
-                attrs: attrs,
-                box_token: box_,
-                expr: Box::new(expr),
-            }.into())
-        )
-        |
-        call!(trailer_expr, allow_struct, allow_block)
-    ));
-
-    // XXX: This duplication is ugly
-    #[cfg(not(feature = "full"))]
-    named!(unary_expr(allow_struct: bool, allow_block: bool) -> Expr, alt!(
-        do_parse!(
-            op: syn!(UnOp) >>
-            expr: call!(unary_expr, allow_struct, true) >>
-            (ExprUnary {
-                attrs: Vec::new(),
-                op: op,
-                expr: Box::new(expr),
-            }.into())
-        )
-        |
-        call!(trailer_expr, allow_struct, allow_block)
-    ));
-
-    #[cfg(feature = "full")]
-    fn take_outer(attrs: &mut Vec<Attribute>) -> Vec<Attribute> {
-        let mut outer = Vec::new();
-        let mut inner = Vec::new();
-        for attr in mem::replace(attrs, Vec::new()) {
-            match attr.style {
-                AttrStyle::Outer => outer.push(attr),
-                AttrStyle::Inner(_) => inner.push(attr),
-            }
-        }
-        *attrs = inner;
-        outer
-    }
-
-    // <atom> (..<args>) ...
-    // <atom> . <ident> (..<args>) ...
-    // <atom> . <ident> ...
-    // <atom> . <lit> ...
-    // <atom> [ <expr> ] ...
-    // <atom> ? ...
-    #[cfg(feature = "full")]
-    named!(trailer_expr(allow_struct: bool, allow_block: bool) -> Expr, do_parse!(
-        mut e: call!(atom_expr, allow_struct, allow_block) >>
-        outer_attrs: value!({
-            let mut attrs = e.replace_attrs(Vec::new());
-            let outer_attrs = take_outer(&mut attrs);
-            e.replace_attrs(attrs);
-            outer_attrs
-        }) >>
-        many0!(alt!(
-            tap!(args: and_call => {
-                let (paren, args) = args;
-                e = ExprCall {
-                    attrs: Vec::new(),
-                    func: Box::new(e),
-                    args: args,
-                    paren_token: paren,
-                }.into();
-            })
-            |
-            tap!(more: and_method_call => {
-                let mut call = more;
-                call.receiver = Box::new(e);
-                e = call.into();
-            })
-            |
-            tap!(field: and_field => {
-                let (token, member) = field;
-                e = ExprField {
-                    attrs: Vec::new(),
-                    base: Box::new(e),
-                    dot_token: token,
-                    member: member,
-                }.into();
-            })
-            |
-            tap!(i: and_index => {
-                let (bracket, i) = i;
-                e = ExprIndex {
-                    attrs: Vec::new(),
-                    expr: Box::new(e),
-                    bracket_token: bracket,
-                    index: Box::new(i),
-                }.into();
-            })
-            |
-            tap!(question: punct!(?) => {
-                e = ExprTry {
-                    attrs: Vec::new(),
-                    expr: Box::new(e),
-                    question_token: question,
-                }.into();
-            })
-        )) >>
-        ({
-            let mut attrs = outer_attrs;
-            attrs.extend(e.replace_attrs(Vec::new()));
-            e.replace_attrs(attrs);
-            e
-        })
-    ));
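The trailers fold left-to-right around the atom, so `f(x).y[0]?` ends up as a `Try` wrapping an `Index` wrapping a `Field` wrapping a `Call`. A sketch, assuming the "full" feature:

    use syn::Expr;

    fn main() {
        let e: Expr = syn::parse_str("f(x).y[0]?").unwrap();
        if let Expr::Try(t) = e {
            if let Expr::Index(i) = *t.expr {
                if let Expr::Field(f) = *i.expr {
                    match *f.base {
                        Expr::Call(_) => {} // the innermost `f(x)`
                        _ => unreachable!(),
                    }
                }
            }
        }
    }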
-
-    // XXX: Duplication == ugly
-    #[cfg(not(feature = "full"))]
-    named!(trailer_expr(allow_struct: bool, allow_block: bool) -> Expr, do_parse!(
-        mut e: call!(atom_expr, allow_struct, allow_block) >>
-        many0!(alt!(
-            tap!(args: and_call => {
-                e = ExprCall {
-                    attrs: Vec::new(),
-                    func: Box::new(e),
-                    paren_token: args.0,
-                    args: args.1,
-                }.into();
-            })
-            |
-            tap!(field: and_field => {
-                let (token, member) = field;
-                e = ExprField {
-                    attrs: Vec::new(),
-                    base: Box::new(e),
-                    dot_token: token,
-                    member: member,
-                }.into();
-            })
-            |
-            tap!(i: and_index => {
-                e = ExprIndex {
-                    attrs: Vec::new(),
-                    expr: Box::new(e),
-                    bracket_token: i.0,
-                    index: Box::new(i.1),
-                }.into();
-            })
-        )) >>
-        (e)
-    ));
-
-    // Parse all atomic expressions which don't have to worry about precedence
-    // interactions, as they are fully contained.
-    #[cfg(feature = "full")]
-    named!(atom_expr(allow_struct: bool, allow_block: bool) -> Expr, alt!(
-        syn!(ExprGroup) => { Expr::Group } // must be placed first
-        |
-        syn!(ExprLit) => { Expr::Lit } // must be before expr_struct
-        |
-        // must be before ExprStruct
-        call!(unstable_async_block) => { Expr::Verbatim }
-        |
-        // must be before expr_path
-        cond_reduce!(allow_struct, syn!(ExprStruct)) => { Expr::Struct }
-        |
-        syn!(ExprParen) => { Expr::Paren } // must be before expr_tup
-        |
-        syn!(ExprMacro) => { Expr::Macro } // must be before expr_path
-        |
-        call!(expr_break, allow_struct) // must be before expr_path
-        |
-        syn!(ExprContinue) => { Expr::Continue } // must be before expr_path
-        |
-        call!(expr_ret, allow_struct) // must be before expr_path
-        |
-        syn!(ExprArray) => { Expr::Array }
-        |
-        syn!(ExprTuple) => { Expr::Tuple }
-        |
-        syn!(ExprIf) => { Expr::If }
-        |
-        syn!(ExprIfLet) => { Expr::IfLet }
-        |
-        syn!(ExprWhile) => { Expr::While }
-        |
-        syn!(ExprWhileLet) => { Expr::WhileLet }
-        |
-        syn!(ExprForLoop) => { Expr::ForLoop }
-        |
-        syn!(ExprLoop) => { Expr::Loop }
-        |
-        syn!(ExprMatch) => { Expr::Match }
-        |
-        syn!(ExprCatch) => { Expr::Catch }
-        |
-        syn!(ExprYield) => { Expr::Yield }
-        |
-        syn!(ExprUnsafe) => { Expr::Unsafe }
-        |
-        call!(expr_closure, allow_struct)
-        |
-        cond_reduce!(allow_block, syn!(ExprBlock)) => { Expr::Block }
-        |
-        call!(unstable_labeled_block) => { Expr::Verbatim }
-        |
-        // NOTE: This is the prefix-form of range
-        call!(expr_range, allow_struct)
-        |
-        syn!(ExprPath) => { Expr::Path }
-        |
-        syn!(ExprRepeat) => { Expr::Repeat }
-    ));
-
-    #[cfg(not(feature = "full"))]
-    named!(atom_expr(_allow_struct: bool, _allow_block: bool) -> Expr, alt!(
-        syn!(ExprLit) => { Expr::Lit }
-        |
-        syn!(ExprParen) => { Expr::Paren }
-        |
-        syn!(ExprPath) => { Expr::Path }
-    ));
-
-    #[cfg(feature = "full")]
-    named!(expr_nosemi -> Expr, do_parse!(
-        nosemi: alt!(
-            syn!(ExprIf) => { Expr::If }
-            |
-            syn!(ExprIfLet) => { Expr::IfLet }
-            |
-            syn!(ExprWhile) => { Expr::While }
-            |
-            syn!(ExprWhileLet) => { Expr::WhileLet }
-            |
-            syn!(ExprForLoop) => { Expr::ForLoop }
-            |
-            syn!(ExprLoop) => { Expr::Loop }
-            |
-            syn!(ExprMatch) => { Expr::Match }
-            |
-            syn!(ExprCatch) => { Expr::Catch }
-            |
-            syn!(ExprYield) => { Expr::Yield }
-            |
-            syn!(ExprUnsafe) => { Expr::Unsafe }
-            |
-            syn!(ExprBlock) => { Expr::Block }
-            |
-            call!(unstable_labeled_block) => { Expr::Verbatim }
-        ) >>
-        // If the next token is a `.` or a `?` it is special-cased to parse
-        // as an expression instead of a block expression.
-        not!(punct!(.)) >>
-        not!(punct!(?)) >>
-        (nosemi)
-    ));
-
-    impl Synom for ExprLit {
-        #[cfg(not(feature = "full"))]
-        named!(parse -> Self, do_parse!(
-            lit: syn!(Lit) >>
-            (ExprLit {
-                attrs: Vec::new(),
-                lit: lit,
-            })
-        ));
-
-        #[cfg(feature = "full")]
-        named!(parse -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            lit: syn!(Lit) >>
-            (ExprLit {
-                attrs: attrs,
-                lit: lit,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("literal")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprMacro {
-        named!(parse -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            mac: syn!(Macro) >>
-            (ExprMacro {
-                attrs: attrs,
-                mac: mac,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("macro invocation expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprGroup {
-        named!(parse -> Self, do_parse!(
-            e: grouped!(syn!(Expr)) >>
-            (ExprGroup {
-                attrs: Vec::new(),
-                expr: Box::new(e.1),
-                group_token: e.0,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("expression surrounded by invisible delimiters")
-        }
-    }
-
-    impl Synom for ExprParen {
-        #[cfg(not(feature = "full"))]
-        named!(parse -> Self, do_parse!(
-            e: parens!(syn!(Expr)) >>
-            (ExprParen {
-                attrs: Vec::new(),
-                paren_token: e.0,
-                expr: Box::new(e.1),
-            })
-        ));
-
-        #[cfg(feature = "full")]
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            e: parens!(tuple!(
-                many0!(Attribute::parse_inner),
-                syn!(Expr),
-            )) >>
-            (ExprParen {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((e.1).0);
-                    attrs
-                },
-                paren_token: e.0,
-                expr: Box::new((e.1).1),
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("parenthesized expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprArray {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            elems: brackets!(tuple!(
-                many0!(Attribute::parse_inner),
-                call!(Punctuated::parse_terminated),
-            )) >>
-            (ExprArray {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((elems.1).0);
-                    attrs
-                },
-                bracket_token: elems.0,
-                elems: (elems.1).1,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("array expression")
-        }
-    }
-
-    named!(and_call -> (token::Paren, Punctuated<Expr, Token![,]>),
-        parens!(Punctuated::parse_terminated)
-    );
-
-    #[cfg(feature = "full")]
-    named!(and_method_call -> ExprMethodCall, do_parse!(
-        dot: punct!(.) >>
-        method: syn!(Ident) >>
-        turbofish: option!(tuple!(
-            punct!(::),
-            punct!(<),
-            call!(Punctuated::parse_terminated),
-            punct!(>),
-        )) >>
-        args: parens!(Punctuated::parse_terminated) >>
-        ({
-            ExprMethodCall {
-                attrs: Vec::new(),
-                // this expr will get overwritten after being returned
-                receiver: Box::new(Expr::Verbatim(ExprVerbatim {
-                    tts: TokenStream::new(),
-                })),
-
-                method: method,
-                turbofish: turbofish.map(|fish| MethodTurbofish {
-                    colon2_token: fish.0,
-                    lt_token: fish.1,
-                    args: fish.2,
-                    gt_token: fish.3,
-                }),
-                args: args.1,
-                paren_token: args.0,
-                dot_token: dot,
-            }
-        })
-    ));
-
-    #[cfg(feature = "full")]
-    impl Synom for GenericMethodArgument {
-        // TODO parse const generics as well
-        named!(parse -> Self, map!(ty_no_eq_after, GenericMethodArgument::Type));
-
-        fn description() -> Option<&'static str> {
-            Some("generic method argument")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprTuple {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            elems: parens!(tuple!(
-                many0!(Attribute::parse_inner),
-                call!(Punctuated::parse_terminated),
-            )) >>
-            (ExprTuple {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((elems.1).0);
-                    attrs
-                },
-                elems: (elems.1).1,
-                paren_token: elems.0,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("tuple")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprIfLet {
-        named!(parse -> Self, do_parse!(
-            if_: keyword!(if) >>
-            let_: keyword!(let) >>
-            pats: call!(Punctuated::parse_separated_nonempty) >>
-            eq: punct!(=) >>
-            cond: expr_no_struct >>
-            then_block: braces!(Block::parse_within) >>
-            else_block: option!(else_block) >>
-            (ExprIfLet {
-                attrs: Vec::new(),
-                pats: pats,
-                let_token: let_,
-                eq_token: eq,
-                expr: Box::new(cond),
-                then_branch: Block {
-                    brace_token: then_block.0,
-                    stmts: then_block.1,
-                },
-                if_token: if_,
-                else_branch: else_block,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`if let` expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprIf {
-        named!(parse -> Self, do_parse!(
-            if_: keyword!(if) >>
-            cond: expr_no_struct >>
-            then_block: braces!(Block::parse_within) >>
-            else_block: option!(else_block) >>
-            (ExprIf {
-                attrs: Vec::new(),
-                cond: Box::new(cond),
-                then_branch: Block {
-                    brace_token: then_block.0,
-                    stmts: then_block.1,
-                },
-                if_token: if_,
-                else_branch: else_block,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`if` expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    named!(else_block -> (Token![else], Box<Expr>), do_parse!(
-        else_: keyword!(else) >>
-        expr: alt!(
-            syn!(ExprIf) => { Expr::If }
-            |
-            syn!(ExprIfLet) => { Expr::IfLet }
-            |
-            do_parse!(
-                else_block: braces!(Block::parse_within) >>
-                (Expr::Block(ExprBlock {
-                    attrs: Vec::new(),
-                    block: Block {
-                        brace_token: else_block.0,
-                        stmts: else_block.1,
-                    },
-                }))
-            )
-        ) >>
-        (else_, Box::new(expr))
-    ));
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprForLoop {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            label: option!(syn!(Label)) >>
-            for_: keyword!(for) >>
-            pat: syn!(Pat) >>
-            in_: keyword!(in) >>
-            expr: expr_no_struct >>
-            block: braces!(tuple!(
-                many0!(Attribute::parse_inner),
-                call!(Block::parse_within),
-            )) >>
-            (ExprForLoop {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((block.1).0);
-                    attrs
-                },
-                label: label,
-                for_token: for_,
-                pat: Box::new(pat),
-                in_token: in_,
-                expr: Box::new(expr),
-                body: Block {
-                    brace_token: block.0,
-                    stmts: (block.1).1,
-                },
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`for` loop")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprLoop {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            label: option!(syn!(Label)) >>
-            loop_: keyword!(loop) >>
-            block: braces!(tuple!(
-                many0!(Attribute::parse_inner),
-                call!(Block::parse_within),
-            )) >>
-            (ExprLoop {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((block.1).0);
-                    attrs
-                },
-                label: label,
-                loop_token: loop_,
-                body: Block {
-                    brace_token: block.0,
-                    stmts: (block.1).1,
-                },
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`loop`")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprMatch {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            match_: keyword!(match) >>
-            obj: expr_no_struct >>
-            braced_content: braces!(tuple!(
-                many0!(Attribute::parse_inner),
-                many0!(syn!(Arm)),
-            )) >>
-            (ExprMatch {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((braced_content.1).0);
-                    attrs
-                },
-                expr: Box::new(obj),
-                match_token: match_,
-                brace_token: braced_content.0,
-                arms: (braced_content.1).1,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`match` expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprCatch {
-        named!(parse -> Self, do_parse!(
-            do_: keyword!(do) >>
-            catch_: keyword!(catch) >>
-            catch_block: syn!(Block) >>
-            (ExprCatch {
-                attrs: Vec::new(),
-                block: catch_block,
-                do_token: do_,
-                catch_token: catch_,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`catch` expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprYield {
-        named!(parse -> Self, do_parse!(
-            yield_: keyword!(yield) >>
-            expr: option!(syn!(Expr)) >>
-            (ExprYield {
-                attrs: Vec::new(),
-                yield_token: yield_,
-                expr: expr.map(Box::new),
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`yield` expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for Arm {
-        named!(parse -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            leading_vert: option!(punct!(|)) >>
-            pats: call!(Punctuated::parse_separated_nonempty) >>
-            guard: option!(tuple!(keyword!(if), syn!(Expr))) >>
-            fat_arrow: punct!(=>) >>
-            body: do_parse!(
-                expr: alt!(expr_nosemi | syn!(Expr)) >>
-                comma: switch!(value!(arm_expr_requires_comma(&expr)),
-                    true => alt!(
-                        input_end!() => { |_| None }
-                        |
-                        punct!(,) => { Some }
-                    )
-                    |
-                    false => option!(punct!(,))
-                ) >>
-                (expr, comma)
-            ) >>
-            (Arm {
-                fat_arrow_token: fat_arrow,
-                attrs: attrs,
-                leading_vert: leading_vert,
-                pats: pats,
-                guard: guard.map(|(if_, guard)| (if_, Box::new(guard))),
-                body: Box::new(body.0),
-                comma: body.1,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`match` arm")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    named!(expr_closure(allow_struct: bool) -> Expr, do_parse!(
-        begin: call!(verbatim::grab_cursor) >>
-        attrs: many0!(Attribute::parse_outer) >>
-        asyncness: option!(keyword!(async)) >>
-        movability: option!(cond_reduce!(asyncness.is_none(), keyword!(static))) >>
-        capture: option!(keyword!(move)) >>
-        or1: punct!(|) >>
-        inputs: call!(Punctuated::parse_terminated_with, fn_arg) >>
-        or2: punct!(|) >>
-        ret_and_body: alt!(
-            do_parse!(
-                arrow: punct!(->) >>
-                ty: syn!(Type) >>
-                body: syn!(Block) >>
-                (
-                    ReturnType::Type(arrow, Box::new(ty)),
-                    Expr::Block(ExprBlock {
-                        attrs: Vec::new(),
-                        block: body,
-                    },
-                ))
-            )
-            |
-            map!(ambiguous_expr!(allow_struct), |e| (ReturnType::Default, e))
-        ) >>
-        end: call!(verbatim::grab_cursor) >>
-        ({
-            if asyncness.is_some() {
-                // TODO: include asyncness in ExprClosure
-                // https://github.com/dtolnay/syn/issues/396
-                Expr::Verbatim(ExprVerbatim {
-                    tts: verbatim::token_range(begin..end),
-                })
-            } else {
-                Expr::Closure(ExprClosure {
-                    attrs: attrs,
-                    movability: movability,
-                    capture: capture,
-                    or1_token: or1,
-                    inputs: inputs,
-                    or2_token: or2,
-                    output: ret_and_body.0,
-                    body: Box::new(ret_and_body.1),
-                })
-            }
-        })
-    ));
-
-    #[cfg(feature = "full")]
-    named!(unstable_async_block -> ExprVerbatim, do_parse!(
-        begin: call!(verbatim::grab_cursor) >>
-        many0!(Attribute::parse_outer) >>
-        keyword!(async) >>
-        option!(keyword!(move)) >>
-        syn!(Block) >>
-        end: call!(verbatim::grab_cursor) >>
-        (ExprVerbatim {
-            tts: verbatim::token_range(begin..end),
-        })
-    ));
-
-    #[cfg(feature = "full")]
-    named!(fn_arg -> FnArg, do_parse!(
-        pat: syn!(Pat) >>
-        ty: option!(tuple!(punct!(:), syn!(Type))) >>
-        ({
-            if let Some((colon, ty)) = ty {
-                FnArg::Captured(ArgCaptured {
-                    pat: pat,
-                    colon_token: colon,
-                    ty: ty,
-                })
-            } else {
-                FnArg::Inferred(pat)
-            }
-        })
-    ));
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprWhile {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            label: option!(syn!(Label)) >>
-            while_: keyword!(while) >>
-            cond: expr_no_struct >>
-            block: braces!(tuple!(
-                many0!(Attribute::parse_inner),
-                call!(Block::parse_within),
-            )) >>
-            (ExprWhile {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((block.1).0);
-                    attrs
-                },
-                label: label,
-                while_token: while_,
-                cond: Box::new(cond),
-                body: Block {
-                    brace_token: block.0,
-                    stmts: (block.1).1,
-                },
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`while` expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprWhileLet {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            label: option!(syn!(Label)) >>
-            while_: keyword!(while) >>
-            let_: keyword!(let) >>
-            pats: call!(Punctuated::parse_separated_nonempty) >>
-            eq: punct!(=) >>
-            value: expr_no_struct >>
-            block: braces!(tuple!(
-                many0!(Attribute::parse_inner),
-                call!(Block::parse_within),
-            )) >>
-            (ExprWhileLet {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((block.1).0);
-                    attrs
-                },
-                label: label,
-                while_token: while_,
-                let_token: let_,
-                pats: pats,
-                eq_token: eq,
-                expr: Box::new(value),
-                body: Block {
-                    brace_token: block.0,
-                    stmts: (block.1).1,
-                },
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`while let` expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for Label {
-        named!(parse -> Self, do_parse!(
-            name: syn!(Lifetime) >>
-            colon: punct!(:) >>
-            (Label {
-                name: name,
-                colon_token: colon,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("label")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprContinue {
-        named!(parse -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            cont: keyword!(continue) >>
-            label: option!(syn!(Lifetime)) >>
-            (ExprContinue {
-                attrs: attrs,
-                continue_token: cont,
-                label: label,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("`continue`")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    named!(expr_break(allow_struct: bool) -> Expr, do_parse!(
-        attrs: many0!(Attribute::parse_outer) >>
-        break_: keyword!(break) >>
-        label: option!(syn!(Lifetime)) >>
-        // We can't allow blocks after a `break` expression when we wouldn't
-        // allow structs, as this expression is ambiguous.
-        val: opt_ambiguous_expr!(allow_struct) >>
-        (ExprBreak {
-            attrs: attrs,
-            label: label,
-            expr: val.map(Box::new),
-            break_token: break_,
-        }.into())
-    ));
-
-    #[cfg(feature = "full")]
-    named!(expr_ret(allow_struct: bool) -> Expr, do_parse!(
-        attrs: many0!(Attribute::parse_outer) >>
-        return_: keyword!(return) >>
-        // NOTE: return is greedy and eats blocks after it even when in a
-        // position where structs are not allowed, such as in if statement
-        // conditions. For example:
-        //
-        // if return { println!("A") } {} // Prints "A"
-        ret_value: option!(ambiguous_expr!(allow_struct)) >>
-        (ExprReturn {
-            attrs: attrs,
-            expr: ret_value.map(Box::new),
-            return_token: return_,
-        }.into())
-    ));
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprStruct {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            path: syn!(Path) >>
-            data: braces!(do_parse!(
-                inner_attrs: many0!(Attribute::parse_inner) >>
-                fields: call!(Punctuated::parse_terminated) >>
-                base: option!(cond!(fields.empty_or_trailing(), do_parse!(
-                    dots: punct!(..) >>
-                    base: syn!(Expr) >>
-                    (dots, base)
-                ))) >>
-                (inner_attrs, fields, base)
-            )) >>
-            ({
-                let (brace, (inner_attrs, fields, base)) = data;
-                let (dots, rest) = match base.and_then(|b| b) {
-                    Some((dots, base)) => (Some(dots), Some(base)),
-                    None => (None, None),
-                };
-                ExprStruct {
-                    attrs: {
-                        let mut attrs = outer_attrs;
-                        attrs.extend(inner_attrs);
-                        attrs
-                    },
-                    brace_token: brace,
-                    path: path,
-                    fields: fields,
-                    dot2_token: dots,
-                    rest: rest.map(Box::new),
-                }
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("struct literal expression")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for FieldValue {
-        named!(parse -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            field_value: alt!(
-                tuple!(syn!(Member), map!(punct!(:), Some), syn!(Expr))
-                |
-                map!(syn!(Ident), |name| (
-                    Member::Named(name.clone()),
-                    None,
-                    Expr::Path(ExprPath {
-                        attrs: Vec::new(),
-                        qself: None,
-                        path: name.into(),
-                    }),
-                ))
-            ) >>
-            (FieldValue {
-                attrs: attrs,
-                member: field_value.0,
-                colon_token: field_value.1,
-                expr: field_value.2,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("field-value pair: `field: value`")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprRepeat {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            data: brackets!(tuple!(
-                many0!(Attribute::parse_inner),
-                syn!(Expr),
-                punct!(;),
-                syn!(Expr),
-            )) >>
-            (ExprRepeat {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((data.1).0);
-                    attrs
-                },
-                expr: Box::new((data.1).1),
-                len: Box::new((data.1).3),
-                bracket_token: data.0,
-                semi_token: (data.1).2,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("repeated array literal: `[val; N]`")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprUnsafe {
-        named!(parse -> Self, do_parse!(
-            unsafe_: keyword!(unsafe) >>
-            b: syn!(Block) >>
-            (ExprUnsafe {
-                attrs: Vec::new(),
-                unsafe_token: unsafe_,
-                block: b,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("unsafe block: `unsafe { .. }`")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for ExprBlock {
-        named!(parse -> Self, do_parse!(
-            outer_attrs: many0!(Attribute::parse_outer) >>
-            block: braces!(tuple!(
-                many0!(Attribute::parse_inner),
-                call!(Block::parse_within),
-            )) >>
-            (ExprBlock {
-                attrs: {
-                    let mut attrs = outer_attrs;
-                    attrs.extend((block.1).0);
-                    attrs
-                },
-                block: Block {
-                    brace_token: block.0,
-                    stmts: (block.1).1,
-                },
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("block: `{ .. }`")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    named!(unstable_labeled_block -> ExprVerbatim, do_parse!(
-        begin: call!(verbatim::grab_cursor) >>
-        many0!(Attribute::parse_outer) >>
-        syn!(Label) >>
-        braces!(tuple!(
-            many0!(Attribute::parse_inner),
-            call!(Block::parse_within),
-        )) >>
-        end: call!(verbatim::grab_cursor) >>
-        (ExprVerbatim {
-            tts: verbatim::token_range(begin..end),
-        })
-    ));
-
-    #[cfg(feature = "full")]
-    named!(expr_range(allow_struct: bool) -> Expr, do_parse!(
-        limits: syn!(RangeLimits) >>
-        hi: opt_ambiguous_expr!(allow_struct) >>
-        (ExprRange {
-            attrs: Vec::new(),
-            from: None,
-            to: hi.map(Box::new),
-            limits: limits,
-        }.into())
-    ));
-
-    #[cfg(feature = "full")]
-    impl Synom for RangeLimits {
-        named!(parse -> Self, alt!(
-            // Must come before Dot2
-            punct!(..=) => { RangeLimits::Closed }
-            |
-            // Must come before Dot2
-            punct!(...) => { |dot3| RangeLimits::Closed(Token![..=](dot3.0)) }
-            |
-            punct!(..) => { RangeLimits::HalfOpen }
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("range limit: `..`, `...` or `..=`")
-        }
-    }
-
-    impl Synom for ExprPath {
-        #[cfg(not(feature = "full"))]
-        named!(parse -> Self, do_parse!(
-            pair: qpath >>
-            (ExprPath {
-                attrs: Vec::new(),
-                qself: pair.0,
-                path: pair.1,
-            })
-        ));
-
-        #[cfg(feature = "full")]
-        named!(parse -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_outer) >>
-            pair: qpath >>
-            (ExprPath {
-                attrs: attrs,
-                qself: pair.0,
-                path: pair.1,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("path: `a::b::c`")
-        }
-    }
-
-    named!(and_field -> (Token![.], Member), tuple!(punct!(.), syn!(Member)));
-
-    named!(and_index -> (token::Bracket, Expr), brackets!(syn!(Expr)));
-
-    #[cfg(feature = "full")]
-    impl Synom for Block {
-        named!(parse -> Self, do_parse!(
-            stmts: braces!(Block::parse_within) >>
-            (Block {
-                brace_token: stmts.0,
-                stmts: stmts.1,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("block: `{ .. }`")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Block {
-        named!(pub parse_within -> Vec<Stmt>, do_parse!(
-            many0!(punct!(;)) >>
-            mut standalone: many0!(do_parse!(
-                stmt: syn!(Stmt) >>
-                many0!(punct!(;)) >>
-                (stmt)
-            )) >>
-            last: option!(do_parse!(
-                attrs: many0!(Attribute::parse_outer) >>
-                mut e: syn!(Expr) >>
-                ({
-                    e.replace_attrs(attrs);
-                    Stmt::Expr(e)
-                })
-            )) >>
-            (match last {
-                None => standalone,
-                Some(last) => {
-                    standalone.push(last);
-                    standalone
-                }
-            })
-        ));
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for Stmt {
-        named!(parse -> Self, alt!(
-            stmt_mac
-            |
-            stmt_local
-            |
-            stmt_item
-            |
-            stmt_blockexpr
-            |
-            stmt_expr
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("statement")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    named!(stmt_mac -> Stmt, do_parse!(
-        attrs: many0!(Attribute::parse_outer) >>
-        what: call!(Path::parse_mod_style) >>
-        bang: punct!(!) >>
-        // Only parse braces here; paren and bracket will get parsed as
-        // expression statements
-        data: braces!(syn!(TokenStream)) >>
-        semi: option!(punct!(;)) >>
-        (Stmt::Item(Item::Macro(ItemMacro {
-            attrs: attrs,
-            ident: None,
-            mac: Macro {
-                path: what,
-                bang_token: bang,
-                delimiter: MacroDelimiter::Brace(data.0),
-                tts: data.1,
-            },
-            semi_token: semi,
-        })))
-    ));
-
-    #[cfg(feature = "full")]
-    named!(stmt_local -> Stmt, do_parse!(
-        attrs: many0!(Attribute::parse_outer) >>
-        let_: keyword!(let) >>
-        pats: call!(Punctuated::parse_separated_nonempty) >>
-        ty: option!(tuple!(punct!(:), syn!(Type))) >>
-        init: option!(tuple!(punct!(=), syn!(Expr))) >>
-        semi: punct!(;) >>
-        (Stmt::Local(Local {
-            attrs: attrs,
-            let_token: let_,
-            pats: pats,
-            ty: ty.map(|(colon, ty)| (colon, Box::new(ty))),
-            init: init.map(|(eq, expr)| (eq, Box::new(expr))),
-            semi_token: semi,
-        }))
-    ));
-
-    #[cfg(feature = "full")]
-    named!(stmt_item -> Stmt, map!(syn!(Item), |i| Stmt::Item(i)));
-
-    #[cfg(feature = "full")]
-    named!(stmt_blockexpr -> Stmt, do_parse!(
-        mut attrs: many0!(Attribute::parse_outer) >>
-        mut e: expr_nosemi >>
-        semi: option!(punct!(;)) >>
-        ({
-            attrs.extend(e.replace_attrs(Vec::new()));
-            e.replace_attrs(attrs);
-            if let Some(semi) = semi {
-                Stmt::Semi(e, semi)
-            } else {
-                Stmt::Expr(e)
-            }
-        })
-    ));
-
-    #[cfg(feature = "full")]
-    named!(stmt_expr -> Stmt, do_parse!(
-        mut attrs: many0!(Attribute::parse_outer) >>
-        mut e: syn!(Expr) >>
-        semi: punct!(;) >>
-        ({
-            attrs.extend(e.replace_attrs(Vec::new()));
-            e.replace_attrs(attrs);
-            Stmt::Semi(e, semi)
-        })
-    ));
-
-    #[cfg(feature = "full")]
-    impl Synom for Pat {
-        named!(parse -> Self, alt!(
-            syn!(PatWild) => { Pat::Wild } // must be before pat_ident
-            |
-            syn!(PatBox) => { Pat::Box }  // must be before pat_ident
-            |
-            syn!(PatRange) => { Pat::Range } // must be before pat_lit
-            |
-            syn!(PatTupleStruct) => { Pat::TupleStruct }  // must be before pat_ident
-            |
-            syn!(PatStruct) => { Pat::Struct } // must be before pat_ident
-            |
-            syn!(PatMacro) => { Pat::Macro } // must be before pat_ident
-            |
-            syn!(PatLit) => { Pat::Lit } // must be before pat_ident
-            |
-            syn!(PatIdent) => { Pat::Ident } // must be before pat_path
-            |
-            syn!(PatPath) => { Pat::Path }
-            |
-            syn!(PatTuple) => { Pat::Tuple }
-            |
-            syn!(PatRef) => { Pat::Ref }
-            |
-            syn!(PatSlice) => { Pat::Slice }
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatWild {
-        named!(parse -> Self, map!(
-            punct!(_),
-            |u| PatWild { underscore_token: u }
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("wild pattern: `_`")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatBox {
-        named!(parse -> Self, do_parse!(
-            boxed: keyword!(box) >>
-            pat: syn!(Pat) >>
-            (PatBox {
-                pat: Box::new(pat),
-                box_token: boxed,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("box pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatIdent {
-        named!(parse -> Self, do_parse!(
-            by_ref: option!(keyword!(ref)) >>
-            mutability: option!(keyword!(mut)) >>
-            name: alt!(
-                syn!(Ident)
-                |
-                keyword!(self) => { Into::into }
-            ) >>
-            not!(punct!(<)) >>
-            not!(punct!(::)) >>
-            subpat: option!(tuple!(punct!(@), syn!(Pat))) >>
-            (PatIdent {
-                by_ref: by_ref,
-                mutability: mutability,
-                ident: name,
-                subpat: subpat.map(|(at, pat)| (at, Box::new(pat))),
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("pattern identifier binding")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatTupleStruct {
-        named!(parse -> Self, do_parse!(
-            path: syn!(Path) >>
-            tuple: syn!(PatTuple) >>
-            (PatTupleStruct {
-                path: path,
-                pat: tuple,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("tuple struct pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatStruct {
-        named!(parse -> Self, do_parse!(
-            path: syn!(Path) >>
-            data: braces!(do_parse!(
-                fields: call!(Punctuated::parse_terminated) >>
-                base: option!(cond!(fields.empty_or_trailing(), punct!(..))) >>
-                (fields, base)
-            )) >>
-            (PatStruct {
-                path: path,
-                fields: (data.1).0,
-                brace_token: data.0,
-                dot2_token: (data.1).1.and_then(|m| m),
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("struct pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for FieldPat {
-        named!(parse -> Self, alt!(
-            do_parse!(
-                member: syn!(Member) >>
-                colon: punct!(:) >>
-                pat: syn!(Pat) >>
-                (FieldPat {
-                    member: member,
-                    pat: Box::new(pat),
-                    attrs: Vec::new(),
-                    colon_token: Some(colon),
-                })
-            )
-            |
-            do_parse!(
-                boxed: option!(keyword!(box)) >>
-                by_ref: option!(keyword!(ref)) >>
-                mutability: option!(keyword!(mut)) >>
-                ident: syn!(Ident) >>
-                ({
-                    let mut pat: Pat = PatIdent {
-                        by_ref: by_ref,
-                        mutability: mutability,
-                        ident: ident.clone(),
-                        subpat: None,
-                    }.into();
-                    if let Some(boxed) = boxed {
-                        pat = PatBox {
-                            pat: Box::new(pat),
-                            box_token: boxed,
-                        }.into();
-                    }
-                    FieldPat {
-                        member: Member::Named(ident),
-                        pat: Box::new(pat),
-                        attrs: Vec::new(),
-                        colon_token: None,
-                    }
-                })
-            )
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("field pattern")
-        }
-    }
-
-    impl Synom for Member {
-        named!(parse -> Self, alt!(
-            syn!(Ident) => { Member::Named }
-            |
-            syn!(Index) => { Member::Unnamed }
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("field member")
-        }
-    }
-
-    impl Synom for Index {
-        named!(parse -> Self, do_parse!(
-            lit: syn!(LitInt) >>
-            ({
-                if let IntSuffix::None = lit.suffix() {
-                    Index { index: lit.value() as u32, span: lit.span() }
-                } else {
-                    return parse_error();
-                }
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("field index")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatPath {
-        named!(parse -> Self, map!(
-            syn!(ExprPath),
-            |p| PatPath { qself: p.qself, path: p.path }
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("path pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatTuple {
-        named!(parse -> Self, do_parse!(
-            data: parens!(do_parse!(
-                front: call!(Punctuated::parse_terminated) >>
-                dotdot: option!(cond_reduce!(front.empty_or_trailing(),
-                    tuple!(punct!(..), option!(punct!(,)))
-                )) >>
-                back: cond!(match dotdot {
-                                Some((_, Some(_))) => true,
-                                _ => false,
-                            },
-                            Punctuated::parse_terminated) >>
-                (front, dotdot, back)
-            )) >>
-            ({
-                let (parens, (front, dotdot, back)) = data;
-                let (dotdot, trailing) = match dotdot {
-                    Some((a, b)) => (Some(a), Some(b)),
-                    None => (None, None),
-                };
-                PatTuple {
-                    paren_token: parens,
-                    front: front,
-                    dot2_token: dotdot,
-                    comma_token: trailing.unwrap_or_default(),
-                    back: back.unwrap_or_default(),
-                }
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("tuple pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatRef {
-        named!(parse -> Self, do_parse!(
-            and: punct!(&) >>
-            mutability: option!(keyword!(mut)) >>
-            pat: syn!(Pat) >>
-            (PatRef {
-                pat: Box::new(pat),
-                mutability: mutability,
-                and_token: and,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("reference pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatLit {
-        named!(parse -> Self, do_parse!(
-            lit: pat_lit_expr >>
-            (if let Expr::Path(_) = lit {
-                return parse_error(); // these need to be parsed by pat_path
-            } else {
-                PatLit {
-                    expr: Box::new(lit),
-                }
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("literal pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatRange {
-        named!(parse -> Self, do_parse!(
-            lo: pat_lit_expr >>
-            limits: syn!(RangeLimits) >>
-            hi: pat_lit_expr >>
-            (PatRange {
-                lo: Box::new(lo),
-                hi: Box::new(hi),
-                limits: limits,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("range pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    named!(pat_lit_expr -> Expr, do_parse!(
-        neg: option!(punct!(-)) >>
-        v: alt!(
-            syn!(ExprLit) => { Expr::Lit }
-            |
-            syn!(ExprPath) => { Expr::Path }
-        ) >>
-        (if let Some(neg) = neg {
-            Expr::Unary(ExprUnary {
-                attrs: Vec::new(),
-                op: UnOp::Neg(neg),
-                expr: Box::new(v)
-            })
-        } else {
-            v
-        })
-    ));
-
-    #[cfg(feature = "full")]
-    impl Synom for PatSlice {
-        named!(parse -> Self, map!(
-            brackets!(do_parse!(
-                before: call!(Punctuated::parse_terminated) >>
-                middle: option!(do_parse!(
-                    dots: punct!(..) >>
-                    trailing: option!(punct!(,)) >>
-                    (dots, trailing)
-                )) >>
-                after: cond!(
-                    match middle {
-                        Some((_, ref trailing)) => trailing.is_some(),
-                        _ => false,
-                    },
-                    Punctuated::parse_terminated
-                ) >>
-                (before, middle, after)
-            )),
-            |(brackets, (before, middle, after))| {
-                let mut before: Punctuated<Pat, Token![,]> = before;
-                let after: Option<Punctuated<Pat, Token![,]>> = after;
-                let middle: Option<(Token![..], Option<Token![,]>)> = middle;
-                PatSlice {
-                    dot2_token: middle.as_ref().map(|m| Token![..]((m.0).0)),
-                    comma_token: middle.as_ref().and_then(|m| {
-                        m.1.as_ref().map(|m| Token![,](m.0))
-                    }),
-                    bracket_token: brackets,
-                    middle: middle.and_then(|_| {
-                        if before.empty_or_trailing() {
-                            None
-                        } else {
-                            Some(Box::new(before.pop().unwrap().into_value()))
-                        }
-                    }),
-                    front: before,
-                    back: after.unwrap_or_default(),
-                }
-            }
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("slice pattern")
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl Synom for PatMacro {
-        named!(parse -> Self, map!(syn!(Macro), |mac| PatMacro { mac: mac }));
-
-        fn description() -> Option<&'static str> {
-            Some("macro pattern")
-        }
-    }
-}
-
-#[cfg(feature = "printing")]
-mod printing {
-    use super::*;
-    #[cfg(feature = "full")]
-    use attr::FilterAttrs;
-    use proc_macro2::{Literal, TokenStream};
-    use quote::{ToTokens, TokenStreamExt};
-
-    // If the given expression is a bare `ExprStruct`, wraps it in parentheses
-    // before appending it to `TokenStream`.
-    #[cfg(feature = "full")]
-    fn wrap_bare_struct(tokens: &mut TokenStream, e: &Expr) {
-        if let Expr::Struct(_) = *e {
-            token::Paren::default().surround(tokens, |tokens| {
-                e.to_tokens(tokens);
-            });
-        } else {
-            e.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    fn outer_attrs_to_tokens(attrs: &[Attribute], tokens: &mut TokenStream) {
-        tokens.append_all(attrs.outer());
-    }
-
-    #[cfg(feature = "full")]
-    fn inner_attrs_to_tokens(attrs: &[Attribute], tokens: &mut TokenStream) {
-        tokens.append_all(attrs.inner());
-    }
-
-    #[cfg(not(feature = "full"))]
-    fn outer_attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {}
-
-    #[cfg(not(feature = "full"))]
-    fn inner_attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {}
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprBox {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.box_token.to_tokens(tokens);
-            self.expr.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprInPlace {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.place.to_tokens(tokens);
-            self.arrow_token.to_tokens(tokens);
-            self.value.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprArray {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.bracket_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                self.elems.to_tokens(tokens);
-            })
-        }
-    }
-
-    impl ToTokens for ExprCall {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.func.to_tokens(tokens);
-            self.paren_token.surround(tokens, |tokens| {
-                self.args.to_tokens(tokens);
-            })
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprMethodCall {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.receiver.to_tokens(tokens);
-            self.dot_token.to_tokens(tokens);
-            self.method.to_tokens(tokens);
-            self.turbofish.to_tokens(tokens);
-            self.paren_token.surround(tokens, |tokens| {
-                self.args.to_tokens(tokens);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for MethodTurbofish {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.colon2_token.to_tokens(tokens);
-            self.lt_token.to_tokens(tokens);
-            self.args.to_tokens(tokens);
-            self.gt_token.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for GenericMethodArgument {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            match *self {
-                GenericMethodArgument::Type(ref t) => t.to_tokens(tokens),
-                GenericMethodArgument::Const(ref c) => c.to_tokens(tokens),
-            }
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprTuple {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.paren_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                self.elems.to_tokens(tokens);
-                // If we only have one argument, we need a trailing comma to
-                // distinguish ExprTuple from ExprParen.
-                if self.elems.len() == 1 && !self.elems.trailing_punct() {
-                    <Token![,]>::default().to_tokens(tokens);
-                }
-            })
-        }
-    }
-
-    impl ToTokens for ExprBinary {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.left.to_tokens(tokens);
-            self.op.to_tokens(tokens);
-            self.right.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for ExprUnary {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.op.to_tokens(tokens);
-            self.expr.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for ExprLit {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.lit.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for ExprCast {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.expr.to_tokens(tokens);
-            self.as_token.to_tokens(tokens);
-            self.ty.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprType {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.expr.to_tokens(tokens);
-            self.colon_token.to_tokens(tokens);
-            self.ty.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    fn maybe_wrap_else(tokens: &mut TokenStream, else_: &Option<(Token![else], Box<Expr>)>) {
-        if let Some((ref else_token, ref else_)) = *else_ {
-            else_token.to_tokens(tokens);
-
-            // If we are not one of the valid expressions to exist in an else
-            // clause, wrap ourselves in a block.
-            match **else_ {
-                Expr::If(_) | Expr::IfLet(_) | Expr::Block(_) => {
-                    else_.to_tokens(tokens);
-                }
-                _ => {
-                    token::Brace::default().surround(tokens, |tokens| {
-                        else_.to_tokens(tokens);
-                    });
-                }
-            }
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprIf {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.if_token.to_tokens(tokens);
-            wrap_bare_struct(tokens, &self.cond);
-            self.then_branch.to_tokens(tokens);
-            maybe_wrap_else(tokens, &self.else_branch);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprIfLet {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.if_token.to_tokens(tokens);
-            self.let_token.to_tokens(tokens);
-            self.pats.to_tokens(tokens);
-            self.eq_token.to_tokens(tokens);
-            wrap_bare_struct(tokens, &self.expr);
-            self.then_branch.to_tokens(tokens);
-            maybe_wrap_else(tokens, &self.else_branch);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprWhile {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.label.to_tokens(tokens);
-            self.while_token.to_tokens(tokens);
-            wrap_bare_struct(tokens, &self.cond);
-            self.body.brace_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                tokens.append_all(&self.body.stmts);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprWhileLet {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.label.to_tokens(tokens);
-            self.while_token.to_tokens(tokens);
-            self.let_token.to_tokens(tokens);
-            self.pats.to_tokens(tokens);
-            self.eq_token.to_tokens(tokens);
-            wrap_bare_struct(tokens, &self.expr);
-            self.body.brace_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                tokens.append_all(&self.body.stmts);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprForLoop {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.label.to_tokens(tokens);
-            self.for_token.to_tokens(tokens);
-            self.pat.to_tokens(tokens);
-            self.in_token.to_tokens(tokens);
-            wrap_bare_struct(tokens, &self.expr);
-            self.body.brace_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                tokens.append_all(&self.body.stmts);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprLoop {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.label.to_tokens(tokens);
-            self.loop_token.to_tokens(tokens);
-            self.body.brace_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                tokens.append_all(&self.body.stmts);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprMatch {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.match_token.to_tokens(tokens);
-            wrap_bare_struct(tokens, &self.expr);
-            self.brace_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                for (i, arm) in self.arms.iter().enumerate() {
-                    arm.to_tokens(tokens);
-                    // Ensure that we have a comma after a non-block arm, except
-                    // for the last one.
-                    let is_last = i == self.arms.len() - 1;
-                    if !is_last && arm_expr_requires_comma(&arm.body) && arm.comma.is_none() {
-                        <Token![,]>::default().to_tokens(tokens);
-                    }
-                }
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprCatch {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.do_token.to_tokens(tokens);
-            self.catch_token.to_tokens(tokens);
-            self.block.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprYield {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.yield_token.to_tokens(tokens);
-            self.expr.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprClosure {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.movability.to_tokens(tokens);
-            self.capture.to_tokens(tokens);
-            self.or1_token.to_tokens(tokens);
-            for input in self.inputs.pairs() {
-                match **input.value() {
-                    FnArg::Captured(ArgCaptured {
-                        ref pat,
-                        ty: Type::Infer(_),
-                        ..
-                    }) => {
-                        pat.to_tokens(tokens);
-                    }
-                    _ => input.value().to_tokens(tokens),
-                }
-                input.punct().to_tokens(tokens);
-            }
-            self.or2_token.to_tokens(tokens);
-            self.output.to_tokens(tokens);
-            self.body.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprUnsafe {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.unsafe_token.to_tokens(tokens);
-            self.block.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprBlock {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.block.brace_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                tokens.append_all(&self.block.stmts);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprAssign {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.left.to_tokens(tokens);
-            self.eq_token.to_tokens(tokens);
-            self.right.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprAssignOp {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.left.to_tokens(tokens);
-            self.op.to_tokens(tokens);
-            self.right.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for ExprField {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.base.to_tokens(tokens);
-            self.dot_token.to_tokens(tokens);
-            self.member.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for Member {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            match *self {
-                Member::Named(ref ident) => ident.to_tokens(tokens),
-                Member::Unnamed(ref index) => index.to_tokens(tokens),
-            }
-        }
-    }
-
-    impl ToTokens for Index {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            let mut lit = Literal::i64_unsuffixed(i64::from(self.index));
-            lit.set_span(self.span);
-            tokens.append(lit);
-        }
-    }
-
-    impl ToTokens for ExprIndex {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.expr.to_tokens(tokens);
-            self.bracket_token.surround(tokens, |tokens| {
-                self.index.to_tokens(tokens);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprRange {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.from.to_tokens(tokens);
-            match self.limits {
-                RangeLimits::HalfOpen(ref t) => t.to_tokens(tokens),
-                RangeLimits::Closed(ref t) => t.to_tokens(tokens),
-            }
-            self.to.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for ExprPath {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            ::PathTokens(&self.qself, &self.path).to_tokens(tokens)
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprReference {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.and_token.to_tokens(tokens);
-            self.mutability.to_tokens(tokens);
-            self.expr.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprBreak {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.break_token.to_tokens(tokens);
-            self.label.to_tokens(tokens);
-            self.expr.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprContinue {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.continue_token.to_tokens(tokens);
-            self.label.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprReturn {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.return_token.to_tokens(tokens);
-            self.expr.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprMacro {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.mac.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprStruct {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.path.to_tokens(tokens);
-            self.brace_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                self.fields.to_tokens(tokens);
-                if self.rest.is_some() {
-                    TokensOrDefault(&self.dot2_token).to_tokens(tokens);
-                    self.rest.to_tokens(tokens);
-                }
-            })
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprRepeat {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.bracket_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                self.expr.to_tokens(tokens);
-                self.semi_token.to_tokens(tokens);
-                self.len.to_tokens(tokens);
-            })
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprGroup {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.group_token.surround(tokens, |tokens| {
-                self.expr.to_tokens(tokens);
-            });
-        }
-    }
-
-    impl ToTokens for ExprParen {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.paren_token.surround(tokens, |tokens| {
-                inner_attrs_to_tokens(&self.attrs, tokens);
-                self.expr.to_tokens(tokens);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for ExprTry {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.expr.to_tokens(tokens);
-            self.question_token.to_tokens(tokens);
-        }
-    }
-
-    impl ToTokens for ExprVerbatim {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.tts.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for Label {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.name.to_tokens(tokens);
-            self.colon_token.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for FieldValue {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.member.to_tokens(tokens);
-            if let Some(ref colon_token) = self.colon_token {
-                colon_token.to_tokens(tokens);
-                self.expr.to_tokens(tokens);
-            }
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for Arm {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            tokens.append_all(&self.attrs);
-            self.leading_vert.to_tokens(tokens);
-            self.pats.to_tokens(tokens);
-            if let Some((ref if_token, ref guard)) = self.guard {
-                if_token.to_tokens(tokens);
-                guard.to_tokens(tokens);
-            }
-            self.fat_arrow_token.to_tokens(tokens);
-            self.body.to_tokens(tokens);
-            self.comma.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatWild {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.underscore_token.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatIdent {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.by_ref.to_tokens(tokens);
-            self.mutability.to_tokens(tokens);
-            self.ident.to_tokens(tokens);
-            if let Some((ref at_token, ref subpat)) = self.subpat {
-                at_token.to_tokens(tokens);
-                subpat.to_tokens(tokens);
-            }
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatStruct {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.path.to_tokens(tokens);
-            self.brace_token.surround(tokens, |tokens| {
-                self.fields.to_tokens(tokens);
-                // NOTE: We need a comma before the dot2 token if it is present.
-                if !self.fields.empty_or_trailing() && self.dot2_token.is_some() {
-                    <Token![,]>::default().to_tokens(tokens);
-                }
-                self.dot2_token.to_tokens(tokens);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatTupleStruct {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.path.to_tokens(tokens);
-            self.pat.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatPath {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            ::PathTokens(&self.qself, &self.path).to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatTuple {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.paren_token.surround(tokens, |tokens| {
-                self.front.to_tokens(tokens);
-                if let Some(ref dot2_token) = self.dot2_token {
-                    if !self.front.empty_or_trailing() {
-                        // Ensure there is a comma before the .. token.
-                        <Token![,]>::default().to_tokens(tokens);
-                    }
-                    dot2_token.to_tokens(tokens);
-                    self.comma_token.to_tokens(tokens);
-                    if self.comma_token.is_none() && !self.back.is_empty() {
-                        // Ensure there is a comma after the .. token.
-                        <Token![,]>::default().to_tokens(tokens);
-                    }
-                }
-                self.back.to_tokens(tokens);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatBox {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.box_token.to_tokens(tokens);
-            self.pat.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatRef {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.and_token.to_tokens(tokens);
-            self.mutability.to_tokens(tokens);
-            self.pat.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatLit {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.expr.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatRange {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.lo.to_tokens(tokens);
-            match self.limits {
-                RangeLimits::HalfOpen(ref t) => t.to_tokens(tokens),
-                RangeLimits::Closed(ref t) => Token![...](t.0).to_tokens(tokens),
-            }
-            self.hi.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatSlice {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            // XXX: This is a mess, and it will be so easy to screw it up. How
-            // do we make this correct itself better?
-            self.bracket_token.surround(tokens, |tokens| {
-                self.front.to_tokens(tokens);
-
-                // If we need a comma before the middle or standalone .. token,
-                // then make sure it's present.
-                if !self.front.empty_or_trailing()
-                    && (self.middle.is_some() || self.dot2_token.is_some())
-                {
-                    <Token![,]>::default().to_tokens(tokens);
-                }
-
-                // If we have an identifier, we always need a .. token.
-                if self.middle.is_some() {
-                    self.middle.to_tokens(tokens);
-                    TokensOrDefault(&self.dot2_token).to_tokens(tokens);
-                } else if self.dot2_token.is_some() {
-                    self.dot2_token.to_tokens(tokens);
-                }
-
-                // Make sure we have a comma before the back half.
-                if !self.back.is_empty() {
-                    TokensOrDefault(&self.comma_token).to_tokens(tokens);
-                    self.back.to_tokens(tokens);
-                } else {
-                    self.comma_token.to_tokens(tokens);
-                }
-            })
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatMacro {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.mac.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for PatVerbatim {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.tts.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for FieldPat {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            if let Some(ref colon_token) = self.colon_token {
-                self.member.to_tokens(tokens);
-                colon_token.to_tokens(tokens);
-            }
-            self.pat.to_tokens(tokens);
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for Block {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            self.brace_token.surround(tokens, |tokens| {
-                tokens.append_all(&self.stmts);
-            });
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for Stmt {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            match *self {
-                Stmt::Local(ref local) => local.to_tokens(tokens),
-                Stmt::Item(ref item) => item.to_tokens(tokens),
-                Stmt::Expr(ref expr) => expr.to_tokens(tokens),
-                Stmt::Semi(ref expr, ref semi) => {
-                    expr.to_tokens(tokens);
-                    semi.to_tokens(tokens);
-                }
-            }
-        }
-    }
-
-    #[cfg(feature = "full")]
-    impl ToTokens for Local {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            outer_attrs_to_tokens(&self.attrs, tokens);
-            self.let_token.to_tokens(tokens);
-            self.pats.to_tokens(tokens);
-            if let Some((ref colon_token, ref ty)) = self.ty {
-                colon_token.to_tokens(tokens);
-                ty.to_tokens(tokens);
-            }
-            if let Some((ref eq_token, ref init)) = self.init {
-                eq_token.to_tokens(tokens);
-                init.to_tokens(tokens);
-            }
-            self.semi_token.to_tokens(tokens);
-        }
-    }
-}
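The printing impls deleted above all follow one pattern from the quote crate: each AST node implements ToTokens and re-emits the punctuation it stored at parse time, synthesizing a default token (e.g. `<Token![,]>::default()`) when the source omitted one but printing requires it. A minimal sketch of that pattern against the public proc-macro2/quote/syn APIs, assuming syn 0.15 with its default "printing" feature; the `KeyValue` type is purely illustrative and not part of the vendored source:

    use proc_macro2::TokenStream;
    use quote::ToTokens;

    // Illustrative node: an identifier optionally followed by `: expr`.
    struct KeyValue {
        key: syn::Ident,
        value: Option<(syn::token::Colon, syn::Expr)>,
    }

    impl ToTokens for KeyValue {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Emit the identifier, then the optional `: expr` suffix only if it
            // was present, mirroring how the removed impls handle Option fields.
            self.key.to_tokens(tokens);
            if let Some((ref colon, ref expr)) = self.value {
                colon.to_tokens(tokens);
                expr.to_tokens(tokens);
            }
        }
    }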
diff --git a/rustc_deps/vendor/syn-0.14.9/src/file.rs b/rustc_deps/vendor/syn-0.14.9/src/file.rs
deleted file mode 100644
index 9b5b11f..0000000
--- a/rustc_deps/vendor/syn-0.14.9/src/file.rs
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright 2018 Syn Developers
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use super::*;
-
-ast_struct! {
-    /// A complete file of Rust source code.
-    ///
-    /// *This type is available if Syn is built with the `"full"` feature.*
-    ///
-    /// # Example
-    ///
-    /// Parse a Rust source file into a `syn::File` and print out a debug
-    /// representation of the syntax tree.
-    ///
-    /// ```
-    /// extern crate syn;
-    ///
-    /// use std::env;
-    /// use std::fs::File;
-    /// use std::io::Read;
-    /// use std::process;
-    ///
-    /// fn main() {
-    /// # }
-    /// #
-    /// # fn fake_main() {
-    ///     let mut args = env::args();
-    ///     let _ = args.next(); // executable name
-    ///
-    ///     let filename = match (args.next(), args.next()) {
-    ///         (Some(filename), None) => filename,
-    ///         _ => {
-    ///             eprintln!("Usage: dump-syntax path/to/filename.rs");
-    ///             process::exit(1);
-    ///         }
-    ///     };
-    ///
-    ///     let mut file = File::open(&filename).expect("Unable to open file");
-    ///
-    ///     let mut src = String::new();
-    ///     file.read_to_string(&mut src).expect("Unable to read file");
-    ///
-    ///     let syntax = syn::parse_file(&src).expect("Unable to parse file");
-    ///     println!("{:#?}", syntax);
-    /// }
-    /// ```
-    ///
-    /// Running with its own source code as input, this program prints output
-    /// that begins with:
-    ///
-    /// ```text
-    /// File {
-    ///     shebang: None,
-    ///     attrs: [],
-    ///     items: [
-    ///         ExternCrate(
-    ///             ItemExternCrate {
-    ///                 attrs: [],
-    ///                 vis: Inherited,
-    ///                 extern_token: Extern,
-    ///                 crate_token: Crate,
-    ///                 ident: Ident {
-    ///                     term: Term(
-    ///                         "syn"
-    ///                     ),
-    ///                     span: Span
-    ///                 },
-    ///                 rename: None,
-    ///                 semi_token: Semi
-    ///             }
-    ///         ),
-    /// ...
-    /// ```
-    pub struct File {
-        pub shebang: Option<String>,
-        pub attrs: Vec<Attribute>,
-        pub items: Vec<Item>,
-    }
-}
-
-#[cfg(feature = "parsing")]
-pub mod parsing {
-    use super::*;
-
-    use synom::Synom;
-
-    impl Synom for File {
-        named!(parse -> Self, do_parse!(
-            attrs: many0!(Attribute::parse_inner) >>
-            items: many0!(Item::parse) >>
-            (File {
-                shebang: None,
-                attrs: attrs,
-                items: items,
-            })
-        ));
-
-        fn description() -> Option<&'static str> {
-            Some("crate")
-        }
-    }
-}
-
-#[cfg(feature = "printing")]
-mod printing {
-    use super::*;
-    use attr::FilterAttrs;
-    use proc_macro2::TokenStream;
-    use quote::{ToTokens, TokenStreamExt};
-
-    impl ToTokens for File {
-        fn to_tokens(&self, tokens: &mut TokenStream) {
-            tokens.append_all(self.attrs.inner());
-            tokens.append_all(&self.items);
-        }
-    }
-}
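The doc example removed with file.rs describes an entry point that survives unchanged in syn 0.15: `syn::parse_file` turns a source string into a `syn::File`. A compacted sketch of the same flow, assuming syn is built with the "full" and "parsing" features as the removed doc comment notes; the path is a placeholder, and since Debug output on the tree is feature-gated in syn, this version just counts items:

    use std::fs;

    fn main() {
        // Read and parse a Rust source file; the path is illustrative only.
        let src = fs::read_to_string("src/lib.rs").expect("unable to read file");
        let syntax: syn::File = syn::parse_file(&src).expect("unable to parse file");
        println!("parsed {} top-level items", syntax.items.len());
    }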
diff --git a/rustc_deps/vendor/syn-0.14.9/src/gen/fold.rs b/rustc_deps/vendor/syn-0.14.9/src/gen/fold.rs
deleted file mode 100644
index 54eb51a..0000000
--- a/rustc_deps/vendor/syn-0.14.9/src/gen/fold.rs
+++ /dev/null
@@ -1,2948 +0,0 @@
-// THIS FILE IS AUTOMATICALLY GENERATED; DO NOT EDIT
-
-#![allow(unreachable_code)]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
-#[cfg(any(feature = "full", feature = "derive"))]
-use gen::helper::fold::*;
-use proc_macro2::Span;
-#[cfg(any(feature = "full", feature = "derive"))]
-use token::{Brace, Bracket, Group, Paren};
-use *;
-#[cfg(feature = "full")]
-macro_rules! full {
-    ($e:expr) => {
-        $e
-    };
-}
-#[cfg(all(feature = "derive", not(feature = "full")))]
-macro_rules! full {
-    ($e:expr) => {
-        unreachable!()
-    };
-}
-#[doc = r" Syntax tree traversal to transform the nodes of an owned syntax tree."]
-#[doc = r""]
-#[doc = r" See the [module documentation] for details."]
-#[doc = r""]
-#[doc = r" [module documentation]: index.html"]
-#[doc = r""]
-#[doc = r#" *This trait is available if Syn is built with the `"fold"` feature.*"#]
-pub trait Fold {
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_abi(&mut self, i: Abi) -> Abi {
-        fold_abi(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_angle_bracketed_generic_arguments(
-        &mut self,
-        i: AngleBracketedGenericArguments,
-    ) -> AngleBracketedGenericArguments {
-        fold_angle_bracketed_generic_arguments(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_arg_captured(&mut self, i: ArgCaptured) -> ArgCaptured {
-        fold_arg_captured(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_arg_self(&mut self, i: ArgSelf) -> ArgSelf {
-        fold_arg_self(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_arg_self_ref(&mut self, i: ArgSelfRef) -> ArgSelfRef {
-        fold_arg_self_ref(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    #[cfg(feature = "full")]
-    fn fold_arm(&mut self, i: Arm) -> Arm {
-        fold_arm(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_attr_style(&mut self, i: AttrStyle) -> AttrStyle {
-        fold_attr_style(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_attribute(&mut self, i: Attribute) -> Attribute {
-        fold_attribute(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_bare_fn_arg(&mut self, i: BareFnArg) -> BareFnArg {
-        fold_bare_fn_arg(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_bare_fn_arg_name(&mut self, i: BareFnArgName) -> BareFnArgName {
-        fold_bare_fn_arg_name(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_bin_op(&mut self, i: BinOp) -> BinOp {
-        fold_bin_op(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_binding(&mut self, i: Binding) -> Binding {
-        fold_binding(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    #[cfg(feature = "full")]
-    fn fold_block(&mut self, i: Block) -> Block {
-        fold_block(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_bound_lifetimes(&mut self, i: BoundLifetimes) -> BoundLifetimes {
-        fold_bound_lifetimes(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_const_param(&mut self, i: ConstParam) -> ConstParam {
-        fold_const_param(self, i)
-    }
-    #[cfg(feature = "derive")]
-    fn fold_data(&mut self, i: Data) -> Data {
-        fold_data(self, i)
-    }
-    #[cfg(feature = "derive")]
-    fn fold_data_enum(&mut self, i: DataEnum) -> DataEnum {
-        fold_data_enum(self, i)
-    }
-    #[cfg(feature = "derive")]
-    fn fold_data_struct(&mut self, i: DataStruct) -> DataStruct {
-        fold_data_struct(self, i)
-    }
-    #[cfg(feature = "derive")]
-    fn fold_data_union(&mut self, i: DataUnion) -> DataUnion {
-        fold_data_union(self, i)
-    }
-    #[cfg(feature = "derive")]
-    fn fold_derive_input(&mut self, i: DeriveInput) -> DeriveInput {
-        fold_derive_input(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr(&mut self, i: Expr) -> Expr {
-        fold_expr(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_array(&mut self, i: ExprArray) -> ExprArray {
-        fold_expr_array(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_assign(&mut self, i: ExprAssign) -> ExprAssign {
-        fold_expr_assign(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_assign_op(&mut self, i: ExprAssignOp) -> ExprAssignOp {
-        fold_expr_assign_op(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_binary(&mut self, i: ExprBinary) -> ExprBinary {
-        fold_expr_binary(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_block(&mut self, i: ExprBlock) -> ExprBlock {
-        fold_expr_block(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_box(&mut self, i: ExprBox) -> ExprBox {
-        fold_expr_box(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_break(&mut self, i: ExprBreak) -> ExprBreak {
-        fold_expr_break(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_call(&mut self, i: ExprCall) -> ExprCall {
-        fold_expr_call(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_cast(&mut self, i: ExprCast) -> ExprCast {
-        fold_expr_cast(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_catch(&mut self, i: ExprCatch) -> ExprCatch {
-        fold_expr_catch(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_closure(&mut self, i: ExprClosure) -> ExprClosure {
-        fold_expr_closure(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_continue(&mut self, i: ExprContinue) -> ExprContinue {
-        fold_expr_continue(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_field(&mut self, i: ExprField) -> ExprField {
-        fold_expr_field(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_for_loop(&mut self, i: ExprForLoop) -> ExprForLoop {
-        fold_expr_for_loop(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_group(&mut self, i: ExprGroup) -> ExprGroup {
-        fold_expr_group(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_if(&mut self, i: ExprIf) -> ExprIf {
-        fold_expr_if(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_if_let(&mut self, i: ExprIfLet) -> ExprIfLet {
-        fold_expr_if_let(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_in_place(&mut self, i: ExprInPlace) -> ExprInPlace {
-        fold_expr_in_place(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_index(&mut self, i: ExprIndex) -> ExprIndex {
-        fold_expr_index(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_lit(&mut self, i: ExprLit) -> ExprLit {
-        fold_expr_lit(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_loop(&mut self, i: ExprLoop) -> ExprLoop {
-        fold_expr_loop(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_macro(&mut self, i: ExprMacro) -> ExprMacro {
-        fold_expr_macro(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_match(&mut self, i: ExprMatch) -> ExprMatch {
-        fold_expr_match(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_method_call(&mut self, i: ExprMethodCall) -> ExprMethodCall {
-        fold_expr_method_call(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_paren(&mut self, i: ExprParen) -> ExprParen {
-        fold_expr_paren(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_path(&mut self, i: ExprPath) -> ExprPath {
-        fold_expr_path(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_range(&mut self, i: ExprRange) -> ExprRange {
-        fold_expr_range(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_reference(&mut self, i: ExprReference) -> ExprReference {
-        fold_expr_reference(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_repeat(&mut self, i: ExprRepeat) -> ExprRepeat {
-        fold_expr_repeat(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_return(&mut self, i: ExprReturn) -> ExprReturn {
-        fold_expr_return(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_struct(&mut self, i: ExprStruct) -> ExprStruct {
-        fold_expr_struct(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_try(&mut self, i: ExprTry) -> ExprTry {
-        fold_expr_try(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_tuple(&mut self, i: ExprTuple) -> ExprTuple {
-        fold_expr_tuple(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_type(&mut self, i: ExprType) -> ExprType {
-        fold_expr_type(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_unary(&mut self, i: ExprUnary) -> ExprUnary {
-        fold_expr_unary(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_unsafe(&mut self, i: ExprUnsafe) -> ExprUnsafe {
-        fold_expr_unsafe(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_verbatim(&mut self, i: ExprVerbatim) -> ExprVerbatim {
-        fold_expr_verbatim(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_while(&mut self, i: ExprWhile) -> ExprWhile {
-        fold_expr_while(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_while_let(&mut self, i: ExprWhileLet) -> ExprWhileLet {
-        fold_expr_while_let(self, i)
-    }
-    #[cfg(feature = "full")]
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_expr_yield(&mut self, i: ExprYield) -> ExprYield {
-        fold_expr_yield(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_field(&mut self, i: Field) -> Field {
-        fold_field(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    #[cfg(feature = "full")]
-    fn fold_field_pat(&mut self, i: FieldPat) -> FieldPat {
-        fold_field_pat(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    #[cfg(feature = "full")]
-    fn fold_field_value(&mut self, i: FieldValue) -> FieldValue {
-        fold_field_value(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_fields(&mut self, i: Fields) -> Fields {
-        fold_fields(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_fields_named(&mut self, i: FieldsNamed) -> FieldsNamed {
-        fold_fields_named(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_fields_unnamed(&mut self, i: FieldsUnnamed) -> FieldsUnnamed {
-        fold_fields_unnamed(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_file(&mut self, i: File) -> File {
-        fold_file(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_fn_arg(&mut self, i: FnArg) -> FnArg {
-        fold_fn_arg(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_fn_decl(&mut self, i: FnDecl) -> FnDecl {
-        fold_fn_decl(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_foreign_item(&mut self, i: ForeignItem) -> ForeignItem {
-        fold_foreign_item(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_foreign_item_fn(&mut self, i: ForeignItemFn) -> ForeignItemFn {
-        fold_foreign_item_fn(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_foreign_item_static(&mut self, i: ForeignItemStatic) -> ForeignItemStatic {
-        fold_foreign_item_static(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_foreign_item_type(&mut self, i: ForeignItemType) -> ForeignItemType {
-        fold_foreign_item_type(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_foreign_item_verbatim(&mut self, i: ForeignItemVerbatim) -> ForeignItemVerbatim {
-        fold_foreign_item_verbatim(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_generic_argument(&mut self, i: GenericArgument) -> GenericArgument {
-        fold_generic_argument(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    #[cfg(feature = "full")]
-    fn fold_generic_method_argument(&mut self, i: GenericMethodArgument) -> GenericMethodArgument {
-        fold_generic_method_argument(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_generic_param(&mut self, i: GenericParam) -> GenericParam {
-        fold_generic_param(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_generics(&mut self, i: Generics) -> Generics {
-        fold_generics(self, i)
-    }
-    fn fold_ident(&mut self, i: Ident) -> Ident {
-        fold_ident(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_impl_item(&mut self, i: ImplItem) -> ImplItem {
-        fold_impl_item(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_impl_item_const(&mut self, i: ImplItemConst) -> ImplItemConst {
-        fold_impl_item_const(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_impl_item_macro(&mut self, i: ImplItemMacro) -> ImplItemMacro {
-        fold_impl_item_macro(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_impl_item_method(&mut self, i: ImplItemMethod) -> ImplItemMethod {
-        fold_impl_item_method(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_impl_item_type(&mut self, i: ImplItemType) -> ImplItemType {
-        fold_impl_item_type(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_impl_item_verbatim(&mut self, i: ImplItemVerbatim) -> ImplItemVerbatim {
-        fold_impl_item_verbatim(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_index(&mut self, i: Index) -> Index {
-        fold_index(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item(&mut self, i: Item) -> Item {
-        fold_item(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_const(&mut self, i: ItemConst) -> ItemConst {
-        fold_item_const(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_enum(&mut self, i: ItemEnum) -> ItemEnum {
-        fold_item_enum(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_extern_crate(&mut self, i: ItemExternCrate) -> ItemExternCrate {
-        fold_item_extern_crate(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_fn(&mut self, i: ItemFn) -> ItemFn {
-        fold_item_fn(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_foreign_mod(&mut self, i: ItemForeignMod) -> ItemForeignMod {
-        fold_item_foreign_mod(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_impl(&mut self, i: ItemImpl) -> ItemImpl {
-        fold_item_impl(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_macro(&mut self, i: ItemMacro) -> ItemMacro {
-        fold_item_macro(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_macro2(&mut self, i: ItemMacro2) -> ItemMacro2 {
-        fold_item_macro2(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_mod(&mut self, i: ItemMod) -> ItemMod {
-        fold_item_mod(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_static(&mut self, i: ItemStatic) -> ItemStatic {
-        fold_item_static(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_struct(&mut self, i: ItemStruct) -> ItemStruct {
-        fold_item_struct(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_trait(&mut self, i: ItemTrait) -> ItemTrait {
-        fold_item_trait(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_type(&mut self, i: ItemType) -> ItemType {
-        fold_item_type(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_union(&mut self, i: ItemUnion) -> ItemUnion {
-        fold_item_union(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_use(&mut self, i: ItemUse) -> ItemUse {
-        fold_item_use(self, i)
-    }
-    #[cfg(feature = "full")]
-    fn fold_item_verbatim(&mut self, i: ItemVerbatim) -> ItemVerbatim {
-        fold_item_verbatim(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    #[cfg(feature = "full")]
-    fn fold_label(&mut self, i: Label) -> Label {
-        fold_label(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_lifetime(&mut self, i: Lifetime) -> Lifetime {
-        fold_lifetime(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef {
-        fold_lifetime_def(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_lit(&mut self, i: Lit) -> Lit {
-        fold_lit(self, i)
-    }
-    #[cfg(any(feature = "full", feature = "derive"))]
-    fn fold_lit_bool(&mut self, i: LitBool) -> LitBool {